hexsha stringlengths 40 40 | size int64 4 996k | ext stringclasses 8
values | lang stringclasses 1
value | max_stars_repo_path stringlengths 4 245 | max_stars_repo_name stringlengths 6 130 | max_stars_repo_head_hexsha stringlengths 40 40 | max_stars_repo_licenses listlengths 1 10 | max_stars_count int64 1 191k ⌀ | max_stars_repo_stars_event_min_datetime stringlengths 24 24 ⌀ | max_stars_repo_stars_event_max_datetime stringlengths 24 24 ⌀ | max_issues_repo_path stringlengths 4 245 | max_issues_repo_name stringlengths 6 130 | max_issues_repo_head_hexsha stringlengths 40 40 | max_issues_repo_licenses listlengths 1 10 | max_issues_count int64 1 67k ⌀ | max_issues_repo_issues_event_min_datetime stringlengths 24 24 ⌀ | max_issues_repo_issues_event_max_datetime stringlengths 24 24 ⌀ | max_forks_repo_path stringlengths 4 245 | max_forks_repo_name stringlengths 6 130 | max_forks_repo_head_hexsha stringlengths 40 40 | max_forks_repo_licenses listlengths 1 10 | max_forks_count int64 1 105k ⌀ | max_forks_repo_forks_event_min_datetime stringlengths 24 24 ⌀ | max_forks_repo_forks_event_max_datetime stringlengths 24 24 ⌀ | content stringlengths 4 996k | avg_line_length float64 1.33 58.2k | max_line_length int64 2 323k | alphanum_fraction float64 0 0.97 | content_no_comment stringlengths 0 946k | is_comment_constant_removed bool 2
classes | is_sharp_comment_removed bool 1
class |
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
f720a75e584185882c002770a51c3e80de659a39 | 1,945 | py | Python | src/slamcore_ros2_examples/python/slamcore_ros2_examples/xacro_file_contents.py | slamcore/slamcore-ros2-examples | f101a277d7bbf07e081b89ca8efb77110abc2110 | [
"BSD-3-Clause"
] | 1 | 2022-01-31T16:00:39.000Z | 2022-01-31T16:00:39.000Z | src/slamcore_ros2_examples/python/slamcore_ros2_examples/xacro_file_contents.py | slamcore/slamcore-ros2-examples | f101a277d7bbf07e081b89ca8efb77110abc2110 | [
"BSD-3-Clause"
] | null | null | null | src/slamcore_ros2_examples/python/slamcore_ros2_examples/xacro_file_contents.py | slamcore/slamcore-ros2-examples | f101a277d7bbf07e081b89ca8efb77110abc2110 | [
"BSD-3-Clause"
] | null | null | null | """Module for the XacroFile substitution class."""
from pathlib import Path
from typing import Text, cast
import xacro
from launch.launch_context import LaunchContext
from launch.some_substitutions_type import SomeSubstitutionsType
from launch.substitution import Substitution
from launch.substitutions import SubstitutionFailure
from launch.utilities import normalize_to_list_of_substitutions
class XacroFileContents(Substitution):
"""
Reads the xacro file provided and returns its context during evalution.
"""
name = "XacroFileContents"
def __init__(self, substitution: SomeSubstitutionsType) -> None:
"""Create a class instance."""
self.__substitution = normalize_to_list_of_substitutions((substitution,))[0] # type: ignore
@property
def substitution(self) -> Substitution:
"""Getter."""
return self.__substitution
def describe(self) -> Text:
"""Return a description of this substitution as a string."""
return f"{self.name}({self.substitution.describe()})"
@classmethod
def read_xacro(cls, path: Path) -> str:
"""Read the xacro contents and return the corresponding string."""
doc = xacro.process_file(path)
xacro_contents = doc.toprettyxml(indent=" ") # type: ignore
return cast(str, xacro_contents)
def perform(self, context: LaunchContext) -> Text:
"""Perform the substitution - return the contents of the given xacro file."""
path = Path(self.substitution.perform(context))
if not path.is_file():
raise SubstitutionFailure(f"Not a file: {path.absolute()}")
xacro_contents = self.read_xacro(path)
# I have to escape double quotes, then double quote the whole string so that the YAML
# parser is happy
xacro_contents = xacro_contents.replace('"', '\\"')
xacro_contents = f'"{xacro_contents}"'
return xacro_contents
| 36.018519 | 100 | 0.69563 |
from pathlib import Path
from typing import Text, cast
import xacro
from launch.launch_context import LaunchContext
from launch.some_substitutions_type import SomeSubstitutionsType
from launch.substitution import Substitution
from launch.substitutions import SubstitutionFailure
from launch.utilities import normalize_to_list_of_substitutions
class XacroFileContents(Substitution):
name = "XacroFileContents"
def __init__(self, substitution: SomeSubstitutionsType) -> None:
self.__substitution = normalize_to_list_of_substitutions((substitution,))[0]
@property
def substitution(self) -> Substitution:
return self.__substitution
def describe(self) -> Text:
return f"{self.name}({self.substitution.describe()})"
@classmethod
def read_xacro(cls, path: Path) -> str:
doc = xacro.process_file(path)
xacro_contents = doc.toprettyxml(indent=" ")
return cast(str, xacro_contents)
def perform(self, context: LaunchContext) -> Text:
path = Path(self.substitution.perform(context))
if not path.is_file():
raise SubstitutionFailure(f"Not a file: {path.absolute()}")
xacro_contents = self.read_xacro(path)
xacro_contents = xacro_contents.replace('"', '\\"')
xacro_contents = f'"{xacro_contents}"'
return xacro_contents
| true | true |
f720a8cc460fe602c10d1ad4a160e2c5c625a3d0 | 9,766 | py | Python | cloud_provider/aws/aws_bid_advisor_test.py | mridhul/minion-manager | 7301ac6360a82dfdd27e682d070c34e90f080149 | [
"Apache-2.0"
] | 54 | 2018-07-06T18:06:54.000Z | 2019-06-03T15:21:01.000Z | cloud_provider/aws/aws_bid_advisor_test.py | mridhul/minion-manager | 7301ac6360a82dfdd27e682d070c34e90f080149 | [
"Apache-2.0"
] | 28 | 2018-07-05T23:32:22.000Z | 2019-07-19T17:19:26.000Z | cloud_provider/aws/aws_bid_advisor_test.py | mridhul/minion-manager | 7301ac6360a82dfdd27e682d070c34e90f080149 | [
"Apache-2.0"
] | 15 | 2018-07-28T04:51:01.000Z | 2019-07-30T14:50:25.000Z | """The file has unit tests for the AWSBidAdvisor."""
import unittest
from mock import patch, MagicMock
import datetime
from dateutil.tz import tzutc
from cloud_provider.aws.aws_bid_advisor import AWSBidAdvisor
REFRESH_INTERVAL = 10
REGION = 'us-west-2'
MOCK_SPOT_PRICE={'NextToken': '', 'SpotPriceHistory': [{'AvailabilityZone': 'us-west-2b', 'InstanceType': 'm5.4xlarge', 'ProductDescription': 'Linux/UNIX', 'SpotPrice': '0.300000', 'Timestamp': datetime.datetime(2019, 7, 13, 20, 30, 22, tzinfo=tzutc())}, {'AvailabilityZone': 'us-west-2c', 'InstanceType': 'm5.4xlarge', 'ProductDescription': 'Linux/UNIX', 'SpotPrice': '0.291400', 'Timestamp': datetime.datetime(2019, 7, 13, 20, 13, 34, tzinfo=tzutc())}, {'AvailabilityZone': 'us-west-2a', 'InstanceType': 'm5.4xlarge', 'ProductDescription': 'Linux/UNIX', 'SpotPrice': '0.320100', 'Timestamp': datetime.datetime(2019, 7, 13, 18, 33, 30, tzinfo=tzutc())}, {'AvailabilityZone': 'us-west-2c', 'InstanceType': 'm3.medium', 'ProductDescription': 'Linux/UNIX', 'SpotPrice': '0.006700', 'Timestamp': datetime.datetime(2019, 7, 13, 17, 7, 9, tzinfo=tzutc())}, {'AvailabilityZone': 'us-west-2b', 'InstanceType': 'm3.medium', 'ProductDescription': 'Linux/UNIX', 'SpotPrice': '0.006700', 'Timestamp': datetime.datetime(2019, 7, 13, 17, 7, 9, tzinfo=tzutc())}, {'AvailabilityZone': 'us-west-2a', 'InstanceType': 'm3.medium', 'ProductDescription': 'Linux/UNIX', 'SpotPrice': '0.006700', 'Timestamp': datetime.datetime(2019, 7, 13, 17, 7, 9, tzinfo=tzutc())}, {'AvailabilityZone': 'us-west-2b', 'InstanceType': 'm5.4xlarge', 'ProductDescription': 'Linux/UNIX', 'SpotPrice': '0.300400', 'Timestamp': datetime.datetime(2019, 7, 13, 15, 46, 1, tzinfo=tzutc())}, {'AvailabilityZone': 'us-west-2c', 'InstanceType': 'm5.4xlarge', 'ProductDescription': 'Linux/UNIX', 'SpotPrice': '0.291500', 'Timestamp': datetime.datetime(2019, 7, 13, 14, 47, 14, tzinfo=tzutc())}, {'AvailabilityZone': 'us-west-2a', 'InstanceType': 'm5.4xlarge', 'ProductDescription': 'Linux/UNIX', 'SpotPrice': '0.321600', 'Timestamp': datetime.datetime(2019, 7, 13, 13, 40, 47, tzinfo=tzutc())}, {'AvailabilityZone': 'us-west-2d', 'InstanceType': 'm5.4xlarge', 'ProductDescription': 'Linux/UNIX', 'SpotPrice': '0.270400', 'Timestamp': 
datetime.datetime(2019, 7, 13, 6, 23, 5, tzinfo=tzutc())}, {'AvailabilityZone': 'us-west-2c', 'InstanceType': 'm3.medium', 'ProductDescription': 'Linux/UNIX', 'SpotPrice': '0.006700', 'Timestamp': datetime.datetime(2019, 7, 12, 17, 7, 5, tzinfo=tzutc())}, {'AvailabilityZone': 'us-west-2b', 'InstanceType': 'm3.medium', 'ProductDescription': 'Linux/UNIX', 'SpotPrice': '0.006700', 'Timestamp': datetime.datetime(2019, 7, 12, 17, 7, 5, tzinfo=tzutc())}, {'AvailabilityZone': 'us-west-2a', 'InstanceType': 'm3.medium', 'ProductDescription': 'Linux/UNIX', 'SpotPrice': '0.006700', 'Timestamp': datetime.datetime(2019, 7, 12, 17, 7, 5, tzinfo=tzutc())}], 'ResponseMetadata': {'RequestId': 'f428bcba-016f-476f-b9ed-755f71af2d36', 'HTTPStatusCode': 200, 'HTTPHeaders': {'content-type': 'text/xml;charset=UTF-8', 'content-length': '4341', 'vary': 'accept-encoding', 'date': 'Sun, 14 Jul 2019 00:45:52 GMT', 'server': 'AmazonEC2'}, 'RetryAttempts': 0}}
class AWSBidAdvisorTest(unittest.TestCase):
"""
Tests for AWSBidAdvisor.
"""
@patch.object(AWSBidAdvisor.SpotInstancePriceUpdater, 'ec2_get_spot_price_history', MagicMock(return_value=MOCK_SPOT_PRICE))
def test_ba_lifecycle(self):
"""
Tests that the AWSBidVisor starts threads and stops them correctly.
"""
bidadv = AWSBidAdvisor(REFRESH_INTERVAL, REFRESH_INTERVAL, REGION)
assert len(bidadv.all_bid_advisor_threads) == 0
bidadv.run()
assert len(bidadv.all_bid_advisor_threads) == 2
bidadv.shutdown()
assert len(bidadv.all_bid_advisor_threads) == 0
def test_ba_on_demand_pricing(self):
"""
Tests that the AWSBidVisor correctly gets the on-demand pricing.
"""
bidadv = AWSBidAdvisor(REFRESH_INTERVAL, REFRESH_INTERVAL, REGION)
assert len(bidadv.on_demand_price_dict) == 0
updater = bidadv.OnDemandUpdater(bidadv)
updater.get_on_demand_pricing()
assert len(bidadv.on_demand_price_dict) > 0
@patch.object(AWSBidAdvisor.SpotInstancePriceUpdater, 'ec2_get_spot_price_history', MagicMock(return_value=MOCK_SPOT_PRICE))
def test_ba_spot_pricing(self):
"""
Tests that the AWSBidVisor correctly gets the spot instance pricing.
"""
bidadv = AWSBidAdvisor(REFRESH_INTERVAL, REFRESH_INTERVAL, REGION)
assert len(bidadv.spot_price_list) == 0
updater = bidadv.SpotInstancePriceUpdater(bidadv)
updater.get_spot_price_info()
assert len(bidadv.spot_price_list) > 0
@patch.object(AWSBidAdvisor.SpotInstancePriceUpdater, 'ec2_get_spot_price_history', MagicMock(return_value=MOCK_SPOT_PRICE))
def test_ba_price_update(self):
"""
Tests that the AXBidVisor actually updates the pricing info.
"""
bidadv = AWSBidAdvisor(REFRESH_INTERVAL, REFRESH_INTERVAL, REGION)
od_updater = bidadv.OnDemandUpdater(bidadv)
od_updater.get_on_demand_pricing()
sp_updater = bidadv.SpotInstancePriceUpdater(bidadv)
sp_updater.get_spot_price_info()
# Verify that the pricing info was populated.
assert len(bidadv.on_demand_price_dict) > 0
assert len(bidadv.spot_price_list) > 0
# Make the price dicts empty to check if they get updated.
bidadv.on_demand_price_dict = {}
bidadv.spot_price_list = {}
od_updater.get_on_demand_pricing()
sp_updater.get_spot_price_info()
# Verify that the pricing info is populated again.
assert len(bidadv.on_demand_price_dict) > 0
assert len(bidadv.spot_price_list) > 0
def test_ba_get_bid(self):
"""
Tests that the bid_advisor's get_new_bid() method returns correct
bid information.
"""
bidadv = AWSBidAdvisor(REFRESH_INTERVAL, REFRESH_INTERVAL, REGION)
instance_type = "m3.large"
zones = ["us-west-2b"]
# Manually populate the prices so that spot-instance prices are chosen.
bidadv.on_demand_price_dict["m3.large"] = "100"
bidadv.spot_price_list = [{'InstanceType': instance_type,
'SpotPrice': '80',
'AvailabilityZone': "us-west-2b"}]
bid_info = bidadv.get_new_bid(zones, instance_type)
assert bid_info is not None, "BidAdvisor didn't return any " + \
"now bid information."
assert bid_info["type"] == "spot"
assert isinstance(bid_info["price"], str)
# Manually populate the prices so that on-demand instances are chosen.
bidadv.spot_price_list = [{'InstanceType': instance_type,
'SpotPrice': '85',
'AvailabilityZone': "us-west-2b"}]
bid_info = bidadv.get_new_bid(zones, instance_type)
assert bid_info is not None, "BidAdvisor didn't return any now " + \
"bid information."
assert bid_info["type"] == "on-demand"
def test_ba_get_bid_no_data(self):
"""
Tests that the BidAdvisor returns the default if the pricing
information hasn't be obtained yet.
"""
bidadv = AWSBidAdvisor(REFRESH_INTERVAL, REFRESH_INTERVAL, REGION)
bid_info = bidadv.get_new_bid(['us-west-2a'], 'm3.large')
assert bid_info["type"] == "on-demand"
@patch.object(AWSBidAdvisor.SpotInstancePriceUpdater, 'ec2_get_spot_price_history', MagicMock(return_value=MOCK_SPOT_PRICE))
def test_ba_get_current_price(self):
"""
Tests that the BidAdvisor returns the most recent price information.
"""
bidadv = AWSBidAdvisor(REFRESH_INTERVAL, REFRESH_INTERVAL, REGION)
od_updater = bidadv.OnDemandUpdater(bidadv)
od_updater.get_on_demand_pricing()
sp_updater = bidadv.SpotInstancePriceUpdater(bidadv)
sp_updater.get_spot_price_info()
# Verify that the pricing info was populated.
assert len(bidadv.on_demand_price_dict) > 0
assert len(bidadv.spot_price_list) > 0
price_info_map = bidadv.get_current_price()
assert price_info_map["spot"] is not None
assert price_info_map["on-demand"] is not None
def test_ba_parse_row(self):
"""
Tests that the BidAdvisor parses the rows in on-demand price information.
"""
bidadv = AWSBidAdvisor(REFRESH_INTERVAL, REFRESH_INTERVAL, REGION)
od_updater = bidadv.OnDemandUpdater(bidadv)
row = {}
row['RateCode'] = "JRTCKXETXF.6YS6EN2CT7"
row["TermType"] = "OnDemand"
row["PriceDescription"] = "On Demand Linux"
row["Location"] = "US West (Oregon)"
row["Operating System"] = "Linux"
row["Pre Installed S/W"] = "NA"
row["Tenancy"] = "Shared"
row["PricePerUnit"] = "0.453"
row["Instance Type"] = "m5.4xlarge"
od_updater.parse_price_row(row)
assert od_updater.bid_advisor.on_demand_price_dict['m5.4xlarge'] == "0.453"
od_updater.parse_price_row(row)
assert od_updater.bid_advisor.on_demand_price_dict['m5.4xlarge'] == "0.453"
row["PricePerUnit"] = "0.658"
od_updater.parse_price_row(row)
assert od_updater.bid_advisor.on_demand_price_dict['m5.4xlarge'] == "0.658"
row["PricePerUnit"] = "0.00"
od_updater.parse_price_row(row)
assert od_updater.bid_advisor.on_demand_price_dict['m5.4xlarge'] == "0.658"
row['RateCode'] = "Some Random RateCode"
od_updater.parse_price_row(row)
| 57.111111 | 2,928 | 0.668646 |
import unittest
from mock import patch, MagicMock
import datetime
from dateutil.tz import tzutc
from cloud_provider.aws.aws_bid_advisor import AWSBidAdvisor
REFRESH_INTERVAL = 10
REGION = 'us-west-2'
MOCK_SPOT_PRICE={'NextToken': '', 'SpotPriceHistory': [{'AvailabilityZone': 'us-west-2b', 'InstanceType': 'm5.4xlarge', 'ProductDescription': 'Linux/UNIX', 'SpotPrice': '0.300000', 'Timestamp': datetime.datetime(2019, 7, 13, 20, 30, 22, tzinfo=tzutc())}, {'AvailabilityZone': 'us-west-2c', 'InstanceType': 'm5.4xlarge', 'ProductDescription': 'Linux/UNIX', 'SpotPrice': '0.291400', 'Timestamp': datetime.datetime(2019, 7, 13, 20, 13, 34, tzinfo=tzutc())}, {'AvailabilityZone': 'us-west-2a', 'InstanceType': 'm5.4xlarge', 'ProductDescription': 'Linux/UNIX', 'SpotPrice': '0.320100', 'Timestamp': datetime.datetime(2019, 7, 13, 18, 33, 30, tzinfo=tzutc())}, {'AvailabilityZone': 'us-west-2c', 'InstanceType': 'm3.medium', 'ProductDescription': 'Linux/UNIX', 'SpotPrice': '0.006700', 'Timestamp': datetime.datetime(2019, 7, 13, 17, 7, 9, tzinfo=tzutc())}, {'AvailabilityZone': 'us-west-2b', 'InstanceType': 'm3.medium', 'ProductDescription': 'Linux/UNIX', 'SpotPrice': '0.006700', 'Timestamp': datetime.datetime(2019, 7, 13, 17, 7, 9, tzinfo=tzutc())}, {'AvailabilityZone': 'us-west-2a', 'InstanceType': 'm3.medium', 'ProductDescription': 'Linux/UNIX', 'SpotPrice': '0.006700', 'Timestamp': datetime.datetime(2019, 7, 13, 17, 7, 9, tzinfo=tzutc())}, {'AvailabilityZone': 'us-west-2b', 'InstanceType': 'm5.4xlarge', 'ProductDescription': 'Linux/UNIX', 'SpotPrice': '0.300400', 'Timestamp': datetime.datetime(2019, 7, 13, 15, 46, 1, tzinfo=tzutc())}, {'AvailabilityZone': 'us-west-2c', 'InstanceType': 'm5.4xlarge', 'ProductDescription': 'Linux/UNIX', 'SpotPrice': '0.291500', 'Timestamp': datetime.datetime(2019, 7, 13, 14, 47, 14, tzinfo=tzutc())}, {'AvailabilityZone': 'us-west-2a', 'InstanceType': 'm5.4xlarge', 'ProductDescription': 'Linux/UNIX', 'SpotPrice': '0.321600', 'Timestamp': datetime.datetime(2019, 7, 13, 13, 40, 47, tzinfo=tzutc())}, {'AvailabilityZone': 'us-west-2d', 'InstanceType': 'm5.4xlarge', 'ProductDescription': 'Linux/UNIX', 'SpotPrice': '0.270400', 'Timestamp': 
datetime.datetime(2019, 7, 13, 6, 23, 5, tzinfo=tzutc())}, {'AvailabilityZone': 'us-west-2c', 'InstanceType': 'm3.medium', 'ProductDescription': 'Linux/UNIX', 'SpotPrice': '0.006700', 'Timestamp': datetime.datetime(2019, 7, 12, 17, 7, 5, tzinfo=tzutc())}, {'AvailabilityZone': 'us-west-2b', 'InstanceType': 'm3.medium', 'ProductDescription': 'Linux/UNIX', 'SpotPrice': '0.006700', 'Timestamp': datetime.datetime(2019, 7, 12, 17, 7, 5, tzinfo=tzutc())}, {'AvailabilityZone': 'us-west-2a', 'InstanceType': 'm3.medium', 'ProductDescription': 'Linux/UNIX', 'SpotPrice': '0.006700', 'Timestamp': datetime.datetime(2019, 7, 12, 17, 7, 5, tzinfo=tzutc())}], 'ResponseMetadata': {'RequestId': 'f428bcba-016f-476f-b9ed-755f71af2d36', 'HTTPStatusCode': 200, 'HTTPHeaders': {'content-type': 'text/xml;charset=UTF-8', 'content-length': '4341', 'vary': 'accept-encoding', 'date': 'Sun, 14 Jul 2019 00:45:52 GMT', 'server': 'AmazonEC2'}, 'RetryAttempts': 0}}
class AWSBidAdvisorTest(unittest.TestCase):
@patch.object(AWSBidAdvisor.SpotInstancePriceUpdater, 'ec2_get_spot_price_history', MagicMock(return_value=MOCK_SPOT_PRICE))
def test_ba_lifecycle(self):
bidadv = AWSBidAdvisor(REFRESH_INTERVAL, REFRESH_INTERVAL, REGION)
assert len(bidadv.all_bid_advisor_threads) == 0
bidadv.run()
assert len(bidadv.all_bid_advisor_threads) == 2
bidadv.shutdown()
assert len(bidadv.all_bid_advisor_threads) == 0
def test_ba_on_demand_pricing(self):
bidadv = AWSBidAdvisor(REFRESH_INTERVAL, REFRESH_INTERVAL, REGION)
assert len(bidadv.on_demand_price_dict) == 0
updater = bidadv.OnDemandUpdater(bidadv)
updater.get_on_demand_pricing()
assert len(bidadv.on_demand_price_dict) > 0
@patch.object(AWSBidAdvisor.SpotInstancePriceUpdater, 'ec2_get_spot_price_history', MagicMock(return_value=MOCK_SPOT_PRICE))
def test_ba_spot_pricing(self):
bidadv = AWSBidAdvisor(REFRESH_INTERVAL, REFRESH_INTERVAL, REGION)
assert len(bidadv.spot_price_list) == 0
updater = bidadv.SpotInstancePriceUpdater(bidadv)
updater.get_spot_price_info()
assert len(bidadv.spot_price_list) > 0
@patch.object(AWSBidAdvisor.SpotInstancePriceUpdater, 'ec2_get_spot_price_history', MagicMock(return_value=MOCK_SPOT_PRICE))
def test_ba_price_update(self):
bidadv = AWSBidAdvisor(REFRESH_INTERVAL, REFRESH_INTERVAL, REGION)
od_updater = bidadv.OnDemandUpdater(bidadv)
od_updater.get_on_demand_pricing()
sp_updater = bidadv.SpotInstancePriceUpdater(bidadv)
sp_updater.get_spot_price_info()
assert len(bidadv.on_demand_price_dict) > 0
assert len(bidadv.spot_price_list) > 0
bidadv.on_demand_price_dict = {}
bidadv.spot_price_list = {}
od_updater.get_on_demand_pricing()
sp_updater.get_spot_price_info()
assert len(bidadv.on_demand_price_dict) > 0
assert len(bidadv.spot_price_list) > 0
def test_ba_get_bid(self):
bidadv = AWSBidAdvisor(REFRESH_INTERVAL, REFRESH_INTERVAL, REGION)
instance_type = "m3.large"
zones = ["us-west-2b"]
bidadv.on_demand_price_dict["m3.large"] = "100"
bidadv.spot_price_list = [{'InstanceType': instance_type,
'SpotPrice': '80',
'AvailabilityZone': "us-west-2b"}]
bid_info = bidadv.get_new_bid(zones, instance_type)
assert bid_info is not None, "BidAdvisor didn't return any " + \
"now bid information."
assert bid_info["type"] == "spot"
assert isinstance(bid_info["price"], str)
# Manually populate the prices so that on-demand instances are chosen.
bidadv.spot_price_list = [{'InstanceType': instance_type,
'SpotPrice': '85',
'AvailabilityZone': "us-west-2b"}]
bid_info = bidadv.get_new_bid(zones, instance_type)
assert bid_info is not None, "BidAdvisor didn't return any now " + \
"bid information."
assert bid_info["type"] == "on-demand"
def test_ba_get_bid_no_data(self):
bidadv = AWSBidAdvisor(REFRESH_INTERVAL, REFRESH_INTERVAL, REGION)
bid_info = bidadv.get_new_bid(['us-west-2a'], 'm3.large')
assert bid_info["type"] == "on-demand"
@patch.object(AWSBidAdvisor.SpotInstancePriceUpdater, 'ec2_get_spot_price_history', MagicMock(return_value=MOCK_SPOT_PRICE))
def test_ba_get_current_price(self):
bidadv = AWSBidAdvisor(REFRESH_INTERVAL, REFRESH_INTERVAL, REGION)
od_updater = bidadv.OnDemandUpdater(bidadv)
od_updater.get_on_demand_pricing()
sp_updater = bidadv.SpotInstancePriceUpdater(bidadv)
sp_updater.get_spot_price_info()
assert len(bidadv.on_demand_price_dict) > 0
assert len(bidadv.spot_price_list) > 0
price_info_map = bidadv.get_current_price()
assert price_info_map["spot"] is not None
assert price_info_map["on-demand"] is not None
def test_ba_parse_row(self):
bidadv = AWSBidAdvisor(REFRESH_INTERVAL, REFRESH_INTERVAL, REGION)
od_updater = bidadv.OnDemandUpdater(bidadv)
row = {}
row['RateCode'] = "JRTCKXETXF.6YS6EN2CT7"
row["TermType"] = "OnDemand"
row["PriceDescription"] = "On Demand Linux"
row["Location"] = "US West (Oregon)"
row["Operating System"] = "Linux"
row["Pre Installed S/W"] = "NA"
row["Tenancy"] = "Shared"
row["PricePerUnit"] = "0.453"
row["Instance Type"] = "m5.4xlarge"
od_updater.parse_price_row(row)
assert od_updater.bid_advisor.on_demand_price_dict['m5.4xlarge'] == "0.453"
od_updater.parse_price_row(row)
assert od_updater.bid_advisor.on_demand_price_dict['m5.4xlarge'] == "0.453"
row["PricePerUnit"] = "0.658"
od_updater.parse_price_row(row)
assert od_updater.bid_advisor.on_demand_price_dict['m5.4xlarge'] == "0.658"
row["PricePerUnit"] = "0.00"
od_updater.parse_price_row(row)
assert od_updater.bid_advisor.on_demand_price_dict['m5.4xlarge'] == "0.658"
row['RateCode'] = "Some Random RateCode"
od_updater.parse_price_row(row)
| true | true |
f720aa2b27c1b9f527f04697350eace8a44cc17c | 214 | py | Python | diypy3/tests/arr_stk.py | anqurvanillapy/diypy | 56ced55011e95a19b7238992c2fc612b196ff17d | [
"CC0-1.0"
] | 1 | 2015-12-08T10:35:21.000Z | 2015-12-08T10:35:21.000Z | diypy3/tests/arr_stk.py | anqurvanillapy/diypy3 | 56ced55011e95a19b7238992c2fc612b196ff17d | [
"CC0-1.0"
] | null | null | null | diypy3/tests/arr_stk.py | anqurvanillapy/diypy3 | 56ced55011e95a19b7238992c2fc612b196ff17d | [
"CC0-1.0"
] | null | null | null | #!/usr/bin/env python
# -*- coding: utf-8 -*-
"""\
This script creates a stack array
"""
import diypy3
d = diypy3.Diypy3()
arr_stk = (1, 2, 3, 4, 5)
max_size = 100
inc = 10
d.array_stack(max_size, inc, arr_stk)
| 14.266667 | 37 | 0.635514 |
import diypy3
d = diypy3.Diypy3()
arr_stk = (1, 2, 3, 4, 5)
max_size = 100
inc = 10
d.array_stack(max_size, inc, arr_stk)
| true | true |
f720aae2cd32fb0ceac540aa226171b36ea197e1 | 9,383 | py | Python | yandex/cloud/mdb/mysql/v1alpha/backup_service_pb2.py | kbespalov/python-sdk | e86563ee850e46a35b4c84053ecd4affdf66a963 | [
"MIT"
] | null | null | null | yandex/cloud/mdb/mysql/v1alpha/backup_service_pb2.py | kbespalov/python-sdk | e86563ee850e46a35b4c84053ecd4affdf66a963 | [
"MIT"
] | null | null | null | yandex/cloud/mdb/mysql/v1alpha/backup_service_pb2.py | kbespalov/python-sdk | e86563ee850e46a35b4c84053ecd4affdf66a963 | [
"MIT"
] | null | null | null | # -*- coding: utf-8 -*-
# Generated by the protocol buffer compiler. DO NOT EDIT!
# source: yandex/cloud/mdb/mysql/v1alpha/backup_service.proto
import sys
_b=sys.version_info[0]<3 and (lambda x:x) or (lambda x:x.encode('latin1'))
from google.protobuf import descriptor as _descriptor
from google.protobuf import message as _message
from google.protobuf import reflection as _reflection
from google.protobuf import symbol_database as _symbol_database
# @@protoc_insertion_point(imports)
_sym_db = _symbol_database.Default()
from google.api import annotations_pb2 as google_dot_api_dot_annotations__pb2
from yandex.cloud import validation_pb2 as yandex_dot_cloud_dot_validation__pb2
from yandex.cloud.mdb.mysql.v1alpha import backup_pb2 as yandex_dot_cloud_dot_mdb_dot_mysql_dot_v1alpha_dot_backup__pb2
DESCRIPTOR = _descriptor.FileDescriptor(
name='yandex/cloud/mdb/mysql/v1alpha/backup_service.proto',
package='yandex.cloud.mdb.mysql.v1alpha',
syntax='proto3',
serialized_options=_b('\n\"yandex.cloud.api.mdb.mysql.v1alphaZHgithub.com/yandex-cloud/go-genproto/yandex/cloud/mdb/mysql/v1alpha;mysql'),
serialized_pb=_b('\n3yandex/cloud/mdb/mysql/v1alpha/backup_service.proto\x12\x1eyandex.cloud.mdb.mysql.v1alpha\x1a\x1cgoogle/api/annotations.proto\x1a\x1dyandex/cloud/validation.proto\x1a+yandex/cloud/mdb/mysql/v1alpha/backup.proto\"+\n\x10GetBackupRequest\x12\x17\n\tbackup_id\x18\x01 \x01(\tB\x04\xe8\xc7\x31\x01\"s\n\x12ListBackupsRequest\x12\x1f\n\tfolder_id\x18\x01 \x01(\tB\x0c\xe8\xc7\x31\x01\x8a\xc8\x31\x04<=50\x12\x1d\n\tpage_size\x18\x02 \x01(\x03\x42\n\xfa\xc7\x31\x06<=1000\x12\x1d\n\npage_token\x18\x03 \x01(\tB\t\x8a\xc8\x31\x05<=100\"r\n\x13ListBackupsResponse\x12\x37\n\x07\x62\x61\x63kups\x18\x01 \x03(\x0b\x32&.yandex.cloud.mdb.mysql.v1alpha.Backup\x12\"\n\x0fnext_page_token\x18\x02 \x01(\tB\t\x8a\xc8\x31\x05<=1002\xbf\x02\n\rBackupService\x12\x93\x01\n\x03Get\x12\x30.yandex.cloud.mdb.mysql.v1alpha.GetBackupRequest\x1a&.yandex.cloud.mdb.mysql.v1alpha.Backup\"2\x82\xd3\xe4\x93\x02,\x12*/managed-mysql/v1alpha/backups/{backup_id}\x12\x97\x01\n\x04List\x12\x32.yandex.cloud.mdb.mysql.v1alpha.ListBackupsRequest\x1a\x33.yandex.cloud.mdb.mysql.v1alpha.ListBackupsResponse\"&\x82\xd3\xe4\x93\x02 \x12\x1e/managed-mysql/v1alpha/backupsBn\n\"yandex.cloud.api.mdb.mysql.v1alphaZHgithub.com/yandex-cloud/go-genproto/yandex/cloud/mdb/mysql/v1alpha;mysqlb\x06proto3')
,
dependencies=[google_dot_api_dot_annotations__pb2.DESCRIPTOR,yandex_dot_cloud_dot_validation__pb2.DESCRIPTOR,yandex_dot_cloud_dot_mdb_dot_mysql_dot_v1alpha_dot_backup__pb2.DESCRIPTOR,])
_GETBACKUPREQUEST = _descriptor.Descriptor(
name='GetBackupRequest',
full_name='yandex.cloud.mdb.mysql.v1alpha.GetBackupRequest',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='backup_id', full_name='yandex.cloud.mdb.mysql.v1alpha.GetBackupRequest.backup_id', index=0,
number=1, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=_b('\350\3071\001'), file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=193,
serialized_end=236,
)
_LISTBACKUPSREQUEST = _descriptor.Descriptor(
name='ListBackupsRequest',
full_name='yandex.cloud.mdb.mysql.v1alpha.ListBackupsRequest',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='folder_id', full_name='yandex.cloud.mdb.mysql.v1alpha.ListBackupsRequest.folder_id', index=0,
number=1, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=_b('\350\3071\001\212\3101\004<=50'), file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='page_size', full_name='yandex.cloud.mdb.mysql.v1alpha.ListBackupsRequest.page_size', index=1,
number=2, type=3, cpp_type=2, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=_b('\372\3071\006<=1000'), file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='page_token', full_name='yandex.cloud.mdb.mysql.v1alpha.ListBackupsRequest.page_token', index=2,
number=3, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=_b('\212\3101\005<=100'), file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=238,
serialized_end=353,
)
_LISTBACKUPSRESPONSE = _descriptor.Descriptor(
name='ListBackupsResponse',
full_name='yandex.cloud.mdb.mysql.v1alpha.ListBackupsResponse',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='backups', full_name='yandex.cloud.mdb.mysql.v1alpha.ListBackupsResponse.backups', index=0,
number=1, type=11, cpp_type=10, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='next_page_token', full_name='yandex.cloud.mdb.mysql.v1alpha.ListBackupsResponse.next_page_token', index=1,
number=2, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=_b('\212\3101\005<=100'), file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=355,
serialized_end=469,
)
_LISTBACKUPSRESPONSE.fields_by_name['backups'].message_type = yandex_dot_cloud_dot_mdb_dot_mysql_dot_v1alpha_dot_backup__pb2._BACKUP
DESCRIPTOR.message_types_by_name['GetBackupRequest'] = _GETBACKUPREQUEST
DESCRIPTOR.message_types_by_name['ListBackupsRequest'] = _LISTBACKUPSREQUEST
DESCRIPTOR.message_types_by_name['ListBackupsResponse'] = _LISTBACKUPSRESPONSE
_sym_db.RegisterFileDescriptor(DESCRIPTOR)
GetBackupRequest = _reflection.GeneratedProtocolMessageType('GetBackupRequest', (_message.Message,), {
'DESCRIPTOR' : _GETBACKUPREQUEST,
'__module__' : 'yandex.cloud.mdb.mysql.v1alpha.backup_service_pb2'
# @@protoc_insertion_point(class_scope:yandex.cloud.mdb.mysql.v1alpha.GetBackupRequest)
})
_sym_db.RegisterMessage(GetBackupRequest)
ListBackupsRequest = _reflection.GeneratedProtocolMessageType('ListBackupsRequest', (_message.Message,), {
'DESCRIPTOR' : _LISTBACKUPSREQUEST,
'__module__' : 'yandex.cloud.mdb.mysql.v1alpha.backup_service_pb2'
# @@protoc_insertion_point(class_scope:yandex.cloud.mdb.mysql.v1alpha.ListBackupsRequest)
})
_sym_db.RegisterMessage(ListBackupsRequest)
ListBackupsResponse = _reflection.GeneratedProtocolMessageType('ListBackupsResponse', (_message.Message,), {
'DESCRIPTOR' : _LISTBACKUPSRESPONSE,
'__module__' : 'yandex.cloud.mdb.mysql.v1alpha.backup_service_pb2'
# @@protoc_insertion_point(class_scope:yandex.cloud.mdb.mysql.v1alpha.ListBackupsResponse)
})
_sym_db.RegisterMessage(ListBackupsResponse)
DESCRIPTOR._options = None
_GETBACKUPREQUEST.fields_by_name['backup_id']._options = None
_LISTBACKUPSREQUEST.fields_by_name['folder_id']._options = None
_LISTBACKUPSREQUEST.fields_by_name['page_size']._options = None
_LISTBACKUPSREQUEST.fields_by_name['page_token']._options = None
_LISTBACKUPSRESPONSE.fields_by_name['next_page_token']._options = None
_BACKUPSERVICE = _descriptor.ServiceDescriptor(
name='BackupService',
full_name='yandex.cloud.mdb.mysql.v1alpha.BackupService',
file=DESCRIPTOR,
index=0,
serialized_options=None,
serialized_start=472,
serialized_end=791,
methods=[
_descriptor.MethodDescriptor(
name='Get',
full_name='yandex.cloud.mdb.mysql.v1alpha.BackupService.Get',
index=0,
containing_service=None,
input_type=_GETBACKUPREQUEST,
output_type=yandex_dot_cloud_dot_mdb_dot_mysql_dot_v1alpha_dot_backup__pb2._BACKUP,
serialized_options=_b('\202\323\344\223\002,\022*/managed-mysql/v1alpha/backups/{backup_id}'),
),
_descriptor.MethodDescriptor(
name='List',
full_name='yandex.cloud.mdb.mysql.v1alpha.BackupService.List',
index=1,
containing_service=None,
input_type=_LISTBACKUPSREQUEST,
output_type=_LISTBACKUPSRESPONSE,
serialized_options=_b('\202\323\344\223\002 \022\036/managed-mysql/v1alpha/backups'),
),
])
_sym_db.RegisterServiceDescriptor(_BACKUPSERVICE)
DESCRIPTOR.services_by_name['BackupService'] = _BACKUPSERVICE
# @@protoc_insertion_point(module_scope)
| 43.845794 | 1,281 | 0.775338 |
import sys
_b=sys.version_info[0]<3 and (lambda x:x) or (lambda x:x.encode('latin1'))
from google.protobuf import descriptor as _descriptor
from google.protobuf import message as _message
from google.protobuf import reflection as _reflection
from google.protobuf import symbol_database as _symbol_database
_sym_db = _symbol_database.Default()
from google.api import annotations_pb2 as google_dot_api_dot_annotations__pb2
from yandex.cloud import validation_pb2 as yandex_dot_cloud_dot_validation__pb2
from yandex.cloud.mdb.mysql.v1alpha import backup_pb2 as yandex_dot_cloud_dot_mdb_dot_mysql_dot_v1alpha_dot_backup__pb2
DESCRIPTOR = _descriptor.FileDescriptor(
name='yandex/cloud/mdb/mysql/v1alpha/backup_service.proto',
package='yandex.cloud.mdb.mysql.v1alpha',
syntax='proto3',
serialized_options=_b('\n\"yandex.cloud.api.mdb.mysql.v1alphaZHgithub.com/yandex-cloud/go-genproto/yandex/cloud/mdb/mysql/v1alpha;mysql'),
serialized_pb=_b('\n3yandex/cloud/mdb/mysql/v1alpha/backup_service.proto\x12\x1eyandex.cloud.mdb.mysql.v1alpha\x1a\x1cgoogle/api/annotations.proto\x1a\x1dyandex/cloud/validation.proto\x1a+yandex/cloud/mdb/mysql/v1alpha/backup.proto\"+\n\x10GetBackupRequest\x12\x17\n\tbackup_id\x18\x01 \x01(\tB\x04\xe8\xc7\x31\x01\"s\n\x12ListBackupsRequest\x12\x1f\n\tfolder_id\x18\x01 \x01(\tB\x0c\xe8\xc7\x31\x01\x8a\xc8\x31\x04<=50\x12\x1d\n\tpage_size\x18\x02 \x01(\x03\x42\n\xfa\xc7\x31\x06<=1000\x12\x1d\n\npage_token\x18\x03 \x01(\tB\t\x8a\xc8\x31\x05<=100\"r\n\x13ListBackupsResponse\x12\x37\n\x07\x62\x61\x63kups\x18\x01 \x03(\x0b\x32&.yandex.cloud.mdb.mysql.v1alpha.Backup\x12\"\n\x0fnext_page_token\x18\x02 \x01(\tB\t\x8a\xc8\x31\x05<=1002\xbf\x02\n\rBackupService\x12\x93\x01\n\x03Get\x12\x30.yandex.cloud.mdb.mysql.v1alpha.GetBackupRequest\x1a&.yandex.cloud.mdb.mysql.v1alpha.Backup\"2\x82\xd3\xe4\x93\x02,\x12*/managed-mysql/v1alpha/backups/{backup_id}\x12\x97\x01\n\x04List\x12\x32.yandex.cloud.mdb.mysql.v1alpha.ListBackupsRequest\x1a\x33.yandex.cloud.mdb.mysql.v1alpha.ListBackupsResponse\"&\x82\xd3\xe4\x93\x02 \x12\x1e/managed-mysql/v1alpha/backupsBn\n\"yandex.cloud.api.mdb.mysql.v1alphaZHgithub.com/yandex-cloud/go-genproto/yandex/cloud/mdb/mysql/v1alpha;mysqlb\x06proto3')
,
dependencies=[google_dot_api_dot_annotations__pb2.DESCRIPTOR,yandex_dot_cloud_dot_validation__pb2.DESCRIPTOR,yandex_dot_cloud_dot_mdb_dot_mysql_dot_v1alpha_dot_backup__pb2.DESCRIPTOR,])
_GETBACKUPREQUEST = _descriptor.Descriptor(
name='GetBackupRequest',
full_name='yandex.cloud.mdb.mysql.v1alpha.GetBackupRequest',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='backup_id', full_name='yandex.cloud.mdb.mysql.v1alpha.GetBackupRequest.backup_id', index=0,
number=1, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=_b('\350\3071\001'), file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=193,
serialized_end=236,
)
_LISTBACKUPSREQUEST = _descriptor.Descriptor(
name='ListBackupsRequest',
full_name='yandex.cloud.mdb.mysql.v1alpha.ListBackupsRequest',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='folder_id', full_name='yandex.cloud.mdb.mysql.v1alpha.ListBackupsRequest.folder_id', index=0,
number=1, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=_b('\350\3071\001\212\3101\004<=50'), file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='page_size', full_name='yandex.cloud.mdb.mysql.v1alpha.ListBackupsRequest.page_size', index=1,
number=2, type=3, cpp_type=2, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=_b('\372\3071\006<=1000'), file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='page_token', full_name='yandex.cloud.mdb.mysql.v1alpha.ListBackupsRequest.page_token', index=2,
number=3, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=_b('\212\3101\005<=100'), file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=238,
serialized_end=353,
)
_LISTBACKUPSRESPONSE = _descriptor.Descriptor(
name='ListBackupsResponse',
full_name='yandex.cloud.mdb.mysql.v1alpha.ListBackupsResponse',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='backups', full_name='yandex.cloud.mdb.mysql.v1alpha.ListBackupsResponse.backups', index=0,
number=1, type=11, cpp_type=10, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='next_page_token', full_name='yandex.cloud.mdb.mysql.v1alpha.ListBackupsResponse.next_page_token', index=1,
number=2, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=_b('\212\3101\005<=100'), file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=355,
serialized_end=469,
)
_LISTBACKUPSRESPONSE.fields_by_name['backups'].message_type = yandex_dot_cloud_dot_mdb_dot_mysql_dot_v1alpha_dot_backup__pb2._BACKUP
DESCRIPTOR.message_types_by_name['GetBackupRequest'] = _GETBACKUPREQUEST
DESCRIPTOR.message_types_by_name['ListBackupsRequest'] = _LISTBACKUPSREQUEST
DESCRIPTOR.message_types_by_name['ListBackupsResponse'] = _LISTBACKUPSRESPONSE
_sym_db.RegisterFileDescriptor(DESCRIPTOR)
GetBackupRequest = _reflection.GeneratedProtocolMessageType('GetBackupRequest', (_message.Message,), {
'DESCRIPTOR' : _GETBACKUPREQUEST,
'__module__' : 'yandex.cloud.mdb.mysql.v1alpha.backup_service_pb2'
})
_sym_db.RegisterMessage(GetBackupRequest)
ListBackupsRequest = _reflection.GeneratedProtocolMessageType('ListBackupsRequest', (_message.Message,), {
'DESCRIPTOR' : _LISTBACKUPSREQUEST,
'__module__' : 'yandex.cloud.mdb.mysql.v1alpha.backup_service_pb2'
})
_sym_db.RegisterMessage(ListBackupsRequest)
ListBackupsResponse = _reflection.GeneratedProtocolMessageType('ListBackupsResponse', (_message.Message,), {
'DESCRIPTOR' : _LISTBACKUPSRESPONSE,
'__module__' : 'yandex.cloud.mdb.mysql.v1alpha.backup_service_pb2'
})
_sym_db.RegisterMessage(ListBackupsResponse)
DESCRIPTOR._options = None
_GETBACKUPREQUEST.fields_by_name['backup_id']._options = None
_LISTBACKUPSREQUEST.fields_by_name['folder_id']._options = None
_LISTBACKUPSREQUEST.fields_by_name['page_size']._options = None
_LISTBACKUPSREQUEST.fields_by_name['page_token']._options = None
_LISTBACKUPSRESPONSE.fields_by_name['next_page_token']._options = None
_BACKUPSERVICE = _descriptor.ServiceDescriptor(
name='BackupService',
full_name='yandex.cloud.mdb.mysql.v1alpha.BackupService',
file=DESCRIPTOR,
index=0,
serialized_options=None,
serialized_start=472,
serialized_end=791,
methods=[
_descriptor.MethodDescriptor(
name='Get',
full_name='yandex.cloud.mdb.mysql.v1alpha.BackupService.Get',
index=0,
containing_service=None,
input_type=_GETBACKUPREQUEST,
output_type=yandex_dot_cloud_dot_mdb_dot_mysql_dot_v1alpha_dot_backup__pb2._BACKUP,
serialized_options=_b('\202\323\344\223\002,\022*/managed-mysql/v1alpha/backups/{backup_id}'),
),
_descriptor.MethodDescriptor(
name='List',
full_name='yandex.cloud.mdb.mysql.v1alpha.BackupService.List',
index=1,
containing_service=None,
input_type=_LISTBACKUPSREQUEST,
output_type=_LISTBACKUPSRESPONSE,
serialized_options=_b('\202\323\344\223\002 \022\036/managed-mysql/v1alpha/backups'),
),
])
_sym_db.RegisterServiceDescriptor(_BACKUPSERVICE)
DESCRIPTOR.services_by_name['BackupService'] = _BACKUPSERVICE
| true | true |
f720ac0decc8999fbcd4cf2a426c611b2bce56d7 | 2,029 | py | Python | src/tm/tm2/tm2_meta/tmmelder.py | YouhuaLi/parsimony | d59fc49497c4c956c4a46f088adbbc65c40a3236 | [
"MIT"
] | null | null | null | src/tm/tm2/tm2_meta/tmmelder.py | YouhuaLi/parsimony | d59fc49497c4c956c4a46f088adbbc65c40a3236 | [
"MIT"
] | null | null | null | src/tm/tm2/tm2_meta/tmmelder.py | YouhuaLi/parsimony | d59fc49497c4c956c4a46f088adbbc65c40a3236 | [
"MIT"
] | null | null | null | # This program melds together two Turing machines;
# that is, if the first machine ends up in an "OUT" state,
# this program outputs a TM where the out state of the first machine
# is the start state of the second
import sys
import tmsim
def alphabetMSToTS():
    """Tape alphabet shared by every machine this script melds."""
    symbols = ["a", "b"]
    return symbols
def convertStatesToString(listOfStates, output):
    """Write the given Turing machine states to `output` in .tm2 format.

    Emits a "States: <n>" header, then one section per state listing the
    transition (next state name, head move, written symbol) for each symbol
    of the tape alphabet.  A duplicate state name raises AssertionError
    (the offending name is printed first to aid debugging).
    """
    numberOfStates = len(listOfStates)
    output.write("States: " + str(numberOfStates) + "\n")
    output.write("\n")

    # Track names already emitted so duplicates are caught immediately.
    # (A set replaces the original dict-with-None-values idiom.)
    statesIveAlreadyPrinted = set()
    for state in listOfStates:
        try:
            assert (not state.stateName in statesIveAlreadyPrinted)
        except AssertionError:
            # Bug fix: the original used a Python-2-only print statement;
            # the single-argument print() call works in both Python 2 and 3.
            print(state.stateName)
            raise
        statesIveAlreadyPrinted.add(state.stateName)

        if state.isStartState:
            output.write("START ")

        output.write(state.stateName + ":\n")
        for symbol in alphabetMSToTS():
            output.write("\t" + symbol + " -> " + state.getNextStateName(symbol) + "; " + \
                state.getHeadMove(symbol) + "; " + state.getWrite(symbol) + "\n")
        output.write("\n")
if __name__ == "__main__":
    # Meld two machines: every transition of the first machine into the
    # special "OUT" state is rerouted to the second machine's start state,
    # and the combined state list is written out as a new .tm2 file.
    inMachineName = sys.argv[1]
    outMachineName = sys.argv[2]

    try:
        # Bug fix: narrowed the bare `except:` — only the assert can raise here.
        assert inMachineName != outMachineName
    except AssertionError:
        # print() call form is valid in both Python 2 and 3.
        print("Error: cannot meld two machines that have the same name.")
        raise

    inMachine = tmsim.SingleTapeTuringMachine("../tm2_files/" + sys.argv[1] + ".tm2", \
        alphabetMSToTS())
    outMachine = tmsim.SingleTapeTuringMachine("../tm2_files/" + sys.argv[2] + ".tm2", \
        alphabetMSToTS())

    # Reroute the first machine's OUT transitions into the second machine.
    for state in inMachine.listOfRealStates:
        for symbol in alphabetMSToTS():
            nextState = state.getNextState(symbol)
            if nextState.stateName == "OUT":
                state.setNextState(symbol, outMachine.startState)

    # The melded machine keeps only the first machine's start state.
    for state in outMachine.listOfRealStates:
        state.isStartState = False

    # Bug fix: use a context manager so the output file is always closed
    # (the original leaked the handle returned by open()).
    with open("../tm2_files/" + sys.argv[3] + ".tm2", "w") as meldedFile:
        convertStatesToString(inMachine.listOfRealStates + outMachine.listOfRealStates,
                              meldedFile)
import sys
import tmsim
def alphabetMSToTS():
return ["a", "b"]
def convertStatesToString(listOfStates, output):
numberOfStates = len(listOfStates)
output.write("States: " + str(numberOfStates) + "\n")
output.write("\n")
statesIveAlreadyPrinted = {}
for state in listOfStates:
try:
assert (not state.stateName in statesIveAlreadyPrinted)
except AssertionError:
print state.stateName
raise
statesIveAlreadyPrinted[state.stateName] = None
if state.isStartState:
output.write("START ")
output.write(state.stateName + ":\n")
for symbol in alphabetMSToTS():
output.write("\t" + symbol + " -> " + state.getNextStateName(symbol) + "; " + \
state.getHeadMove(symbol) + "; " + state.getWrite(symbol) + "\n")
output.write("\n")
if __name__ == "__main__":
inMachineName = sys.argv[1]
outMachineName = sys.argv[2]
try:
assert inMachineName != outMachineName
except:
print "Error: cannot meld two machines that have the same name."
raise
inMachine = tmsim.SingleTapeTuringMachine("../tm2_files/" + sys.argv[1] + ".tm2", \
alphabetMSToTS())
outMachine = tmsim.SingleTapeTuringMachine("../tm2_files/" + sys.argv[2] + ".tm2", \
alphabetMSToTS())
for state in inMachine.listOfRealStates:
for symbol in alphabetMSToTS():
nextState = state.getNextState(symbol)
if nextState.stateName == "OUT":
state.setNextState(symbol, outMachine.startState)
for state in outMachine.listOfRealStates:
state.isStartState = False
convertStatesToString(inMachine.listOfRealStates + outMachine.listOfRealStates, \
open("../tm2_files/" + sys.argv[3] + ".tm2", "w")) | false | true |
f720ac11c40ba6b8dd0d3806ca655474e9e8841f | 344 | py | Python | convert/_3D/to/_1D.py | flew-software/Dem | 20b7eb9bc7c11f1baf23acfe7bfbab359ddd97fb | [
"MIT"
] | 1 | 2021-02-17T08:30:05.000Z | 2021-02-17T08:30:05.000Z | convert/_3D/to/_1D.py | flew-software/Dem | 20b7eb9bc7c11f1baf23acfe7bfbab359ddd97fb | [
"MIT"
] | null | null | null | convert/_3D/to/_1D.py | flew-software/Dem | 20b7eb9bc7c11f1baf23acfe7bfbab359ddd97fb | [
"MIT"
def row_major(l: list) -> tuple[list, int]:
    """Flatten a 2-D list into a 1-D list in row-major order.

    Returns the flattened list together with the number of rows.
    Works for ragged inputs (rows of unequal length) and for an empty list.
    """
    # Idiomatic replacement for the original nested index-while loops.
    flat = [value for row in l for value in row]
    return flat, len(l)
| 22.933333 | 105 | 0.479651 | def row_major(l: list) -> tuple[list, int]:
out = []
i = 0
while i < len(l):
ii = 0
a = l[i]
while ii < len(a):
out.append(a[ii])
ii += 1
i += 1
return out, len(l)
| true | true |
f720ae0b4dc5919f8c14b48866a4d15a378b186e | 2,887 | py | Python | EDSR/common.py | NateLol/BAM_A_lightweight_but_efficient_Balanced_attention_mechanism_for_super_resolution | f23c043c6cd5c064e58b6b11bd7100fc55224702 | [
"MIT"
] | 33 | 2021-04-30T02:40:05.000Z | 2022-03-09T09:35:49.000Z | EDSR/common.py | chisyliu/BAM_A_lightweight_but_efficient_Balanced_attention_mechanism_for_super_resolution | 4c977ea1586e7836248acb5cbd648e124b43aca3 | [
"MIT"
] | 6 | 2021-05-10T23:19:35.000Z | 2021-12-13T02:13:16.000Z | EDSR/common.py | chisyliu/BAM_A_lightweight_but_efficient_Balanced_attention_mechanism_for_super_resolution | 4c977ea1586e7836248acb5cbd648e124b43aca3 | [
"MIT"
] | 13 | 2021-05-18T12:21:48.000Z | 2022-01-21T07:17:19.000Z | import math
import torch
import torch.nn as nn
import torch.nn.functional as F
from torch.autograd import Variable
def default_conv(in_channels, out_channels, kernel_size, bias=True):
    """Build a Conv2d whose padding keeps H/W unchanged for odd kernel sizes."""
    padding = kernel_size // 2
    return nn.Conv2d(in_channels, out_channels, kernel_size,
                     padding=padding, bias=bias)
class MeanShift(nn.Conv2d):
    """Fixed 1x1 conv that shifts RGB values by the dataset mean.

    With sign=-1 it subtracts rgb_mean (scaled by rgb_range) and divides by
    rgb_std; with sign=+1 it adds the mean back.  The layer is frozen: its
    weight/bias must never be trained.
    """

    def __init__(self, rgb_range, rgb_mean=(0.4488, 0.4371, 0.4040), rgb_std=(1.0, 1.0, 1.0), sign=-1):
        super(MeanShift, self).__init__(3, 3, kernel_size=1)
        std = torch.Tensor(rgb_std)
        # Identity kernel scaled by 1/std: output = (x + sign*range*mean) / std.
        self.weight.data = torch.eye(3).view(3, 3, 1, 1)
        self.weight.data.div_(std.view(3, 1, 1, 1))
        self.bias.data = sign * rgb_range * torch.Tensor(rgb_mean)
        self.bias.data.div_(std)
        # Bug fix: the original did `self.requires_grad = False`, which only
        # sets a plain attribute on the Module and does NOT freeze the
        # parameters.  Freeze the actual weight/bias tensors instead.
        for p in self.parameters():
            p.requires_grad = False
class BasicBlock(nn.Sequential):
    """Conv2d -> optional BatchNorm2d -> optional activation, as a Sequential."""

    def __init__(
            self, in_channels, out_channels, kernel_size, stride=1, bias=False,
            bn=True, act=nn.ReLU(True)):
        layers = [
            nn.Conv2d(
                in_channels, out_channels, kernel_size,
                padding=(kernel_size // 2), stride=stride, bias=bias),
        ]
        if bn:
            layers.append(nn.BatchNorm2d(out_channels))
        if act is not None:
            layers.append(act)
        super(BasicBlock, self).__init__(*layers)
class ResBlock(nn.Module):
    """Residual block: conv -> act -> conv, plus a scaled identity skip."""

    def __init__(
            self, conv, n_feats, kernel_size,
            bias=True, bn=False, act=nn.ReLU(True), res_scale=1):
        super(ResBlock, self).__init__()
        layers = []
        for i in range(2):
            layers.append(conv(n_feats, n_feats, kernel_size, bias=bias))
            if bn:
                layers.append(nn.BatchNorm2d(n_feats))
            if i == 0:
                # Activation only between the two convolutions.
                layers.append(act)
        self.body = nn.Sequential(*layers)
        self.res_scale = res_scale

    def forward(self, x):
        # Scale the residual branch, then add the identity skip.
        residual = self.body(x) * self.res_scale
        return x + residual
class Upsampler(nn.Sequential):
    """Pixel-shuffle upsampler supporting scale factors 2^n and 3.

    Any other scale raises NotImplementedError.
    """

    def __init__(self, conv, scale, n_feats, bn=False, act=False, bias=True):
        modules = []

        def add_stage(factor):
            # One conv + PixelShuffle stage enlarging H and W by `factor`.
            modules.append(conv(n_feats, factor * factor * n_feats, 3, bias))
            modules.append(nn.PixelShuffle(factor))
            if bn:
                modules.append(nn.BatchNorm2d(n_feats))
            if act == 'relu':
                modules.append(nn.ReLU(True))
            elif act == 'prelu':
                modules.append(nn.PReLU(n_feats))

        if (scale & (scale - 1)) == 0:  # power of two
            for _ in range(int(math.log(scale, 2))):
                add_stage(2)
        elif scale == 3:
            add_stage(3)
        else:
            raise NotImplementedError
        super(Upsampler, self).__init__(*modules)
import torch
import torch.nn as nn
import torch.nn.functional as F
from torch.autograd import Variable
def default_conv(in_channels, out_channels, kernel_size, bias=True):
return nn.Conv2d(
in_channels, out_channels, kernel_size,
padding=(kernel_size//2), bias=bias)
class MeanShift(nn.Conv2d):
def __init__(self, rgb_range, rgb_mean=(0.4488, 0.4371, 0.4040), rgb_std=(1.0, 1.0, 1.0), sign=-1):
super(MeanShift, self).__init__(3, 3, kernel_size=1)
std = torch.Tensor(rgb_std)
self.weight.data = torch.eye(3).view(3, 3, 1, 1)
self.weight.data.div_(std.view(3, 1, 1, 1))
self.bias.data = sign * rgb_range * torch.Tensor(rgb_mean)
self.bias.data.div_(std)
self.requires_grad = False
class BasicBlock(nn.Sequential):
def __init__(
self, in_channels, out_channels, kernel_size, stride=1, bias=False,
bn=True, act=nn.ReLU(True)):
m = [nn.Conv2d(
in_channels, out_channels, kernel_size,
padding=(kernel_size//2), stride=stride, bias=bias)
]
if bn: m.append(nn.BatchNorm2d(out_channels))
if act is not None: m.append(act)
super(BasicBlock, self).__init__(*m)
class ResBlock(nn.Module):
def __init__(
self, conv, n_feats, kernel_size,
bias=True, bn=False, act=nn.ReLU(True), res_scale=1):
super(ResBlock, self).__init__()
m = []
for i in range(2):
m.append(conv(n_feats, n_feats, kernel_size, bias=bias))
if bn: m.append(nn.BatchNorm2d(n_feats))
if i == 0: m.append(act)
self.body = nn.Sequential(*m)
self.res_scale = res_scale
def forward(self, x):
res = self.body(x).mul(self.res_scale)
res += x
return res
class Upsampler(nn.Sequential):
def __init__(self, conv, scale, n_feats, bn=False, act=False, bias=True):
m = []
if (scale & (scale - 1)) == 0:
for _ in range(int(math.log(scale, 2))):
m.append(conv(n_feats, 4 * n_feats, 3, bias))
m.append(nn.PixelShuffle(2))
if bn: m.append(nn.BatchNorm2d(n_feats))
if act == 'relu':
m.append(nn.ReLU(True))
elif act == 'prelu':
m.append(nn.PReLU(n_feats))
elif scale == 3:
m.append(conv(n_feats, 9 * n_feats, 3, bias))
m.append(nn.PixelShuffle(3))
if bn: m.append(nn.BatchNorm2d(n_feats))
if act == 'relu':
m.append(nn.ReLU(True))
elif act == 'prelu':
m.append(nn.PReLU(n_feats))
else:
raise NotImplementedError
super(Upsampler, self).__init__(*m) | true | true |
f720ae8b8a42176cee5c72888875e04ea9be0096 | 749 | py | Python | tests/hooks.py | j-mechacorta/atoolbox | 900ad665f463d16911982dfadab7015cb95aa5ca | [
"MIT"
] | null | null | null | tests/hooks.py | j-mechacorta/atoolbox | 900ad665f463d16911982dfadab7015cb95aa5ca | [
"MIT"
] | null | null | null | tests/hooks.py | j-mechacorta/atoolbox | 900ad665f463d16911982dfadab7015cb95aa5ca | [
"MIT"
] | null | null | null | import os
from os.path import dirname as _dir
import logging
def get_logger(name):
    """Return a logger namespaced under the session-wide 'conftest' logger."""
    full_name = 'conftest.%s' % name
    return logging.getLogger(full_name)
def pytest_sessionstart(session):
    """Pytest hook: attach a file handler to the 'conftest' logger tree.

    The log file is created two directories above this file; its path is
    echoed to stdout so it is easy to find after a run.
    """
    line_format = "[%(name)s][%(levelname)-6s] %(message)s"
    timestamped_format = "[%(asctime)s]" + line_format

    conftest_logger = logging.getLogger('conftest')

    here = os.path.dirname(os.path.realpath(__file__))
    project_root = _dir(_dir(here))
    log_file = os.path.join(project_root, 'pytest-functional-tests.log')
    print(log_file)

    conftest_logger.setLevel(logging.INFO)

    # File handler captures everything at DEBUG and above.
    file_handler = logging.FileHandler(log_file)
    file_handler.setLevel(logging.DEBUG)
    file_handler.setFormatter(logging.Formatter(timestamped_format, "%Y-%m-%d %H:%M:%S"))
    conftest_logger.addHandler(file_handler)
| 26.75 | 72 | 0.698264 | import os
from os.path import dirname as _dir
import logging
def get_logger(name):
return logging.getLogger('conftest.%s' % name)
def pytest_sessionstart(session):
BASE_FORMAT = "[%(name)s][%(levelname)-6s] %(message)s"
FILE_FORMAT = "[%(asctime)s]" + BASE_FORMAT
root_logger = logging.getLogger('conftest')
dir_path = os.path.dirname(os.path.realpath(__file__))
top_level = _dir(_dir(dir_path))
log_file = os.path.join(top_level, 'pytest-functional-tests.log')
print(log_file)
root_logger.setLevel(logging.INFO)
fh = logging.FileHandler(log_file)
fh.setLevel(logging.DEBUG)
fh.setFormatter(logging.Formatter(FILE_FORMAT, "%Y-%m-%d %H:%M:%S"))
root_logger.addHandler(fh)
| true | true |
f720ae996883f8afdc19851c7b8222b960cb4d67 | 389 | py | Python | python-ds-practice/10_frequency/frequency.py | MostFunGuy/SpringboardProjectsPublic | bbda3ba26ecf8a09e62df81583122cae83acc1e6 | [
"MIT"
] | null | null | null | python-ds-practice/10_frequency/frequency.py | MostFunGuy/SpringboardProjectsPublic | bbda3ba26ecf8a09e62df81583122cae83acc1e6 | [
"MIT"
] | null | null | null | python-ds-practice/10_frequency/frequency.py | MostFunGuy/SpringboardProjectsPublic | bbda3ba26ecf8a09e62df81583122cae83acc1e6 | [
"MIT"
] | null | null | null | def frequency(lst, search_term):
"""Return frequency of term in lst.
>>> frequency([1, 4, 3, 4, 4], 4)
3
>>> frequency([1, 4, 3], 7)
0
"""
return lst.count(search_term)
print(F"frequency.py: frequency([1, 4, 3, 4, 4], 4) = `3` = {frequency([1, 4, 3, 4, 4], 4)}")
print(F"frequency.py: frequency([1, 4, 3], 7) = `0` = {frequency([1, 4, 3], 7)}") | 35.363636 | 93 | 0.511568 | def frequency(lst, search_term):
return lst.count(search_term)
print(F"frequency.py: frequency([1, 4, 3, 4, 4], 4) = `3` = {frequency([1, 4, 3, 4, 4], 4)}")
print(F"frequency.py: frequency([1, 4, 3], 7) = `0` = {frequency([1, 4, 3], 7)}") | true | true |
f720aeded9d52c0f3f6082dcb150a7020df5a4fb | 107 | py | Python | models/__init__.py | Abdulah-Fawaz/Benchmarking-Surface-DL | 9693379f26d57f9aabf28b973f40a9f6f627d26f | [
"MIT"
] | 2 | 2021-12-04T07:04:56.000Z | 2021-12-13T16:28:50.000Z | models/__init__.py | Abdulah-Fawaz/Benchmarking-Surface-DL | 9693379f26d57f9aabf28b973f40a9f6f627d26f | [
"MIT"
] | 1 | 2021-12-21T09:36:11.000Z | 2022-01-25T10:26:43.000Z | models/__init__.py | Abdulah-Fawaz/Benchmarking-Surface-DL | 9693379f26d57f9aabf28b973f40a9f6f627d26f | [
"MIT"
] | 1 | 2022-02-27T17:38:19.000Z | 2022-02-27T17:38:19.000Z | #!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
Created on Sun Nov 29 10:29:06 2020
@author: fa19
"""
| 11.888889 | 35 | 0.588785 | true | true | |
f720af0f5626dfcd589a2242cb387ffca059e40a | 7,100 | py | Python | querybook/server/models/admin.py | czgu/querybook | fb3120245cd9693b7aa67bf0f08d427fd2dde74b | [
"Apache-2.0"
] | 1,144 | 2021-03-30T05:06:16.000Z | 2022-03-31T10:40:31.000Z | querybook/server/models/admin.py | czgu/querybook | fb3120245cd9693b7aa67bf0f08d427fd2dde74b | [
"Apache-2.0"
] | 100 | 2021-03-30T19:43:45.000Z | 2022-03-25T17:29:32.000Z | querybook/server/models/admin.py | czgu/querybook | fb3120245cd9693b7aa67bf0f08d427fd2dde74b | [
"Apache-2.0"
] | 113 | 2021-03-30T00:07:20.000Z | 2022-03-31T07:18:43.000Z | import sqlalchemy as sql
from sqlalchemy.orm import relationship, backref
from app import db
from const.admin import AdminOperation
from const.db import (
name_length,
now,
description_length,
# mediumtext_length,
# text_length
)
from lib.sqlalchemy import CRUDMixin
Base = db.Base
class Announcement(CRUDMixin, Base):
    """Site-wide banner message surfaced to users.

    Serialized via to_dict() for regular users and to_dict_admin() for the
    admin UI (which additionally sees authorship and scheduling fields).
    """

    __tablename__ = "announcements"
    __table_args__ = {"mysql_engine": "InnoDB", "mysql_charset": "utf8mb4"}

    id = sql.Column(sql.Integer, primary_key=True)
    created_at = sql.Column(sql.DateTime, default=now)
    updated_at = sql.Column(sql.DateTime, default=now)
    # Author; the row is removed when the user is deleted (CASCADE).
    uid = sql.Column(sql.Integer, sql.ForeignKey("user.id", ondelete="CASCADE"))
    message = sql.Column(sql.String(length=description_length))
    # Presumably restricts which frontend routes display the banner —
    # TODO confirm against the frontend consumer.
    url_regex = sql.Column(sql.String(length=name_length))
    can_dismiss = sql.Column(sql.Boolean, default=True)
    # Display window; exact inclusivity is decided by the consumer — verify.
    active_from = sql.Column(sql.Date)
    active_till = sql.Column(sql.Date)

    def to_dict(self):
        """Serialization for regular users (subset of fields)."""
        return {
            "id": self.id,
            "message": self.message,
            "url_regex": self.url_regex,
            "can_dismiss": self.can_dismiss,
        }

    def to_dict_admin(self):
        """Serialization for the admin UI, including audit/scheduling fields."""
        return {
            "id": self.id,
            "created_at": self.created_at,
            "updated_at": self.updated_at,
            "message": self.message,
            "uid": self.uid,
            "url_regex": self.url_regex,
            "can_dismiss": self.can_dismiss,
            "active_from": self.active_from,
            "active_till": self.active_till,
        }
class QueryEngineEnvironment(CRUDMixin, Base):
    """Association table linking query engines to environments (many-to-many).

    Each (engine, environment) pair is unique; engine_order provides the
    per-environment ordering used by the QueryEngine.environments backref.
    """

    __tablename__ = "query_engine_environment"
    __table_args__ = (
        sql.UniqueConstraint(
            "query_engine_id", "environment_id", name="unique_query_engine_environment"
        ),
    )

    id = sql.Column(sql.Integer, primary_key=True, autoincrement=True)
    query_engine_id = sql.Column(
        sql.Integer,
        sql.ForeignKey("query_engine.id", ondelete="CASCADE"),
        nullable=False,
    )
    environment_id = sql.Column(
        sql.Integer,
        sql.ForeignKey("environment.id", ondelete="CASCADE"),
        nullable=False,
    )
    # Sort key: see order_by="QueryEngineEnvironment.engine_order" on the
    # Environment.query_engines backref in QueryEngine below.
    engine_order = sql.Column(sql.Integer, nullable=False)
class QueryEngine(CRUDMixin, Base):
    """A configured query execution engine.

    Pairs an executor implementation with its parameters, optionally links
    to a metastore, and is exposed in one or more environments.
    """

    __tablename__ = "query_engine"

    id = sql.Column(sql.Integer, primary_key=True)
    created_at = sql.Column(sql.DateTime, default=now)
    updated_at = sql.Column(sql.DateTime, default=now)
    # Presumably a soft-delete timestamp — confirm CRUDMixin semantics.
    deleted_at = sql.Column(sql.DateTime)
    name = sql.Column(sql.String(length=name_length), unique=True, nullable=False)
    description = sql.Column(sql.String(length=name_length))
    language = sql.Column(sql.String(length=name_length), nullable=False)
    # Name of the executor implementation used to run queries on this engine.
    executor = sql.Column(sql.String(length=name_length), nullable=False)
    status_checker = sql.Column(sql.String(length=name_length))

    # JSON field: executor-specific configuration; only exposed to admins.
    executor_params = sql.Column(sql.JSON)
    control_params = sql.Column(sql.JSON, default={}, nullable=False)

    # Engine may outlive its metastore (SET NULL on delete).
    metastore_id = sql.Column(
        sql.Integer, sql.ForeignKey("query_metastore.id", ondelete="SET NULL")
    )
    metastore = relationship("QueryMetastore", backref="query_engine")
    environments = relationship(
        "Environment",
        secondary="query_engine_environment",
        backref=backref(
            "query_engines", order_by="QueryEngineEnvironment.engine_order"
        ),
    )

    def to_dict(self):
        # IMPORTANT: do not expose executor params unless it is for admin
        return {
            "id": self.id,
            "name": self.name,
            "language": self.language,
            "description": self.description,
            "metastore_id": self.metastore_id,
            "executor": self.executor,
        }

    def to_dict_admin(self):
        # THIS API IS FOR ADMIN USAGE
        return {
            "id": self.id,
            "created_at": self.created_at,
            "updated_at": self.updated_at,
            "deleted_at": self.deleted_at,
            "name": self.name,
            "language": self.language,
            "description": self.description,
            "metastore_id": self.metastore_id,
            "executor": self.executor,
            "executor_params": self.get_engine_params(),
            "control_params": self.control_params,
            "status_checker": self.status_checker,
            "environments": self.environments,
        }

    def get_engine_params(self):
        """Return the raw executor configuration (admin-only data)."""
        return self.executor_params
class QueryMetastore(CRUDMixin, Base):
    """Configuration for a metadata store that query engines can attach to."""

    __tablename__ = "query_metastore"

    id = sql.Column(sql.Integer, primary_key=True)
    created_at = sql.Column(sql.DateTime, default=now)
    updated_at = sql.Column(sql.DateTime, default=now)
    # Presumably a soft-delete timestamp — confirm CRUDMixin semantics.
    deleted_at = sql.Column(sql.DateTime)
    name = sql.Column(sql.String(length=name_length), unique=True, nullable=False)
    # Comma separated hive metastore urls
    # NOTE(review): the comment above predates this line and may describe
    # metastore_params rather than the loader name — verify.
    loader = sql.Column(sql.String(length=128), nullable=False)
    metastore_params = sql.Column(sql.JSON)
    acl_control = sql.Column(sql.JSON, default={}, nullable=False)

    def to_dict(self):
        """Public serialization: only id and name are exposed."""
        return {"id": self.id, "name": self.name}

    def to_dict_admin(self):
        """Admin serialization, including loader config and ACL settings."""
        return {
            "id": self.id,
            "created_at": self.created_at,
            "updated_at": self.updated_at,
            "deleted_at": self.deleted_at,
            "name": self.name,
            "loader": self.loader,
            "metastore_params": self.metastore_params,
            "acl_control": self.acl_control,
        }
class APIAccessToken(CRUDMixin, Base):
    """API token credential with creator/updater audit fields."""

    __tablename__ = "api_access_token"

    id = sql.Column(sql.Integer, primary_key=True)
    # The secret itself; deliberately excluded from to_dict() below.
    token = sql.Column(sql.String(length=128), unique=True, nullable=False)
    description = sql.Column(sql.String(length=description_length))
    enabled = sql.Column(sql.Boolean, default=True)
    created_at = sql.Column(sql.DateTime, default=now)
    creator_uid = sql.Column(sql.Integer, sql.ForeignKey("user.id", ondelete="CASCADE"))
    updated_at = sql.Column(sql.DateTime, default=now)
    updater_uid = sql.Column(sql.Integer, sql.ForeignKey("user.id", ondelete="CASCADE"))

    def to_dict(self):
        """Serialize token metadata WITHOUT the secret token value."""
        return {
            "id": self.id,
            "description": self.description,
            "enabled": self.enabled,
            "created_at": self.created_at,
            "creator_uid": self.creator_uid,
            "updated_at": self.updated_at,
            "updater_uid": self.updater_uid,
        }
class AdminAuditLog(CRUDMixin, Base):
    """Audit trail row recording an admin operation on some item."""

    __tablename__ = "admin_audit_log"

    id = sql.Column(sql.Integer, primary_key=True)
    created_at = sql.Column(sql.DateTime, default=now, nullable=False)
    # Admin who performed the operation.
    uid = sql.Column(sql.Integer, sql.ForeignKey("user.id", ondelete="CASCADE"))
    # Type + id of the affected item; indexed for lookups per item.
    item_type = sql.Column(sql.String(length=name_length), nullable=False, index=True)
    item_id = sql.Column(sql.Integer, nullable=False, index=True)
    # Operation kind, constrained to the AdminOperation enum.
    op = sql.Column(sql.Enum(AdminOperation), nullable=False)
    log = sql.Column(sql.String(length=description_length))

    user = relationship("User", uselist=False)
| 33.809524 | 88 | 0.647183 | import sqlalchemy as sql
from sqlalchemy.orm import relationship, backref
from app import db
from const.admin import AdminOperation
from const.db import (
name_length,
now,
description_length,
)
from lib.sqlalchemy import CRUDMixin
Base = db.Base
class Announcement(CRUDMixin, Base):
__tablename__ = "announcements"
__table_args__ = {"mysql_engine": "InnoDB", "mysql_charset": "utf8mb4"}
id = sql.Column(sql.Integer, primary_key=True)
created_at = sql.Column(sql.DateTime, default=now)
updated_at = sql.Column(sql.DateTime, default=now)
uid = sql.Column(sql.Integer, sql.ForeignKey("user.id", ondelete="CASCADE"))
message = sql.Column(sql.String(length=description_length))
url_regex = sql.Column(sql.String(length=name_length))
can_dismiss = sql.Column(sql.Boolean, default=True)
active_from = sql.Column(sql.Date)
active_till = sql.Column(sql.Date)
def to_dict(self):
return {
"id": self.id,
"message": self.message,
"url_regex": self.url_regex,
"can_dismiss": self.can_dismiss,
}
def to_dict_admin(self):
return {
"id": self.id,
"created_at": self.created_at,
"updated_at": self.updated_at,
"message": self.message,
"uid": self.uid,
"url_regex": self.url_regex,
"can_dismiss": self.can_dismiss,
"active_from": self.active_from,
"active_till": self.active_till,
}
class QueryEngineEnvironment(CRUDMixin, Base):
__tablename__ = "query_engine_environment"
__table_args__ = (
sql.UniqueConstraint(
"query_engine_id", "environment_id", name="unique_query_engine_environment"
),
)
id = sql.Column(sql.Integer, primary_key=True, autoincrement=True)
query_engine_id = sql.Column(
sql.Integer,
sql.ForeignKey("query_engine.id", ondelete="CASCADE"),
nullable=False,
)
environment_id = sql.Column(
sql.Integer,
sql.ForeignKey("environment.id", ondelete="CASCADE"),
nullable=False,
)
engine_order = sql.Column(sql.Integer, nullable=False)
class QueryEngine(CRUDMixin, Base):
    """ORM model describing a configured query engine.

    Records how queries run (``executor`` + ``executor_params``) and which
    ``language`` the engine accepts.  Linked N:M to environments through the
    ``query_engine_environment`` association table (ordered by
    ``engine_order``) and optionally to one metastore (``SET NULL`` on
    metastore deletion).
    """
    __tablename__ = "query_engine"
    id = sql.Column(sql.Integer, primary_key=True)
    created_at = sql.Column(sql.DateTime, default=now)
    updated_at = sql.Column(sql.DateTime, default=now)
    deleted_at = sql.Column(sql.DateTime)  # presumably soft-delete marker; NULL = active -- confirm
    name = sql.Column(sql.String(length=name_length), unique=True, nullable=False)
    description = sql.Column(sql.String(length=name_length))
    language = sql.Column(sql.String(length=name_length), nullable=False)
    executor = sql.Column(sql.String(length=name_length), nullable=False)
    status_checker = sql.Column(sql.String(length=name_length))
    executor_params = sql.Column(sql.JSON)
    # NOTE(review): default={} is one shared dict object across all inserts;
    # a callable default (e.g. default=dict) is the safer idiom -- confirm intent.
    control_params = sql.Column(sql.JSON, default={}, nullable=False)
    metastore_id = sql.Column(
        sql.Integer, sql.ForeignKey("query_metastore.id", ondelete="SET NULL")
    )
    metastore = relationship("QueryMetastore", backref="query_engine")
    environments = relationship(
        "Environment",
        secondary="query_engine_environment",
        backref=backref(
            "query_engines", order_by="QueryEngineEnvironment.engine_order"
        ),
    )
    def to_dict(self):
        """Public (non-admin) serialization: basic identifying fields only."""
        return {
            "id": self.id,
            "name": self.name,
            "language": self.language,
            "description": self.description,
            "metastore_id": self.metastore_id,
            "executor": self.executor,
        }
    def to_dict_admin(self):
        """Admin serialization: adds timestamps, params, and environments."""
        return {
            "id": self.id,
            "created_at": self.created_at,
            "updated_at": self.updated_at,
            "deleted_at": self.deleted_at,
            "name": self.name,
            "language": self.language,
            "description": self.description,
            "metastore_id": self.metastore_id,
            "executor": self.executor,
            "executor_params": self.get_engine_params(),
            "control_params": self.control_params,
            "status_checker": self.status_checker,
            "environments": self.environments,
        }
    def get_engine_params(self):
        """Return the raw executor parameters (hook used by to_dict_admin)."""
        return self.executor_params
class QueryMetastore(CRUDMixin, Base):
    """ORM model for a metastore configuration, loaded via ``loader``."""
    __tablename__ = "query_metastore"
    id = sql.Column(sql.Integer, primary_key=True)
    created_at = sql.Column(sql.DateTime, default=now)
    updated_at = sql.Column(sql.DateTime, default=now)
    deleted_at = sql.Column(sql.DateTime)  # presumably soft-delete marker -- confirm
    name = sql.Column(sql.String(length=name_length), unique=True, nullable=False)
    loader = sql.Column(sql.String(length=128), nullable=False)
    metastore_params = sql.Column(sql.JSON)
    # NOTE(review): default={} is one shared dict across inserts -- confirm intent.
    acl_control = sql.Column(sql.JSON, default={}, nullable=False)
    def to_dict(self):
        """Public serialization: id and name only."""
        return {"id": self.id, "name": self.name}
    def to_dict_admin(self):
        """Admin serialization: all fields including loader and params."""
        return {
            "id": self.id,
            "created_at": self.created_at,
            "updated_at": self.updated_at,
            "deleted_at": self.deleted_at,
            "name": self.name,
            "loader": self.loader,
            "metastore_params": self.metastore_params,
            "acl_control": self.acl_control,
        }
class APIAccessToken(CRUDMixin, Base):
    """ORM model for an API access token with creator/updater audit fields."""
    __tablename__ = "api_access_token"
    id = sql.Column(sql.Integer, primary_key=True)
    token = sql.Column(sql.String(length=128), unique=True, nullable=False)
    description = sql.Column(sql.String(length=description_length))
    enabled = sql.Column(sql.Boolean, default=True)
    created_at = sql.Column(sql.DateTime, default=now)
    creator_uid = sql.Column(sql.Integer, sql.ForeignKey("user.id", ondelete="CASCADE"))
    updated_at = sql.Column(sql.DateTime, default=now)
    updater_uid = sql.Column(sql.Integer, sql.ForeignKey("user.id", ondelete="CASCADE"))
    def to_dict(self):
        """Serialize all fields except the ``token`` value itself."""
        return {
            "id": self.id,
            "description": self.description,
            "enabled": self.enabled,
            "created_at": self.created_at,
            "creator_uid": self.creator_uid,
            "updated_at": self.updated_at,
            "updater_uid": self.updater_uid,
        }
class AdminAuditLog(CRUDMixin, Base):
    """Audit record of an admin operation, keyed by item_type/item_id."""
    __tablename__ = "admin_audit_log"
    id = sql.Column(sql.Integer, primary_key=True)
    created_at = sql.Column(sql.DateTime, default=now, nullable=False)
    uid = sql.Column(sql.Integer, sql.ForeignKey("user.id", ondelete="CASCADE"))
    # item_type + item_id identify the record acted upon; both are indexed
    # so the log can be filtered per item.
    item_type = sql.Column(sql.String(length=name_length), nullable=False, index=True)
    item_id = sql.Column(sql.Integer, nullable=False, index=True)
    op = sql.Column(sql.Enum(AdminOperation), nullable=False)
    log = sql.Column(sql.String(length=description_length))
    user = relationship("User", uselist=False)
| true | true |
f720af70da7be7958d444ad2af3d0b7e0b2ef072 | 601 | py | Python | basicsortings/SelectionSort.py | ankushdecoded123/basicalgorithms | f8d42a57d7619ddb29fd6eae9e5f2db27ee5712c | [
"Apache-2.0"
] | null | null | null | basicsortings/SelectionSort.py | ankushdecoded123/basicalgorithms | f8d42a57d7619ddb29fd6eae9e5f2db27ee5712c | [
"Apache-2.0"
] | null | null | null | basicsortings/SelectionSort.py | ankushdecoded123/basicalgorithms | f8d42a57d7619ddb29fd6eae9e5f2db27ee5712c | [
"Apache-2.0"
] | null | null | null | # selectionsort() method
def selectionSort(arr):
    """Sort ``arr`` in ascending order, in place, using selection sort.

    O(n^2) comparisons.  Returns None and mutates the input list, matching
    the stdlib convention for in-place sorts (cf. ``list.sort``).
    """
    size = len(arr)
    for i in range(size):
        # Find the index of the smallest remaining element.
        min_idx = i  # renamed from ``min`` to avoid shadowing the builtin
        for j in range(i + 1, size):
            if arr[j] < arr[min_idx]:
                min_idx = j
        # Swap the minimum into position i.
        arr[i], arr[min_idx] = arr[min_idx], arr[i]
# Helper: print the elements of a list on one line.
def printList(arr):
    """Print each element of ``arr`` followed by a space, then a blank line."""
    for value in arr:
        print(value, end=" ")
    print("\n")
# Script entry point: demonstrate the sort on a sample list.
if __name__ == '__main__':
    sample = [3, 4, 1, 7, 6, 2, 8]
    print("Given array: ", end="\n")
    printList(sample)
    selectionSort(sample)
    print("Sorted array: ", end="\n")
    printList(sample)
def selectionSort(arr):
arraySize = len(arr)
for i in range(arraySize):
min = i
for j in range(i+1, arraySize):
if arr[j] < arr[min]:
min = j
arr[i], arr[min] = arr[min], arr[i]
def printList(arr):
for i in range(len(arr)):
print(arr[i],end=" ")
print("\n")
if __name__ == '__main__':
arr = [3,4,1,7,6,2,8]
print ("Given array: ", end="\n")
printList(arr)
selectionSort(arr)
print("Sorted array: ", end="\n")
printList(arr) | true | true |
f720b19536007d90852dfc1229d07fda01236456 | 2,189 | py | Python | functions/sample/python/main.py | aneeshmraj/agfzb-CloudAppDevelopment_Capstone | ed9b1a675a0c4325e56bf77ed4497a36d1755484 | [
"Apache-2.0"
] | null | null | null | functions/sample/python/main.py | aneeshmraj/agfzb-CloudAppDevelopment_Capstone | ed9b1a675a0c4325e56bf77ed4497a36d1755484 | [
"Apache-2.0"
] | null | null | null | functions/sample/python/main.py | aneeshmraj/agfzb-CloudAppDevelopment_Capstone | ed9b1a675a0c4325e56bf77ed4497a36d1755484 | [
"Apache-2.0"
] | null | null | null | #
#
# main() will be run when you invoke this action
#
# @param Cloud Functions actions accept a single parameter, which must be a JSON object.
#
# @return The output of this action, which must be a JSON object.
#
#
from cloudant.client import Cloudant
from cloudant.error import CloudantException
from cloudant.query import Query
from requests import ConnectionError, ReadTimeout, RequestException
import requests
import sys
def main(dict):
    """Cloud Functions action: fetch all reviews for one dealership.

    Expects ``dict`` to contain "IAM_API_KEY", "COUCH_URL" and "dealerId".
    Returns {"docs": [...]} on success; returns None when one of the
    handled exceptions fires (the error is only printed).
    NOTE(review): the parameter name shadows the builtin ``dict`` --
    consider renaming, but confirm how the runtime invokes the action first.
    """
    print(dict)
    # Authenticate to Cloudant via IAM and open the "reviews" database.
    service = Cloudant.iam(None, dict["IAM_API_KEY"], url=dict["COUCH_URL"], connect=True)
    db = service['reviews']
    try:
        # Match documents whose "dealership" field equals the requested id.
        selector = {'dealership': {'$eq':int(dict["dealerId"])}}
        docs = db.get_query_result(selector)
        reviews = []
        for doc in docs:
            reviews.append(doc)
        return {"docs":reviews}
    except CloudantException as ce:
        print("Method failed")
        print(" - status code: " + str(ce.code))
        print(" - error message: " + ce.message)
    except ConnectionError as cerr:
        print("Connection error occurred:")
        print(cerr)
    except ReadTimeout as rt:
        # The server did not send any data in the allotted amount of time.
        print("Read timed out:")
        print(rt)
    except RequestException as re:
        # Handle other request failures
        print("Request Exception:")
        print(re)
# add review
def main1(dict):
    """Cloud Functions action: store one review document in Cloudant.

    Expects ``dict`` to contain "IAM_API_KEY", "COUCH_URL" and "review"
    (the document body).  Returns a success message dict when the created
    document exists; otherwise returns None (handled errors are printed).
    NOTE(review): unlike ``main`` above, CloudantException is not caught
    here -- confirm whether that is intentional.
    """
    print(dict)
    # Authenticate to Cloudant via IAM and open the "reviews" database.
    service = Cloudant.iam(None, dict["IAM_API_KEY"], url=dict["COUCH_URL"], connect=True)
    db = service['reviews']
    try:
        # Create a document using the Database API
        my_document = db.create_document(dict["review"])
        # Check that the document exists in the database
        if my_document.exists():
            return {"text": "Review successfully added."}
    except ConnectionError as cerr:
        print("Connection error occurred:")
        print(cerr)
    except ReadTimeout as rt:
        # The server did not send any data in the allotted amount of time.
        print("Read timed out:")
        print(rt)
    except RequestException as re:
        # Handle other request failures
        print("Request Exception:")
        print(re)
| 31.724638 | 90 | 0.643216 |
from cloudant.client import Cloudant
from cloudant.error import CloudantException
from cloudant.query import Query
from requests import ConnectionError, ReadTimeout, RequestException
import requests
import sys
def main(dict):
print(dict)
service = Cloudant.iam(None, dict["IAM_API_KEY"], url=dict["COUCH_URL"], connect=True)
db = service['reviews']
try:
selector = {'dealership': {'$eq':int(dict["dealerId"])}}
docs = db.get_query_result(selector)
reviews = []
for doc in docs:
reviews.append(doc)
return {"docs":reviews}
except CloudantException as ce:
print("Method failed")
print(" - status code: " + str(ce.code))
print(" - error message: " + ce.message)
except ConnectionError as cerr:
print("Connection error occurred:")
print(cerr)
except ReadTimeout as rt:
print("Read timed out:")
print(rt)
except RequestException as re:
print("Request Exception:")
print(re)
def main1(dict):
print(dict)
service = Cloudant.iam(None, dict["IAM_API_KEY"], url=dict["COUCH_URL"], connect=True)
db = service['reviews']
try:
my_document = db.create_document(dict["review"])
if my_document.exists():
return {"text": "Review successfully added."}
except ConnectionError as cerr:
print("Connection error occurred:")
print(cerr)
except ReadTimeout as rt:
print("Read timed out:")
print(rt)
except RequestException as re:
print("Request Exception:")
print(re)
| true | true |
f720b26349b04b5e0459f3b75168c72fe5c3ff77 | 5,299 | py | Python | src/OFS/tests/test_Uninstalled.py | rbanffy/Zope | ecf6770219052e7c7f8c9634ddf187a1e6280742 | [
"ZPL-2.1"
] | null | null | null | src/OFS/tests/test_Uninstalled.py | rbanffy/Zope | ecf6770219052e7c7f8c9634ddf187a1e6280742 | [
"ZPL-2.1"
] | 1 | 2020-11-11T07:11:31.000Z | 2020-11-11T07:11:31.000Z | src/OFS/tests/test_Uninstalled.py | rbanffy/Zope | ecf6770219052e7c7f8c9634ddf187a1e6280742 | [
"ZPL-2.1"
] | null | null | null | ##############################################################################
#
# Copyright (c) 2006 Zope Foundation and Contributors.
#
# This software is subject to the provisions of the Zope Public License,
# Version 2.1 (ZPL). A copy of the ZPL should accompany this distribution.
# THIS SOFTWARE IS PROVIDED "AS IS" AND ANY AND ALL EXPRESS OR IMPLIED
# WARRANTIES ARE DISCLAIMED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
# WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND FITNESS
# FOR A PARTICULAR PURPOSE.
#
##############################################################################
import unittest
from OFS.SimpleItem import SimpleItem
from Testing.ZopeTestCase import base
class ToBreak(SimpleItem):
    """Minimal persistent item whose class is later deleted from the module
    namespace to simulate a 'broken' (uninstalled) object."""
    pass
class TestsOfBroken(unittest.TestCase):
    """Tests for the factory for "broken" classes.
    """
    def setUp(self):
        # Save and clear the module-level cache of broken classes so that
        # each test runs against an empty cache.
        from OFS.Uninstalled import broken_klasses
        from OFS.Uninstalled import broken_klasses_lock
        self.broken_klasses_OLD = {}
        broken_klasses_lock.acquire()
        try:
            self.broken_klasses_OLD.update(broken_klasses)
            broken_klasses.clear()
        finally:
            broken_klasses_lock.release()
    def tearDown(self):
        # Restore the cache contents saved in setUp (under the same lock).
        from OFS.Uninstalled import broken_klasses
        from OFS.Uninstalled import broken_klasses_lock
        broken_klasses_lock.acquire()
        try:
            broken_klasses.clear()
            broken_klasses.update(self.broken_klasses_OLD)
        finally:
            broken_klasses_lock.release()
    def test_Broken_non_product_no_oid_yields_class_derived_from_Broken(self):
        """A non-Products module path yields product_name 'unknown'."""
        from OFS.Uninstalled import Broken
        from OFS.Uninstalled import BrokenClass
        klass = Broken(self, None, ('some.python.module', 'MyClass'))
        self.assertTrue(issubclass(klass, BrokenClass))
        self.assertEqual(klass.__name__, 'MyClass')
        self.assertEqual(klass.__module__, 'some.python.module')
        self.assertEqual(klass.product_name, 'unknown')
    def test_Broken_product_no_oid_yields_class_derived_from_Broken(self):
        """A Products.* module path yields the product's name."""
        from OFS.Uninstalled import Broken
        from OFS.Uninstalled import BrokenClass
        klass = Broken(self, None, ('Products.MyProduct.MyClass', 'MyClass'))
        self.assertTrue(issubclass(klass, BrokenClass))
        self.assertEqual(klass.__name__, 'MyClass')
        self.assertEqual(klass.__module__, 'Products.MyProduct.MyClass')
        self.assertEqual(klass.product_name, 'MyProduct')
    def test_Broken_product_with_oid_yields_instance_derived_from_Broken(self):
        """Passing an oid yields an *instance* wired to the jar and oid."""
        from OFS.Uninstalled import Broken
        from OFS.Uninstalled import BrokenClass
        OID = '\x01' * 8
        inst = Broken(self, OID, ('Products.MyProduct.MyClass', 'MyClass'))
        self.assertIsInstance(inst, BrokenClass)
        self.assertTrue(inst._p_jar is self)
        self.assertEqual(inst._p_oid, OID)
        klass = inst.__class__
        self.assertEqual(klass.__name__, 'MyClass')
        self.assertEqual(klass.__module__, 'Products.MyProduct.MyClass')
        self.assertEqual(klass.product_name, 'MyProduct')
    def test_Broken_instance___getattr___allows_persistence_attrs(self):
        """Persistence attributes and methods stay reachable on instances."""
        from OFS.Uninstalled import Broken
        OID = '\x01' * 8
        PERSISTENCE_ATTRS = ["_p_changed",
                             "_p_jar",
                             "_p_mtime",
                             "_p_oid",
                             "_p_serial",
                             "_p_state"]
        PERSISTENCE_METHODS = ["_p_deactivate",
                               "_p_activate",
                               "_p_invalidate",
                               "_p_getattr",
                               "_p_setattr",
                               "_p_delattr"]
        inst = Broken(self, OID, ('Products.MyProduct.MyClass', 'MyClass'))
        for attr_name in PERSISTENCE_ATTRS:
            getattr(inst, attr_name)  # doesn't raise
        for meth_name in PERSISTENCE_METHODS:
            getattr(inst, meth_name)  # doesn't raise
class TestsIntegratedBroken(base.TestCase):
    """Integration test: store an object, break its class, read state back."""
    def test_Broken_instance___getstate___gives_access_to_its_state(self):
        from Acquisition import aq_base
        from OFS.Uninstalled import BrokenClass
        from OFS.tests import test_Uninstalled
        import transaction
        # store an instance
        tr = ToBreak()
        tr.id = 'tr'
        self.app._setObject('tr', tr)
        # commit to allow access in another connection
        transaction.commit()
        # remove class from namespace to ensure broken object
        del test_Uninstalled.ToBreak
        # get new connection that will give access to broken object
        app = base.app()
        inst = aq_base(app.tr)
        self.assertIsInstance(inst, BrokenClass)
        # The broken instance still exposes its pickled state.
        state = inst.__getstate__()
        self.assertEqual(state, {'id': 'tr'})
        # cleanup
        app.manage_delObjects('tr')
        transaction.commit()
        # check that object is not left over
        app = base.app()
        self.assertFalse('tr' in app.objectIds())
def test_suite():
    """Aggregate the broken-object test cases into a single suite."""
    return unittest.TestSuite(
        [
            unittest.makeSuite(TestsOfBroken),
            unittest.makeSuite(TestsIntegratedBroken),
        ]
    )
| 35.804054 | 79 | 0.627854 | true | true | |
f720b26c972d7bdb64501599e3be9253fa24774d | 18,722 | py | Python | python/ray/ml/tests/test_preprocessors.py | siddgoel/ray | 7f3031f451de410b71a5fcb18e04452bfa7351d6 | [
"Apache-2.0"
] | 22 | 2018-05-08T05:52:34.000Z | 2020-04-01T10:09:55.000Z | python/ray/ml/tests/test_preprocessors.py | siddgoel/ray | 7f3031f451de410b71a5fcb18e04452bfa7351d6 | [
"Apache-2.0"
] | 51 | 2018-05-17T05:55:28.000Z | 2020-03-18T06:49:49.000Z | python/ray/ml/tests/test_preprocessors.py | siddgoel/ray | 7f3031f451de410b71a5fcb18e04452bfa7351d6 | [
"Apache-2.0"
] | 10 | 2018-04-27T10:50:59.000Z | 2020-02-24T02:41:43.000Z | import warnings
from unittest.mock import patch
import numpy as np
import pandas as pd
import pytest
import ray
from ray.ml.preprocessor import PreprocessorNotFittedException
from ray.ml.preprocessors import (
BatchMapper,
StandardScaler,
MinMaxScaler,
OrdinalEncoder,
OneHotEncoder,
LabelEncoder,
SimpleImputer,
Chain,
)
def test_standard_scaler():
    """Tests basic StandardScaler functionality.

    Covers: fit stats (mean/std per column), dataset transform (a
    zero-std column maps to 0.0, nulls pass through), and pandas batch
    transform.
    """
    col_a = [-1, 0, 1, 2]
    col_b = [1, 1, 5, 5]
    col_c = [1, 1, 1, None]
    in_df = pd.DataFrame.from_dict({"A": col_a, "B": col_b, "C": col_c})
    ds = ray.data.from_pandas(in_df)
    scaler = StandardScaler(["B", "C"])
    # Transform with unfitted preprocessor.
    with pytest.raises(PreprocessorNotFittedException):
        scaler.transform(ds)
    # Fit data.
    scaler.fit(ds)
    assert scaler.stats_ == {
        "mean(B)": 3.0,
        "mean(C)": 1.0,
        "std(B)": 2.0,
        "std(C)": 0.0,
    }
    # Transform data.
    transformed = scaler.transform(ds)
    out_df = transformed.to_pandas()
    processed_col_a = col_a
    processed_col_b = [-1.0, -1.0, 1.0, 1.0]
    processed_col_c = [0.0, 0.0, 0.0, None]
    expected_df = pd.DataFrame.from_dict(
        {"A": processed_col_a, "B": processed_col_b, "C": processed_col_c}
    )
    assert out_df.equals(expected_df)
    # Transform batch.
    pred_col_a = [1, 2, 3]
    pred_col_b = [3, 5, 7]
    pred_col_c = [0, 1, 2]
    pred_in_df = pd.DataFrame.from_dict(
        {"A": pred_col_a, "B": pred_col_b, "C": pred_col_c}
    )
    pred_out_df = scaler.transform_batch(pred_in_df)
    pred_processed_col_a = pred_col_a
    pred_processed_col_b = [0.0, 1.0, 2.0]
    pred_processed_col_c = [-1.0, 0.0, 1.0]
    pred_expected_df = pd.DataFrame.from_dict(
        {
            "A": pred_processed_col_a,
            "B": pred_processed_col_b,
            "C": pred_processed_col_c,
        }
    )
    assert pred_out_df.equals(pred_expected_df)
@patch.object(warnings, "warn")
def test_fit_twice(mocked_warn):
    """Tests that a warning msg should be printed.

    Re-fitting must overwrite the previously fitted state and warn
    exactly once.
    """
    col_a = [-1, 0, 1]
    col_b = [1, 3, 5]
    col_c = [1, 1, None]
    in_df = pd.DataFrame.from_dict({"A": col_a, "B": col_b, "C": col_c})
    ds = ray.data.from_pandas(in_df)
    scaler = MinMaxScaler(["B", "C"])
    # Fit data.
    scaler.fit(ds)
    assert scaler.stats_ == {"min(B)": 1, "max(B)": 5, "min(C)": 1, "max(C)": 1}
    ds = ds.map_batches(lambda x: x * 2)
    # Fit again
    scaler.fit(ds)
    # Assert that the fitted state is corresponding to the second ds.
    assert scaler.stats_ == {"min(B)": 2, "max(B)": 10, "min(C)": 2, "max(C)": 2}
    msg = (
        "`fit` has already been called on the preprocessor (or at least one "
        "contained preprocessors if this is a chain). "
        "All previously fitted state will be overwritten!"
    )
    mocked_warn.assert_called_once_with(msg)
def test_min_max_scaler():
    """Tests basic MinMaxScaler functionality."""
    col_a = [-1, 0, 1]
    col_b = [1, 3, 5]
    col_c = [1, 1, None]
    in_df = pd.DataFrame.from_dict({"A": col_a, "B": col_b, "C": col_c})
    ds = ray.data.from_pandas(in_df)
    scaler = MinMaxScaler(["B", "C"])
    # Transform with unfitted preprocessor.
    with pytest.raises(PreprocessorNotFittedException):
        scaler.transform(ds)
    # Fit data.
    scaler.fit(ds)
    assert scaler.stats_ == {"min(B)": 1, "max(B)": 5, "min(C)": 1, "max(C)": 1}
    transformed = scaler.transform(ds)
    out_df = transformed.to_pandas()
    processed_col_a = col_a
    processed_col_b = [0.0, 0.5, 1.0]
    # Columns where min == max collapse to 0.0; nulls pass through.
    processed_col_c = [0.0, 0.0, None]
    expected_df = pd.DataFrame.from_dict(
        {"A": processed_col_a, "B": processed_col_b, "C": processed_col_c}
    )
    assert out_df.equals(expected_df)
    # Transform batch.
    pred_col_a = [1, 2, 3]
    pred_col_b = [3, 5, 7]
    pred_col_c = [0, 1, 2]
    pred_in_df = pd.DataFrame.from_dict(
        {"A": pred_col_a, "B": pred_col_b, "C": pred_col_c}
    )
    pred_out_df = scaler.transform_batch(pred_in_df)
    pred_processed_col_a = pred_col_a
    pred_processed_col_b = [0.5, 1.0, 1.5]
    pred_processed_col_c = [-1.0, 0.0, 1.0]
    pred_expected_df = pd.DataFrame.from_dict(
        {
            "A": pred_processed_col_a,
            "B": pred_processed_col_b,
            "C": pred_processed_col_c,
        }
    )
    assert pred_out_df.equals(pred_expected_df)
def test_ordinal_encoder():
    """Tests basic OrdinalEncoder functionality."""
    col_a = ["red", "green", "blue", "red"]
    col_b = ["warm", "cold", "hot", "cold"]
    col_c = [1, 10, 5, 10]
    in_df = pd.DataFrame.from_dict({"A": col_a, "B": col_b, "C": col_c})
    ds = ray.data.from_pandas(in_df)
    encoder = OrdinalEncoder(["B", "C"])
    # Transform with unfitted preprocessor.
    with pytest.raises(PreprocessorNotFittedException):
        encoder.transform(ds)
    # Fit data.
    encoder.fit(ds)
    assert encoder.stats_ == {
        "unique_values(B)": {"cold": 0, "hot": 1, "warm": 2},
        "unique_values(C)": {1: 0, 5: 1, 10: 2},
    }
    # Transform data.
    transformed = encoder.transform(ds)
    out_df = transformed.to_pandas()
    processed_col_a = col_a
    processed_col_b = [2, 0, 1, 0]
    processed_col_c = [0, 2, 1, 2]
    expected_df = pd.DataFrame.from_dict(
        {"A": processed_col_a, "B": processed_col_b, "C": processed_col_c}
    )
    assert out_df.equals(expected_df)
    # Transform batch.  Categories unseen at fit time encode to None.
    pred_col_a = ["blue", "yellow", None]
    pred_col_b = ["cold", "warm", "other"]
    pred_col_c = [10, 1, 20]
    pred_in_df = pd.DataFrame.from_dict(
        {"A": pred_col_a, "B": pred_col_b, "C": pred_col_c}
    )
    pred_out_df = encoder.transform_batch(pred_in_df)
    pred_processed_col_a = pred_col_a
    pred_processed_col_b = [0, 2, None]
    pred_processed_col_c = [2, 0, None]
    pred_expected_df = pd.DataFrame.from_dict(
        {
            "A": pred_processed_col_a,
            "B": pred_processed_col_b,
            "C": pred_processed_col_c,
        }
    )
    assert pred_out_df.equals(pred_expected_df)
    # Test null behavior.
    null_col = [1, None]
    nonnull_col = [1, 1]
    null_df = pd.DataFrame.from_dict({"A": null_col})
    null_ds = ray.data.from_pandas(null_df)
    nonnull_df = pd.DataFrame.from_dict({"A": nonnull_col})
    nonnull_ds = ray.data.from_pandas(nonnull_df)
    null_encoder = OrdinalEncoder(["A"])
    # Verify fit fails for null values.
    with pytest.raises(ValueError):
        null_encoder.fit(null_ds)
    null_encoder.fit(nonnull_ds)
    # Verify transform fails for null values.
    with pytest.raises(ValueError):
        null_encoder.transform(null_ds)
    null_encoder.transform(nonnull_ds)
    # Verify transform_batch fails for null values.
    with pytest.raises(ValueError):
        null_encoder.transform_batch(null_df)
    null_encoder.transform_batch(nonnull_df)
def test_one_hot_encoder():
    """Tests basic OneHotEncoder functionality."""
    col_a = ["red", "green", "blue", "red"]
    col_b = ["warm", "cold", "hot", "cold"]
    col_c = [1, 10, 5, 10]
    in_df = pd.DataFrame.from_dict({"A": col_a, "B": col_b, "C": col_c})
    ds = ray.data.from_pandas(in_df)
    encoder = OneHotEncoder(["B", "C"])
    # Transform with unfitted preprocessor.
    with pytest.raises(PreprocessorNotFittedException):
        encoder.transform(ds)
    # Fit data.
    encoder.fit(ds)
    assert encoder.stats_ == {
        "unique_values(B)": {"cold": 0, "hot": 1, "warm": 2},
        "unique_values(C)": {1: 0, 5: 1, 10: 2},
    }
    # Transform data.  Encoded columns replace B/C with <col>_<value> flags.
    transformed = encoder.transform(ds)
    out_df = transformed.to_pandas()
    processed_col_a = col_a
    processed_col_b_cold = [0, 1, 0, 1]
    processed_col_b_hot = [0, 0, 1, 0]
    processed_col_b_warm = [1, 0, 0, 0]
    processed_col_c_1 = [1, 0, 0, 0]
    processed_col_c_5 = [0, 0, 1, 0]
    processed_col_c_10 = [0, 1, 0, 1]
    expected_df = pd.DataFrame.from_dict(
        {
            "A": processed_col_a,
            "B_cold": processed_col_b_cold,
            "B_hot": processed_col_b_hot,
            "B_warm": processed_col_b_warm,
            "C_1": processed_col_c_1,
            "C_5": processed_col_c_5,
            "C_10": processed_col_c_10,
        }
    )
    assert out_df.equals(expected_df)
    # Transform batch.  Values unseen at fit time produce all-zero rows.
    pred_col_a = ["blue", "yellow", None]
    pred_col_b = ["cold", "warm", "other"]
    pred_col_c = [10, 1, 20]
    pred_in_df = pd.DataFrame.from_dict(
        {"A": pred_col_a, "B": pred_col_b, "C": pred_col_c}
    )
    pred_out_df = encoder.transform_batch(pred_in_df)
    pred_processed_col_a = ["blue", "yellow", None]
    pred_processed_col_b_cold = [1, 0, 0]
    pred_processed_col_b_hot = [0, 0, 0]
    pred_processed_col_b_warm = [0, 1, 0]
    pred_processed_col_c_1 = [0, 1, 0]
    pred_processed_col_c_5 = [0, 0, 0]
    pred_processed_col_c_10 = [1, 0, 0]
    pred_expected_df = pd.DataFrame.from_dict(
        {
            "A": pred_processed_col_a,
            "B_cold": pred_processed_col_b_cold,
            "B_hot": pred_processed_col_b_hot,
            "B_warm": pred_processed_col_b_warm,
            "C_1": pred_processed_col_c_1,
            "C_5": pred_processed_col_c_5,
            "C_10": pred_processed_col_c_10,
        }
    )
    assert pred_out_df.equals(pred_expected_df)
    # Test null behavior.
    null_col = [1, None]
    nonnull_col = [1, 1]
    null_df = pd.DataFrame.from_dict({"A": null_col})
    null_ds = ray.data.from_pandas(null_df)
    nonnull_df = pd.DataFrame.from_dict({"A": nonnull_col})
    nonnull_ds = ray.data.from_pandas(nonnull_df)
    null_encoder = OneHotEncoder(["A"])
    # Verify fit fails for null values.
    with pytest.raises(ValueError):
        null_encoder.fit(null_ds)
    null_encoder.fit(nonnull_ds)
    # Verify transform fails for null values.
    with pytest.raises(ValueError):
        null_encoder.transform(null_ds)
    null_encoder.transform(nonnull_ds)
    # Verify transform_batch fails for null values.
    with pytest.raises(ValueError):
        null_encoder.transform_batch(null_df)
    null_encoder.transform_batch(nonnull_df)
def test_label_encoder():
    """Tests basic LabelEncoder functionality."""
    col_a = ["red", "green", "blue", "red"]
    col_b = ["warm", "cold", "cold", "hot"]
    col_c = [1, 2, 3, 4]
    in_df = pd.DataFrame.from_dict({"A": col_a, "B": col_b, "C": col_c})
    ds = ray.data.from_pandas(in_df)
    encoder = LabelEncoder("A")
    # Transform with unfitted preprocessor.
    with pytest.raises(PreprocessorNotFittedException):
        encoder.transform(ds)
    # Fit data.
    encoder.fit(ds)
    assert encoder.stats_ == {"unique_values(A)": {"blue": 0, "green": 1, "red": 2}}
    # Transform data.
    transformed = encoder.transform(ds)
    out_df = transformed.to_pandas()
    processed_col_a = [2, 1, 0, 2]
    processed_col_b = col_b
    processed_col_c = col_c
    expected_df = pd.DataFrame.from_dict(
        {"A": processed_col_a, "B": processed_col_b, "C": processed_col_c}
    )
    assert out_df.equals(expected_df)
    # Transform batch.  Labels unseen at fit time encode to None.
    pred_col_a = ["blue", "red", "yellow"]
    pred_col_b = ["cold", "unknown", None]
    pred_col_c = [10, 20, None]
    pred_in_df = pd.DataFrame.from_dict(
        {"A": pred_col_a, "B": pred_col_b, "C": pred_col_c}
    )
    pred_out_df = encoder.transform_batch(pred_in_df)
    pred_processed_col_a = [0, 2, None]
    pred_processed_col_b = pred_col_b
    pred_processed_col_c = pred_col_c
    pred_expected_df = pd.DataFrame.from_dict(
        {
            "A": pred_processed_col_a,
            "B": pred_processed_col_b,
            "C": pred_processed_col_c,
        }
    )
    assert pred_out_df.equals(pred_expected_df)
    # Test null behavior.
    null_col = [1, None]
    nonnull_col = [1, 1]
    null_df = pd.DataFrame.from_dict({"A": null_col})
    null_ds = ray.data.from_pandas(null_df)
    nonnull_df = pd.DataFrame.from_dict({"A": nonnull_col})
    nonnull_ds = ray.data.from_pandas(nonnull_df)
    null_encoder = LabelEncoder("A")
    # Verify fit fails for null values.
    with pytest.raises(ValueError):
        null_encoder.fit(null_ds)
    null_encoder.fit(nonnull_ds)
    # Verify transform fails for null values.
    with pytest.raises(ValueError):
        null_encoder.transform(null_ds)
    null_encoder.transform(nonnull_ds)
    # Verify transform_batch fails for null values.
    with pytest.raises(ValueError):
        null_encoder.transform_batch(null_df)
    null_encoder.transform_batch(nonnull_df)
def test_simple_imputer():
    """Tests SimpleImputer's mean, most_frequent and constant strategies."""
    col_a = [1, 1, 1, np.nan]
    col_b = [1, 3, None, np.nan]
    col_c = [1, 1, 1, 1]
    in_df = pd.DataFrame.from_dict({"A": col_a, "B": col_b, "C": col_c})
    ds = ray.data.from_pandas(in_df)
    imputer = SimpleImputer(["B", "C"])
    # Transform with unfitted preprocessor.
    with pytest.raises(PreprocessorNotFittedException):
        imputer.transform(ds)
    # Fit data.
    imputer.fit(ds)
    assert imputer.stats_ == {"mean(B)": 2.0, "mean(C)": 1.0}
    # Transform data.
    transformed = imputer.transform(ds)
    out_df = transformed.to_pandas()
    processed_col_a = col_a
    processed_col_b = [1.0, 3.0, 2.0, 2.0]
    processed_col_c = [1, 1, 1, 1]
    expected_df = pd.DataFrame.from_dict(
        {"A": processed_col_a, "B": processed_col_b, "C": processed_col_c}
    )
    assert out_df.equals(expected_df)
    # Transform batch.
    pred_col_a = [1, 2, np.nan]
    pred_col_b = [1, 2, np.nan]
    pred_col_c = [None, None, None]
    pred_in_df = pd.DataFrame.from_dict(
        {"A": pred_col_a, "B": pred_col_b, "C": pred_col_c}
    )
    pred_out_df = imputer.transform_batch(pred_in_df)
    pred_processed_col_a = pred_col_a
    pred_processed_col_b = [1.0, 2.0, 2.0]
    pred_processed_col_c = [1.0, 1.0, 1.0]
    pred_expected_df = pd.DataFrame.from_dict(
        {
            "A": pred_processed_col_a,
            "B": pred_processed_col_b,
            "C": pred_processed_col_c,
        }
    )
    assert pred_out_df.equals(pred_expected_df)
    # Test "most_frequent" strategy.  repartition(3) exercises the
    # multi-block aggregation path.
    most_frequent_col_a = [1, 2, 2, None, None, None]
    most_frequent_col_b = [None, "c", "c", "b", "b", "a"]
    most_frequent_df = pd.DataFrame.from_dict(
        {"A": most_frequent_col_a, "B": most_frequent_col_b}
    )
    most_frequent_ds = ray.data.from_pandas(most_frequent_df).repartition(3)
    most_frequent_imputer = SimpleImputer(["A", "B"], strategy="most_frequent")
    most_frequent_imputer.fit(most_frequent_ds)
    assert most_frequent_imputer.stats_ == {
        "most_frequent(A)": 2.0,
        "most_frequent(B)": "c",
    }
    most_frequent_transformed = most_frequent_imputer.transform(most_frequent_ds)
    most_frequent_out_df = most_frequent_transformed.to_pandas()
    most_frequent_processed_col_a = [1.0, 2.0, 2.0, 2.0, 2.0, 2.0]
    most_frequent_processed_col_b = ["c", "c", "c", "b", "b", "a"]
    most_frequent_expected_df = pd.DataFrame.from_dict(
        {"A": most_frequent_processed_col_a, "B": most_frequent_processed_col_b}
    )
    assert most_frequent_out_df.equals(most_frequent_expected_df)
    # Test "constant" strategy.  Requires fill_value at construction time.
    constant_col_a = ["apple", None]
    constant_df = pd.DataFrame.from_dict({"A": constant_col_a})
    constant_ds = ray.data.from_pandas(constant_df)
    with pytest.raises(ValueError):
        SimpleImputer(["A"], strategy="constant")
    constant_imputer = SimpleImputer(
        ["A", "B"], strategy="constant", fill_value="missing"
    )
    constant_transformed = constant_imputer.transform(constant_ds)
    constant_out_df = constant_transformed.to_pandas()
    constant_processed_col_a = ["apple", "missing"]
    constant_expected_df = pd.DataFrame.from_dict({"A": constant_processed_col_a})
    assert constant_out_df.equals(constant_expected_df)
def test_chain():
    """Tests basic Chain functionality."""
    col_a = [-1, -1, 1, 1]
    col_b = [1, 1, 1, None]
    col_c = ["sunday", "monday", "tuesday", "tuesday"]
    in_df = pd.DataFrame.from_dict({"A": col_a, "B": col_b, "C": col_c})
    ds = ray.data.from_pandas(in_df)
    def udf(df):
        df["A"] *= 2
        return df
    batch_mapper = BatchMapper(fn=udf)
    imputer = SimpleImputer(["B"])
    scaler = StandardScaler(["A", "B"])
    encoder = LabelEncoder("C")
    chain = Chain(scaler, imputer, encoder, batch_mapper)
    # Fit data.  Each preprocessor is fitted on the output of its
    # predecessors (e.g. the imputer sees B already standardized to mean 0).
    chain.fit(ds)
    assert imputer.stats_ == {
        "mean(B)": 0.0,
    }
    assert scaler.stats_ == {
        "mean(A)": 0.0,
        "mean(B)": 1.0,
        "std(A)": 1.0,
        "std(B)": 0.0,
    }
    assert encoder.stats_ == {
        "unique_values(C)": {"monday": 0, "sunday": 1, "tuesday": 2}
    }
    # Transform data.
    transformed = chain.transform(ds)
    out_df = transformed.to_pandas()
    processed_col_a = [-2.0, -2.0, 2.0, 2.0]
    processed_col_b = [0.0, 0.0, 0.0, 0.0]
    processed_col_c = [1, 0, 2, 2]
    expected_df = pd.DataFrame.from_dict(
        {"A": processed_col_a, "B": processed_col_b, "C": processed_col_c}
    )
    assert out_df.equals(expected_df)
    # Transform batch.
    pred_col_a = [1, 2, None]
    pred_col_b = [0, None, 2]
    pred_col_c = ["monday", "tuesday", "wednesday"]
    pred_in_df = pd.DataFrame.from_dict(
        {"A": pred_col_a, "B": pred_col_b, "C": pred_col_c}
    )
    pred_out_df = chain.transform_batch(pred_in_df)
    pred_processed_col_a = [2, 4, None]
    pred_processed_col_b = [-1.0, 0.0, 1.0]
    pred_processed_col_c = [0, 2, None]
    pred_expected_df = pd.DataFrame.from_dict(
        {
            "A": pred_processed_col_a,
            "B": pred_processed_col_b,
            "C": pred_processed_col_c,
        }
    )
    assert pred_out_df.equals(pred_expected_df)
def test_batch_mapper():
    """Tests batch mapper functionality."""
    old_column = [1, 2, 3, 4]
    to_be_modified = [1, -1, 1, -1]
    in_df = pd.DataFrame.from_dict(
        {"old_column": old_column, "to_be_modified": to_be_modified}
    )
    ds = ray.data.from_pandas(in_df)
    def add_and_modify_udf(df: "pd.DataFrame"):
        df["new_col"] = df["old_column"] + 1
        df["to_be_modified"] *= 2
        return df
    batch_mapper = BatchMapper(fn=add_and_modify_udf)
    # fit() is called for API symmetry; no fitted state is asserted here.
    batch_mapper.fit(ds)
    transformed = batch_mapper.transform(ds)
    out_df = transformed.to_pandas()
    expected_df = pd.DataFrame.from_dict(
        {
            "old_column": old_column,
            "to_be_modified": [2, -2, 2, -2],
            "new_col": [2, 3, 4, 5],
        }
    )
    assert out_df.equals(expected_df)
# Allow running this test module directly; delegates to pytest.
if __name__ == "__main__":
    import sys
    sys.exit(pytest.main(["-sv", __file__]))
| 29.670365 | 84 | 0.626429 | import warnings
from unittest.mock import patch
import numpy as np
import pandas as pd
import pytest
import ray
from ray.ml.preprocessor import PreprocessorNotFittedException
from ray.ml.preprocessors import (
BatchMapper,
StandardScaler,
MinMaxScaler,
OrdinalEncoder,
OneHotEncoder,
LabelEncoder,
SimpleImputer,
Chain,
)
def test_standard_scaler():
col_a = [-1, 0, 1, 2]
col_b = [1, 1, 5, 5]
col_c = [1, 1, 1, None]
in_df = pd.DataFrame.from_dict({"A": col_a, "B": col_b, "C": col_c})
ds = ray.data.from_pandas(in_df)
scaler = StandardScaler(["B", "C"])
with pytest.raises(PreprocessorNotFittedException):
scaler.transform(ds)
scaler.fit(ds)
assert scaler.stats_ == {
"mean(B)": 3.0,
"mean(C)": 1.0,
"std(B)": 2.0,
"std(C)": 0.0,
}
transformed = scaler.transform(ds)
out_df = transformed.to_pandas()
processed_col_a = col_a
processed_col_b = [-1.0, -1.0, 1.0, 1.0]
processed_col_c = [0.0, 0.0, 0.0, None]
expected_df = pd.DataFrame.from_dict(
{"A": processed_col_a, "B": processed_col_b, "C": processed_col_c}
)
assert out_df.equals(expected_df)
pred_col_a = [1, 2, 3]
pred_col_b = [3, 5, 7]
pred_col_c = [0, 1, 2]
pred_in_df = pd.DataFrame.from_dict(
{"A": pred_col_a, "B": pred_col_b, "C": pred_col_c}
)
pred_out_df = scaler.transform_batch(pred_in_df)
pred_processed_col_a = pred_col_a
pred_processed_col_b = [0.0, 1.0, 2.0]
pred_processed_col_c = [-1.0, 0.0, 1.0]
pred_expected_df = pd.DataFrame.from_dict(
{
"A": pred_processed_col_a,
"B": pred_processed_col_b,
"C": pred_processed_col_c,
}
)
assert pred_out_df.equals(pred_expected_df)
@patch.object(warnings, "warn")
def test_fit_twice(mocked_warn):
col_a = [-1, 0, 1]
col_b = [1, 3, 5]
col_c = [1, 1, None]
in_df = pd.DataFrame.from_dict({"A": col_a, "B": col_b, "C": col_c})
ds = ray.data.from_pandas(in_df)
scaler = MinMaxScaler(["B", "C"])
scaler.fit(ds)
assert scaler.stats_ == {"min(B)": 1, "max(B)": 5, "min(C)": 1, "max(C)": 1}
ds = ds.map_batches(lambda x: x * 2)
scaler.fit(ds)
assert scaler.stats_ == {"min(B)": 2, "max(B)": 10, "min(C)": 2, "max(C)": 2}
msg = (
"`fit` has already been called on the preprocessor (or at least one "
"contained preprocessors if this is a chain). "
"All previously fitted state will be overwritten!"
)
mocked_warn.assert_called_once_with(msg)
def test_min_max_scaler():
col_a = [-1, 0, 1]
col_b = [1, 3, 5]
col_c = [1, 1, None]
in_df = pd.DataFrame.from_dict({"A": col_a, "B": col_b, "C": col_c})
ds = ray.data.from_pandas(in_df)
scaler = MinMaxScaler(["B", "C"])
with pytest.raises(PreprocessorNotFittedException):
scaler.transform(ds)
scaler.fit(ds)
assert scaler.stats_ == {"min(B)": 1, "max(B)": 5, "min(C)": 1, "max(C)": 1}
transformed = scaler.transform(ds)
out_df = transformed.to_pandas()
processed_col_a = col_a
processed_col_b = [0.0, 0.5, 1.0]
processed_col_c = [0.0, 0.0, None]
expected_df = pd.DataFrame.from_dict(
{"A": processed_col_a, "B": processed_col_b, "C": processed_col_c}
)
assert out_df.equals(expected_df)
pred_col_a = [1, 2, 3]
pred_col_b = [3, 5, 7]
pred_col_c = [0, 1, 2]
pred_in_df = pd.DataFrame.from_dict(
{"A": pred_col_a, "B": pred_col_b, "C": pred_col_c}
)
pred_out_df = scaler.transform_batch(pred_in_df)
pred_processed_col_a = pred_col_a
pred_processed_col_b = [0.5, 1.0, 1.5]
pred_processed_col_c = [-1.0, 0.0, 1.0]
pred_expected_df = pd.DataFrame.from_dict(
{
"A": pred_processed_col_a,
"B": pred_processed_col_b,
"C": pred_processed_col_c,
}
)
assert pred_out_df.equals(pred_expected_df)
def test_ordinal_encoder():
    """Tests basic OrdinalEncoder functionality."""
    col_a = ["red", "green", "blue", "red"]
    col_b = ["warm", "cold", "hot", "cold"]
    col_c = [1, 10, 5, 10]
    in_df = pd.DataFrame.from_dict({"A": col_a, "B": col_b, "C": col_c})
    ds = ray.data.from_pandas(in_df)

    encoder = OrdinalEncoder(["B", "C"])

    # Transform before fit must fail.
    with pytest.raises(PreprocessorNotFittedException):
        encoder.transform(ds)

    # Fit data. Ordinals are assigned to categories in sorted order.
    encoder.fit(ds)
    assert encoder.stats_ == {
        "unique_values(B)": {"cold": 0, "hot": 1, "warm": 2},
        "unique_values(C)": {1: 0, 5: 1, 10: 2},
    }

    # Transform data.
    transformed = encoder.transform(ds)
    out_df = transformed.to_pandas()

    processed_col_a = col_a
    processed_col_b = [2, 0, 1, 0]
    processed_col_c = [0, 2, 1, 2]
    expected_df = pd.DataFrame.from_dict(
        {"A": processed_col_a, "B": processed_col_b, "C": processed_col_c}
    )

    assert out_df.equals(expected_df)

    # Transform batch: categories unseen at fit time map to None.
    pred_col_a = ["blue", "yellow", None]
    pred_col_b = ["cold", "warm", "other"]
    pred_col_c = [10, 1, 20]
    pred_in_df = pd.DataFrame.from_dict(
        {"A": pred_col_a, "B": pred_col_b, "C": pred_col_c}
    )
    pred_out_df = encoder.transform_batch(pred_in_df)

    pred_processed_col_a = pred_col_a
    pred_processed_col_b = [0, 2, None]
    pred_processed_col_c = [2, 0, None]
    pred_expected_df = pd.DataFrame.from_dict(
        {
            "A": pred_processed_col_a,
            "B": pred_processed_col_b,
            "C": pred_processed_col_c,
        }
    )

    assert pred_out_df.equals(pred_expected_df)

    # Null values in an encoded column are rejected everywhere.
    null_col = [1, None]
    nonnull_col = [1, 1]
    null_df = pd.DataFrame.from_dict({"A": null_col})
    null_ds = ray.data.from_pandas(null_df)
    nonnull_df = pd.DataFrame.from_dict({"A": nonnull_col})
    nonnull_ds = ray.data.from_pandas(nonnull_df)
    null_encoder = OrdinalEncoder(["A"])

    # Verify fit fails for null values.
    with pytest.raises(ValueError):
        null_encoder.fit(null_ds)
    null_encoder.fit(nonnull_ds)

    # Verify transform fails for null values.
    with pytest.raises(ValueError):
        null_encoder.transform(null_ds)
    null_encoder.transform(nonnull_ds)

    # Verify transform_batch fails for null values.
    with pytest.raises(ValueError):
        null_encoder.transform_batch(null_df)
    null_encoder.transform_batch(nonnull_df)
def test_one_hot_encoder():
    """Tests basic OneHotEncoder functionality."""
    col_a = ["red", "green", "blue", "red"]
    col_b = ["warm", "cold", "hot", "cold"]
    col_c = [1, 10, 5, 10]
    in_df = pd.DataFrame.from_dict({"A": col_a, "B": col_b, "C": col_c})
    ds = ray.data.from_pandas(in_df)

    encoder = OneHotEncoder(["B", "C"])

    # Transform before fit must fail.
    with pytest.raises(PreprocessorNotFittedException):
        encoder.transform(ds)

    # Fit data.
    encoder.fit(ds)
    assert encoder.stats_ == {
        "unique_values(B)": {"cold": 0, "hot": 1, "warm": 2},
        "unique_values(C)": {1: 0, 5: 1, 10: 2},
    }

    # Transform data: each encoded column becomes one indicator column
    # per category value ("<col>_<value>").
    transformed = encoder.transform(ds)
    out_df = transformed.to_pandas()

    processed_col_a = col_a
    processed_col_b_cold = [0, 1, 0, 1]
    processed_col_b_hot = [0, 0, 1, 0]
    processed_col_b_warm = [1, 0, 0, 0]
    processed_col_c_1 = [1, 0, 0, 0]
    processed_col_c_5 = [0, 0, 1, 0]
    processed_col_c_10 = [0, 1, 0, 1]
    expected_df = pd.DataFrame.from_dict(
        {
            "A": processed_col_a,
            "B_cold": processed_col_b_cold,
            "B_hot": processed_col_b_hot,
            "B_warm": processed_col_b_warm,
            "C_1": processed_col_c_1,
            "C_5": processed_col_c_5,
            "C_10": processed_col_c_10,
        }
    )

    assert out_df.equals(expected_df)

    # Transform batch: unseen categories yield all-zero indicator rows.
    pred_col_a = ["blue", "yellow", None]
    pred_col_b = ["cold", "warm", "other"]
    pred_col_c = [10, 1, 20]
    pred_in_df = pd.DataFrame.from_dict(
        {"A": pred_col_a, "B": pred_col_b, "C": pred_col_c}
    )
    pred_out_df = encoder.transform_batch(pred_in_df)

    pred_processed_col_a = ["blue", "yellow", None]
    pred_processed_col_b_cold = [1, 0, 0]
    pred_processed_col_b_hot = [0, 0, 0]
    pred_processed_col_b_warm = [0, 1, 0]
    pred_processed_col_c_1 = [0, 1, 0]
    pred_processed_col_c_5 = [0, 0, 0]
    pred_processed_col_c_10 = [1, 0, 0]
    pred_expected_df = pd.DataFrame.from_dict(
        {
            "A": pred_processed_col_a,
            "B_cold": pred_processed_col_b_cold,
            "B_hot": pred_processed_col_b_hot,
            "B_warm": pred_processed_col_b_warm,
            "C_1": pred_processed_col_c_1,
            "C_5": pred_processed_col_c_5,
            "C_10": pred_processed_col_c_10,
        }
    )

    assert pred_out_df.equals(pred_expected_df)

    # Null values in an encoded column are rejected everywhere.
    null_col = [1, None]
    nonnull_col = [1, 1]
    null_df = pd.DataFrame.from_dict({"A": null_col})
    null_ds = ray.data.from_pandas(null_df)
    nonnull_df = pd.DataFrame.from_dict({"A": nonnull_col})
    nonnull_ds = ray.data.from_pandas(nonnull_df)
    null_encoder = OneHotEncoder(["A"])

    # Verify fit fails for null values.
    with pytest.raises(ValueError):
        null_encoder.fit(null_ds)
    null_encoder.fit(nonnull_ds)

    # Verify transform fails for null values.
    with pytest.raises(ValueError):
        null_encoder.transform(null_ds)
    null_encoder.transform(nonnull_ds)

    # Verify transform_batch fails for null values.
    with pytest.raises(ValueError):
        null_encoder.transform_batch(null_df)
    null_encoder.transform_batch(nonnull_df)
def test_label_encoder():
    """Tests basic LabelEncoder functionality."""
    col_a = ["red", "green", "blue", "red"]
    col_b = ["warm", "cold", "cold", "hot"]
    col_c = [1, 2, 3, 4]
    in_df = pd.DataFrame.from_dict({"A": col_a, "B": col_b, "C": col_c})
    ds = ray.data.from_pandas(in_df)

    encoder = LabelEncoder("A")

    # Transform before fit must fail.
    with pytest.raises(PreprocessorNotFittedException):
        encoder.transform(ds)

    # Fit data.
    encoder.fit(ds)
    assert encoder.stats_ == {"unique_values(A)": {"blue": 0, "green": 1, "red": 2}}

    # Transform data; only the label column "A" is changed.
    transformed = encoder.transform(ds)
    out_df = transformed.to_pandas()

    processed_col_a = [2, 1, 0, 2]
    processed_col_b = col_b
    processed_col_c = col_c
    expected_df = pd.DataFrame.from_dict(
        {"A": processed_col_a, "B": processed_col_b, "C": processed_col_c}
    )

    assert out_df.equals(expected_df)

    # Transform batch: labels unseen at fit time map to None.
    pred_col_a = ["blue", "red", "yellow"]
    pred_col_b = ["cold", "unknown", None]
    pred_col_c = [10, 20, None]
    pred_in_df = pd.DataFrame.from_dict(
        {"A": pred_col_a, "B": pred_col_b, "C": pred_col_c}
    )
    pred_out_df = encoder.transform_batch(pred_in_df)

    pred_processed_col_a = [0, 2, None]
    pred_processed_col_b = pred_col_b
    pred_processed_col_c = pred_col_c
    pred_expected_df = pd.DataFrame.from_dict(
        {
            "A": pred_processed_col_a,
            "B": pred_processed_col_b,
            "C": pred_processed_col_c,
        }
    )

    assert pred_out_df.equals(pred_expected_df)

    # Null label values are rejected by fit, transform and transform_batch.
    null_col = [1, None]
    nonnull_col = [1, 1]
    null_df = pd.DataFrame.from_dict({"A": null_col})
    null_ds = ray.data.from_pandas(null_df)
    nonnull_df = pd.DataFrame.from_dict({"A": nonnull_col})
    nonnull_ds = ray.data.from_pandas(nonnull_df)
    null_encoder = LabelEncoder("A")

    with pytest.raises(ValueError):
        null_encoder.fit(null_ds)
    null_encoder.fit(nonnull_ds)

    with pytest.raises(ValueError):
        null_encoder.transform(null_ds)
    null_encoder.transform(nonnull_ds)

    with pytest.raises(ValueError):
        null_encoder.transform_batch(null_df)
    null_encoder.transform_batch(nonnull_df)
def test_simple_imputer():
    """Tests basic SimpleImputer functionality."""
    col_a = [1, 1, 1, np.nan]
    col_b = [1, 3, None, np.nan]
    col_c = [1, 1, 1, 1]
    in_df = pd.DataFrame.from_dict({"A": col_a, "B": col_b, "C": col_c})
    ds = ray.data.from_pandas(in_df)

    imputer = SimpleImputer(["B", "C"])

    # Transform before fit must fail.
    with pytest.raises(PreprocessorNotFittedException):
        imputer.transform(ds)

    # Fit data. The default strategy fills with the column mean.
    imputer.fit(ds)
    assert imputer.stats_ == {"mean(B)": 2.0, "mean(C)": 1.0}

    # Transform data.
    transformed = imputer.transform(ds)
    out_df = transformed.to_pandas()

    processed_col_a = col_a
    processed_col_b = [1.0, 3.0, 2.0, 2.0]
    processed_col_c = [1, 1, 1, 1]
    expected_df = pd.DataFrame.from_dict(
        {"A": processed_col_a, "B": processed_col_b, "C": processed_col_c}
    )

    assert out_df.equals(expected_df)

    # Transform batch.
    pred_col_a = [1, 2, np.nan]
    pred_col_b = [1, 2, np.nan]
    pred_col_c = [None, None, None]
    pred_in_df = pd.DataFrame.from_dict(
        {"A": pred_col_a, "B": pred_col_b, "C": pred_col_c}
    )
    pred_out_df = imputer.transform_batch(pred_in_df)

    pred_processed_col_a = pred_col_a
    pred_processed_col_b = [1.0, 2.0, 2.0]
    pred_processed_col_c = [1.0, 1.0, 1.0]
    pred_expected_df = pd.DataFrame.from_dict(
        {
            "A": pred_processed_col_a,
            "B": pred_processed_col_b,
            "C": pred_processed_col_c,
        }
    )

    assert pred_out_df.equals(pred_expected_df)

    # "most_frequent" strategy (mode imputation), across 3 partitions.
    most_frequent_col_a = [1, 2, 2, None, None, None]
    most_frequent_col_b = [None, "c", "c", "b", "b", "a"]
    most_frequent_df = pd.DataFrame.from_dict(
        {"A": most_frequent_col_a, "B": most_frequent_col_b}
    )
    most_frequent_ds = ray.data.from_pandas(most_frequent_df).repartition(3)

    most_frequent_imputer = SimpleImputer(["A", "B"], strategy="most_frequent")
    most_frequent_imputer.fit(most_frequent_ds)
    assert most_frequent_imputer.stats_ == {
        "most_frequent(A)": 2.0,
        "most_frequent(B)": "c",
    }

    most_frequent_transformed = most_frequent_imputer.transform(most_frequent_ds)
    most_frequent_out_df = most_frequent_transformed.to_pandas()

    most_frequent_processed_col_a = [1.0, 2.0, 2.0, 2.0, 2.0, 2.0]
    most_frequent_processed_col_b = ["c", "c", "c", "b", "b", "a"]
    most_frequent_expected_df = pd.DataFrame.from_dict(
        {"A": most_frequent_processed_col_a, "B": most_frequent_processed_col_b}
    )

    assert most_frequent_out_df.equals(most_frequent_expected_df)

    # "constant" strategy: fill_value is required at construction time,
    # and transform works without a prior fit.
    constant_col_a = ["apple", None]
    constant_df = pd.DataFrame.from_dict({"A": constant_col_a})
    constant_ds = ray.data.from_pandas(constant_df)

    with pytest.raises(ValueError):
        SimpleImputer(["A"], strategy="constant")

    constant_imputer = SimpleImputer(
        ["A", "B"], strategy="constant", fill_value="missing"
    )
    constant_transformed = constant_imputer.transform(constant_ds)
    constant_out_df = constant_transformed.to_pandas()

    constant_processed_col_a = ["apple", "missing"]
    constant_expected_df = pd.DataFrame.from_dict({"A": constant_processed_col_a})

    assert constant_out_df.equals(constant_expected_df)
def test_chain():
    """Tests that Chain fits and applies its preprocessors in order."""
    col_a = [-1, -1, 1, 1]
    col_b = [1, 1, 1, None]
    col_c = ["sunday", "monday", "tuesday", "tuesday"]
    in_df = pd.DataFrame.from_dict({"A": col_a, "B": col_b, "C": col_c})
    ds = ray.data.from_pandas(in_df)

    def udf(df):
        # Doubles column A; applied last in the chain below.
        df["A"] *= 2
        return df

    batch_mapper = BatchMapper(fn=udf)
    imputer = SimpleImputer(["B"])
    scaler = StandardScaler(["A", "B"])
    encoder = LabelEncoder("C")
    chain = Chain(scaler, imputer, encoder, batch_mapper)

    # Fit data. Each stage is fitted on the output of the previous stage:
    # the imputer's mean(B) is 0 because it sees B after standard scaling.
    chain.fit(ds)
    assert imputer.stats_ == {
        "mean(B)": 0.0,
    }
    assert scaler.stats_ == {
        "mean(A)": 0.0,
        "mean(B)": 1.0,
        "std(A)": 1.0,
        "std(B)": 0.0,
    }
    assert encoder.stats_ == {
        "unique_values(C)": {"monday": 0, "sunday": 1, "tuesday": 2}
    }

    # Transform data through all four stages.
    transformed = chain.transform(ds)
    out_df = transformed.to_pandas()

    processed_col_a = [-2.0, -2.0, 2.0, 2.0]
    processed_col_b = [0.0, 0.0, 0.0, 0.0]
    processed_col_c = [1, 0, 2, 2]
    expected_df = pd.DataFrame.from_dict(
        {"A": processed_col_a, "B": processed_col_b, "C": processed_col_c}
    )

    assert out_df.equals(expected_df)

    # Transform batch: unseen label "wednesday" maps to None.
    pred_col_a = [1, 2, None]
    pred_col_b = [0, None, 2]
    pred_col_c = ["monday", "tuesday", "wednesday"]
    pred_in_df = pd.DataFrame.from_dict(
        {"A": pred_col_a, "B": pred_col_b, "C": pred_col_c}
    )
    pred_out_df = chain.transform_batch(pred_in_df)

    pred_processed_col_a = [2, 4, None]
    pred_processed_col_b = [-1.0, 0.0, 1.0]
    pred_processed_col_c = [0, 2, None]
    pred_expected_df = pd.DataFrame.from_dict(
        {
            "A": pred_processed_col_a,
            "B": pred_processed_col_b,
            "C": pred_processed_col_c,
        }
    )

    assert pred_out_df.equals(pred_expected_df)
def test_batch_mapper():
    """BatchMapper applies a user-defined function to every batch."""
    base_column = [1, 2, 3, 4]
    modified_column = [1, -1, 1, -1]
    source_df = pd.DataFrame(
        {"old_column": base_column, "to_be_modified": modified_column}
    )
    ds = ray.data.from_pandas(source_df)

    def add_and_modify_udf(df: "pd.DataFrame"):
        # Adds a derived column and mutates an existing one in place.
        df["new_col"] = df["old_column"] + 1
        df["to_be_modified"] *= 2
        return df

    mapper = BatchMapper(fn=add_and_modify_udf)
    mapper.fit(ds)
    result_df = mapper.transform(ds).to_pandas()

    expected_df = pd.DataFrame(
        {
            "old_column": base_column,
            "to_be_modified": [2, -2, 2, -2],
            "new_col": [2, 3, 4, 5],
        }
    )
    assert result_df.equals(expected_df)
if __name__ == "__main__":
import sys
sys.exit(pytest.main(["-sv", __file__]))
| true | true |
f720b26f06cbae99f40eb0f83633ea9c408ef321 | 5,737 | py | Python | astro/plugins/_core.py | Lightyagami788/Astro-UB | cb2d8c76064c474ffd507e38421509f51918520f | [
"Apache-2.0"
] | null | null | null | astro/plugins/_core.py | Lightyagami788/Astro-UB | cb2d8c76064c474ffd507e38421509f51918520f | [
"Apache-2.0"
] | null | null | null | astro/plugins/_core.py | Lightyagami788/Astro-UB | cb2d8c76064c474ffd507e38421509f51918520f | [
"Apache-2.0"
] | 1 | 2021-11-16T06:20:41.000Z | 2021-11-16T06:20:41.000Z | import asyncio
import os
from datetime import datetime
from pathlib import Path
from telethon.tl.types import InputMessagesFilterDocument
from astro.config import Config
from astro import CMD_HELP
from astro.utils import admin_cmd, load_module, remove_plugin
NAME = Config.NAME
DELETE_TIMEOUT = 5  # seconds to sleep before status messages are deleted
thumb_image_path = "./resources/astro.jpeg"  # thumbnail for uploaded plugin files
DEFAULTUSER = str(NAME) if NAME else "ASTRO USER"  # display name used in captions
# NOTE(review): `astro`, `sudo_cmd`, `eor` and `bot` are not imported in this
# module; presumably they are injected into the namespace by the plugin
# loader -- confirm before refactoring.
@astro.on(admin_cmd(pattern=r"send (?P<shortname>\w+)", outgoing=True))
@astro.on(sudo_cmd(pattern=r"send (?P<shortname>\w+)", allow_sudo=True))
async def send(event):
    """Uploads the named plugin file from astro/plugins/ to the current chat."""
    ok = await eor(event, "Sending...")
    if event.fwd_from:
        return
    hmm = bot.uid
    message_id = event.message.id
    thumb = thumb_image_path
    input_str = event.pattern_match.group(1)
    the_plugin_file = "./astro/plugins/{}.py".format(input_str)
    if os.path.exists(the_plugin_file):
        await ok.delete()
        start = datetime.now()
        pro = await event.client.send_file(
            event.chat_id,
            the_plugin_file,
            force_document=True,
            allow_cache=False,
            thumb=thumb,
            reply_to=message_id,
        )
        end = datetime.now()
        # NOTE(review): despite the name this is whole seconds, and the value
        # is never used afterwards.
        time_taken_in_ms = (end - start).seconds
        await pro.edit(
            f"**► Plugin Name:** `{input_str}`\n**► Uploaded by:** [{DEFAULTUSER}](tg://user?id={hmm})\n\n© @Astro_HelpChat"
        )
        await asyncio.sleep(DELETE_TIMEOUT)
    else:
        await ok.edit("**404**: `No Such Plugin!`")
@astro.on(admin_cmd(pattern="install"))
async def install(event):
if event.fwd_from:
return
if event.reply_to_msg_id:
try:
downloaded_file_name = (
await event.client.download_media( # pylint:disable=E0602
await event.get_reply_message(),
"astro/plugins/", # pylint:disable=E0602
)
)
if "(" not in downloaded_file_name:
path1 = Path(downloaded_file_name)
shortname = path1.stem
load_module(shortname.replace(".py", ""))
await event.edit(
"astro Succesfully Installed The Plugin `{}`".format(
os.path.basename(downloaded_file_name)
)
)
else:
os.remove(downloaded_file_name)
await event.edit(
"**Error!**\nPlugin cannot be installed!\nMight have been pre-installed."
)
except Exception as e: # pylint:disable=C0103,W0703
await event.edit(str(e))
os.remove(downloaded_file_name)
await asyncio.sleep(DELETE_TIMEOUT)
await event.delete()
@astro.on(admin_cmd(pattern=r"unload (?P<shortname>\w+)$"))
async def unload(event):
    """Unloads (removes) a previously loaded plugin by short name."""
    if event.fwd_from:
        return
    shortname = event.pattern_match["shortname"]
    try:
        remove_plugin(shortname)
        await event.edit(f"astro has successfully unloaded {shortname}")
    except Exception as e:
        # Fix: the old message used a literal "{shortname}" field name with
        # positional .format() args, which raised KeyError at runtime, and it
        # also wrongly claimed success in the failure branch.
        await event.edit(
            "astro could not unload {}\n{}".format(shortname, str(e))
        )
@astro.on(admin_cmd(pattern=r"load (?P<shortname>\w+)$"))
async def load(event):
    """Loads (or reloads) a plugin module by short name."""
    if event.fwd_from:
        return
    shortname = event.pattern_match["shortname"]
    try:
        try:
            # Best-effort unload first so an already-loaded plugin is
            # replaced cleanly; failures (not loaded) are ignored on purpose.
            remove_plugin(shortname)
        except BaseException:
            pass
        load_module(shortname)
        await event.edit(f"astro has successfully loaded {shortname}")
    except Exception as e:
        await event.edit(
            f"astro could not load {shortname} because of the following error.\n{str(e)}"
        )
# NOTE(review): this handler is also named `install`, shadowing the
# reply-based installer above in the module namespace. Both stay registered
# via the decorators, but consider renaming this one to `installall`.
@astro.on(admin_cmd(pattern=r"installall$"))
async def install(event):
    """Installs every .py document found in the current chat's history."""
    if event.fwd_from:
        return
    # Search the chat for all documents whose name matches ".py".
    documentss = await event.client.get_messages(
        event.chat_id, None, search=".py", filter=InputMessagesFilterDocument
    )
    total = int(documentss.total)
    total_doxx = range(0, total)
    # b: transient banner message; a: running progress log.
    b = await event.client.send_message(
        event.chat_id,
        f"**Installing {total} plugins...**\n`This msg will be deleted after the installation gets completed`",
    )
    text = "**Installing Plugins...**\n\n"
    a = await event.client.send_message(event.chat_id, text)
    if total == 0:
        await a.edit("**No plugins to install.**")
        await event.delete()
        return
    for ixo in total_doxx:
        mxo = documentss[ixo].id
        downloaded_file_name = await event.client.download_media(
            await event.client.get_messages(event.chat_id, ids=mxo), "astro/plugins/"
        )
        # A "(N)" suffix in the saved name means the plugin file already
        # existed on disk.
        if "(" not in downloaded_file_name:
            path1 = Path(downloaded_file_name)
            shortname = path1.stem
            try:
                load_module(shortname.replace(".py", ""))
                text += f"**• Installed** `{(os.path.basename(downloaded_file_name))}` **successfully.**\n"
            except BaseException:
                text += f"**• Error installing** `{(os.path.basename(downloaded_file_name))}`\n"
        else:
            text += f"**• Plugin** `{(os.path.basename(downloaded_file_name))}` **already installed.**\n"
    await a.edit(f"{text}\n**Installed every plugin.**")
    await event.delete()
    await b.delete()
CMD_HELP.update(
{
"core": ".load <plugin name>\nUse - Load the plugin.\
\n\n.unload <plugin name>\nUse - Unload the plugin.\
\n\n.install <reply to plugin file (.py)>\nUse - Install the plugin.\
\n\n.installall\nUse - Install all the plugins in the group/channel where it is used in.\
\n\n.send <plugin name>\nUse - Send the plugin."
}
)
| 34.981707 | 124 | 0.599791 | import asyncio
import os
from datetime import datetime
from pathlib import Path
from telethon.tl.types import InputMessagesFilterDocument
from astro.config import Config
from astro import CMD_HELP
from astro.utils import admin_cmd, load_module, remove_plugin
NAME = Config.NAME
DELETE_TIMEOUT = 5
thumb_image_path = "./resources/astro.jpeg"
DEFAULTUSER = str(NAME) if NAME else "ASTRO USER"
@astro.on(admin_cmd(pattern=r"send (?P<shortname>\w+)", outgoing=True))
@astro.on(sudo_cmd(pattern=r"send (?P<shortname>\w+)", allow_sudo=True))
async def send(event):
ok = await eor(event, "Sending...")
if event.fwd_from:
return
hmm = bot.uid
message_id = event.message.id
thumb = thumb_image_path
input_str = event.pattern_match.group(1)
the_plugin_file = "./astro/plugins/{}.py".format(input_str)
if os.path.exists(the_plugin_file):
await ok.delete()
start = datetime.now()
pro = await event.client.send_file(
event.chat_id,
the_plugin_file,
force_document=True,
allow_cache=False,
thumb=thumb,
reply_to=message_id,
)
end = datetime.now()
time_taken_in_ms = (end - start).seconds
await pro.edit(
f"**► Plugin Name:** `{input_str}`\n**► Uploaded by:** [{DEFAULTUSER}](tg://user?id={hmm})\n\n© @Astro_HelpChat"
)
await asyncio.sleep(DELETE_TIMEOUT)
else:
await ok.edit("**404**: `No Such Plugin!`")
@astro.on(admin_cmd(pattern="install"))
async def install(event):
if event.fwd_from:
return
if event.reply_to_msg_id:
try:
downloaded_file_name = (
await event.client.download_media(
await event.get_reply_message(),
"astro/plugins/",
)
)
if "(" not in downloaded_file_name:
path1 = Path(downloaded_file_name)
shortname = path1.stem
load_module(shortname.replace(".py", ""))
await event.edit(
"astro Succesfully Installed The Plugin `{}`".format(
os.path.basename(downloaded_file_name)
)
)
else:
os.remove(downloaded_file_name)
await event.edit(
"**Error!**\nPlugin cannot be installed!\nMight have been pre-installed."
)
except Exception as e:
await event.edit(str(e))
os.remove(downloaded_file_name)
await asyncio.sleep(DELETE_TIMEOUT)
await event.delete()
@astro.on(admin_cmd(pattern=r"unload (?P<shortname>\w+)$"))
async def unload(event):
if event.fwd_from:
return
shortname = event.pattern_match["shortname"]
try:
remove_plugin(shortname)
await event.edit(f"astro has successfully unloaded {shortname}")
except Exception as e:
await event.edit(
"astro has successfully unloaded {shortname}\n{}".format(
shortname, str(e)
)
)
@astro.on(admin_cmd(pattern=r"load (?P<shortname>\w+)$"))
async def load(event):
if event.fwd_from:
return
shortname = event.pattern_match["shortname"]
try:
try:
remove_plugin(shortname)
except BaseException:
pass
load_module(shortname)
await event.edit(f"astro has successfully loaded {shortname}")
except Exception as e:
await event.edit(
f"astro could not load {shortname} because of the following error.\n{str(e)}"
)
@astro.on(admin_cmd(pattern=r"installall$"))
async def install(event):
if event.fwd_from:
return
documentss = await event.client.get_messages(
event.chat_id, None, search=".py", filter=InputMessagesFilterDocument
)
total = int(documentss.total)
total_doxx = range(0, total)
b = await event.client.send_message(
event.chat_id,
f"**Installing {total} plugins...**\n`This msg will be deleted after the installation gets completed`",
)
text = "**Installing Plugins...**\n\n"
a = await event.client.send_message(event.chat_id, text)
if total == 0:
await a.edit("**No plugins to install.**")
await event.delete()
return
for ixo in total_doxx:
mxo = documentss[ixo].id
downloaded_file_name = await event.client.download_media(
await event.client.get_messages(event.chat_id, ids=mxo), "astro/plugins/"
)
if "(" not in downloaded_file_name:
path1 = Path(downloaded_file_name)
shortname = path1.stem
try:
load_module(shortname.replace(".py", ""))
text += f"**• Installed** `{(os.path.basename(downloaded_file_name))}` **successfully.**\n"
except BaseException:
text += f"**• Error installing** `{(os.path.basename(downloaded_file_name))}`\n"
else:
text += f"**• Plugin** `{(os.path.basename(downloaded_file_name))}` **already installed.**\n"
await a.edit(f"{text}\n**Installed every plugin.**")
await event.delete()
await b.delete()
CMD_HELP.update(
{
"core": ".load <plugin name>\nUse - Load the plugin.\
\n\n.unload <plugin name>\nUse - Unload the plugin.\
\n\n.install <reply to plugin file (.py)>\nUse - Install the plugin.\
\n\n.installall\nUse - Install all the plugins in the group/channel where it is used in.\
\n\n.send <plugin name>\nUse - Send the plugin."
}
)
| true | true |
f720b37866bd3fbd5203ac0faed5ee3a58cc01bc | 317 | py | Python | raspi/deskTimer.py | Itera/ariot2018 | e83adc8ac4e788df09fe412dd57ce3aca966b99a | [
"MIT"
] | null | null | null | raspi/deskTimer.py | Itera/ariot2018 | e83adc8ac4e788df09fe412dd57ce3aca966b99a | [
"MIT"
] | 1 | 2018-03-15T15:04:10.000Z | 2018-03-15T16:02:28.000Z | raspi/deskTimer.py | Itera/ariot2018 | e83adc8ac4e788df09fe412dd57ce3aca966b99a | [
"MIT"
] | null | null | null | from threading import Timer
class DeskTimer(object):
    """One-shot timer: schedules a callback to run after a delay.

    start() schedules `callback(*args)` after `time` seconds;
    stop() cancels the pending timer, if any.
    """

    # Pending threading.Timer, or None when nothing has been scheduled yet.
    current_timer = None

    def start(self, time, callback, *args):
        """Schedule callback(*args) to fire after `time` seconds."""
        self.current_timer = Timer(time, callback, args)
        self.current_timer.start()

    def stop(self):
        """Cancel the pending timer; a no-op when none was started."""
        # Fix: identity comparison with None (PEP 8) instead of `!=`.
        if self.current_timer is not None:
            self.current_timer.cancel()
| 24.384615 | 56 | 0.649842 | from threading import Timer
class DeskTimer(object):
current_timer = None
def start(self, time, callback, *args):
self.current_timer = Timer(time, callback, args)
self.current_timer.start()
def stop(self):
if self.current_timer != None:
self.current_timer.cancel()
| true | true |
f720b3c07515379b83fea8c011c643547f776843 | 19,885 | py | Python | perfkitbenchmarker/providers/rackspace/rackspace_virtual_machine.py | msidana/PerfKitBenchmarker | 2784642d3e6b20b3f474c4e27edb1ef163804f66 | [
"Apache-2.0"
] | 1 | 2018-08-28T19:33:21.000Z | 2018-08-28T19:33:21.000Z | perfkitbenchmarker/providers/rackspace/rackspace_virtual_machine.py | msidana/PerfKitBenchmarker | 2784642d3e6b20b3f474c4e27edb1ef163804f66 | [
"Apache-2.0"
] | null | null | null | perfkitbenchmarker/providers/rackspace/rackspace_virtual_machine.py | msidana/PerfKitBenchmarker | 2784642d3e6b20b3f474c4e27edb1ef163804f66 | [
"Apache-2.0"
] | null | null | null | # Copyright 2015 PerfKitBenchmarker Authors. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Class to represent a Rackspace Virtual Machine object.
Zones:
DFW (Dallas-Fort Worth)
IAD (Northern Virginia)
ORD (Chicago)
LON (London)
SYD (Sydney)
HKG (Hong Kong)
Machine Types:
run 'rack servers flavor list'
Images:
run 'rack servers image list'
All VM specifics are self-contained and the class provides methods to
operate on the VM: boot, shutdown, etc.
"""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from collections import OrderedDict
import json
import logging
import re
import tempfile
from perfkitbenchmarker import errors
from perfkitbenchmarker import flags
from perfkitbenchmarker import linux_virtual_machine
from perfkitbenchmarker import virtual_machine
from perfkitbenchmarker import vm_util
from perfkitbenchmarker import providers
from perfkitbenchmarker.configs import option_decoders
from perfkitbenchmarker.providers.rackspace import rackspace_disk
from perfkitbenchmarker.providers.rackspace import rackspace_network
from perfkitbenchmarker.providers.rackspace import util
import six
from six.moves import range
from six.moves import zip
FLAGS = flags.FLAGS
CLOUD_CONFIG_TEMPLATE = '''#cloud-config
users:
- name: {0}
ssh-authorized-keys:
- {1}
sudo: ['ALL=(ALL) NOPASSWD:ALL']
groups: sudo
shell: /bin/bash
'''
BLOCK_DEVICE_TEMPLATE = '''
source-type=image,
source-id={0},
dest=volume,
size={1},
shutdown=remove,
bootindex=0
'''
LSBLK_REGEX = (r'NAME="(.*)"\s+MODEL="(.*)"\s+SIZE="(.*)"'
r'\s+TYPE="(.*)"\s+MOUNTPOINT="(.*)"\s+LABEL="(.*)"')
LSBLK_PATTERN = re.compile(LSBLK_REGEX)
UBUNTU_IMAGE = '09de0a66-3156-48b4-90a5-1cf25a905207'
RHEL_IMAGE = '92f8a8b8-6019-4c27-949b-cf9910b84ffb'
INSTANCE_EXISTS_STATUSES = frozenset(
['BUILD', 'ACTIVE', 'PAUSED', 'SHUTOFF', 'ERROR'])
INSTANCE_DELETED_STATUSES = frozenset(
['DELETED'])
INSTANCE_KNOWN_STATUSES = INSTANCE_EXISTS_STATUSES | INSTANCE_DELETED_STATUSES
REMOTE_BOOT_DISK_SIZE_GB = 50
def RenderBlockDeviceTemplate(image, volume_size):
  """Renders template used for the block-device flag in RackCLI.

  Args:
    image: string. Image ID of the source image.
    volume_size: string. Size in GB of desired volume size.

  Returns:
    string value for block-device parameter used when creating a VM.
  """
  # Collapse the multi-line template into a single comma-separated value.
  flattened = BLOCK_DEVICE_TEMPLATE.replace('\n', '')
  return flattened.format(image, str(volume_size))
class RackspaceVmSpec(virtual_machine.BaseVmSpec):
  """Object containing the information needed to create a
  RackspaceVirtualMachine.

  Attributes:
    project: None or string. Project ID, also known as Tenant ID
    rackspace_region: None or string. Rackspace region to build VM resources.
    rack_profile: None or string. Rack CLI profile configuration.
  """

  CLOUD = providers.RACKSPACE

  @classmethod
  def _ApplyFlags(cls, config_values, flag_values):
    """Modifies config options based on runtime flag values.

    Args:
      config_values: dict mapping config option names to provided values. May
          be modified by this function.
      flag_values: flags.FlagValues. Runtime flags that may override the
          provided config values.
    """
    super(RackspaceVmSpec, cls)._ApplyFlags(config_values, flag_values)
    # Each of these flags, when present, overrides the config value of the
    # same name.
    for flag_name in ('project', 'rackspace_region', 'rack_profile'):
      if flag_values[flag_name].present:
        config_values[flag_name] = getattr(flag_values, flag_name)

  @classmethod
  def _GetOptionDecoderConstructions(cls):
    """Gets decoder classes and constructor args for each configurable option.

    Returns:
      dict. Maps option name string to a (ConfigOptionDecoder class, dict)
          pair. The pair specifies a decoder class and its __init__() keyword
          arguments to construct in order to decode the named option.
    """
    result = super(RackspaceVmSpec, cls)._GetOptionDecoderConstructions()
    result['project'] = (option_decoders.StringDecoder, {'default': None})
    result['rackspace_region'] = (option_decoders.StringDecoder,
                                  {'default': 'IAD'})
    result['rack_profile'] = (option_decoders.StringDecoder, {'default': None})
    return result
class RackspaceVirtualMachine(virtual_machine.BaseVirtualMachine):
"""Object representing a Rackspace Public Cloud Virtual Machine."""
CLOUD = providers.RACKSPACE
DEFAULT_IMAGE = None
  def __init__(self, vm_spec):
    """Initialize a Rackspace Virtual Machine

    Args:
      vm_spec: virtual_machine.BaseVirtualMachineSpec object of the VM.
    """
    super(RackspaceVirtualMachine, self).__init__(vm_spec)
    self.boot_metadata = {}  # extra key=value metadata passed at boot time
    self.boot_device = None  # populated by OnStartup()
    self.boot_disk_allocated = False
    self.allocated_disks = set()
    self.id = None  # server ID assigned by the Rackspace API on create
    self.image = self.image or self.DEFAULT_IMAGE
    self.region = vm_spec.rackspace_region
    self.project = vm_spec.project
    self.profile = vm_spec.rack_profile
    # Isolated tenant networks are regional, not globally available.
    # Security groups (firewalls) apply to a network, hence they are regional.
    # TODO(meteorfox) Create tenant network if it doesn't exist in the region.
    self.firewall = rackspace_network.RackspaceFirewall.GetFirewall()
  def _CreateDependencies(self):
    """Create dependencies prior creating the VM."""
    # TODO(meteorfox) Create security group (if applies)
    self._UploadSSHPublicKey()

  def _Create(self):
    """Creates a Rackspace VM instance and waits until it's ACTIVE."""
    self._CreateInstance()
    self._WaitForInstanceUntilActive()

  @vm_util.Retry()
  def _PostCreate(self):
    """Gets the VM's information.

    Wrapped in a retry -- presumably because the instance details (IPs)
    may not be immediately available after creation; confirm.
    """
    get_cmd = util.RackCLICommand(self, 'servers', 'instance', 'get')
    get_cmd.flags['id'] = self.id
    stdout, _, _ = get_cmd.Issue()
    resp = json.loads(stdout)
    self.internal_ip = resp['PrivateIPv4']
    self.ip_address = resp['PublicIPv4']
    self.AddMetadata(**self.vm_metadata)
def _Exists(self):
"""Returns true if the VM exists otherwise returns false."""
if self.id is None:
return False
get_cmd = util.RackCLICommand(self, 'servers', 'instance', 'get')
get_cmd.flags['id'] = self.id
stdout, _, _ = get_cmd.Issue(suppress_warning=True)
try:
resp = json.loads(stdout)
except ValueError:
return False
status = resp['Status']
return status in INSTANCE_EXISTS_STATUSES
  def _Delete(self):
    """Deletes a Rackspace VM instance and waits until API returns 404."""
    if self.id is None:
      return
    self._DeleteInstance()
    self._WaitForInstanceUntilDeleted()

  def _DeleteDependencies(self):
    """Deletes dependencies that were needed for the VM after the VM has been
    deleted."""
    # TODO(meteorfox) Delete security group (if applies)
    self._DeleteSSHPublicKey()

  def _UploadSSHPublicKey(self):
    """Uploads SSH public key to the VM's region. 1 key per VM per Region."""
    cmd = util.RackCLICommand(self, 'servers', 'keypair', 'upload')
    # NOTE(review): an OrderedDict replaces the whole flags mapping --
    # presumably to keep a deterministic flag order in the issued command.
    cmd.flags = OrderedDict([
        ('name', self.name), ('file', self.ssh_public_key)])
    cmd.Issue()

  def _DeleteSSHPublicKey(self):
    """Deletes SSH public key used for a VM."""
    cmd = util.RackCLICommand(self, 'servers', 'keypair', 'delete')
    cmd.flags['name'] = self.name
    cmd.Issue()
def _CreateInstance(self):
"""Generates and execute command for creating a Rackspace VM."""
with tempfile.NamedTemporaryFile(dir=vm_util.GetTempDir(),
prefix='user-data') as tf:
with open(self.ssh_public_key) as f:
public_key = f.read().rstrip('\n')
tf.write(CLOUD_CONFIG_TEMPLATE.format(self.user_name, public_key))
tf.flush()
create_cmd = self._GetCreateCommand(tf)
stdout, stderr, _ = create_cmd.Issue()
if stderr:
resp = json.loads(stderr)
raise errors.Error(''.join(
('Non-recoverable error has occurred: %s\n' % str(resp),
'Following command caused the error: %s' % repr(create_cmd),)))
resp = json.loads(stdout)
self.id = resp['ID']
def _GetCreateCommand(self, tf):
"""Generates RackCLI command for creating a Rackspace VM.
Args:
tf: file object containing cloud-config script.
Returns:
RackCLICommand containing RackCLI arguments to build a Rackspace VM.
"""
create_cmd = util.RackCLICommand(self, 'servers', 'instance', 'create')
create_cmd.flags['name'] = self.name
create_cmd.flags['keypair'] = self.name
create_cmd.flags['flavor-id'] = self.machine_type
if FLAGS.rackspace_boot_from_cbs_volume:
blk_flag = RenderBlockDeviceTemplate(self.image, REMOTE_BOOT_DISK_SIZE_GB)
create_cmd.flags['block-device'] = blk_flag
else:
create_cmd.flags['image-id'] = self.image
if FLAGS.rackspace_network_id is not None:
create_cmd.flags['networks'] = ','.join([
rackspace_network.PUBLIC_NET_ID, rackspace_network.SERVICE_NET_ID,
FLAGS.rackspace_network_id])
create_cmd.flags['user-data'] = tf.name
metadata = ['owner=%s' % FLAGS.owner]
for key, value in six.iteritems(self.boot_metadata):
metadata.append('%s=%s' % (key, value))
create_cmd.flags['metadata'] = ','.join(metadata)
return create_cmd
@vm_util.Retry(poll_interval=5, max_retries=720, log_errors=False,
retryable_exceptions=(errors.Resource.RetryableCreationError,))
def _WaitForInstanceUntilActive(self):
"""Waits until instance achieves non-transient state."""
get_cmd = util.RackCLICommand(self, 'servers', 'instance', 'get')
get_cmd.flags['id'] = self.id
stdout, stderr, _ = get_cmd.Issue()
if stdout:
instance = json.loads(stdout)
if instance['Status'] == 'ACTIVE':
logging.info('VM: %s is up and running.' % self.name)
return
elif instance['Status'] == 'ERROR':
logging.error('VM: %s failed to boot.' % self.name)
raise errors.VirtualMachine.VmStateError()
raise errors.Resource.RetryableCreationError(
'VM: %s is not running. Retrying to check status.' % self.name)
def _DeleteInstance(self):
    """Executes delete command for removing a Rackspace VM."""
    cmd = util.RackCLICommand(self, 'servers', 'instance', 'delete')
    cmd.flags['id'] = self.id
    stdout, _, _ = cmd.Issue(suppress_warning=True)
    resp = json.loads(stdout)
    # Anything other than an explicit 'Deleting ...' result is treated as a
    # transient failure so the caller's retry logic can try again.
    if 'result' not in resp or 'Deleting' not in resp['result']:
        raise errors.Resource.RetryableDeletionError()
@vm_util.Retry(poll_interval=5, max_retries=-1, timeout=300,
               log_errors=False,
               retryable_exceptions=(errors.Resource.RetryableDeletionError,))
def _WaitForInstanceUntilDeleted(self):
    """Waits until instance has been fully removed, or deleted.

    Polls every 5s (unlimited retries, 300s overall timeout) until the
    instance is gone or reports DELETED; raises VmStateError on ERROR.
    """
    get_cmd = util.RackCLICommand(self, 'servers', 'instance', 'get')
    get_cmd.flags['id'] = self.id
    stdout, stderr, _ = get_cmd.Issue()
    if stderr:
        resp = json.loads(stderr)
        # A "couldn't find" error means the instance is already gone.
        if 'error' in resp and "couldn't find" in resp['error']:
            logging.info('VM: %s has been successfully deleted.' % self.name)
            return
    instance = json.loads(stdout)
    if instance['Status'] == 'ERROR':
        logging.error('VM: %s failed to delete.' % self.name)
        raise errors.VirtualMachine.VmStateError()
    if instance['Status'] == 'DELETED':
        logging.info('VM: %s has been successfully deleted.' % self.name)
    else:
        # Still deleting; signal the decorator to retry.
        raise errors.Resource.RetryableDeletionError(
            'VM: %s has not been deleted. Retrying to check status.' % self.name)
def AddMetadata(self, **kwargs):
    """Adds metadata to the VM via RackCLI update-metadata command."""
    if not kwargs:
        return
    cmd = util.RackCLICommand(self, 'servers', 'instance', 'update-metadata')
    cmd.flags['id'] = self.id
    # Metadata is passed as a single comma-separated key=value list.
    cmd.flags['metadata'] = ','.join('{0}={1}'.format(key, value)
                                     for key, value in six.iteritems(kwargs))
    cmd.Issue()
def OnStartup(self):
    """Executes commands on the VM immediately after it has booted."""
    super(RackspaceVirtualMachine, self).OnStartup()
    # Cache the boot device so later scratch-disk allocation can avoid it.
    self.boot_device = self._GetBootDevice()
def CreateScratchDisk(self, disk_spec):
    """Creates a VM's scratch disk that will be used for a benchmark.

    Given a data_disk_type it will either create a corresponding Disk object,
    or raise an error that such data disk type is not supported.

    Args:
        disk_spec: virtual_machine.BaseDiskSpec object of the disk.

    Raises:
        errors.Error indicating that the requested 'data_disk_type' is
            not supported.
    """
    if disk_spec.disk_type == rackspace_disk.BOOT:  # Ignore num_striped_disks
        self._AllocateBootDisk(disk_spec)
    elif disk_spec.disk_type == rackspace_disk.LOCAL:
        self._AllocateLocalDisks(disk_spec)
    elif disk_spec.disk_type in rackspace_disk.REMOTE_TYPES:
        self._AllocateRemoteDisks(disk_spec)
    else:
        raise errors.Error('Unsupported data disk type: %s' % disk_spec.disk_type)
def _AllocateBootDisk(self, disk_spec):
    """Allocate the VM's boot, or system, disk as the scratch disk.

    Boot disk can only be allocated once. If multiple data disks are required
    it will raise an error.

    Args:
        disk_spec: virtual_machine.BaseDiskSpec object of the disk.

    Raises:
        errors.Error when boot disk has already been allocated as a data disk.
    """
    if self.boot_disk_allocated:
        raise errors.Error('Only one boot disk can be created per VM')
    device_path = '/dev/%s' % self.boot_device['name']
    scratch_disk = rackspace_disk.RackspaceBootDisk(
        disk_spec, self.zone, self.project, device_path, self.image)
    self.boot_disk_allocated = True
    self.scratch_disks.append(scratch_disk)
    scratch_disk.Create()
    path = disk_spec.mount_point
    # Create the mount point and hand ownership to the login user.
    mk_cmd = 'sudo mkdir -p {0}; sudo chown -R $USER:$USER {0};'.format(path)
    self.RemoteCommand(mk_cmd)
def _AllocateLocalDisks(self, disk_spec):
    """Allocate the VM's local disks (included with the VM), as a data disk(s).

    A local disk can only be allocated once per data disk.

    Args:
        disk_spec: virtual_machine.BaseDiskSpec object of the disk.
    """
    block_devices = self._GetBlockDevices()
    free_blk_devices = self._GetFreeBlockDevices(block_devices, disk_spec)
    disks = []
    for i in range(disk_spec.num_striped_disks):
        local_device = free_blk_devices[i]
        disk_name = '%s-local-disk-%d' % (self.name, i)
        device_path = '/dev/%s' % local_device['name']
        local_disk = rackspace_disk.RackspaceLocalDisk(
            disk_spec, disk_name, self.zone, self.project, device_path)
        # NOTE(review): _IsDiskAvailable tests device *names* against this set,
        # but disk *objects* are added here -- confirm RackspaceLocalDisk
        # hashes/compares by device name, otherwise already-allocated disks
        # are never excluded from future allocations.
        self.allocated_disks.add(local_disk)
        disks.append(local_disk)
    self._CreateScratchDiskFromDisks(disk_spec, disks)
def _AllocateRemoteDisks(self, disk_spec):
    """Creates and allocates Rackspace Cloud Block Storage volumes as
    data disks.

    Args:
        disk_spec: virtual_machine.BaseDiskSpec object of the disk.
    """
    scratch_disks = []
    for disk_num in range(disk_spec.num_striped_disks):
        volume_name = '%s-volume-%d' % (self.name, disk_num)
        scratch_disk = rackspace_disk.RackspaceRemoteDisk(
            disk_spec, volume_name, self.zone, self.project,
            media=disk_spec.disk_type)
        scratch_disks.append(scratch_disk)
    self._CreateScratchDiskFromDisks(disk_spec, scratch_disks)
def _GetFreeBlockDevices(self, block_devices, disk_spec):
    """Returns available block devices that are not in use as a data disk or
    as a boot disk.

    Args:
        block_devices: list of dict containing information about all block
            devices in the VM.
        disk_spec: virtual_machine.BaseDiskSpec of the disk.

    Returns:
        list of dicts of only block devices that are not being used.

    Raises:
        errors.Error whenever there are no available block devices, or fewer
            than disk_spec.num_striped_disks are available.
    """
    free_blk_devices = []
    for dev in block_devices:
        if self._IsDiskAvailable(dev):
            free_blk_devices.append(dev)
    if not free_blk_devices:
        raise errors.Error(
            ''.join(('Machine type %s does not include' % self.machine_type,
                     ' local disks. Please use a different disk_type,',
                     ' or a machine_type that provides local disks.')))
    elif len(free_blk_devices) < disk_spec.num_striped_disks:
        raise errors.Error('Not enough local data disks. '
                           'Requesting %d disk(s) but only %d available.'
                           % (disk_spec.num_striped_disks, len(free_blk_devices)))
    return free_blk_devices
def _GetBlockDevices(self):
    """Execute command on VM to gather all block devices in the VM.

    Returns:
        list of dicts block devices in the VM.
    """
    # lsblk flags: -P prints KEY="value" pairs (parsed by LSBLK_PATTERN),
    # -b gives sizes in bytes, -n suppresses the header row.
    stdout, _ = self.RemoteCommand(
        'sudo lsblk -o NAME,MODEL,SIZE,TYPE,MOUNTPOINT,LABEL -n -b -P')
    lines = stdout.splitlines()
    groups = [LSBLK_PATTERN.match(line) for line in lines]
    tuples = [g.groups() for g in groups if g]
    colnames = ('name', 'model', 'size_bytes', 'type', 'mountpoint', 'label',)
    blk_devices = [dict(list(zip(colnames, t))) for t in tuples]
    # lsblk pads model/label with trailing whitespace; strip it, and convert
    # the size to an integer byte count.
    for d in blk_devices:
        d['model'] = d['model'].rstrip()
        d['label'] = d['label'].rstrip()
        d['size_bytes'] = int(d['size_bytes'])
    return blk_devices
def _GetBootDevice(self):
    """Returns backing block device where '/' is mounted on.

    Returns:
        dict blk device data

    Raises:
        errors.Error indicates that could not find block device with '/'.
    """
    blk_devices = self._GetBlockDevices()
    boot_blk_device = None
    for dev in blk_devices:
        if dev['mountpoint'] == '/':
            boot_blk_device = dev
            break
    if boot_blk_device is None:  # Unlikely
        raise errors.Error('Could not find disk with "/" root mount point.')
    if boot_blk_device['type'] != 'part':
        return boot_blk_device
    # '/' is mounted on a partition; resolve to the underlying whole disk.
    return self._FindBootBlockDevice(blk_devices, boot_blk_device)
def _FindBootBlockDevice(self, blk_devices, boot_blk_device):
    """Helper method to search for backing block device of a partition.

    Strips the trailing partition number from the partition's name and looks
    for a device of type 'disk' with that base name; falls back to returning
    the partition itself when no such disk exists.
    """
    base_name = boot_blk_device['name'].rstrip('0123456789')
    for candidate in blk_devices:
        if candidate['type'] == 'disk' and candidate['name'] == base_name:
            return candidate
    return boot_blk_device
def _IsDiskAvailable(self, blk_device):
    """Returns True if a block device is available.

    A device is available when it is not a partition, is not the boot
    device, is not a config drive (label contains 'config'), and has not
    already been allocated as a data disk.
    """
    if blk_device['type'] == 'part':
        return False
    if blk_device['name'] == self.boot_device['name']:
        return False
    if 'config' in blk_device['label']:
        return False
    return blk_device['name'] not in self.allocated_disks
class DebianBasedRackspaceVirtualMachine(RackspaceVirtualMachine,
                                         linux_virtual_machine.DebianMixin):
    """Debian/Ubuntu flavor of a Rackspace VM; boots the Ubuntu base image."""
    DEFAULT_IMAGE = UBUNTU_IMAGE
class RhelBasedRackspaceVirtualMachine(RackspaceVirtualMachine,
                                       linux_virtual_machine.RhelMixin):
    """RHEL flavor of a Rackspace VM; boots the RHEL base image."""
    DEFAULT_IMAGE = RHEL_IMAGE
| 37.029795 | 80 | 0.692582 |
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from collections import OrderedDict
import json
import logging
import re
import tempfile
from perfkitbenchmarker import errors
from perfkitbenchmarker import flags
from perfkitbenchmarker import linux_virtual_machine
from perfkitbenchmarker import virtual_machine
from perfkitbenchmarker import vm_util
from perfkitbenchmarker import providers
from perfkitbenchmarker.configs import option_decoders
from perfkitbenchmarker.providers.rackspace import rackspace_disk
from perfkitbenchmarker.providers.rackspace import rackspace_network
from perfkitbenchmarker.providers.rackspace import util
import six
from six.moves import range
from six.moves import zip
FLAGS = flags.FLAGS
CLOUD_CONFIG_TEMPLATE = '''#cloud-config
users:
- name: {0}
ssh-authorized-keys:
- {1}
sudo: ['ALL=(ALL) NOPASSWD:ALL']
groups: sudo
shell: /bin/bash
'''
BLOCK_DEVICE_TEMPLATE = '''
source-type=image,
source-id={0},
dest=volume,
size={1},
shutdown=remove,
bootindex=0
'''
LSBLK_REGEX = (r'NAME="(.*)"\s+MODEL="(.*)"\s+SIZE="(.*)"'
r'\s+TYPE="(.*)"\s+MOUNTPOINT="(.*)"\s+LABEL="(.*)"')
LSBLK_PATTERN = re.compile(LSBLK_REGEX)
UBUNTU_IMAGE = '09de0a66-3156-48b4-90a5-1cf25a905207'
RHEL_IMAGE = '92f8a8b8-6019-4c27-949b-cf9910b84ffb'
INSTANCE_EXISTS_STATUSES = frozenset(
['BUILD', 'ACTIVE', 'PAUSED', 'SHUTOFF', 'ERROR'])
INSTANCE_DELETED_STATUSES = frozenset(
['DELETED'])
INSTANCE_KNOWN_STATUSES = INSTANCE_EXISTS_STATUSES | INSTANCE_DELETED_STATUSES
REMOTE_BOOT_DISK_SIZE_GB = 50
def RenderBlockDeviceTemplate(image, volume_size):
blk_params = BLOCK_DEVICE_TEMPLATE.replace('\n', '').format(
image, str(volume_size))
return blk_params
class RackspaceVmSpec(virtual_machine.BaseVmSpec):
CLOUD = providers.RACKSPACE
@classmethod
def _ApplyFlags(cls, config_values, flag_values):
super(RackspaceVmSpec, cls)._ApplyFlags(config_values, flag_values)
if flag_values['project'].present:
config_values['project'] = flag_values.project
if flag_values['rackspace_region'].present:
config_values['rackspace_region'] = flag_values.rackspace_region
if flag_values['rack_profile'].present:
config_values['rack_profile'] = flag_values.rack_profile
@classmethod
def _GetOptionDecoderConstructions(cls):
result = super(RackspaceVmSpec, cls)._GetOptionDecoderConstructions()
result.update({
'project': (option_decoders.StringDecoder, {'default': None}),
'rackspace_region': (option_decoders.StringDecoder, {'default': 'IAD'}),
'rack_profile': (option_decoders.StringDecoder, {'default': None})})
return result
class RackspaceVirtualMachine(virtual_machine.BaseVirtualMachine):
CLOUD = providers.RACKSPACE
DEFAULT_IMAGE = None
def __init__(self, vm_spec):
super(RackspaceVirtualMachine, self).__init__(vm_spec)
self.boot_metadata = {}
self.boot_device = None
self.boot_disk_allocated = False
self.allocated_disks = set()
self.id = None
self.image = self.image or self.DEFAULT_IMAGE
self.region = vm_spec.rackspace_region
self.project = vm_spec.project
self.profile = vm_spec.rack_profile
self.firewall = rackspace_network.RackspaceFirewall.GetFirewall()
def _CreateDependencies(self):
# TODO(meteorfox) Create security group (if applies)
self._UploadSSHPublicKey()
def _Create(self):
self._CreateInstance()
self._WaitForInstanceUntilActive()
@vm_util.Retry()
def _PostCreate(self):
get_cmd = util.RackCLICommand(self, 'servers', 'instance', 'get')
get_cmd.flags['id'] = self.id
stdout, _, _ = get_cmd.Issue()
resp = json.loads(stdout)
self.internal_ip = resp['PrivateIPv4']
self.ip_address = resp['PublicIPv4']
self.AddMetadata(**self.vm_metadata)
def _Exists(self):
if self.id is None:
return False
get_cmd = util.RackCLICommand(self, 'servers', 'instance', 'get')
get_cmd.flags['id'] = self.id
stdout, _, _ = get_cmd.Issue(suppress_warning=True)
try:
resp = json.loads(stdout)
except ValueError:
return False
status = resp['Status']
return status in INSTANCE_EXISTS_STATUSES
def _Delete(self):
if self.id is None:
return
self._DeleteInstance()
self._WaitForInstanceUntilDeleted()
def _DeleteDependencies(self):
# TODO(meteorfox) Delete security group (if applies)
self._DeleteSSHPublicKey()
def _UploadSSHPublicKey(self):
cmd = util.RackCLICommand(self, 'servers', 'keypair', 'upload')
cmd.flags = OrderedDict([
('name', self.name), ('file', self.ssh_public_key)])
cmd.Issue()
def _DeleteSSHPublicKey(self):
cmd = util.RackCLICommand(self, 'servers', 'keypair', 'delete')
cmd.flags['name'] = self.name
cmd.Issue()
def _CreateInstance(self):
with tempfile.NamedTemporaryFile(dir=vm_util.GetTempDir(),
prefix='user-data') as tf:
with open(self.ssh_public_key) as f:
public_key = f.read().rstrip('\n')
tf.write(CLOUD_CONFIG_TEMPLATE.format(self.user_name, public_key))
tf.flush()
create_cmd = self._GetCreateCommand(tf)
stdout, stderr, _ = create_cmd.Issue()
if stderr:
resp = json.loads(stderr)
raise errors.Error(''.join(
('Non-recoverable error has occurred: %s\n' % str(resp),
'Following command caused the error: %s' % repr(create_cmd),)))
resp = json.loads(stdout)
self.id = resp['ID']
def _GetCreateCommand(self, tf):
create_cmd = util.RackCLICommand(self, 'servers', 'instance', 'create')
create_cmd.flags['name'] = self.name
create_cmd.flags['keypair'] = self.name
create_cmd.flags['flavor-id'] = self.machine_type
if FLAGS.rackspace_boot_from_cbs_volume:
blk_flag = RenderBlockDeviceTemplate(self.image, REMOTE_BOOT_DISK_SIZE_GB)
create_cmd.flags['block-device'] = blk_flag
else:
create_cmd.flags['image-id'] = self.image
if FLAGS.rackspace_network_id is not None:
create_cmd.flags['networks'] = ','.join([
rackspace_network.PUBLIC_NET_ID, rackspace_network.SERVICE_NET_ID,
FLAGS.rackspace_network_id])
create_cmd.flags['user-data'] = tf.name
metadata = ['owner=%s' % FLAGS.owner]
for key, value in six.iteritems(self.boot_metadata):
metadata.append('%s=%s' % (key, value))
create_cmd.flags['metadata'] = ','.join(metadata)
return create_cmd
@vm_util.Retry(poll_interval=5, max_retries=720, log_errors=False,
retryable_exceptions=(errors.Resource.RetryableCreationError,))
def _WaitForInstanceUntilActive(self):
get_cmd = util.RackCLICommand(self, 'servers', 'instance', 'get')
get_cmd.flags['id'] = self.id
stdout, stderr, _ = get_cmd.Issue()
if stdout:
instance = json.loads(stdout)
if instance['Status'] == 'ACTIVE':
logging.info('VM: %s is up and running.' % self.name)
return
elif instance['Status'] == 'ERROR':
logging.error('VM: %s failed to boot.' % self.name)
raise errors.VirtualMachine.VmStateError()
raise errors.Resource.RetryableCreationError(
'VM: %s is not running. Retrying to check status.' % self.name)
def _DeleteInstance(self):
cmd = util.RackCLICommand(self, 'servers', 'instance', 'delete')
cmd.flags['id'] = self.id
stdout, _, _ = cmd.Issue(suppress_warning=True)
resp = json.loads(stdout)
if 'result' not in resp or 'Deleting' not in resp['result']:
raise errors.Resource.RetryableDeletionError()
@vm_util.Retry(poll_interval=5, max_retries=-1, timeout=300,
log_errors=False,
retryable_exceptions=(errors.Resource.RetryableDeletionError,))
def _WaitForInstanceUntilDeleted(self):
get_cmd = util.RackCLICommand(self, 'servers', 'instance', 'get')
get_cmd.flags['id'] = self.id
stdout, stderr, _ = get_cmd.Issue()
if stderr:
resp = json.loads(stderr)
if 'error' in resp and "couldn't find" in resp['error']:
logging.info('VM: %s has been successfully deleted.' % self.name)
return
instance = json.loads(stdout)
if instance['Status'] == 'ERROR':
logging.error('VM: %s failed to delete.' % self.name)
raise errors.VirtualMachine.VmStateError()
if instance['Status'] == 'DELETED':
logging.info('VM: %s has been successfully deleted.' % self.name)
else:
raise errors.Resource.RetryableDeletionError(
'VM: %s has not been deleted. Retrying to check status.' % self.name)
def AddMetadata(self, **kwargs):
if not kwargs:
return
cmd = util.RackCLICommand(self, 'servers', 'instance', 'update-metadata')
cmd.flags['id'] = self.id
cmd.flags['metadata'] = ','.join('{0}={1}'.format(key, value)
for key, value in six.iteritems(kwargs))
cmd.Issue()
def OnStartup(self):
super(RackspaceVirtualMachine, self).OnStartup()
self.boot_device = self._GetBootDevice()
def CreateScratchDisk(self, disk_spec):
if disk_spec.disk_type == rackspace_disk.BOOT:
self._AllocateBootDisk(disk_spec)
elif disk_spec.disk_type == rackspace_disk.LOCAL:
self._AllocateLocalDisks(disk_spec)
elif disk_spec.disk_type in rackspace_disk.REMOTE_TYPES:
self._AllocateRemoteDisks(disk_spec)
else:
raise errors.Error('Unsupported data disk type: %s' % disk_spec.disk_type)
def _AllocateBootDisk(self, disk_spec):
if self.boot_disk_allocated:
raise errors.Error('Only one boot disk can be created per VM')
device_path = '/dev/%s' % self.boot_device['name']
scratch_disk = rackspace_disk.RackspaceBootDisk(
disk_spec, self.zone, self.project, device_path, self.image)
self.boot_disk_allocated = True
self.scratch_disks.append(scratch_disk)
scratch_disk.Create()
path = disk_spec.mount_point
mk_cmd = 'sudo mkdir -p {0}; sudo chown -R $USER:$USER {0};'.format(path)
self.RemoteCommand(mk_cmd)
def _AllocateLocalDisks(self, disk_spec):
block_devices = self._GetBlockDevices()
free_blk_devices = self._GetFreeBlockDevices(block_devices, disk_spec)
disks = []
for i in range(disk_spec.num_striped_disks):
local_device = free_blk_devices[i]
disk_name = '%s-local-disk-%d' % (self.name, i)
device_path = '/dev/%s' % local_device['name']
local_disk = rackspace_disk.RackspaceLocalDisk(
disk_spec, disk_name, self.zone, self.project, device_path)
self.allocated_disks.add(local_disk)
disks.append(local_disk)
self._CreateScratchDiskFromDisks(disk_spec, disks)
def _AllocateRemoteDisks(self, disk_spec):
scratch_disks = []
for disk_num in range(disk_spec.num_striped_disks):
volume_name = '%s-volume-%d' % (self.name, disk_num)
scratch_disk = rackspace_disk.RackspaceRemoteDisk(
disk_spec, volume_name, self.zone, self.project,
media=disk_spec.disk_type)
scratch_disks.append(scratch_disk)
self._CreateScratchDiskFromDisks(disk_spec, scratch_disks)
def _GetFreeBlockDevices(self, block_devices, disk_spec):
free_blk_devices = []
for dev in block_devices:
if self._IsDiskAvailable(dev):
free_blk_devices.append(dev)
if not free_blk_devices:
raise errors.Error(
''.join(('Machine type %s does not include' % self.machine_type,
' local disks. Please use a different disk_type,',
' or a machine_type that provides local disks.')))
elif len(free_blk_devices) < disk_spec.num_striped_disks:
raise errors.Error('Not enough local data disks. '
'Requesting %d disk(s) but only %d available.'
% (disk_spec.num_striped_disks, len(free_blk_devices)))
return free_blk_devices
def _GetBlockDevices(self):
stdout, _ = self.RemoteCommand(
'sudo lsblk -o NAME,MODEL,SIZE,TYPE,MOUNTPOINT,LABEL -n -b -P')
lines = stdout.splitlines()
groups = [LSBLK_PATTERN.match(line) for line in lines]
tuples = [g.groups() for g in groups if g]
colnames = ('name', 'model', 'size_bytes', 'type', 'mountpoint', 'label',)
blk_devices = [dict(list(zip(colnames, t))) for t in tuples]
for d in blk_devices:
d['model'] = d['model'].rstrip()
d['label'] = d['label'].rstrip()
d['size_bytes'] = int(d['size_bytes'])
return blk_devices
def _GetBootDevice(self):
blk_devices = self._GetBlockDevices()
boot_blk_device = None
for dev in blk_devices:
if dev['mountpoint'] == '/':
boot_blk_device = dev
break
if boot_blk_device is None:
raise errors.Error('Could not find disk with "/" root mount point.')
if boot_blk_device['type'] != 'part':
return boot_blk_device
return self._FindBootBlockDevice(blk_devices, boot_blk_device)
def _FindBootBlockDevice(self, blk_devices, boot_blk_device):
blk_device_name = boot_blk_device['name'].rstrip('0123456789')
for dev in blk_devices:
if dev['type'] == 'disk' and dev['name'] == blk_device_name:
boot_blk_device = dev
return boot_blk_device
def _IsDiskAvailable(self, blk_device):
return (blk_device['type'] != 'part' and
blk_device['name'] != self.boot_device['name'] and
'config' not in blk_device['label'] and
blk_device['name'] not in self.allocated_disks)
class DebianBasedRackspaceVirtualMachine(RackspaceVirtualMachine,
linux_virtual_machine.DebianMixin):
DEFAULT_IMAGE = UBUNTU_IMAGE
class RhelBasedRackspaceVirtualMachine(RackspaceVirtualMachine,
linux_virtual_machine.RhelMixin):
DEFAULT_IMAGE = RHEL_IMAGE
| true | true |
f720b4c65b03ffc9b7a16a20c28258f9373c712e | 1,316 | py | Python | app/modules/ssh.py | danielpodwysocki/zoltan | 52536c41e95ca7b641d4e2b740f68c9e00170aee | [
"MIT"
] | null | null | null | app/modules/ssh.py | danielpodwysocki/zoltan | 52536c41e95ca7b641d4e2b740f68c9e00170aee | [
"MIT"
] | null | null | null | app/modules/ssh.py | danielpodwysocki/zoltan | 52536c41e95ca7b641d4e2b740f68c9e00170aee | [
"MIT"
] | null | null | null | import re
import paramiko
class Handler:
    '''Slash command handler that checks ssh connectivity to a machine.

    Responds to `/ssh [machine's name]` by validating the hostname format
    against a configured regexp and then attempting a key-based ssh
    connection as the `zoltan` user.
    '''
    id = 2

    def __init__(self, regexp):
        '''Compile the regexp used to validate hostnames passed to /ssh.'''
        self.prog = re.compile(regexp)

    def command(self, message):
        '''Return a human-readable status string for the given hostname.'''
        if not message:
            return "Run `/ssh [machine's name]` to see if the machine is reachable"
        if not self.prog.match(message):
            return "The machine's name is not in the correct format. Run `/help ssh` for command examples"
        client = paramiko.SSHClient()
        client.set_missing_host_key_policy(paramiko.WarningPolicy())
        try:
            # Key-based login only; the private key is mounted at /ssh/zoltan.
            client.connect(message, key_filename="/ssh/zoltan", username='zoltan')
            client.close()
            return "The machine is reachable."
        except Exception as e:
            # Any failure (DNS, timeout, auth, ...) reads as unreachable.
            print(e)
            return "The machine is not reachable."
| 32.097561 | 117 | 0.569149 | import re
import paramiko
class Handler:
id = 2
def __init__(self, regexp):
self.prog = re.compile(regexp)
def command(self, message):
response = "Something went wrong :("
if not message:
response = "Run `/ssh [machine's name]` to see if the machine is reachable"
elif bool(self.prog.match(message)):
response = "Checking `%s`" % message
ssh = paramiko.SSHClient()
ssh.set_missing_host_key_policy(paramiko.WarningPolicy())
#try connecting to the machine specified by the message
try:
ssh.connect(message, key_filename="/ssh/zoltan", username='zoltan')
response = "The machine is reachable."
ssh.close()
except Exception as e:
print(e)
response = "The machine is not reachable."
else:
response = "The machine's name is not in the correct format. Run `/help ssh` for command examples"
return response
| true | true |
f720b5b8dd20b02542407ce32d85af6fe11ca20b | 29,285 | py | Python | pyqstrat/account.py | alexanu/pyqstrat | ec62a1a7b048df05e8d1058a37bfe2cf113d2815 | [
"BSD-3-Clause"
] | null | null | null | pyqstrat/account.py | alexanu/pyqstrat | ec62a1a7b048df05e8d1058a37bfe2cf113d2815 | [
"BSD-3-Clause"
] | null | null | null | pyqstrat/account.py | alexanu/pyqstrat | ec62a1a7b048df05e8d1058a37bfe2cf113d2815 | [
"BSD-3-Clause"
] | null | null | null | from collections import defaultdict
from sortedcontainers import SortedDict
import math
import pandas as pd
import numpy as np
from pyqstrat.pq_types import ContractGroup, Trade, Contract
from types import SimpleNamespace
from typing import Sequence, Any, Tuple, Callable, Union, MutableSet, MutableSequence, MutableMapping, List
def calc_trade_pnl(open_qtys: np.ndarray,
                   open_prices: np.ndarray,
                   new_qtys: np.ndarray,
                   new_prices: np.ndarray,
                   multiplier: float) -> Tuple[np.ndarray, np.ndarray, float, float, float]:
    '''
    Net new trades against open positions using FIFO accounting and compute realized pnl.

    Args:
        open_qtys: signed quantities of the existing un-netted (open) trades
        open_prices: prices corresponding to open_qtys
        new_qtys: signed quantities of the new trades, in time order
        new_prices: prices corresponding to new_qtys
        multiplier: contract multiplier applied to the realized pnl

    Returns:
        Tuple of (remaining open qtys, remaining open prices, total open qty,
        weighted average open price, realized pnl)

    >>> print(calc_trade_pnl(
    ...          open_qtys = np.array([], dtype = np.float64), open_prices = np.array([], dtype = np.float64),
    ...          new_qtys = np.array([-8, 9, -4]), new_prices = np.array([10, 11, 6]), multiplier = 100))
    (array([-3.]), array([6.]), -3.0, 6.0, -1300.0)
    >>> print(calc_trade_pnl(open_qtys = np.array([], dtype = np.float64), open_prices = np.array([], dtype = np.float64), new_qtys = np.array([3, 10, -5]),
    ...          new_prices = np.array([51, 50, 45]), multiplier = 100))
    (array([8.]), array([50.]), 8.0, 50.0, -2800.0)
    >>> print(calc_trade_pnl(open_qtys = np.array([]), open_prices = np.array([]),
    ...          new_qtys = np.array([-58, -5, -5, 6, -8, 5, 5, -5, 19, 7, 5, -5, 39]),
    ...          new_prices = np.array([2080, 2075.25, 2070.75, 2076, 2066.75, 2069.25, 2074.75, 2069.75, 2087.25, 2097.25, 2106, 2088.25, 2085.25]),
    ...          multiplier = 50))
    (array([], dtype=float64), array([], dtype=float64), 0.0, 0, -33762.5) '''
    # TODO: Cythonize this
    realized = 0.
    new_qtys = new_qtys.copy()
    new_prices = new_prices.copy()
    # Scratch arrays sized for the worst case where no new trade nets against an
    # existing one.  np.float64 / np.int64 replace the np.float / np.int aliases
    # which were deprecated in NumPy 1.20 and removed in NumPy 1.24.
    _open_prices = np.zeros(len(open_prices) + len(new_prices), dtype=np.float64)
    _open_prices[:len(open_prices)] = open_prices
    _open_qtys = np.zeros(len(open_qtys) + len(new_qtys), dtype=np.float64)
    _open_qtys[:len(open_qtys)] = open_qtys

    new_qty_indices = np.nonzero(new_qtys)[0]
    open_qty_indices = np.zeros(len(_open_qtys), dtype=np.int64)
    nonzero_indices = np.nonzero(_open_qtys)[0]
    open_qty_indices[:len(nonzero_indices)] = nonzero_indices

    i = 0  # index into new_qty_indices to get idx of the new qty we are currently netting
    o = len(nonzero_indices)  # virtual length of open_qty_indices
    j = 0  # index into open_qty_indices to get idx of the open qty we are currently netting
    k = len(open_qtys)  # virtual length of _open_qtys

    # Try to net all new trades against existing non-netted trades.
    # Append any remaining non-netted new trades to end of existing trades
    while i < len(new_qty_indices):
        # Always try to net first non-zero new trade against first non-zero existing trade
        # FIFO accounting
        new_idx = new_qty_indices[i]
        new_qty, new_price = new_qtys[new_idx], new_prices[new_idx]
        if j < o:  # while we still have open positions to net against
            open_idx = open_qty_indices[j]
            open_qty, open_price = _open_qtys[open_idx], _open_prices[open_idx]
            if math.copysign(1, open_qty) == math.copysign(1, new_qty):
                # Nothing to net against so add this trade to the array and wait for the next offsetting trade
                _open_qtys[k] = new_qty
                _open_prices[k] = new_price
                open_qty_indices[o] = k
                k += 1
                o += 1
                new_qtys[new_idx] = 0
                i += 1
            elif abs(new_qty) > abs(open_qty):
                # New trade has more qty than offsetting trade so:
                # a. net against offsetting trade
                # b. remove the offsetting trade
                # c. reduce qty of new trade
                open_qty, open_price = _open_qtys[open_idx], _open_prices[open_idx]
                realized += open_qty * (new_price - open_price)
                _open_qtys[open_idx] = 0
                j += 1
                new_qtys[new_idx] += open_qty
            else:
                # New trade has less qty than offsetting trade so:
                # a. net against offsetting trade
                # b. remove new trade
                # c. reduce qty of offsetting trade
                realized += new_qty * (open_price - new_price)
                new_qtys[new_idx] = 0
                i += 1
                _open_qtys[open_idx] += new_qty
        else:
            # Nothing to net against so add this trade to the open trades array and wait for the next offsetting trade
            _open_qtys[k] = new_qty
            _open_prices[k] = new_price
            open_qty_indices[o] = k
            k += 1
            o += 1
            new_qtys[new_idx] = 0
            i += 1

    mask = _open_qtys != 0
    _open_qtys = _open_qtys[mask]
    _open_prices = _open_prices[mask]
    open_qty = np.sum(_open_qtys)
    # math.isclose(x, 0) with default tolerances is only true for exactly 0,
    # which is the intent here since quantities are whole contracts.
    if math.isclose(open_qty, 0):
        weighted_avg_price = 0
    else:
        weighted_avg_price = np.sum(_open_qtys * _open_prices) / open_qty

    return _open_qtys, _open_prices, open_qty, weighted_avg_price, realized * multiplier
def leading_nan_to_zero(df: pd.DataFrame, columns: Sequence[str]) -> pd.DataFrame:
    '''Replace leading NaNs (those before the first non-NaN value) with 0 in the
    given columns, modifying the dataframe in place and returning it.  Columns
    that start with a non-NaN value, or contain no non-NaN values, are left
    unchanged.'''
    for col in columns:
        values = df[col].values
        non_nan_positions = np.flatnonzero(~np.isnan(values))
        first_valid = non_nan_positions[0] if len(non_nan_positions) else -1
        if 0 < first_valid < len(values):
            values[:first_valid] = np.nan_to_num(values[:first_valid])
        df[col] = values
    return df
def find_last_non_nan_index(array: np.ndarray) -> int:
    '''Return the index of the last finite value in array, or 0 if there is none.'''
    finite_indices = np.flatnonzero(np.isfinite(array))
    return finite_indices[-1] if len(finite_indices) else 0
def find_index_before(sorted_dict: SortedDict, key: Any) -> int:
    '''
    Find index of the first key in a sorted dict that is less than or equal to
    the key passed in.  If the key is less than the first key in the dict,
    return -1.
    '''
    n = len(sorted_dict)
    if n == 0:
        return -1
    pos = sorted_dict.bisect_left(key)
    if pos == n:
        return n - 1
    return pos if sorted_dict.keys()[pos] == key else pos - 1
class ContractPNL:
'''Computes pnl for a single contract over time given trades and market data'''
def __init__(self,
             contract: Contract,
             account_timestamps: np.ndarray,
             price_function: Callable[[Contract, np.ndarray, int, SimpleNamespace], float],
             strategy_context: SimpleNamespace) -> None:
    '''
    Args:
        contract: The contract this object tracks pnl for
        account_timestamps: Sorted array of all timestamps the account knows about
        price_function: Called as (contract, timestamps, index, strategy_context)
            to obtain a mark price for unrealized pnl
        strategy_context: Opaque context object passed through to price_function
    '''
    self.contract = contract
    self._price_function = price_function
    self.strategy_context = strategy_context
    self._account_timestamps = account_timestamps
    # timestamp -> (position, realized, fee, commission, open_qty, weighted_avg_price)
    self._trade_pnl = SortedDict()
    # timestamp -> (price, unrealized, net_pnl)
    self._net_pnl = SortedDict()
    # Store trades that are not offset so when new trades come in we can offset against these to calc pnl.
    # np.int64 / np.float64 replace the np.int / np.float aliases removed in NumPy 1.24.
    self.open_qtys = np.empty(0, dtype=np.int64)
    self.open_prices = np.empty(0, dtype=np.float64)
    self.first_trade_timestamp = None
    self.final_pnl = np.nan
def _add_trades(self, trades: Sequence[Trade]) -> None:
    '''
    Net the given trades against open positions and record cumulative trade pnl.

    Args:
        trades: Must be sorted by timestamp
    '''
    if not len(trades): return
    timestamps = [trade.timestamp for trade in trades]
    if len(self._trade_pnl):
        # NOTE(review): peekitem(0) is the *oldest* entry, so this guard only
        # rejects trades older than the first recorded timestamp; the error
        # message suggests the intent was to compare against the newest entry
        # (peekitem(-1)) -- confirm before relying on this validation.
        k, v = self._trade_pnl.peekitem(0)
        if timestamps[0] <= k:
            raise Exception(f'Can only add a trade that is newer than last added current: {timestamps[0]} prev max timestamp: {k}')

    if self.first_trade_timestamp is None: self.first_trade_timestamp = timestamps[0]

    for i, timestamp in enumerate(timestamps):
        # Net all trades sharing this timestamp as one batch.
        t_trades = [trade for trade in trades if trade.timestamp == timestamp]
        open_qtys, open_prices, open_qty, weighted_avg_price, realized_chg = calc_trade_pnl(
            self.open_qtys, self.open_prices,
            np.array([trade.qty for trade in t_trades]),
            np.array([trade.price for trade in t_trades]),
            self.contract.multiplier)
        self.open_qtys = open_qtys
        self.open_prices = open_prices
        position_chg = sum([trade.qty for trade in t_trades])
        commission_chg = sum([trade.commission for trade in t_trades])
        fee_chg = sum([trade.fee for trade in t_trades])
        index = find_index_before(self._trade_pnl, timestamp)
        if index == -1:
            # First entry: cumulative values are just this batch's changes.
            self._trade_pnl[timestamp] = (position_chg, realized_chg, fee_chg, commission_chg, open_qty, weighted_avg_price)
        else:
            # Accumulate the batch's changes on top of the previous cumulative entry.
            prev_timestamp, (prev_position, prev_realized, prev_fee, prev_commission, _, _) = self._trade_pnl.peekitem(index)
            self._trade_pnl[timestamp] = (prev_position + position_chg, prev_realized + realized_chg,
                                          prev_fee + fee_chg, prev_commission + commission_chg, open_qty, weighted_avg_price)
        self.calc_net_pnl(timestamp)
def calc_net_pnl(self, timestamp: np.datetime64) -> None:
    '''Compute and cache (price, unrealized, net_pnl) for the given timestamp.

    No-op if already computed for this timestamp, if the timestamp precedes
    the first trade, or if the contract has expired and the final pnl is
    already locked in.  The timestamp must be present in the account's
    timestamp array.
    '''
    if timestamp in self._net_pnl: return
    if timestamp < self.first_trade_timestamp: return
    # TODO: Option expiry should be a special case. If option expires at 3:00 pm, we put in an expiry order at 3 pm and the
    # trade comes in at 3:01 pm. In this case, the final pnl is recorded at 3:01 but should be at 3 pm.
    if self.contract.expiry is not None and timestamp > self.contract.expiry and not math.isnan(self.final_pnl): return
    i = np.searchsorted(self._account_timestamps, timestamp)
    assert(self._account_timestamps[i] == timestamp)

    # Find the index before or equal to current timestamp. If not found, set to 0's
    trade_pnl_index = find_index_before(self._trade_pnl, timestamp)
    if trade_pnl_index == -1:
        # NOTE(review): 'open_qty' appears twice in this unpacking; harmless
        # since every value is 0, but probably a typo for another field.
        realized, fee, commission, open_qty, open_qty, weighted_avg_price = 0, 0, 0, 0, 0, 0
    else:
        _, (_, realized, fee, commission, open_qty, weighted_avg_price) = self._trade_pnl.peekitem(trade_pnl_index)

    price = np.nan

    if math.isclose(open_qty, 0):
        # Flat position: no unrealized pnl and no need to mark.
        unrealized = 0
    else:
        price = self._price_function(self.contract, self._account_timestamps, i, self.strategy_context)
        assert np.isreal(price), \
            f'Unexpected price type: {price} {type(price)} for contract: {self.contract} timestamp: {self._account_timestamps[i]}'

        if math.isnan(price):
            # No mark price available; carry forward the last computed unrealized pnl.
            index = find_index_before(self._net_pnl, timestamp)  # Last index we computed net pnl for
            if index == -1:
                prev_unrealized = 0
            else:
                _, (_, prev_unrealized, _) = self._net_pnl.peekitem(index)
            unrealized = prev_unrealized
        else:
            unrealized = open_qty * (price - weighted_avg_price) * self.contract.multiplier

    net_pnl = realized + unrealized - commission - fee
    self._net_pnl[timestamp] = (price, unrealized, net_pnl)
    if self.contract.expiry is not None and timestamp > self.contract.expiry:
        # First computation past expiry locks in the final pnl.
        self.final_pnl = net_pnl
def position(self, timestamp: np.datetime64) -> float:
index = find_index_before(self._trade_pnl, timestamp)
if index == -1: return 0.
_, (position, _, _, _, _, _) = self._trade_pnl.peekitem(index) # Less than or equal to timestamp
return position
def net_pnl(self, timestamp: np.datetime64) -> float:
if self.contract.expiry is not None and timestamp > self.contract.expiry and not math.isnan(self.final_pnl):
return self.final_pnl
index = find_index_before(self._net_pnl, timestamp)
if index == -1: return 0.
_, (_, _, net_pnl) = self._net_pnl.peekitem(index) # Less than or equal to timestamp
return net_pnl
def pnl(self, timestamp: np.datetime64) -> Tuple[float, float, float, float, float, float, float]:
index = find_index_before(self._trade_pnl, timestamp)
position, realized, fee, commission, price, unrealized, net_pnl = 0, 0, 0, 0, 0, 0, 0
if index != -1:
_, (position, realized, fee, commission, _, _) = self._trade_pnl.peekitem(index) # Less than or equal to timestamp
index = find_index_before(self._net_pnl, timestamp)
if index != -1:
_, (price, unrealized, net_pnl) = self._net_pnl.peekitem(index) # Less than or equal to timestamp
return position, price, realized, unrealized, fee, commission, net_pnl
def df(self) -> pd.DataFrame:
'''Returns a pandas dataframe with pnl data'''
df_trade_pnl = pd.DataFrame.from_records([
(k, v[0], v[1], v[2], v[3]) for k, v in self._trade_pnl.items()],
columns=['timestamp', 'position', 'realized', 'fee', 'commission'])
df_net_pnl = pd.DataFrame.from_records([
(k, v[0], v[1], v[2]) for k, v in self._net_pnl.items()],
columns=['timestamp', 'price', 'unrealized', 'net_pnl'])
all_timestamps = np.unique(np.concatenate((df_trade_pnl.timestamp.values, df_net_pnl.timestamp.values)))
df_trade_pnl = df_trade_pnl.set_index('timestamp').reindex(all_timestamps, method='ffill').reset_index()
df_trade_pnl = leading_nan_to_zero(df_trade_pnl, ['position', 'realized', 'fee', 'commission'])
df_net_pnl = df_net_pnl.set_index('timestamp').reindex(all_timestamps, method='ffill').reset_index()
del df_net_pnl['timestamp']
df = pd.concat([df_trade_pnl, df_net_pnl], axis=1)
df['symbol'] = self.contract.symbol
df = df[['symbol', 'timestamp', 'position', 'price', 'unrealized', 'realized', 'commission', 'fee', 'net_pnl']]
return df
def _get_calc_timestamps(timestamps: np.ndarray, pnl_calc_time: int) -> np.ndarray:
time_delta = np.timedelta64(pnl_calc_time, 'm')
calc_timestamps = np.unique(timestamps.astype('M8[D]')) + time_delta
calc_indices = np.searchsorted(timestamps, calc_timestamps, side='left') - 1
if calc_indices[0] == -1: calc_indices[0] = 0
return np.unique(timestamps[calc_indices])
class Account:
    '''An Account calculates pnl for a set of contracts'''
    def __init__(self,
                 contract_groups: Sequence[ContractGroup],
                 timestamps: np.ndarray,
                 price_function: Callable[[Contract, np.ndarray, int, SimpleNamespace], float],
                 strategy_context: SimpleNamespace,
                 starting_equity: float = 1.0e6,
                 pnl_calc_time: int = 15 * 60) -> None:
        '''
        Args:
            contract_groups: Contract groups that we want to compute PNL for
            timestamps: Timestamps that we might compute PNL at
            price_function: Function that returns contract prices used to compute pnl
            strategy_context: This is passed into the price function so we can use current state of strategy to compute prices
            starting_equity: Starting equity in account currency.  Default 1.e6
            pnl_calc_time: Number of minutes past midnight that we should calculate PNL at.  Default 15 * 60, i.e. 3 pm
        '''
        self.starting_equity = starting_equity
        self._price_function = price_function
        self.strategy_context = strategy_context
        self.timestamps = timestamps
        self.calc_timestamps = _get_calc_timestamps(timestamps, pnl_calc_time)
        self.contracts: MutableSet[Contract] = set()
        self._trades: MutableSequence[Trade] = []
        self._pnl = SortedDict()  # timestamp -> account-level net pnl
        # Keyed by contract group name for fast lookup in the position function
        self.symbol_pnls_by_contract_group: MutableMapping[str, MutableSequence[ContractPNL]] = defaultdict(list)
        self.symbol_pnls: MutableMapping[str, ContractPNL] = {}

    def symbols(self) -> MutableSequence[str]:
        '''Returns symbols of all contracts that have traded in this account'''
        return [contract.symbol for contract in self.contracts]

    def _add_contract(self, contract: Contract, timestamp: np.datetime64) -> None:
        '''Register a contract the first time we see a trade for it'''
        if contract.symbol in self.symbol_pnls:
            raise Exception(f'Already have contract with symbol: {contract.symbol} {contract}')
        contract_pnl = ContractPNL(contract, self.timestamps, self._price_function, self.strategy_context)
        self.symbol_pnls[contract.symbol] = contract_pnl
        # For fast lookup in position function
        self.symbol_pnls_by_contract_group[contract.contract_group.name].append(contract_pnl)
        self.contracts.add(contract)

    def add_trades(self, trades: Sequence[Trade]) -> None:
        '''Add trades to the account, registering any contracts not seen before'''
        trades = sorted(trades, key=lambda x: getattr(x, 'timestamp'))
        # Break up trades by contract so we can add them in a batch
        trades_by_contract: MutableMapping[Contract, List[Trade]] = defaultdict(list)
        for trade in trades:
            contract = trade.contract
            if contract not in self.contracts: self._add_contract(contract, trade.timestamp)
            trades_by_contract[contract].append(trade)
        for contract, contract_trades in trades_by_contract.items():
            contract_trades.sort(key=lambda x: x.timestamp)
            self.symbol_pnls[contract.symbol]._add_trades(contract_trades)
        self._trades += trades

    def calc(self, timestamp: np.datetime64) -> None:
        '''
        Computes P&L and stores it internally for all contracts.

        Args:
            timestamp: timestamp to compute P&L at.  Account remembers the last timestamp it computed P&L up to and will compute P&L
                between these and including timestamp. If there is more than one day between the last index and current index, we will
                include pnl for at the defined pnl_calc_time for those dates as well.
        '''
        if timestamp in self._pnl: return
        prev_idx = find_index_before(self._pnl, timestamp)
        # prev_idx indexes the keys of self._pnl (a subset of self.timestamps), so look up the
        # key itself rather than indexing self.timestamps (which was a latent bug)
        prev_timestamp = None if prev_idx == -1 else self._pnl.peekitem(prev_idx)[0]
        # Find the last timestamp per day that is between the previous index we computed and the current index,
        # so we can compute daily pnl in addition to the current index pnl
        calc_timestamps = self.calc_timestamps
        intermediate_calc_timestamps = calc_timestamps[calc_timestamps <= timestamp]
        if prev_timestamp is not None:
            intermediate_calc_timestamps = intermediate_calc_timestamps[intermediate_calc_timestamps > prev_timestamp]
        if not len(intermediate_calc_timestamps) or intermediate_calc_timestamps[-1] != timestamp:
            intermediate_calc_timestamps = np.append(intermediate_calc_timestamps, timestamp)
        for ts in intermediate_calc_timestamps:
            net_pnl = 0.
            for symbol_pnl in self.symbol_pnls.values():
                symbol_pnl.calc_net_pnl(ts)
                net_pnl += symbol_pnl.net_pnl(ts)
            self._pnl[ts] = net_pnl

    def position(self, contract_group: ContractGroup, timestamp: np.datetime64) -> float:
        '''Returns netted position for a contract_group at a given date in number of contracts or shares.'''
        position = 0.
        for symbol_pnl in self.symbol_pnls_by_contract_group[contract_group.name]:
            position += symbol_pnl.position(timestamp)
        return position

    def positions(self, contract_group: ContractGroup, timestamp: np.datetime64) -> MutableSequence[Tuple[Contract, float]]:
        '''
        Returns all non-zero positions in a contract group
        '''
        positions = []
        for contract in contract_group.contracts:
            symbol = contract.symbol
            if symbol not in self.symbol_pnls: continue
            position = self.symbol_pnls[symbol].position(timestamp)
            if not math.isclose(position, 0): positions.append((contract, position))
        return positions

    def equity(self, timestamp: np.datetime64) -> float:
        '''Returns equity in this account in Account currency.  Will cause calculation if Account has not previously
        calculated up to this date'''
        pnl = self._pnl.get(timestamp)
        if pnl is None:
            self.calc(timestamp)
            pnl = self._pnl[timestamp]
        return self.starting_equity + pnl

    def trades(self,
               contract_group: ContractGroup = None,
               start_date: np.datetime64 = None,
               end_date: np.datetime64 = None) -> MutableSequence[Trade]:
        '''Returns a list of trades with the given symbol and with trade date between (and including) start date
        and end date if they are specified. If symbol is None trades for all symbols are returned'''
        return [trade for trade in self._trades if (start_date is None or trade.timestamp >= start_date) and (
            end_date is None or trade.timestamp <= end_date) and (
            contract_group is None or trade.contract.contract_group == contract_group)]

    def df_pnl(self, contract_groups: Union[ContractGroup, Sequence[ContractGroup]] = None) -> pd.DataFrame:
        '''
        Returns a dataframe with P&L columns broken down by contract group and symbol

        Args:
            contract_groups: Return PNL for these contract groups.  If None (default), include all contract groups
        '''
        if contract_groups is None:
            contract_groups = list(set([contract.contract_group for contract in self.contracts]))
        if isinstance(contract_groups, ContractGroup): contract_groups = [contract_groups]
        dfs = []
        for contract_group in contract_groups:
            for contract in contract_group.contracts:
                symbol = contract.symbol
                if symbol not in self.symbol_pnls: continue
                df = self.symbol_pnls[symbol].df()
                if len(df) > 1:
                    # Trim trailing rows where net pnl stopped changing (e.g. after expiry)
                    net_pnl_diff = np.diff(df.net_pnl.values)  # np.diff returns a vector one shorter than the original
                    last_index = np.nonzero(net_pnl_diff)
                    if len(last_index[0]):
                        last_index = last_index[0][-1] + 1
                        df = df.iloc[:last_index + 1]
                df['contract_group'] = contract_group.name
                dfs.append(df)
        ret_df = pd.concat(dfs)
        ret_df = ret_df.sort_values(by=['timestamp', 'contract_group', 'symbol'])
        ret_df = ret_df[['timestamp', 'contract_group', 'symbol', 'position', 'price', 'unrealized', 'realized',
                         'commission', 'fee', 'net_pnl']]
        return ret_df

    def df_account_pnl(self, contract_group: ContractGroup = None) -> pd.DataFrame:
        '''
        Returns PNL at the account level.

        Args:
            contract_group: If set, we only return pnl for this contract_group.  Otherwise we return pnl for all contract groups
        '''
        if contract_group is not None:
            symbols = [contract.symbol for contract in contract_group.contracts if contract.symbol in self.symbol_pnls]
            symbol_pnls = [self.symbol_pnls[symbol] for symbol in symbols]
        else:
            symbol_pnls = list(self.symbol_pnls.values())
        timestamps = self.calc_timestamps
        # Use builtin float: the np.float alias was deprecated in NumPy 1.20 and removed in 1.24
        position = np.full(len(timestamps), 0., dtype=float)
        realized = np.full(len(timestamps), 0., dtype=float)
        unrealized = np.full(len(timestamps), 0., dtype=float)
        fee = np.full(len(timestamps), 0., dtype=float)
        commission = np.full(len(timestamps), 0., dtype=float)
        net_pnl = np.full(len(timestamps), 0., dtype=float)
        for i, timestamp in enumerate(timestamps):
            for symbol_pnl in symbol_pnls:
                _position, _price, _realized, _unrealized, _fee, _commission, _net_pnl = symbol_pnl.pnl(timestamp)
                # Skip non-finite components so one bad price does not poison the account totals
                if math.isfinite(_position): position[i] += _position
                if math.isfinite(_realized): realized[i] += _realized
                if math.isfinite(_unrealized): unrealized[i] += _unrealized
                if math.isfinite(_fee): fee[i] += _fee
                if math.isfinite(_commission): commission[i] += _commission
                if math.isfinite(_net_pnl): net_pnl[i] += _net_pnl
        df = pd.DataFrame.from_records(zip(timestamps, position, unrealized, realized, commission, fee, net_pnl),
                                       columns=['timestamp', 'position', 'unrealized', 'realized', 'commission', 'fee', 'net_pnl'])
        df['equity'] = self.starting_equity + df.net_pnl
        return df[['timestamp', 'position', 'unrealized', 'realized', 'commission', 'fee', 'net_pnl', 'equity']]

    def df_trades(self,
                  contract_group: ContractGroup = None,
                  start_date: np.datetime64 = None,
                  end_date: np.datetime64 = None) -> pd.DataFrame:
        '''
        Returns a dataframe of trades

        Args:
            contract_group: Return trades for this contract group.  If None (default), include all contract groups
            start_date: Include trades with date greater than or equal to this timestamp.
            end_date: Include trades with date less than or equal to this timestamp.
        '''
        trades = self.trades(contract_group, start_date, end_date)
        df = pd.DataFrame.from_records([(
            trade.contract.symbol,
            trade.timestamp,
            trade.qty,
            trade.price,
            trade.fee,
            trade.commission,
            trade.order.timestamp,
            trade.order.qty,
            trade.order.reason_code,
            (str(trade.order.properties.__dict__) if trade.order.properties.__dict__ else ''),
            (str(trade.contract.properties.__dict__) if trade.contract.properties.__dict__ else '')) for trade in trades],
            columns=['symbol', 'timestamp', 'qty', 'price', 'fee', 'commission', 'order_date', 'order_qty',
                     'reason_code', 'order_props', 'contract_props'])
        df = df.sort_values(by=['timestamp', 'symbol'])
        return df
def test_account():
    '''Smoke test: run two symbols through an Account and check trade, pnl and equity outputs.'''
    from pyqstrat.pq_types import MarketOrder

    def get_close_price(contract, timestamps, idx, strategy_context):
        # Deterministic synthetic close: a per-symbol base price plus the timestamp index
        base_by_symbol = {"IBM": 10.1, "MSFT": 15.3}
        if contract.symbol not in base_by_symbol:
            raise Exception(f'unknown contract: {contract}')
        return idx + base_by_symbol[contract.symbol]

    # Start from a clean registry so the test is repeatable within one process
    ContractGroup.clear()
    Contract.clear()
    ibm_group = ContractGroup.create('IBM')
    msft_group = ContractGroup.create('MSFT')
    ibm = Contract.create('IBM', contract_group=ibm_group)
    msft = Contract.create('MSFT', contract_group=msft_group)
    timestamps = np.array(['2018-01-01 09:00', '2018-01-02 08:00', '2018-01-02 09:00', '2018-01-05 13:35'], dtype='M8[m]')
    account = Account([ibm_group, msft_group], timestamps, get_close_price, None)
    fills = [
        Trade(ibm, MarketOrder(ibm, np.datetime64('2018-01-01 09:00'), 10),
              np.datetime64('2018-01-02 08:00'), 10, 10.1, commission=0.01),
        Trade(ibm, MarketOrder(ibm, np.datetime64('2018-01-01 09:00'), -20),
              np.datetime64('2018-01-02 09:00'), -20, 15.1, commission=0.02),
        Trade(msft, MarketOrder(msft, timestamps[1], 15), timestamps[1], 20, 13.2, commission=0.04),
        Trade(msft, MarketOrder(msft, timestamps[2], 20), timestamps[2], 20, 16.2, commission=0.05),
    ]
    account.add_trades(fills)
    account.calc(np.datetime64('2018-01-05 13:35'))
    assert len(account.df_trades()) == 4
    assert len(account.df_pnl()) == 6
    assert np.allclose(np.array([9.99, 61.96, 79.97, 103.91, 69.97, 143.91]), account.df_pnl().net_pnl.values, rtol=0)
    assert np.allclose(np.array([10, 20, -10, 40, -10, 40]), account.df_pnl().position.values, rtol=0)
    assert np.allclose(np.array([1000000., 1000183.88, 1000213.88]), account.df_account_pnl().equity.values, rtol=0)
if __name__ == "__main__":
    # When run as a script: execute the smoke test, then any doctests in this module
    test_account()
    import doctest
    doctest.testmod(optionflags=doctest.NORMALIZE_WHITESPACE)
| 50.753899 | 155 | 0.625337 | from collections import defaultdict
from sortedcontainers import SortedDict
import math
import pandas as pd
import numpy as np
from pyqstrat.pq_types import ContractGroup, Trade, Contract
from types import SimpleNamespace
from typing import Sequence, Any, Tuple, Callable, Union, MutableSet, MutableSequence, MutableMapping, List
def calc_trade_pnl(open_qtys: np.ndarray,
open_prices: np.ndarray,
new_qtys: np.ndarray,
new_prices: np.ndarray,
multiplier: float) -> Tuple[np.ndarray, np.ndarray, float, float, float]:
realized = 0.
new_qtys = new_qtys.copy()
new_prices = new_prices.copy()
_open_prices = np.zeros(len(open_prices) + len(new_prices), dtype=np.float)
_open_prices[:len(open_prices)] = open_prices
_open_qtys = np.zeros(len(open_qtys) + len(new_qtys), dtype=np.float)
_open_qtys[:len(open_qtys)] = open_qtys
new_qty_indices = np.nonzero(new_qtys)[0]
open_qty_indices = np.zeros(len(_open_qtys), dtype=np.int)
nonzero_indices = np.nonzero(_open_qtys)[0]
open_qty_indices[:len(nonzero_indices)] = nonzero_indices
i = 0
o = len(nonzero_indices)
j = 0
k = len(open_qtys)
while i < len(new_qty_indices):
new_idx = new_qty_indices[i]
new_qty, new_price = new_qtys[new_idx], new_prices[new_idx]
if j < o:
open_idx = open_qty_indices[j]
open_qty, open_price = _open_qtys[open_idx], _open_prices[open_idx]
if math.copysign(1, open_qty) == math.copysign(1, new_qty):
_open_qtys[k] = new_qty
_open_prices[k] = new_price
open_qty_indices[o] = k
k += 1
o += 1
new_qtys[new_idx] = 0
i += 1
elif abs(new_qty) > abs(open_qty):
open_qty, open_price = _open_qtys[open_idx], _open_prices[open_idx]
realized += open_qty * (new_price - open_price)
# j: {j} k: {k} l: {l} oq: {_open_qtys} oqi: {open_qty_indices} op: {_open_prices} nq: {new_qtys} np: {new_prices}')
_open_qtys[open_idx] = 0
j += 1
new_qtys[new_idx] += open_qty
else:
realized += new_qty * (open_price - new_price)
new_qtys[new_idx] = 0
i += 1
_open_qtys[open_idx] += new_qty
else:
_open_qtys[k] = new_qty
_open_prices[k] = new_price
open_qty_indices[o] = k
k += 1
o += 1
new_qtys[new_idx] = 0
i += 1
mask = _open_qtys != 0
_open_qtys = _open_qtys[mask]
_open_prices = _open_prices[mask]
open_qty = np.sum(_open_qtys)
if math.isclose(open_qty, 0):
weighted_avg_price = 0
else:
weighted_avg_price = np.sum(_open_qtys * _open_prices) / open_qty
return _open_qtys, _open_prices, open_qty, weighted_avg_price, realized * multiplier
def leading_nan_to_zero(df: pd.DataFrame, columns: Sequence[str]) -> pd.DataFrame:
    '''Replace NaNs that appear before the first finite value in each named column with 0.

    NaNs after the first finite value are left untouched.  The dataframe is modified and
    also returned.
    '''
    for col in columns:
        values = df[col].values
        finite_positions = np.ravel(np.nonzero(~np.isnan(values)))
        start = finite_positions[0] if len(finite_positions) else -1
        # Only rewrite when there is a finite value and at least one leading NaN before it
        if 0 < start < len(values):
            values[:start] = np.nan_to_num(values[:start])
        df[col] = values
    return df
def find_last_non_nan_index(array: np.ndarray) -> int:
    '''Return the index of the last finite element of array, or 0 if no element is finite.'''
    finite_indices = np.nonzero(np.isfinite(array))[0]
    return finite_indices[-1] if len(finite_indices) else 0
def find_index_before(sorted_dict: SortedDict, key: Any) -> int:
    '''Return the index of the last key in sorted_dict that is <= key.

    Returns -1 when the dict is empty or every key is greater than key.
    '''
    size = len(sorted_dict)
    if size == 0:
        return -1
    pos = sorted_dict.bisect_left(key)
    if pos == size:
        return size - 1
    # bisect_left lands on key itself when present, otherwise on the first larger key
    return pos if sorted_dict.keys()[pos] == key else pos - 1
class ContractPNL:
    '''Computes pnl for a single contract over time given trades and market data'''
    def __init__(self,
                 contract: Contract,
                 account_timestamps: np.ndarray,
                 price_function: Callable[[Contract, np.ndarray, int, SimpleNamespace], float],
                 strategy_context: SimpleNamespace) -> None:
        '''
        Args:
            contract: The contract this object computes pnl for
            account_timestamps: All timestamps the account may compute pnl at
            price_function: Returns a price for the contract at a given timestamp index
            strategy_context: Passed through to price_function
        '''
        self.contract = contract
        self._price_function = price_function
        self.strategy_context = strategy_context
        self._account_timestamps = account_timestamps
        self._trade_pnl = SortedDict()  # timestamp -> (position, realized, fee, commission, open_qty, weighted_avg_price)
        self._net_pnl = SortedDict()    # timestamp -> (price, unrealized, net_pnl)
        # Open lots as parallel arrays of signed quantities and entry prices.
        # Builtin int/float: np.int and np.float were removed in NumPy 1.24.
        self.open_qtys = np.empty(0, dtype=int)
        self.open_prices = np.empty(0, dtype=float)
        self.first_trade_timestamp = None
        self.final_pnl = np.nan

    def _add_trades(self, trades: Sequence[Trade]) -> None:
        '''Add trades (sorted by timestamp) and update cumulative trade pnl.

        Args:
            trades: Must all be newer than any trade previously added
        '''
        if not len(trades): return
        timestamps = [trade.timestamp for trade in trades]
        if len(self._trade_pnl):
            # Compare against the max previously added timestamp (the error message always
            # claimed "prev max" but the original code compared against the min key)
            k, v = self._trade_pnl.peekitem(-1)
            if timestamps[0] <= k:
                raise Exception(f'Can only add a trade that is newer than last added current: {timestamps[0]} prev max timestamp: {k}')
        if self.first_trade_timestamp is None: self.first_trade_timestamp = timestamps[0]
        # Iterate distinct timestamps once so trades sharing a timestamp are not double counted
        for timestamp in np.unique(timestamps):
            t_trades = [trade for trade in trades if trade.timestamp == timestamp]
            open_qtys, open_prices, open_qty, weighted_avg_price, realized_chg = calc_trade_pnl(
                self.open_qtys, self.open_prices,
                np.array([trade.qty for trade in t_trades]),
                np.array([trade.price for trade in t_trades]),
                self.contract.multiplier)
            self.open_qtys = open_qtys
            self.open_prices = open_prices
            position_chg = sum([trade.qty for trade in t_trades])
            commission_chg = sum([trade.commission for trade in t_trades])
            fee_chg = sum([trade.fee for trade in t_trades])
            index = find_index_before(self._trade_pnl, timestamp)
            if index == -1:
                self._trade_pnl[timestamp] = (position_chg, realized_chg, fee_chg, commission_chg, open_qty, weighted_avg_price)
            else:
                # Accumulate on top of the previous cumulative entry
                prev_timestamp, (prev_position, prev_realized, prev_fee, prev_commission, _, _) = self._trade_pnl.peekitem(index)
                self._trade_pnl[timestamp] = (prev_position + position_chg, prev_realized + realized_chg,
                                              prev_fee + fee_chg, prev_commission + commission_chg, open_qty, weighted_avg_price)
            self.calc_net_pnl(timestamp)

    def calc_net_pnl(self, timestamp: np.datetime64) -> None:
        '''Compute and cache unrealized and net pnl at the given account timestamp.'''
        if timestamp in self._net_pnl: return
        # Nothing to compute before the first trade (or if no trade was ever added)
        if self.first_trade_timestamp is None or timestamp < self.first_trade_timestamp: return
        # After expiry, keep the frozen final pnl instead of recomputing
        if self.contract.expiry is not None and timestamp > self.contract.expiry and not math.isnan(self.final_pnl): return
        i = np.searchsorted(self._account_timestamps, timestamp)
        assert(self._account_timestamps[i] == timestamp)
        trade_pnl_index = find_index_before(self._trade_pnl, timestamp)
        if trade_pnl_index == -1:
            # Fixed: original listed open_qty twice in this zero initialization
            realized, fee, commission, open_qty, weighted_avg_price = 0, 0, 0, 0, 0
        else:
            _, (_, realized, fee, commission, open_qty, weighted_avg_price) = self._trade_pnl.peekitem(trade_pnl_index)
        price = np.nan
        if math.isclose(open_qty, 0):
            unrealized = 0
        else:
            price = self._price_function(self.contract, self._account_timestamps, i, self.strategy_context)
            assert np.isreal(price), \
                f'Unexpected price type: {price} {type(price)} for contract: {self.contract} timestamp: {self._account_timestamps[i]}'
            if math.isnan(price):
                # No price available: carry forward the last computed unrealized pnl
                index = find_index_before(self._net_pnl, timestamp)
                if index == -1:
                    prev_unrealized = 0
                else:
                    _, (_, prev_unrealized, _) = self._net_pnl.peekitem(index)
                unrealized = prev_unrealized
            else:
                unrealized = open_qty * (price - weighted_avg_price) * self.contract.multiplier
        net_pnl = realized + unrealized - commission - fee
        self._net_pnl[timestamp] = (price, unrealized, net_pnl)
        if self.contract.expiry is not None and timestamp > self.contract.expiry:
            self.final_pnl = net_pnl

    def position(self, timestamp: np.datetime64) -> float:
        '''Return the net position held as of the given timestamp'''
        index = find_index_before(self._trade_pnl, timestamp)
        if index == -1: return 0.
        _, (position, _, _, _, _, _) = self._trade_pnl.peekitem(index)  # Less than or equal to timestamp
        return position

    def net_pnl(self, timestamp: np.datetime64) -> float:
        '''Return cumulative net pnl as of the given timestamp (frozen final pnl after expiry)'''
        if self.contract.expiry is not None and timestamp > self.contract.expiry and not math.isnan(self.final_pnl):
            return self.final_pnl
        index = find_index_before(self._net_pnl, timestamp)
        if index == -1: return 0.
        _, (_, _, net_pnl) = self._net_pnl.peekitem(index)  # Less than or equal to timestamp
        return net_pnl

    def pnl(self, timestamp: np.datetime64) -> Tuple[float, float, float, float, float, float, float]:
        '''Return (position, price, realized, unrealized, fee, commission, net_pnl) as of timestamp'''
        index = find_index_before(self._trade_pnl, timestamp)
        position, realized, fee, commission, price, unrealized, net_pnl = 0, 0, 0, 0, 0, 0, 0
        if index != -1:
            _, (position, realized, fee, commission, _, _) = self._trade_pnl.peekitem(index)  # Less than or equal to timestamp
        index = find_index_before(self._net_pnl, timestamp)
        if index != -1:
            _, (price, unrealized, net_pnl) = self._net_pnl.peekitem(index)  # Less than or equal to timestamp
        return position, price, realized, unrealized, fee, commission, net_pnl

    def df(self) -> pd.DataFrame:
        '''Returns a pandas dataframe with pnl data for this contract'''
        df_trade_pnl = pd.DataFrame.from_records([
            (k, v[0], v[1], v[2], v[3]) for k, v in self._trade_pnl.items()],
            columns=['timestamp', 'position', 'realized', 'fee', 'commission'])
        df_net_pnl = pd.DataFrame.from_records([
            (k, v[0], v[1], v[2]) for k, v in self._net_pnl.items()],
            columns=['timestamp', 'price', 'unrealized', 'net_pnl'])
        # Align both frames on the union of timestamps, forward filling gaps
        all_timestamps = np.unique(np.concatenate((df_trade_pnl.timestamp.values, df_net_pnl.timestamp.values)))
        df_trade_pnl = df_trade_pnl.set_index('timestamp').reindex(all_timestamps, method='ffill').reset_index()
        df_trade_pnl = leading_nan_to_zero(df_trade_pnl, ['position', 'realized', 'fee', 'commission'])
        df_net_pnl = df_net_pnl.set_index('timestamp').reindex(all_timestamps, method='ffill').reset_index()
        del df_net_pnl['timestamp']
        df = pd.concat([df_trade_pnl, df_net_pnl], axis=1)
        df['symbol'] = self.contract.symbol
        df = df[['symbol', 'timestamp', 'position', 'price', 'unrealized', 'realized', 'commission', 'fee', 'net_pnl']]
        return df
def _get_calc_timestamps(timestamps: np.ndarray, pnl_calc_time: int) -> np.ndarray:
    # For each calendar day present in timestamps, return the last timestamp strictly
    # before that day's pnl calculation time (midnight + pnl_calc_time minutes).
    time_delta = np.timedelta64(pnl_calc_time, 'm')
    calc_timestamps = np.unique(timestamps.astype('M8[D]')) + time_delta
    # searchsorted 'left' - 1 gives the index of the last timestamp strictly before each calc time
    calc_indices = np.searchsorted(timestamps, calc_timestamps, side='left') - 1
    # A calc time earlier than every timestamp yields -1; clamp to the first index
    if calc_indices[0] == -1: calc_indices[0] = 0
    return np.unique(timestamps[calc_indices])
class Account:
    '''An Account calculates pnl for a set of contracts'''
    def __init__(self,
                 contract_groups: Sequence[ContractGroup],
                 timestamps: np.ndarray,
                 price_function: Callable[[Contract, np.ndarray, int, SimpleNamespace], float],
                 strategy_context: SimpleNamespace,
                 starting_equity: float = 1.0e6,
                 pnl_calc_time: int = 15 * 60) -> None:
        '''
        Args:
            contract_groups: Contract groups that we want to compute PNL for
            timestamps: Timestamps that we might compute PNL at
            price_function: Function that returns contract prices used to compute pnl
            strategy_context: Passed into the price function so it can use current strategy state
            starting_equity: Starting equity in account currency.  Default 1.e6
            pnl_calc_time: Minutes past midnight to calculate daily PNL at.  Default 15 * 60 (3 pm)
        '''
        self.starting_equity = starting_equity
        self._price_function = price_function
        self.strategy_context = strategy_context
        self.timestamps = timestamps
        self.calc_timestamps = _get_calc_timestamps(timestamps, pnl_calc_time)
        self.contracts: MutableSet[Contract] = set()
        self._trades: MutableSequence[Trade] = []
        self._pnl = SortedDict()  # timestamp -> account-level net pnl
        # Keyed by contract group name for fast lookup in the position function
        self.symbol_pnls_by_contract_group: MutableMapping[str, MutableSequence[ContractPNL]] = defaultdict(list)
        self.symbol_pnls: MutableMapping[str, ContractPNL] = {}

    def symbols(self) -> MutableSequence[str]:
        '''Returns symbols of all contracts that have traded in this account'''
        return [contract.symbol for contract in self.contracts]

    def _add_contract(self, contract: Contract, timestamp: np.datetime64) -> None:
        '''Register a contract the first time we see a trade for it'''
        if contract.symbol in self.symbol_pnls:
            raise Exception(f'Already have contract with symbol: {contract.symbol} {contract}')
        contract_pnl = ContractPNL(contract, self.timestamps, self._price_function, self.strategy_context)
        self.symbol_pnls[contract.symbol] = contract_pnl
        # For fast lookup in position function
        self.symbol_pnls_by_contract_group[contract.contract_group.name].append(contract_pnl)
        self.contracts.add(contract)

    def add_trades(self, trades: Sequence[Trade]) -> None:
        '''Add trades to the account, registering any contracts not seen before'''
        trades = sorted(trades, key=lambda x: getattr(x, 'timestamp'))
        # Break up trades by contract so we can add them in a batch
        trades_by_contract: MutableMapping[Contract, List[Trade]] = defaultdict(list)
        for trade in trades:
            contract = trade.contract
            if contract not in self.contracts: self._add_contract(contract, trade.timestamp)
            trades_by_contract[contract].append(trade)
        for contract, contract_trades in trades_by_contract.items():
            contract_trades.sort(key=lambda x: x.timestamp)
            self.symbol_pnls[contract.symbol]._add_trades(contract_trades)
        self._trades += trades

    def calc(self, timestamp: np.datetime64) -> None:
        '''Compute and cache P&L for all contracts up to and including timestamp.

        Also fills in daily pnl at the configured pnl_calc_time for any days between the
        last computed timestamp and this one.
        '''
        if timestamp in self._pnl: return
        prev_idx = find_index_before(self._pnl, timestamp)
        # prev_idx indexes the keys of self._pnl (a subset of self.timestamps), so look up
        # the key itself rather than indexing self.timestamps (which was a latent bug)
        prev_timestamp = None if prev_idx == -1 else self._pnl.peekitem(prev_idx)[0]
        # Find the last timestamp per day that is between the previous index we computed and the current index,
        # so we can compute daily pnl in addition to the current index pnl
        calc_timestamps = self.calc_timestamps
        intermediate_calc_timestamps = calc_timestamps[calc_timestamps <= timestamp]
        if prev_timestamp is not None:
            intermediate_calc_timestamps = intermediate_calc_timestamps[intermediate_calc_timestamps > prev_timestamp]
        if not len(intermediate_calc_timestamps) or intermediate_calc_timestamps[-1] != timestamp:
            intermediate_calc_timestamps = np.append(intermediate_calc_timestamps, timestamp)
        for ts in intermediate_calc_timestamps:
            net_pnl = 0.
            for symbol_pnl in self.symbol_pnls.values():
                symbol_pnl.calc_net_pnl(ts)
                net_pnl += symbol_pnl.net_pnl(ts)
            self._pnl[ts] = net_pnl

    def position(self, contract_group: ContractGroup, timestamp: np.datetime64) -> float:
        '''Returns netted position for a contract_group at a given date in contracts or shares'''
        position = 0.
        for symbol_pnl in self.symbol_pnls_by_contract_group[contract_group.name]:
            position += symbol_pnl.position(timestamp)
        return position

    def positions(self, contract_group: ContractGroup, timestamp: np.datetime64) -> MutableSequence[Tuple[Contract, float]]:
        '''Returns all non-zero positions in a contract group'''
        positions = []
        for contract in contract_group.contracts:
            symbol = contract.symbol
            if symbol not in self.symbol_pnls: continue
            position = self.symbol_pnls[symbol].position(timestamp)
            if not math.isclose(position, 0): positions.append((contract, position))
        return positions

    def equity(self, timestamp: np.datetime64) -> float:
        '''Returns account equity in account currency, computing pnl first if needed'''
        pnl = self._pnl.get(timestamp)
        if pnl is None:
            self.calc(timestamp)
            pnl = self._pnl[timestamp]
        return self.starting_equity + pnl

    def trades(self,
               contract_group: ContractGroup = None,
               start_date: np.datetime64 = None,
               end_date: np.datetime64 = None) -> MutableSequence[Trade]:
        '''Returns trades for the given contract group (all if None) between start_date and
        end_date inclusive (unbounded when None)'''
        return [trade for trade in self._trades if (start_date is None or trade.timestamp >= start_date) and (
            end_date is None or trade.timestamp <= end_date) and (
            contract_group is None or trade.contract.contract_group == contract_group)]

    def df_pnl(self, contract_groups: Union[ContractGroup, Sequence[ContractGroup]] = None) -> pd.DataFrame:
        '''Returns a dataframe of P&L broken down by contract group and symbol.

        Args:
            contract_groups: Contract groups to include.  If None (default), include all
        '''
        if contract_groups is None:
            contract_groups = list(set([contract.contract_group for contract in self.contracts]))
        if isinstance(contract_groups, ContractGroup): contract_groups = [contract_groups]
        dfs = []
        for contract_group in contract_groups:
            for contract in contract_group.contracts:
                symbol = contract.symbol
                if symbol not in self.symbol_pnls: continue
                df = self.symbol_pnls[symbol].df()
                if len(df) > 1:
                    # Trim trailing rows where net pnl stopped changing (e.g. after expiry)
                    net_pnl_diff = np.diff(df.net_pnl.values)  # np.diff returns a vector one shorter than the original
                    last_index = np.nonzero(net_pnl_diff)
                    if len(last_index[0]):
                        last_index = last_index[0][-1] + 1
                        df = df.iloc[:last_index + 1]
                df['contract_group'] = contract_group.name
                dfs.append(df)
        ret_df = pd.concat(dfs)
        ret_df = ret_df.sort_values(by=['timestamp', 'contract_group', 'symbol'])
        ret_df = ret_df[['timestamp', 'contract_group', 'symbol', 'position', 'price', 'unrealized', 'realized',
                         'commission', 'fee', 'net_pnl']]
        return ret_df

    def df_account_pnl(self, contract_group: ContractGroup = None) -> pd.DataFrame:
        '''Returns PNL at the account level (one contract group only if contract_group is set)'''
        if contract_group is not None:
            symbols = [contract.symbol for contract in contract_group.contracts if contract.symbol in self.symbol_pnls]
            symbol_pnls = [self.symbol_pnls[symbol] for symbol in symbols]
        else:
            symbol_pnls = list(self.symbol_pnls.values())
        timestamps = self.calc_timestamps
        # Builtin float: the np.float alias was deprecated in NumPy 1.20 and removed in 1.24
        position = np.full(len(timestamps), 0., dtype=float)
        realized = np.full(len(timestamps), 0., dtype=float)
        unrealized = np.full(len(timestamps), 0., dtype=float)
        fee = np.full(len(timestamps), 0., dtype=float)
        commission = np.full(len(timestamps), 0., dtype=float)
        net_pnl = np.full(len(timestamps), 0., dtype=float)
        for i, timestamp in enumerate(timestamps):
            for symbol_pnl in symbol_pnls:
                _position, _price, _realized, _unrealized, _fee, _commission, _net_pnl = symbol_pnl.pnl(timestamp)
                # Skip non-finite components so one bad price does not poison the account totals
                if math.isfinite(_position): position[i] += _position
                if math.isfinite(_realized): realized[i] += _realized
                if math.isfinite(_unrealized): unrealized[i] += _unrealized
                if math.isfinite(_fee): fee[i] += _fee
                if math.isfinite(_commission): commission[i] += _commission
                if math.isfinite(_net_pnl): net_pnl[i] += _net_pnl
        df = pd.DataFrame.from_records(zip(timestamps, position, unrealized, realized, commission, fee, net_pnl),
                                       columns=['timestamp', 'position', 'unrealized', 'realized', 'commission', 'fee', 'net_pnl'])
        df['equity'] = self.starting_equity + df.net_pnl
        return df[['timestamp', 'position', 'unrealized', 'realized', 'commission', 'fee', 'net_pnl', 'equity']]

    def df_trades(self,
                  contract_group: ContractGroup = None,
                  start_date: np.datetime64 = None,
                  end_date: np.datetime64 = None) -> pd.DataFrame:
        '''Returns a dataframe of trades for the given contract group and date range (all if None)'''
        trades = self.trades(contract_group, start_date, end_date)
        df = pd.DataFrame.from_records([(
            trade.contract.symbol,
            trade.timestamp,
            trade.qty,
            trade.price,
            trade.fee,
            trade.commission,
            trade.order.timestamp,
            trade.order.qty,
            trade.order.reason_code,
            (str(trade.order.properties.__dict__) if trade.order.properties.__dict__ else ''),
            (str(trade.contract.properties.__dict__) if trade.contract.properties.__dict__ else '')) for trade in trades],
            columns=['symbol', 'timestamp', 'qty', 'price', 'fee', 'commission', 'order_date', 'order_qty',
                     'reason_code', 'order_props', 'contract_props'])
        df = df.sort_values(by=['timestamp', 'symbol'])
        return df
def test_account():
    '''Smoke test: run two symbols through an Account and verify trade, pnl and equity outputs.'''
    from pyqstrat.pq_types import MarketOrder

    def get_close_price(contract, timestamps, idx, strategy_context):
        # Deterministic synthetic close: a per-symbol base price plus the timestamp index
        symbol_bases = {"IBM": 10.1, "MSFT": 15.3}
        if contract.symbol not in symbol_bases:
            raise Exception(f'unknown contract: {contract}')
        return idx + symbol_bases[contract.symbol]

    # Reset the global registries so the test is repeatable within one process
    ContractGroup.clear()
    Contract.clear()
    ibm_group = ContractGroup.create('IBM')
    msft_group = ContractGroup.create('MSFT')
    ibm = Contract.create('IBM', contract_group=ibm_group)
    msft = Contract.create('MSFT', contract_group=msft_group)
    timestamps = np.array(['2018-01-01 09:00', '2018-01-02 08:00', '2018-01-02 09:00', '2018-01-05 13:35'], dtype='M8[m]')
    account = Account([ibm_group, msft_group], timestamps, get_close_price, None)
    fills = [
        Trade(ibm, MarketOrder(ibm, np.datetime64('2018-01-01 09:00'), 10),
              np.datetime64('2018-01-02 08:00'), 10, 10.1, commission=0.01),
        Trade(ibm, MarketOrder(ibm, np.datetime64('2018-01-01 09:00'), -20),
              np.datetime64('2018-01-02 09:00'), -20, 15.1, commission=0.02),
        Trade(msft, MarketOrder(msft, timestamps[1], 15), timestamps[1], 20, 13.2, commission=0.04),
        Trade(msft, MarketOrder(msft, timestamps[2], 20), timestamps[2], 20, 16.2, commission=0.05),
    ]
    account.add_trades(fills)
    account.calc(np.datetime64('2018-01-05 13:35'))
    assert len(account.df_trades()) == 4
    assert len(account.df_pnl()) == 6
    assert np.allclose(np.array([9.99, 61.96, 79.97, 103.91, 69.97, 143.91]), account.df_pnl().net_pnl.values, rtol=0)
    assert np.allclose(np.array([10, 20, -10, 40, -10, 40]), account.df_pnl().position.values, rtol=0)
    assert np.allclose(np.array([1000000., 1000183.88, 1000213.88]), account.df_account_pnl().equity.values, rtol=0)
if __name__ == "__main__":
    # Run the smoke test, then any doctests defined in this module.
    test_account()
    import doctest
    doctest.testmod(optionflags=doctest.NORMALIZE_WHITESPACE)
| true | true |
f720b5f3be28e969cd5ce5fed492f2e66b5c370c | 881 | py | Python | setup.py | bayjan/openrisknet_magkoufopoulou | b1ed6dea48d67243c9ac81eec59e5d7830ca68de | [
"MIT"
] | null | null | null | setup.py | bayjan/openrisknet_magkoufopoulou | b1ed6dea48d67243c9ac81eec59e5d7830ca68de | [
"MIT"
] | null | null | null | setup.py | bayjan/openrisknet_magkoufopoulou | b1ed6dea48d67243c9ac81eec59e5d7830ca68de | [
"MIT"
] | null | null | null | #!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
Setup file for openrisknet_magkoufopoulou.
This file was generated with PyScaffold 3.0.3.
PyScaffold helps you to put up the scaffold of your new Python project.
Learn more under: http://pyscaffold.org/
"""
import sys
from setuptools import setup
# Add here console scripts and other entry points in ini-style format.
# This string is handed verbatim to setuptools' `entry_points` argument;
# the '#'-lines inside it are comments in the ini syntax, not Python.
entry_points = """
[console_scripts]
# script_name = openrisknet_magkoufopoulou.module:function
# For example:
# fibonacci = openrisknet_magkoufopoulou.skeleton:run
"""
def setup_package():
    """Invoke setuptools, pulling in sphinx only for documentation commands."""
    doc_commands = {'build_sphinx', 'upload_docs'}
    requires = ['pyscaffold>=3.0a0,<3.1a0']
    # Only require sphinx when a documentation-related command was requested.
    if doc_commands.intersection(sys.argv):
        requires.append('sphinx')
    setup(setup_requires=requires,
          entry_points=entry_points,
          use_pyscaffold=True)
# Allow `python setup.py <command>` invocation.
if __name__ == "__main__":
    setup_package()
| 26.69697 | 75 | 0.713961 |
import sys
from setuptools import setup
entry_points = """
[console_scripts]
# script_name = openrisknet_magkoufopoulou.module:function
# For example:
# fibonacci = openrisknet_magkoufopoulou.skeleton:run
"""
def setup_package():
needs_sphinx = {'build_sphinx', 'upload_docs'}.intersection(sys.argv)
sphinx = ['sphinx'] if needs_sphinx else []
setup(setup_requires=['pyscaffold>=3.0a0,<3.1a0'] + sphinx,
entry_points=entry_points,
use_pyscaffold=True)
if __name__ == "__main__":
setup_package()
| true | true |
f720b71a384cd705368c6959d78e6566a4530fc2 | 349 | py | Python | materials/sp20/hw/hw01/tests/q9.py | ds-modules/Deepnote-demo | 548c12ced6cae774ecd0036aa1e8bb833af6472c | [
"BSD-3-Clause"
] | null | null | null | materials/sp20/hw/hw01/tests/q9.py | ds-modules/Deepnote-demo | 548c12ced6cae774ecd0036aa1e8bb833af6472c | [
"BSD-3-Clause"
] | null | null | null | materials/sp20/hw/hw01/tests/q9.py | ds-modules/Deepnote-demo | 548c12ced6cae774ecd0036aa1e8bb833af6472c | [
"BSD-3-Clause"
] | null | null | null | test = {
'name': 'q9',
'points': 1,
'suites': [
{
'cases': [
{
'code': r"""
>>> survey == "2020 vision"
True
""",
'hidden': False,
'locked': False
}
],
'scored': True,
'setup': '',
'teardown': '',
'type': 'doctest'
}
]
}
| 15.173913 | 37 | 0.312321 | test = {
'name': 'q9',
'points': 1,
'suites': [
{
'cases': [
{
'code': r"""
>>> survey == "2020 vision"
True
""",
'hidden': False,
'locked': False
}
],
'scored': True,
'setup': '',
'teardown': '',
'type': 'doctest'
}
]
}
| true | true |
f720b75f54a9c131c4dcb67dc0dfaf8842c62e8e | 31,745 | py | Python | pwnlib/gdb.py | Ngugisenior/pwntools | c15afc592a94a5fd4c1255d2ce0137be38164a66 | [
"MIT"
] | null | null | null | pwnlib/gdb.py | Ngugisenior/pwntools | c15afc592a94a5fd4c1255d2ce0137be38164a66 | [
"MIT"
] | null | null | null | pwnlib/gdb.py | Ngugisenior/pwntools | c15afc592a94a5fd4c1255d2ce0137be38164a66 | [
"MIT"
] | 1 | 2019-12-07T10:45:52.000Z | 2019-12-07T10:45:52.000Z | # -*- coding: utf-8 -*-
"""
During exploit development, it is frequently useful to debug the
target binary under GDB.
Pwntools makes this easy-to-do with a handful of helper routines, designed
to make your exploit-debug-update cycles much faster.
Useful Functions
----------------
- :func:`attach` - Attach to an existing process
- :func:`debug` - Start a new process under a debugger, stopped at the first instruction
- :func:`debug_shellcode` - Build a binary with the provided shellcode, and start it under a debugger
Debugging Tips
--------------
The :func:`attach` and :func:`debug` functions will likely be your bread and
butter for debugging.
Both allow you to provide a script to pass to GDB when it is started, so that
it can automatically set your breakpoints.
Attaching to Processes
~~~~~~~~~~~~~~~~~~~~~~
To attach to an existing process, just use :func:`attach`. It is surprisingly
versatile, and can attach to a :class:`.process` for simple
binaries, or will automatically find the correct process to attach to for a
forking server, if given a :class:`.remote` object.
Spawning New Processes
~~~~~~~~~~~~~~~~~~~~~~
Attaching to processes with :func:`attach` is useful, but the state the process
is in may vary. If you need to attach to a process very early, and debug it from
the very first instruction (or even the start of ``main``), you instead should use
:func:`debug`.
When you use :func:`debug`, the return value is a :class:`.tube` object
that you interact with exactly like normal.
Tips and Troubleshooting
------------------------
``NOPTRACE`` magic argument
~~~~~~~~~~~~~~~~~~~~~~~~~~~
It's quite cumbersome to comment and un-comment lines containing `attach`.
You can cause these lines to be a no-op by running your script with the
``NOPTRACE`` argument appended, or with ``PWNLIB_NOPTRACE=1`` in the environment.
::
$ python exploit.py NOPTRACE
[+] Starting local process '/bin/bash': Done
[!] Skipping debug attach since context.noptrace==True
...
Kernel Yama ptrace_scope
~~~~~~~~~~~~~~~~~~~~~~~~
The Linux kernel v3.4 introduced a security mechanism called ``ptrace_scope``,
which is intended to prevent processes from debugging each other unless there is
a direct parent-child relationship.
This causes some issues with the normal Pwntools workflow, since the process
hierarchy looks like this:
::
python ---> target
`--> gdb
Note that ``python`` is the parent of ``target``, not ``gdb``.
In order to avoid this being a problem, Pwntools uses the function
``prctl(PR_SET_PTRACER, PR_SET_PTRACER_ANY)``. This disables Yama
for any processes launched by Pwntools via :class:`.process` or via
:meth:`.ssh.process`.
Older versions of Pwntools did not perform the ``prctl`` step, and
required that the Yama security feature was disabled systemwide, which
requires ``root`` access.
Member Documentation
===============================
"""
from __future__ import absolute_import
from __future__ import division
import os
import random
import re
import shlex
import tempfile
import time
from pwnlib import adb
from pwnlib import atexit
from pwnlib import elf
from pwnlib import qemu
from pwnlib import tubes
from pwnlib.asm import _bfdname
from pwnlib.asm import make_elf
from pwnlib.asm import make_elf_from_assembly
from pwnlib.context import LocalContext
from pwnlib.context import context
from pwnlib.log import getLogger
from pwnlib.util import misc
from pwnlib.util import proc
log = getLogger(__name__)
@LocalContext
def debug_assembly(asm, gdbscript=None, vma=None):
    r"""debug_assembly(asm, gdbscript=None, vma=None) -> tube

    Creates an ELF file, and launches it under a debugger.

    This is identical to debug_shellcode, except that
    any defined symbols are available in GDB, and it
    saves you the explicit call to asm().

    Arguments:
        asm(str): Assembly code to debug
        gdbscript(str): Script to run in GDB
        vma(int): Base address to load the shellcode at
        **kwargs: Override any :obj:`pwnlib.context.context` values.

    Returns:
        :class:`.process`

    Example:

    .. code-block:: python

        assembly = shellcraft.echo("Hello world!\n")
        io = gdb.debug_assembly(assembly)
        io.recvline()
        # 'Hello world!'
    """
    tmp_elf = make_elf_from_assembly(asm, vma=vma, extract=False)
    # World-readable/executable so the debugger stub can exec it regardless
    # of which user it runs as.
    os.chmod(tmp_elf, 0777)
    # Clean up the temporary ELF when the exploit script exits.
    atexit.register(lambda: os.unlink(tmp_elf))
    if context.os == 'android':
        # Android can't execute binaries from the host; push to the device first.
        android_path = '/data/data/%s' % os.path.basename(tmp_elf)
        adb.push(tmp_elf, android_path)
        tmp_elf = android_path
    return debug(tmp_elf, gdbscript=gdbscript, arch=context.arch)
@LocalContext
def debug_shellcode(data, gdbscript=None, vma=None):
"""
Creates an ELF file, and launches it under a debugger.
Arguments:
data(str): Assembled shellcode bytes
gdbscript(str): Script to run in GDB
vma(int): Base address to load the shellcode at
**kwargs: Override any :obj:`pwnlib.context.context` values.
Returns:
:class:`.process`
Example:
.. code-block:: python
assembly = shellcraft.echo("Hello world!\n")
shellcode = asm(assembly)
io = gdb.debug_shellcode(shellcode)
io.recvline()
# 'Hello world!'
"""
if isinstance(data, unicode):
log.error("Shellcode is cannot be unicode. Did you mean debug_assembly?")
tmp_elf = make_elf(data, extract=False, vma=vma)
os.chmod(tmp_elf, 0777)
atexit.register(lambda: os.unlink(tmp_elf))
if context.os == 'android':
android_path = '/data/data/%s' % os.path.basename(tmp_elf)
adb.push(tmp_elf, android_path)
tmp_elf = android_path
return debug(tmp_elf, gdbscript=gdbscript, arch=context.arch)
def _gdbserver_args(pid=None, path=None, args=None, which=None):
    """_gdbserver_args(pid=None, path=None, args=None, which=None) -> list

    Builds the command line for a listening gdbserver, which either attaches
    to the specified PID, or launches the specified binary by its full path.

    Arguments:
        pid(int): Process ID to attach to
        path(str): Process to launch
        args(list): List of arguments to provide on the debugger command line
        which(callable): Function to find the path of a binary.

    Returns:
        A list of arguments to invoke gdbserver.
    """
    # Exactly one of pid / path / args must be provided.
    if [pid, path, args].count(None) != 2:
        log.error("Must specify exactly one of pid, path, or args")
    if not which:
        log.error("Must specify which.")
    gdbserver = ''
    if not args:
        args = [str(path or pid)]
    # Android targets have a distinct gdbserver
    if context.bits == 64:
        gdbserver = which('gdbserver64')
    if not gdbserver:
        gdbserver = which('gdbserver')
    if not gdbserver:
        log.error("gdbserver is not installed")
    orig_args = args  # NOTE(review): assigned but unused in this function
    # '--multi' starts gdbserver in extended-remote mode.
    gdbserver_args = [gdbserver, '--multi']
    if context.aslr:
        gdbserver_args += ['--no-disable-randomization']
    else:
        log.warn_once("Debugging process with ASLR disabled")
    if pid:
        gdbserver_args += ['--once', '--attach']
    # Port 0 lets gdbserver pick a free port; _gdbserver_port() parses it
    # out of gdbserver's banner output afterwards.
    gdbserver_args += ['localhost:0']
    gdbserver_args += args
    return gdbserver_args
def _gdbserver_port(gdbserver, ssh):
    """Parse gdbserver's banner to find its PID and listening port.

    Sets ``gdbserver.pid`` as a side effect, and sets up port forwarding
    over SSH or ADB when needed.  Returns the local port to connect GDB to.
    """
    which = _get_which(ssh)  # NOTE(review): assigned but unused below
    # gdbserver's banner looks like:
    # Process /bin/bash created; pid = 14366
    # Listening on port 34816
    process_created = gdbserver.recvline()
    if process_created.startswith('ERROR:'):
        raise ValueError(
            'Failed to spawn process under gdbserver. gdbserver error message: %s' % process_created
        )
    # Base 0 lets int() accept decimal or 0x-prefixed output.
    gdbserver.pid = int(process_created.split()[-1], 0)
    listening_on = ''
    while 'Listening' not in listening_on:
        listening_on = gdbserver.recvline()
    port = int(listening_on.split()[-1])
    # Set up port forwarding for SSH
    if ssh:
        remote = ssh.connect_remote('127.0.0.1', port)
        listener = tubes.listen.listen(0)
        port = listener.lport
        # Disable showing GDB traffic when debugging verbosity is increased
        remote.level = 'error'
        listener.level = 'error'
        # Hook them up: `<>` is the pwntools tube-splice operator
        # (Python 2's inequality operator, overloaded on tubes).
        remote <> listener
    # Set up port forwarding for ADB
    elif context.os == 'android':
        adb.forward(port)
    return port
def _get_which(ssh=None):
if ssh: return ssh.which
elif context.os == 'android': return adb.which
else: return misc.which
def _get_runner(ssh=None):
if ssh: return ssh.process
elif context.os == 'android': return adb.process
else: return tubes.process.process
@LocalContext
def debug(args, gdbscript=None, exe=None, ssh=None, env=None, sysroot=None, **kwargs):
    """debug(args) -> tube

    Launch a GDB server with the specified command line,
    and launches GDB to attach to it.

    Arguments:
        args(list): Arguments to the process, similar to :class:`.process`.
        gdbscript(str): GDB script to run.
        exe(str): Path to the executable on disk
        env(dict): Environment to start the binary in
        ssh(:class:`.ssh`): Remote ssh session to use to launch the process.
        sysroot(str): Foreign-architecture sysroot, used for QEMU-emulated binaries
            and Android targets.

    Returns:
        :class:`.process` or :class:`.ssh_channel`: A tube connected to the target process

    Notes:

        The debugger is attached automatically, and you can debug everything
        from the very beginning.  This requires that both ``gdb`` and ``gdbserver``
        are installed on your machine.

        When GDB opens via :func:`debug`, it will initially be stopped on the very first
        instruction of the dynamic linker (``ld.so``) for dynamically-linked binaries.

        Only the target binary and the linker will be loaded in memory, so you cannot
        set breakpoints on shared library routines like ``malloc`` since ``libc.so``
        has not even been loaded yet.

        There are several ways to handle this:

        1. Set a breakpoint on the executable's entry point (generally, ``_start``)
            - This is only invoked after all of the required shared libraries
              are loaded.
            - You can generally get the address via the GDB command ``info file``.
        2. Use pending breakpoints via ``set breakpoint pending on``
            - This has the side-effect of setting breakpoints for **every** function
              which matches the name.  For ``malloc``, this will generally set a
              breakpoint in the executable's PLT, in the linker's internal ``malloc``,
              and eventaully in ``libc``'s malloc.
        3. Wait for libraries to be loaded with ``set stop-on-solib-event 1``
            - There is no way to stop on any specific library being loaded, and sometimes
              multiple libraries are loaded and only a single breakpoint is issued.
            - Generally, you just add a few ``continue`` commands until things are set up
              the way you want it to be.

    Examples:

        .. code-block:: python

            # Create a new process, and stop it at 'main'
            io = gdb.debug('bash', '''
            break main
            continue
            ''')

            # Send a command to Bash
            io.sendline("echo hello")

            # Interact with the process
            io.interactive()

        .. code-block:: python

            # Create a new process, and stop it at 'main'
            io = gdb.debug('bash', '''
            # Wait until we hit the main executable's entry point
            break _start
            continue

            # Now set breakpoint on shared library routines
            break malloc
            break free
            continue
            ''')

            # Send a command to Bash
            io.sendline("echo hello")

            # Interact with the process
            io.interactive()

        You can use :func:`debug` to spawn new processes on remote machines as well,
        by using the ``ssh=`` keyword to pass in your :class:`.ssh` instance.

        .. code-block:: python

            # Connect to the SSH server
            shell = ssh('passcode', 'pwnable.kr', 2222, password='guest')

            # Start a process on the server
            io = gdb.debug(['bash'],
                            ssh=shell,
                            gdbscript='''
            break main
            continue
            ''')

            # Send a command to Bash
            io.sendline("echo hello")

            # Interact with the process
            io.interactive()
    """
    if isinstance(args, (int, tubes.process.process, tubes.ssh.ssh_channel)):
        log.error("Use gdb.attach() to debug a running process")
    if env is None:
        env = os.environ
    if isinstance(args, (str, unicode)):
        args = [args]
    # Remember the original argv; `args` is rewritten below to include the
    # gdbserver/QEMU wrapper.
    orig_args = args
    runner = _get_runner(ssh)
    which = _get_which(ssh)
    gdbscript = gdbscript or ''
    if context.noptrace:
        # NOPTRACE: run the target directly, without any debugger.
        log.warn_once("Skipping debugger since context.noptrace==True")
        return runner(args, executable=exe, env=env)
    if ssh or context.native or (context.os == 'android'):
        # Native (or ssh/Android) targets run under gdbserver.
        args = _gdbserver_args(args=args, which=which)
    else:
        # Foreign-architecture targets run under QEMU user emulation, with its
        # built-in GDB stub listening on a random port.
        qemu_port = random.randint(1024, 65535)
        qemu_user = qemu.user_path()
        sysroot = sysroot or qemu.ld_prefix(env=env)
        if not qemu_user:
            log.error("Cannot debug %s binaries without appropriate QEMU binaries" % context.arch)
        args = [qemu_user, '-g', str(qemu_port)] + args
    # Use a sane default sysroot for Android
    if not sysroot and context.os == 'android':
        sysroot = 'remote:/'
    # Make sure gdbserver/qemu is installed
    if not which(args[0]):
        log.error("%s is not installed" % args[0])
    exe = exe or which(orig_args[0])
    if not exe:
        log.error("%s does not exist" % orig_args[0])
    else:
        # Load the target binary's symbols before running the user's script.
        gdbscript = 'file "%s"\n%s' % (exe, gdbscript)
    # Start gdbserver/qemu
    # (Note: We override ASLR here for the gdbserver process itself.)
    gdbserver = runner(args, env=env, aslr=1, **kwargs)
    # Set the .executable on the process object.
    gdbserver.executable = which(orig_args[0])
    # Find what port we need to connect to
    if context.native or (context.os == 'android'):
        port = _gdbserver_port(gdbserver, ssh)
    else:
        port = qemu_port
    host = '127.0.0.1'
    if not ssh and context.os == 'android':
        host = context.adb_host
    attach((host, port), exe=exe, gdbscript=gdbscript, need_ptrace_scope = False, ssh=ssh, sysroot=sysroot)
    # gdbserver outputs a message when a client connects
    garbage = gdbserver.recvline(timeout=1)
    # Some versions of gdbserver output an additional message
    garbage2 = gdbserver.recvline_startswith("Remote debugging from host ", timeout=1)
    return gdbserver
def get_gdb_arch():
    """Translate ``context.arch`` into the architecture name GDB expects.

    Architectures not listed here share the same name in both worlds.
    """
    translations = {
        'amd64': 'i386:x86-64',
        'powerpc': 'powerpc:common',
        'powerpc64': 'powerpc:common64',
        'mips64': 'mips:isa64',
        'thumb': 'arm',
    }
    return translations.get(context.arch, context.arch)
def binary():
    """binary() -> str

    Returns:
        str: Path to the appropriate ``gdb`` binary to use.

    Example:

        >>> gdb.binary() # doctest: +SKIP
        '/usr/bin/gdb'
    """
    # Prefer a pwntools-specific wrapper if one is installed.
    gdb_path = misc.which('pwntools-gdb') or misc.which('gdb')

    # Cross-architecture targets want gdb-multiarch when available.
    if not context.native:
        multiarch = misc.which('gdb-multiarch')
        if multiarch:
            return multiarch
        log.warn_once('Cross-architecture debugging usually requires gdb-multiarch\n'
                      '$ apt-get install gdb-multiarch')

    if not gdb_path:
        log.error('GDB is not installed\n'
                  '$ apt-get install gdb')

    return gdb_path
@LocalContext
def attach(target, gdbscript = None, exe = None, need_ptrace_scope = True, gdb_args = None, ssh = None, sysroot = None):
    """attach(target, gdbscript = None, exe = None, gdb_args = None, ssh = None, sysroot = None) -> None

    Start GDB in a new terminal and attach to `target`.

    Arguments:
        target: The target to attach to.
        gdbscript(:obj:`str` or :obj:`file`): GDB script to run after attaching.
        exe(str): The path of the target binary.
        need_ptrace_scope(bool): Accepted for backward compatibility;
            not used by this function.
        gdb_args(list): List of additional arguments to pass to GDB.
        sysroot(str): Foreign-architecture sysroot, used for QEMU-emulated binaries
            and Android targets.

    Returns:
        PID of the GDB process (or the window which it is running in).

    Notes:

        The ``target`` argument is very robust, and can be any of the following:

        :obj:`int`
            PID of a process
        :obj:`str`
            Process name.  The youngest process is selected.
        :obj:`tuple`
            Host, port pair of a listening ``gdbserver``
        :class:`.process`
            Process to connect to
        :class:`.sock`
            Connected socket.  The executable on the other end of the connection is attached to.
            Can be any socket type, including :class:`.listen` or :class:`.remote`.
        :class:`.ssh_channel`
            Remote process spawned via :meth:`.ssh.process`.
            This will use the GDB installed on the remote machine.
            If a password is required to connect, the ``sshpass`` program must be installed.

    Examples:

        .. code-block:: python

            # Attach directly to pid 1234
            gdb.attach(1234)

        .. code-block:: python

            # Attach to the youngest "bash" process
            gdb.attach('bash')

        .. code-block:: python

            # Start a process
            bash = process('bash')

            # Attach the debugger
            gdb.attach(bash, '''
            set follow-fork-mode child
            break execve
            continue
            ''')

            # Interact with the process
            bash.sendline('whoami')

        .. code-block:: python

            # Start a forking server
            server = process(['socat', 'tcp-listen:1234,fork,reuseaddr', 'exec:/bin/sh'])

            # Connect to the server
            io = remote('localhost', 1234)

            # Connect the debugger to the server-spawned process
            gdb.attach(io, '''
            break exit
            continue
            ''')

            # Talk to the spawned 'sh'
            io.sendline('exit')

        .. code-block:: python

            # Connect to the SSH server
            shell = ssh('bandit0', 'bandit.labs.overthewire.org', password='bandit0', port=2220)

            # Start a process on the server
            cat = shell.process(['cat'])

            # Attach a debugger to it
            gdb.attach(cat, '''
            break exit
            continue
            ''')

            # Cause `cat` to exit
            cat.close()
    """
    if context.noptrace:
        log.warn_once("Skipping debug attach since context.noptrace==True")
        return
    # if gdbscript is a file object, then read it; we probably need to run some
    # more gdb script anyway
    if isinstance(gdbscript, file):
        with gdbscript:
            gdbscript = gdbscript.read()
    # enable gdb.attach(p, 'continue')
    if gdbscript and not gdbscript.endswith('\n'):
        gdbscript += '\n'
    # Use a sane default sysroot for Android
    if not sysroot and context.os == 'android':
        sysroot = 'remote:/'
    # gdb script to run before `gdbscript`
    pre = ''
    if not context.native:
        # Cross-architecture targets need explicit endian/arch/sysroot setup.
        pre += 'set endian %s\n' % context.endian
        pre += 'set architecture %s\n' % get_gdb_arch()
        if sysroot:
            pre += 'set sysroot %s\n' % sysroot
        if context.os == 'android':
            pre += 'set gnutarget ' + _bfdname() + '\n'
    # let's see if we can find a pid to attach to
    pid = None
    if isinstance(target, (int, long)):
        # target is a pid, easy peasy
        pid = target
    elif isinstance(target, str):
        # pidof picks the youngest process
        pidof = proc.pidof
        if context.os == 'android':
            pidof = adb.pidof
        pids = pidof(target)
        if not pids:
            log.error('No such process: %s' % target)
        pid = pids[0]
        log.info('Attaching to youngest process "%s" (PID = %d)' %
                 (target, pid))
    elif isinstance(target, tubes.ssh.ssh_channel):
        # Remote process: run GDB on the remote host over ssh, with the
        # gdbscript uploaded to a tempfile that deletes itself on use.
        if not target.pid:
            log.error("PID unknown for channel")
        shell = target.parent
        tmpfile = shell.mktemp()
        gdbscript = 'shell rm %s\n%s' % (tmpfile, gdbscript)
        shell.upload_data(gdbscript or '', tmpfile)
        cmd = ['ssh', '-C', '-t', '-p', str(shell.port), '-l', shell.user, shell.host]
        if shell.password:
            if not misc.which('sshpass'):
                log.error("sshpass must be installed to debug ssh processes")
            cmd = ['sshpass', '-p', shell.password] + cmd
        if shell.keyfile:
            cmd += ['-i', shell.keyfile]
        cmd += ['gdb -q %r %s -x "%s"' % (target.executable,
                                          target.pid,
                                          tmpfile)]
        misc.run_in_new_terminal(' '.join(cmd))
        return
    elif isinstance(target, tubes.sock.sock):
        # Connected socket: attach to whichever local process owns the
        # other end of the connection (e.g. a forked server child).
        pids = proc.pidof(target)
        if not pids:
            log.error('could not find remote process (%s:%d) on this machine' %
                      target.sock.getpeername())
        pid = pids[0]
    elif isinstance(target, tubes.process.process):
        pid = proc.pidof(target)[0]
        exe = exe or target.executable
    elif isinstance(target, tuple) and len(target) == 2:
        # (host, port) of a listening gdbserver.
        host, port = target
        if context.os != 'android':
            pre += 'target remote %s:%d\n' % (host, port)
        else:
            # Android debugging is done over gdbserver, which can't follow
            # new inferiors (tldr; follow-fork-mode child) unless it is run
            # in extended-remote mode.
            pre += 'target extended-remote %s:%d\n' % (host, port)
            pre += 'set detach-on-fork off\n'
        def findexe():
            # Guess the debuggee's executable from a local qemu/gdbserver
            # wrapper process, if one exists.
            for spid in proc.pidof(target):
                sexe = proc.exe(spid)
                name = os.path.basename(sexe)
                # XXX: parse cmdline
                if name.startswith('qemu-') or name.startswith('gdbserver'):
                    exe = proc.cmdline(spid)[-1]
                    return os.path.join(proc.cwd(spid), exe)
        exe = exe or findexe()
    elif isinstance(target, elf.corefile.Corefile):
        pre += 'target core %s\n' % target.path
    else:
        log.error("don't know how to attach to target: %r" % target)
    # if we have a pid but no exe, just look it up in /proc/
    if pid and not exe:
        exe_fn = proc.exe
        if context.os == 'android':
            exe_fn = adb.proc_exe
        exe = exe_fn(pid)
    if not pid and not exe:
        log.error('could not find target process')
    if exe:
        # The 'file' statement should go first
        pre = 'file "%s"\n%s' % (exe, pre)
    # Build up the local GDB command line.
    cmd = binary()
    if gdb_args:
        cmd += ' '
        cmd += ' '.join(gdb_args)
    if context.gdbinit:
        cmd += ' -nh '  # ignore ~/.gdbinit
        cmd += ' -x %s ' % context.gdbinit  # load custom gdbinit
    cmd += ' -q '
    if exe and context.native:
        if not ssh and not os.path.isfile(exe):
            log.error('No such file: %s' % exe)
        cmd += ' "%s"' % exe
    if pid and not context.os == 'android':
        cmd += ' %d' % pid
    if context.os == 'android' and pid:
        # On Android, spawn a gdbserver on the device and attach through it.
        runner = _get_runner()
        which = _get_which()
        gdb_cmd = _gdbserver_args(pid=pid, which=which)
        gdbserver = runner(gdb_cmd)
        port = _gdbserver_port(gdbserver, None)
        host = context.adb_host
        pre += 'target extended-remote %s:%i\n' % (context.adb_host, port)
        # gdbserver on Android sets 'detach-on-fork on' which breaks things
        # when you're trying to debug anything that forks.
        pre += 'set detach-on-fork off\n'
    gdbscript = pre + (gdbscript or '')
    if gdbscript:
        # Write the script to a tempfile; the script's first command removes
        # the tempfile so nothing is left behind.
        tmp = tempfile.NamedTemporaryFile(prefix = 'pwn', suffix = '.gdb',
                                          delete = False)
        log.debug('Wrote gdb script to %r\n%s' % (tmp.name, gdbscript))
        gdbscript = 'shell rm %s\n%s' % (tmp.name, gdbscript)
        tmp.write(gdbscript)
        tmp.close()
        cmd += ' -x "%s"' % (tmp.name)
    log.info('running in new terminal: %s' % cmd)
    gdb_pid = misc.run_in_new_terminal(cmd)
    if pid and context.native:
        # Block until GDB has actually attached, so the caller can interact
        # with the target immediately afterwards.
        proc.wait_for_debugger(pid)
    return gdb_pid
def ssh_gdb(ssh, argv, gdbscript = None, arch = None, **kwargs):
    """Run a command under gdbserver on a remote ssh host and attach to it.

    Arguments:
        ssh(:class:`.ssh`): Established ssh session.
        argv(list): Command to run on the remote host.
        gdbscript(str): GDB script to run after attaching.
        arch(str): Architecture of the target binary.
        **kwargs: Passed through to :meth:`.ssh.process`.

    Returns:
        The :class:`.ssh_channel` for the remote process.
    """
    if not isinstance(argv, (list, tuple)):
        argv = [argv]
    exe = argv[0]
    argv = ["gdbserver", "--multi", "127.0.0.1:0"] + argv
    # Download the executable
    local_exe = os.path.basename(exe)
    ssh.download_file(ssh.which(exe), local_exe)
    # Run the process
    c = ssh.process(argv, **kwargs)
    # Find the port for the gdb server
    c.recvuntil('port ')
    line = c.recvline().strip()
    gdbport = re.match('[0-9]+', line)
    if gdbport:
        gdbport = int(gdbport.group(0))
    # Forward a local port to the remote gdbserver port.
    l = tubes.listen.listen(0)
    forwardport = l.lport
    # NOTE(review): `arch` is passed positionally here, landing in attach()'s
    # `need_ptrace_scope` parameter (which attach() never reads) -- so `arch`
    # is effectively dropped.  Confirm whether this should set context.arch
    # instead.
    attach(('127.0.0.1', forwardport), gdbscript, local_exe, arch, ssh=ssh)
    l.wait_for_connection() <> ssh.connect_remote('127.0.0.1', gdbport)
    return c
def find_module_addresses(binary, ssh=None, ulimit=False):
    """
    Cheat to find modules by using GDB.

    We can't use ``/proc/$pid/map`` since some servers forbid it.
    This breaks ``info proc`` in GDB, but ``info sharedlibrary`` still works.
    Additionally, ``info sharedlibrary`` works on FreeBSD, which may not have
    procfs enabled or accessible.

    The output looks like this:

    ::

        info proc mapping
        process 13961
        warning: unable to open /proc file '/proc/13961/maps'

        info sharedlibrary
        From        To          Syms Read   Shared Object Library
        0xf7fdc820  0xf7ff505f  Yes (*)     /lib/ld-linux.so.2
        0xf7fbb650  0xf7fc79f8  Yes         /lib32/libpthread.so.0
        0xf7e26f10  0xf7f5b51c  Yes (*)     /lib32/libc.so.6
        (*): Shared library is missing debugging information.

    Note that the raw addresses provided by ``info sharedlibrary`` are actually
    the address of the ``.text`` segment, not the image base address.

    This routine automates the entire process of:

    1. Downloading the binaries from the remote server
    2. Scraping GDB for the information
    3. Loading each library into an ELF
    4. Fixing up the base address vs. the ``.text`` segment address

    Arguments:
        binary(str): Path to the binary on the remote server
        ssh(pwnlib.tubes.tube): SSH connection through which to load the libraries.
            If left as :const:`None`, will use a :class:`pwnlib.tubes.process.process`.
        ulimit(bool): Set to :const:`True` to run "ulimit -s unlimited" before GDB.

    Returns:
        A list of pwnlib.elf.ELF objects, with correct base addresses.

    Example:

        >>> with context.local(log_level=9999): # doctest: +SKIP
        ...     shell = ssh(host='bandit.labs.overthewire.org',user='bandit0',password='bandit0', port=2220)
        ...     bash_libs = gdb.find_module_addresses('/bin/bash', shell)
        >>> os.path.basename(bash_libs[0].path) # doctest: +SKIP
        'libc.so.6'
        >>> hex(bash_libs[0].symbols['system']) # doctest: +SKIP
        '0x7ffff7634660'
    """
    #
    # Download all of the remote libraries
    #
    if ssh:
        runner = ssh.run
        local_bin = ssh.download_file(binary)
        local_elf = elf.ELF(os.path.basename(binary))
        local_libs = ssh.libs(binary)
    else:
        runner = tubes.process.process
        local_elf = elf.ELF(binary)
        local_libs = local_elf.libs
    entry = local_elf.header.e_entry
    #
    # Get the addresses from GDB
    #
    libs = {}
    cmd = "gdb -q --args %s" % (binary)
    # Matches e.g. "0xf7e26f10 ... /lib32/libc.so.6" -> (address, path)
    expr = re.compile(r'(0x\S+)[^/]+(.*)')
    if ulimit:
        cmd = 'sh -c "(ulimit -s unlimited; %s)"' % cmd
    cmd = shlex.split(cmd)
    with runner(cmd) as gdb:
        if context.aslr:
            gdb.sendline('set disable-randomization off')
        # Break at the entry point so the dynamic loader has mapped all
        # shared libraries by the time we ask about them.
        gdb.send("""
        set prompt
        break *%#x
        run
        """ % entry)
        gdb.clean(2)
        gdb.sendline('info sharedlibrary')
        lines = gdb.recvrepeat(2)
        for line in lines.splitlines():
            m = expr.match(line)
            if m:
                libs[m.group(2)] = int(m.group(1),16)
        gdb.sendline('kill')
        gdb.sendline('y')
        gdb.sendline('quit')
    #
    # Fix up all of the addresses against the .text address
    #
    rv = []
    for remote_path,text_address in sorted(libs.items()):
        # Match up the local copy to the remote path
        try:
            path = next(p for p in local_libs.keys() if remote_path in p)
        except StopIteration:
            print "Skipping %r" % remote_path
            continue
        # Load it
        lib = elf.ELF(path)
        # Find its text segment
        text = lib.get_section_by_name('.text')
        # Fix the address
        lib.address = text_address - text.header.sh_addr
        rv.append(lib)
    return rv
def corefile(process):
    r"""Drops a core file for the process.

    Arguments:
        process: Process to dump

    Returns:
        :class:`.Core`: The generated core file
    """
    if context.noptrace:
        log.warn_once("Skipping corefile since context.noptrace==True")
        return
    corefile_path = './core.%s.%i' % (os.path.basename(process.executable),
                                      process.pid)
    # Due to https://sourceware.org/bugzilla/show_bug.cgi?id=16092
    # will disregard coredump_filter, and will not dump private mappings.
    if version() < (7,11):
        log.warn_once('The installed GDB (%s) does not emit core-dumps which '
                      'contain all of the data in the process.\n'
                      'Upgrade to GDB >= 7.11 for better core-dumps.' % binary())
    # This is effectively the same as what the 'gcore' binary does
    gdb_args = ['-batch',
                '-q',
                '--nx',
                '-ex', '"set pagination off"',
                '-ex', '"set height 0"',
                '-ex', '"set width 0"',
                '-ex', '"set use-coredump-filter on"',
                '-ex', '"generate-core-file %s"' % corefile_path,
                '-ex', 'detach']
    # Run GDB headlessly ('sh -c' terminal) and silently; wait for it to
    # finish writing the core before loading it.
    with context.local(terminal = ['sh', '-c']):
        with context.quiet:
            pid = attach(process, gdb_args=gdb_args)
            os.waitpid(pid, 0)
    return elf.corefile.Core(corefile_path)
def version(program='gdb'):
    """Gets the current GDB version.
    Note:
        Requires that GDB version meets the following format:
        ``GNU gdb (GDB) 7.12``
    Arguments:
        program(str): Name of the debugger binary to query.
    Returns:
        tuple: A tuple containing the version numbers
    Example:
        >>> (7,0) <= gdb.version() <= (8,0)
        True
    """
    binary_path = misc.which(program)
    version_pattern = r'([0-9]+\.?)+'
    with tubes.process.process([binary_path, '--version'], level='error') as io:
        banner = io.recvline()
        dotted = re.search(version_pattern, banner).group()
    return tuple(int(piece) for piece in dotted.split('.'))
| 31.587065 | 120 | 0.598834 |
"""
During exploit development, it is frequently useful to debug the
target binary under GDB.
Pwntools makes this easy-to-do with a handful of helper routines, designed
to make your exploit-debug-update cycles much faster.
Useful Functions
----------------
- :func:`attach` - Attach to an existing process
- :func:`debug` - Start a new process under a debugger, stopped at the first instruction
- :func:`debug_shellcode` - Build a binary with the provided shellcode, and start it under a debugger
Debugging Tips
--------------
The :func:`attach` and :func:`debug` functions will likely be your bread and
butter for debugging.
Both allow you to provide a script to pass to GDB when it is started, so that
it can automatically set your breakpoints.
Attaching to Processes
~~~~~~~~~~~~~~~~~~~~~~
To attach to an existing process, just use :func:`attach`. It is surprisingly
versatile, and can attach to a :class:`.process` for simple
binaries, or will automatically find the correct process to attach to for a
forking server, if given a :class:`.remote` object.
Spawning New Processes
~~~~~~~~~~~~~~~~~~~~~~
Attaching to processes with :func:`attach` is useful, but the state the process
is in may vary. If you need to attach to a process very early, and debug it from
the very first instruction (or even the start of ``main``), you instead should use
:func:`debug`.
When you use :func:`debug`, the return value is a :class:`.tube` object
that you interact with exactly like normal.
Tips and Troubleshooting
------------------------
``NOPTRACE`` magic argument
~~~~~~~~~~~~~~~~~~~~~~~~~~~
It's quite cumbersome to comment and un-comment lines containing `attach`.
You can cause these lines to be a no-op by running your script with the
``NOPTRACE`` argument appended, or with ``PWNLIB_NOPTRACE=1`` in the environment.
::
$ python exploit.py NOPTRACE
[+] Starting local process '/bin/bash': Done
[!] Skipping debug attach since context.noptrace==True
...
Kernel Yama ptrace_scope
~~~~~~~~~~~~~~~~~~~~~~~~
The Linux kernel v3.4 introduced a security mechanism called ``ptrace_scope``,
which is intended to prevent processes from debugging each other unless there is
a direct parent-child relationship.
This causes some issues with the normal Pwntools workflow, since the process
hierarchy looks like this:
::
python ---> target
`--> gdb
Note that ``python`` is the parent of ``target``, not ``gdb``.
In order to avoid this being a problem, Pwntools uses the function
``prctl(PR_SET_PTRACER, PR_SET_PTRACER_ANY)``. This disables Yama
for any processes launched by Pwntools via :class:`.process` or via
:meth:`.ssh.process`.
Older versions of Pwntools did not perform the ``prctl`` step, and
required that the Yama security feature was disabled systemwide, which
requires ``root`` access.
Member Documentation
===============================
"""
from __future__ import absolute_import
from __future__ import division
import os
import random
import re
import shlex
import tempfile
import time
from pwnlib import adb
from pwnlib import atexit
from pwnlib import elf
from pwnlib import qemu
from pwnlib import tubes
from pwnlib.asm import _bfdname
from pwnlib.asm import make_elf
from pwnlib.asm import make_elf_from_assembly
from pwnlib.context import LocalContext
from pwnlib.context import context
from pwnlib.log import getLogger
from pwnlib.util import misc
from pwnlib.util import proc
log = getLogger(__name__)
@LocalContext
def debug_assembly(asm, gdbscript=None, vma=None):
"""debug_assembly(asm, gdbscript=None, vma=None) -> tube
Creates an ELF file, and launches it under a debugger.
This is identical to debug_shellcode, except that
any defined symbols are available in GDB, and it
saves you the explicit call to asm().
Arguments:
asm(str): Assembly code to debug
gdbscript(str): Script to run in GDB
vma(int): Base address to load the shellcode at
**kwargs: Override any :obj:`pwnlib.context.context` values.
Returns:
:class:`.process`
Example:
.. code-block:: python
assembly = shellcraft.echo("Hello world!\n")
io = gdb.debug_assembly(assembly)
io.recvline()
# 'Hello world!'
"""
tmp_elf = make_elf_from_assembly(asm, vma=vma, extract=False)
os.chmod(tmp_elf, 0777)
atexit.register(lambda: os.unlink(tmp_elf))
if context.os == 'android':
android_path = '/data/data/%s' % os.path.basename(tmp_elf)
adb.push(tmp_elf, android_path)
tmp_elf = android_path
return debug(tmp_elf, gdbscript=gdbscript, arch=context.arch)
@LocalContext
def debug_shellcode(data, gdbscript=None, vma=None):
"""
Creates an ELF file, and launches it under a debugger.
Arguments:
data(str): Assembled shellcode bytes
gdbscript(str): Script to run in GDB
vma(int): Base address to load the shellcode at
**kwargs: Override any :obj:`pwnlib.context.context` values.
Returns:
:class:`.process`
Example:
.. code-block:: python
assembly = shellcraft.echo("Hello world!\n")
shellcode = asm(assembly)
io = gdb.debug_shellcode(shellcode)
io.recvline()
# 'Hello world!'
"""
if isinstance(data, unicode):
log.error("Shellcode is cannot be unicode. Did you mean debug_assembly?")
tmp_elf = make_elf(data, extract=False, vma=vma)
os.chmod(tmp_elf, 0777)
atexit.register(lambda: os.unlink(tmp_elf))
if context.os == 'android':
android_path = '/data/data/%s' % os.path.basename(tmp_elf)
adb.push(tmp_elf, android_path)
tmp_elf = android_path
return debug(tmp_elf, gdbscript=gdbscript, arch=context.arch)
def _gdbserver_args(pid=None, path=None, args=None, which=None):
    """_gdbserver_args(pid=None, path=None, args=None, which=None) -> list
    Builds the argument vector for a gdbserver that either attaches to
    the specified PID, or launches the specified binary.
    Exactly one of ``pid``, ``path``, or ``args`` must be provided.
    Arguments:
        pid(int): Process ID to attach to
        path(str): Process to launch
        args(list): List of arguments to provide on the debugger command line
        which(callable): Function to find the path of a binary.
    Returns:
        A list of arguments to invoke gdbserver.
    """
    if [pid, path, args].count(None) != 2:
        log.error("Must specify exactly one of pid, path, or args")
    if not which:
        log.error("Must specify which.")
    gdbserver = ''
    if not args:
        args = [str(path or pid)]
    # 64-bit Android targets have a distinct gdbserver binary.
    if context.bits == 64:
        gdbserver = which('gdbserver64')
    if not gdbserver:
        gdbserver = which('gdbserver')
    if not gdbserver:
        log.error("gdbserver is not installed")
    gdbserver_args = [gdbserver, '--multi']
    if context.aslr:
        gdbserver_args += ['--no-disable-randomization']
    else:
        log.warn_once("Debugging process with ASLR disabled")
    if pid:
        gdbserver_args += ['--once', '--attach']
    # Port 0 lets gdbserver pick a free port; _gdbserver_port() scrapes
    # the chosen port from its output.
    gdbserver_args += ['localhost:0']
    gdbserver_args += args
    return gdbserver_args
def _gdbserver_port(gdbserver, ssh):
    """Scrapes the listening port from a freshly-launched gdbserver tube,
    and sets up any forwarding (SSH tunnel or ADB) needed for the local
    GDB to reach it.
    Arguments:
        gdbserver: Tube connected to the gdbserver's output.
        ssh: Optional :class:`.ssh` session the server was launched over.
    Returns:
        int: Local port to which GDB should connect.
    Raises:
        ValueError: If gdbserver failed to spawn the target process.
    """
    which = _get_which(ssh)
    # gdbserver prints two lines we care about, e.g.:
    # Process /bin/bash created; pid = 14366
    # Listening on port 34816
    process_created = gdbserver.recvline()
    if process_created.startswith('ERROR:'):
        raise ValueError(
            'Failed to spawn process under gdbserver. gdbserver error message: %s' % process_created
        )
    # Last token of the first line is the target PID (base auto-detected).
    gdbserver.pid = int(process_created.split()[-1], 0)
    listening_on = ''
    while 'Listening' not in listening_on:
        listening_on = gdbserver.recvline()
    port = int(listening_on.split()[-1])
    # Set up port forwarding for SSH
    if ssh:
        remote = ssh.connect_remote('127.0.0.1', port)
        listener = tubes.listen.listen(0)
        port = listener.lport
        # Disable showing GDB traffic when debugging verbosity is increased
        remote.level = 'error'
        listener.level = 'error'
        # Hook them up: the '<>' tube operator splices the two ends together,
        # so connections to the local listener are tunneled to the remote port.
        remote <> listener
    # Set up port forwarding for ADB
    elif context.os == 'android':
        adb.forward(port)
    return port
def _get_which(ssh=None):
    """Pick the which() implementation matching the target platform."""
    if ssh:
        return ssh.which
    if context.os == 'android':
        return adb.which
    return misc.which
def _get_runner(ssh=None):
    """Pick the process-spawning function matching the target platform."""
    if ssh:
        return ssh.process
    if context.os == 'android':
        return adb.process
    return tubes.process.process
@LocalContext
def debug(args, gdbscript=None, exe=None, ssh=None, env=None, sysroot=None, **kwargs):
    """debug(args) -> tube
    Launch a GDB server with the specified command line,
    and launches GDB to attach to it.
    Arguments:
        args(list): Arguments to the process, similar to :class:`.process`.
        gdbscript(str): GDB script to run.
        exe(str): Path to the executable on disk
        env(dict): Environment to start the binary in
        ssh(:class:`.ssh`): Remote ssh session to use to launch the process.
        sysroot(str): Foreign-architecture sysroot, used for QEMU-emulated binaries
            and Android targets.
    Returns:
        :class:`.process` or :class:`.ssh_channel`: A tube connected to the target process
    Notes:
        The debugger is attached automatically, and you can debug everything
        from the very beginning. This requires that both ``gdb`` and ``gdbserver``
        are installed on your machine.
        When GDB opens via :func:`debug`, it will initially be stopped on the very first
        instruction of the dynamic linker (``ld.so``) for dynamically-linked binaries.
        Only the target binary and the linker will be loaded in memory, so you cannot
        set breakpoints on shared library routines like ``malloc`` since ``libc.so``
        has not even been loaded yet.
        There are several ways to handle this:
        1. Set a breakpoint on the executable's entry point (generally, ``_start``)
            - This is only invoked after all of the required shared libraries
              are loaded.
            - You can generally get the address via the GDB command ``info file``.
        2. Use pending breakpoints via ``set breakpoint pending on``
            - This has the side-effect of setting breakpoints for **every** function
              which matches the name. For ``malloc``, this will generally set a
              breakpoint in the executable's PLT, in the linker's internal ``malloc``,
              and eventually in ``libc``'s malloc.
        3. Wait for libraries to be loaded with ``set stop-on-solib-event 1``
            - There is no way to stop on any specific library being loaded, and sometimes
              multiple libraries are loaded and only a single breakpoint is issued.
            - Generally, you just add a few ``continue`` commands until things are set up
              the way you want it to be.
    Examples:
        .. code-block:: python
            # Create a new process, and stop it at 'main'
            io = gdb.debug('bash', '''
            break main
            continue
            ''')
            # Send a command to Bash
            io.sendline("echo hello")
            # Interact with the process
            io.interactive()
        Processes on a remote machine can be debugged as well, by passing
        your :class:`.ssh` instance via the ``ssh=`` keyword.
    """
    if isinstance(args, (int, tubes.process.process, tubes.ssh.ssh_channel)):
        log.error("Use gdb.attach() to debug a running process")
    if env is None:
        env = os.environ
    if isinstance(args, (str, unicode)):
        args = [args]
    # Remember the original argv; 'args' is rewritten below into the
    # gdbserver (or QEMU) command line.
    orig_args = args
    runner = _get_runner(ssh)
    which = _get_which(ssh)
    gdbscript = gdbscript or ''
    if context.noptrace:
        log.warn_once("Skipping debugger since context.noptrace==True")
        return runner(args, executable=exe, env=env)
    if ssh or context.native or (context.os == 'android'):
        args = _gdbserver_args(args=args, which=which)
    else:
        # Foreign-architecture binary on the local host: run it under
        # QEMU user-mode emulation with its GDB stub on a random port.
        qemu_port = random.randint(1024, 65535)
        qemu_user = qemu.user_path()
        sysroot = sysroot or qemu.ld_prefix(env=env)
        if not qemu_user:
            log.error("Cannot debug %s binaries without appropriate QEMU binaries" % context.arch)
        args = [qemu_user, '-g', str(qemu_port)] + args
    if not sysroot and context.os == 'android':
        sysroot = 'remote:/'
    if not which(args[0]):
        log.error("%s is not installed" % args[0])
    exe = exe or which(orig_args[0])
    if not exe:
        log.error("%s does not exist" % orig_args[0])
    else:
        # Load the target binary into GDB before the user's script runs.
        gdbscript = 'file "%s"\n%s' % (exe, gdbscript)
    # ASLR is controlled via gdbserver/GDB flags, so launch with aslr=1 here.
    gdbserver = runner(args, env=env, aslr=1, **kwargs)
    gdbserver.executable = which(orig_args[0])
    if context.native or (context.os == 'android'):
        port = _gdbserver_port(gdbserver, ssh)
    else:
        port = qemu_port
    host = '127.0.0.1'
    if not ssh and context.os == 'android':
        host = context.adb_host
    attach((host, port), exe=exe, gdbscript=gdbscript, need_ptrace_scope = False, ssh=ssh, sysroot=sysroot)
    # Drain gdbserver's connection banner so it doesn't pollute the tube.
    garbage = gdbserver.recvline(timeout=1)
    garbage2 = gdbserver.recvline_startswith("Remote debugging from host ", timeout=1)
    return gdbserver
def get_gdb_arch():
    """Translate context.arch into the architecture name GDB expects."""
    translations = {
        'amd64': 'i386:x86-64',
        'powerpc': 'powerpc:common',
        'powerpc64': 'powerpc:common64',
        'mips64': 'mips:isa64',
        'thumb': 'arm',
    }
    return translations.get(context.arch, context.arch)
def binary():
    """binary() -> str
    Returns:
        str: Path to the appropriate ``gdb`` binary to use.
    Example:
        >>> gdb.binary() # doctest: +SKIP
        '/usr/bin/gdb'
    """
    path = misc.which('pwntools-gdb') or misc.which('gdb')
    if not context.native:
        # Cross-architecture debugging prefers gdb-multiarch when present.
        cross_gdb = misc.which('gdb-multiarch')
        if cross_gdb:
            return cross_gdb
        log.warn_once('Cross-architecture debugging usually requires gdb-multiarch\n' \
                      '$ apt-get install gdb-multiarch')
    if not path:
        log.error('GDB is not installed\n'
                  '$ apt-get install gdb')
    return path
@LocalContext
def attach(target, gdbscript = None, exe = None, need_ptrace_scope = True, gdb_args = None, ssh = None, sysroot = None):
    """attach(target, gdbscript = None, exe = None, arch = None, ssh = None) -> None
    Start GDB in a new terminal and attach to `target`.
    Arguments:
        target: The target to attach to.
        gdbscript(:obj:`str` or :obj:`file`): GDB script to run after attaching.
        exe(str): The path of the target binary.
        arch(str): Architecture of the target binary.  If `exe` known GDB will
            detect the architecture automatically (if it is supported).
        gdb_args(list): List of additional arguments to pass to GDB.
        sysroot(str): Foreign-architecture sysroot, used for QEMU-emulated binaries
            and Android targets.
    Returns:
        PID of the GDB process (or the window which it is running in).
    Notes:
        The ``target`` argument is very robust, and can be any of the following:
        :obj:`int`
            PID of a process
        :obj:`str`
            Process name. The youngest process is selected.
        :obj:`tuple`
            Host, port pair of a listening ``gdbserver``
        :class:`.process`
            Process to connect to
        :class:`.sock`
            Connected socket. The executable on the other end of the connection is attached to.
            Can be any socket type, including :class:`.listen` or :class:`.remote`.
        :class:`.ssh_channel`
            Remote process spawned via :meth:`.ssh.process`.
            This will use the GDB installed on the remote machine.
            If a password is required to connect, the ``sshpass`` program must be installed.
    Examples:
        .. code-block:: python
            # Attach directly to pid 1234
            gdb.attach(1234)
        .. code-block:: python
            # Attach to the youngest "bash" process
            gdb.attach('bash')
        .. code-block:: python
            # Start a process
            bash = process('bash')
            # Attach the debugger
            gdb.attach(bash, '''
            set follow-fork-mode child
            break execve
            continue
            ''')
            # Interact with the process
            bash.sendline('whoami')
        .. code-block:: python
            # Start a forking server
            server = process(['socat', 'tcp-listen:1234,fork,reuseaddr', 'exec:/bin/sh'])
            # Connect to the server
            io = remote('localhost', 1234)
            # Connect the debugger to the server-spawned process
            gdb.attach(io, '''
            break exit
            continue
            ''')
            # Talk to the spawned 'sh'
            io.sendline('exit')
        .. code-block:: python
            # Connect to the SSH server
            shell = ssh('bandit0', 'bandit.labs.overthewire.org', password='bandit0', port=2220)
            # Start a process on the server
            cat = shell.process(['cat'])
            # Attach a debugger to it
            gdb.attach(cat, '''
            break exit
            continue
            ''')
            # Cause `cat` to exit
            cat.close()
    """
    if context.noptrace:
        log.warn_once("Skipping debug attach since context.noptrace==True")
        return
    # Allow a file-like gdbscript; read it to a plain string up front.
    if isinstance(gdbscript, file):
        with gdbscript:
            gdbscript = gdbscript.read()
    if gdbscript and not gdbscript.endswith('\n'):
        gdbscript += '\n'
    if not sysroot and context.os == 'android':
        sysroot = 'remote:/'
    # 'pre' accumulates GDB commands that must run before the user script.
    pre = ''
    if not context.native:
        pre += 'set endian %s\n' % context.endian
        pre += 'set architecture %s\n' % get_gdb_arch()
        if sysroot:
            pre += 'set sysroot %s\n' % sysroot
    if context.os == 'android':
        pre += 'set gnutarget ' + _bfdname() + '\n'
    pid = None
    if isinstance(target, (int, long)):
        # target is a pid, easy peasy
        pid = target
    elif isinstance(target, str):
        # pidof picks the youngest process
        pidof = proc.pidof
        if context.os == 'android':
            pidof = adb.pidof
        pids = pidof(target)
        if not pids:
            log.error('No such process: %s' % target)
        pid = pids[0]
        log.info('Attaching to youngest process "%s" (PID = %d)' %
                 (target, pid))
    elif isinstance(target, tubes.ssh.ssh_channel):
        # Remote process: upload the script and run the remote host's GDB
        # over a fresh (possibly password-authenticated) SSH session.
        if not target.pid:
            log.error("PID unknown for channel")
        shell = target.parent
        tmpfile = shell.mktemp()
        # NOTE(review): if gdbscript is None this embeds the literal text
        # 'None' in the uploaded script -- confirm callers always pass one.
        gdbscript = 'shell rm %s\n%s' % (tmpfile, gdbscript)
        shell.upload_data(gdbscript or '', tmpfile)
        cmd = ['ssh', '-C', '-t', '-p', str(shell.port), '-l', shell.user, shell.host]
        if shell.password:
            if not misc.which('sshpass'):
                log.error("sshpass must be installed to debug ssh processes")
            cmd = ['sshpass', '-p', shell.password] + cmd
        if shell.keyfile:
            cmd += ['-i', shell.keyfile]
        cmd += ['gdb -q %r %s -x "%s"' % (target.executable,
                                          target.pid,
                                          tmpfile)]
        misc.run_in_new_terminal(' '.join(cmd))
        return
    elif isinstance(target, tubes.sock.sock):
        # Attach to whichever local process owns the other end of the socket.
        pids = proc.pidof(target)
        if not pids:
            log.error('could not find remote process (%s:%d) on this machine' %
                      target.sock.getpeername())
        pid = pids[0]
    elif isinstance(target, tubes.process.process):
        pid = proc.pidof(target)[0]
        exe = exe or target.executable
    elif isinstance(target, tuple) and len(target) == 2:
        host, port = target
        if context.os != 'android':
            pre += 'target remote %s:%d\n' % (host, port)
        else:
            # Android debugging is done over gdbserver, which can't follow
            pre += 'target extended-remote %s:%d\n' % (host, port)
            pre += 'set detach-on-fork off\n'
        # Try to recover the debuggee path from a local qemu/gdbserver
        # wrapper process, if one is running.
        def findexe():
            for spid in proc.pidof(target):
                sexe = proc.exe(spid)
                name = os.path.basename(sexe)
                if name.startswith('qemu-') or name.startswith('gdbserver'):
                    exe = proc.cmdline(spid)[-1]
                    return os.path.join(proc.cwd(spid), exe)
        exe = exe or findexe()
    elif isinstance(target, elf.corefile.Corefile):
        pre += 'target core %s\n' % target.path
    else:
        log.error("don't know how to attach to target: %r" % target)
    # if we have a pid but no exe, just look it up in /proc/
    if pid and not exe:
        exe_fn = proc.exe
        if context.os == 'android':
            exe_fn = adb.proc_exe
        exe = exe_fn(pid)
    if not pid and not exe:
        log.error('could not find target process')
    if exe:
        # The 'file' statement should go first
        pre = 'file "%s"\n%s' % (exe, pre)
    # Build the GDB command line.
    cmd = binary()
    if gdb_args:
        cmd += ' '
        cmd += ' '.join(gdb_args)
    if context.gdbinit:
        cmd += ' -nh '  # ignore ~/.gdbinit
        cmd += ' -x %s ' % context.gdbinit  # load custom gdbinit
    cmd += ' -q '
    if exe and context.native:
        if not ssh and not os.path.isfile(exe):
            log.error('No such file: %s' % exe)
        cmd += ' "%s"' % exe
    if pid and not context.os == 'android':
        cmd += ' %d' % pid
    if context.os == 'android' and pid:
        # Android can't be attached by PID directly; spin up a gdbserver
        # on the device and connect to it over the forwarded port.
        runner = _get_runner()
        which = _get_which()
        gdb_cmd = _gdbserver_args(pid=pid, which=which)
        gdbserver = runner(gdb_cmd)
        port = _gdbserver_port(gdbserver, None)
        host = context.adb_host
        pre += 'target extended-remote %s:%i\n' % (context.adb_host, port)
        # gdbserver on Android sets 'detach-on-fork on' which breaks things
        # when you're trying to debug anything that forks.
        pre += 'set detach-on-fork off\n'
    gdbscript = pre + (gdbscript or '')
    if gdbscript:
        # Write the combined script to a self-deleting temporary file.
        tmp = tempfile.NamedTemporaryFile(prefix = 'pwn', suffix = '.gdb',
                                          delete = False)
        log.debug('Wrote gdb script to %r\n%s' % (tmp.name, gdbscript))
        gdbscript = 'shell rm %s\n%s' % (tmp.name, gdbscript)
        tmp.write(gdbscript)
        tmp.close()
        cmd += ' -x "%s"' % (tmp.name)
    log.info('running in new terminal: %s' % cmd)
    gdb_pid = misc.run_in_new_terminal(cmd)
    if pid and context.native:
        # Block until GDB has actually attached to the target.
        proc.wait_for_debugger(pid)
    return gdb_pid
def ssh_gdb(ssh, argv, gdbscript = None, arch = None, **kwargs):
if not isinstance(argv, (list, tuple)):
argv = [argv]
exe = argv[0]
argv = ["gdbserver", "--multi", "127.0.0.1:0"] + argv
local_exe = os.path.basename(exe)
ssh.download_file(ssh.which(exe), local_exe)
c = ssh.process(argv, **kwargs)
c.recvuntil('port ')
line = c.recvline().strip()
gdbport = re.match('[0-9]+', line)
if gdbport:
gdbport = int(gdbport.group(0))
l = tubes.listen.listen(0)
forwardport = l.lport
attach(('127.0.0.1', forwardport), gdbscript, local_exe, arch, ssh=ssh)
l.wait_for_connection() <> ssh.connect_remote('127.0.0.1', gdbport)
return c
def find_module_addresses(binary, ssh=None, ulimit=False):
    """
    Cheat to find modules by using GDB.
    We can't use ``/proc/$pid/map`` since some servers forbid it.
    This breaks ``info proc`` in GDB, but ``info sharedlibrary`` still works.
    Additionally, ``info sharedlibrary`` works on FreeBSD, which may not have
    procfs enabled or accessible.
    Note that the raw addresses provided by ``info sharedlibrary`` are actually
    the address of the ``.text`` segment, not the image base address.
    This routine automates the entire process of:
    1. Downloading the binaries from the remote server
    2. Scraping GDB for the information
    3. Loading each library into an ELF
    4. Fixing up the base address vs. the ``.text`` segment address
    Arguments:
        binary(str): Path to the binary on the remote server
        ssh(pwnlib.tubes.tube): SSH connection through which to load the libraries.
            If left as :const:`None`, will use a :class:`pwnlib.tubes.process.process`.
        ulimit(bool): Set to :const:`True` to run "ulimit -s unlimited" before GDB.
    Returns:
        A list of pwnlib.elf.ELF objects, with correct base addresses.
    Example:
        >>> with context.local(log_level=9999): # doctest: +SKIP
        ...     shell = ssh(host='bandit.labs.overthewire.org',user='bandit0',password='bandit0', port=2220)
        ...     bash_libs = gdb.find_module_addresses('/bin/bash', shell)
        >>> os.path.basename(bash_libs[0].path) # doctest: +SKIP
        'libc.so.6'
        >>> hex(bash_libs[0].symbols['system']) # doctest: +SKIP
        '0x7ffff7634660'
    """
    #
    # Download all of the remote libraries
    #
    if ssh:
        runner = ssh.run
        # download_file() fetches the binary into the CWD; ELF() below
        # opens that local copy by basename.
        local_bin = ssh.download_file(binary)
        local_elf = elf.ELF(os.path.basename(binary))
        local_libs = ssh.libs(binary)
    else:
        runner = tubes.process.process
        local_elf = elf.ELF(binary)
        local_libs = local_elf.libs
    # Break at the program entry point so the loader has mapped all of
    # the shared libraries before we query GDB.
    entry = local_elf.header.e_entry
    #
    # Get the addresses from GDB
    #
    libs = {}
    cmd = "gdb -q --args %s" % (binary)
    # Matches e.g. '0xf7fdc820  0xf7ff505f  Yes (*)  /lib/ld-linux.so.2'
    # group(1) = .text address, group(2) = library path.
    expr = re.compile(r'(0x\S+)[^/]+(.*)')
    if ulimit:
        cmd = 'sh -c "(ulimit -s unlimited; %s)"' % cmd
    cmd = shlex.split(cmd)
    with runner(cmd) as gdb:
        if context.aslr:
            gdb.sendline('set disable-randomization off')
        gdb.send("""
        set prompt
        break *%#x
        run
        """ % entry)
        gdb.clean(2)
        gdb.sendline('info sharedlibrary')
        lines = gdb.recvrepeat(2)
        for line in lines.splitlines():
            m = expr.match(line)
            if m:
                libs[m.group(2)] = int(m.group(1),16)
        gdb.sendline('kill')
        gdb.sendline('y')
        gdb.sendline('quit')
    #
    # Fix up all of the addresses against the .text address
    #
    rv = []
    for remote_path,text_address in sorted(libs.items()):
        # Match up the local copy to the remote path
        try:
            path = next(p for p in local_libs.keys() if remote_path in p)
        except StopIteration:
            print "Skipping %r" % remote_path
            continue
        # Load it
        lib = elf.ELF(path)
        # Find its text segment
        text = lib.get_section_by_name('.text')
        # Fix the address: GDB reported the .text address, not the base
        lib.address = text_address - text.header.sh_addr
        rv.append(lib)
    return rv
def corefile(process):
    r"""Drops a core file for the process.
    Arguments:
        process: Process to dump
    Returns:
        :class:`.Core`: The generated core file
    """
    if context.noptrace:
        log.warn_once("Skipping corefile since context.noptrace==True")
        return
    core_path = './core.%s.%i' % (os.path.basename(process.executable),
                                  process.pid)
    # GDB before 7.11 ignores coredump_filter and omits private mappings
    # (https://sourceware.org/bugzilla/show_bug.cgi?id=16092).
    if version() < (7,11):
        log.warn_once('The installed GDB (%s) does not emit core-dumps which '
                      'contain all of the data in the process.\n'
                      'Upgrade to GDB >= 7.11 for better core-dumps.' % binary())
    # Mirror what the 'gcore' helper does: a batch GDB session that
    # generates the core file, then detaches.  The embedded quotes
    # survive the 'sh -c' join performed by attach().
    batch_args = [
        '-batch',
        '-q',
        '--nx',
        '-ex', '"set pagination off"',
        '-ex', '"set height 0"',
        '-ex', '"set width 0"',
        '-ex', '"set use-coredump-filter on"',
        '-ex', '"generate-core-file %s"' % core_path,
        '-ex', 'detach',
    ]
    with context.local(terminal = ['sh', '-c']):
        with context.quiet:
            gdb_pid = attach(process, gdb_args=batch_args)
            os.waitpid(gdb_pid, 0)
    return elf.corefile.Core(core_path)
def version(program='gdb'):
    """Gets the current GDB version.
    Note:
        Requires that GDB version meets the following format:
        ``GNU gdb (GDB) 7.12``
    Arguments:
        program(str): Name of the debugger binary to look up (default ``'gdb'``).
    Returns:
        tuple: A tuple containing the version numbers
    Example:
        >>> (7,0) <= gdb.version() <= (8,0)
        True
    """
    program = misc.which(program)
    expr = r'([0-9]+\.?)+'
    # The first line of '--version' output carries the dotted version number.
    with tubes.process.process([program, '--version'], level='error') as gdb:
        version = gdb.recvline()
        versions = re.search(expr, version).group()
    return tuple(map(int, versions.split('.')))
| false | true |
f720b7d1ae3ebf2758b3f637eac569a944ecce67 | 291 | py | Python | utils/test_clear_data.py | M1d0r1/py_mantis | 8d2b05601b9240e76e2e07b50770e39df5bcade9 | [
"Apache-2.0"
] | null | null | null | utils/test_clear_data.py | M1d0r1/py_mantis | 8d2b05601b9240e76e2e07b50770e39df5bcade9 | [
"Apache-2.0"
] | null | null | null | utils/test_clear_data.py | M1d0r1/py_mantis | 8d2b05601b9240e76e2e07b50770e39df5bcade9 | [
"Apache-2.0"
] | null | null | null | import random
def test_clear_projects_helper(app):
    """Delete every project, one randomly-chosen project at a time."""
    while app.project.count() > 0:
        app.project.navigate_to_manage_projects_page()
        remaining = app.project.get_project_list()
        victim = random.choice(remaining)
        app.project.delete_by_name(victim.name)
| 26.454545 | 54 | 0.725086 | import random
def test_clear_projects_helper(app):
    """Helper 'test' that deletes all projects via the management UI.
    Repeatedly navigates to the Manage Projects page and removes one
    randomly-chosen project until none remain.
    Arguments:
        app: Application fixture exposing the ``project`` helper object.
    """
    while app.project.count()>0:
        app.project.navigate_to_manage_projects_page()
        old_projects = app.project.get_project_list()
        # Pick a random project each pass so deletion order is not fixed.
        project = random.choice(old_projects)
        app.project.delete_by_name(project.name)
| true | true |
f720b849c7ccaf49918d5f8db7e3b19f11f3203f | 5,729 | py | Python | tempest/api/compute/admin/test_servers_negative.py | rishabh20111990/tempest | df15531cd4231000b0da016f5cd8641523ce984e | [
"Apache-2.0"
] | 2 | 2015-08-13T00:07:49.000Z | 2020-08-07T06:38:44.000Z | tempest/api/compute/admin/test_servers_negative.py | rishabh20111990/tempest | df15531cd4231000b0da016f5cd8641523ce984e | [
"Apache-2.0"
] | 1 | 2019-08-08T10:36:44.000Z | 2019-08-09T05:58:23.000Z | tempest/api/compute/admin/test_servers_negative.py | rishabh20111990/tempest | df15531cd4231000b0da016f5cd8641523ce984e | [
"Apache-2.0"
] | 5 | 2016-06-24T20:03:52.000Z | 2020-02-05T10:14:54.000Z | # Copyright 2013 Huawei Technologies Co.,LTD.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import testtools
from tempest.api.compute import base
from tempest.common import tempest_fixtures as fixtures
from tempest.common import waiters
from tempest import config
from tempest.lib.common.utils import data_utils
from tempest.lib import decorators
from tempest.lib import exceptions as lib_exc
CONF = config.CONF
class ServersAdminNegativeTestJSON(base.BaseV2ComputeAdminTest):
    """Tests Servers API using admin privileges"""
    @classmethod
    def setup_clients(cls):
        """Set up the admin servers and quotas clients."""
        super(ServersAdminNegativeTestJSON, cls).setup_clients()
        cls.client = cls.os_admin.servers_client
        cls.quotas_client = cls.os_admin.quotas_client
    @classmethod
    def resource_setup(cls):
        """Create one ACTIVE server shared by all tests in this class."""
        super(ServersAdminNegativeTestJSON, cls).resource_setup()
        cls.tenant_id = cls.client.tenant_id
        server = cls.create_test_server(wait_until='ACTIVE')
        cls.s1_id = server['id']
    @decorators.idempotent_id('28dcec23-f807-49da-822c-56a92ea3c687')
    @testtools.skipUnless(CONF.compute_feature_enabled.resize,
                          'Resize not available.')
    @decorators.attr(type=['negative'])
    def test_resize_server_using_overlimit_ram(self):
        """Resizing a server over the tenant RAM quota should be rejected."""
        # NOTE(mriedem): Avoid conflicts with os-quota-class-sets tests.
        self.useFixture(fixtures.LockFixture('compute_quotas'))
        quota_set = self.quotas_client.show_quota_set(
            self.tenant_id)['quota_set']
        ram = quota_set['ram']
        if ram == -1:
            raise self.skipException("ram quota set is -1,"
                                     " cannot test overlimit")
        # Build a flavor needing one MB more RAM than the quota allows.
        ram += 1
        vcpus = 1
        disk = 5
        flavor_ref = self.create_flavor(ram=ram, vcpus=vcpus, disk=disk)
        self.assertRaises((lib_exc.Forbidden, lib_exc.OverLimit),
                          self.client.resize_server,
                          self.s1_id,
                          flavor_ref['id'])
    @decorators.idempotent_id('7368a427-2f26-4ad9-9ba9-911a0ec2b0db')
    @testtools.skipUnless(CONF.compute_feature_enabled.resize,
                          'Resize not available.')
    @decorators.attr(type=['negative'])
    def test_resize_server_using_overlimit_vcpus(self):
        """Resizing a server over the tenant vCPU quota should be rejected."""
        # NOTE(mriedem): Avoid conflicts with os-quota-class-sets tests.
        self.useFixture(fixtures.LockFixture('compute_quotas'))
        quota_set = self.quotas_client.show_quota_set(
            self.tenant_id)['quota_set']
        vcpus = quota_set['cores']
        if vcpus == -1:
            raise self.skipException("cores quota set is -1,"
                                     " cannot test overlimit")
        # Build a flavor needing one more vCPU than the quota allows.
        vcpus += 1
        ram = 512
        disk = 5
        flavor_ref = self.create_flavor(ram=ram, vcpus=vcpus, disk=disk)
        self.assertRaises((lib_exc.Forbidden, lib_exc.OverLimit),
                          self.client.resize_server,
                          self.s1_id,
                          flavor_ref['id'])
    @decorators.attr(type=['negative'])
    @decorators.idempotent_id('b0b4d8af-1256-41ef-9ee7-25f1c19dde80')
    def test_reset_state_server_invalid_state(self):
        """reset_state with an unknown state string should be a BadRequest."""
        self.assertRaises(lib_exc.BadRequest,
                          self.client.reset_state, self.s1_id,
                          state='invalid')
    @decorators.attr(type=['negative'])
    @decorators.idempotent_id('4cdcc984-fab0-4577-9a9d-6d558527ee9d')
    def test_reset_state_server_invalid_type(self):
        """reset_state with a non-string state should be a BadRequest."""
        self.assertRaises(lib_exc.BadRequest,
                          self.client.reset_state, self.s1_id,
                          state=1)
    @decorators.attr(type=['negative'])
    @decorators.idempotent_id('e741298b-8df2-46f0-81cb-8f814ff2504c')
    def test_reset_state_server_nonexistent_server(self):
        """reset_state on a non-existent server should be a NotFound."""
        self.assertRaises(lib_exc.NotFound,
                          self.client.reset_state, '999', state='error')
    @decorators.attr(type=['negative'])
    @decorators.idempotent_id('46a4e1ca-87ae-4d28-987a-1b6b136a0221')
    def test_migrate_non_existent_server(self):
        """Migrating a non-existent server should be a NotFound."""
        # migrate a non existent server
        self.assertRaises(lib_exc.NotFound,
                          self.client.migrate_server,
                          data_utils.rand_uuid())
    @decorators.idempotent_id('b0b17f83-d14e-4fc4-8f31-bcc9f3cfa629')
    @testtools.skipUnless(CONF.compute_feature_enabled.cold_migration,
                          'Cold migration not available.')
    @testtools.skipUnless(CONF.compute_feature_enabled.suspend,
                          'Suspend is not available.')
    @decorators.attr(type=['negative'])
    def test_migrate_server_invalid_state(self):
        """Migrating a SUSPENDED server should be a Conflict."""
        # create server.
        server = self.create_test_server(wait_until='ACTIVE')
        server_id = server['id']
        # suspend the server.
        self.client.suspend_server(server_id)
        waiters.wait_for_server_status(self.client,
                                       server_id, 'SUSPENDED')
        # migrate a suspended server should fail
        self.assertRaises(lib_exc.Conflict,
                          self.client.migrate_server,
                          server_id)
| 42.437037 | 78 | 0.64479 |
import testtools
from tempest.api.compute import base
from tempest.common import tempest_fixtures as fixtures
from tempest.common import waiters
from tempest import config
from tempest.lib.common.utils import data_utils
from tempest.lib import decorators
from tempest.lib import exceptions as lib_exc
CONF = config.CONF
class ServersAdminNegativeTestJSON(base.BaseV2ComputeAdminTest):
@classmethod
def setup_clients(cls):
super(ServersAdminNegativeTestJSON, cls).setup_clients()
cls.client = cls.os_admin.servers_client
cls.quotas_client = cls.os_admin.quotas_client
@classmethod
def resource_setup(cls):
super(ServersAdminNegativeTestJSON, cls).resource_setup()
cls.tenant_id = cls.client.tenant_id
server = cls.create_test_server(wait_until='ACTIVE')
cls.s1_id = server['id']
@decorators.idempotent_id('28dcec23-f807-49da-822c-56a92ea3c687')
@testtools.skipUnless(CONF.compute_feature_enabled.resize,
'Resize not available.')
@decorators.attr(type=['negative'])
def test_resize_server_using_overlimit_ram(self):
self.useFixture(fixtures.LockFixture('compute_quotas'))
quota_set = self.quotas_client.show_quota_set(
self.tenant_id)['quota_set']
ram = quota_set['ram']
if ram == -1:
raise self.skipException("ram quota set is -1,"
" cannot test overlimit")
ram += 1
vcpus = 1
disk = 5
flavor_ref = self.create_flavor(ram=ram, vcpus=vcpus, disk=disk)
self.assertRaises((lib_exc.Forbidden, lib_exc.OverLimit),
self.client.resize_server,
self.s1_id,
flavor_ref['id'])
@decorators.idempotent_id('7368a427-2f26-4ad9-9ba9-911a0ec2b0db')
@testtools.skipUnless(CONF.compute_feature_enabled.resize,
'Resize not available.')
@decorators.attr(type=['negative'])
def test_resize_server_using_overlimit_vcpus(self):
self.useFixture(fixtures.LockFixture('compute_quotas'))
quota_set = self.quotas_client.show_quota_set(
self.tenant_id)['quota_set']
vcpus = quota_set['cores']
if vcpus == -1:
raise self.skipException("cores quota set is -1,"
" cannot test overlimit")
vcpus += 1
ram = 512
disk = 5
flavor_ref = self.create_flavor(ram=ram, vcpus=vcpus, disk=disk)
self.assertRaises((lib_exc.Forbidden, lib_exc.OverLimit),
self.client.resize_server,
self.s1_id,
flavor_ref['id'])
@decorators.attr(type=['negative'])
@decorators.idempotent_id('b0b4d8af-1256-41ef-9ee7-25f1c19dde80')
def test_reset_state_server_invalid_state(self):
self.assertRaises(lib_exc.BadRequest,
self.client.reset_state, self.s1_id,
state='invalid')
@decorators.attr(type=['negative'])
@decorators.idempotent_id('4cdcc984-fab0-4577-9a9d-6d558527ee9d')
def test_reset_state_server_invalid_type(self):
self.assertRaises(lib_exc.BadRequest,
self.client.reset_state, self.s1_id,
state=1)
@decorators.attr(type=['negative'])
@decorators.idempotent_id('e741298b-8df2-46f0-81cb-8f814ff2504c')
def test_reset_state_server_nonexistent_server(self):
self.assertRaises(lib_exc.NotFound,
self.client.reset_state, '999', state='error')
@decorators.attr(type=['negative'])
@decorators.idempotent_id('46a4e1ca-87ae-4d28-987a-1b6b136a0221')
def test_migrate_non_existent_server(self):
self.assertRaises(lib_exc.NotFound,
self.client.migrate_server,
data_utils.rand_uuid())
@decorators.idempotent_id('b0b17f83-d14e-4fc4-8f31-bcc9f3cfa629')
@testtools.skipUnless(CONF.compute_feature_enabled.cold_migration,
'Cold migration not available.')
@testtools.skipUnless(CONF.compute_feature_enabled.suspend,
'Suspend is not available.')
@decorators.attr(type=['negative'])
def test_migrate_server_invalid_state(self):
server = self.create_test_server(wait_until='ACTIVE')
server_id = server['id']
self.client.suspend_server(server_id)
waiters.wait_for_server_status(self.client,
server_id, 'SUSPENDED')
self.assertRaises(lib_exc.Conflict,
self.client.migrate_server,
server_id)
| true | true |
f720b868a2e8693f457acc29e9d2ffcfcf7e2f08 | 2,174 | py | Python | debug/ssd/test_ssd300.py | jjjkkkjjj/pytorch.dl | d82aa1191c14f328c62de85e391ac6fa1b4c7ee3 | [
"MIT"
] | 2 | 2021-02-06T22:40:13.000Z | 2021-03-26T09:15:34.000Z | debug/ssd/test_ssd300.py | jjjkkkjjj/pytorch.dl | d82aa1191c14f328c62de85e391ac6fa1b4c7ee3 | [
"MIT"
] | 8 | 2020-07-11T07:10:51.000Z | 2022-03-12T00:39:03.000Z | debug/ssd/test_ssd300.py | jjjkkkjjj/pytorch.dl | d82aa1191c14f328c62de85e391ac6fa1b4c7ee3 | [
"MIT"
] | 2 | 2021-03-26T09:19:42.000Z | 2021-07-27T02:38:09.000Z | from dl.data.objdetn import datasets, utils, target_transforms
from dl.data import transforms
from dl.models.ssd.ssd300 import SSD300
from dl.data.utils.converter import toVisualizeRectLabelRGBimg
from torch.utils.data import DataLoader
import cv2
if __name__ == '__main__':
augmentation = None
transform = transforms.Compose(
[transforms.Resize((300, 300)),
transforms.ToTensor(),
transforms.Normalize(rgb_means=(0.485, 0.456, 0.406), rgb_stds=(0.229, 0.224, 0.225))]
)
target_transform = target_transforms.Compose(
[target_transforms.Corners2Centroids(),
target_transforms.OneHot(class_nums=datasets.VOC_class_nums, add_background=True),
target_transforms.ToTensor()]
)
test_dataset = datasets.VOC2007_TestDataset(transform=transform, target_transform=target_transform, augmentation=augmentation)
test_loader = DataLoader(test_dataset,
batch_size=32,
shuffle=True,
collate_fn=utils.batch_ind_fn,
num_workers=4,
pin_memory=False)
model = SSD300(class_labels=datasets.VOC_class_labels, batch_norm=False)
model.load_weights('../../weights/ssd300-voc2007+12+coco/ssd300-voc2007+2012+coco_i-0025000_checkpoints20200611.pth')
#model.load_for_finetune('./weights/ssd300-voc2007+12+coco/ssd300-voc2007+2012+coco_i-30000.pth')
model.eval()
print(model)
#evaluator = VOC2007Evaluator(test_loader, iteration_interval=5000)
#ap = evaluator(model)
#print(ap)
image = cv2.cvtColor(cv2.imread('../../scripts/ssd/assets/coco_testimg.jpg'), cv2.COLOR_BGR2RGB)
infers, imgs, orig_imgs = model.infer(image, visualize=True, toNorm=True)
for i, img in enumerate(imgs):
cv2.imshow('result', cv2.cvtColor(img, cv2.COLOR_RGB2BGR))
cv2.waitKey()
images = [test_dataset[i][0] for i in range(20)]
inf, ret_imgs, orig_imgs = model.infer(images, visualize=True, toNorm=False)
for img in ret_imgs:
cv2.imshow('result', cv2.cvtColor(img, cv2.COLOR_RGB2BGR))
cv2.waitKey() | 43.48 | 130 | 0.677553 | from dl.data.objdetn import datasets, utils, target_transforms
from dl.data import transforms
from dl.models.ssd.ssd300 import SSD300
from dl.data.utils.converter import toVisualizeRectLabelRGBimg
from torch.utils.data import DataLoader
import cv2
if __name__ == '__main__':
augmentation = None
transform = transforms.Compose(
[transforms.Resize((300, 300)),
transforms.ToTensor(),
transforms.Normalize(rgb_means=(0.485, 0.456, 0.406), rgb_stds=(0.229, 0.224, 0.225))]
)
target_transform = target_transforms.Compose(
[target_transforms.Corners2Centroids(),
target_transforms.OneHot(class_nums=datasets.VOC_class_nums, add_background=True),
target_transforms.ToTensor()]
)
test_dataset = datasets.VOC2007_TestDataset(transform=transform, target_transform=target_transform, augmentation=augmentation)
test_loader = DataLoader(test_dataset,
batch_size=32,
shuffle=True,
collate_fn=utils.batch_ind_fn,
num_workers=4,
pin_memory=False)
model = SSD300(class_labels=datasets.VOC_class_labels, batch_norm=False)
model.load_weights('../../weights/ssd300-voc2007+12+coco/ssd300-voc2007+2012+coco_i-0025000_checkpoints20200611.pth')
model.eval()
print(model)
image = cv2.cvtColor(cv2.imread('../../scripts/ssd/assets/coco_testimg.jpg'), cv2.COLOR_BGR2RGB)
infers, imgs, orig_imgs = model.infer(image, visualize=True, toNorm=True)
for i, img in enumerate(imgs):
cv2.imshow('result', cv2.cvtColor(img, cv2.COLOR_RGB2BGR))
cv2.waitKey()
images = [test_dataset[i][0] for i in range(20)]
inf, ret_imgs, orig_imgs = model.infer(images, visualize=True, toNorm=False)
for img in ret_imgs:
cv2.imshow('result', cv2.cvtColor(img, cv2.COLOR_RGB2BGR))
cv2.waitKey() | true | true |
f720b8bdd62f9180ca3b2885a9c8833bcd68eaf4 | 2,706 | py | Python | obs.py | JTF4/cronicle-plugin-obs-studio | c3ccd0f0ffedd20b00052d1bcd3ddb8c53b8144f | [
"MIT"
] | null | null | null | obs.py | JTF4/cronicle-plugin-obs-studio | c3ccd0f0ffedd20b00052d1bcd3ddb8c53b8144f | [
"MIT"
] | null | null | null | obs.py | JTF4/cronicle-plugin-obs-studio | c3ccd0f0ffedd20b00052d1bcd3ddb8c53b8144f | [
"MIT"
] | 1 | 2021-06-29T13:09:16.000Z | 2021-06-29T13:09:16.000Z | #!/usr/bin/env python
# -*- coding: utf-8 -*-
# Author: David Stevens
import sys
import time
import json
import logging
logging.basicConfig(level=logging.INFO)
sys.path.append('../')
from obswebsocket import obsws, requests # noqa: E402
stdinput = sys.stdin.readline()
data = json.loads(stdinput)
try:
host = data['params']['host']
port = data['params']['port']
password = data['params']['password']
command = data['params']['command']
enableOverride = data['params']['enableOverride']
destinationScene = data['params']['destinationScene']
ws = obsws(host, port, password)
ws.connect()
getScenes = ws.call(requests.GetSceneList())
currentScene = getScenes.getCurrentScene()
getStreamInformation = ws.call(requests.GetStreamingStatus())
getStreamStatus = getStreamInformation.getStreaming()
print("Host:" + host)
print("Port:" + port)
print("Password:" + password)
print("Destination Scene:" + destinationScene)
print("Current Scene:" + currentScene)
print(getStreamStatus)
print("Override Status:")
print(enableOverride)
try:
#scenes = ws.call(requests.GetSceneList())
#for s in scenes.getScenes():
# name = s['name']
# print(u"Switching to {}".format(name))
# ws.call(requests.SetCurrentScene(name))
# time.sleep(2)
print("Started Command Processing")
if command == "Start Streaming Bool":
if getStreamStatus == False:
if currentScene == destinationScene:
print("Already running on the correct scene: Starting Stream")
ws.call(requests.StartStreaming())
else:
print("Setting scene to destination and starting stream")
ws.call(requests.SetCurrentScene(destinationScene))
time.sleep(2)
ws.call(requests.StartStreaming())
else:
print("Stream already running. Command halted.")
elif command == "Stop Stream":
ws.call(requests.StopStreaming())
elif command == "Start Stream":
ws.call(requests.StartStreaming())
elif command == "Switch Scene":
if enableOverride == "True" or getStreamStatus == False:
ws.call(requests.SetCurrentScene(destinationScene))
elif enableOverride == "False":
print("Override is not enabled.")
print("End of list")
except KeyboardInterrupt:
pass
ws.disconnect()
print('{ "complete": 1 }')
except:
print('{ "complete": 1, "code": 999, "description": "Failed to execute." }')
| 31.465116 | 82 | 0.603843 |
import sys
import time
import json
import logging
logging.basicConfig(level=logging.INFO)
sys.path.append('../')
from obswebsocket import obsws, requests
stdinput = sys.stdin.readline()
data = json.loads(stdinput)
try:
host = data['params']['host']
port = data['params']['port']
password = data['params']['password']
command = data['params']['command']
enableOverride = data['params']['enableOverride']
destinationScene = data['params']['destinationScene']
ws = obsws(host, port, password)
ws.connect()
getScenes = ws.call(requests.GetSceneList())
currentScene = getScenes.getCurrentScene()
getStreamInformation = ws.call(requests.GetStreamingStatus())
getStreamStatus = getStreamInformation.getStreaming()
print("Host:" + host)
print("Port:" + port)
print("Password:" + password)
print("Destination Scene:" + destinationScene)
print("Current Scene:" + currentScene)
print(getStreamStatus)
print("Override Status:")
print(enableOverride)
try:
print("Started Command Processing")
if command == "Start Streaming Bool":
if getStreamStatus == False:
if currentScene == destinationScene:
print("Already running on the correct scene: Starting Stream")
ws.call(requests.StartStreaming())
else:
print("Setting scene to destination and starting stream")
ws.call(requests.SetCurrentScene(destinationScene))
time.sleep(2)
ws.call(requests.StartStreaming())
else:
print("Stream already running. Command halted.")
elif command == "Stop Stream":
ws.call(requests.StopStreaming())
elif command == "Start Stream":
ws.call(requests.StartStreaming())
elif command == "Switch Scene":
if enableOverride == "True" or getStreamStatus == False:
ws.call(requests.SetCurrentScene(destinationScene))
elif enableOverride == "False":
print("Override is not enabled.")
print("End of list")
except KeyboardInterrupt:
pass
ws.disconnect()
print('{ "complete": 1 }')
except:
print('{ "complete": 1, "code": 999, "description": "Failed to execute." }')
| false | true |
f720b9d8103adc5ec7d583ef9b481eed71f4b5ce | 4,118 | py | Python | cinder/api/v3/backups.py | hashsos/hashcloudos-cinder | 6d8b648399e2160b419e3f9535eb520c7de9120e | [
"Apache-2.0"
] | null | null | null | cinder/api/v3/backups.py | hashsos/hashcloudos-cinder | 6d8b648399e2160b419e3f9535eb520c7de9120e | [
"Apache-2.0"
] | null | null | null | cinder/api/v3/backups.py | hashsos/hashcloudos-cinder | 6d8b648399e2160b419e3f9535eb520c7de9120e | [
"Apache-2.0"
] | null | null | null | # Copyright (c) 2016 Intel, Inc.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""The backups V3 API."""
from oslo_log import log as logging
from webob import exc
from cinder.api.contrib import backups as backups_v2
from cinder.api import microversions as mv
from cinder.api.openstack import wsgi
from cinder.api.v3.views import backups as backup_views
from cinder import exception
from cinder.i18n import _
from cinder.policies import backups as policy
LOG = logging.getLogger(__name__)
class BackupsController(backups_v2.BackupsController):
"""The backups API controller for the OpenStack API V3."""
_view_builder_class = backup_views.ViewBuilder
@wsgi.Controller.api_version(mv.BACKUP_UPDATE)
def update(self, req, id, body):
"""Update a backup."""
context = req.environ['cinder.context']
self.assert_valid_body(body, 'backup')
req_version = req.api_version_request
backup_update = body['backup']
self.validate_name_and_description(backup_update)
update_dict = {}
if 'name' in backup_update:
update_dict['display_name'] = backup_update.pop('name')
if 'description' in backup_update:
update_dict['display_description'] = (
backup_update.pop('description'))
if (req_version.matches(
mv.BACKUP_METADATA) and 'metadata' in backup_update):
update_dict['metadata'] = backup_update.pop('metadata')
# Check no unsupported fields.
if backup_update:
msg = _("Unsupported fields %s.") % (", ".join(backup_update))
raise exc.HTTPBadRequest(explanation=msg)
new_backup = self.backup_api.update(context, id, update_dict)
return self._view_builder.summary(req, new_backup)
def _add_backup_project_attribute(self, req, backup):
db_backup = req.get_db_backup(backup['id'])
key = "os-backup-project-attr:project_id"
backup[key] = db_backup['project_id']
def show(self, req, id):
"""Return data about the given backup."""
LOG.debug('Show backup with id %s.', id)
context = req.environ['cinder.context']
req_version = req.api_version_request
# Not found exception will be handled at the wsgi level
backup = self.backup_api.get(context, backup_id=id)
req.cache_db_backup(backup)
resp_backup = self._view_builder.detail(req, backup)
if req_version.matches(mv.BACKUP_PROJECT):
try:
context.authorize(policy.BACKUP_ATTRIBUTES_POLICY)
self._add_backup_project_attribute(req, resp_backup['backup'])
except exception.PolicyNotAuthorized:
pass
return resp_backup
def detail(self, req):
resp_backup = super(BackupsController, self).detail(req)
context = req.environ['cinder.context']
req_version = req.api_version_request
if req_version.matches(mv.BACKUP_PROJECT):
try:
context.authorize(policy.BACKUP_ATTRIBUTES_POLICY)
for bak in resp_backup['backups']:
self._add_backup_project_attribute(req, bak)
except exception.PolicyNotAuthorized:
pass
return resp_backup
def _convert_sort_name(self, req_version, sort_keys):
if req_version.matches(mv.BACKUP_SORT_NAME) and 'name' in sort_keys:
sort_keys[sort_keys.index('name')] = 'display_name'
def create_resource():
return wsgi.Resource(BackupsController())
| 37.099099 | 78 | 0.674114 |
from oslo_log import log as logging
from webob import exc
from cinder.api.contrib import backups as backups_v2
from cinder.api import microversions as mv
from cinder.api.openstack import wsgi
from cinder.api.v3.views import backups as backup_views
from cinder import exception
from cinder.i18n import _
from cinder.policies import backups as policy
LOG = logging.getLogger(__name__)
class BackupsController(backups_v2.BackupsController):
_view_builder_class = backup_views.ViewBuilder
@wsgi.Controller.api_version(mv.BACKUP_UPDATE)
def update(self, req, id, body):
context = req.environ['cinder.context']
self.assert_valid_body(body, 'backup')
req_version = req.api_version_request
backup_update = body['backup']
self.validate_name_and_description(backup_update)
update_dict = {}
if 'name' in backup_update:
update_dict['display_name'] = backup_update.pop('name')
if 'description' in backup_update:
update_dict['display_description'] = (
backup_update.pop('description'))
if (req_version.matches(
mv.BACKUP_METADATA) and 'metadata' in backup_update):
update_dict['metadata'] = backup_update.pop('metadata')
if backup_update:
msg = _("Unsupported fields %s.") % (", ".join(backup_update))
raise exc.HTTPBadRequest(explanation=msg)
new_backup = self.backup_api.update(context, id, update_dict)
return self._view_builder.summary(req, new_backup)
def _add_backup_project_attribute(self, req, backup):
db_backup = req.get_db_backup(backup['id'])
key = "os-backup-project-attr:project_id"
backup[key] = db_backup['project_id']
def show(self, req, id):
LOG.debug('Show backup with id %s.', id)
context = req.environ['cinder.context']
req_version = req.api_version_request
backup = self.backup_api.get(context, backup_id=id)
req.cache_db_backup(backup)
resp_backup = self._view_builder.detail(req, backup)
if req_version.matches(mv.BACKUP_PROJECT):
try:
context.authorize(policy.BACKUP_ATTRIBUTES_POLICY)
self._add_backup_project_attribute(req, resp_backup['backup'])
except exception.PolicyNotAuthorized:
pass
return resp_backup
def detail(self, req):
resp_backup = super(BackupsController, self).detail(req)
context = req.environ['cinder.context']
req_version = req.api_version_request
if req_version.matches(mv.BACKUP_PROJECT):
try:
context.authorize(policy.BACKUP_ATTRIBUTES_POLICY)
for bak in resp_backup['backups']:
self._add_backup_project_attribute(req, bak)
except exception.PolicyNotAuthorized:
pass
return resp_backup
def _convert_sort_name(self, req_version, sort_keys):
if req_version.matches(mv.BACKUP_SORT_NAME) and 'name' in sort_keys:
sort_keys[sort_keys.index('name')] = 'display_name'
def create_resource():
return wsgi.Resource(BackupsController())
| true | true |
f720ba2b3e7006741b82f2fe08ab0e27de7bf237 | 8,422 | py | Python | discordware/_vendors/hype/parser.py | znqi/discordware | e456bf7b0314ef8f29fabb9fa69f8c979f34d655 | [
"MIT"
] | 13 | 2021-07-31T12:07:06.000Z | 2022-03-24T15:00:50.000Z | discordware/_vendors/hype/parser.py | znqi/discordware | e456bf7b0314ef8f29fabb9fa69f8c979f34d655 | [
"MIT"
] | 2 | 2021-08-02T14:04:58.000Z | 2021-09-06T09:35:20.000Z | discordware/_vendors/hype/parser.py | znqi/discordware | e456bf7b0314ef8f29fabb9fa69f8c979f34d655 | [
"MIT"
] | 3 | 2021-08-07T13:23:54.000Z | 2022-01-24T13:23:08.000Z |
# Copyright (c) 2021, Serum Studio
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
from typing import List
from hype.command import HypeCommand
import optparse
from optparse import HelpFormatter
import sys
import textwrap
class HypeParser(optparse.OptionParser):
"""
A command parser for Hype CLI that was built on the top of
`optparse.OptionParser` This parser is pretty simmilar with
OptionParser, the only difference is the commands.
Parameters:
commands (list):
A list of all HypeCommands
**options (dict):
A dictionary of kwargs
Example:
>>> greet = HypeCommand('greet', help="%prog [OPTIONS]")
>>> greet.add_option('--name', type=str)
>>> ...
>>> goodbye = HypeCommand('goodbye', help="%prog [OPTIONS]")
>>> goodbye.add_option('--name', type=str)
>>> ...
>>> parser = HypeParser( commands=(greet, goodbye) )
>>> options, commands, command_opt, args = parser.parse_args()
"""
_HelpCommand = HypeCommand('help', help="All details about the commands", aliases=('?'))
def __init__(
self,
commands: List[HypeCommand] = [],
*args,
**options
):
self.commands = commands
self.options = options
if 'usage' not in self.options:
self.options['usage'] = "%prog COMMAND [ARGS..]\n%prog help COMMAND"
super(HypeParser, self).__init__(*args, **options)
for command in self.commands:
command.parser.prog = "%s %s" % (self.get_prog_name(), command.name)
self.disable_interspersed_args()
def add_command(self, cmd: HypeCommand):
"""
Add a command.
Parameters
---
cmd (HypeCommand):
The command to be add.
Example:
>>> goodbye = HypeCommand(..)
>>> parser = HyperParser(...)
>>> ...
>>> parser.add_command(goodbye)
"""
if not isinstance(cmd, HypeCommand):
raise TypeError('{} is not a instance of HypeCommand'.format(cmd))
self.commands.append(cmd)
def remove_command(self, name: str):
"""
Remove the command name to the list of registered command.
Parameters
---
name (str):
The name of the command to be removed.
Example:
>>> goodbye = HypeCommand(..)
>>> parser = HyperParser(...)
>>> ...
>>> parser.add_command(goodbye)
>>> parser.remove_command('goodbye')
"""
for command in self.commands:
if command.name == name:
self.commands.remove(command)
def format_help(self, formatter=None) -> str:
out = optparse.OptionParser.format_help(self, formatter)
if formatter == None:
formatter = self.formatter
#: HEADER for the Help command
result = ['\n']
result.append(formatter.format_heading('Commands'))
formatter.indent()
display_names = []
help_position = 0
for command in self.commands:
name = command.name
if command.aliases:
#: Add aliases of the command
name += ' (%s)' % (', '.join(command.aliases))
display_names.append(name)
#: Set the help position based on the max width.
proposed_help_position = len(name) + formatter.current_indent + 2
if proposed_help_position <= formatter.max_help_position:
help_position = max(help_position, proposed_help_position)
#: Add the command to the output
for command, name in zip(self.commands, display_names):
#: From optparse.py
name_width = help_position - formatter.current_indent - 2
if len(name) > name_width:
name = "%*s%s\n" % (formatter.current_indent, "", name)
indent_first = help_position
else:
name = "%*s%-*s " % (formatter.current_indent, "",
name_width, name)
indent_first = 0
result.append(name)
help_width = formatter.width - help_position
help_lines = textwrap.wrap(command.help, help_width)
result.append("%*s%s\n" % (indent_first, "", help_lines[0]))
result.extend(["%*s%s\n" % (help_position, "", line)
for line in help_lines[1:]])
result += ['\n']
formatter.dedent()
# Concatenate the original help message with the command list.
return out + "".join(result)
def __command_for_name(self, name):
"""
Return the command in self.commands matching the
given name. The name may either be the name of a subcommand or
an alias. If no subcommand matches, returns None.
Parameters:
name (str):
The name of the command to be matched.
"""
_command = None
for command in self.commands:
try:
if name == command.name or name in command.aliases:
_command = command
except TypeError:
pass
return _command
def parse_args(self, _args=None, _value=None):
"""
Just like the `parse_args` from OptionParser but add some more value.
Added Value:
---
- options: The option passed to the root parser
- command: the command object that was invoked
- command_opt: The option parsed to the command parser
- command_args: The positional arguments passed to the sub command
Parameters:
---
_args (any):
inherited from `optparse.OptionParser.parse_args`
_value (any):
inherited from `optparse.OptionParser.parse_args`
Example:
---
>>> parser = HypeParser(...)
>>> parser.add_option(...)
>>> ...
>>> options, command, \
... command_opt, command_args = parser.parse_args()
"""
self.commands.insert(len(self.commands), self._HelpCommand)
options, args = optparse.OptionParser.parse_args(self, _args, _value)
if not args:
# No command given, show the help message
self.print_help()
self.exit()
else:
command_name = args.pop(0)
command = self.__command_for_name(command_name)
if not command:
self.error('Unknown Command: {}'.format(command_name))
command_opt, command_args = command.parser.parse_args(args)
if command is self._HelpCommand:
if command_args:
command_name = command_args[0]
#: Check for the help command on the command arguments.
helpcommand = self.__command_for_name(command_name)
helpcommand.parser.print_help()
self.exit()
else:
self.print_help()
self.exit()
return options, command, command_opt, command_args
| 31.425373 | 92 | 0.572786 |
from typing import List
from hype.command import HypeCommand
import optparse
from optparse import HelpFormatter
import sys
import textwrap
class HypeParser(optparse.OptionParser):
_HelpCommand = HypeCommand('help', help="All details about the commands", aliases=('?'))
def __init__(
self,
commands: List[HypeCommand] = [],
*args,
**options
):
self.commands = commands
self.options = options
if 'usage' not in self.options:
self.options['usage'] = "%prog COMMAND [ARGS..]\n%prog help COMMAND"
super(HypeParser, self).__init__(*args, **options)
for command in self.commands:
command.parser.prog = "%s %s" % (self.get_prog_name(), command.name)
self.disable_interspersed_args()
def add_command(self, cmd: HypeCommand):
if not isinstance(cmd, HypeCommand):
raise TypeError('{} is not a instance of HypeCommand'.format(cmd))
self.commands.append(cmd)
def remove_command(self, name: str):
for command in self.commands:
if command.name == name:
self.commands.remove(command)
def format_help(self, formatter=None) -> str:
out = optparse.OptionParser.format_help(self, formatter)
if formatter == None:
formatter = self.formatter
result = ['\n']
result.append(formatter.format_heading('Commands'))
formatter.indent()
display_names = []
help_position = 0
for command in self.commands:
name = command.name
if command.aliases:
name += ' (%s)' % (', '.join(command.aliases))
display_names.append(name)
proposed_help_position = len(name) + formatter.current_indent + 2
if proposed_help_position <= formatter.max_help_position:
help_position = max(help_position, proposed_help_position)
for command, name in zip(self.commands, display_names):
name_width = help_position - formatter.current_indent - 2
if len(name) > name_width:
name = "%*s%s\n" % (formatter.current_indent, "", name)
indent_first = help_position
else:
name = "%*s%-*s " % (formatter.current_indent, "",
name_width, name)
indent_first = 0
result.append(name)
help_width = formatter.width - help_position
help_lines = textwrap.wrap(command.help, help_width)
result.append("%*s%s\n" % (indent_first, "", help_lines[0]))
result.extend(["%*s%s\n" % (help_position, "", line)
for line in help_lines[1:]])
result += ['\n']
formatter.dedent()
return out + "".join(result)
def __command_for_name(self, name):
_command = None
for command in self.commands:
try:
if name == command.name or name in command.aliases:
_command = command
except TypeError:
pass
return _command
def parse_args(self, _args=None, _value=None):
self.commands.insert(len(self.commands), self._HelpCommand)
options, args = optparse.OptionParser.parse_args(self, _args, _value)
if not args:
self.print_help()
self.exit()
else:
command_name = args.pop(0)
command = self.__command_for_name(command_name)
if not command:
self.error('Unknown Command: {}'.format(command_name))
command_opt, command_args = command.parser.parse_args(args)
if command is self._HelpCommand:
if command_args:
command_name = command_args[0]
helpcommand = self.__command_for_name(command_name)
helpcommand.parser.print_help()
self.exit()
else:
self.print_help()
self.exit()
return options, command, command_opt, command_args
| true | true |
f720ba72a3c86311008ec04f4371a49d7784b17c | 433 | py | Python | xlwt/__init__.py | drmelectronic/MIT | e28a82cd02dcc52ac233b89b43f29ede00993d11 | [
"MIT"
] | 292 | 2015-09-12T14:19:32.000Z | 2022-02-19T08:46:12.000Z | xlwt/__init__.py | drmelectronic/MIT | e28a82cd02dcc52ac233b89b43f29ede00993d11 | [
"MIT"
] | 4 | 2015-11-18T08:10:14.000Z | 2017-03-25T13:32:20.000Z | xlwt/__init__.py | drmelectronic/MIT | e28a82cd02dcc52ac233b89b43f29ede00993d11 | [
"MIT"
] | 131 | 2015-09-14T06:32:03.000Z | 2021-06-11T02:31:38.000Z | # -*- coding: windows-1252 -*-
__VERSION__ = '0.7.4'
import sys
if sys.version_info[:2] < (2, 3):
print >> sys.stderr, "Sorry, xlwt requires Python 2.3 or later"
sys.exit(1)
from Workbook import Workbook
from Worksheet import Worksheet
from Row import Row
from Column import Column
from Formatting import Font, Alignment, Borders, Pattern, Protection
from Style import XFStyle, easyxf, easyfont
from ExcelFormula import *
| 25.470588 | 68 | 0.741339 |
__VERSION__ = '0.7.4'
import sys
if sys.version_info[:2] < (2, 3):
print >> sys.stderr, "Sorry, xlwt requires Python 2.3 or later"
sys.exit(1)
from Workbook import Workbook
from Worksheet import Worksheet
from Row import Row
from Column import Column
from Formatting import Font, Alignment, Borders, Pattern, Protection
from Style import XFStyle, easyxf, easyfont
from ExcelFormula import *
| true | true |
f720bb34265c748c8a67a6a2025eb32ff567dad4 | 773 | py | Python | src/tools/reshape.py | Lin-Lei/CenterNet | 0778dfcf4fb8e5b013dda7ab8c680f232ca851b1 | [
"MIT"
] | null | null | null | src/tools/reshape.py | Lin-Lei/CenterNet | 0778dfcf4fb8e5b013dda7ab8c680f232ca851b1 | [
"MIT"
] | null | null | null | src/tools/reshape.py | Lin-Lei/CenterNet | 0778dfcf4fb8e5b013dda7ab8c680f232ca851b1 | [
"MIT"
] | null | null | null | # -*- coding: utf-8 -*-
"""
Created on Thu Aug 23 16:06:35 2018
@author: libo
"""
from PIL import Image
import os
def image_resize(image_path, new_path): # 统一图片尺寸
print('============>>修改图片尺寸')
for img_name in os.listdir(image_path):
img_path = image_path + "/" + img_name # 获取该图片全称
image = Image.open(img_path) # 打开特定一张图片
image = image.resize((512, 512)) # 设置需要转换的图片大小
# process the 1 channel image
image.save(new_path + '/' + img_name)
print("end the processing!")
if __name__ == '__main__':
print("ready for :::::::: ")
ori_path = r"Z:\pycharm_projects\ssd\VOC2007\JPEGImages" # 输入图片的文件夹路径
new_path = 'Z:/pycharm_projects/ssd/VOC2007/reshape' # resize之后的文件夹路径
image_resize(ori_path, new_path) | 30.92 | 74 | 0.635188 |
from PIL import Image
import os
def image_resize(image_path, new_path):
print('============>>修改图片尺寸')
for img_name in os.listdir(image_path):
img_path = image_path + "/" + img_name
image = Image.open(img_path)
image = image.resize((512, 512))
image.save(new_path + '/' + img_name)
print("end the processing!")
if __name__ == '__main__':
print("ready for :::::::: ")
ori_path = r"Z:\pycharm_projects\ssd\VOC2007\JPEGImages"
new_path = 'Z:/pycharm_projects/ssd/VOC2007/reshape'
image_resize(ori_path, new_path) | true | true |
f720bb35e7c423bcec868b5f5d320bcd94913cfe | 4,506 | py | Python | mturk/make_quiz.py | genp/flask_mturk | 83e22c7dfada6d35e52458582291997964182628 | [
"MIT"
] | null | null | null | mturk/make_quiz.py | genp/flask_mturk | 83e22c7dfada6d35e52458582291997964182628 | [
"MIT"
] | null | null | null | mturk/make_quiz.py | genp/flask_mturk | 83e22c7dfada6d35e52458582291997964182628 | [
"MIT"
] | null | null | null | import json
from app import db
from app.models import *
from utils import utils
# turn annotation labels by hit X into a quiz Job
def annotation_to_quiz(hit_id, alt_hit_id, quiz_label):
'''
hit_id and alt_hit_id should be for the same task. hit_id has the strictly correct answers and alt_hit_id has possibly correct.
'''
anns = utils.get_all_db_res('select value, patch_id, image_id, label_id from annotation where hit_id = %d' % hit_id)
cmd = {}
cmd['label'] = quiz_label
values, patch_ids, image_ids, label_ids = zip(*anns)
attr_ids = sorted(set(label_ids), key=lambda x: label_ids.index(x))
attributes = []
for id in attr_ids:
name = Label.query.get(id).name
attributes.append({'id': id, 'name': name})
cmd['attributes'] = attributes
unique_patch_ids = sorted(set(patch_ids), key=lambda x: patch_ids.index(x))
patches = []
for patch_id in unique_patch_ids:
p = Patch.query.get(patch_id)
seg = p.segmentation
img_id = p.image_id
patches.append({'id': patch_id, 'image_id': img_id, 'segmentation': str(seg)})
cmd['patches'] = patches
answers = {}
for idx, val in enumerate(values):
try:
cur_dict = answers[str(patch_ids[idx])]
except KeyError, e:
answers[str(patch_ids[idx])] = {}
cur_dict = answers[str(patch_ids[idx])]
cur_dict[str(label_ids[idx])] = 1 if val else 0
cmd['answers'] = answers
alt_anns = utils.get_all_db_res('select value, patch_id, image_id, label_id from annotation where hit_id = %d' % alt_hit_id)
values, patch_ids, image_ids, label_ids = zip(*alt_anns)
alt_answers = {}
for idx, val in enumerate(values):
try:
cur_dict = alt_answers[str(patch_ids[idx])]
except KeyError, e:
alt_answers[str(patch_ids[idx])] = {}
cur_dict = alt_answers[str(patch_ids[idx])]
cur_dict[str(label_ids[idx])] = 1 if val else 0
cmd['alt_answers'] = alt_answers
j = Jobs(cmd=json.dumps(cmd), job_type='quiz')
db.session.add(j)
db.session.commit()
return j.id
def allimgs_annotation_to_quiz(hit_id, alt_hit_id, quiz_label):
'''
hit_id and alt_hit_id should be for the same task. hit_id has the strictly correct answers and alt_hit_id has possibly correct.
'''
anns = utils.get_all_db_res('select value, patch_id, image_id, label_id from annotation where hit_id = %d' % hit_id)
cmd = {}
cmd['label'] = quiz_label
values, patch_ids, image_ids, label_ids = zip(*anns)
attr_id = label_ids[0]
name = Label.query.get(attr_id).name
attribute = {'id':attr_id, 'name': name}
cmd['attribute'] = attribute
unique_patch_ids = sorted(set(patch_ids), key=lambda x: patch_ids.index(x))
patches = []
# make patches have x, y, w, h
for patch_id in patch_ids:
p = Patch.query.get(patch_id)
seg = [json.loads(p.segmentation)[0]]
segx = [seg[0][ix] for ix in range(0,len(seg[0]),2)]
segy = [seg[0][iy] for iy in range(1,len(seg[0]),2)]
img_id = p.image_id
seg.append(p.x)
seg.append(p.y)
seg.append(p.width)
seg.append(p.height)
img = Image.query.get(img_id)
seg.append(img.width)
seg.append(img.height)
patches.append({'id': patch_id, 'image_id': img_id, 'segmentation': json.dumps(seg)})
cmd['patches'] = patches
answers = {}
for idx, val in enumerate(values):
try:
cur_dict = answers[str(patch_ids[idx])]
except KeyError, e:
answers[str(patch_ids[idx])] = {}
cur_dict = answers[str(patch_ids[idx])]
cur_dict[attr_id] = 1 if val else 0
cmd['answers'] = answers
alt_anns = utils.get_all_db_res('select value, patch_id, image_id, label_id from annotation where hit_id = %d' % alt_hit_id)
values, patch_ids, image_ids, label_ids = zip(*alt_anns)
attr_id = label_ids[0]
alt_answers = {}
for idx, val in enumerate(values):
try:
cur_dict = alt_answers[str(patch_ids[idx])]
except KeyError, e:
alt_answers[str(patch_ids[idx])] = {}
cur_dict = alt_answers[str(patch_ids[idx])]
cur_dict[attr_id] = 1 if val else 0
cmd['alt_answers'] = alt_answers
j = Jobs(cmd=json.dumps(cmd), job_type='quiz')
db.session.add(j)
db.session.commit()
return j.id
| 34.136364 | 131 | 0.622947 | import json
from app import db
from app.models import *
from utils import utils
def annotation_to_quiz(hit_id, alt_hit_id, quiz_label):
'''
hit_id and alt_hit_id should be for the same task. hit_id has the strictly correct answers and alt_hit_id has possibly correct.
'''
anns = utils.get_all_db_res('select value, patch_id, image_id, label_id from annotation where hit_id = %d' % hit_id)
cmd = {}
cmd['label'] = quiz_label
values, patch_ids, image_ids, label_ids = zip(*anns)
attr_ids = sorted(set(label_ids), key=lambda x: label_ids.index(x))
attributes = []
for id in attr_ids:
name = Label.query.get(id).name
attributes.append({'id': id, 'name': name})
cmd['attributes'] = attributes
unique_patch_ids = sorted(set(patch_ids), key=lambda x: patch_ids.index(x))
patches = []
for patch_id in unique_patch_ids:
p = Patch.query.get(patch_id)
seg = p.segmentation
img_id = p.image_id
patches.append({'id': patch_id, 'image_id': img_id, 'segmentation': str(seg)})
cmd['patches'] = patches
answers = {}
for idx, val in enumerate(values):
try:
cur_dict = answers[str(patch_ids[idx])]
except KeyError, e:
answers[str(patch_ids[idx])] = {}
cur_dict = answers[str(patch_ids[idx])]
cur_dict[str(label_ids[idx])] = 1 if val else 0
cmd['answers'] = answers
alt_anns = utils.get_all_db_res('select value, patch_id, image_id, label_id from annotation where hit_id = %d' % alt_hit_id)
values, patch_ids, image_ids, label_ids = zip(*alt_anns)
alt_answers = {}
for idx, val in enumerate(values):
try:
cur_dict = alt_answers[str(patch_ids[idx])]
except KeyError, e:
alt_answers[str(patch_ids[idx])] = {}
cur_dict = alt_answers[str(patch_ids[idx])]
cur_dict[str(label_ids[idx])] = 1 if val else 0
cmd['alt_answers'] = alt_answers
j = Jobs(cmd=json.dumps(cmd), job_type='quiz')
db.session.add(j)
db.session.commit()
return j.id
def allimgs_annotation_to_quiz(hit_id, alt_hit_id, quiz_label):
'''
hit_id and alt_hit_id should be for the same task. hit_id has the strictly correct answers and alt_hit_id has possibly correct.
'''
anns = utils.get_all_db_res('select value, patch_id, image_id, label_id from annotation where hit_id = %d' % hit_id)
cmd = {}
cmd['label'] = quiz_label
values, patch_ids, image_ids, label_ids = zip(*anns)
attr_id = label_ids[0]
name = Label.query.get(attr_id).name
attribute = {'id':attr_id, 'name': name}
cmd['attribute'] = attribute
unique_patch_ids = sorted(set(patch_ids), key=lambda x: patch_ids.index(x))
patches = []
for patch_id in patch_ids:
p = Patch.query.get(patch_id)
seg = [json.loads(p.segmentation)[0]]
segx = [seg[0][ix] for ix in range(0,len(seg[0]),2)]
segy = [seg[0][iy] for iy in range(1,len(seg[0]),2)]
img_id = p.image_id
seg.append(p.x)
seg.append(p.y)
seg.append(p.width)
seg.append(p.height)
img = Image.query.get(img_id)
seg.append(img.width)
seg.append(img.height)
patches.append({'id': patch_id, 'image_id': img_id, 'segmentation': json.dumps(seg)})
cmd['patches'] = patches
answers = {}
for idx, val in enumerate(values):
try:
cur_dict = answers[str(patch_ids[idx])]
except KeyError, e:
answers[str(patch_ids[idx])] = {}
cur_dict = answers[str(patch_ids[idx])]
cur_dict[attr_id] = 1 if val else 0
cmd['answers'] = answers
alt_anns = utils.get_all_db_res('select value, patch_id, image_id, label_id from annotation where hit_id = %d' % alt_hit_id)
values, patch_ids, image_ids, label_ids = zip(*alt_anns)
attr_id = label_ids[0]
alt_answers = {}
for idx, val in enumerate(values):
try:
cur_dict = alt_answers[str(patch_ids[idx])]
except KeyError, e:
alt_answers[str(patch_ids[idx])] = {}
cur_dict = alt_answers[str(patch_ids[idx])]
cur_dict[attr_id] = 1 if val else 0
cmd['alt_answers'] = alt_answers
j = Jobs(cmd=json.dumps(cmd), job_type='quiz')
db.session.add(j)
db.session.commit()
return j.id
| false | true |
f720bb687d0f99c146065e48003e17cc75396b8a | 2,926 | py | Python | applications/classification/evaluate_multiclass_labels.py | ShankarNara/shogun | 8ab196de16b8d8917e5c84770924c8d0f5a3d17c | [
"BSD-3-Clause"
] | 2,753 | 2015-01-02T11:34:13.000Z | 2022-03-25T07:04:27.000Z | applications/classification/evaluate_multiclass_labels.py | ShankarNara/shogun | 8ab196de16b8d8917e5c84770924c8d0f5a3d17c | [
"BSD-3-Clause"
] | 2,404 | 2015-01-02T19:31:41.000Z | 2022-03-09T10:58:22.000Z | applications/classification/evaluate_multiclass_labels.py | ShankarNara/shogun | 8ab196de16b8d8917e5c84770924c8d0f5a3d17c | [
"BSD-3-Clause"
] | 1,156 | 2015-01-03T01:57:21.000Z | 2022-03-26T01:06:28.000Z | #!/usr/bin/env python
# Copyright (c) The Shogun Machine Learning Toolbox
# Written (w) 2014 Daniel Pyrathon
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
#
# 1. Redistributions of source code must retain the above copyright notice, this
# list of conditions and the following disclaimer.
# 2. Redistributions in binary form must reproduce the above copyright notice,
# this list of conditions and the following disclaimer in the documentation
# and/or other materials provided with the distribution.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
# ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
# WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR
# ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
# (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
# ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
# SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#
# The views and conclusions contained in the software and documentation are those
# of the authors and should not be interpreted as representing official policies,
# either expressed or implied, of the Shogun Development Team.
import argparse
import logging
import numpy as np
from shogun import (LibSVMFile, MulticlassLabels, MulticlassAccuracy)
from utils import get_features_and_labels
LOGGER = logging.getLogger(__file__)
def parse_arguments():
parser = argparse.ArgumentParser(description="Evaluate predicted \
labels againsy bare truth")
parser.add_argument('--actual', required=True, type=str,
help='Path to LibSVM dataset.')
parser.add_argument('--predicted', required=True, type=str,
help='Path to serialized predicted labels')
return parser.parse_args()
def main(actual, predicted):
LOGGER.info("SVM Multiclass evaluator")
# Load SVMLight dataset
feats, labels = get_features_and_labels(LibSVMFile(actual))
# Load predicted labels
with open(predicted, 'r') as f:
predicted_labels_arr = np.array([float(l) for l in f])
predicted_labels = MulticlassLabels(predicted_labels_arr)
# Evaluate accuracy
multiclass_measures = MulticlassAccuracy()
LOGGER.info("Accuracy = %s" % multiclass_measures.evaluate(
labels, predicted_labels))
LOGGER.info("Confusion matrix:")
res = multiclass_measures.get_confusion_matrix(labels, predicted_labels)
print res
if __name__ == '__main__':
args = parse_arguments()
main(args.actual, args.predicted)
| 40.082192 | 82 | 0.774778 |
import argparse
import logging
import numpy as np
from shogun import (LibSVMFile, MulticlassLabels, MulticlassAccuracy)
from utils import get_features_and_labels
LOGGER = logging.getLogger(__file__)
def parse_arguments():
parser = argparse.ArgumentParser(description="Evaluate predicted \
labels againsy bare truth")
parser.add_argument('--actual', required=True, type=str,
help='Path to LibSVM dataset.')
parser.add_argument('--predicted', required=True, type=str,
help='Path to serialized predicted labels')
return parser.parse_args()
def main(actual, predicted):
LOGGER.info("SVM Multiclass evaluator")
feats, labels = get_features_and_labels(LibSVMFile(actual))
with open(predicted, 'r') as f:
predicted_labels_arr = np.array([float(l) for l in f])
predicted_labels = MulticlassLabels(predicted_labels_arr)
multiclass_measures = MulticlassAccuracy()
LOGGER.info("Accuracy = %s" % multiclass_measures.evaluate(
labels, predicted_labels))
LOGGER.info("Confusion matrix:")
res = multiclass_measures.get_confusion_matrix(labels, predicted_labels)
print res
if __name__ == '__main__':
args = parse_arguments()
main(args.actual, args.predicted)
| false | true |
f720bbb8b5465f2391f8ff3bd20b2b9312393ba6 | 5,112 | py | Python | BlindTest/display.py | smart-fun/Raspberry | e2ac8caff2732786bc51a7c5ab64507e7a9a8fac | [
"Apache-2.0"
] | null | null | null | BlindTest/display.py | smart-fun/Raspberry | e2ac8caff2732786bc51a7c5ab64507e7a9a8fac | [
"Apache-2.0"
] | null | null | null | BlindTest/display.py | smart-fun/Raspberry | e2ac8caff2732786bc51a7c5ab64507e7a9a8fac | [
"Apache-2.0"
] | null | null | null | import pygame as pg
import pygame_widgets as pw
from math import sin, cos
SCREEN_WIDTH = 640
SCREEN_HEIGHT = 480
WHITE = (255,255,255)
YELLOW = (220,220,0)
RED = (220,0,0)
GREY = (180,180,180)
BLACK = (0,0,0)
GREEN = (0,200,0)
BUTTON_COLOR = (0,0,220)
BUTTON_HOVER_COLOR = GREEN
BUTTON_PRESS_COLOR = (0,100,0)
def createScreen():
screen = pg.display.set_mode((SCREEN_WIDTH, SCREEN_HEIGHT))
screen.fill(GREY)
return screen
def displayCircle(screen, message, yellow, red):
x = SCREEN_WIDTH / 2
y = SCREEN_HEIGHT / 2
radius = SCREEN_HEIGHT / 4
if (yellow and red):
pg.draw.circle(screen, RED, [x, y], radius, 0, draw_top_right=True, draw_bottom_right=True)
pg.draw.circle(screen, YELLOW, [x, y], radius, 0, draw_top_left=True , draw_bottom_left=True)
elif yellow:
pg.draw.circle(screen, YELLOW, [x, y], radius, 0)
elif red:
pg.draw.circle(screen, RED, [x, y], radius, 0)
font = pg.font.SysFont(None, 40)
text = font.render(message, True, BLACK)
textRect = text.get_rect()
textRect.centerx = screen.get_rect().centerx
textRect.centery = screen.get_rect().centery
screen.blit(text,textRect)
def simulateNeoPixel(screen, neopixel):
size = 10
radius = 100
angle = 0
for color in neopixel.pixels:
x = int((SCREEN_WIDTH / 2) + radius*cos(angle))
y = int((SCREEN_HEIGHT / 2) - radius*sin(angle))
pg.draw.circle(screen, color, [x, y], size, 0)
angle += 3.14159 / 12
def displayStartButton(screen, callback):
width = 200
height = 50
x = (SCREEN_WIDTH - width) / 2
y = SCREEN_HEIGHT * 0.8
button = pw.Button(
screen, x, y, width, height, text='START',
fontSize=50,
textColour=(255,255,255),
inactiveColour=BUTTON_COLOR,
hoverColour=BUTTON_HOVER_COLOR,
pressedColour=BUTTON_PRESS_COLOR,
radius=10,
onClick=callback
)
return button
def displayYesButton(screen, callback):
width = 200
height = 50
x = (SCREEN_WIDTH * 0.45) - width
y = SCREEN_HEIGHT * 0.8
button = pw.Button(
screen, x, y, width, height, text='YES',
fontSize=50,
textColour=(255,255,255),
inactiveColour=BUTTON_COLOR,
hoverColour=BUTTON_HOVER_COLOR,
pressedColour=BUTTON_PRESS_COLOR,
radius=10,
onClick=callback
)
return button
def displayNoButton(screen, callback):
width = 200
height = 50
x = (SCREEN_WIDTH * 0.55)
y = SCREEN_HEIGHT * 0.8
button = pw.Button(
screen, x, y, width, height, text='NO',
fontSize=50,
textColour=(255,255,255),
inactiveColour=BUTTON_COLOR,
hoverColour=BUTTON_HOVER_COLOR,
pressedColour=BUTTON_PRESS_COLOR,
radius=10,
onClick=callback
)
return button
def createRoundButton(screen, callback, x, y, text, color):
width = 40
height = 40
button = pw.Button(
screen, x, y, width, height, text=text,
fontSize=60,
textColour=(255,255,255),
inactiveColour=color,
hoverColour=color,
pressedColour=color,
radius=20,
onClick=callback
)
return button
def displayIncYellowButton(screen, callback):
x = 20
y = SCREEN_HEIGHT * 0.4
return createRoundButton(screen, callback, x, y, "+", YELLOW)
def displayDecYellowButton(screen, callback):
x = 20
y = SCREEN_HEIGHT * 0.5
return createRoundButton(screen, callback, x, y, "-", YELLOW)
def displayIncRedButton(screen, callback):
x = SCREEN_WIDTH - 40 - 20
y = SCREEN_HEIGHT * 0.4
return createRoundButton(screen, callback, x, y, "+", RED)
def displayDecRedButton(screen, callback):
x = SCREEN_WIDTH - 40 - 20
y = SCREEN_HEIGHT * 0.5
return createRoundButton(screen, callback, x, y, "-", RED)
def createSkipButton(screen, callback):
width = 100
height = 40
x = (SCREEN_WIDTH - width) * 0.5
y = SCREEN_HEIGHT - 50 - 10
button = pw.Button(
screen, x, y, width, height, text="SKIP",
fontSize=30,
textColour=(255,255,255),
inactiveColour=BUTTON_COLOR,
hoverColour=BUTTON_HOVER_COLOR,
pressedColour=BUTTON_PRESS_COLOR,
radius=20,
onClick=callback
)
return button
def displayScore(screen, yellow, red):
font = pg.font.SysFont(None, 100)
text = font.render(str(yellow), True, YELLOW)
textRect = text.get_rect()
textRect.centerx = SCREEN_WIDTH * 0.17
textRect.centery = screen.get_rect().centery
screen.blit(text,textRect)
text = font.render(str(red), True, RED)
textRect = text.get_rect()
textRect.centerx = SCREEN_WIDTH * (1 - 0.17)
textRect.centery = screen.get_rect().centery
screen.blit(text,textRect)
def displayMusicTitle(screen, title):
font = pg.font.SysFont(None, 30)
text = font.render(str(title), True, BLACK)
textRect = text.get_rect()
textRect.centerx = int(SCREEN_WIDTH * 0.5)
textRect.centery = int(SCREEN_HEIGHT * 0.1)
screen.blit(text,textRect)
| 28.719101 | 101 | 0.635759 | import pygame as pg
import pygame_widgets as pw
from math import sin, cos
SCREEN_WIDTH = 640
SCREEN_HEIGHT = 480
WHITE = (255,255,255)
YELLOW = (220,220,0)
RED = (220,0,0)
GREY = (180,180,180)
BLACK = (0,0,0)
GREEN = (0,200,0)
BUTTON_COLOR = (0,0,220)
BUTTON_HOVER_COLOR = GREEN
BUTTON_PRESS_COLOR = (0,100,0)
def createScreen():
screen = pg.display.set_mode((SCREEN_WIDTH, SCREEN_HEIGHT))
screen.fill(GREY)
return screen
def displayCircle(screen, message, yellow, red):
x = SCREEN_WIDTH / 2
y = SCREEN_HEIGHT / 2
radius = SCREEN_HEIGHT / 4
if (yellow and red):
pg.draw.circle(screen, RED, [x, y], radius, 0, draw_top_right=True, draw_bottom_right=True)
pg.draw.circle(screen, YELLOW, [x, y], radius, 0, draw_top_left=True , draw_bottom_left=True)
elif yellow:
pg.draw.circle(screen, YELLOW, [x, y], radius, 0)
elif red:
pg.draw.circle(screen, RED, [x, y], radius, 0)
font = pg.font.SysFont(None, 40)
text = font.render(message, True, BLACK)
textRect = text.get_rect()
textRect.centerx = screen.get_rect().centerx
textRect.centery = screen.get_rect().centery
screen.blit(text,textRect)
def simulateNeoPixel(screen, neopixel):
size = 10
radius = 100
angle = 0
for color in neopixel.pixels:
x = int((SCREEN_WIDTH / 2) + radius*cos(angle))
y = int((SCREEN_HEIGHT / 2) - radius*sin(angle))
pg.draw.circle(screen, color, [x, y], size, 0)
angle += 3.14159 / 12
def displayStartButton(screen, callback):
width = 200
height = 50
x = (SCREEN_WIDTH - width) / 2
y = SCREEN_HEIGHT * 0.8
button = pw.Button(
screen, x, y, width, height, text='START',
fontSize=50,
textColour=(255,255,255),
inactiveColour=BUTTON_COLOR,
hoverColour=BUTTON_HOVER_COLOR,
pressedColour=BUTTON_PRESS_COLOR,
radius=10,
onClick=callback
)
return button
def displayYesButton(screen, callback):
width = 200
height = 50
x = (SCREEN_WIDTH * 0.45) - width
y = SCREEN_HEIGHT * 0.8
button = pw.Button(
screen, x, y, width, height, text='YES',
fontSize=50,
textColour=(255,255,255),
inactiveColour=BUTTON_COLOR,
hoverColour=BUTTON_HOVER_COLOR,
pressedColour=BUTTON_PRESS_COLOR,
radius=10,
onClick=callback
)
return button
def displayNoButton(screen, callback):
width = 200
height = 50
x = (SCREEN_WIDTH * 0.55)
y = SCREEN_HEIGHT * 0.8
button = pw.Button(
screen, x, y, width, height, text='NO',
fontSize=50,
textColour=(255,255,255),
inactiveColour=BUTTON_COLOR,
hoverColour=BUTTON_HOVER_COLOR,
pressedColour=BUTTON_PRESS_COLOR,
radius=10,
onClick=callback
)
return button
def createRoundButton(screen, callback, x, y, text, color):
width = 40
height = 40
button = pw.Button(
screen, x, y, width, height, text=text,
fontSize=60,
textColour=(255,255,255),
inactiveColour=color,
hoverColour=color,
pressedColour=color,
radius=20,
onClick=callback
)
return button
def displayIncYellowButton(screen, callback):
x = 20
y = SCREEN_HEIGHT * 0.4
return createRoundButton(screen, callback, x, y, "+", YELLOW)
def displayDecYellowButton(screen, callback):
x = 20
y = SCREEN_HEIGHT * 0.5
return createRoundButton(screen, callback, x, y, "-", YELLOW)
def displayIncRedButton(screen, callback):
x = SCREEN_WIDTH - 40 - 20
y = SCREEN_HEIGHT * 0.4
return createRoundButton(screen, callback, x, y, "+", RED)
def displayDecRedButton(screen, callback):
x = SCREEN_WIDTH - 40 - 20
y = SCREEN_HEIGHT * 0.5
return createRoundButton(screen, callback, x, y, "-", RED)
def createSkipButton(screen, callback):
width = 100
height = 40
x = (SCREEN_WIDTH - width) * 0.5
y = SCREEN_HEIGHT - 50 - 10
button = pw.Button(
screen, x, y, width, height, text="SKIP",
fontSize=30,
textColour=(255,255,255),
inactiveColour=BUTTON_COLOR,
hoverColour=BUTTON_HOVER_COLOR,
pressedColour=BUTTON_PRESS_COLOR,
radius=20,
onClick=callback
)
return button
def displayScore(screen, yellow, red):
font = pg.font.SysFont(None, 100)
text = font.render(str(yellow), True, YELLOW)
textRect = text.get_rect()
textRect.centerx = SCREEN_WIDTH * 0.17
textRect.centery = screen.get_rect().centery
screen.blit(text,textRect)
text = font.render(str(red), True, RED)
textRect = text.get_rect()
textRect.centerx = SCREEN_WIDTH * (1 - 0.17)
textRect.centery = screen.get_rect().centery
screen.blit(text,textRect)
def displayMusicTitle(screen, title):
font = pg.font.SysFont(None, 30)
text = font.render(str(title), True, BLACK)
textRect = text.get_rect()
textRect.centerx = int(SCREEN_WIDTH * 0.5)
textRect.centery = int(SCREEN_HEIGHT * 0.1)
screen.blit(text,textRect)
| true | true |
f720bbc6a4f8599443bb6753b941ccb39af1e390 | 647 | py | Python | merchant/migrations/0001_initial.py | Pesenin-Team/pesenin | 6b3dcc84e6e48768ce231ffedc43c56981fc6606 | [
"MIT"
] | 4 | 2019-10-15T12:35:15.000Z | 2019-10-16T12:38:51.000Z | merchant/migrations/0001_initial.py | Pesenin-Team/pesenin | 6b3dcc84e6e48768ce231ffedc43c56981fc6606 | [
"MIT"
] | null | null | null | merchant/migrations/0001_initial.py | Pesenin-Team/pesenin | 6b3dcc84e6e48768ce231ffedc43c56981fc6606 | [
"MIT"
] | null | null | null | # Generated by Django 2.2.6 on 2019-10-17 10:38
from django.db import migrations, models
class Migration(migrations.Migration):
initial = True
dependencies = [
]
operations = [
migrations.CreateModel(
name='Merchant',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('nama_merchant', models.CharField(max_length=100)),
('desc', models.CharField(max_length=200)),
('link_gambar', models.CharField(max_length=200)),
],
),
]
| 26.958333 | 115 | 0.55796 |
from django.db import migrations, models
class Migration(migrations.Migration):
initial = True
dependencies = [
]
operations = [
migrations.CreateModel(
name='Merchant',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('nama_merchant', models.CharField(max_length=100)),
('desc', models.CharField(max_length=200)),
('link_gambar', models.CharField(max_length=200)),
],
),
]
| true | true |
f720bbea22a5dbf7c0ffdeeda3c286344fc9500b | 12,806 | py | Python | protonvpn-applet.py | seadanda/protonvpn-applet | f32978192f523ed8ee661d200c508b221e0ffccd | [
"MIT"
] | 15 | 2019-09-13T07:11:52.000Z | 2021-05-23T10:13:57.000Z | protonvpn-applet.py | seadanda/pvpn-applet | f32978192f523ed8ee661d200c508b221e0ffccd | [
"MIT"
] | 11 | 2019-11-26T12:08:20.000Z | 2020-10-24T13:08:24.000Z | protonvpn-applet.py | seadanda/pvpn-applet | f32978192f523ed8ee661d200c508b221e0ffccd | [
"MIT"
] | 2 | 2019-11-24T00:44:55.000Z | 2020-06-28T20:31:42.000Z | #!/usr/bin/env python3
import sys
import subprocess
import functools
from enum import Enum
import gi
gi.require_version('Notify', '0.7')
from gi.repository import Notify
from PyQt5.QtWidgets import QApplication, QMainWindow, QWidget, QSystemTrayIcon, QMenu, QAction, qApp, QMessageBox
from PyQt5.QtCore import QSize, QThread, pyqtSignal
from PyQt5.QtGui import QIcon
from protonvpn_cli import utils, country_codes
from protonvpn_cli.utils import is_connected
PROTONVPN_APPLET_VERSION = "0.1.7"
class VPNStatusException(Exception):
"""General exception to throw when anything goes wrong
"""
class VPNCommand(Enum):
"""Commands to run the CLI
"""
status = 'protonvpn s'
connect_fastest = 'protonvpn c -f'
disconnect = 'protonvpn d'
version = 'protonvpn -v'
connect_random = 'protonvpn c -r'
connect_fastest_cc = 'protonvpn c --cc'
connect_fastest_p2p = 'protonvpn c --p2p'
connect_fastest_sc = 'protonvpn c --sc'
connect_fastest_tor = 'protonvpn c --tor'
reconnect = 'protonvpn r'
def check_single_instance():
"""Use pgrep to check if protonvpn-applet is already running
"""
pid = None
try:
pid = subprocess.run('pgrep protonvpn-applet'.split(), check=True, capture_output=True)
except subprocess.CalledProcessError:
try:
pid = subprocess.run('pgrep protonvpn-applet.py'.split(), check=True, capture_output=True)
except subprocess.CalledProcessError:
pass
if pid is not None:
print('There is an instance already running.')
sys.exit(1)
class Status(Enum):
"""Enum to keep track of the previous connection state
"""
connected = 'Connected'
disconnected = 'Disconnected'
class Polling(QThread):
"""Thread to check the VPN state every second and notifies on disconnection
"""
def __init__(self, applet):
QThread.__init__(self)
self.applet = applet
def __del__(self):
self.wait()
def run(self):
while self.applet.is_polling():
if is_connected():
self.applet.tray_icon.setIcon(QIcon('icons/16x16/protonvpn-connected.png'))
self.applet.previous_status = Status.connected
else:
# notify on disconnection
if self.applet.show_notifications() and self.applet.previous_status == Status.connected:
CheckStatus(self).start()
self.applet.tray_icon.setIcon(QIcon('icons/16x16/protonvpn-disconnected.png'))
self.applet.previous_status = Status.disconnected
self.sleep(1)
class ConnectVPN(QThread):
"""Thread to connect using the specified profile
"""
def __init__(self, applet, command):
QThread.__init__(self)
self.applet = applet
self.command = command
print(self.command)
def __del__(self):
self.wait()
def run(self):
subprocess.run([self.applet.auth] + self.command.split(), check=False)
self.applet.status_vpn()
class DisconnectVPN(QThread):
"""Thread to disconnect the VPN
"""
def __init__(self, applet):
QThread.__init__(self)
self.applet = applet
def __del__(self):
self.wait()
def run(self):
subprocess.run([self.applet.auth] + VPNCommand.disconnect.value.split(), check=False)
self.applet.status_vpn()
class ReconnectVPN(QThread):
"""Thread to connect using previously used profile
"""
def __init__(self, applet):
QThread.__init__(self)
self.applet = applet
def __del__(self):
self.wait()
def run(self):
subprocess.run([self.applet.auth] + VPNCommand.reconnect.value.split(), check=False)
self.applet.status_vpn()
class CheckStatus(QThread):
"""Thread to report ProtonVPN status
"""
def __init__(self, applet):
QThread.__init__(self)
self.applet = applet
def __del__(self):
self.wait()
def run(self):
result = subprocess.run(VPNCommand.status.value.split(), check=False, capture_output=True)
Notify.Notification.new(result.stdout.decode()).show()
class CheckProtonVPNVersion(QThread):
"""Thread to check version
"""
protonvpn_version_ready = pyqtSignal(str)
def __init__(self, parent=None):
super().__init__(parent=parent)
self.parent = parent
self.version = 'None'
def __del__(self):
self.wait()
def run(self):
self.version = subprocess.check_output(VPNCommand.version.value.split()).decode(sys.stdout.encoding)
self.protonvpn_version_ready.emit(self.version)
class PVPNApplet(QMainWindow):
"""Main applet body
"""
tray_icon = None
polling = True
previous_status = None
#auth = 'pkexec'
auth = 'sudo'
# Override the class constructor
def __init__(self):
super(PVPNApplet, self).__init__()
self.country_codes = country_codes # Keep a list of country codes
# Init QSystemTrayIcon
self.tray_icon = QSystemTrayIcon(self)
self.tray_icon.setIcon(QIcon('icons/16x16/protonvpn-disconnected.png'))
# Init libnotify
Notify.init('ProtonVPN')
# Refresh server list, store the resulting servers so we can populate the menu
self.servers = self.update_available_servers()
# Menu actions
connect_fastest_action = QAction('Connect fastest', self)
reconnect_action = QAction('Reconnect', self)
disconnect_action = QAction('Disconnect', self)
status_action = QAction('Status', self)
connect_fastest_sc_action = QAction('Secure Core', self)
connect_fastest_p2p_action = QAction('P2P', self)
connect_fastest_tor_action = QAction('Tor', self)
connect_random_action = QAction('Random', self)
show_protonvpn_applet_version_action = QAction('About ProtonVPN-Applet', self)
show_protonvpn_version_action = QAction('About ProtonVPN', self)
quit_action = QAction('Exit', self)
self.show_notifications_action = QAction('Show Notifications')
self.show_notifications_action.setCheckable(True)
self.show_notifications_action.setChecked(False)
# Triggers
quit_action.triggered.connect(qApp.quit)
connect_fastest_action.triggered.connect(self.connect_fastest)
disconnect_action.triggered.connect(self.disconnect_vpn)
status_action.triggered.connect(self.status_vpn)
show_protonvpn_applet_version_action.triggered.connect(self.show_protonvpn_applet_version)
show_protonvpn_version_action.triggered.connect(self.get_protonvpn_version)
connect_fastest_sc_action.triggered.connect(self.connect_fastest_sc)
connect_fastest_p2p_action.triggered.connect(self.connect_fastest_p2p)
connect_fastest_tor_action.triggered.connect(self.connect_fastest_tor)
connect_random_action.triggered.connect(self.connect_random)
reconnect_action.triggered.connect(self.reconnect_vpn)
# Generate connection menu for specific countries
connect_country_actions = []
for country_name in self.get_available_countries(self.servers):
# Get the ISO-3166 Alpha-2 country code
country_name_to_code = {v: k for k, v in country_codes.country_codes.items()}
country_code = country_name_to_code[country_name]
# Dynamically create functions for connecting to each country; each function just passes its respective
# country code to `self.connect_fastest_cc()`
setattr(self, f'connect_fastest_{country_code}', functools.partial(self.connect_fastest_cc, country_code))
# Generate an action for each country; set up the trigger; append to actions list
country_action = QAction(f'{country_name}', self)
country_action.triggered.connect(getattr(self, f'connect_fastest_{country_code}'))
connect_country_actions.append(country_action)
# Create a scrollable country connection menu
connect_country_menu = QMenu("Country...", self)
connect_country_menu.setStyleSheet('QMenu { menu-scrollable: 1; }')
connect_country_menu.addActions(connect_country_actions)
# Generate connection menu
connection_menu = QMenu("Other connections...", self)
connection_menu.addMenu(connect_country_menu)
connection_menu.addAction(connect_fastest_sc_action)
connection_menu.addAction(connect_fastest_p2p_action)
connection_menu.addAction(connect_fastest_tor_action)
connection_menu.addAction(connect_random_action)
# Draw menu
tray_menu = QMenu()
tray_menu.addAction(connect_fastest_action)
tray_menu.addAction(reconnect_action)
tray_menu.addMenu(connection_menu)
tray_menu.addAction(disconnect_action)
tray_menu.addAction(status_action)
tray_menu.addSeparator()
tray_menu.addAction(self.show_notifications_action)
tray_menu.addAction(show_protonvpn_applet_version_action)
tray_menu.addAction(show_protonvpn_version_action)
tray_menu.addAction(quit_action)
self.tray_icon.setContextMenu(tray_menu)
self.tray_icon.show()
# Polling thread
self.start_polling()
def is_polling(self):
    """Report whether the background status-polling loop should keep running."""
    return self.polling

def kill_polling(self):
    """Ask the background polling loop to stop after its current iteration."""
    self.polling = False

def start_polling(self):
    """(Re)start the background thread that tracks the VPN connection state."""
    self.polling = True
    self.polling_thread = Polling(self)
    self.polling_thread.start()

def _connect_vpn(self, command):
    """Pause polling, run *command* in a worker thread, resume polling when done."""
    self.kill_polling()
    worker = ConnectVPN(self, command)
    worker.finished.connect(self.start_polling)
    worker.start()
def connect_fastest(self):
    """Connect to the overall fastest server."""
    self._connect_vpn(VPNCommand.connect_fastest.value)

def connect_fastest_p2p(self):
    """Connect to the fastest P2P-enabled server."""
    self._connect_vpn(VPNCommand.connect_fastest_p2p.value)

def connect_fastest_sc(self):
    """Connect to the fastest Secure Core server."""
    self._connect_vpn(VPNCommand.connect_fastest_sc.value)

def connect_fastest_cc(self, cc):
    """Connect to the fastest server in the country with ISO code *cc*."""
    self._connect_vpn(f'{VPNCommand.connect_fastest_cc.value} {cc}')

def connect_fastest_tor(self):
    """Connect to the fastest Tor-enabled server."""
    self._connect_vpn(VPNCommand.connect_fastest_tor.value)

def connect_random(self):
    """Connect to a randomly chosen server."""
    self._connect_vpn(VPNCommand.connect_random.value)

def disconnect_vpn(self):
    """Disconnect from the VPN in a background worker thread."""
    worker = DisconnectVPN(self)
    worker.start()

def status_vpn(self):
    """Show the current VPN status via a background worker thread."""
    worker = CheckStatus(self)
    worker.start()

def reconnect_vpn(self):
    """Reconnect to the previous server in a background worker thread."""
    worker = ReconnectVPN(self)
    worker.start()
# Intercept the window-close event so the applet keeps running in the tray.
def closeEvent(self, event):
    """Hide the window instead of quitting when the user closes it."""
    event.ignore()
    self.hide()

def show_notifications(self):
    """Return True when desktop notifications are enabled in the tray menu."""
    return self.show_notifications_action.isChecked()
def show_protonvpn_applet_version(self):
    """Show an About dialog with the applet version and author links."""
    name = '© 2020 Dónal Murray'
    email = 'dmurray654@gmail.com'
    github = 'https://github.com/seadanda/protonvpn-applet'
    # BUG FIX: the email link previously used href='<address>' (not a working
    # link) and each line carried its own <center> tag even though the joined
    # text is wrapped in <center> below; use mailto: and wrap exactly once.
    info = [f'Version: {PROTONVPN_APPLET_VERSION}',
            name,
            f"<a href='mailto:{email}'>{email}</a>",
            f"<a href='{github}'>{github}</a>"]
    centered_text = f'<center>{"<br>".join(info)}</center>'
    QMessageBox.information(self, 'protonvpn-applet', centered_text)
def get_protonvpn_version(self):
    """Start a CheckProtonVPNVersion worker; its ready-signal triggers
    `self.show_protonvpn_version` with the fetched version string.
    """
    print('called get_protonvpn_version')
    checker = CheckProtonVPNVersion(self)
    checker.protonvpn_version_ready.connect(self.show_protonvpn_version)
    checker.start()

def show_protonvpn_version(self, version):
    """Show the ProtonVPN CLI version in a message box.

    Parameters
    ----------
    version : str
        Version number to be shown.
    """
    print('called show_protonvpn_version')
    QMessageBox.information(self, 'ProtonVPN Version', f'Version: {version}')
def update_available_servers(self):
    """Refresh the cached server list from the ProtonVPN API and return it."""
    utils.pull_server_data()
    return utils.get_servers()

@staticmethod
def get_available_countries(servers):
    """Return the sorted list of country names that have at least one server."""
    # sorted() already returns a list, so the redundant list() wrapper around
    # the set comprehension has been dropped (behaviour unchanged).
    return sorted({utils.get_country_name(server['ExitCountry']) for server in servers})
if __name__ == '__main__':
    check_single_instance()  # refuse to start if another applet is already running
    app = QApplication(sys.argv)
    mw = PVPNApplet()  # keep a reference so the applet is not garbage-collected
    sys.exit(app.exec())
| 33.878307 | 119 | 0.679291 |
import sys
import subprocess
import functools
from enum import Enum
import gi
gi.require_version('Notify', '0.7')
from gi.repository import Notify
from PyQt5.QtWidgets import QApplication, QMainWindow, QWidget, QSystemTrayIcon, QMenu, QAction, qApp, QMessageBox
from PyQt5.QtCore import QSize, QThread, pyqtSignal
from PyQt5.QtGui import QIcon
from protonvpn_cli import utils, country_codes
from protonvpn_cli.utils import is_connected
PROTONVPN_APPLET_VERSION = "0.1.7"
class VPNStatusException(Exception):
    """Raised when the VPN status cannot be determined."""
    # BUG FIX: this class previously had no body at all (its docstring was
    # stripped), which is a SyntaxError; restore a valid body.
class VPNCommand(Enum):
    """protonvpn-cli command lines used by the applet's worker threads."""
    status = 'protonvpn s'
    connect_fastest = 'protonvpn c -f'
    disconnect = 'protonvpn d'
    version = 'protonvpn -v'
    connect_random = 'protonvpn c -r'
    connect_fastest_cc = 'protonvpn c --cc'    # fastest in a given country; country code appended by caller
    connect_fastest_p2p = 'protonvpn c --p2p'
    connect_fastest_sc = 'protonvpn c --sc'    # Secure Core
    connect_fastest_tor = 'protonvpn c --tor'
    reconnect = 'protonvpn r'
def check_single_instance():
    """Exit with status 1 if another protonvpn-applet process is running."""
    found = None
    for pattern in ('protonvpn-applet', 'protonvpn-applet.py'):
        try:
            found = subprocess.run(['pgrep', pattern], check=True, capture_output=True)
            break
        except subprocess.CalledProcessError:
            continue
    if found is not None:
        print('There is an instance already running.')
        sys.exit(1)
class Status(Enum):
    """Last observed VPN connection state, used to detect transitions."""
    connected = 'Connected'
    disconnected = 'Disconnected'
class Polling(QThread):
    """Worker that polls the VPN connection once per second.

    Updates the tray icon to match the connection state and, on a
    connected -> disconnected transition, pops a status notification
    (when notifications are enabled).
    """
    def __init__(self, applet):
        QThread.__init__(self)
        self.applet = applet
    def __del__(self):
        self.wait()
    def run(self):
        while self.applet.is_polling():
            if is_connected():
                self.applet.tray_icon.setIcon(QIcon('icons/16x16/protonvpn-connected.png'))
                self.applet.previous_status = Status.connected
            else:
                # notify only on the transition away from 'connected'
                if self.applet.show_notifications() and self.applet.previous_status == Status.connected:
                    CheckStatus(self).start()
                self.applet.tray_icon.setIcon(QIcon('icons/16x16/protonvpn-disconnected.png'))
                self.applet.previous_status = Status.disconnected
            self.sleep(1)
class ConnectVPN(QThread):
    """Worker that runs a protonvpn connect command, then refreshes status."""
    def __init__(self, applet, command):
        QThread.__init__(self)
        self.applet = applet
        self.command = command
        print(self.command)
    def __del__(self):
        self.wait()
    def run(self):
        # prefix with the applet's elevation command (default 'sudo')
        subprocess.run([self.applet.auth] + self.command.split(), check=False)
        self.applet.status_vpn()
class DisconnectVPN(QThread):
    """Worker that runs 'protonvpn d', then refreshes status."""
    def __init__(self, applet):
        QThread.__init__(self)
        self.applet = applet
    def __del__(self):
        self.wait()
    def run(self):
        subprocess.run([self.applet.auth] + VPNCommand.disconnect.value.split(), check=False)
        self.applet.status_vpn()
class ReconnectVPN(QThread):
    """Worker that runs 'protonvpn r', then refreshes status."""
    def __init__(self, applet):
        QThread.__init__(self)
        self.applet = applet
    def __del__(self):
        self.wait()
    def run(self):
        subprocess.run([self.applet.auth] + VPNCommand.reconnect.value.split(), check=False)
        self.applet.status_vpn()
class CheckStatus(QThread):
    """Worker that runs 'protonvpn s' and shows its output as a notification."""
    def __init__(self, applet):
        QThread.__init__(self)
        self.applet = applet
    def __del__(self):
        self.wait()
    def run(self):
        # NOTE: status is queried without the elevation prefix -- presumably
        # 'protonvpn s' works unprivileged; confirm on target systems
        result = subprocess.run(VPNCommand.status.value.split(), check=False, capture_output=True)
        Notify.Notification.new(result.stdout.decode()).show()
class CheckProtonVPNVersion(QThread):
    """Worker that fetches the protonvpn-cli version and emits it as a signal."""
    protonvpn_version_ready = pyqtSignal(str)
    def __init__(self, parent=None):
        super().__init__(parent=parent)
        self.parent = parent
        self.version = 'None'
    def __del__(self):
        self.wait()
    def run(self):
        self.version = subprocess.check_output(VPNCommand.version.value.split()).decode(sys.stdout.encoding)
        self.protonvpn_version_ready.emit(self.version)
class PVPNApplet(QMainWindow):
    """System-tray applet wrapping the protonvpn-cli command line tool."""
    # class-level defaults; instance attributes overwrite these in __init__
    tray_icon = None
    polling = True            # whether the Polling thread should keep running
    previous_status = None    # last observed Status, for transition detection
    auth = 'sudo'             # elevation prefix prepended to protonvpn commands
    def __init__(self):
        """Build the tray icon and its menus, then start status polling."""
        super(PVPNApplet, self).__init__()
        self.country_codes = country_codes
        self.tray_icon = QSystemTrayIcon(self)
        self.tray_icon.setIcon(QIcon('icons/16x16/protonvpn-disconnected.png'))
        Notify.init('ProtonVPN')
        self.servers = self.update_available_servers()
        # menu actions
        connect_fastest_action = QAction('Connect fastest', self)
        reconnect_action = QAction('Reconnect', self)
        disconnect_action = QAction('Disconnect', self)
        status_action = QAction('Status', self)
        connect_fastest_sc_action = QAction('Secure Core', self)
        connect_fastest_p2p_action = QAction('P2P', self)
        connect_fastest_tor_action = QAction('Tor', self)
        connect_random_action = QAction('Random', self)
        show_protonvpn_applet_version_action = QAction('About ProtonVPN-Applet', self)
        show_protonvpn_version_action = QAction('About ProtonVPN', self)
        quit_action = QAction('Exit', self)
        # NOTE(review): created without a parent, unlike the actions above --
        # kept alive only via the instance attribute; confirm this is intended
        self.show_notifications_action = QAction('Show Notifications')
        self.show_notifications_action.setCheckable(True)
        self.show_notifications_action.setChecked(False)
        # wire actions to their handlers
        quit_action.triggered.connect(qApp.quit)
        connect_fastest_action.triggered.connect(self.connect_fastest)
        disconnect_action.triggered.connect(self.disconnect_vpn)
        status_action.triggered.connect(self.status_vpn)
        show_protonvpn_applet_version_action.triggered.connect(self.show_protonvpn_applet_version)
        show_protonvpn_version_action.triggered.connect(self.get_protonvpn_version)
        connect_fastest_sc_action.triggered.connect(self.connect_fastest_sc)
        connect_fastest_p2p_action.triggered.connect(self.connect_fastest_p2p)
        connect_fastest_tor_action.triggered.connect(self.connect_fastest_tor)
        connect_random_action.triggered.connect(self.connect_random)
        reconnect_action.triggered.connect(self.reconnect_vpn)
        # build one connect action per available country
        connect_country_actions = []
        for country_name in self.get_available_countries(self.servers):
            # NOTE(review): this reverse map is rebuilt on every iteration;
            # it could be hoisted out of the loop
            country_name_to_code = {v: k for k, v in country_codes.country_codes.items()}
            country_code = country_name_to_code[country_name]
            # dynamically create a bound per-country connect function
            setattr(self, f'connect_fastest_{country_code}', functools.partial(self.connect_fastest_cc, country_code))
            country_action = QAction(f'{country_name}', self)
            country_action.triggered.connect(getattr(self, f'connect_fastest_{country_code}'))
            connect_country_actions.append(country_action)
        # scrollable country submenu
        connect_country_menu = QMenu("Country...", self)
        connect_country_menu.setStyleSheet('QMenu { menu-scrollable: 1; }')
        connect_country_menu.addActions(connect_country_actions)
        # secondary connection submenu
        connection_menu = QMenu("Other connections...", self)
        connection_menu.addMenu(connect_country_menu)
        connection_menu.addAction(connect_fastest_sc_action)
        connection_menu.addAction(connect_fastest_p2p_action)
        connection_menu.addAction(connect_fastest_tor_action)
        connection_menu.addAction(connect_random_action)
        # top-level tray menu
        tray_menu = QMenu()
        tray_menu.addAction(connect_fastest_action)
        tray_menu.addAction(reconnect_action)
        tray_menu.addMenu(connection_menu)
        tray_menu.addAction(disconnect_action)
        tray_menu.addAction(status_action)
        tray_menu.addSeparator()
        tray_menu.addAction(self.show_notifications_action)
        tray_menu.addAction(show_protonvpn_applet_version_action)
        tray_menu.addAction(show_protonvpn_version_action)
        tray_menu.addAction(quit_action)
        self.tray_icon.setContextMenu(tray_menu)
        self.tray_icon.show()
        self.start_polling()
    def is_polling(self):
        """Report whether the polling loop should keep running."""
        return self.polling
    def kill_polling(self):
        """Ask the polling loop to stop after its current iteration."""
        self.polling = False
    def start_polling(self):
        """(Re)start the background status-polling thread."""
        self.polling = True
        self.polling_thread = Polling(self)
        self.polling_thread.start()
    def _connect_vpn(self, command):
        """Pause polling, run *command* in a worker, resume polling when done."""
        self.kill_polling()
        connect_thread = ConnectVPN(self, command)
        connect_thread.finished.connect(self.start_polling)
        connect_thread.start()
    def connect_fastest(self):
        """Connect to the overall fastest server."""
        self._connect_vpn(VPNCommand.connect_fastest.value)
    def connect_fastest_p2p(self):
        """Connect to the fastest P2P-enabled server."""
        self._connect_vpn(VPNCommand.connect_fastest_p2p.value)
    def connect_fastest_sc(self):
        """Connect to the fastest Secure Core server."""
        self._connect_vpn(VPNCommand.connect_fastest_sc.value)
    def connect_fastest_cc(self, cc):
        """Connect to the fastest server in the country with ISO code *cc*."""
        command = VPNCommand.connect_fastest_cc.value + f' {cc}'
        self._connect_vpn(command)
    def connect_fastest_tor(self):
        """Connect to the fastest Tor-enabled server."""
        self._connect_vpn(VPNCommand.connect_fastest_tor.value)
    def connect_random(self):
        """Connect to a randomly chosen server."""
        self._connect_vpn(VPNCommand.connect_random.value)
    def disconnect_vpn(self):
        """Disconnect from the VPN in a background worker."""
        disconnect_thread = DisconnectVPN(self)
        disconnect_thread.start()
    def status_vpn(self):
        """Show the current VPN status via a background worker."""
        status_thread = CheckStatus(self)
        status_thread.start()
    def reconnect_vpn(self):
        """Reconnect to the previous server in a background worker."""
        reconnect_thread = ReconnectVPN(self)
        reconnect_thread.start()
    def closeEvent(self, event):
        """Hide the window instead of quitting when the user closes it."""
        event.ignore()
        self.hide()
    def show_notifications(self):
        """Return True when desktop notifications are enabled in the menu."""
        return self.show_notifications_action.isChecked()
    def show_protonvpn_applet_version(self):
        """Show an About dialog with the applet version and author links."""
        name = '© 2020 Dónal Murray'
        email = 'dmurray654@gmail.com'
        github = 'https://github.com/seadanda/protonvpn-applet'
        info = [f'<center>Version: {PROTONVPN_APPLET_VERSION}',
                f'{name}',
                f"<a href='{email}'>{email}</a>",
                f"<a href='{github}'>{github}</a></center>"]
        centered_text = f'<center>{"<br>".join(info)}</center>'
        QMessageBox.information(self, 'protonvpn-applet', centered_text)
    def get_protonvpn_version(self):
        """Fetch the protonvpn-cli version asynchronously, then display it."""
        print('called get_protonvpn_version')
        check_protonvpn_version_thread = CheckProtonVPNVersion(self)
        check_protonvpn_version_thread.protonvpn_version_ready.connect(self.show_protonvpn_version)
        check_protonvpn_version_thread.start()
    def show_protonvpn_version(self, version):
        """Show the ProtonVPN CLI version (str) in a message box."""
        print('called show_protonvpn_version')
        QMessageBox.information(self, 'ProtonVPN Version', f'Version: {version}')
    def update_available_servers(self):
        """Refresh the cached server list from the API and return it."""
        utils.pull_server_data()
        return utils.get_servers()
    @staticmethod
    def get_available_countries(servers):
        """Return sorted country names that have at least one server."""
        return sorted(list({utils.get_country_name(server['ExitCountry']) for server in servers}))
if __name__ == '__main__':
    check_single_instance()  # refuse to start if another applet is already running
    app = QApplication(sys.argv)
    mw = PVPNApplet()  # keep a reference so the applet is not garbage-collected
    sys.exit(app.exec())
| true | true |
f720bbec31bcc03b0a76267cc6d1919b2116ffc8 | 3,982 | py | Python | pygraphblas/demo/dnn.py | szarnyasg/pygraphblas | 7465ef6fcc77c9901869b70ddf1d77a86570c336 | [
"Apache-2.0"
] | null | null | null | pygraphblas/demo/dnn.py | szarnyasg/pygraphblas | 7465ef6fcc77c9901869b70ddf1d77a86570c336 | [
"Apache-2.0"
] | null | null | null | pygraphblas/demo/dnn.py | szarnyasg/pygraphblas | 7465ef6fcc77c9901869b70ddf1d77a86570c336 | [
"Apache-2.0"
] | null | null | null | import os
from functools import wraps, partial
from time import time
from statistics import mean
from pathlib import Path
from pygraphblas import *
from multiprocessing.pool import ThreadPool
from multiprocessing import cpu_count
NFEATURES = 60000
BIAS = {1024: -0.3, 4096: -0.35, 16384: -0.4, 65536: -0.45}
def timing(f):
    """Decorator that prints the wall-clock duration of each call to *f*."""
    @wraps(f)
    def wrapper(*args, **kw):
        start = time()
        value = f(*args, **kw)
        elapsed = time() - start
        print('func:%r took: %2.4f' % (f.__name__, elapsed))
        return value
    return wrapper
@timing
def dnn(W, B, Y):
    """Sparse DNN forward pass (out-of-place variant).

    W -- list of weight matrices, one per layer
    B -- list of diagonal bias matrices, one per layer
    Y -- input feature matrix; returns the final activations
    """
    for w, b in zip(W, B):
        Y = Y @ w
        # (plus, plus) semiring: adds the diagonal bias value to each entry
        with plus_plus:
            Y = Y @ b
        # ReLU: keep only strictly positive entries
        Y = Y.select('>0')
        # clamp activations above 32 down to 32 -- presumably the benchmark's
        # saturation cap; TODO confirm against the challenge spec
        M = Y.select('>', 32)
        if len(M):
            Y[M] = 32
    return Y
@timing
def dnn2(W, B, Y):
    """Sparse DNN forward pass writing results back into Y (in-place variant)."""
    for w, b in zip(W, B):
        Y = Y.mxm(w, out=Y)
        with plus_plus:
            Y = Y.mxm(b, out=Y)
        Y.select('>0', out=Y)
        M = Y.select('>', 32)
        if len(M):
            Y[M] = 32
    return Y
@timing
def load_images(neurons, dest):
    """Load the sparse input-image matrix for the given layer width.

    Uses a cached SuiteSparse binary (.ssb) next to the TSV when present;
    otherwise parses the TSV once and writes the binary cache for next time.
    """
    fname = '{}/sparse-images-{}.{}'
    binfile = fname.format(dest, neurons, 'ssb')
    if Path(binfile).exists():
        return Matrix.from_binfile(binfile.encode('ascii'))
    images = Path(fname.format(dest, neurons, 'tsv'))
    with images.open() as i:
        m = Matrix.from_tsv(i, FP32, NFEATURES, neurons)
        m.to_binfile(binfile.encode('ascii'))
        return m
def load_categories(neurons, nlayers, dest):
    """Read the ground-truth category vector for a (neurons, nlayers) run.

    Returns a boolean Vector of length NFEATURES with True at every
    category index listed in the TSV file (file indices are 1-based).
    """
    path = Path('{}/neuron{}-l{}-categories.tsv'.format(dest, neurons, nlayers))
    result = Vector.from_type(BOOL, NFEATURES)
    with path.open() as fh:
        # iterate lazily instead of readlines(); BUG FIX: skip blank lines,
        # which previously crashed on int('') for files with a trailing newline
        for line in fh:
            line = line.strip()
            if not line:
                continue
            result[int(line) - 1] = True
    return result
def load_layer(i, dest):
    """Load the weight matrix for layer *i*, with .ssb binary caching.

    NOTE(review): relies on the module-level global `neurons` rather than a
    parameter -- callers must set the global before invoking (see __main__).
    """
    fname = '{}/neuron{}/n{}-l{}.{}'
    binfile = fname.format(dest, neurons, neurons, str(i+1), 'ssb')
    if Path(binfile).exists():
        return Matrix.from_binfile(binfile.encode('ascii'))
    l = Path(fname.format(dest, neurons, neurons, str(i+1), 'tsv'))
    with l.open() as f:
        m = Matrix.from_tsv(f, FP32, neurons, neurons)
        m.to_binfile(binfile.encode('ascii'))
        return m
@timing
def generate_layers(neurons, nlayers, dest):
    """Load all *nlayers* weight matrices for this layer width in parallel.

    CLEANUP: the original reassigned `neurons` to an unused Path object,
    shadowing the parameter; that dead statement is removed. `load_layer`
    still reads the module-level `neurons` global, so behaviour is unchanged.
    """
    with ThreadPool(cpu_count()) as pool:
        return pool.map(partial(load_layer, dest=dest), range(nlayers))
@timing
def generate_bias(neurons, nlayers):
    """Build one (neurons x neurons) diagonal bias matrix per layer.

    Each diagonal entry holds the width-specific BIAS constant.
    """
    result = []
    # CLEANUP: the inner loop previously reused `i`, shadowing the layer
    # index; distinct names are used now (the outer index was never read).
    for _ in range(nlayers):
        bias = Matrix.from_type(FP32, neurons, neurons)
        for row in range(neurons):
            bias[row, row] = BIAS[neurons]
        bias.nvals  # force async completion of the pending assignments
        result.append(bias)
    return result
@timing
def run(neurons, images, layers, bias, dest):
    """Run inference and assert predicted categories match ground truth.

    NOTE(review): the `nlayers` used below is the module-level global, not
    derived from len(layers) -- confirm callers keep the two consistent.
    """
    result = dnn2(layers,
                  bias,
                  images)
    r = result.reduce_vector()
    # rows with any positive activation become the predicted categories
    cats = r.apply(lib.GxB_ONE_BOOL, out=Vector.from_type(BOOL, r.size))
    truecats = load_categories(neurons, nlayers, dest)
    assert cats == truecats
# problem sizes from the sparse DNN benchmark
num_neurons = [1024, 4096, 16384, 65536]
num_layers = [120, 480, 1920]
if __name__ == '__main__':
    dest = os.getenv('DEST')
    neurons = os.getenv('NEURONS')
    nlayers = os.getenv('NLAYERS')
    if neurons and nlayers:
        # single run with an explicitly requested problem size
        neurons = int(neurons)
        nlayers = int(nlayers)
        images = load_images(neurons, dest)
        layers = generate_layers(neurons, nlayers, dest)
        bias = generate_bias(neurons, nlayers)
        run(neurons, images, layers, bias, dest)
    else:
        # full sweep: build the deepest stack once per width and reuse its
        # prefixes for the smaller layer counts
        for neurons in num_neurons:
            print('Building layers for %s neurons' % neurons)
            layers = generate_layers(neurons, 1920, dest)
            bias = generate_bias(neurons, 1920)
            images = load_images(neurons, dest)
            for nlayers in num_layers:
                print('Benching %s neurons %s layers' % (neurons, nlayers))
                run(neurons, images, layers[:nlayers], bias[:nlayers], dest)
| 30.396947 | 76 | 0.594927 | import os
from functools import wraps, partial
from time import time
from statistics import mean
from pathlib import Path
from pygraphblas import *
from multiprocessing.pool import ThreadPool
from multiprocessing import cpu_count
NFEATURES = 60000
BIAS = {1024: -0.3, 4096: -0.35, 16384: -0.4, 65536: -0.45}
def timing(f):
@wraps(f)
def wrap(*args, **kw):
ts = time()
result = f(*args, **kw)
te = time()
print('func:%r took: %2.4f' % (f.__name__, te-ts))
return result
return wrap
@timing
def dnn(W, B, Y):
for w, b in zip(W, B):
Y = Y @ w
with plus_plus:
Y = Y @ b
Y = Y.select('>0')
M = Y.select('>', 32)
if len(M):
Y[M] = 32
return Y
@timing
def dnn2(W, B, Y):
for w, b in zip(W, B):
Y = Y.mxm(w, out=Y)
with plus_plus:
Y = Y.mxm(b, out=Y)
Y.select('>0', out=Y)
M = Y.select('>', 32)
if len(M):
Y[M] = 32
return Y
@timing
def load_images(neurons, dest):
fname = '{}/sparse-images-{}.{}'
binfile = fname.format(dest, neurons, 'ssb')
if Path(binfile).exists():
return Matrix.from_binfile(binfile.encode('ascii'))
images = Path(fname.format(dest, neurons, 'tsv'))
with images.open() as i:
m = Matrix.from_tsv(i, FP32, NFEATURES, neurons)
m.to_binfile(binfile.encode('ascii'))
return m
def load_categories(neurons, nlayers, dest):
fname = '{}/neuron{}-l{}-categories.tsv'
cats = Path(fname.format(dest, neurons, nlayers))
result = Vector.from_type(BOOL, NFEATURES)
with cats.open() as i:
for line in i.readlines():
result[int(line.strip())-1] = True
return result
def load_layer(i, dest):
fname = '{}/neuron{}/n{}-l{}.{}'
binfile = fname.format(dest, neurons, neurons, str(i+1), 'ssb')
if Path(binfile).exists():
return Matrix.from_binfile(binfile.encode('ascii'))
l = Path(fname.format(dest, neurons, neurons, str(i+1), 'tsv'))
with l.open() as f:
m = Matrix.from_tsv(f, FP32, neurons, neurons)
m.to_binfile(binfile.encode('ascii'))
return m
@timing
def generate_layers(neurons, nlayers, dest):
    """Load all *nlayers* weight matrices for this layer width in parallel.

    CLEANUP: the original reassigned `neurons` to an unused Path object,
    shadowing the parameter; that dead statement is removed. `load_layer`
    still reads the module-level `neurons` global, so behaviour is unchanged.
    """
    with ThreadPool(cpu_count()) as pool:
        return pool.map(partial(load_layer, dest=dest), range(nlayers))
@timing
def generate_bias(neurons, nlayers):
    """Build one (neurons x neurons) diagonal bias matrix per layer.

    Each diagonal entry holds the width-specific BIAS constant.
    """
    result = []
    # CLEANUP: the inner loop previously reused `i`, shadowing the layer
    # index; distinct names are used now (the outer index was never read).
    for _ in range(nlayers):
        bias = Matrix.from_type(FP32, neurons, neurons)
        for row in range(neurons):
            bias[row, row] = BIAS[neurons]
        bias.nvals  # force async completion of the pending assignments
        result.append(bias)
    return result
@timing
def run(neurons, images, layers, bias, dest):
result = dnn2(layers,
bias,
images)
r = result.reduce_vector()
cats = r.apply(lib.GxB_ONE_BOOL, out=Vector.from_type(BOOL, r.size))
truecats = load_categories(neurons, nlayers, dest)
assert cats == truecats
num_neurons = [1024, 4096, 16384, 65536]
num_layers = [120, 480, 1920]
if __name__ == '__main__':
dest = os.getenv('DEST')
neurons = os.getenv('NEURONS')
nlayers = os.getenv('NLAYERS')
if neurons and nlayers:
neurons = int(neurons)
nlayers = int(nlayers)
images = load_images(neurons, dest)
layers = generate_layers(neurons, nlayers, dest)
bias = generate_bias(neurons, nlayers)
run(neurons, images, layers, bias, dest)
else:
for neurons in num_neurons:
print('Building layers for %s neurons' % neurons)
layers = generate_layers(neurons, 1920, dest)
bias = generate_bias(neurons, 1920)
images = load_images(neurons, dest)
for nlayers in num_layers:
print('Benching %s neurons %s layers' % (neurons, nlayers))
run(neurons, images, layers[:nlayers], bias[:nlayers], dest)
| true | true |
f720bca64500834838fa25d7053779b0ff0a3d49 | 1,252 | py | Python | backend/server.py | ryzbaka/Niyuddha | ca54a5c79b8e733aca494f996f05c10ef5cf4950 | [
"MIT"
] | null | null | null | backend/server.py | ryzbaka/Niyuddha | ca54a5c79b8e733aca494f996f05c10ef5cf4950 | [
"MIT"
] | null | null | null | backend/server.py | ryzbaka/Niyuddha | ca54a5c79b8e733aca494f996f05c10ef5cf4950 | [
"MIT"
] | null | null | null | from flask import Flask,jsonify,request
import os
from subprocess import PIPE,Popen
app = Flask(__name__)

@app.route("/", methods=["GET"])
def home():
    """Health-check endpoint confirming the service is running."""
    return "Working"
@app.route("/sendcode",methods=["POST"])
def sendCode():
print(request.json)
owd = os.getcwd() # chdir into this once done executing.
username = request.json['username']
code = request.json['code']
os.chdir('users')
userFolders=os.listdir()
if username not in userFolders:
os.mkdir(username)
os.chdir(username)
with open(f"{username}.py","w") as f:
f.write(code)
os.system(f'docker run -it --name {username}container --detach --rm python:3')
os.system(f'docker cp {username}.py {username}container:/{username}.py')
# result = os.popen(f'docker exec {username}container python {username}.py').read()
p = Popen(f"docker exec {username}container python {username}.py",shell=True,stdout=PIPE,stderr=PIPE)
stdout,stderr = p.communicate()
os.system(f'docker kill {username}container')
os.chdir(owd)#switching back to original directory
print(os.path.abspath(os.curdir))
return jsonify({"message":stdout.decode(),"error":stderr.decode()})
if __name__=='__main__':
app.run(port=5555,debug=True) | 36.823529 | 105 | 0.682907 | from flask import Flask,jsonify,request
import os
from subprocess import PIPE,Popen
app = Flask(__name__)
@app.route("/",methods=["GET"])
def home():
return "Working"
@app.route("/sendcode",methods=["POST"])
def sendCode():
print(request.json)
owd = os.getcwd()
username = request.json['username']
code = request.json['code']
os.chdir('users')
userFolders=os.listdir()
if username not in userFolders:
os.mkdir(username)
os.chdir(username)
with open(f"{username}.py","w") as f:
f.write(code)
os.system(f'docker run -it --name {username}container --detach --rm python:3')
os.system(f'docker cp {username}.py {username}container:/{username}.py')
p = Popen(f"docker exec {username}container python {username}.py",shell=True,stdout=PIPE,stderr=PIPE)
stdout,stderr = p.communicate()
os.system(f'docker kill {username}container')
os.chdir(owd)
print(os.path.abspath(os.curdir))
return jsonify({"message":stdout.decode(),"error":stderr.decode()})
if __name__=='__main__':
app.run(port=5555,debug=True) | true | true |
f720bcd371ce68d744f5e4f9a76e113f3947b3e5 | 3,220 | py | Python | pyrasterframes/src/main/python/pyrasterframes/rf_context.py | mjohns-databricks/rasterframes | 44f40726b79e4b3600d6990b73c815b6f891be07 | [
"Apache-2.0"
] | 180 | 2018-03-21T13:34:08.000Z | 2022-03-19T03:31:24.000Z | pyrasterframes/src/main/python/pyrasterframes/rf_context.py | mjohns-databricks/rasterframes | 44f40726b79e4b3600d6990b73c815b6f891be07 | [
"Apache-2.0"
] | 442 | 2018-05-02T13:14:35.000Z | 2022-03-28T21:49:58.000Z | pyrasterframes/src/main/python/pyrasterframes/rf_context.py | mjohns-databricks/rasterframes | 44f40726b79e4b3600d6990b73c815b6f891be07 | [
"Apache-2.0"
] | 45 | 2018-05-03T13:46:04.000Z | 2022-01-30T23:16:00.000Z | #
# This software is licensed under the Apache 2 license, quoted below.
#
# Copyright 2019 Astraea, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may not
# use this file except in compliance with the License. You may obtain a copy of
# the License at
#
# [http://www.apache.org/licenses/LICENSE-2.0]
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations under
# the License.
#
# SPDX-License-Identifier: Apache-2.0
#
"""
This module contains access to the jvm SparkContext with RasterFrameLayer support.
"""
from pyspark import SparkContext
from pyspark.sql import SparkSession
from typing import Any, List
from py4j.java_gateway import JavaMember
from py4j.java_collections import JavaList, JavaMap
from typing import Tuple
__all__ = ['RFContext']
class RFContext(object):
    """
    Entrypoint to RasterFrames services
    """
    def __init__(self, spark_session: SparkSession):
        # keep handles to the session, the Py4J gateway and the JVM-side
        # PyRFContext that backs every call made through this wrapper
        self._spark_session = spark_session
        self._gateway = spark_session.sparkContext._gateway
        self._jvm = self._gateway.jvm
        jsess = self._spark_session._jsparkSession
        self._jrfctx = self._jvm.org.locationtech.rasterframes.py.PyRFContext(jsess)
    def list_to_seq(self, py_list: List[Any]) -> JavaList:
        """Convert a Python list into a Scala Seq via the JVM-side helper."""
        conv = self.lookup('_listToSeq')
        return conv(py_list)
    def lookup(self, function_name: str) -> JavaMember:
        """Resolve a method on the JVM-side PyRFContext by name."""
        return getattr(self._jrfctx, function_name)
    def build_info(self) -> JavaMap:
        """Return RasterFrames build metadata from the JVM."""
        return self._jrfctx.buildInfo()
    def companion_of(self, classname: str):
        """Return the singleton companion object for a Scala class name."""
        # Scala companion objects compile to a '<name>$' class whose single
        # instance lives in the static MODULE$ field
        if not classname.endswith("$"):
            classname = classname + "$"
        companion_module = getattr(self._jvm, classname)
        singleton = getattr(companion_module, "MODULE$")
        return singleton
    # NB: Tightly coupled to `org.locationtech.rasterframes.py.PyRFContext._resolveRasterRef`
    def _resolve_raster_ref(self, ref_struct):
        """Materialize a RasterRef struct into a JVM-side raster object."""
        f = self.lookup("_resolveRasterRef")
        return f(
            ref_struct.source.raster_source_kryo,
            ref_struct.bandIndex,
            ref_struct.subextent.xmin,
            ref_struct.subextent.ymin,
            ref_struct.subextent.xmax,
            ref_struct.subextent.ymax,
        )
    @staticmethod
    def active():
        """
        Get the active Python RFContext and throw an error if it is not enabled for RasterFrames.
        """
        sc = SparkContext._active_spark_context
        if not hasattr(sc, '_rf_context'):
            raise AttributeError(
                "RasterFrames have not been enabled for the active session. Call 'SparkSession.withRasterFrames()'.")
        return sc._rf_context
    @staticmethod
    def call(name, *args):
        """Invoke the named JVM-side function with *args and return its result."""
        f = RFContext.active().lookup(name)
        return f(*args)
    @staticmethod
    def jvm():
        """
        Get the active Scala PyRFContext and throw an error if it is not enabled for RasterFrames.
        """
        return RFContext.active()._jvm
| 32.525253 | 117 | 0.684472 |
from pyspark import SparkContext
from pyspark.sql import SparkSession
from typing import Any, List
from py4j.java_gateway import JavaMember
from py4j.java_collections import JavaList, JavaMap
from typing import Tuple
__all__ = ['RFContext']
class RFContext(object):
def __init__(self, spark_session: SparkSession):
self._spark_session = spark_session
self._gateway = spark_session.sparkContext._gateway
self._jvm = self._gateway.jvm
jsess = self._spark_session._jsparkSession
self._jrfctx = self._jvm.org.locationtech.rasterframes.py.PyRFContext(jsess)
def list_to_seq(self, py_list: List[Any]) -> JavaList:
conv = self.lookup('_listToSeq')
return conv(py_list)
def lookup(self, function_name: str) -> JavaMember:
return getattr(self._jrfctx, function_name)
def build_info(self) -> JavaMap:
return self._jrfctx.buildInfo()
def companion_of(self, classname: str):
if not classname.endswith("$"):
classname = classname + "$"
companion_module = getattr(self._jvm, classname)
singleton = getattr(companion_module, "MODULE$")
return singleton
def _resolve_raster_ref(self, ref_struct):
f = self.lookup("_resolveRasterRef")
return f(
ref_struct.source.raster_source_kryo,
ref_struct.bandIndex,
ref_struct.subextent.xmin,
ref_struct.subextent.ymin,
ref_struct.subextent.xmax,
ref_struct.subextent.ymax,
)
@staticmethod
def active():
sc = SparkContext._active_spark_context
if not hasattr(sc, '_rf_context'):
raise AttributeError(
"RasterFrames have not been enabled for the active session. Call 'SparkSession.withRasterFrames()'.")
return sc._rf_context
@staticmethod
def call(name, *args):
f = RFContext.active().lookup(name)
return f(*args)
@staticmethod
def jvm():
return RFContext.active()._jvm
| true | true |
f720bd0c2b5ca565bfafb6e86a7b848c423f5997 | 686 | py | Python | tests/scrubber/test_scrubber.py | scottkleinman/lexos | d362ddd05ef23b5173ce303eb7b08ff3583ac709 | [
"MIT"
] | null | null | null | tests/scrubber/test_scrubber.py | scottkleinman/lexos | d362ddd05ef23b5173ce303eb7b08ff3583ac709 | [
"MIT"
] | null | null | null | tests/scrubber/test_scrubber.py | scottkleinman/lexos | d362ddd05ef23b5173ce303eb7b08ff3583ac709 | [
"MIT"
] | null | null | null | """test_scrubber.py."""
# Import a minimal text loader class, the functions for scrubber pipelines,
# and the scrubber function registry
from lexos.io.basic import Loader
from lexos.scrubber.pipeline import make_pipeline
from lexos.scrubber.registry import scrubber_components
from lexos.scrubber.scrubber import Scrubber
# Load a text
data = "tests/test_data/Austen_Pride.txt"
loader = Loader()
loader.load(data)
text = loader.texts[0]
lower_case = scrubber_components.get("lower_case")
scrub = make_pipeline(lower_case)
pipeline = (lower_case)
s = Scrubber()
s.add_pipeline(pipeline)
show_pipeline = s.get_pipeline()
texts = s.scrub(text)
for text in texts:
print(text[0:50])
| 26.384615 | 75 | 0.781341 |
from lexos.io.basic import Loader
from lexos.scrubber.pipeline import make_pipeline
from lexos.scrubber.registry import scrubber_components
from lexos.scrubber.scrubber import Scrubber
data = "tests/test_data/Austen_Pride.txt"
loader = Loader()
loader.load(data)
text = loader.texts[0]
lower_case = scrubber_components.get("lower_case")
scrub = make_pipeline(lower_case)
pipeline = (lower_case)
s = Scrubber()
s.add_pipeline(pipeline)
show_pipeline = s.get_pipeline()
texts = s.scrub(text)
for text in texts:
print(text[0:50])
| true | true |
f720be44decd15c1c50cc613e248b09f157857d5 | 335 | py | Python | resources/ai/swagger/__init__.py | GMKrieger/ai_api | 9ed661d29afb3232b7930727d056abdedfb91b43 | [
"MIT"
] | null | null | null | resources/ai/swagger/__init__.py | GMKrieger/ai_api | 9ed661d29afb3232b7930727d056abdedfb91b43 | [
"MIT"
] | 10 | 2020-01-28T22:15:24.000Z | 2021-04-30T20:36:27.000Z | resources/ai/swagger/__init__.py | GMKrieger/ai_api | 9ed661d29afb3232b7930727d056abdedfb91b43 | [
"MIT"
] | null | null | null | """
swagger module -
A package defining the Swagger (OpenAPI) features. This module builds the
Swagger structure and defines the data shown when the Swagger UI is opened.
It does not contain the HTML and CSS files used to render the page, only
the underlying structure; the HTML and CSS can be found in the static module.
""" | 41.875 | 86 | 0.746269 | true | true | |
f720bf58d889e6191c183282ec836d74afba0701 | 704 | py | Python | tools/mo/openvino/tools/mo/front/caffe/grn_ext.py | pazamelin/openvino | b7e8ef910d7ed8e52326d14dc6fd53b71d16ed48 | [
"Apache-2.0"
] | 1 | 2019-09-22T01:05:07.000Z | 2019-09-22T01:05:07.000Z | tools/mo/openvino/tools/mo/front/caffe/grn_ext.py | pazamelin/openvino | b7e8ef910d7ed8e52326d14dc6fd53b71d16ed48 | [
"Apache-2.0"
] | 58 | 2020-11-06T12:13:45.000Z | 2022-03-28T13:20:11.000Z | tools/mo/openvino/tools/mo/front/caffe/grn_ext.py | pazamelin/openvino | b7e8ef910d7ed8e52326d14dc6fd53b71d16ed48 | [
"Apache-2.0"
] | 2 | 2021-07-14T07:40:50.000Z | 2021-07-27T01:40:03.000Z | # Copyright (C) 2018-2021 Intel Corporation
# SPDX-License-Identifier: Apache-2.0
from openvino.tools.mo.ops.grn import GRNOp
from openvino.tools.mo.front.caffe.collect_attributes import merge_attrs
from openvino.tools.mo.front.extractor import FrontExtractorOp
class GRNFrontExtractor(FrontExtractorOp):
    """Front extractor for Caffe GRN (Global Response Normalization) layers."""
    op = 'GRN'
    enabled = True

    @classmethod
    def extract(cls, node):
        """Copy the `bias` attribute from the Caffe proto onto the node."""
        grn_param = node.pb.grn_param
        # merge the proto's fields with the explicitly extracted attributes
        attrs = merge_attrs(grn_param, {'bias': grn_param.bias})
        # record the collected attributes on the operation node
        GRNOp.update_node_stat(node, attrs)
        return cls.enabled
| 26.074074 | 72 | 0.693182 |
from openvino.tools.mo.ops.grn import GRNOp
from openvino.tools.mo.front.caffe.collect_attributes import merge_attrs
from openvino.tools.mo.front.extractor import FrontExtractorOp
class GRNFrontExtractor(FrontExtractorOp):
op = 'GRN'
enabled = True
@classmethod
def extract(cls, node):
proto_layer = node.pb
param = proto_layer.grn_param
update_attrs = {
'bias': param.bias,
}
mapping_rule = merge_attrs(param, update_attrs)
GRNOp.update_node_stat(node, mapping_rule)
return cls.enabled
| true | true |
f720bf66d4521a60fee34b616ef7d1b5989d5e01 | 256 | py | Python | code/learn-AI/matplotlib/graph/sigmoid_function.py | lsieun/learn-AI | 0a164bc2e6317de3aa03c747c0e6f15d93e7f49a | [
"Apache-2.0"
] | 1 | 2019-03-27T23:22:44.000Z | 2019-03-27T23:22:44.000Z | code/learn-AI/matplotlib/graph/sigmoid_function.py | lsieun/learn-AI | 0a164bc2e6317de3aa03c747c0e6f15d93e7f49a | [
"Apache-2.0"
] | null | null | null | code/learn-AI/matplotlib/graph/sigmoid_function.py | lsieun/learn-AI | 0a164bc2e6317de3aa03c747c0e6f15d93e7f49a | [
"Apache-2.0"
] | null | null | null | import numpy as np
import matplotlib.pyplot as plt
def func(x):
    """Logistic sigmoid 1 / (1 + exp(-x)); elementwise on numpy arrays."""
    exponent = np.exp(-x)
    return 1 / (1 + exponent)
# Sample the sigmoid on 960 evenly spaced points over [-8, 8] and plot it.
xdata = np.linspace(-8, 8, 960,endpoint=True)
ydata = func(xdata)
plt.plot(xdata,ydata)
plt.show() | 19.692308 | 57 | 0.707031 | import numpy as np
import matplotlib.pyplot as plt
def func(x):
    """Logistic sigmoid: 1 / (1 + e**-x). Accepts scalars or numpy arrays."""
    return 1 / (1 + np.exp(-x))
# Plot the sigmoid over [-8, 8] (960 sample points).
xdata = np.linspace(-8, 8, 960,endpoint=True)
ydata = func(xdata)
plt.plot(xdata,ydata)
plt.show() | true | true |
f720bf6a9fb2642c27030209f924c321a1edff82 | 3,343 | py | Python | DownData/Link_down.py | Max-astro/A2Project | 5d40263742133f214936b06b622d08092e694aed | [
"MIT"
] | null | null | null | DownData/Link_down.py | Max-astro/A2Project | 5d40263742133f214936b06b622d08092e694aed | [
"MIT"
] | null | null | null | DownData/Link_down.py | Max-astro/A2Project | 5d40263742133f214936b06b622d08092e694aed | [
"MIT"
] | null | null | null | import requests
import sys
import h5py
import numpy as np
import os
def get(path, params=None, savedir=None):
    """Authenticated GET against the Illustris/TNG web API.

    Returns the parsed dict for JSON responses, the saved filename for file
    attachments (written under savedir when given), or the raw response
    object otherwise.
    """
    headers = {"api-key":"27d44ba55cd115b10f2dd9153589aff0"}
    response = requests.get(path, params=params, headers=headers)
    # any non-2xx status becomes an exception for the caller to handle
    response.raise_for_status()
    if response.headers['content-type'] == 'application/json':
        return response.json()
    if 'content-disposition' in response.headers:
        filename = response.headers['content-disposition'].split("filename=")[1]
        if savedir is not None:
            filename = savedir + filename
        with open(filename, 'wb') as out:
            out.write(response.content)
        return filename
    return response
def HaloProgenitors(haloID):
    '''
    haloID is the subhalo's ID in snap_099
    return a dict = {'SnapNum' : SubfindID}, or -1 on a download error
    '''
    # BUG FIX: the URL previously used "%haloID", which is not a valid
    # printf-style placeholder and raised ValueError before any request was
    # issued; the subhalo id must be substituted with %d.
    url = "http://www.tng-project.org/api/TNG100-1/snapshots/99/subhalos/%d/sublink/simple.json" % haloID
    try:
        sublink = get(url, savedir='/home/sublink/')
    except:
        print(sys.exc_info()[0])
        return -1
    f = sublink
    #Find halo's Subfind ID with redshift(ie:SnapNum)
    snap_num = np.array(f['SnapNum'])
    subfind_ID = np.array(f['SubfindID'])
    Progenitors_dict = {}
    for i in range(len(snap_num)):
        Progenitors_dict['%d'%snap_num[i]] = subfind_ID[i]
    # BUG FIX: 'f' is the plain dict that get() returns for JSON responses;
    # it has no close() method, so the old f.close() raised AttributeError.
    return Progenitors_dict
'''
snap_91 z=0.1
snap_84 z=0.2
snap_78 z=0.3
snap_72 z=0.4
snap_67 z=0.5
snap_59 z=0.7
snap_50 z=1.0
snap_40 z=1.5
snap_33 z=2.0
'''
# Download stellar-particle cutouts for every barred galaxy at each selected
# snapshot; halo ids that fail are collected in errorHalo and saved at the end.
barred = np.load('F:/Linux/data/099fig/barredID.npy')
snap = [99, 91, 84, 78, 72, 67, 59, 50, 40, 33]
errorHalo = []
for haloID in barred:
    Prog_dict = HaloProgenitors(haloID)
    if Prog_dict == -1:
        # progenitor lookup failed (network error); skip this halo
        print('halo: %d Network ERROR, Try next'%haloID)
        errorHalo.append(haloID)
        continue
    else:
        #Download stellar particles' information in all selected snapshot z
        for z in snap:
            print('Now download halo %d in snap_%d'%(haloID, z))
            try:
                subID = Prog_dict['%d'%z]
                cutoff_url = 'http://www.tng-project.org/api/TNG100-1/snapshots/%d/subhalos/%d/cutout.hdf5?stars=Masses,Coordinates,Velocities,GFM_StellarFormationTime'%(z, subID)
                # skip snapshots whose cutout file already exists on disk
                if os.path.isfile('F:/Linux/data/TNG/cutoff/disk_%d/cutout_%d.hdf5'%(z, subID)) == False:
                    get(cutoff_url, savedir='F:/Linux/data/TNG/cutoff/disk_%d/'%z)
            except:
                print("halo %d in snap_%d Fail:"%(haloID, z), sys.exc_info()[0])
                print("You need to reload this halo.")
                errorHalo.append(haloID)
                break
            else:
                print('halo %d in snap_%d downloaded'%(haloID, z))
        print('halo %d in all snapshot download Completed'%haloID)
if len(errorHalo) == 0:
    print('All done.')
else:
    print('%d halo download faild'%len(errorHalo))
    print("Error halo's ID were saved in '/Raid0/zhouzb/downError.log.npy'.")
    np.save('F:/Linux/data/TNG/errorID.npy', errorHalo)
| 29.324561 | 180 | 0.597368 | import requests
import sys
import h5py
import numpy as np
import os
def get(path, params=None, savedir=None):
    """HTTP GET helper for the Illustris/TNG API.

    Returns a parsed dict for JSON responses, a filename string when the
    response is a downloadable attachment (saved under savedir if given),
    or the raw requests response otherwise.
    """
    headers = {"api-key":"27d44ba55cd115b10f2dd9153589aff0"}
    r = requests.get(path, params=params, headers=headers)
    r.raise_for_status()  # raise on non-2xx status codes
    if r.headers['content-type'] == 'application/json':
        return r.json()
    if 'content-disposition' in r.headers:
        filename = r.headers['content-disposition'].split("filename=")[1]
        if savedir != None:
            filename = savedir + filename
        with open(filename, 'wb') as f:
            f.write(r.content)
        return filename
    return r
def HaloProgenitors(haloID):
    """Return {'SnapNum string': SubfindID} for the progenitors of the given
    snap-99 subhalo, or -1 if the sublink download failed."""
    # BUG FIX: "%haloID" is not a valid printf-style placeholder and raised
    # ValueError at runtime; the subhalo id must be substituted with %d.
    url = "http://www.tng-project.org/api/TNG100-1/snapshots/99/subhalos/%d/sublink/simple.json" % haloID
    try:
        sublink = get(url, savedir='/home/sublink/')
    except:
        print(sys.exc_info()[0])
        return -1
    f = sublink
    snap_num = np.array(f['SnapNum'])
    subfind_ID = np.array(f['SubfindID'])
    Progenitors_dict = {}
    for i in range(len(snap_num)):
        Progenitors_dict['%d'%snap_num[i]] = subfind_ID[i]
    # BUG FIX: removed f.close() -- get() returns a plain dict for JSON
    # responses, which has no close() method (the call raised AttributeError).
    return Progenitors_dict
# Driver: fetch stellar-particle cutouts for every barred halo at each
# snapshot in 'snap'; failures are accumulated in errorHalo and persisted.
barred = np.load('F:/Linux/data/099fig/barredID.npy')
snap = [99, 91, 84, 78, 72, 67, 59, 50, 40, 33]
errorHalo = []
for haloID in barred:
    Prog_dict = HaloProgenitors(haloID)
    if Prog_dict == -1:
        print('halo: %d Network ERROR, Try next'%haloID)
        errorHalo.append(haloID)
        continue
    else:
        #Download stellar particles' information in all selected snapshot z
        for z in snap:
            print('Now download halo %d in snap_%d'%(haloID, z))
            try:
                subID = Prog_dict['%d'%z]
                cutoff_url = 'http://www.tng-project.org/api/TNG100-1/snapshots/%d/subhalos/%d/cutout.hdf5?stars=Masses,Coordinates,Velocities,GFM_StellarFormationTime'%(z, subID)
                # only download cutouts that are not already on disk
                if os.path.isfile('F:/Linux/data/TNG/cutoff/disk_%d/cutout_%d.hdf5'%(z, subID)) == False:
                    get(cutoff_url, savedir='F:/Linux/data/TNG/cutoff/disk_%d/'%z)
            except:
                print("halo %d in snap_%d Fail:"%(haloID, z), sys.exc_info()[0])
                print("You need to reload this halo.")
                errorHalo.append(haloID)
                break
            else:
                print('halo %d in snap_%d downloaded'%(haloID, z))
        print('halo %d in all snapshot download Completed'%haloID)
if len(errorHalo) == 0:
    print('All done.')
else:
    print('%d halo download faild'%len(errorHalo))
    print("Error halo's ID were saved in '/Raid0/zhouzb/downError.log.npy'.")
    np.save('F:/Linux/data/TNG/errorID.npy', errorHalo)
| true | true |
f720bf86d570b0fdfb0907c0f3e9814300ec73f6 | 15,954 | py | Python | aphla/gui/qrangeslider.py | NSLS-II/aphla | ceb5410dc836a8fb16321b6dc5e10d442be765c5 | [
"BSD-3-Clause"
] | null | null | null | aphla/gui/qrangeslider.py | NSLS-II/aphla | ceb5410dc836a8fb16321b6dc5e10d442be765c5 | [
"BSD-3-Clause"
] | 1 | 2020-02-17T18:56:18.000Z | 2020-02-20T17:06:20.000Z | aphla/gui/qrangeslider.py | NSLS-II/aphla | ceb5410dc836a8fb16321b6dc5e10d442be765c5 | [
"BSD-3-Clause"
] | 1 | 2021-03-08T16:07:11.000Z | 2021-03-08T16:07:11.000Z | #!/usr/bin/env python
# ------------------------------------------------------------------------------
# Copyright (c) 2011-2012, Ryan Galloway (ryan@rsgalloway.com)
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
#
# - Redistributions of source code must retain the above copyright notice, this
# list of conditions and the following disclaimer.
#
# - Redistributions in binary form must reproduce the above copyright notice,
# this list of conditions and the following disclaimer in the documentation
# and/or other materials provided with the distribution.
#
# - Neither the name of the software nor the names of its contributors
# may be used to endorse or promote products derived from this software
# without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE
# FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
# DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
# OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#
# ------------------------------------------------------------------------------
# docs and latest version available for download at
# http://rsgalloway.github.com/qrangeslider
# ------------------------------------------------------------------------------
__author__ = "Ryan Galloway <ryan@rsgalloway.com>"
__version__ = "0.1"
# ------------------------------------------------------------------------------
# SUMMARY
# ------------------------------------------------------------------------------
"""The QRangeSlider class implements a horizontal range slider widget.
"""
# ------------------------------------------------------------------------------
# TODO
# ------------------------------------------------------------------------------
"""
- smoother mouse move event handler
- support splits and joins
- verticle sliders
- ticks
"""
# ------------------------------------------------------------------------------
# IMPORTS
# ------------------------------------------------------------------------------
import os
import sys
from PyQt4 import QtCore
from PyQt4 import QtGui
from PyQt4 import uic
try:
_fromUtf8 = QtCore.QString.fromUtf8
except AttributeError:
_fromUtf8 = lambda s: s
__all__ = ['QRangeSlider']
DEFAULT_CSS = """
QRangeSlider * {
border: 0px;
padding: 0px;
}
QRangeSlider #Head {
background: #fff;
}
QRangeSlider #Span {
background: #393;
}
QRangeSlider #Span:active {
background: #282;
}
QRangeSlider #Tail {
background: #fff;
}
QRangeSlider > QSplitter::handle {
background: #393;
}
QRangeSlider > QSplitter::handle:vertical {
height: 4px;
}
QRangeSlider > QSplitter::handle:pressed {
background: #ca5;
}
"""
class Ui_Form(object):
    """default range slider form: a 3-pane horizontal QSplitter
    (head | span | tail) inside a zero-margin grid layout"""
    def setupUi(self, Form):
        Form.setObjectName(_fromUtf8("QRangeSlider"))
        Form.resize(300, 30)
        Form.setStyleSheet(_fromUtf8(DEFAULT_CSS))
        self.gridLayout = QtGui.QGridLayout(Form)
        self.gridLayout.setMargin(0)
        self.gridLayout.setSpacing(0)
        self.gridLayout.setObjectName(_fromUtf8("gridLayout"))
        # the splitter supplies the two draggable range handles
        self._splitter = QtGui.QSplitter(Form)
        self._splitter.setMinimumSize(QtCore.QSize(0, 0))
        self._splitter.setMaximumSize(QtCore.QSize(16777215, 16777215))
        self._splitter.setOrientation(QtCore.Qt.Horizontal)
        self._splitter.setObjectName(_fromUtf8("splitter"))
        # object names Head/Span/Tail are the selectors used by the CSS
        self._head = QtGui.QGroupBox(self._splitter)
        self._head.setTitle(_fromUtf8(""))
        self._head.setObjectName(_fromUtf8("Head"))
        self._handle = QtGui.QGroupBox(self._splitter)
        self._handle.setTitle(_fromUtf8(""))
        self._handle.setObjectName(_fromUtf8("Span"))
        self._tail = QtGui.QGroupBox(self._splitter)
        self._tail.setTitle(_fromUtf8(""))
        self._tail.setObjectName(_fromUtf8("Tail"))
        self.gridLayout.addWidget(self._splitter, 0, 0, 1, 1)
        self.retranslateUi(Form)
        QtCore.QMetaObject.connectSlotsByName(Form)
    def retranslateUi(self, Form):
        encoding = QtGui.QApplication.UnicodeUTF8
        Form.setWindowTitle(QtGui.QApplication.translate("QRangeSlider",
                                                         "QRangeSlider",
                                                         None, encoding))
class Element(QtGui.QGroupBox):
    """Common base for the three colored regions of the slider
    (head, span handle, tail)."""

    def __init__(self, parent, main):
        super(Element, self).__init__(parent)
        self.main = main  # owning QRangeSlider

    def setStyleSheet(self, style):
        """Forward style sheets to the enclosing group box."""
        self.parent().setStyleSheet(style)

    def textColor(self):
        """Color used when painting values (default: mid grey)."""
        return getattr(self, '__textColor', QtGui.QColor(125, 125, 125))

    def setTextColor(self, color):
        """Accept a QColor, an (r, g, b) tuple, or a single grey level."""
        if type(color) is tuple and len(color) == 3:
            r, g, b = color
            color = QtGui.QColor(r, g, b)
        elif type(color) is int:
            color = QtGui.QColor(color, color, color)
        setattr(self, '__textColor', color)

    def paintEvent(self, event):
        """Paint the region, drawing values only when the slider asks."""
        painter = QtGui.QPainter()
        painter.begin(self)
        if self.main.drawValues():
            self.drawText(event, painter)
        painter.end()
class Head(Element):
    """Region left of the handle; paints the slider minimum."""

    def __init__(self, parent, main):
        super(Head, self).__init__(parent, main)

    def drawText(self, event, qp):
        """Render min() left-aligned inside the head area."""
        qp.setFont(QtGui.QFont('Arial', 10))
        qp.setPen(self.textColor())
        label = str(self.main.min())
        qp.drawText(event.rect(), QtCore.Qt.AlignLeft, label)
class Tail(Element):
    """Region right of the handle; paints the slider maximum."""

    def __init__(self, parent, main):
        super(Tail, self).__init__(parent, main)

    def drawText(self, event, qp):
        """Render max() right-aligned inside the tail area."""
        qp.setFont(QtGui.QFont('Arial', 10))
        qp.setPen(self.textColor())
        label = str(self.main.max())
        qp.drawText(event.rect(), QtCore.Qt.AlignRight, label)
class Handle(Element):
    """handle area: the draggable span between the two splitter handles"""
    def __init__(self, parent, main):
        super(Handle, self).__init__(parent, main)
    def drawText(self, event, qp):
        # paint start value on the left edge, end value on the right edge
        qp.setPen(self.textColor())
        qp.setFont(QtGui.QFont('Arial', 10))
        qp.drawText(event.rect(), QtCore.Qt.AlignLeft, str(self.main.start()))
        qp.drawText(event.rect(), QtCore.Qt.AlignRight, str(self.main.end()))
    def mouseMoveEvent(self, event):
        """Drag the whole span: shift start and end together by one step
        per event, in the direction of horizontal mouse travel."""
        event.accept()
        mx = event.globalX()
        # previous global x is stashed on the instance via setattr; absent
        # on the first move event of a drag
        _mx = getattr(self, '__mx', None)
        if not _mx:
            setattr(self, '__mx', mx)
            dx = 0
        else:
            dx = mx - _mx
            setattr(self, '__mx', mx)
        if dx == 0:
            event.ignore()
            return
        elif dx > 0:
            dx = 1
        elif dx < 0:
            dx = -1
        # only shift when both ends stay inside [min, max]
        s = self.main.start() + dx
        e = self.main.end() + dx
        if s >= self.main.min() and e <= self.main.max():
            self.main.setRange(s, e)
class QRangeSlider(QtGui.QWidget, Ui_Form):
    """Horizontal range slider widget built on a 3-pane QSplitter.

    The integer range is [min(), max()]; the selected span is
    [start(), end()].  Emits minValueChanged / maxValueChanged /
    startValueChanged / endValueChanged (all int).  Appearance is
    customized with Qt style sheets on the #Head / #Span / #Tail group
    boxes and on the QSplitter handles (see DEFAULT_CSS for a template).
    """
    endValueChanged = QtCore.pyqtSignal(int)
    maxValueChanged = QtCore.pyqtSignal(int)
    minValueChanged = QtCore.pyqtSignal(int)
    startValueChanged = QtCore.pyqtSignal(int)
    # define splitter indices (handle numbers for the two span edges)
    _SPLIT_START = 1
    _SPLIT_END = 2
    def __init__(self, parent=None):
        """Create a new QRangeSlider instance.
        :param parent: QWidget parent
        :return: New QRangeSlider instance.
        """
        super(QRangeSlider, self).__init__(parent)
        self.setupUi(self)
        self.setMouseTracking(False)
        #self._splitter.setChildrenCollapsible(False)
        self._splitter.splitterMoved.connect(self._handleMoveSplitter)
        # head layout
        self._head_layout = QtGui.QHBoxLayout()
        self._head_layout.setSpacing(0)
        self._head_layout.setMargin(0)
        self._head.setLayout(self._head_layout)
        self.head = Head(self._head, main=self)
        self._head_layout.addWidget(self.head)
        # handle layout
        self._handle_layout = QtGui.QHBoxLayout()
        self._handle_layout.setSpacing(0)
        self._handle_layout.setMargin(0)
        self._handle.setLayout(self._handle_layout)
        self.handle = Handle(self._handle, main=self)
        self.handle.setTextColor((150, 255, 150))
        self._handle_layout.addWidget(self.handle)
        # tail layout
        self._tail_layout = QtGui.QHBoxLayout()
        self._tail_layout.setSpacing(0)
        self._tail_layout.setMargin(0)
        self._tail.setLayout(self._tail_layout)
        self.tail = Tail(self._tail, main=self)
        self._tail_layout.addWidget(self.tail)
        # defaults
        self.setMin(0)
        self.setMax(99)
        self.setStart(0)
        self.setEnd(99)
        self.setDrawValues(True)
    def min(self):
        """:return: minimum value"""
        return getattr(self, '__min', None)
    def max(self):
        """:return: maximum value"""
        return getattr(self, '__max', None)
    def setMin(self, value):
        """sets minimum value"""
        assert type(value) is int
        setattr(self, '__min', value)
        self.minValueChanged.emit(value)
    def setMax(self, value):
        """sets maximum value"""
        assert type(value) is int
        setattr(self, '__max', value)
        self.maxValueChanged.emit(value)
    def start(self):
        """:return: range slider start value"""
        return getattr(self, '__start', None)
    def end(self):
        """:return: range slider end value"""
        return getattr(self, '__end', None)
    def _setStart(self, value):
        """stores the start value only (no splitter move)"""
        setattr(self, '__start', value)
        self.startValueChanged.emit(value)
    def setStart(self, value):
        """sets the range slider start value"""
        assert type(value) is int
        v = self._valueToPos(value)
        self._splitter.moveSplitter(v, self._SPLIT_START)
        self._setStart(value)
    def _setEnd(self, value):
        """stores the end value only (no splitter move)"""
        setattr(self, '__end', value)
        self.endValueChanged.emit(value)
    def setEnd(self, value):
        """set the range slider end value"""
        assert type(value) is int
        v = self._valueToPos(value)
        self._splitter.moveSplitter(v, self._SPLIT_END)
        self._setEnd(value)
    def drawValues(self):
        """:return: True if slider values will be drawn"""
        return getattr(self, '__drawValues', None)
    def setDrawValues(self, draw):
        """sets draw values boolean to draw slider values"""
        assert type(draw) is bool
        setattr(self, '__drawValues', draw)
    def getRange(self):
        """:return: the start and end values as a tuple"""
        return (self.start(), self.end())
    def setRange(self, start, end):
        """set the start and end values"""
        self.setStart(start)
        self.setEnd(end)
    def keyPressEvent(self, event):
        """overrides key press event to move range left and right"""
        key = event.key()
        if key == QtCore.Qt.Key_Left:
            s = self.start()-1
            e = self.end()-1
        elif key == QtCore.Qt.Key_Right:
            s = self.start()+1
            e = self.end()+1
        else:
            event.ignore()
            return
        event.accept()
        # only shift when the whole span stays inside [min, max]
        if s >= self.min() and e <= self.max():
            self.setRange(s, e)
    def setBackgroundStyle(self, style):
        """sets background style"""
        self._tail.setStyleSheet(style)
        self._head.setStyleSheet(style)
    def setSpanStyle(self, style):
        """sets range span handle style"""
        self._handle.setStyleSheet(style)
    def _valueToPos(self, value):
        """converts slider value to local pixel x coord"""
        return int(self.width() * (float(value) / self.max()))
    def _posToValue(self, xpos):
        """converts local pixel x coord to slider value"""
        return int(((xpos + self._splitter.handleWidth()) / float(self.width())) * self.max())
    def _handleMoveSplitter(self, xpos, index):
        """private method for handling moving splitter handles"""
        hw = self._splitter.handleWidth()
        def _lockWidth(widget):
            # freeze a pane so dragging one handle cannot push the other
            width = widget.size().width()
            widget.setMinimumWidth(width)
            widget.setMaximumWidth(width)
        def _unlockWidth(widget):
            widget.setMinimumWidth(0)
            widget.setMaximumWidth(16777215)
        v = self._posToValue(xpos)
        if index == self._SPLIT_START:
            _lockWidth(self._tail)
            if v >= self.end():
                return
            # NOTE(review): 'offset'/'w' (and 'hw' above) are computed but
            # never used -- apparent leftovers from an older implementation.
            offset = -20
            w = xpos + offset
            self._setStart(v)
        elif index == self._SPLIT_END:
            _lockWidth(self._head)
            if v <= self.start():
                return
            offset = -40
            w = self.width() - xpos + offset
            self._setEnd(v)
        _unlockWidth(self._tail)
        _unlockWidth(self._head)
        _unlockWidth(self._handle)
#-------------------------------------------------------------------------------
# MAIN
#-------------------------------------------------------------------------------
if __name__ == '__main__':
    # standalone demo: slider spanning [15, 35] with a dark gradient
    # background and a green gradient span
    app = QtGui.QApplication(sys.argv)
    rs = QRangeSlider()
    rs.show()
    rs.setRange(15, 35)
    rs.setBackgroundStyle('background: qlineargradient(x1:0, y1:0, x2:0, y2:1, stop:0 #222, stop:1 #333);')
    rs.handle.setStyleSheet('background: qlineargradient(x1:0, y1:0, x2:0, y2:1, stop:0 #282, stop:1 #393);')
    app.exec_()
| 31.529644 | 109 | 0.573587 |
__author__ = "Ryan Galloway <ryan@rsgalloway.com>"
__version__ = "0.1"
import os
import sys
from PyQt4 import QtCore
from PyQt4 import QtGui
from PyQt4 import uic
try:
_fromUtf8 = QtCore.QString.fromUtf8
except AttributeError:
_fromUtf8 = lambda s: s
__all__ = ['QRangeSlider']
DEFAULT_CSS = """
QRangeSlider * {
border: 0px;
padding: 0px;
}
QRangeSlider #Head {
background: #fff;
}
QRangeSlider #Span {
background: #393;
}
QRangeSlider #Span:active {
background: #282;
}
QRangeSlider #Tail {
background: #fff;
}
QRangeSlider > QSplitter::handle {
background: #393;
}
QRangeSlider > QSplitter::handle:vertical {
height: 4px;
}
QRangeSlider > QSplitter::handle:pressed {
background: #ca5;
}
"""
class Ui_Form(object):
    """Range slider form: a 3-pane horizontal QSplitter (head | span | tail)
    inside a zero-margin grid layout."""
    def setupUi(self, Form):
        Form.setObjectName(_fromUtf8("QRangeSlider"))
        Form.resize(300, 30)
        Form.setStyleSheet(_fromUtf8(DEFAULT_CSS))
        self.gridLayout = QtGui.QGridLayout(Form)
        self.gridLayout.setMargin(0)
        self.gridLayout.setSpacing(0)
        self.gridLayout.setObjectName(_fromUtf8("gridLayout"))
        # the splitter supplies the two draggable range handles
        self._splitter = QtGui.QSplitter(Form)
        self._splitter.setMinimumSize(QtCore.QSize(0, 0))
        self._splitter.setMaximumSize(QtCore.QSize(16777215, 16777215))
        self._splitter.setOrientation(QtCore.Qt.Horizontal)
        self._splitter.setObjectName(_fromUtf8("splitter"))
        # object names Head/Span/Tail are the selectors used by the CSS
        self._head = QtGui.QGroupBox(self._splitter)
        self._head.setTitle(_fromUtf8(""))
        self._head.setObjectName(_fromUtf8("Head"))
        self._handle = QtGui.QGroupBox(self._splitter)
        self._handle.setTitle(_fromUtf8(""))
        self._handle.setObjectName(_fromUtf8("Span"))
        self._tail = QtGui.QGroupBox(self._splitter)
        self._tail.setTitle(_fromUtf8(""))
        self._tail.setObjectName(_fromUtf8("Tail"))
        self.gridLayout.addWidget(self._splitter, 0, 0, 1, 1)
        self.retranslateUi(Form)
        QtCore.QMetaObject.connectSlotsByName(Form)
    def retranslateUi(self, Form):
        encoding = QtGui.QApplication.UnicodeUTF8
        Form.setWindowTitle(QtGui.QApplication.translate("QRangeSlider",
                                                         "QRangeSlider",
                                                         None, encoding))
class Element(QtGui.QGroupBox):
    """Common base for the head / span / tail regions of the slider."""
    def __init__(self, parent, main):
        super(Element, self).__init__(parent)
        self.main = main  # owning QRangeSlider
    def setStyleSheet(self, style):
        """Redirect style sheets to the parent group box."""
        self.parent().setStyleSheet(style)
    def textColor(self):
        """Text paint color (default: mid grey)."""
        return getattr(self, '__textColor', QtGui.QColor(125, 125, 125))
    def setTextColor(self, color):
        """Set the text paint color; accepts QColor, (r, g, b) or grey int."""
        if type(color) == tuple and len(color) == 3:
            color = QtGui.QColor(color[0], color[1], color[2])
        elif type(color) == int:
            color = QtGui.QColor(color, color, color)
        setattr(self, '__textColor', color)
    def paintEvent(self, event):
        """Paint the region, drawing values only when the slider asks."""
        qp = QtGui.QPainter()
        qp.begin(self)
        if self.main.drawValues():
            self.drawText(event, qp)
        qp.end()
class Head(Element):
    """Region left of the handle; paints the slider minimum."""
    def __init__(self, parent, main):
        super(Head, self).__init__(parent, main)
    def drawText(self, event, qp):
        # left-aligned minimum value inside the head region
        qp.setPen(self.textColor())
        qp.setFont(QtGui.QFont('Arial', 10))
        qp.drawText(event.rect(), QtCore.Qt.AlignLeft, str(self.main.min()))
class Tail(Element):
    """Region right of the handle; paints the slider maximum."""
    def __init__(self, parent, main):
        super(Tail, self).__init__(parent, main)
    def drawText(self, event, qp):
        # right-aligned maximum value inside the tail region
        qp.setPen(self.textColor())
        qp.setFont(QtGui.QFont('Arial', 10))
        qp.drawText(event.rect(), QtCore.Qt.AlignRight, str(self.main.max()))
class Handle(Element):
    """The draggable span between the two splitter handles."""
    def __init__(self, parent, main):
        super(Handle, self).__init__(parent, main)
    def drawText(self, event, qp):
        # start value on the left edge, end value on the right edge
        qp.setPen(self.textColor())
        qp.setFont(QtGui.QFont('Arial', 10))
        qp.drawText(event.rect(), QtCore.Qt.AlignLeft, str(self.main.start()))
        qp.drawText(event.rect(), QtCore.Qt.AlignRight, str(self.main.end()))
    def mouseMoveEvent(self, event):
        """Drag the whole span one step per event in the direction of
        horizontal mouse travel."""
        event.accept()
        mx = event.globalX()
        # previous global x is stashed on the instance; absent on first move
        _mx = getattr(self, '__mx', None)
        if not _mx:
            setattr(self, '__mx', mx)
            dx = 0
        else:
            dx = mx - _mx
            setattr(self, '__mx', mx)
        if dx == 0:
            event.ignore()
            return
        elif dx > 0:
            dx = 1
        elif dx < 0:
            dx = -1
        # only shift when both ends stay inside [min, max]
        s = self.main.start() + dx
        e = self.main.end() + dx
        if s >= self.main.min() and e <= self.main.max():
            self.main.setRange(s, e)
class QRangeSlider(QtGui.QWidget, Ui_Form):
    """Horizontal range slider widget built on a 3-pane QSplitter.

    Integer range is [min(), max()], selected span is [start(), end()].
    Emits minValueChanged / maxValueChanged / startValueChanged /
    endValueChanged (all int).  Styled via Qt style sheets on the
    #Head / #Span / #Tail group boxes and on the QSplitter handles.
    """
    endValueChanged = QtCore.pyqtSignal(int)
    maxValueChanged = QtCore.pyqtSignal(int)
    minValueChanged = QtCore.pyqtSignal(int)
    startValueChanged = QtCore.pyqtSignal(int)
    # splitter handle numbers for the two span edges
    _SPLIT_START = 1
    _SPLIT_END = 2
    def __init__(self, parent=None):
        """Build the UI, wire splitter movement, and set defaults 0..99."""
        super(QRangeSlider, self).__init__(parent)
        self.setupUi(self)
        self.setMouseTracking(False)
        self._splitter.splitterMoved.connect(self._handleMoveSplitter)
        # head layout
        self._head_layout = QtGui.QHBoxLayout()
        self._head_layout.setSpacing(0)
        self._head_layout.setMargin(0)
        self._head.setLayout(self._head_layout)
        self.head = Head(self._head, main=self)
        self._head_layout.addWidget(self.head)
        # handle layout
        self._handle_layout = QtGui.QHBoxLayout()
        self._handle_layout.setSpacing(0)
        self._handle_layout.setMargin(0)
        self._handle.setLayout(self._handle_layout)
        self.handle = Handle(self._handle, main=self)
        self.handle.setTextColor((150, 255, 150))
        self._handle_layout.addWidget(self.handle)
        # tail layout
        self._tail_layout = QtGui.QHBoxLayout()
        self._tail_layout.setSpacing(0)
        self._tail_layout.setMargin(0)
        self._tail.setLayout(self._tail_layout)
        self.tail = Tail(self._tail, main=self)
        self._tail_layout.addWidget(self.tail)
        # defaults
        self.setMin(0)
        self.setMax(99)
        self.setStart(0)
        self.setEnd(99)
        self.setDrawValues(True)
    def min(self):
        """Return the minimum value."""
        return getattr(self, '__min', None)
    def max(self):
        """Return the maximum value."""
        return getattr(self, '__max', None)
    def setMin(self, value):
        """Set the minimum value and emit minValueChanged."""
        assert type(value) is int
        setattr(self, '__min', value)
        self.minValueChanged.emit(value)
    def setMax(self, value):
        """Set the maximum value and emit maxValueChanged."""
        assert type(value) is int
        setattr(self, '__max', value)
        self.maxValueChanged.emit(value)
    def start(self):
        """Return the range slider start value."""
        return getattr(self, '__start', None)
    def end(self):
        """Return the range slider end value."""
        return getattr(self, '__end', None)
    def _setStart(self, value):
        """Store the start value only (no splitter move)."""
        setattr(self, '__start', value)
        self.startValueChanged.emit(value)
    def setStart(self, value):
        """Set the start value and move the splitter handle to match."""
        assert type(value) is int
        v = self._valueToPos(value)
        self._splitter.moveSplitter(v, self._SPLIT_START)
        self._setStart(value)
    def _setEnd(self, value):
        """Store the end value only (no splitter move)."""
        setattr(self, '__end', value)
        self.endValueChanged.emit(value)
    def setEnd(self, value):
        """Set the end value and move the splitter handle to match."""
        assert type(value) is int
        v = self._valueToPos(value)
        self._splitter.moveSplitter(v, self._SPLIT_END)
        self._setEnd(value)
    def drawValues(self):
        """Return True if slider values will be drawn."""
        return getattr(self, '__drawValues', None)
    def setDrawValues(self, draw):
        """Enable/disable painting of the numeric values."""
        assert type(draw) is bool
        setattr(self, '__drawValues', draw)
    def getRange(self):
        """Return the (start, end) values as a tuple."""
        return (self.start(), self.end())
    def setRange(self, start, end):
        """Set both the start and end values."""
        self.setStart(start)
        self.setEnd(end)
    def keyPressEvent(self, event):
        """Move the whole range left/right with the arrow keys."""
        key = event.key()
        if key == QtCore.Qt.Key_Left:
            s = self.start()-1
            e = self.end()-1
        elif key == QtCore.Qt.Key_Right:
            s = self.start()+1
            e = self.end()+1
        else:
            event.ignore()
            return
        event.accept()
        # only shift when the whole span stays inside [min, max]
        if s >= self.min() and e <= self.max():
            self.setRange(s, e)
    def setBackgroundStyle(self, style):
        """Apply a style sheet to the head and tail regions."""
        self._tail.setStyleSheet(style)
        self._head.setStyleSheet(style)
    def setSpanStyle(self, style):
        """Apply a style sheet to the span (handle) region."""
        self._handle.setStyleSheet(style)
    def _valueToPos(self, value):
        """Convert a slider value to a local pixel x coordinate."""
        return int(self.width() * (float(value) / self.max()))
    def _posToValue(self, xpos):
        """Convert a local pixel x coordinate to a slider value."""
        return int(((xpos + self._splitter.handleWidth()) / float(self.width())) * self.max())
    def _handleMoveSplitter(self, xpos, index):
        """Handle splitterMoved: update start/end from the dragged handle."""
        hw = self._splitter.handleWidth()
        def _lockWidth(widget):
            # freeze a pane so dragging one handle cannot push the other
            width = widget.size().width()
            widget.setMinimumWidth(width)
            widget.setMaximumWidth(width)
        def _unlockWidth(widget):
            widget.setMinimumWidth(0)
            widget.setMaximumWidth(16777215)
        v = self._posToValue(xpos)
        if index == self._SPLIT_START:
            _lockWidth(self._tail)
            if v >= self.end():
                return
            # NOTE(review): 'offset'/'w' (and 'hw' above) are computed but
            # never used -- apparent leftovers from an older implementation.
            offset = -20
            w = xpos + offset
            self._setStart(v)
        elif index == self._SPLIT_END:
            _lockWidth(self._head)
            if v <= self.start():
                return
            offset = -40
            w = self.width() - xpos + offset
            self._setEnd(v)
        _unlockWidth(self._tail)
        _unlockWidth(self._head)
        _unlockWidth(self._handle)
if __name__ == '__main__':
    # standalone demo: slider spanning [15, 35] with a dark gradient
    # background and a green gradient span
    app = QtGui.QApplication(sys.argv)
    rs = QRangeSlider()
    rs.show()
    rs.setRange(15, 35)
    rs.setBackgroundStyle('background: qlineargradient(x1:0, y1:0, x2:0, y2:1, stop:0 #222, stop:1 #333);')
    rs.handle.setStyleSheet('background: qlineargradient(x1:0, y1:0, x2:0, y2:1, stop:0 #282, stop:1 #393);')
    app.exec_()
| true | true |
f720c1aa5ab9a0a14470949b5e358729876b9eb7 | 11,985 | py | Python | solar_monitor.py | weidnerm/solar_data_monitor | 48bcf9b45ab911bdb7af3dff17d28c8f16d2c925 | [
"MIT"
] | null | null | null | solar_monitor.py | weidnerm/solar_data_monitor | 48bcf9b45ab911bdb7af3dff17d28c8f16d2c925 | [
"MIT"
] | null | null | null | solar_monitor.py | weidnerm/solar_data_monitor | 48bcf9b45ab911bdb7af3dff17d28c8f16d2c925 | [
"MIT"
] | null | null | null |
#!/usr/bin/python
from Subfact_ina219 import INA219
import time
import os
import glob
import Tkinter as tk
import math
import copy
from OneFifo import OneFifo
import json
import socket
import select
from SolarMonitor import SolarMonitor
from SolarSensors import SolarSensors
from SolarServer import SolarServer
from SolarDb import SolarDb
def orig_main():
ina = INA219()
result = ina.getBusVoltage_V()
print "Shunt : %.3f mV" % ina.getShuntVoltage_mV()
print "Bus : %.3f V" % ina.getBusVoltage_V()
print "Current : %.3f mA" % ina.getCurrent_mA()
class Solar:
    """Ties the sensor bank, the rotating CSV day-log (SolarDb) and a
    timestamp source together: sample, pretty-print and record readings."""
    def __init__(self, sensors, timestamper, filenamePrefix="solarLog_"):
        # sensors     : SolarSensors-style object exposing getData()
        # timestamper : supplies getDate()/getTime() strings for log rows
        self.m_SolarSensors = sensors;
        self.m_SolarDb = SolarDb(filenamePrefix);
        self.m_Timestamper = timestamper;
    def gatherData(self):
        """Take one snapshot of every sensor channel."""
        data = self.m_SolarSensors.getData();
        return data;
    def formatPrintData(self, results):
        """Format a reading as 4 aligned text rows (names, V, mA, mW).
        Columns are emitted in channel order 0,1,2,4,5,3 (load last)."""
        returnValue = []
        returnValue.append( "%-20s %-20s %-20s %-20s %-20s %-20s" % (results["names"][0],results["names"][1],results["names"][2],results["names"][4],results["names"][5],results["names"][3]));
        returnValue.append( "%2.3f V             %2.3f V             %2.3f V             %2.3f V             %2.3f V             %2.3f V" % (results["voltage"][0],results["voltage"][1],results["voltage"][2],results["voltage"][4],results["voltage"][5],results["voltage"][3]));
        returnValue.append( "%5.0f mA            %5.0f mA            %5.0f mA            %5.0f mA            %5.0f mA            %5.0f mA" % (results["current"][0],results["current"][1],results["current"][2],results["current"][4],results["current"][5],results["current"][3]));
        returnValue.append( "%5.0f mW            %5.0f mW            %5.0f mW            %5.0f mW            %5.0f mW            %5.0f mW" % (results["voltage"][0]*results["current"][0],results["voltage"][1]*results["current"][1],results["voltage"][2]*results["current"][2],results["voltage"][4]*results["current"][4],results["voltage"][5]*results["current"][5],results["voltage"][3]*results["current"][3]));
        return returnValue;
    def printResults(self, results):
        """Print the formatted reading, preceded by a blank line."""
        text = self.formatPrintData(results)
        print;
        for index in xrange(len(text)):
            print(text[index]);
    def recordData(self,data):
        """Append one timestamped reading; returns SolarDb's roll-over flag."""
        rollOver = self.m_SolarDb.addEntry(self.m_Timestamper.getDate(), self.m_Timestamper.getTime(), data );
        return rollOver
    def getEmptyStatsDB(self):
        """Return a fresh per-channel stats list (6 channels, all zeroes)."""
        results = []
        for channelIndex in xrange(6):
            tempVal = {}
            tempVal["minEnergy"] = 0
            tempVal["maxEnergy"] = 0
            tempVal["cumulativeEnergy"] = 0
            results.append(tempVal);
        return results
    def computeNetPower(self, data, prevPwr=None):
        """Integrate current over time per channel (units: mA*s of charge)
        tracking running min/max/cumulative; pass prevPwr to continue the
        integration across multiple day logs."""
        if prevPwr == None:
            results = self.getEmptyStatsDB()
        else:
            results = prevPwr
        for channelIndex in xrange(6):
            for index in xrange( len(data[channelIndex]["voltage"])-1 ):
                timeDelta = self.convertTimeString( data[channelIndex]["time"][index+1]) - self.convertTimeString(data[channelIndex]["time"][index])
                # skip sample gaps longer than 12 s (dropouts / day breaks)
                if (timeDelta <= 12 ):
                    # power=data[channelIndex]["voltage"][index] * data[channelIndex]["current"][index]
                    power=data[channelIndex]["current"][index] # use mAHr for power.
                    energy = power*timeDelta
                    results[channelIndex]["cumulativeEnergy"] = results[channelIndex]["cumulativeEnergy"] + energy
                    if results[channelIndex]["cumulativeEnergy"] < results[channelIndex]["minEnergy"]:
                        results[channelIndex]["minEnergy"] = results[channelIndex]["cumulativeEnergy"];
                    elif results[channelIndex]["cumulativeEnergy"] > results[channelIndex]["maxEnergy"]:
                        results[channelIndex]["maxEnergy"] = results[channelIndex]["cumulativeEnergy"]
        # report in mAHr (mA*s / 3600)
        for channelIndex in xrange(6):
            print("minEnergy=%.1f mAHr    maxEnergy=%.1f mAHr    cumulative=%.1f mAHr" % ( results[channelIndex]["minEnergy"]/3600.0, results[channelIndex]["maxEnergy"]/3600.0, results[channelIndex]["cumulativeEnergy"]/3600.0))
        print
        return results
    def convertTimeString(self, time):
        """Convert an "HH:MM:SS" string to seconds since midnight."""
        timeSec = 0;
        timeSec = timeSec + int(time[0:2])*60*60
        timeSec = timeSec + int(time[3:5])*60
        timeSec = timeSec + int(time[6:8])
        return timeSec
class TimestamperInterface:
    """Timestamp provider interface; lets tests inject fixed date/time."""
    def getDate(self):
        """Return a date string, format "YYYY_MM_DD"."""
        pass;
    def getTime(self):
        """Return a time string, format "HH:MM:SS"."""
        pass
class Timestamper(TimestamperInterface):
    """Wall-clock implementation of TimestamperInterface."""

    def getDate(self):
        """Current local date formatted as YYYY_MM_DD."""
        return time.strftime("%Y_%m_%d")

    def getTime(self):
        """Current local time formatted as HH:MM:SS."""
        return time.strftime("%H:%M:%S")
#class Application(tk.Frame):
class Application():
    def __init__(self, master=None):
        """Initialise plot geometry, cursor state and per-battery power
        accumulators (the Tk frame wiring is currently commented out)."""
        #tk.Frame.__init__(self, master)
        #self.grid(sticky=tk.N+tk.S+tk.E+tk.W)
        #self.createWidgets()
        self.plotData = None;
        # plot margins, in pixels
        self.leftPad = 40
        self.topPad = 10
        self.bottomPad = 30
        self.rightPad = 10
        self.currentParm = -1;
        self.currentFileIndex = 0;   # most recent
        self.firstPoint = 0
        self.lastPoint = 0;
        # latest instantaneous power readings, filled in later
        self.currentBatPwr = 0
        self.currentPanelPwr = 0
        self.currentLoadPwr = 0
        self.currentBatPwrList = []
        for index in xrange(4):
            self.currentBatPwrList.append(0)
        self.plotheight = 1; # dummy values.
        self.plotwidth = 1; # dummy values.
        self.todayStats = None
        self.batmap = [1,2,4,5] # list of channels that are batteries
def setSolar(self, solar):
self.mySolar = solar
(plotData, filename) = self.mySolar.m_SolarDb.readDayLog(self.currentFileIndex);
self.todayStats = self.mySolar.computeNetPower(plotData)
self.prevStats = None
for index in xrange(1,-1,-1): # fixme put back to 4,-1,-1
(plotData, filename) = self.mySolar.m_SolarDb.readDayLog(self.currentFileIndex+index);
print("processing %s" % filename)
self.prevStats = self.mySolar.computeNetPower(plotData, prevPwr=self.prevStats)
#~ def createWidgets(self):
#~ #
#~ # set up frames for the 6 sensors
#~ #
#~ top=self.winfo_toplevel()
#~ top.rowconfigure(0, weight=1)
#~ top.columnconfigure(0, weight=1)
#~ #
#~ # set up overall window frame
#~ #
#~ self.energy_LabelFrame = tk.LabelFrame(top, text="System Summary")
#~ self.energy_LabelFrame.grid(column=0, row=0, sticky=tk.N+tk.S+tk.E+tk.W)
#~ #
#~ # set up frames for the 6 sensors
#~ #
#~ self.energy_Col_LabelFrame = []
#~ labels = ["Batt 1","Batt 2","Batt 3","Batt 4","Today","Now"]
#~ for sensorIndex in xrange(6):
#~ myField = tk.LabelFrame(self.energy_LabelFrame, text=labels[sensorIndex] )
#~ myField.grid(column=sensorIndex, row=0, sticky=tk.N+tk.S+tk.E+tk.W)
#~ myField.rowconfigure(0, weight=1)
#~ myField.rowconfigure(1, weight=0)
#~ myField.columnconfigure(0, weight=1)
#~ self.energy_LabelFrame.rowconfigure(0, weight=1, minsize=100)
#~ self.energy_LabelFrame.columnconfigure(sensorIndex, weight=1, minsize=70)
#~ self.energy_Col_LabelFrame.append( myField )
#~ #
#~ # set canvas for each bar graph
#~ #
#~ self.energy_Col_graph_canvas = []
#~ for sensorIndex in xrange(6):
#~ myField = tk.Canvas(self.energy_Col_LabelFrame[sensorIndex], width=70, height=200)
#~ myField.grid(column=0,row=0, sticky=tk.E + tk.W + tk.N + tk.S )
#~ self.energy_Col_graph_canvas.append( myField )
#~ # myTextField = myField.create_text(anchor=tk.SW)
#~ #
#~ # add resize handler
#~ #
#~ #self.energy_Col_graph_canvas[0].bind("<Configure>", self.on_resize)
#~ #
#~ # set text fields for each bottom
#~ #
#~ self.energy_Col_Label = []
#~ self.energy_Col_text = []
#~ for sensorIndex in xrange(6):
#~ myStringVar = tk.StringVar()
#~ myStringVar.set("0 mA")
#~ myField = tk.Label(self.energy_Col_LabelFrame[sensorIndex], textvariable=myStringVar)
#~ myField.grid(column=0,row=1, sticky=tk.E + tk.W + tk.N + tk.S )
#~ self.energy_Col_Label.append( myField )
#~ self.energy_Col_text.append( myStringVar )
def accumulateEnergy(self, solarData):
# 0-panel; 1-bat 1; 2-bat 2; 3-load; 4-bat 3; 5-bat 4
powerInts = []
for index in xrange(6):
value = int(solarData["current"][index])
powerInts.append(value)
#~ bat_1_pwr = int(solarData["current"][1])
#~ bat_2_pwr = int(solarData["current"][2])
#~ bat_3_pwr = int(solarData["current"][4])
#~ bat_4_pwr = int(solarData["current"][5])
#~ self.currentBatPwrList.append( bat_1_pwr )
#~ self.currentBatPwrList.append( bat_2_pwr )
#~ self.currentBatPwrList.append( bat_3_pwr )
#~ self.currentBatPwrList.append( bat_4_pwr )
self.currentBatPwr = 0;
#~ self.currentBatPwrList = []
for index in xrange(4):
self.currentBatPwrList[index] = powerInts[self.batmap[index]]
self.currentBatPwr = self.currentBatPwr + self.currentBatPwrList[index]
panelPwr = powerInts[0]
loadPwr = powerInts[3]
self.currentPanelPwr = int( panelPwr )
self.currentLoadPwr = int( loadPwr )
# add new readings to totals; assume 1 second integration window
for index in xrange(6):
self.todayStats[index]["cumulativeEnergy"] = self.todayStats[index]["cumulativeEnergy"] + powerInts[index]
self.prevStats[index]["cumulativeEnergy"] = self.prevStats[index]["cumulativeEnergy"] + powerInts[index]
if self.prevStats[index]["cumulativeEnergy"] < self.prevStats[index]["minEnergy"]:
self.prevStats[index]["minEnergy"] = self.prevStats[index]["cumulativeEnergy"];
elif self.prevStats[index]["cumulativeEnergy"] > self.prevStats[index]["maxEnergy"]:
self.prevStats[index]["maxEnergy"] = self.prevStats[index]["cumulativeEnergy"]
def periodicEventHandler(self):
#self.after(1000,self.periodicEventHandler);
data = self.mySolar.gatherData();
self.accumulateEnergy(data);
#~ self.plotGraph()
rollOver = self.mySolar.recordData(data);
if rollOver:
self.todayStats = self.mySolar.getEmptyStatsDB() # we had a day rollover. reset the daily stats
self.mySolar.printResults(data)
self.mySolarServer.sendUpdate(data, self)
def main(config):
    """Build a SolarMonitor from the parsed *config* list and run it."""
    monitor = SolarMonitor(config)
    monitor.run()
if __name__ == "__main__":
    # Load the JSON configuration; the context manager guarantees the file
    # handle is closed (the original left it to manual close).
    with open("config.json", "r") as fp:
        config = json.load(fp)
    # Walk backwards so config.pop(index) does not shift unvisited entries.
    length = len(config)
    for index in range(length - 1, -1, -1):
        print('index=%d' % (index))
        if 'enable' in config[index]:
            if config[index]['enable'] != 1:
                dropped_entry = config.pop(index)
                print('dropping disabled entry from config')
                print(dropped_entry)
    main(config)
| 36.876923 | 413 | 0.596996 |
from Subfact_ina219 import INA219
import time
import os
import glob
import Tkinter as tk
import math
import copy
from OneFifo import OneFifo
import json
import socket
import select
from SolarMonitor import SolarMonitor
from SolarSensors import SolarSensors
from SolarServer import SolarServer
from SolarDb import SolarDb
def orig_main():
ina = INA219()
result = ina.getBusVoltage_V()
print "Shunt : %.3f mV" % ina.getShuntVoltage_mV()
print "Bus : %.3f V" % ina.getBusVoltage_V()
print "Current : %.3f mA" % ina.getCurrent_mA()
class Solar:
def __init__(self, sensors, timestamper, filenamePrefix="solarLog_"):
self.m_SolarSensors = sensors;
self.m_SolarDb = SolarDb(filenamePrefix);
self.m_Timestamper = timestamper;
def gatherData(self):
data = self.m_SolarSensors.getData();
return data;
def formatPrintData(self, results):
returnValue = []
returnValue.append( "%-20s %-20s %-20s %-20s %-20s %-20s" % (results["names"][0],results["names"][1],results["names"][2],results["names"][4],results["names"][5],results["names"][3]));
returnValue.append( "%2.3f V %2.3f V %2.3f V %2.3f V %2.3f V %2.3f V" % (results["voltage"][0],results["voltage"][1],results["voltage"][2],results["voltage"][4],results["voltage"][5],results["voltage"][3]));
returnValue.append( "%5.0f mA %5.0f mA %5.0f mA %5.0f mA %5.0f mA %5.0f mA" % (results["current"][0],results["current"][1],results["current"][2],results["current"][4],results["current"][5],results["current"][3]));
returnValue.append( "%5.0f mW %5.0f mW %5.0f mW %5.0f mW %5.0f mW %5.0f mW" % (results["voltage"][0]*results["current"][0],results["voltage"][1]*results["current"][1],results["voltage"][2]*results["current"][2],results["voltage"][4]*results["current"][4],results["voltage"][5]*results["current"][5],results["voltage"][3]*results["current"][3]));
return returnValue;
def printResults(self, results):
text = self.formatPrintData(results)
print;
for index in xrange(len(text)):
print(text[index]);
def recordData(self,data):
rollOver = self.m_SolarDb.addEntry(self.m_Timestamper.getDate(), self.m_Timestamper.getTime(), data );
return rollOver
def getEmptyStatsDB(self):
results = []
for channelIndex in xrange(6):
tempVal = {}
tempVal["minEnergy"] = 0
tempVal["maxEnergy"] = 0
tempVal["cumulativeEnergy"] = 0
results.append(tempVal);
return results
def computeNetPower(self, data, prevPwr=None):
if prevPwr == None:
results = self.getEmptyStatsDB()
else:
results = prevPwr
for channelIndex in xrange(6):
for index in xrange( len(data[channelIndex]["voltage"])-1 ):
timeDelta = self.convertTimeString( data[channelIndex]["time"][index+1]) - self.convertTimeString(data[channelIndex]["time"][index])
if (timeDelta <= 12 ):
power=data[channelIndex]["current"][index]
energy = power*timeDelta
results[channelIndex]["cumulativeEnergy"] = results[channelIndex]["cumulativeEnergy"] + energy
if results[channelIndex]["cumulativeEnergy"] < results[channelIndex]["minEnergy"]:
results[channelIndex]["minEnergy"] = results[channelIndex]["cumulativeEnergy"];
elif results[channelIndex]["cumulativeEnergy"] > results[channelIndex]["maxEnergy"]:
results[channelIndex]["maxEnergy"] = results[channelIndex]["cumulativeEnergy"]
for channelIndex in xrange(6):
print("minEnergy=%.1f mAHr maxEnergy=%.1f mAHr cumulative=%.1f mAHr" % ( results[channelIndex]["minEnergy"]/3600.0, results[channelIndex]["maxEnergy"]/3600.0, results[channelIndex]["cumulativeEnergy"]/3600.0))
print
return results
def convertTimeString(self, time):
timeSec = 0;
timeSec = timeSec + int(time[0:2])*60*60
timeSec = timeSec + int(time[3:5])*60
timeSec = timeSec + int(time[6:8])
return timeSec
class TimestamperInterface:
def getDate(self):
pass;
def getTime(self):
pass
class Timestamper(TimestamperInterface):
def getDate(self):
return (time.strftime("%Y_%m_%d"))
def getTime(self):
return (time.strftime("%H:%M:%S"))
class Application():
def __init__(self, master=None):
self.plotData = None;
self.leftPad = 40
self.topPad = 10
self.bottomPad = 30
self.rightPad = 10
self.currentParm = -1;
self.currentFileIndex = 0;
self.firstPoint = 0
self.lastPoint = 0;
self.currentBatPwr = 0
self.currentPanelPwr = 0
self.currentLoadPwr = 0
self.currentBatPwrList = []
for index in xrange(4):
self.currentBatPwrList.append(0)
self.plotheight = 1;
self.plotwidth = 1;
self.todayStats = None
self.batmap = [1,2,4,5]
def setSolar(self, solar):
self.mySolar = solar
(plotData, filename) = self.mySolar.m_SolarDb.readDayLog(self.currentFileIndex);
self.todayStats = self.mySolar.computeNetPower(plotData)
self.prevStats = None
for index in xrange(1,-1,-1):
(plotData, filename) = self.mySolar.m_SolarDb.readDayLog(self.currentFileIndex+index);
print("processing %s" % filename)
self.prevStats = self.mySolar.computeNetPower(plotData, prevPwr=self.prevStats)
def accumulateEnergy(self, solarData):
powerInts = []
for index in xrange(6):
value = int(solarData["current"][index])
powerInts.append(value)
self.currentBatPwr = 0;
for index in xrange(4):
self.currentBatPwrList[index] = powerInts[self.batmap[index]]
self.currentBatPwr = self.currentBatPwr + self.currentBatPwrList[index]
panelPwr = powerInts[0]
loadPwr = powerInts[3]
self.currentPanelPwr = int( panelPwr )
self.currentLoadPwr = int( loadPwr )
for index in xrange(6):
self.todayStats[index]["cumulativeEnergy"] = self.todayStats[index]["cumulativeEnergy"] + powerInts[index]
self.prevStats[index]["cumulativeEnergy"] = self.prevStats[index]["cumulativeEnergy"] + powerInts[index]
if self.prevStats[index]["cumulativeEnergy"] < self.prevStats[index]["minEnergy"]:
self.prevStats[index]["minEnergy"] = self.prevStats[index]["cumulativeEnergy"];
elif self.prevStats[index]["cumulativeEnergy"] > self.prevStats[index]["maxEnergy"]:
self.prevStats[index]["maxEnergy"] = self.prevStats[index]["cumulativeEnergy"]
def periodicEventHandler(self):
data = self.mySolar.gatherData();
self.accumulateEnergy(data);
rollOver = self.mySolar.recordData(data);
if rollOver:
self.todayStats = self.mySolar.getEmptyStatsDB()
self.mySolar.printResults(data)
self.mySolarServer.sendUpdate(data, self)
def main(config):
mySolarMonitor = SolarMonitor(config)
mySolarMonitor.run()
if __name__ == "__main__":
fp = open("config.json", "r")
config_string = fp.read()
fp.close()
config = json.loads(config_string)
length = len(config)
for index in range(length-1, -1, -1):
print('index=%d' % (index))
if 'enable' in config[index]:
if config[index]['enable'] != 1:
dropped_entry = config.pop(index)
print('dropping disabled entry from config')
print(dropped_entry)
main(config)
| false | true |
f720c25b3abb18927b7fd60019577787312ad4c2 | 3,406 | py | Python | backend/remap/predictors.py | hugocalcad/remap_rev | fa435784f897b7f4186b8ff703b3e08f48160b9f | [
"Apache-2.0"
] | 17 | 2018-08-30T22:46:47.000Z | 2021-12-23T08:19:50.000Z | backend/remap/predictors.py | red-list-ecosystem/REMAP | e1e60c56dad76dc1927af5f24a30cb28144a91c8 | [
"Apache-2.0"
] | 3 | 2019-11-01T13:58:19.000Z | 2021-03-11T10:21:51.000Z | backend/remap/predictors.py | hugocalcad/remap_rev | fa435784f897b7f4186b8ff703b3e08f48160b9f | [
"Apache-2.0"
] | 2 | 2017-11-29T02:40:03.000Z | 2017-12-20T22:00:37.000Z | predictors = [
{
"description": "todo",
"long_name": "Normalised Difference Vegetation index",
"short_name": "NDVI",
"type": "Index",
"ee_import": 'LANDSAT/LC8_SR',
"checked": True,
"vis": True,
"ramp": '000000, 00FF00'
},
{
"description": "todo",
"long_name": "Normalised Difference Water index",
"short_name": "NDWI",
"type": "Index",
"ee_import": 'LANDSAT/LC8_SR',
"checked": True,
"vis": True,
"ramp": '070467, 17ffed'
},
{
"description": "todo",
"long_name": "Water Band Index",
"type": "Index",
"ee_import": 'LANDSAT/LC8_SR',
"short_name": "WBI",
"vis": False
},
{
"description": "todo",
"long_name": "Blue band minus Red band",
"type": "Index",
"ee_import": 'LANDSAT/LC8_SR',
"short_name": "BR",
"vis": False
},
{
"description": "todo",
"long_name": "Normalised Difference Blue Green",
"short_name": "BG",
"type": "Index",
"ee_import": 'LANDSAT/LC8_SR',
"checked": True,
"vis": False
},
{
"description": "todo",
"long_name": " Blue band",
"short_name": "Blue",
"type": "Band Value",
"ee_import": 'LANDSAT/LC8_SR',
"checked": True,
"vis": False
},
{
"description": "todo",
"long_name": "Green band",
"short_name": "Green",
"type": "Band Value",
"ee_import": 'LANDSAT/LC8_SR',
"checked": True,
"vis": False
},
{
"description": "todo",
"long_name": "Red band",
"short_name": "Red",
"type": "Band Value",
"ee_import": 'LANDSAT/LC8_SR',
"checked": True,
"vis": False
},
{
"description": "todo",
"long_name": "Near Infrared band",
"short_name": "NIR",
"type": "Band Value",
"ee_import": 'LANDSAT/LC8_SR',
"checked": True,
"vis": True,
"ramp": '000000,ffffff',
},
{
"description": "todo",
"type": "Elevation",
"long_name": "SRTM Digital Elevation Data 30m",
"short_name": "Elevation",
"ee_import": 'USGS/SRTMGL1_003',
"checked": True,
"vis": True,
"ramp": "00a0b0,edc951,ed6841,cc2a36,4f372d"
},
{
"description": "todo",
"type": "Elevation",
"long_name": "SRTM Slope",
"short_name": "Slope",
"ee_import": 'USGS/SRTMGL1_003',
"checked": True,
"vis": True,
"ramp": "edc951,ed6841,cc2a36,4f372d,00a0b0"
},
{
"description": "todo",
"type": "BIOCLIM",
"long_name": "Mean Annual Temperature",
"ee_import": 'WORLDCLIM/V1/BIO',
"short_name": "Mean Annual Temperature",
"vis": True,
"ramp": "39018a,0090fe,98ff77,ffff0b,fa0100,590000"
},
{
"description": "todo",
"long_name": "Annual Precipitation",
"type": "BIOCLIM",
"ee_import": 'WORLDCLIM/V1/BIO',
"short_name": "Annual Precipitation",
"vis": True,
"ramp": 'ffffff,c7d6f7,00057a'
}
]
predictor_dict = {}
# build a dict for vis lookup later
for p in predictors:
predictor_dict[p['short_name']] = p
| 26 | 62 | 0.491486 | predictors = [
{
"description": "todo",
"long_name": "Normalised Difference Vegetation index",
"short_name": "NDVI",
"type": "Index",
"ee_import": 'LANDSAT/LC8_SR',
"checked": True,
"vis": True,
"ramp": '000000, 00FF00'
},
{
"description": "todo",
"long_name": "Normalised Difference Water index",
"short_name": "NDWI",
"type": "Index",
"ee_import": 'LANDSAT/LC8_SR',
"checked": True,
"vis": True,
"ramp": '070467, 17ffed'
},
{
"description": "todo",
"long_name": "Water Band Index",
"type": "Index",
"ee_import": 'LANDSAT/LC8_SR',
"short_name": "WBI",
"vis": False
},
{
"description": "todo",
"long_name": "Blue band minus Red band",
"type": "Index",
"ee_import": 'LANDSAT/LC8_SR',
"short_name": "BR",
"vis": False
},
{
"description": "todo",
"long_name": "Normalised Difference Blue Green",
"short_name": "BG",
"type": "Index",
"ee_import": 'LANDSAT/LC8_SR',
"checked": True,
"vis": False
},
{
"description": "todo",
"long_name": " Blue band",
"short_name": "Blue",
"type": "Band Value",
"ee_import": 'LANDSAT/LC8_SR',
"checked": True,
"vis": False
},
{
"description": "todo",
"long_name": "Green band",
"short_name": "Green",
"type": "Band Value",
"ee_import": 'LANDSAT/LC8_SR',
"checked": True,
"vis": False
},
{
"description": "todo",
"long_name": "Red band",
"short_name": "Red",
"type": "Band Value",
"ee_import": 'LANDSAT/LC8_SR',
"checked": True,
"vis": False
},
{
"description": "todo",
"long_name": "Near Infrared band",
"short_name": "NIR",
"type": "Band Value",
"ee_import": 'LANDSAT/LC8_SR',
"checked": True,
"vis": True,
"ramp": '000000,ffffff',
},
{
"description": "todo",
"type": "Elevation",
"long_name": "SRTM Digital Elevation Data 30m",
"short_name": "Elevation",
"ee_import": 'USGS/SRTMGL1_003',
"checked": True,
"vis": True,
"ramp": "00a0b0,edc951,ed6841,cc2a36,4f372d"
},
{
"description": "todo",
"type": "Elevation",
"long_name": "SRTM Slope",
"short_name": "Slope",
"ee_import": 'USGS/SRTMGL1_003',
"checked": True,
"vis": True,
"ramp": "edc951,ed6841,cc2a36,4f372d,00a0b0"
},
{
"description": "todo",
"type": "BIOCLIM",
"long_name": "Mean Annual Temperature",
"ee_import": 'WORLDCLIM/V1/BIO',
"short_name": "Mean Annual Temperature",
"vis": True,
"ramp": "39018a,0090fe,98ff77,ffff0b,fa0100,590000"
},
{
"description": "todo",
"long_name": "Annual Precipitation",
"type": "BIOCLIM",
"ee_import": 'WORLDCLIM/V1/BIO',
"short_name": "Annual Precipitation",
"vis": True,
"ramp": 'ffffff,c7d6f7,00057a'
}
]
predictor_dict = {}
for p in predictors:
predictor_dict[p['short_name']] = p
| true | true |
f720c41c798b6b469d19eb94e5aad777e60b831a | 2,865 | py | Python | getListOfEvents.py | chiara-rizzi/Optimization | 6dd5bfcfc74d3cf7e90e313f107a4b1c414a6219 | [
"MIT"
] | 3 | 2017-03-25T00:38:14.000Z | 2018-03-13T15:05:38.000Z | getListOfEvents.py | chiara-rizzi/Optimization | 6dd5bfcfc74d3cf7e90e313f107a4b1c414a6219 | [
"MIT"
] | 21 | 2017-01-13T03:29:52.000Z | 2019-09-10T01:27:17.000Z | getListOfEvents.py | chiara-rizzi/Optimization | 6dd5bfcfc74d3cf7e90e313f107a4b1c414a6219 | [
"MIT"
] | 7 | 2017-03-25T00:38:00.000Z | 2021-04-07T04:31:25.000Z | from optimize import logger, get_ttree, selection_to_branches, tree_get_branches, cuts_to_selection
import json
import root_numpy as rnp
import glob
import itertools
import numexpr as ne
import numpy as np
import os
from collections import defaultdict
# Regions whose supercut files should be ignored when building the table.
skipRegions = ["old", "SR", "VR0"]
# Supercut definition files, sorted by the numeric suffix in "<name>-<n>.json".
regions = sorted([region for region in glob.glob('supercuts/*-*.json') if all([skipRegion not in region for skipRegion in skipRegions])], key=lambda x: int(x.split('.')[0].split('-')[1]))
# Maps event number -> list of regions whose selection the event passed.
eventNumbers = defaultdict(list)
tree_name = 'oTree'
# NOTE(review): the "weight" branch here is the event number, so the
# selection*weight product below yields the event number for passing events.
eventWeightBranch = 'event_number'
files = glob.glob("TA02_MBJ13V4-6/ttbarExc_0L/fetch/data-optimizationTree/*407012*.root")
for region in regions:
    supercuts = json.load(file(region))
    tree = get_ttree(tree_name, files, eventWeightBranch)
    # Branches referenced by the region's selections plus the weight branch.
    branchesSpecified = list(set(itertools.chain.from_iterable(selection_to_branches(supercut['selections']) for supercut in supercuts)))
    eventWeightBranchesSpecified = list(set(selection_to_branches(eventWeightBranch)))
    # get actual list of branches in the file
    availableBranches = tree_get_branches(tree, eventWeightBranchesSpecified)
    # remove anything that doesn't exist
    branchesToUse = [branch for branch in branchesSpecified if branch in availableBranches]
    branchesSkipped = list(set(branchesSpecified) - set(branchesToUse))
    if branchesSkipped:
        logger.info("The following branches have been skipped...")
        for branch in branchesSkipped:
            logger.info("\t{0:s}".format(branch))
    tree = rnp.tree2array(tree, branches=eventWeightBranchesSpecified+branchesToUse)
    entireSelection = '{0:s}*{1:s}'.format(eventWeightBranch, cuts_to_selection(supercuts))
    # Non-zero entries are the event numbers of events passing the selection.
    result = ne.evaluate(entireSelection, local_dict = tree)
    for event_number in result[np.where(result!=0)]:
        eventNumbers[event_number].append(region)
        # print "\t", event_number
# Overlap table: regions are paired in consecutive columns (i, i+1);
# a "row overlap" means an event passed both halves of a pair.
overlapsByColumn = [0]*len(regions)
atLeastOneOverlap = 0
print "{0:s}\t\t{1:s}\t| {2:s}".format("Event #", "\t".join(map(lambda x: os.path.basename(x).split('.')[0], regions)), "# Overlaps")
print "-"*80
for event_number, in_regions in eventNumbers.iteritems():
    overlaps = [bool(region in in_regions) for region in regions]
    numOverlapsInRow = 0
    for i in range(0, len(overlaps), 2):
        numOverlapsInRow += overlaps[i]&overlaps[i+1]
        overlapsByColumn[i] += overlaps[i]&overlaps[i+1]
    print "{0:d}\t\t{1:s}\t| {2:>10d}".format(event_number, "\t".join(("x" if overlap else "") for overlap in overlaps), numOverlapsInRow)
    if numOverlapsInRow > 0:
        atLeastOneOverlap += 1
print "-"*80
# Rewrite the odd columns as overlap fractions of all tabulated events.
# NOTE(review): this loop reuses "overlaps" left over from the last loop
# iteration, so it breaks if eventNumbers is empty — verify upstream.
for i in range(0, len(overlaps), 2):
    overlapsByColumn[i+1] = round(float(overlapsByColumn[i])/len(eventNumbers), 2)
print "{0:s}\t{1:s}\t| {2:>10d}".format("{0:d} evts".format(len(eventNumbers)), "\t".join(map(str, overlapsByColumn)), atLeastOneOverlap)
| 42.761194 | 187 | 0.722164 | from optimize import logger, get_ttree, selection_to_branches, tree_get_branches, cuts_to_selection
import json
import root_numpy as rnp
import glob
import itertools
import numexpr as ne
import numpy as np
import os
from collections import defaultdict
skipRegions = ["old", "SR", "VR0"]
regions = sorted([region for region in glob.glob('supercuts/*-*.json') if all([skipRegion not in region for skipRegion in skipRegions])], key=lambda x: int(x.split('.')[0].split('-')[1]))
eventNumbers = defaultdict(list)
tree_name = 'oTree'
eventWeightBranch = 'event_number'
files = glob.glob("TA02_MBJ13V4-6/ttbarExc_0L/fetch/data-optimizationTree/*407012*.root")
for region in regions:
supercuts = json.load(file(region))
tree = get_ttree(tree_name, files, eventWeightBranch)
branchesSpecified = list(set(itertools.chain.from_iterable(selection_to_branches(supercut['selections']) for supercut in supercuts)))
eventWeightBranchesSpecified = list(set(selection_to_branches(eventWeightBranch)))
availableBranches = tree_get_branches(tree, eventWeightBranchesSpecified)
branchesToUse = [branch for branch in branchesSpecified if branch in availableBranches]
branchesSkipped = list(set(branchesSpecified) - set(branchesToUse))
if branchesSkipped:
logger.info("The following branches have been skipped...")
for branch in branchesSkipped:
logger.info("\t{0:s}".format(branch))
tree = rnp.tree2array(tree, branches=eventWeightBranchesSpecified+branchesToUse)
entireSelection = '{0:s}*{1:s}'.format(eventWeightBranch, cuts_to_selection(supercuts))
result = ne.evaluate(entireSelection, local_dict = tree)
for event_number in result[np.where(result!=0)]:
eventNumbers[event_number].append(region)
# print "\t", event_number
overlapsByColumn = [0]*len(regions)
atLeastOneOverlap = 0
print "{0:s}\t\t{1:s}\t| {2:s}".format("Event #", "\t".join(map(lambda x: os.path.basename(x).split('.')[0], regions)), "# Overlaps")
print "-"*80
for event_number, in_regions in eventNumbers.iteritems():
overlaps = [bool(region in in_regions) for region in regions]
numOverlapsInRow = 0
for i in range(0, len(overlaps), 2):
numOverlapsInRow += overlaps[i]&overlaps[i+1]
overlapsByColumn[i] += overlaps[i]&overlaps[i+1]
print "{0:d}\t\t{1:s}\t| {2:>10d}".format(event_number, "\t".join(("x" if overlap else "") for overlap in overlaps), numOverlapsInRow)
if numOverlapsInRow > 0:
atLeastOneOverlap += 1
print "-"*80
for i in range(0, len(overlaps), 2):
overlapsByColumn[i+1] = round(float(overlapsByColumn[i])/len(eventNumbers), 2)
print "{0:s}\t{1:s}\t| {2:>10d}".format("{0:d} evts".format(len(eventNumbers)), "\t".join(map(str, overlapsByColumn)), atLeastOneOverlap)
| false | true |
f720c4b0ba8a3112b5e4c2e356fdfa9e370b254c | 11,793 | py | Python | test/unit/mongo_class/server_connect.py | mjpernot/mongo-lib | be8aa4f0cbf7fdf475bf67c07df813ffc560c3ef | [
"MIT"
] | null | null | null | test/unit/mongo_class/server_connect.py | mjpernot/mongo-lib | be8aa4f0cbf7fdf475bf67c07df813ffc560c3ef | [
"MIT"
] | null | null | null | test/unit/mongo_class/server_connect.py | mjpernot/mongo-lib | be8aa4f0cbf7fdf475bf67c07df813ffc560c3ef | [
"MIT"
] | null | null | null | #!/usr/bin/python
# Classification (U)
"""Program: server_connect.py
Description: Unit testing of Server.connect in mongo_class.py.
Usage:
test/unit/mongo_class/server_connect.py
Arguments:
"""
# Libraries and Global Variables
# Standard
import sys
import os
if sys.version_info < (2, 7):
import unittest2 as unittest
else:
import unittest
# Third-party
import mock
# Local
sys.path.append(os.getcwd())
import mongo_class
import version
# Re-export the package version for test tooling.
__version__ = version.__version__
class UnitTest(unittest.TestCase):

    """Class: UnitTest

    Description: Class which is a representation of a unit testing.

    Methods:
        setUp
        test_auth_mech3
        test_auth_mech2
        test_auth_mech
        test_conn_false2
        test_conn_false
        test_conn_true2
        test_conn_true
        test_fail_get_srv_attr2
        test_fail_get_srv_attr
        test_auth_arg4
        test_auth_arg3
        test_auth_arg2
        test_auth_arg
        test_no_auth2
        test_no_auth

    """

    def setUp(self):

        """Function: setUp

        Description: Initialization for unit testing - shared connection
            fixtures used by every test case.

        Arguments:

        """

        self.name = "Mongo_Server"
        self.user = "mongo_user"
        self.japd = "mongo_pd"
        self.host = "host_server"
        self.port = 27017
        self.dbs = "test"
        self.coll = None
        self.db_auth = None
        self.conf_file = "Conf_File"
        self.errmsg = "Error Message"
        # The two authentication mechanisms exercised by the auth_mech tests.
        self.auth_mech = "SCRAM-SHA-1"
        self.auth_mech2 = "MONGODB-CR"

    @mock.patch("mongo_class.pymongo.MongoClient")
    @mock.patch("mongo_class.Server.get_srv_attr")
    def test_auth_mech3(self, mock_cmd, mock_client):

        """Function: test_auth_mech3

        Description: Test with auth_mech set to SCRAM-SHA-1; verifies the
            attribute is retained after connect().

        Arguments:

        """

        # Mock a successful server attribute fetch and client creation.
        mock_cmd.return_value = (True, None)
        mock_client.return_value = True
        mongo = mongo_class.Server(
            self.name, self.user, self.japd, host=self.host, port=self.port,
            auth=True, use_arg=True, auth_mech=self.auth_mech)
        mongo.conn = False
        mongo.connect()
        self.assertEqual(
            (mongo.name, mongo.user, mongo.japd, mongo.host, mongo.port,
             mongo.auth_mech),
            (self.name, self.user, self.japd, self.host, self.port,
             self.auth_mech))

    @mock.patch("mongo_class.pymongo.MongoClient")
    @mock.patch("mongo_class.Server.get_srv_attr")
    def test_auth_mech2(self, mock_cmd, mock_client):

        """Function: test_auth_mech2

        Description: Test with auth_mech set to MONGODB-CR; verifies the
            attribute is retained after connect().

        Arguments:

        """

        # Mock a successful server attribute fetch and client creation.
        mock_cmd.return_value = (True, None)
        mock_client.return_value = True
        mongo = mongo_class.Server(
            self.name, self.user, self.japd, host=self.host, port=self.port,
            auth=True, use_arg=True, auth_mech=self.auth_mech2)
        mongo.conn = False
        mongo.connect()
        self.assertEqual(
            (mongo.name, mongo.user, mongo.japd, mongo.host, mongo.port,
             mongo.auth_mech),
            (self.name, self.user, self.japd, self.host, self.port,
             self.auth_mech2))

    @mock.patch("mongo_class.pymongo.MongoClient")
    @mock.patch("mongo_class.Server.get_srv_attr")
    def test_auth_mech(self, mock_cmd, mock_client):

        """Function: test_auth_mech

        Description: Test with auth_mech default; the default is expected
            to be SCRAM-SHA-1.

        Arguments:

        """

        # Mock a successful server attribute fetch and client creation.
        mock_cmd.return_value = (True, None)
        mock_client.return_value = True
        mongo = mongo_class.Server(
            self.name, self.user, self.japd, host=self.host, port=self.port,
            auth=True, use_arg=True)
        mongo.conn = False
        mongo.connect()
        self.assertEqual(
            (mongo.name, mongo.user, mongo.japd, mongo.host, mongo.port,
             mongo.auth_mech),
            (self.name, self.user, self.japd, self.host, self.port,
             self.auth_mech))

    @mock.patch("mongo_class.pymongo.MongoClient")
    @mock.patch("mongo_class.Server.get_srv_attr")
    def test_conn_false2(self, mock_cmd, mock_client):

        """Function: test_conn_false2

        Description: Test with conn set to False; verifies the connection
            attributes after connect().

        Arguments:

        """

        # Mock a successful server attribute fetch and client creation.
        mock_cmd.return_value = (True, None)
        mock_client.return_value = True
        mongo = mongo_class.Server(
            self.name, self.user, self.japd, host=self.host, port=self.port,
            auth=True, use_arg=True)
        mongo.conn = False
        mongo.connect()
        self.assertEqual(
            (mongo.name, mongo.user, mongo.japd, mongo.host, mongo.port),
            (self.name, self.user, self.japd, self.host, self.port))

    @mock.patch("mongo_class.pymongo.MongoClient")
    @mock.patch("mongo_class.Server.get_srv_attr")
    def test_conn_false(self, mock_cmd, mock_client):

        """Function: test_conn_false

        Description: Test with conn set to False; verifies connect()
            returns (True, None).

        Arguments:

        """

        # Mock a successful server attribute fetch and client creation.
        mock_cmd.return_value = (True, None)
        mock_client.return_value = True
        mongo = mongo_class.Server(
            self.name, self.user, self.japd, host=self.host, port=self.port,
            auth=True, use_arg=True)
        mongo.conn = False
        self.assertEqual(mongo.connect(), (True, None))

    @mock.patch("mongo_class.pymongo.MongoClient")
    @mock.patch("mongo_class.Server.get_srv_attr")
    def test_conn_true2(self, mock_cmd, mock_client):

        """Function: test_conn_true2

        Description: Test with conn set to True; verifies the connection
            attributes after connect().

        Arguments:

        """

        # Mock a successful server attribute fetch and client creation.
        mock_cmd.return_value = (True, None)
        mock_client.return_value = True
        mongo = mongo_class.Server(
            self.name, self.user, self.japd, host=self.host, port=self.port,
            auth=True, use_arg=True)
        mongo.conn = True
        mongo.connect()
        self.assertEqual(
            (mongo.name, mongo.user, mongo.japd, mongo.host, mongo.port),
            (self.name, self.user, self.japd, self.host, self.port))

    @mock.patch("mongo_class.pymongo.MongoClient")
    @mock.patch("mongo_class.Server.get_srv_attr")
    def test_conn_true(self, mock_cmd, mock_client):

        """Function: test_conn_true

        Description: Test with conn set to True; verifies connect()
            returns (True, None).

        Arguments:

        """

        # Mock a successful server attribute fetch and client creation.
        mock_cmd.return_value = (True, None)
        mock_client.return_value = True
        mongo = mongo_class.Server(
            self.name, self.user, self.japd, host=self.host, port=self.port,
            auth=True, use_arg=True)
        mongo.conn = True
        self.assertEqual(mongo.connect(), (True, None))

    @mock.patch("mongo_class.pymongo.MongoClient")
    @mock.patch("mongo_class.Server.get_srv_attr")
    def test_fail_get_srv_attr2(self, mock_cmd, mock_client):

        """Function: test_fail_get_srv_attr2

        Description: Test with failed get_srv_attr call; verifies the
            connection attributes are still set.

        Arguments:

        """

        # Mock a failed server attribute fetch (error status and message).
        mock_cmd.return_value = (False, self.errmsg)
        mock_client.return_value = True
        mongo = mongo_class.Server(
            self.name, self.user, self.japd, host=self.host, port=self.port)
        mongo.connect()
        self.assertEqual(
            (mongo.name, mongo.user, mongo.japd, mongo.host, mongo.port),
            (self.name, self.user, self.japd, self.host, self.port))

    @mock.patch("mongo_class.pymongo.MongoClient")
    @mock.patch("mongo_class.Server.get_srv_attr")
    def test_fail_get_srv_attr(self, mock_cmd, mock_client):

        """Function: test_fail_get_srv_attr

        Description: Test with failed get_srv_attr call; verifies the
            error status and message are propagated by connect().

        Arguments:

        """

        # Mock a failed server attribute fetch (error status and message).
        mock_cmd.return_value = (False, self.errmsg)
        mock_client.return_value = True
        mongo = mongo_class.Server(
            self.name, self.user, self.japd, host=self.host, port=self.port)
        self.assertEqual(mongo.connect(), (False, self.errmsg))

    @mock.patch("mongo_class.pymongo.MongoClient")
    @mock.patch("mongo_class.Server.get_srv_attr")
    def test_auth_arg4(self, mock_cmd, mock_client):

        """Function: test_auth_arg4

        Description: Test with arg present and no auth; verifies connect()
            returns (True, None).

        Arguments:

        """

        # Mock a successful server attribute fetch and client creation.
        mock_cmd.return_value = (True, None)
        mock_client.return_value = True
        mongo = mongo_class.Server(
            self.name, self.user, self.japd, host=self.host, port=self.port,
            auth=False)
        self.assertEqual(mongo.connect(), (True, None))

    @mock.patch("mongo_class.pymongo.MongoClient")
    @mock.patch("mongo_class.Server.get_srv_attr")
    def test_auth_arg3(self, mock_cmd, mock_client):

        """Function: test_auth_arg3

        Description: Test with arg present and no auth; verifies the
            connection attributes after connect().

        Arguments:

        """

        # Mock a successful server attribute fetch and client creation.
        mock_cmd.return_value = (True, None)
        mock_client.return_value = True
        mongo = mongo_class.Server(
            self.name, self.user, self.japd, host=self.host, port=self.port,
            auth=False)
        mongo.connect()
        self.assertEqual(
            (mongo.name, mongo.user, mongo.japd, mongo.host, mongo.port),
            (self.name, self.user, self.japd, self.host, self.port))

    @mock.patch("mongo_class.pymongo.MongoClient")
    @mock.patch("mongo_class.Server.get_srv_attr")
    def test_auth_arg2(self, mock_cmd, mock_client):

        """Function: test_auth_arg2

        Description: Test with auth and arg present; verifies connect()
            returns (True, None).

        Arguments:

        """

        # Mock a successful server attribute fetch and client creation.
        mock_cmd.return_value = (True, None)
        mock_client.return_value = True
        mongo = mongo_class.Server(
            self.name, self.user, self.japd, host=self.host, port=self.port)
        self.assertEqual(mongo.connect(), (True, None))

    @mock.patch("mongo_class.pymongo.MongoClient")
    @mock.patch("mongo_class.Server.get_srv_attr")
    def test_auth_arg(self, mock_cmd, mock_client):

        """Function: test_auth_arg

        Description: Test with auth and arg present; verifies the
            connection attributes after connect().

        Arguments:

        """

        # Mock a successful server attribute fetch and client creation.
        mock_cmd.return_value = (True, None)
        mock_client.return_value = True
        mongo = mongo_class.Server(
            self.name, self.user, self.japd, host=self.host, port=self.port)
        mongo.connect()
        self.assertEqual(
            (mongo.name, mongo.user, mongo.japd, mongo.host, mongo.port),
            (self.name, self.user, self.japd, self.host, self.port))

    @mock.patch("mongo_class.pymongo.MongoClient")
    @mock.patch("mongo_class.Server.get_srv_attr")
    def test_no_auth2(self, mock_cmd, mock_client):

        """Function: test_no_auth2

        Description: Test with no auth present; verifies the connection
            attributes after connect().

        Arguments:

        """

        # Mock a successful server attribute fetch and client creation.
        mock_cmd.return_value = (True, None)
        mock_client.return_value = True
        mongo = mongo_class.Server(self.name, self.user, self.japd,
                                   host=self.host, port=self.port, auth=False)
        mongo.connect()
        self.assertEqual(
            (mongo.name, mongo.user, mongo.japd, mongo.host, mongo.port),
            (self.name, self.user, self.japd, self.host, self.port))

    @mock.patch("mongo_class.pymongo.MongoClient")
    @mock.patch("mongo_class.Server.get_srv_attr")
    def test_no_auth(self, mock_cmd, mock_client):

        """Function: test_no_auth

        Description: Test with no auth present; verifies connect()
            returns (True, None).

        Arguments:

        """

        # Mock a successful server attribute fetch and client creation.
        mock_cmd.return_value = (True, None)
        mock_client.return_value = True
        mongo = mongo_class.Server(self.name, self.user, self.japd,
                                   host=self.host, port=self.port, auth=False)
        self.assertEqual(mongo.connect(), (True, None))
if __name__ == "__main__":
unittest.main()
| 27.425581 | 78 | 0.620114 |
import sys
import os
if sys.version_info < (2, 7):
import unittest2 as unittest
else:
import unittest
import mock
sys.path.append(os.getcwd())
import mongo_class
import version
# Mirror the library version in the test module.
__version__ = version.__version__
class UnitTest(unittest.TestCase):
    """Unit tests for mongo_class.Server.connect().

    pymongo.MongoClient and Server.get_srv_attr are mocked throughout, so
    each test checks only the connect() control flow and the attributes the
    Server instance retains.
    """
    def setUp(self):
        """Initialize the shared connection parameters used by every test."""
        self.name = "Mongo_Server"
        self.user = "mongo_user"
        self.japd = "mongo_pd"
        self.host = "host_server"
        self.port = 27017
        self.dbs = "test"
        self.coll = None
        self.db_auth = None
        self.conf_file = "Conf_File"
        self.errmsg = "Error Message"
        self.auth_mech = "SCRAM-SHA-1"
        self.auth_mech2 = "MONGODB-CR"
    @mock.patch("mongo_class.pymongo.MongoClient")
    @mock.patch("mongo_class.Server.get_srv_attr")
    def test_auth_mech3(self, mock_cmd, mock_client):
        """Test connect() with an explicit SCRAM-SHA-1 auth mechanism."""
        mock_cmd.return_value = (True, None)
        mock_client.return_value = True
        mongo = mongo_class.Server(
            self.name, self.user, self.japd, host=self.host, port=self.port,
            auth=True, use_arg=True, auth_mech=self.auth_mech)
        mongo.conn = False
        mongo.connect()
        self.assertEqual(
            (mongo.name, mongo.user, mongo.japd, mongo.host, mongo.port,
             mongo.auth_mech),
            (self.name, self.user, self.japd, self.host, self.port,
             self.auth_mech))
    @mock.patch("mongo_class.pymongo.MongoClient")
    @mock.patch("mongo_class.Server.get_srv_attr")
    def test_auth_mech2(self, mock_cmd, mock_client):
        """Test connect() with an explicit MONGODB-CR auth mechanism."""
        mock_cmd.return_value = (True, None)
        mock_client.return_value = True
        mongo = mongo_class.Server(
            self.name, self.user, self.japd, host=self.host, port=self.port,
            auth=True, use_arg=True, auth_mech=self.auth_mech2)
        mongo.conn = False
        mongo.connect()
        self.assertEqual(
            (mongo.name, mongo.user, mongo.japd, mongo.host, mongo.port,
             mongo.auth_mech),
            (self.name, self.user, self.japd, self.host, self.port,
             self.auth_mech2))
    @mock.patch("mongo_class.pymongo.MongoClient")
    @mock.patch("mongo_class.Server.get_srv_attr")
    def test_auth_mech(self, mock_cmd, mock_client):
        """Test connect() falls back to the default auth mechanism."""
        mock_cmd.return_value = (True, None)
        mock_client.return_value = True
        mongo = mongo_class.Server(
            self.name, self.user, self.japd, host=self.host, port=self.port,
            auth=True, use_arg=True)
        mongo.conn = False
        mongo.connect()
        self.assertEqual(
            (mongo.name, mongo.user, mongo.japd, mongo.host, mongo.port,
             mongo.auth_mech),
            (self.name, self.user, self.japd, self.host, self.port,
             self.auth_mech))
    @mock.patch("mongo_class.pymongo.MongoClient")
    @mock.patch("mongo_class.Server.get_srv_attr")
    def test_conn_false2(self, mock_cmd, mock_client):
        """Test attributes after connect() with no pre-existing connection."""
        mock_cmd.return_value = (True, None)
        mock_client.return_value = True
        mongo = mongo_class.Server(
            self.name, self.user, self.japd, host=self.host, port=self.port,
            auth=True, use_arg=True)
        mongo.conn = False
        mongo.connect()
        self.assertEqual(
            (mongo.name, mongo.user, mongo.japd, mongo.host, mongo.port),
            (self.name, self.user, self.japd, self.host, self.port))
    @mock.patch("mongo_class.pymongo.MongoClient")
    @mock.patch("mongo_class.Server.get_srv_attr")
    def test_conn_false(self, mock_cmd, mock_client):
        """Test connect() status with no pre-existing connection."""
        mock_cmd.return_value = (True, None)
        mock_client.return_value = True
        mongo = mongo_class.Server(
            self.name, self.user, self.japd, host=self.host, port=self.port,
            auth=True, use_arg=True)
        mongo.conn = False
        self.assertEqual(mongo.connect(), (True, None))
    @mock.patch("mongo_class.pymongo.MongoClient")
    @mock.patch("mongo_class.Server.get_srv_attr")
    def test_conn_true2(self, mock_cmd, mock_client):
        """Test attributes after connect() with an existing connection."""
        mock_cmd.return_value = (True, None)
        mock_client.return_value = True
        mongo = mongo_class.Server(
            self.name, self.user, self.japd, host=self.host, port=self.port,
            auth=True, use_arg=True)
        mongo.conn = True
        mongo.connect()
        self.assertEqual(
            (mongo.name, mongo.user, mongo.japd, mongo.host, mongo.port),
            (self.name, self.user, self.japd, self.host, self.port))
    @mock.patch("mongo_class.pymongo.MongoClient")
    @mock.patch("mongo_class.Server.get_srv_attr")
    def test_conn_true(self, mock_cmd, mock_client):
        """Test connect() status with an existing connection."""
        mock_cmd.return_value = (True, None)
        mock_client.return_value = True
        mongo = mongo_class.Server(
            self.name, self.user, self.japd, host=self.host, port=self.port,
            auth=True, use_arg=True)
        mongo.conn = True
        self.assertEqual(mongo.connect(), (True, None))
    @mock.patch("mongo_class.pymongo.MongoClient")
    @mock.patch("mongo_class.Server.get_srv_attr")
    def test_fail_get_srv_attr2(self, mock_cmd, mock_client):
        """Test attributes after a failed get_srv_attr call."""
        mock_cmd.return_value = (False, self.errmsg)
        mock_client.return_value = True
        mongo = mongo_class.Server(
            self.name, self.user, self.japd, host=self.host, port=self.port)
        mongo.connect()
        self.assertEqual(
            (mongo.name, mongo.user, mongo.japd, mongo.host, mongo.port),
            (self.name, self.user, self.japd, self.host, self.port))
    @mock.patch("mongo_class.pymongo.MongoClient")
    @mock.patch("mongo_class.Server.get_srv_attr")
    def test_fail_get_srv_attr(self, mock_cmd, mock_client):
        """Test connect() status after a failed get_srv_attr call."""
        mock_cmd.return_value = (False, self.errmsg)
        mock_client.return_value = True
        mongo = mongo_class.Server(
            self.name, self.user, self.japd, host=self.host, port=self.port)
        self.assertEqual(mongo.connect(), (False, self.errmsg))
    @mock.patch("mongo_class.pymongo.MongoClient")
    @mock.patch("mongo_class.Server.get_srv_attr")
    def test_auth_arg4(self, mock_cmd, mock_client):
        """Test connect() status with auth disabled."""
        mock_cmd.return_value = (True, None)
        mock_client.return_value = True
        mongo = mongo_class.Server(
            self.name, self.user, self.japd, host=self.host, port=self.port,
            auth=False)
        self.assertEqual(mongo.connect(), (True, None))
    @mock.patch("mongo_class.pymongo.MongoClient")
    @mock.patch("mongo_class.Server.get_srv_attr")
    def test_auth_arg3(self, mock_cmd, mock_client):
        """Test attributes after connect() with auth disabled."""
        mock_cmd.return_value = (True, None)
        mock_client.return_value = True
        mongo = mongo_class.Server(
            self.name, self.user, self.japd, host=self.host, port=self.port,
            auth=False)
        mongo.connect()
        self.assertEqual(
            (mongo.name, mongo.user, mongo.japd, mongo.host, mongo.port),
            (self.name, self.user, self.japd, self.host, self.port))
    @mock.patch("mongo_class.pymongo.MongoClient")
    @mock.patch("mongo_class.Server.get_srv_attr")
    def test_auth_arg2(self, mock_cmd, mock_client):
        """Test connect() status with default auth settings."""
        mock_cmd.return_value = (True, None)
        mock_client.return_value = True
        mongo = mongo_class.Server(
            self.name, self.user, self.japd, host=self.host, port=self.port)
        self.assertEqual(mongo.connect(), (True, None))
    @mock.patch("mongo_class.pymongo.MongoClient")
    @mock.patch("mongo_class.Server.get_srv_attr")
    def test_auth_arg(self, mock_cmd, mock_client):
        """Test attributes after connect() with default auth settings."""
        mock_cmd.return_value = (True, None)
        mock_client.return_value = True
        mongo = mongo_class.Server(
            self.name, self.user, self.japd, host=self.host, port=self.port)
        mongo.connect()
        self.assertEqual(
            (mongo.name, mongo.user, mongo.japd, mongo.host, mongo.port),
            (self.name, self.user, self.japd, self.host, self.port))
    @mock.patch("mongo_class.pymongo.MongoClient")
    @mock.patch("mongo_class.Server.get_srv_attr")
    def test_no_auth2(self, mock_cmd, mock_client):
        """Test attributes after connect() with auth turned off."""
        mock_cmd.return_value = (True, None)
        mock_client.return_value = True
        mongo = mongo_class.Server(self.name, self.user, self.japd,
                                   host=self.host, port=self.port, auth=False)
        mongo.connect()
        self.assertEqual(
            (mongo.name, mongo.user, mongo.japd, mongo.host, mongo.port),
            (self.name, self.user, self.japd, self.host, self.port))
    @mock.patch("mongo_class.pymongo.MongoClient")
    @mock.patch("mongo_class.Server.get_srv_attr")
    def test_no_auth(self, mock_cmd, mock_client):
        """Test connect() status with auth turned off."""
        mock_cmd.return_value = (True, None)
        mock_client.return_value = True
        mongo = mongo_class.Server(self.name, self.user, self.japd,
                                   host=self.host, port=self.port, auth=False)
        self.assertEqual(mongo.connect(), (True, None))
if __name__ == "__main__":
unittest.main()
| true | true |
f720c55c567a173b520fcbc4127c246b39b6746f | 8,696 | py | Python | tests/python/contrib/test_ethosn/test_networks.py | BaldLee/tvm | b53472c7b6afa34260afeffc5f088591352c58c3 | [
"Apache-2.0"
] | 10 | 2019-03-09T07:51:56.000Z | 2021-09-14T03:06:20.000Z | tests/python/contrib/test_ethosn/test_networks.py | BaldLee/tvm | b53472c7b6afa34260afeffc5f088591352c58c3 | [
"Apache-2.0"
] | 9 | 2021-10-20T13:48:52.000Z | 2021-12-09T07:14:24.000Z | tests/python/contrib/test_ethosn/test_networks.py | BaldLee/tvm | b53472c7b6afa34260afeffc5f088591352c58c3 | [
"Apache-2.0"
] | 5 | 2020-11-13T19:26:25.000Z | 2022-01-25T07:55:16.000Z | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
"""Ethos-N integration end-to-end network tests"""
import pytest
pytest.importorskip("tflite")
pytest.importorskip("tensorflow")
from tvm import relay
from tvm.testing import requires_ethosn
from tvm.contrib import download
from tvm.testing import requires_ethosn
import tvm.relay.testing.tf as tf_testing
import tflite.Model
from . import infrastructure as tei
def _get_tflite_model(tflite_model_path, inputs_dict, dtype):
    """Parse a TFLite flatbuffer and convert it to a Relay module.

    Parameters
    ----------
    tflite_model_path : str
        Path to the .tflite flatbuffer on disk.
    inputs_dict : dict
        Maps each input tensor name to its shape.
    dtype : str
        Data type assigned to every input (e.g. "uint8").

    Returns
    -------
    mod, params
        The Relay module and its parameters, as returned by
        relay.frontend.from_tflite.
    """
    with open(tflite_model_path, "rb") as f:
        tflite_model_buffer = f.read()
    # GetRootAsModel moved namespaces between tflite package versions.
    try:
        tflite_model = tflite.Model.Model.GetRootAsModel(tflite_model_buffer, 0)
    except AttributeError:
        tflite_model = tflite.Model.GetRootAsModel(tflite_model_buffer, 0)
    # Original looped with a variable named `input`, shadowing the builtin;
    # build both lookup dicts directly instead.
    shape_dict = dict(inputs_dict)
    dtype_dict = {input_name: dtype for input_name in inputs_dict}
    return relay.frontend.from_tflite(
        tflite_model,
        shape_dict=shape_dict,
        dtype_dict=dtype_dict,
    )
def _test_image_network(
    model_url,
    model_sub_path,
    input_dict,
    compile_hash,
    output_count,
    host_ops=0,
    npu_partitions=1,
    run=False,
):
    """Compile (and optionally run) a quantized image network on the NPU.

    Parameters
    ----------
    model_url : str
        URL the model archive/file is downloaded from.
    model_sub_path : str
        Name of the model file (inside the archive, if any).
    input_dict : dict
        Maps each input tensor name to its shape.
    compile_hash : str, set
        Expected hash(es) of the compiled library.
    output_count : int
        Expected number of network outputs.
    host_ops : int
        Expected number of operators left on the host.
    npu_partitions : int
        Expected number of Ethos-N partitions.
    run : bool
        If True, also exercise the runtime flow. Without hardware the
        inference is mocked, so this checks the flow, not accuracy.
    """
    def fetch_and_convert():
        # Archives go through the TF workload helper; plain files through
        # the generic test-data downloader.
        if model_url[-3:] in ("tgz", "zip"):
            model_path = tf_testing.get_workload_official(
                model_url,
                model_sub_path,
            )
        else:
            model_path = download.download_testdata(
                model_url,
                model_sub_path,
            )
        return _get_tflite_model(model_path, input_dict, "uint8")

    # Shapes index 1 and 2 are height and width of the test image.
    inputs = {
        name: tei.get_real_image(shape[1], shape[2])
        for name, shape in input_dict.items()
    }
    mod, params = fetch_and_convert()
    m = tei.build(mod, params, npu=True, expected_host_ops=host_ops, npu_partitions=npu_partitions)
    tei.assert_lib_hash(m.get_lib(), compile_hash)
    if run:
        tei.run(m, inputs, output_count, npu=True)
@requires_ethosn
def test_mobilenet_v1():
    """Compile and run quantized MobileNet V1 through the Ethos-N flow."""
    # If this test fails with a hash mismatch, notify @mbaret and @Leo-arm:
    # the hash pins the codegen output, which changes only with a Support
    # Library or Ethos-N codegen update and must be refreshed on hardware
    # that is not available in CI.
    n78_1tops = "Ethos-N78_1TOPS_2PLE_RATIO"
    expected_hashes = {"1fd4ef29a1ea9f3a015cab87c0b8014a"}
    if tei.get_ethosn_variant() == n78_1tops:
        expected_hashes = {"b879dfbff1f907eaf6129dfd41b44ece"}
    if tei.get_ethosn_api_version() == 2011:
        expected_hashes = {"9c9f63b30824f5b223cdb27d2f22c857"}
        if tei.get_ethosn_variant() == n78_1tops:
            expected_hashes = {"cd13279061df2319124a7aac81581d81"}
    _test_image_network(
        model_url=(
            "https://storage.googleapis.com/download.tensorflow.org/"
            "models/mobilenet_v1_2018_08_02/mobilenet_v1_1.0_224_quant.tgz"
        ),
        model_sub_path="mobilenet_v1_1.0_224_quant.tflite",
        input_dict={"input": (1, 224, 224, 3)},
        compile_hash=expected_hashes,
        output_count=1,
        host_ops=3,
        npu_partitions=1,
        run=True,
    )
@requires_ethosn
def test_inception_v3():
    """Compile quantized Inception V3 through the Ethos-N flow."""
    # If this test fails with a hash mismatch, notify @mbaret and @Leo-arm:
    # the hash pins the codegen output, which changes only with a Support
    # Library or Ethos-N codegen update and must be refreshed on hardware
    # that is not available in CI.
    n78_1tops = "Ethos-N78_1TOPS_2PLE_RATIO"
    expected_hashes = {"b90ed315639c6a0e97584c2dbc42a55c"}
    if tei.get_ethosn_variant() == n78_1tops:
        expected_hashes = {"5693569055695e581a8739194d0301aa"}
    if tei.get_ethosn_api_version() == 2011:
        expected_hashes = {"46ccafc840633633aca441645e41b444"}
        if tei.get_ethosn_variant() == n78_1tops:
            expected_hashes = {"4a33f397ac3e15c0f9869f7b8286fc2f"}
    _test_image_network(
        model_url=(
            "https://storage.googleapis.com/download.tensorflow.org/"
            "models/tflite_11_05_08/inception_v3_quant.tgz"
        ),
        model_sub_path="inception_v3_quant.tflite",
        input_dict={"input": (1, 299, 299, 3)},
        compile_hash=expected_hashes,
        output_count=1,
        host_ops=0,
        npu_partitions=1,
    )
@requires_ethosn
def test_inception_v4():
    """Compile quantized Inception V4 through the Ethos-N flow."""
    # If this test fails with a hash mismatch, notify @mbaret and @Leo-arm:
    # the hash pins the codegen output, which changes only with a Support
    # Library or Ethos-N codegen update and must be refreshed on hardware
    # that is not available in CI.
    n78_1tops = "Ethos-N78_1TOPS_2PLE_RATIO"
    expected_hashes = {"b36877d2386d9f9c37a11772e3c4072c"}
    if tei.get_ethosn_variant() == n78_1tops:
        expected_hashes = {"b5046a6f56d78af0b4f51960bf2deeda"}
    if tei.get_ethosn_api_version() == 2011:
        expected_hashes = {"4a1a56393078367dd27915a188d6a6af"}
        if tei.get_ethosn_variant() == n78_1tops:
            expected_hashes = {"905caf389dd6b868aeff6acbca1fecef"}
    _test_image_network(
        model_url=(
            "https://storage.googleapis.com/download.tensorflow.org/"
            "models/inception_v4_299_quant_20181026.tgz"
        ),
        model_sub_path="inception_v4_299_quant.tflite",
        input_dict={"input": (1, 299, 299, 3)},
        compile_hash=expected_hashes,
        output_count=1,
        host_ops=3,
        npu_partitions=1,
    )
@requires_ethosn
def test_ssd_mobilenet_v1():
    """Compile quantized SSD MobileNet V1 through the Ethos-N flow."""
    # If this test fails with a hash mismatch, notify @mbaret and @Leo-arm:
    # the hashes pin the codegen output, which changes only with a Support
    # Library or Ethos-N codegen update and must be refreshed on hardware
    # that is not available in CI.
    n78_1tops = "Ethos-N78_1TOPS_2PLE_RATIO"
    expected_hashes = {"956caf9e7fe5cfd5c042bd17857f7407", "4313033d14328e2aa022b1bd71b27b1c"}
    if tei.get_ethosn_variant() == n78_1tops:
        expected_hashes = {"dc60cc687d892cd2877873094e9dfc0b", "6b3deeec16c24c0dcef23df0db5fb162"}
    if tei.get_ethosn_api_version() == 2011:
        expected_hashes = {"10826406ae724e52f360a06c35ced09d", "9a484d5ecec7acb18c9d6bc6058be031"}
        if tei.get_ethosn_variant() == n78_1tops:
            expected_hashes = {"425b38830f34b6eb448fa77dbfe9ac96", "de49128643cbf1c659a9a63aad1cba62"}
    _test_image_network(
        model_url=(
            "https://storage.googleapis.com/download.tensorflow.org/"
            "models/tflite/coco_ssd_mobilenet_v1_1.0_quant_2018_06_29.zip"
        ),
        model_sub_path="detect.tflite",
        input_dict={"normalized_input_image_tensor": (1, 300, 300, 3)},
        compile_hash=expected_hashes,
        output_count=4,
        host_ops=28,
        npu_partitions=2,
    )
| 39.171171 | 100 | 0.700092 |
import pytest
pytest.importorskip("tflite")
pytest.importorskip("tensorflow")
from tvm import relay
from tvm.testing import requires_ethosn
from tvm.contrib import download
from tvm.testing import requires_ethosn
import tvm.relay.testing.tf as tf_testing
import tflite.Model
from . import infrastructure as tei
def _get_tflite_model(tflite_model_path, inputs_dict, dtype):
    """Parse a TFLite flatbuffer and convert it to a Relay module.

    ``inputs_dict`` maps input tensor names to shapes; every input gets
    the same ``dtype``.
    """
    with open(tflite_model_path, "rb") as f:
        tflite_model_buffer = f.read()
    # GetRootAsModel moved namespaces between tflite package versions.
    try:
        tflite_model = tflite.Model.Model.GetRootAsModel(tflite_model_buffer, 0)
    except AttributeError:
        tflite_model = tflite.Model.GetRootAsModel(tflite_model_buffer, 0)
    shape_dict = {}
    dtype_dict = {}
    # NOTE(review): the loop variable shadows the builtin ``input``.
    for input in inputs_dict:
        input_shape = inputs_dict[input]
        shape_dict[input] = input_shape
        dtype_dict[input] = dtype
    return relay.frontend.from_tflite(
        tflite_model,
        shape_dict=shape_dict,
        dtype_dict=dtype_dict,
    )
def _test_image_network(
    model_url,
    model_sub_path,
    input_dict,
    compile_hash,
    output_count,
    host_ops=0,
    npu_partitions=1,
    run=False,
):
    """Compile an image network for the Ethos-N and check the result.

    Downloads the model, converts it to Relay, builds it for the NPU,
    asserts the compiled-library hash against ``compile_hash``, and (if
    ``run``) exercises the runtime flow; without hardware the inference
    is mocked, so only the flow is checked, not accuracy.
    """
    def get_model():
        # Archives go through the TF workload helper; plain files through
        # the generic test-data downloader.
        if model_url[-3:] in ("tgz", "zip"):
            model_path = tf_testing.get_workload_official(
                model_url,
                model_sub_path,
            )
        else:
            model_path = download.download_testdata(
                model_url,
                model_sub_path,
            )
        return _get_tflite_model(model_path, input_dict, "uint8")
    inputs = {}
    for input_name in input_dict:
        input_shape = input_dict[input_name]
        # Shape indices 1 and 2 are the test image height and width.
        inputs[input_name] = tei.get_real_image(input_shape[1], input_shape[2])
    mod, params = get_model()
    m = tei.build(mod, params, npu=True, expected_host_ops=host_ops, npu_partitions=npu_partitions)
    tei.assert_lib_hash(m.get_lib(), compile_hash)
    if run:
        tei.run(m, inputs, output_count, npu=True)
@requires_ethosn
def test_mobilenet_v1():
    """Compile and run quantized MobileNet V1 through the Ethos-N flow."""
    # The hashes pin the codegen output per Support Library version and
    # Ethos-N variant; refreshing them requires hardware not in CI.
    _compile_hash = {"1fd4ef29a1ea9f3a015cab87c0b8014a"}
    if tei.get_ethosn_variant() == "Ethos-N78_1TOPS_2PLE_RATIO":
        _compile_hash = {"b879dfbff1f907eaf6129dfd41b44ece"}
    if tei.get_ethosn_api_version() == 2011:
        _compile_hash = {"9c9f63b30824f5b223cdb27d2f22c857"}
        if tei.get_ethosn_variant() == "Ethos-N78_1TOPS_2PLE_RATIO":
            _compile_hash = {"cd13279061df2319124a7aac81581d81"}
    _test_image_network(
        model_url="https://storage.googleapis.com/download.tensorflow.org/"
        "models/mobilenet_v1_2018_08_02/mobilenet_v1_1.0_224_quant.tgz",
        model_sub_path="mobilenet_v1_1.0_224_quant.tflite",
        input_dict={"input": (1, 224, 224, 3)},
        compile_hash=_compile_hash,
        output_count=1,
        host_ops=3,
        npu_partitions=1,
        run=True,
    )
@requires_ethosn
def test_inception_v3():
    """Compile quantized Inception V3 through the Ethos-N flow."""
    # If this test is failing due to a hash mismatch, please notify @mbaret and
    # @Leo-arm. The hash is there to catch any changes in the behaviour of the
    # codegen, which could come about from either a change in Support Library
    # version or a change in the Ethos-N codegen. To update this requires running
    # on hardware that isn't available in CI.
    _compile_hash = {"b90ed315639c6a0e97584c2dbc42a55c"}
    if tei.get_ethosn_variant() == "Ethos-N78_1TOPS_2PLE_RATIO":
        _compile_hash = {"5693569055695e581a8739194d0301aa"}
    if tei.get_ethosn_api_version() == 2011:
        _compile_hash = {"46ccafc840633633aca441645e41b444"}
        if tei.get_ethosn_variant() == "Ethos-N78_1TOPS_2PLE_RATIO":
            _compile_hash = {"4a33f397ac3e15c0f9869f7b8286fc2f"}
    _test_image_network(
        model_url="https://storage.googleapis.com/download.tensorflow.org/"
        "models/tflite_11_05_08/inception_v3_quant.tgz",
        model_sub_path="inception_v3_quant.tflite",
        input_dict={"input": (1, 299, 299, 3)},
        compile_hash=_compile_hash,
        output_count=1,
        host_ops=0,
        npu_partitions=1,
    )
@requires_ethosn
def test_inception_v4():
    """Compile quantized Inception V4 through the Ethos-N flow."""
    # The hashes pin the codegen output per Support Library version and
    # Ethos-N variant; refreshing them requires hardware not in CI.
    _compile_hash = {"b36877d2386d9f9c37a11772e3c4072c"}
    if tei.get_ethosn_variant() == "Ethos-N78_1TOPS_2PLE_RATIO":
        _compile_hash = {"b5046a6f56d78af0b4f51960bf2deeda"}
    if tei.get_ethosn_api_version() == 2011:
        _compile_hash = {"4a1a56393078367dd27915a188d6a6af"}
        if tei.get_ethosn_variant() == "Ethos-N78_1TOPS_2PLE_RATIO":
            _compile_hash = {"905caf389dd6b868aeff6acbca1fecef"}
    _test_image_network(
        model_url="https://storage.googleapis.com/download.tensorflow.org/"
        "models/inception_v4_299_quant_20181026.tgz",
        model_sub_path="inception_v4_299_quant.tflite",
        input_dict={"input": (1, 299, 299, 3)},
        compile_hash=_compile_hash,
        output_count=1,
        host_ops=3,
        npu_partitions=1,
    )
@requires_ethosn
def test_ssd_mobilenet_v1():
    """Compile quantized SSD MobileNet V1 through the Ethos-N flow."""
    # If this test is failing due to a hash mismatch, please notify @mbaret and
    # @Leo-arm. The hash is there to catch any changes in the behaviour of the
    # codegen, which could come about from either a change in Support Library
    # version or a change in the Ethos-N codegen. To update this requires running
    # on hardware that isn't available in CI.
    _compile_hash = {"956caf9e7fe5cfd5c042bd17857f7407", "4313033d14328e2aa022b1bd71b27b1c"}
    if tei.get_ethosn_variant() == "Ethos-N78_1TOPS_2PLE_RATIO":
        _compile_hash = {"dc60cc687d892cd2877873094e9dfc0b", "6b3deeec16c24c0dcef23df0db5fb162"}
    if tei.get_ethosn_api_version() == 2011:
        _compile_hash = {"10826406ae724e52f360a06c35ced09d", "9a484d5ecec7acb18c9d6bc6058be031"}
        if tei.get_ethosn_variant() == "Ethos-N78_1TOPS_2PLE_RATIO":
            _compile_hash = {"425b38830f34b6eb448fa77dbfe9ac96", "de49128643cbf1c659a9a63aad1cba62"}
    _test_image_network(
        model_url="https://storage.googleapis.com/download.tensorflow.org/"
        "models/tflite/coco_ssd_mobilenet_v1_1.0_quant_2018_06_29.zip",
        model_sub_path="detect.tflite",
        input_dict={"normalized_input_image_tensor": (1, 300, 300, 3)},
        compile_hash=_compile_hash,
        output_count=4,
        host_ops=28,
        npu_partitions=2,
    )
| true | true |
f720c5e752d2911c7077e24ef935f054b6818fb0 | 6,799 | py | Python | source/Mlos.Python/mlos/Optimizers/RegressionModels/SklearnRidgeRegressionModelConfig.py | kkanellis/MLOS | 791d670a4c44467b2b4c9633f8aa1bebab50771f | [
"MIT"
] | 81 | 2020-08-25T17:08:05.000Z | 2022-03-19T08:58:56.000Z | source/Mlos.Python/mlos/Optimizers/RegressionModels/SklearnRidgeRegressionModelConfig.py | grlap/MLOS | f828cf2b46ed63d7c9b3bd6cef73b2027a7ad12a | [
"MIT"
] | 173 | 2020-08-25T17:38:04.000Z | 2021-11-02T19:34:00.000Z | source/Mlos.Python/mlos/Optimizers/RegressionModels/SklearnRidgeRegressionModelConfig.py | grlap/MLOS | f828cf2b46ed63d7c9b3bd6cef73b2027a7ad12a | [
"MIT"
] | 38 | 2020-08-25T20:49:14.000Z | 2022-03-16T16:30:27.000Z | #
# Copyright (c) Microsoft Corporation.
# Licensed under the MIT License.
#
from enum import Enum
from mlos.Spaces import SimpleHypergrid, ContinuousDimension, DiscreteDimension, CategoricalDimension, Point
from mlos.Spaces.Configs.DefaultConfigMeta import DefaultConfigMeta
class SklearnRidgeRegressionModelConfig(metaclass=DefaultConfigMeta):
    """Hyperparameter search space and defaults for sklearn's Ridge regressor.

    CONFIG_SPACE declares the tunable dimensions; _DEFAULT is the default
    configuration point consumed by the DefaultConfigMeta metaclass.
    """
    class Solver(Enum):
        """Computational routine used by sklearn.linear_model.Ridge.

        'auto' picks a solver from the data; 'svd' and 'cholesky' are
        closed-form; 'sparse_cg', 'lsqr', 'sag' and 'saga' are iterative
        (see the sklearn Ridge documentation for details and trade-offs).
        """
        AUTO = 'auto'  # default
        SVD = 'svd'
        CHOLESKY = 'cholesky'
        LSQR = 'lsqr'
        SPARSE_CG = 'sparse_cg'
        SAG = 'sag'
        SAGA = 'saga'
    # Search space over the Ridge hyperparameters exposed for tuning.
    CONFIG_SPACE = SimpleHypergrid(
        name="sklearn_ridge_regression_model_config",
        dimensions=[
            ContinuousDimension(name="alpha", min=0, max=2 ** 16),
            CategoricalDimension(name="fit_intercept", values=[False, True]),
            CategoricalDimension(name="normalize", values=[False, True]),
            CategoricalDimension(name="copy_x", values=[False, True]),
            DiscreteDimension(name="max_iter", min=0, max=10 ** 5),
            ContinuousDimension(name="tol", min=0, max=2 ** 10),
            CategoricalDimension(name="solver", values=[solver.value for solver in Solver]),
        ]
    )
    # Default configuration point; picked up by DefaultConfigMeta.
    _DEFAULT = Point(
        alpha=1.0,
        fit_intercept=False,
        normalize=False,
        copy_x=True,
        max_iter=1000,
        tol=10 ** -4,
        solver=Solver.AUTO.value
    )
    @classmethod
    def contains(cls, config):
        """Return True if ``config`` lies within CONFIG_SPACE."""
        # NOTE(review): random_state is included in the membership Point but
        # CONFIG_SPACE declares no random_state dimension -- confirm the
        # hypergrid membership test tolerates undeclared dimensions.
        return Point(
            alpha=config.alpha,
            fit_intercept=config.fit_intercept,
            normalize=config.normalize,
            copy_x=config.copy_x,
            max_iter=config.max_iter,
            tol=config.tol,
            random_state=config.random_state,
            solver=config.solver
        ) in cls.CONFIG_SPACE
    @classmethod
    def create_from_config_point(cls, config_point):
        """Build a config instance from a Point inside CONFIG_SPACE."""
        assert cls.contains(config_point)
        config_key_value_pairs = {param_name: value for param_name, value in config_point}
        return cls(**config_key_value_pairs)
    def __init__(
        self,
        alpha=_DEFAULT.alpha,
        fit_intercept=_DEFAULT.fit_intercept,
        normalize=_DEFAULT.normalize,
        copy_x=_DEFAULT.copy_x,
        max_iter=_DEFAULT.max_iter,
        tol=_DEFAULT.tol,
        random_state=None,
        solver=_DEFAULT.solver
    ):
        """Store the Ridge hyperparameters.

        :param alpha: Regularization strength; larger values regularize more.
        :param fit_intercept: Whether to fit an intercept term.
        :param normalize: If True (and fit_intercept), regressors are
            normalized before the fit; ignored when fit_intercept is False.
        :param copy_x: If True, X is copied; otherwise it may be overwritten.
        :param max_iter: Maximum number of iterations for iterative solvers.
        :param tol: Precision of the solution for iterative solvers.
        :param random_state: Seed for the stochastic 'sag'/'saga' solvers.
        :param solver: One of the Solver enum values ('auto', 'svd',
            'cholesky', 'lsqr', 'sparse_cg', 'sag', 'saga').
        """
        self.alpha = alpha
        self.fit_intercept = fit_intercept
        self.normalize = normalize
        self.copy_x = copy_x
        self.max_iter = max_iter
        self.tol = tol
        self.random_state = random_state
        self.solver = solver
| 49.268116 | 116 | 0.631563 |
from enum import Enum
from mlos.Spaces import SimpleHypergrid, ContinuousDimension, DiscreteDimension, CategoricalDimension, Point
from mlos.Spaces.Configs.DefaultConfigMeta import DefaultConfigMeta
class SklearnRidgeRegressionModelConfig(metaclass=DefaultConfigMeta):
    """Hyperparameter search space and defaults for sklearn's Ridge regressor."""
    class Solver(Enum):
        """Computational routine used by sklearn.linear_model.Ridge."""
        AUTO = 'auto'  # default
        SVD = 'svd'
        CHOLESKY = 'cholesky'
        LSQR = 'lsqr'
        SPARSE_CG = 'sparse_cg'
        SAG = 'sag'
        SAGA = 'saga'
    # Search space over the Ridge hyperparameters exposed for tuning.
    CONFIG_SPACE = SimpleHypergrid(
        name="sklearn_ridge_regression_model_config",
        dimensions=[
            ContinuousDimension(name="alpha", min=0, max=2 ** 16),
            CategoricalDimension(name="fit_intercept", values=[False, True]),
            CategoricalDimension(name="normalize", values=[False, True]),
            CategoricalDimension(name="copy_x", values=[False, True]),
            DiscreteDimension(name="max_iter", min=0, max=10 ** 5),
            ContinuousDimension(name="tol", min=0, max=2 ** 10),
            CategoricalDimension(name="solver", values=[solver.value for solver in Solver]),
        ]
    )
    # Default configuration point; picked up by DefaultConfigMeta.
    _DEFAULT = Point(
        alpha=1.0,
        fit_intercept=False,
        normalize=False,
        copy_x=True,
        max_iter=1000,
        tol=10 ** -4,
        solver=Solver.AUTO.value
    )
    @classmethod
    def contains(cls, config):
        """Return True if ``config`` lies within CONFIG_SPACE."""
        # NOTE(review): random_state has no dimension in CONFIG_SPACE --
        # confirm the hypergrid tolerates undeclared dimensions.
        return Point(
            alpha=config.alpha,
            fit_intercept=config.fit_intercept,
            normalize=config.normalize,
            copy_x=config.copy_x,
            max_iter=config.max_iter,
            tol=config.tol,
            random_state=config.random_state,
            solver=config.solver
        ) in cls.CONFIG_SPACE
    @classmethod
    def create_from_config_point(cls, config_point):
        """Build a config instance from a Point inside CONFIG_SPACE."""
        assert cls.contains(config_point)
        config_key_value_pairs = {param_name: value for param_name, value in config_point}
        return cls(**config_key_value_pairs)
    def __init__(
        self,
        alpha=_DEFAULT.alpha,
        fit_intercept=_DEFAULT.fit_intercept,
        normalize=_DEFAULT.normalize,
        copy_x=_DEFAULT.copy_x,
        max_iter=_DEFAULT.max_iter,
        tol=_DEFAULT.tol,
        random_state=None,
        solver=_DEFAULT.solver
    ):
        """Store the Ridge hyperparameters (see the sklearn Ridge docs)."""
        self.alpha = alpha
        self.fit_intercept = fit_intercept
        self.normalize = normalize
        self.copy_x = copy_x
        self.max_iter = max_iter
        self.tol = tol
        self.random_state = random_state
        self.solver = solver
| true | true |
f720c7aa06d180672d2e8ae9ac3670dabcc51952 | 10,404 | py | Python | tensorflow_probability/substrates/meta/rewrite.py | varomodt/probability | d68de79e67c06ab46509744574a044ccb966c4d5 | [
"Apache-2.0"
] | 1 | 2020-01-16T02:19:34.000Z | 2020-01-16T02:19:34.000Z | tensorflow_probability/substrates/meta/rewrite.py | varomodt/probability | d68de79e67c06ab46509744574a044ccb966c4d5 | [
"Apache-2.0"
] | null | null | null | tensorflow_probability/substrates/meta/rewrite.py | varomodt/probability | d68de79e67c06ab46509744574a044ccb966c4d5 | [
"Apache-2.0"
] | 1 | 2020-10-19T11:24:40.000Z | 2020-10-19T11:24:40.000Z | # Copyright 2019 The TensorFlow Probability Authors.
#
# Licensed under the Apache License, Version 2.0 (the 'License');
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an 'AS IS' BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ============================================================================
"""Rewrite script for TF->JAX."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import collections
# Dependency imports
from absl import app
from absl import flags
flags.DEFINE_boolean('numpy_to_jax', False,
'Whether or not to rewrite numpy imports to jax.numpy')
flags.DEFINE_list('omit_deps', [], 'List of build deps being omitted.')
FLAGS = flags.FLAGS
TF_REPLACEMENTS = {
'import tensorflow ':
'from tensorflow_probability.python.internal.backend import numpy ',
'import tensorflow.compat.v1':
'from tensorflow_probability.python.internal.backend.numpy.compat '
'import v1',
'import tensorflow.compat.v2':
'from tensorflow_probability.python.internal.backend.numpy.compat '
'import v2',
'import tensorflow_probability as tfp':
'import tensorflow_probability as tfp; '
'tfp = tfp.substrates.numpy',
'from tensorflow.python.framework import tensor_shape':
('from tensorflow_probability.python.internal.backend.numpy.gen '
'import tensor_shape'),
'from tensorflow.python.framework import ops':
('from tensorflow_probability.python.internal.backend.numpy '
'import ops'),
'from tensorflow.python.framework import tensor_util':
('from tensorflow_probability.python.internal.backend.numpy '
'import ops'),
'from tensorflow.python.util import':
'from tensorflow_probability.python.internal.backend.numpy import',
'from tensorflow.python.util.all_util':
'from tensorflow_probability.python.internal.backend.numpy.private',
'from tensorflow.python.ops.linalg':
'from tensorflow_probability.python.internal.backend.numpy.gen',
'from tensorflow.python.ops import parallel_for':
'from tensorflow_probability.python.internal.backend.numpy '
'import functional_ops as parallel_for',
'from tensorflow.python.ops import control_flow_ops':
'from tensorflow_probability.python.internal.backend.numpy '
'import control_flow as control_flow_ops',
'from tensorflow.python.eager import context':
'from tensorflow_probability.python.internal.backend.numpy '
'import private',
('from tensorflow.python.client '
'import pywrap_tf_session as c_api'):
'pass',
('from tensorflow.python '
'import pywrap_tensorflow as c_api'):
'pass'
}
DISABLED_BY_PKG = {
'experimental':
('auto_batching', 'composite_tensor', 'linalg',
'marginalize', 'nn', 'sequential', 'substrates', 'vi'),
}
LIBS = ('bijectors', 'distributions', 'experimental', 'math', 'mcmc',
'optimizer', 'random', 'stats', 'util')
INTERNALS = ('assert_util', 'batched_rejection_sampler', 'broadcast_util',
'cache_util', 'callable_util',
'custom_gradient', 'distribution_util', 'dtype_util',
'hypothesis_testlib', 'implementation_selection', 'monte_carlo',
'name_util', 'nest_util', 'numerics_testing',
'parameter_properties', 'prefer_static', 'samplers',
'special_math', 'structural_tuple', 'tensor_util',
'tensorshape_util', 'test_combinations', 'test_util', 'unnest',
'variadic_reduce', 'vectorization_util')
OPTIMIZERS = ('linesearch',)
LINESEARCH = ('internal',)
SAMPLERS = ('categorical', 'normal', 'poisson', 'uniform', 'shuffle')
PRIVATE_TF_PKGS = ('array_ops', 'control_flow_util', 'gradient_checker_v2',
'numpy_text', 'random_ops')
def main(argv):
  """Rewrite the TFP source file at ``argv[1]`` for the NumPy/JAX substrate.

  Assembles an ordered table of literal text substitutions from the
  module-level constants plus the --omit_deps flag, applies them to the file
  contents, comments out lines carrying the substrate's disable marker, and
  prints the transformed source to stdout behind a provenance banner.
  """
  disabled_by_pkg = dict(DISABLED_BY_PKG)
  # Fold --omit_deps into the disabled table: each omitted build dep
  # (//...path/python/<pkg>:<lib>) disables <lib> within package <pkg>.
  for dep in FLAGS.omit_deps:
    pkg = dep.split('/python/')[1].split(':')[0].replace('/', '.')
    lib = dep.split(':')[1]
    # A target named like its own package: strip the trailing segment.
    if pkg.endswith('.{}'.format(lib)):
      pkg = pkg.replace('.{}'.format(lib), '')
    # The original duplicated the two lines below in both if/else branches;
    # they are branch-independent, so hoist them out.
    disabled_by_pkg.setdefault(pkg, ())
    disabled_by_pkg[pkg] += (lib,)
  replacements = collections.OrderedDict(TF_REPLACEMENTS)
  # Comment out imports of disabled sub-libraries.
  for pkg, disabled in disabled_by_pkg.items():
    replacements.update({
        'from tensorflow_probability.python.{}.{} '.format(pkg, item):
        '# from tensorflow_probability.python.{}.{} '.format(pkg, item)
        for item in disabled
    })
    replacements.update({
        'from tensorflow_probability.python.{} import {}'.format(pkg, item):
        '# from tensorflow_probability.python.{} import {}'.format(pkg, item)
        for item in disabled
    })
  # Re-root library, internal, and private-TF imports under the substrate.
  replacements.update({
      'tensorflow_probability.python.{}'.format(lib):
          'tensorflow_probability.substrates.numpy.{}'.format(lib)
      for lib in LIBS
  })
  replacements.update({
      'tensorflow_probability.python import {} as'.format(lib):
          'tensorflow_probability.substrates.numpy import {} as'.format(lib)
      for lib in LIBS
  })
  replacements.update({
      'tensorflow_probability.python import {}'.format(lib):
          'tensorflow_probability.substrates.numpy import {}'.format(lib)
      for lib in LIBS
  })
  replacements.update({
      # Permits distributions.internal, psd_kernels.internal.
      # 'as psd_kernels as': 'as',
  })
  replacements.update({
      'tensorflow_probability.python.internal.{}'.format(internal):
          'tensorflow_probability.substrates.numpy.internal.{}'.format(internal)
      for internal in INTERNALS
  })
  # pylint: disable=g-complex-comprehension
  replacements.update({
      'tensorflow_probability.python.internal import {}'.format(internal):
          'tensorflow_probability.substrates.numpy.internal import {}'.format(
              internal)
      for internal in INTERNALS
  })
  replacements.update({
      'tensorflow.python.ops import {}'.format(private):
          'tensorflow_probability.python.internal.backend.numpy import private'
          ' as {}'.format(private)
      for private in PRIVATE_TF_PKGS
  })
  replacements.update({
      'tensorflow.python.framework.ops import {}'.format(
          private):
          'tensorflow_probability.python.internal.backend.numpy import private'
          ' as {}'.format(private)
      for private in PRIVATE_TF_PKGS
  })
  # pylint: enable=g-complex-comprehension
  # TODO(bjp): Delete this block after TFP uses stateless samplers.
  replacements.update({
      'tf.random.{}'.format(sampler): 'tf.random.stateless_{}'.format(sampler)
      for sampler in SAMPLERS
  })
  replacements.update({
      'self._maybe_assert_dtype': '# self._maybe_assert_dtype',
      'SKIP_DTYPE_CHECKS = False': 'SKIP_DTYPE_CHECKS = True',
      '@test_util.test_all_tf_execution_regimes':
          '# @test_util.test_all_tf_execution_regimes',
      '@test_util.test_graph_and_eager_modes':
          '# @test_util.test_graph_and_eager_modes',
      '@test_util.test_graph_mode_only':
          '# @test_util.test_graph_mode_only',
      'TestCombinationsTest(test_util.TestCase)':
          'TestCombinationsDoNotTest(object)',
      '@six.add_metaclass(TensorMetaClass)':
          '# @six.add_metaclass(TensorMetaClass)',
  })
  filename = argv[1]
  # Context manager closes the handle deterministically (the previous
  # bare open(...).read() left it to the garbage collector).
  with open(filename, encoding='utf-8') as infile:
    contents = infile.read()
  if '__init__.py' in filename:
    # Comment out items from __all__.
    for pkg, disabled in disabled_by_pkg.items():
      for item in disabled:
        def disable_all(name):
          replacements.update({
              '"{}"'.format(name): '# "{}"'.format(name),
              '\'{}\''.format(name): '# \'{}\''.format(name),
          })
        if 'from tensorflow_probability.python.{} import {}'.format(
            pkg, item) in contents:
          disable_all(item)
        for segment in contents.split(
            'from tensorflow_probability.python.{}.{} import '.format(
                pkg, item)):
          disable_all(segment.split('\n')[0])
  for find, replace in replacements.items():
    contents = contents.replace(find, replace)
  # Comment out any line that carries the substrate-specific disable marker.
  disabler = 'JAX_DISABLE' if FLAGS.numpy_to_jax else 'NUMPY_DISABLE'
  lines = contents.split('\n')
  for i, l in enumerate(lines):
    if disabler in l:
      lines[i] = '# {}'.format(l)
  contents = '\n'.join(lines)
  if not FLAGS.numpy_to_jax:
    contents = contents.replace('NUMPY_MODE = False', 'NUMPY_MODE = True')
  if FLAGS.numpy_to_jax:
    contents = contents.replace('tfp.substrates.numpy', 'tfp.substrates.jax')
    contents = contents.replace('substrates.numpy', 'substrates.jax')
    contents = contents.replace('backend.numpy', 'backend.jax')
    contents = contents.replace('def _call_jax', 'def __call__')
    contents = contents.replace('JAX_MODE = False', 'JAX_MODE = True')
    contents = contents.replace('SKIP_DTYPE_CHECKS = True',
                                'SKIP_DTYPE_CHECKS = False')
  is_test = lambda x: x.endswith('_test.py') or x.endswith('_test_util.py')
  if is_test(argv[1]):  # Test-only rewrites.
    contents = contents.replace(
        'tf.test.main()',
        'from jax.config import config; '
        'config.update("jax_enable_x64", True); '
        'config.enable_omnistaging(); '
        'tf.test.main()')
  # Provenance banner — deliberately 10 lines total (see the final line),
  # so generated-file line numbers are offset by a known constant.
  print('# ' + '@' * 78)
  print('# This file is auto-generated by substrates/meta/rewrite.py')
  print('# It will be surfaced by the build system as a symlink at:')
  substrate = 'jax' if FLAGS.numpy_to_jax else 'numpy'
  print('# `tensorflow_probability/substrates/{substrate}/{path}`'.format(
      substrate=substrate, path=filename.split('/python/')[1]))
  print('# For more info, see substrate_runfiles_symlinks in build_defs.bzl')
  print('# ' + '@' * 78)
  print('\n# (This notice adds 10 to line numbering.)\n\n')
  # Write to fd 1 (stdout) in UTF-8; closefd=False keeps stdout open.
  print(contents, file=open(1, 'w', encoding='utf-8', closefd=False))
# Entry point: absl parses the command-line flags, then calls main(argv).
if __name__ == '__main__':
  app.run(main)
| 40.48249 | 78 | 0.663975 |
# NOTE(review): this section appears to duplicate the module above with its
# comments stripped (a dataset "content_no_comment" column) — confirm before
# editing either copy independently.
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import collections
from absl import app
from absl import flags
# --numpy_to_jax selects the JAX substrate; default (False) targets NumPy.
flags.DEFINE_boolean('numpy_to_jax', False,
                     'Whether or not to rewrite numpy imports to jax.numpy')
flags.DEFINE_list('omit_deps', [], 'List of build deps being omitted.')
FLAGS = flags.FLAGS
# Literal text substitutions: TF imports -> TFP NumPy backend shims.
TF_REPLACEMENTS = {
    'import tensorflow ':
        'from tensorflow_probability.python.internal.backend import numpy ',
    'import tensorflow.compat.v1':
        'from tensorflow_probability.python.internal.backend.numpy.compat '
        'import v1',
    'import tensorflow.compat.v2':
        'from tensorflow_probability.python.internal.backend.numpy.compat '
        'import v2',
    'import tensorflow_probability as tfp':
        'import tensorflow_probability as tfp; '
        'tfp = tfp.substrates.numpy',
    'from tensorflow.python.framework import tensor_shape':
        ('from tensorflow_probability.python.internal.backend.numpy.gen '
         'import tensor_shape'),
    'from tensorflow.python.framework import ops':
        ('from tensorflow_probability.python.internal.backend.numpy '
         'import ops'),
    'from tensorflow.python.framework import tensor_util':
        ('from tensorflow_probability.python.internal.backend.numpy '
         'import ops'),
    'from tensorflow.python.util import':
        'from tensorflow_probability.python.internal.backend.numpy import',
    'from tensorflow.python.util.all_util':
        'from tensorflow_probability.python.internal.backend.numpy.private',
    'from tensorflow.python.ops.linalg':
        'from tensorflow_probability.python.internal.backend.numpy.gen',
    'from tensorflow.python.ops import parallel_for':
        'from tensorflow_probability.python.internal.backend.numpy '
        'import functional_ops as parallel_for',
    'from tensorflow.python.ops import control_flow_ops':
        'from tensorflow_probability.python.internal.backend.numpy '
        'import control_flow as control_flow_ops',
    'from tensorflow.python.eager import context':
        'from tensorflow_probability.python.internal.backend.numpy '
        'import private',
    ('from tensorflow.python.client '
     'import pywrap_tf_session as c_api'):
        'pass',
    ('from tensorflow.python '
     'import pywrap_tensorflow as c_api'):
        'pass'
}
# Sub-libraries disabled per package when generating the substrate.
DISABLED_BY_PKG = {
    'experimental':
        ('auto_batching', 'composite_tensor', 'linalg',
         'marginalize', 'nn', 'sequential', 'substrates', 'vi'),
}
LIBS = ('bijectors', 'distributions', 'experimental', 'math', 'mcmc',
        'optimizer', 'random', 'stats', 'util')
INTERNALS = ('assert_util', 'batched_rejection_sampler', 'broadcast_util',
             'cache_util', 'callable_util',
             'custom_gradient', 'distribution_util', 'dtype_util',
             'hypothesis_testlib', 'implementation_selection', 'monte_carlo',
             'name_util', 'nest_util', 'numerics_testing',
             'parameter_properties', 'prefer_static', 'samplers',
             'special_math', 'structural_tuple', 'tensor_util',
             'tensorshape_util', 'test_combinations', 'test_util', 'unnest',
             'variadic_reduce', 'vectorization_util')
OPTIMIZERS = ('linesearch',)
LINESEARCH = ('internal',)
# Stateful tf.random samplers rewritten to their stateless_* equivalents.
SAMPLERS = ('categorical', 'normal', 'poisson', 'uniform', 'shuffle')
PRIVATE_TF_PKGS = ('array_ops', 'control_flow_util', 'gradient_checker_v2',
                   'numpy_text', 'random_ops')
# NOTE(review): comment-stripped duplicate of the main() defined earlier in
# this file — confirm before editing either copy independently.
def main(argv):
  """Rewrite the TFP source file at argv[1] for the NumPy/JAX substrate and print it."""
  disabled_by_pkg = dict(DISABLED_BY_PKG)
  # Fold --omit_deps into the disabled table.
  for dep in FLAGS.omit_deps:
    pkg = dep.split('/python/')[1].split(':')[0].replace('/', '.')
    lib = dep.split(':')[1]
    if pkg.endswith('.{}'.format(lib)):
      pkg = pkg.replace('.{}'.format(lib), '')
      disabled_by_pkg.setdefault(pkg, ())
      disabled_by_pkg[pkg] += (lib,)
    else:
      disabled_by_pkg.setdefault(pkg, ())
      disabled_by_pkg[pkg] += (lib,)
  replacements = collections.OrderedDict(TF_REPLACEMENTS)
  # Comment out imports of disabled sub-libraries.
  for pkg, disabled in disabled_by_pkg.items():
    replacements.update({
        'from tensorflow_probability.python.{}.{} '.format(pkg, item):
        '# from tensorflow_probability.python.{}.{} '.format(pkg, item)
        for item in disabled
    })
    replacements.update({
        'from tensorflow_probability.python.{} import {}'.format(pkg, item):
        '# from tensorflow_probability.python.{} import {}'.format(pkg, item)
        for item in disabled
    })
  # Re-root library, internal, and private-TF imports under the substrate.
  replacements.update({
      'tensorflow_probability.python.{}'.format(lib):
          'tensorflow_probability.substrates.numpy.{}'.format(lib)
      for lib in LIBS
  })
  replacements.update({
      'tensorflow_probability.python import {} as'.format(lib):
          'tensorflow_probability.substrates.numpy import {} as'.format(lib)
      for lib in LIBS
  })
  replacements.update({
      'tensorflow_probability.python import {}'.format(lib):
          'tensorflow_probability.substrates.numpy import {}'.format(lib)
      for lib in LIBS
  })
  replacements.update({
      })
  replacements.update({
      'tensorflow_probability.python.internal.{}'.format(internal):
          'tensorflow_probability.substrates.numpy.internal.{}'.format(internal)
      for internal in INTERNALS
  })
  replacements.update({
      'tensorflow_probability.python.internal import {}'.format(internal):
          'tensorflow_probability.substrates.numpy.internal import {}'.format(
              internal)
      for internal in INTERNALS
  })
  replacements.update({
      'tensorflow.python.ops import {}'.format(private):
          'tensorflow_probability.python.internal.backend.numpy import private'
          ' as {}'.format(private)
      for private in PRIVATE_TF_PKGS
  })
  replacements.update({
      'tensorflow.python.framework.ops import {}'.format(
          private):
          'tensorflow_probability.python.internal.backend.numpy import private'
          ' as {}'.format(private)
      for private in PRIVATE_TF_PKGS
  })
  # Stateful -> stateless sampler rewrite.
  replacements.update({
      'tf.random.{}'.format(sampler): 'tf.random.stateless_{}'.format(sampler)
      for sampler in SAMPLERS
  })
  replacements.update({
      'self._maybe_assert_dtype': '# self._maybe_assert_dtype',
      'SKIP_DTYPE_CHECKS = False': 'SKIP_DTYPE_CHECKS = True',
      '@test_util.test_all_tf_execution_regimes':
          '# @test_util.test_all_tf_execution_regimes',
      '@test_util.test_graph_and_eager_modes':
          '# @test_util.test_graph_and_eager_modes',
      '@test_util.test_graph_mode_only':
          '# @test_util.test_graph_mode_only',
      'TestCombinationsTest(test_util.TestCase)':
          'TestCombinationsDoNotTest(object)',
      '@six.add_metaclass(TensorMetaClass)':
          '# @six.add_metaclass(TensorMetaClass)',
  })
  filename = argv[1]
  contents = open(filename, encoding='utf-8').read()
  if '__init__.py' in filename:
    # Comment out disabled items from __all__.
    for pkg, disabled in disabled_by_pkg.items():
      for item in disabled:
        def disable_all(name):
          replacements.update({
              '"{}"'.format(name): '# "{}"'.format(name),
              '\'{}\''.format(name): '# \'{}\''.format(name),
          })
        if 'from tensorflow_probability.python.{} import {}'.format(
            pkg, item) in contents:
          disable_all(item)
        for segment in contents.split(
            'from tensorflow_probability.python.{}.{} import '.format(
                pkg, item)):
          disable_all(segment.split('\n')[0])
  for find, replace in replacements.items():
    contents = contents.replace(find, replace)
  # Comment out lines carrying the substrate-specific disable marker.
  disabler = 'JAX_DISABLE' if FLAGS.numpy_to_jax else 'NUMPY_DISABLE'
  lines = contents.split('\n')
  for i, l in enumerate(lines):
    if disabler in l:
      lines[i] = '# {}'.format(l)
  contents = '\n'.join(lines)
  if not FLAGS.numpy_to_jax:
    contents = contents.replace('NUMPY_MODE = False', 'NUMPY_MODE = True')
  if FLAGS.numpy_to_jax:
    contents = contents.replace('tfp.substrates.numpy', 'tfp.substrates.jax')
    contents = contents.replace('substrates.numpy', 'substrates.jax')
    contents = contents.replace('backend.numpy', 'backend.jax')
    contents = contents.replace('def _call_jax', 'def __call__')
    contents = contents.replace('JAX_MODE = False', 'JAX_MODE = True')
    contents = contents.replace('SKIP_DTYPE_CHECKS = True',
                                'SKIP_DTYPE_CHECKS = False')
  is_test = lambda x: x.endswith('_test.py') or x.endswith('_test_util.py')
  if is_test(argv[1]):
    contents = contents.replace(
        'tf.test.main()',
        'from jax.config import config; '
        'config.update("jax_enable_x64", True); '
        'config.enable_omnistaging(); '
        'tf.test.main()')
  # Provenance banner (10 lines total, per the final line's note).
  print('# ' + '@' * 78)
  print('# This file is auto-generated by substrates/meta/rewrite.py')
  print('# It will be surfaced by the build system as a symlink at:')
  substrate = 'jax' if FLAGS.numpy_to_jax else 'numpy'
  print('# `tensorflow_probability/substrates/{substrate}/{path}`'.format(
      substrate=substrate, path=filename.split('/python/')[1]))
  print('# For more info, see substrate_runfiles_symlinks in build_defs.bzl')
  print('# ' + '@' * 78)
  print('\n# (This notice adds 10 to line numbering.)\n\n')
  print(contents, file=open(1, 'w', encoding='utf-8', closefd=False))
# Entry point: absl parses the command-line flags, then calls main(argv).
if __name__ == '__main__':
  app.run(main)
| true | true |
f720c8e34817cce8439e26b7ffd83fa810781ad6 | 35,031 | py | Python | Pilot1/Combo/combo_dose.py | j-woz/Benchmarks | d518162fdafb7cfa26071b6a30a3b456dad024f6 | [
"MIT"
] | 2 | 2021-02-06T06:47:19.000Z | 2021-02-24T13:45:02.000Z | Pilot1/Combo/combo_dose.py | j-woz/Benchmarks | d518162fdafb7cfa26071b6a30a3b456dad024f6 | [
"MIT"
] | null | null | null | Pilot1/Combo/combo_dose.py | j-woz/Benchmarks | d518162fdafb7cfa26071b6a30a3b456dad024f6 | [
"MIT"
] | 1 | 2019-08-14T14:29:42.000Z | 2019-08-14T14:29:42.000Z | #! /usr/bin/env python
from __future__ import division, print_function
import argparse
import collections
import logging
import os
import random
import threading
import numpy as np
import pandas as pd
from itertools import cycle, islice
import keras
from keras import backend as K
from keras import optimizers
from keras.models import Model
from keras.layers import Input, Dense, Dropout
from keras.callbacks import Callback, ModelCheckpoint, ReduceLROnPlateau, LearningRateScheduler, TensorBoard
from keras.utils import get_custom_objects
from keras.utils.vis_utils import plot_model
from sklearn.metrics import r2_score, mean_squared_error, mean_absolute_error
from sklearn.model_selection import KFold, StratifiedKFold, GroupKFold
from scipy.stats.stats import pearsonr
import matplotlib as mpl
# Select the non-interactive Agg backend before pyplot is imported, so plots
# can be rendered without a display (e.g. on batch/compute nodes).
mpl.use('Agg')
import matplotlib.pyplot as plt
import combo
import candle
import NCI60
logger = logging.getLogger(__name__)
# Suppress TensorFlow C++ log messages below ERROR level.
os.environ['TF_CPP_MIN_LOG_LEVEL'] = '3'
def set_seed(seed):
    """Seed Python, NumPy, and (when active) the TensorFlow backend RNGs."""
    # NOTE(review): setting PYTHONHASHSEED here only affects subprocesses;
    # the current interpreter's hash randomization is already fixed at start.
    os.environ['PYTHONHASHSEED'] = '0'
    random.seed(seed)
    np.random.seed(seed)
    # Only seed TF when Keras is actually running on the TensorFlow backend;
    # import lazily so other backends never pull TF in.
    if K.backend() == 'tensorflow':
        import tensorflow as tf
        tf.set_random_seed(seed)
def verify_path(path):
    """Ensure the parent directory of `path` exists, creating all levels.

    A no-op when `path` has no directory component (bare filename).
    """
    folder = os.path.dirname(path)
    if folder:
        # exist_ok=True closes the check-then-create race present in the
        # original `if not exists: makedirs` sequence (safe if another
        # process creates the directory concurrently).
        os.makedirs(folder, exist_ok=True)
def set_up_logger(logfile, verbose):
    """Attach a file handler and a console handler to the module logger.

    The file handler records everything at DEBUG with a timestamp/pid prefix;
    the console handler prints bare messages at DEBUG (verbose) or INFO.
    """
    verify_path(logfile)
    file_handler = logging.FileHandler(logfile)
    file_handler.setFormatter(logging.Formatter("[%(asctime)s %(process)d] %(message)s", datefmt="%Y-%m-%d %H:%M:%S"))
    file_handler.setLevel(logging.DEBUG)
    console_handler = logging.StreamHandler()
    console_handler.setFormatter(logging.Formatter(''))
    console_handler.setLevel(logging.DEBUG if verbose else logging.INFO)
    # The logger itself passes everything through; handlers do the filtering.
    logger.setLevel(logging.DEBUG)
    for handler in (file_handler, console_handler):
        logger.addHandler(handler)
def extension_from_parameters(args):
    """Construct a filename extension annotating the run's hyperparameters.

    Encodes activation, batch size, epochs, optimizer, learning rate, the
    sorted initials of the cell/drug feature sets, optional flags, and the
    per-layer sizes of the dense stack and (when different) the dense
    feature-layer stack.
    """
    ext = ''
    ext += '.A={}'.format(args.activation)
    ext += '.B={}'.format(args.batch_size)
    ext += '.E={}'.format(args.epochs)
    ext += '.O={}'.format(args.optimizer)
    # ext += '.LEN={}'.format(args.maxlen)
    ext += '.LR={}'.format(args.learning_rate)
    # Feature sets are abbreviated by their sorted first letters.
    ext += '.CF={}'.format(''.join([x[0] for x in sorted(args.cell_features)]))
    ext += '.DF={}'.format(''.join([x[0] for x in sorted(args.drug_features)]))
    if args.feature_subsample > 0:
        ext += '.FS={}'.format(args.feature_subsample)
    if args.dropout > 0:
        ext += '.DR={}'.format(args.dropout)
    if args.warmup_lr:
        ext += '.wu_lr'
    if args.reduce_lr:
        ext += '.re_lr'
    if args.residual:
        ext += '.res'
    if args.use_landmark_genes:
        ext += '.L1000'
    if args.gen:
        ext += '.gen'
    if args.use_combo_score:
        ext += '.scr'
    for i, n in enumerate(args.dense):
        if n > 0:
            ext += '.D{}={}'.format(i+1, n)
    if args.dense_feature_layers != args.dense:
        # Bug fix: annotate the feature-layer sizes themselves; the original
        # iterated args.dense again, so the .FD entries duplicated .D.
        for i, n in enumerate(args.dense_feature_layers):
            if n > 0:
                ext += '.FD{}={}'.format(i+1, n)
    return ext
def discretize(y, bins=5):
    """Bin continuous values into `bins` equal-frequency classes (0..bins-1).

    The bin edges are the empirical percentiles of `y` at 1/bins increments,
    so each class holds roughly the same number of samples.
    """
    cut_points = np.percentile(y, [100 / bins * k for k in range(1, bins)])
    return np.digitize(y, cut_points)
class ComboDataLoader(object):
"""Load merged drug response, drug descriptors and cell line essay data
"""
def __init__(self, seed, val_split=0.2, shuffle=True,
cell_features=['expression'], drug_features=['descriptors'],
response_url=None, use_landmark_genes=False, use_combo_score=False,
preprocess_rnaseq=None, exclude_cells=[], exclude_drugs=[],
feature_subsample=None, scaling='std', scramble=False,
cv_partition='overlapping', cv=0):
"""Initialize data merging drug response, drug descriptors and cell line essay.
Shuffle and split training and validation set
Parameters
----------
seed: integer
seed for random generation
val_split : float, optional (default 0.2)
fraction of data to use in validation
cell_features: list of strings from 'expression', 'expression_5platform', 'mirna', 'proteome', 'all', 'categorical' (default ['expression'])
use one or more cell line feature sets: gene expression, microRNA, proteome
use 'all' for ['expression', 'mirna', 'proteome']
use 'categorical' for one-hot encoded cell lines
drug_features: list of strings from 'descriptors', 'latent', 'all', 'categorical', 'noise' (default ['descriptors'])
use dragon7 descriptors, latent representations from Aspuru-Guzik's SMILES autoencoder
trained on NSC drugs, or both; use random features if set to noise
use 'categorical' for one-hot encoded drugs
shuffle : True or False, optional (default True)
if True shuffles the merged data before splitting training and validation sets
scramble: True or False, optional (default False)
if True randomly shuffle dose response data as a control
feature_subsample: None or integer (default None)
number of feature columns to use from cellline expressions and drug descriptors
use_landmark_genes: True or False
only use LINCS1000 landmark genes
use_combo_score: bool (default False)
use combination score in place of percent growth (stored in 'GROWTH' column)
scaling: None, 'std', 'minmax' or 'maxabs' (default 'std')
type of feature scaling: 'maxabs' to [-1,1], 'maxabs' to [-1, 1], 'std' for standard normalization
"""
self.cv_partition = cv_partition
np.random.seed(seed)
df = NCI60.load_combo_dose_response(response_url=response_url, use_combo_score=use_combo_score, fraction=True, exclude_cells=exclude_cells, exclude_drugs=exclude_drugs)
logger.info('Loaded {} unique (CL, D1, D2) response sets.'.format(df.shape[0]))
if 'all' in cell_features:
self.cell_features = ['expression', 'mirna', 'proteome']
else:
self.cell_features = cell_features
if 'all' in drug_features:
self.drug_features = ['descriptors', 'latent']
else:
self.drug_features = drug_features
for fea in self.cell_features:
if fea == 'expression' or fea == 'rnaseq':
self.df_cell_expr = NCI60.load_cell_expression_rnaseq(ncols=feature_subsample, scaling=scaling, use_landmark_genes=use_landmark_genes, preprocess_rnaseq=preprocess_rnaseq)
df = df.merge(self.df_cell_expr[['CELLNAME']], on='CELLNAME')
elif fea == 'expression_u133p2':
self.df_cell_expr = NCI60.load_cell_expression_u133p2(ncols=feature_subsample, scaling=scaling, use_landmark_genes=use_landmark_genes)
df = df.merge(self.df_cell_expr[['CELLNAME']], on='CELLNAME')
elif fea == 'expression_5platform':
self.df_cell_expr = NCI60.load_cell_expression_5platform(ncols=feature_subsample, scaling=scaling, use_landmark_genes=use_landmark_genes)
df = df.merge(self.df_cell_expr[['CELLNAME']], on='CELLNAME')
elif fea == 'mirna':
self.df_cell_mirna = NCI60.load_cell_mirna(ncols=feature_subsample, scaling=scaling)
df = df.merge(self.df_cell_mirna[['CELLNAME']], on='CELLNAME')
elif fea == 'proteome':
self.df_cell_prot = NCI60.load_cell_proteome(ncols=feature_subsample, scaling=scaling)
df = df.merge(self.df_cell_prot[['CELLNAME']], on='CELLNAME')
elif fea == 'categorical':
df_cell_ids = df[['CELLNAME']].drop_duplicates()
cell_ids = df_cell_ids['CELLNAME'].map(lambda x: x.replace(':', '.'))
df_cell_cat = pd.get_dummies(cell_ids)
df_cell_cat.index = df_cell_ids['CELLNAME']
self.df_cell_cat = df_cell_cat.reset_index()
for fea in self.drug_features:
if fea == 'descriptors':
self.df_drug_desc = NCI60.load_drug_descriptors(ncols=feature_subsample, scaling=scaling)
df = df[df['NSC1'].isin(self.df_drug_desc['NSC']) & df['NSC2'].isin(self.df_drug_desc['NSC'])]
elif fea == 'latent':
self.df_drug_auen = NCI60.load_drug_autoencoded_AG(ncols=feature_subsample, scaling=scaling)
df = df[df['NSC1'].isin(self.df_drug_auen['NSC']) & df['NSC2'].isin(self.df_drug_auen['NSC'])]
elif fea == 'categorical':
df_drug_ids = df[['NSC1']].drop_duplicates()
df_drug_ids.columns = ['NSC']
drug_ids = df_drug_ids['NSC']
df_drug_cat = pd.get_dummies(drug_ids)
df_drug_cat.index = df_drug_ids['NSC']
self.df_drug_cat = df_drug_cat.reset_index()
elif fea == 'noise':
ids1 = df[['NSC1']].drop_duplicates().rename(columns={'NSC1':'NSC'})
ids2 = df[['NSC2']].drop_duplicates().rename(columns={'NSC2':'NSC'})
df_drug_ids = pd.concat([ids1, ids2]).drop_duplicates()
noise = np.random.normal(size=(df_drug_ids.shape[0], 500))
df_rand = pd.DataFrame(noise, index=df_drug_ids['NSC'],
columns=['RAND-{:03d}'.format(x) for x in range(500)])
self.df_drug_rand = df_rand.reset_index()
logger.info('Filtered down to {} rows with matching information.'.format(df.shape[0]))
ids1 = df[['NSC1']].drop_duplicates().rename(columns={'NSC1':'NSC'})
ids2 = df[['NSC2']].drop_duplicates().rename(columns={'NSC2':'NSC'})
df_drug_ids = pd.concat([ids1, ids2]).drop_duplicates().reset_index(drop=True)
n_drugs = df_drug_ids.shape[0]
n_val_drugs = int(n_drugs * val_split)
n_train_drugs = n_drugs - n_val_drugs
logger.info('Unique cell lines: {}'.format(df['CELLNAME'].nunique()))
logger.info('Unique drugs: {}'.format(n_drugs))
# df.to_csv('filtered.growth.min.tsv', sep='\t', index=False, float_format='%.4g')
# df.to_csv('filtered.score.max.tsv', sep='\t', index=False, float_format='%.4g')
if shuffle:
df = df.sample(frac=1.0, random_state=seed).reset_index(drop=True)
df_drug_ids = df_drug_ids.sample(frac=1.0, random_state=seed).reset_index(drop=True)
self.df_response = df
self.df_drug_ids = df_drug_ids
self.train_drug_ids = df_drug_ids['NSC'][:n_train_drugs]
self.val_drug_ids = df_drug_ids['NSC'][-n_val_drugs:]
if scramble:
growth = df[['GROWTH']]
random_growth = growth.iloc[np.random.permutation(np.arange(growth.shape[0]))].reset_index()
self.df_response[['GROWTH']] = random_growth['GROWTH']
logger.warn('Randomly shuffled dose response growth values.')
logger.info('Distribution of dose response:')
logger.info(self.df_response[['GROWTH']].describe())
self.total = df.shape[0]
self.n_val = int(self.total * val_split)
self.n_train = self.total - self.n_val
logger.info('Rows in train: {}, val: {}'.format(self.n_train, self.n_val))
self.cell_df_dict = {'expression': 'df_cell_expr',
'expression_5platform': 'df_cell_expr',
'expression_u133p2': 'df_cell_expr',
'rnaseq': 'df_cell_expr',
'mirna': 'df_cell_mirna',
'proteome': 'df_cell_prot',
'categorical': 'df_cell_cat'}
self.drug_df_dict = {'descriptors': 'df_drug_desc',
'latent': 'df_drug_auen',
'categorical': 'df_drug_cat',
'noise': 'df_drug_rand'}
self.input_features = collections.OrderedDict()
self.feature_shapes = {}
for fea in self.cell_features:
feature_type = 'cell.' + fea
feature_name = 'cell.' + fea
df_cell = getattr(self, self.cell_df_dict[fea])
self.input_features[feature_name] = feature_type
self.feature_shapes[feature_type] = (df_cell.shape[1] - 1,)
for drug in ['drug1', 'drug2']:
for fea in self.drug_features:
feature_type = 'drug.' + fea
feature_name = drug + '.' + fea
df_drug = getattr(self, self.drug_df_dict[fea])
self.input_features[feature_name] = feature_type
self.feature_shapes[feature_type] = (df_drug.shape[1] - 1,)
self.feature_shapes['dose'] = (1,)
for dose in ['dose1', 'dose2']:
self.input_features[dose] = 'dose'
logger.info('Input features shapes:')
for k, v in self.input_features.items():
logger.info(' {}: {}'.format(k, self.feature_shapes[v]))
self.input_dim = sum([np.prod(self.feature_shapes[x]) for x in self.input_features.values()])
logger.info('Total input dimensions: {}'.format(self.input_dim))
if cv > 1:
if cv_partition == 'disjoint':
pass
elif cv_partition == 'disjoint_cells':
y = self.df_response['GROWTH'].values
groups = self.df_response['CELLNAME'].values
gkf = GroupKFold(n_splits=cv)
splits = gkf.split(y, groups=groups)
self.cv_train_indexes = []
self.cv_val_indexes = []
for index, (train_index, val_index) in enumerate(splits):
print(index, train_index)
self.cv_train_indexes.append(train_index)
self.cv_val_indexes.append(val_index)
else:
y = self.df_response['GROWTH'].values
# kf = KFold(n_splits=cv)
# splits = kf.split(y)
skf = StratifiedKFold(n_splits=cv, random_state=seed)
splits = skf.split(y, discretize(y, bins=cv))
self.cv_train_indexes = []
self.cv_val_indexes = []
for index, (train_index, val_index) in enumerate(splits):
print(index, train_index)
self.cv_train_indexes.append(train_index)
self.cv_val_indexes.append(val_index)
def load_data_all(self, switch_drugs=False):
df_all = self.df_response
y_all = df_all['GROWTH'].values
x_all_list = []
for fea in self.cell_features:
df_cell = getattr(self, self.cell_df_dict[fea])
df_x_all = pd.merge(df_all[['CELLNAME']], df_cell, on='CELLNAME', how='left')
x_all_list.append(df_x_all.drop(['CELLNAME'], axis=1).values)
# for fea in loader.cell_features:
# df_cell = getattr(loader, loader.cell_df_dict[fea])
# df_x_all = pd.merge(df_all[['CELLNAME']], df_cell, on='CELLNAME', how='left')
# df_x_all[:1000].to_csv('df.{}.1k.csv'.format(fea), index=False, float_format="%g")
drugs = ['NSC1', 'NSC2']
doses = ['pCONC1', 'pCONC2']
if switch_drugs:
drugs = ['NSC2', 'NSC1']
doses = ['pCONC2', 'pCONC1']
for drug in drugs:
for fea in self.drug_features:
df_drug = getattr(self, self.drug_df_dict[fea])
df_x_all = pd.merge(df_all[[drug]], df_drug, left_on=drug, right_on='NSC', how='left')
x_all_list.append(df_x_all.drop([drug, 'NSC'], axis=1).values)
for dose in doses:
x_all_list.append(df_all[dose].values)
# for drug in drugs:
# for fea in loader.drug_features:
# df_drug = getattr(loader, loader.drug_df_dict[fea])
# df_x_all = pd.merge(df_all[[drug]], df_drug, left_on=drug, right_on='NSC', how='left')
# print(df_x_all.shape)
# df_x_all[:1000].drop([drug], axis=1).to_csv('df.{}.{}.1k.csv'.format(drug, fea), index=False, float_format="%g")
# df_all[:1000].to_csv('df.growth.1k.csv', index=False, float_format="%g")
return x_all_list, y_all, df_all
def load_data_by_index(self, train_index, val_index):
x_all_list, y_all, df_all = self.load_data_all()
x_train_list = [x[train_index] for x in x_all_list]
x_val_list = [x[val_index] for x in x_all_list]
y_train = y_all[train_index]
y_val = y_all[val_index]
df_train = df_all.iloc[train_index, :]
df_val = df_all.iloc[val_index, :]
if self.cv_partition == 'disjoint':
logger.info('Training drugs: {}'.format(set(df_train['NSC1'])))
logger.info('Validation drugs: {}'.format(set(df_val['NSC1'])))
elif self.cv_partition == 'disjoint_cells':
logger.info('Training cells: {}'.format(set(df_train['CELLNAME'])))
logger.info('Validation cells: {}'.format(set(df_val['CELLNAME'])))
return x_train_list, y_train, x_val_list, y_val, df_train, df_val
def load_data_cv(self, fold):
train_index = self.cv_train_indexes[fold]
val_index = self.cv_val_indexes[fold]
# print('fold', fold)
# print(train_index[:5])
return self.load_data_by_index(train_index, val_index)
def load_data(self):
    """Load the train/validation split for the configured partition scheme.

    For 'disjoint' partitions only rows where BOTH drugs belong to the
    corresponding drug set are used; otherwise the response table is
    split positionally at n_train.
    """
    if self.cv_partition == 'disjoint':
        resp = self.df_response
        in_train = resp['NSC1'].isin(self.train_drug_ids) & resp['NSC2'].isin(self.train_drug_ids)
        in_val = resp['NSC1'].isin(self.val_drug_ids) & resp['NSC2'].isin(self.val_drug_ids)
        train_index = resp[in_train].index
        val_index = resp[in_val].index
    else:
        train_index = range(self.n_train)
        val_index = range(self.n_train, self.total)
    return self.load_data_by_index(train_index, val_index)
def load_data_old(self):
    """Deprecated positional train/validation loader.

    Kept for reference; superseded by load_data(). The per-partition
    merges here were ~4x slower, possibly due to incontiguous data.

    Returns (x_train_list, y_train, x_val_list, y_val, df_train, df_val).
    """
    df_train = self.df_response.iloc[:self.n_train, :]
    df_val = self.df_response.iloc[self.n_train:, :]
    y_train = df_train['GROWTH'].values
    y_val = df_val['GROWTH'].values
    x_train_list = []
    x_val_list = []
    # Cell-line feature matrices, aligned to each partition by CELLNAME.
    for fea in self.cell_features:
        df_cell = getattr(self, self.cell_df_dict[fea])
        for df_part, out in ((df_train, x_train_list), (df_val, x_val_list)):
            merged = pd.merge(df_part[['CELLNAME']], df_cell, on='CELLNAME', how='left')
            out.append(merged.drop(['CELLNAME'], axis=1).values)
    # Drug feature matrices for each drug slot of the pair, aligned by NSC id.
    for drug in ['NSC1', 'NSC2']:
        for fea in self.drug_features:
            df_drug = getattr(self, self.drug_df_dict[fea])
            for df_part, out in ((df_train, x_train_list), (df_val, x_val_list)):
                merged = pd.merge(df_part[[drug]], df_drug, left_on=drug, right_on='NSC', how='left')
                out.append(merged.drop([drug, 'NSC'], axis=1).values)
    return x_train_list, y_train, x_val_list, y_val, df_train, df_val
class ComboDataGenerator(object):
    """Generate training, validation or testing batches from loaded data."""

    def __init__(self, data, partition='train', batch_size=32):
        self.lock = threading.Lock()
        self.data = data
        self.partition = partition
        self.batch_size = batch_size
        # Endless cycle over the row positions belonging to this partition.
        if partition == 'train':
            self.num_data = data.n_train
            self.cycle = cycle(range(data.n_train))
        elif partition == 'val':
            self.num_data = data.n_val
            self.cycle = cycle(range(data.total)[-data.n_val:])
        else:
            raise Exception('Data partition "{}" not recognized.'.format(partition))

    def flow(self):
        """Yield (x_list, y) batches indefinitely."""
        while 1:
            # Guard the shared cycle iterator so concurrent workers draw
            # disjoint index batches.
            with self.lock:
                indices = list(islice(self.cycle, self.batch_size))
            batch = self.data.df_response.iloc[indices, :]
            y = batch['GROWTH'].values
            x_list = []
            for fea in self.data.cell_features:
                df_cell = getattr(self.data, self.data.cell_df_dict[fea])
                merged = pd.merge(batch[['CELLNAME']], df_cell, on='CELLNAME', how='left')
                x_list.append(merged.drop(['CELLNAME'], axis=1).values)
            for drug in ['NSC1', 'NSC2']:
                for fea in self.data.drug_features:
                    df_drug = getattr(self.data, self.data.drug_df_dict[fea])
                    merged = pd.merge(batch[[drug]], df_drug, left_on=drug, right_on='NSC', how='left')
                    x_list.append(merged.drop([drug, 'NSC'], axis=1).values)
            yield x_list, y
def test_generator(loader):
    """Smoke-test ComboDataGenerator by printing the shapes of one batch."""
    x_list, y = next(ComboDataGenerator(loader).flow())
    for x in x_list:
        print(x.shape)
    print(y.shape)
def test_loader(loader):
x_train_list, y_train, x_val_list, y_val = loader.load_data()
print('x_train shapes:')
for x in x_train_list:
print(x.shape)
print('y_train shape:', y_train.shape)
print('x_val shapes:')
for x in x_val_list:
print(x.shape)
print('y_val shape:', y_val.shape)
def r2(y_true, y_pred):
    """Keras metric: coefficient of determination (R^2) of predictions."""
    ss_res = K.sum(K.square(y_true - y_pred))
    ss_tot = K.sum(K.square(y_true - K.mean(y_true)))
    # Epsilon keeps the ratio finite when y_true is constant.
    return 1 - ss_res / (ss_tot + K.epsilon())
def mae(y_true, y_pred):
    """Keras metric: mean absolute error (named wrapper for reports)."""
    error = keras.metrics.mean_absolute_error(y_true, y_pred)
    return error
def evaluate_prediction(y_true, y_pred):
    """Compute regression quality metrics for predictions.

    Returns a dict with mean squared error, mean absolute error, R^2 and
    Pearson correlation.
    """
    return {
        'mse': mean_squared_error(y_true, y_pred),
        'mae': mean_absolute_error(y_true, y_pred),
        'r2': r2_score(y_true, y_pred),
        'corr': pearsonr(y_true, y_pred)[0],
    }
def log_evaluation(metric_outputs, description='Comparing y_true and y_pred:'):
    """Write a metrics dict to the module logger, one line per metric."""
    logger.info(description)
    for name in metric_outputs:
        logger.info(' {}: {:.4f}'.format(name, metric_outputs[name]))
def plot_history(out, history, metric='loss', title=None):
    """Plot train/validation curves for one metric and save them as PNG.

    The file is named '<out>.plot.<metric>.png'.
    """
    val_metric = 'val_{}'.format(metric)
    plt.figure(figsize=(8, 6))
    plt.plot(history.history[metric], marker='o')
    plt.plot(history.history[val_metric], marker='d')
    plt.title(title or 'model {}'.format(metric))
    plt.ylabel(metric)
    plt.xlabel('epoch')
    plt.legend(['train_{}'.format(metric), 'val_{}'.format(metric)], loc='upper center')
    plt.savefig('{}.plot.{}.png'.format(out, metric), bbox_inches='tight')
class LoggingCallback(Callback):
    """Keras callback that routes per-epoch metrics to a logging function."""

    def __init__(self, print_fcn=print):
        Callback.__init__(self)
        self.print_fcn = print_fcn

    def on_epoch_end(self, epoch, logs={}):
        # Render metrics sorted by name for stable, diffable output.
        metrics = ", ".join("%s: %f" % item for item in sorted(logs.items()))
        self.print_fcn("[Epoch: %i] %s" % (epoch, metrics))
class PermanentDropout(Dropout):
    """Dropout that stays active at inference time (MC-dropout style)."""

    def __init__(self, rate, **kwargs):
        super(PermanentDropout, self).__init__(rate, **kwargs)
        # Opt out of Keras' learning-phase switching so dropout is always on.
        self.uses_learning_phase = False

    def call(self, x, mask=None):
        if 0. < self.rate < 1.:
            x = K.dropout(x, self.rate, self._get_noise_shape(x))
        return x
class ModelRecorder(Callback):
    """Keras callback tracking validation loss and keeping a copy of the
    best-performing model seen so far.

    Fixes over the original:
      - keras.models.clone_model() only clones the architecture with freshly
        initialized weights, so the current weights are now copied explicitly;
        previously best_model never actually held the best weights.
      - guards against logs without 'val_loss' (no validation data), which
        made the ``None < float`` comparison raise TypeError.
    """

    def __init__(self, save_all_models=False):
        Callback.__init__(self)
        self.save_all_models = save_all_models
        # Register the custom layer so recorded/saved models can be reloaded.
        get_custom_objects()['PermanentDropout'] = PermanentDropout

    def on_train_begin(self, logs={}):
        self.val_losses = []
        self.best_val_loss = np.Inf
        self.best_model = None

    def on_epoch_end(self, epoch, logs={}):
        val_loss = logs.get('val_loss')
        self.val_losses.append(val_loss)
        if val_loss is not None and val_loss < self.best_val_loss:
            # clone_model() reinitializes weights; transfer them explicitly.
            self.best_model = keras.models.clone_model(self.model)
            self.best_model.set_weights(self.model.get_weights())
            self.best_val_loss = val_loss
def build_feature_model(input_shape, name='', dense_layers=[1000, 1000],
                        activation='relu', residual=False,
                        dropout_rate=0, permanent_dropout=True):
    """Build a dense encoder submodel for one input feature type.

    A stack of Dense layers with optional (permanent) dropout and optional
    residual connections where consecutive layer widths match.
    """
    x_input = Input(shape=input_shape)
    h = x_input
    for width in dense_layers:
        prev = h
        h = Dense(width, activation=activation)(h)
        if dropout_rate > 0:
            dropout_cls = PermanentDropout if permanent_dropout else Dropout
            h = dropout_cls(dropout_rate)(h)
        if residual:
            try:
                # Only possible when input/output widths agree.
                h = keras.layers.add([h, prev])
            except ValueError:
                pass
    return Model(x_input, h, name=name)
def build_model(loader, args, verbose=False):
    """Assemble the full combo model: one shared encoder per feature type,
    concatenated and followed by a dense regression head."""
    dropout_rate = args.dropout
    permanent_dropout = True

    # One reusable encoder per feature *type* (shared between drug slots).
    input_models = {}
    for fea_type, shape in loader.feature_shapes.items():
        encoder = build_feature_model(input_shape=shape, name=fea_type,
                                      dense_layers=args.dense_feature_layers,
                                      dropout_rate=dropout_rate,
                                      permanent_dropout=permanent_dropout)
        if verbose:
            encoder.summary()
        input_models[fea_type] = encoder

    inputs = []
    encoded_inputs = []
    for fea_name, fea_type in loader.input_features.items():
        fea_input = Input(loader.feature_shapes[fea_type], name='input.' + fea_name)
        inputs.append(fea_input)
        encoded_inputs.append(input_models[fea_type](fea_input))

    h = keras.layers.concatenate(encoded_inputs)
    for width in args.dense:
        prev = h
        h = Dense(width, activation=args.activation)(h)
        if dropout_rate > 0:
            dropout_cls = PermanentDropout if permanent_dropout else Dropout
            h = dropout_cls(dropout_rate)(h)
        if args.residual:
            try:
                h = keras.layers.add([h, prev])
            except ValueError:
                pass
    output = Dense(1)(h)
    return Model(inputs, output)
def get_combo_parser():
    """Build the argparse parser for the combo baseline, populated with the
    common combo options."""
    parser = argparse.ArgumentParser(
        prog='combo_baseline',
        formatter_class=argparse.ArgumentDefaultsHelpFormatter,
        description='Build neural network based models to predict tumor response to drug pairs.')
    return combo.common_parser(parser)
# def initialize_parameters():
# # Get command-line parameters
# parser = get_combo_parser()
# args = parser.parse_args()
# # Get parameters from configuration file
# file_params = combo.read_config_file(args.config_file)
# # Consolidate parameter set. Command-line parameters overwrite file configuration
# params = p1_common.args_overwrite_config(args, file_params)
# # print(params)
# return params
def initialize_parameters():
    """Create the combo CANDLE benchmark object and return its finalized
    parameter dictionary (config-file defaults merged with CLI overrides)."""
    benchmark = combo.BenchmarkCombo(
        combo.file_path, 'combo_default_model.txt', 'keras',
        prog='combo_baseline',
        desc='Build neural network based models to predict tumor response to drug pairs.')
    return candle.finalize_parameters(benchmark)
class Struct:
    """Lightweight attribute bag: exposes keyword arguments as attributes."""

    def __init__(self, **entries):
        for key, value in entries.items():
            setattr(self, key, value)
def run(params):
    """Train the combo drug-response model described by ``params``.

    Builds the data loader, constructs and trains the model (optionally with
    cross-validation and/or a batch generator), evaluates on the validation
    split, writes prediction TSVs and plots, and returns the Keras History
    of the last trained fold.
    """
    args = Struct(**params)  # attribute-style access to the parameter dict
    set_seed(args.rng_seed)
    ext = extension_from_parameters(args)
    prefix = args.save + ext
    logfile = args.logfile if args.logfile else prefix+'.log'
    set_up_logger(logfile, args.verbose)
    logger.info('Params: {}'.format(params))

    loader = ComboDataLoader(seed=args.rng_seed,
                             val_split=args.validation_split,
                             cell_features=args.cell_features,
                             drug_features=args.drug_features,
                             response_url=args.response_url,
                             use_landmark_genes=args.use_landmark_genes,
                             preprocess_rnaseq=args.preprocess_rnaseq,
                             exclude_cells=args.exclude_cells,
                             exclude_drugs=args.exclude_drugs,
                             use_combo_score=args.use_combo_score,
                             cv_partition=args.cv_partition, cv=args.cv)
    # test_loader(loader)
    # test_generator(loader)

    # Streaming generators; only consumed when args.gen is set.
    train_gen = ComboDataGenerator(loader, batch_size=args.batch_size).flow()
    val_gen = ComboDataGenerator(loader, partition='val', batch_size=args.batch_size).flow()

    train_steps = int(loader.n_train / args.batch_size)
    val_steps = int(loader.n_val / args.batch_size)

    model = build_model(loader, args, verbose=True)
    model.summary()
    # plot_model(model, to_file=prefix+'.model.png', show_shapes=True)

    if args.cp:
        model_json = model.to_json()
        with open(prefix+'.model.json', 'w') as f:
            print(model_json, file=f)

    def warmup_scheduler(epoch):
        # NOTE: closes over `model` and `base_lr`, which are rebound each
        # fold below; late binding means it always sees the current fold.
        lr = args.learning_rate or base_lr * args.batch_size/100
        if epoch <= 5:
            # Linearly ramp from base_lr to the target lr over 5 epochs.
            K.set_value(model.optimizer.lr, (base_lr * (5-epoch) + lr * epoch) / 5)
            logger.debug('Epoch {}: lr={}'.format(epoch, K.get_value(model.optimizer.lr)))
        return K.get_value(model.optimizer.lr)

    df_pred_list = []

    cv_ext = ''
    cv = args.cv if args.cv > 1 else 1

    fold = 0
    while fold < cv:
        if args.cv > 1:
            logger.info('Cross validation fold {}/{}:'.format(fold+1, cv))
            cv_ext = '.cv{}'.format(fold+1)

        # Fresh model per fold.
        model = build_model(loader, args)

        optimizer = optimizers.deserialize({'class_name': args.optimizer, 'config': {}})
        base_lr = args.base_lr or K.get_value(optimizer.lr)
        if args.learning_rate:
            K.set_value(optimizer.lr, args.learning_rate)

        model.compile(loss=args.loss, optimizer=optimizer, metrics=[mae, r2])

        # calculate trainable and non-trainable params
        # params.update(compute_trainable_params(model))

        # candle_monitor = CandleRemoteMonitor(params=params)
        # timeout_monitor = TerminateOnTimeOut(params['timeout'])
        reduce_lr = ReduceLROnPlateau(monitor='val_loss', factor=0.5, patience=5, min_lr=0.00001)
        warmup_lr = LearningRateScheduler(warmup_scheduler)
        checkpointer = ModelCheckpoint(prefix+cv_ext+'.weights.h5', save_best_only=True, save_weights_only=True)
        tensorboard = TensorBoard(log_dir="tb/tb{}{}".format(ext, cv_ext))
        history_logger = LoggingCallback(logger.debug)
        model_recorder = ModelRecorder()

        callbacks = [history_logger, model_recorder]
        # callbacks = [candle_monitor, timeout_monitor, history_logger, model_recorder]
        if args.reduce_lr:
            callbacks.append(reduce_lr)
        if args.warmup_lr:
            callbacks.append(warmup_lr)
        if args.cp:
            callbacks.append(checkpointer)
        if args.tb:
            callbacks.append(tensorboard)

        if args.gen:
            # NOTE(review): in generator mode `fold` is never advanced, so
            # the while loop appears to run forever — confirm upstream intent.
            history = model.fit_generator(train_gen, train_steps,
                                          epochs=args.epochs,
                                          callbacks=callbacks,
                                          validation_data=val_gen, validation_steps=val_steps)
        else:
            if args.cv > 1:
                x_train_list, y_train, x_val_list, y_val, df_train, df_val = loader.load_data_cv(fold)
            else:
                x_train_list, y_train, x_val_list, y_val, df_train, df_val = loader.load_data()

            # Baseline: metrics between y_val and a random permutation of itself.
            y_shuf = np.random.permutation(y_val)
            log_evaluation(evaluate_prediction(y_val, y_shuf),
                           description='Between random pairs in y_val:')
            history = model.fit(x_train_list, y_train,
                                batch_size=args.batch_size,
                                shuffle=args.shuffle,
                                epochs=args.epochs,
                                callbacks=callbacks,
                                validation_data=(x_val_list, y_val))

        if args.cp:
            # Restore the checkpointed best weights before evaluating.
            model.load_weights(prefix+cv_ext+'.weights.h5')

        if not args.gen:
            y_val_pred = model.predict(x_val_list, batch_size=args.batch_size).flatten()
            scores = evaluate_prediction(y_val, y_val_pred)
            if args.cv > 1 and scores[args.loss] > args.max_val_loss:
                # Retrain this fold from scratch (do not advance `fold`).
                logger.warn('Best val_loss {} is greater than {}; retrain the model...'.format(scores[args.loss], args.max_val_loss))
                continue
            else:
                fold += 1
            log_evaluation(scores)
            # Attach predictions/errors to the validation dataframe for export.
            df_val.is_copy = False
            df_val['GROWTH_PRED'] = y_val_pred
            df_val['GROWTH_ERROR'] = y_val_pred - y_val
            df_pred_list.append(df_val)

        if args.cp:
            # model.save(prefix+'.model.h5')
            model_recorder.best_model.save(prefix+'.model.h5')

            # test reloadded model prediction
            new_model = keras.models.load_model(prefix+'.model.h5')
            new_model.load_weights(prefix+cv_ext+'.weights.h5')
            new_pred = new_model.predict(x_val_list, batch_size=args.batch_size).flatten()
            # print('y_val:', y_val[:10])
            # print('old_pred:', y_val_pred[:10])
            # print('new_pred:', new_pred[:10])

        plot_history(prefix, history, 'loss')
        plot_history(prefix, history, 'r2')

        # Free the TF graph between folds to bound memory growth.
        if K.backend() == 'tensorflow':
            K.clear_session()

    pred_fname = prefix + '.predicted.growth.tsv'
    if args.use_combo_score:
        pred_fname = prefix + '.predicted.score.tsv'
    # Concatenate per-fold validation predictions into one TSV.
    df_pred = pd.concat(df_pred_list)
    df_pred.to_csv(pred_fname, sep='\t', index=False, float_format='%.4g')

    logger.handlers = []
    return history
def main():
    """Entry point: resolve parameters, then train."""
    run(initialize_parameters())
if __name__ == '__main__':
    main()
    # Release the TF session explicitly so the process exits cleanly.
    if K.backend() == 'tensorflow':
        K.clear_session()
| 40.876313 | 187 | 0.61163 |
from __future__ import division, print_function
import argparse
import collections
import logging
import os
import random
import threading
import numpy as np
import pandas as pd
from itertools import cycle, islice
import keras
from keras import backend as K
from keras import optimizers
from keras.models import Model
from keras.layers import Input, Dense, Dropout
from keras.callbacks import Callback, ModelCheckpoint, ReduceLROnPlateau, LearningRateScheduler, TensorBoard
from keras.utils import get_custom_objects
from keras.utils.vis_utils import plot_model
from sklearn.metrics import r2_score, mean_squared_error, mean_absolute_error
from sklearn.model_selection import KFold, StratifiedKFold, GroupKFold
from scipy.stats.stats import pearsonr
import matplotlib as mpl
mpl.use('Agg')
import matplotlib.pyplot as plt
import combo
import candle
import NCI60
logger = logging.getLogger(__name__)
os.environ['TF_CPP_MIN_LOG_LEVEL'] = '3'
def set_seed(seed):
    """Seed every RNG in play (hashing, NumPy, stdlib, TF) for reproducibility.

    PYTHONHASHSEED only affects processes started after it is set; it is
    exported here for any child processes.
    """
    os.environ['PYTHONHASHSEED'] = '0'
    np.random.seed(seed)
    random.seed(seed)
    if K.backend() == 'tensorflow':
        # Imported lazily: only needed (and only valid) on the TF backend.
        import tensorflow as tf
        tf.set_random_seed(seed)
def verify_path(path):
    """Ensure the directory portion of *path* exists, creating it if needed."""
    parent = os.path.dirname(path)
    if parent and not os.path.exists(parent):
        os.makedirs(parent)
def set_up_logger(logfile, verbose):
    """Attach a DEBUG file handler and a console handler to the module logger.

    Console verbosity is DEBUG when *verbose*, otherwise INFO.
    """
    verify_path(logfile)
    file_handler = logging.FileHandler(logfile)
    file_handler.setFormatter(logging.Formatter("[%(asctime)s %(process)d] %(message)s", datefmt="%Y-%m-%d %H:%M:%S"))
    file_handler.setLevel(logging.DEBUG)

    console_handler = logging.StreamHandler()
    console_handler.setFormatter(logging.Formatter(''))
    console_handler.setLevel(logging.DEBUG if verbose else logging.INFO)

    logger.setLevel(logging.DEBUG)
    logger.addHandler(file_handler)
    logger.addHandler(console_handler)
def extension_from_parameters(args):
    """Encode the key hyperparameters into a filename suffix for artifacts."""
    parts = [
        '.A={}'.format(args.activation),
        '.B={}'.format(args.batch_size),
        '.E={}'.format(args.epochs),
        '.O={}'.format(args.optimizer),
        '.LR={}'.format(args.learning_rate),
        # First letter of each feature name, sorted for a stable tag.
        '.CF={}'.format(''.join(x[0] for x in sorted(args.cell_features))),
        '.DF={}'.format(''.join(x[0] for x in sorted(args.drug_features))),
    ]
    if args.feature_subsample > 0:
        parts.append('.FS={}'.format(args.feature_subsample))
    if args.dropout > 0:
        parts.append('.DR={}'.format(args.dropout))
    if args.warmup_lr:
        parts.append('.wu_lr')
    if args.reduce_lr:
        parts.append('.re_lr')
    if args.residual:
        parts.append('.res')
    if args.use_landmark_genes:
        parts.append('.L1000')
    if args.gen:
        parts.append('.gen')
    if args.use_combo_score:
        parts.append('.scr')
    for i, n in enumerate(args.dense):
        if n > 0:
            parts.append('.D{}={}'.format(i+1, n))
    if args.dense_feature_layers != args.dense:
        # NOTE(review): mirrors the original, which iterates args.dense here
        # (not args.dense_feature_layers) when emitting the FD tags — confirm.
        for i, n in enumerate(args.dense):
            if n > 0:
                parts.append('.FD{}={}'.format(i+1, n))
    return ''.join(parts)
def discretize(y, bins=5):
percentiles = [100 / bins * (i + 1) for i in range(bins - 1)]
thresholds = [np.percentile(y, x) for x in percentiles]
classes = np.digitize(y, thresholds)
return classes
class ComboDataLoader(object):
    """Load and align NCI60 drug-pair dose response data with cell-line and
    drug feature matrices.

    After construction:
      - df_response holds the (CELLNAME, NSC1, NSC2, doses, GROWTH) rows,
      - per-feature dataframes (df_cell_expr, df_drug_desc, ...) are set,
      - input_features / feature_shapes describe the model inputs,
      - train/validation sizes and (optionally) CV fold indexes are ready.
    """

    def __init__(self, seed, val_split=0.2, shuffle=True,
                 cell_features=['expression'], drug_features=['descriptors'],
                 response_url=None, use_landmark_genes=False, use_combo_score=False,
                 preprocess_rnaseq=None, exclude_cells=[], exclude_drugs=[],
                 feature_subsample=None, scaling='std', scramble=False,
                 cv_partition='overlapping', cv=0):
        # Partition scheme: 'overlapping', 'disjoint' (drugs) or 'disjoint_cells'.
        self.cv_partition = cv_partition

        np.random.seed(seed)

        df = NCI60.load_combo_dose_response(response_url=response_url, use_combo_score=use_combo_score, fraction=True, exclude_cells=exclude_cells, exclude_drugs=exclude_drugs)
        logger.info('Loaded {} unique (CL, D1, D2) response sets.'.format(df.shape[0]))

        # 'all' expands to the full list of supported feature sets.
        if 'all' in cell_features:
            self.cell_features = ['expression', 'mirna', 'proteome']
        else:
            self.cell_features = cell_features

        if 'all' in drug_features:
            self.drug_features = ['descriptors', 'latent']
        else:
            self.drug_features = drug_features

        # Load each requested cell feature table and filter the response rows
        # down to cell lines that have that feature available.
        for fea in self.cell_features:
            if fea == 'expression' or fea == 'rnaseq':
                self.df_cell_expr = NCI60.load_cell_expression_rnaseq(ncols=feature_subsample, scaling=scaling, use_landmark_genes=use_landmark_genes, preprocess_rnaseq=preprocess_rnaseq)
                df = df.merge(self.df_cell_expr[['CELLNAME']], on='CELLNAME')
            elif fea == 'expression_u133p2':
                self.df_cell_expr = NCI60.load_cell_expression_u133p2(ncols=feature_subsample, scaling=scaling, use_landmark_genes=use_landmark_genes)
                df = df.merge(self.df_cell_expr[['CELLNAME']], on='CELLNAME')
            elif fea == 'expression_5platform':
                self.df_cell_expr = NCI60.load_cell_expression_5platform(ncols=feature_subsample, scaling=scaling, use_landmark_genes=use_landmark_genes)
                df = df.merge(self.df_cell_expr[['CELLNAME']], on='CELLNAME')
            elif fea == 'mirna':
                self.df_cell_mirna = NCI60.load_cell_mirna(ncols=feature_subsample, scaling=scaling)
                df = df.merge(self.df_cell_mirna[['CELLNAME']], on='CELLNAME')
            elif fea == 'proteome':
                self.df_cell_prot = NCI60.load_cell_proteome(ncols=feature_subsample, scaling=scaling)
                df = df.merge(self.df_cell_prot[['CELLNAME']], on='CELLNAME')
            elif fea == 'categorical':
                # One-hot encoding of the cell line identity itself.
                df_cell_ids = df[['CELLNAME']].drop_duplicates()
                cell_ids = df_cell_ids['CELLNAME'].map(lambda x: x.replace(':', '.'))
                df_cell_cat = pd.get_dummies(cell_ids)
                df_cell_cat.index = df_cell_ids['CELLNAME']
                self.df_cell_cat = df_cell_cat.reset_index()

        # Load each requested drug feature table; keep only response rows
        # where BOTH drugs of the pair have the feature.
        for fea in self.drug_features:
            if fea == 'descriptors':
                self.df_drug_desc = NCI60.load_drug_descriptors(ncols=feature_subsample, scaling=scaling)
                df = df[df['NSC1'].isin(self.df_drug_desc['NSC']) & df['NSC2'].isin(self.df_drug_desc['NSC'])]
            elif fea == 'latent':
                self.df_drug_auen = NCI60.load_drug_autoencoded_AG(ncols=feature_subsample, scaling=scaling)
                df = df[df['NSC1'].isin(self.df_drug_auen['NSC']) & df['NSC2'].isin(self.df_drug_auen['NSC'])]
            elif fea == 'categorical':
                # One-hot drug identity.
                # NOTE(review): built from NSC1 only — drugs appearing only in
                # NSC2 get no categorical row; confirm this is intended.
                df_drug_ids = df[['NSC1']].drop_duplicates()
                df_drug_ids.columns = ['NSC']
                drug_ids = df_drug_ids['NSC']
                df_drug_cat = pd.get_dummies(drug_ids)
                df_drug_cat.index = df_drug_ids['NSC']
                self.df_drug_cat = df_drug_cat.reset_index()
            elif fea == 'noise':
                # Random features: a control to measure information leakage.
                ids1 = df[['NSC1']].drop_duplicates().rename(columns={'NSC1':'NSC'})
                ids2 = df[['NSC2']].drop_duplicates().rename(columns={'NSC2':'NSC'})
                df_drug_ids = pd.concat([ids1, ids2]).drop_duplicates()
                noise = np.random.normal(size=(df_drug_ids.shape[0], 500))
                df_rand = pd.DataFrame(noise, index=df_drug_ids['NSC'],
                                       columns=['RAND-{:03d}'.format(x) for x in range(500)])
                self.df_drug_rand = df_rand.reset_index()

        logger.info('Filtered down to {} rows with matching information.'.format(df.shape[0]))

        # All unique drug ids across both pair slots (for disjoint splits).
        ids1 = df[['NSC1']].drop_duplicates().rename(columns={'NSC1':'NSC'})
        ids2 = df[['NSC2']].drop_duplicates().rename(columns={'NSC2':'NSC'})
        df_drug_ids = pd.concat([ids1, ids2]).drop_duplicates().reset_index(drop=True)

        n_drugs = df_drug_ids.shape[0]
        n_val_drugs = int(n_drugs * val_split)
        n_train_drugs = n_drugs - n_val_drugs

        logger.info('Unique cell lines: {}'.format(df['CELLNAME'].nunique()))
        logger.info('Unique drugs: {}'.format(n_drugs))

        if shuffle:
            df = df.sample(frac=1.0, random_state=seed).reset_index(drop=True)
            df_drug_ids = df_drug_ids.sample(frac=1.0, random_state=seed).reset_index(drop=True)

        self.df_response = df
        self.df_drug_ids = df_drug_ids

        self.train_drug_ids = df_drug_ids['NSC'][:n_train_drugs]
        # NOTE(review): when n_val_drugs == 0 this slice selects ALL drugs —
        # confirm val_split > 0 upstream.
        self.val_drug_ids = df_drug_ids['NSC'][-n_val_drugs:]

        if scramble:
            # Permute GROWTH across rows: a negative control experiment.
            growth = df[['GROWTH']]
            random_growth = growth.iloc[np.random.permutation(np.arange(growth.shape[0]))].reset_index()
            self.df_response[['GROWTH']] = random_growth['GROWTH']
            logger.warn('Randomly shuffled dose response growth values.')

        logger.info('Distribution of dose response:')
        logger.info(self.df_response[['GROWTH']].describe())

        # Positional train/validation sizes (used by non-disjoint splits).
        self.total = df.shape[0]
        self.n_val = int(self.total * val_split)
        self.n_train = self.total - self.n_val
        logger.info('Rows in train: {}, val: {}'.format(self.n_train, self.n_val))

        # Map feature name -> attribute name of the backing dataframe.
        self.cell_df_dict = {'expression': 'df_cell_expr',
                             'expression_5platform': 'df_cell_expr',
                             'expression_u133p2': 'df_cell_expr',
                             'rnaseq': 'df_cell_expr',
                             'mirna': 'df_cell_mirna',
                             'proteome': 'df_cell_prot',
                             'categorical': 'df_cell_cat'}

        self.drug_df_dict = {'descriptors': 'df_drug_desc',
                             'latent': 'df_drug_auen',
                             'categorical': 'df_drug_cat',
                             'noise': 'df_drug_rand'}

        # Describe the model inputs: name -> type, type -> shape. Both drug
        # slots share the same feature *type* (hence shared encoders).
        self.input_features = collections.OrderedDict()
        self.feature_shapes = {}

        for fea in self.cell_features:
            feature_type = 'cell.' + fea
            feature_name = 'cell.' + fea
            df_cell = getattr(self, self.cell_df_dict[fea])
            self.input_features[feature_name] = feature_type
            # First column is the CELLNAME key, hence the -1.
            self.feature_shapes[feature_type] = (df_cell.shape[1] - 1,)

        for drug in ['drug1', 'drug2']:
            for fea in self.drug_features:
                feature_type = 'drug.' + fea
                feature_name = drug + '.' + fea
                df_drug = getattr(self, self.drug_df_dict[fea])
                self.input_features[feature_name] = feature_type
                self.feature_shapes[feature_type] = (df_drug.shape[1] - 1,)

        # Scalar log-concentration input per drug slot.
        self.feature_shapes['dose'] = (1,)
        for dose in ['dose1', 'dose2']:
            self.input_features[dose] = 'dose'

        logger.info('Input features shapes:')
        for k, v in self.input_features.items():
            logger.info(' {}: {}'.format(k, self.feature_shapes[v]))

        self.input_dim = sum([np.prod(self.feature_shapes[x]) for x in self.input_features.values()])
        logger.info('Total input dimensions: {}'.format(self.input_dim))

        # Precompute CV fold indexes ('disjoint' drug folds are computed
        # lazily in load_data instead).
        if cv > 1:
            if cv_partition == 'disjoint':
                pass
            elif cv_partition == 'disjoint_cells':
                y = self.df_response['GROWTH'].values
                groups = self.df_response['CELLNAME'].values
                gkf = GroupKFold(n_splits=cv)
                splits = gkf.split(y, groups=groups)
                self.cv_train_indexes = []
                self.cv_val_indexes = []
                for index, (train_index, val_index) in enumerate(splits):
                    print(index, train_index)
                    self.cv_train_indexes.append(train_index)
                    self.cv_val_indexes.append(val_index)
            else:
                # Stratify on discretized GROWTH so folds share the label
                # distribution.
                y = self.df_response['GROWTH'].values
                skf = StratifiedKFold(n_splits=cv, random_state=seed)
                splits = skf.split(y, discretize(y, bins=cv))
                self.cv_train_indexes = []
                self.cv_val_indexes = []
                for index, (train_index, val_index) in enumerate(splits):
                    print(index, train_index)
                    self.cv_train_indexes.append(train_index)
                    self.cv_val_indexes.append(val_index)

    def load_data_all(self, switch_drugs=False):
        """Build the full feature matrices and target vector for all rows.

        When switch_drugs is True the two drug slots (and their doses) are
        swapped — useful for order-invariance augmentation/evaluation.

        Returns (x_all_list, y_all, df_all).
        """
        df_all = self.df_response
        y_all = df_all['GROWTH'].values
        x_all_list = []

        # Cell-line features, aligned to response rows by CELLNAME.
        for fea in self.cell_features:
            df_cell = getattr(self, self.cell_df_dict[fea])
            df_x_all = pd.merge(df_all[['CELLNAME']], df_cell, on='CELLNAME', how='left')
            x_all_list.append(df_x_all.drop(['CELLNAME'], axis=1).values)

        drugs = ['NSC1', 'NSC2']
        doses = ['pCONC1', 'pCONC2']
        if switch_drugs:
            drugs = ['NSC2', 'NSC1']
            doses = ['pCONC2', 'pCONC1']

        # Drug features per pair slot, aligned by NSC id.
        for drug in drugs:
            for fea in self.drug_features:
                df_drug = getattr(self, self.drug_df_dict[fea])
                df_x_all = pd.merge(df_all[[drug]], df_drug, left_on=drug, right_on='NSC', how='left')
                x_all_list.append(df_x_all.drop([drug, 'NSC'], axis=1).values)

        # Scalar dose inputs last, matching input_features ordering.
        for dose in doses:
            x_all_list.append(df_all[dose].values)

        return x_all_list, y_all, df_all

    def load_data_by_index(self, train_index, val_index):
        """Slice the full dataset into train/validation partitions.

        Returns (x_train_list, y_train, x_val_list, y_val, df_train, df_val).
        """
        x_all_list, y_all, df_all = self.load_data_all()
        x_train_list = [x[train_index] for x in x_all_list]
        x_val_list = [x[val_index] for x in x_all_list]
        y_train = y_all[train_index]
        y_val = y_all[val_index]
        df_train = df_all.iloc[train_index, :]
        df_val = df_all.iloc[val_index, :]
        # For disjoint partitions, log which entities landed on each side.
        if self.cv_partition == 'disjoint':
            logger.info('Training drugs: {}'.format(set(df_train['NSC1'])))
            logger.info('Validation drugs: {}'.format(set(df_val['NSC1'])))
        elif self.cv_partition == 'disjoint_cells':
            logger.info('Training cells: {}'.format(set(df_train['CELLNAME'])))
            logger.info('Validation cells: {}'.format(set(df_val['CELLNAME'])))
        return x_train_list, y_train, x_val_list, y_val, df_train, df_val

    def load_data_cv(self, fold):
        """Return the train/validation split for one precomputed CV fold."""
        train_index = self.cv_train_indexes[fold]
        val_index = self.cv_val_indexes[fold]
        return self.load_data_by_index(train_index, val_index)

    def load_data(self):
        """Return the train/validation split for the configured partition.

        'disjoint' keeps rows where BOTH drugs are in the respective drug
        set; otherwise the response table is split positionally.
        """
        if self.cv_partition == 'disjoint':
            train_index = self.df_response[(self.df_response['NSC1'].isin(self.train_drug_ids)) & (self.df_response['NSC2'].isin(self.train_drug_ids))].index
            val_index = self.df_response[(self.df_response['NSC1'].isin(self.val_drug_ids)) & (self.df_response['NSC2'].isin(self.val_drug_ids))].index
        else:
            train_index = range(self.n_train)
            val_index = range(self.n_train, self.total)
        return self.load_data_by_index(train_index, val_index)

    def load_data_old(self):
        """Deprecated positional loader (superseded by load_data).

        Performed the merges per-partition; ~4x slower, possibly due to
        incontiguous data.
        """
        df_train = self.df_response.iloc[:self.n_train, :]
        df_val = self.df_response.iloc[self.n_train:, :]
        y_train = df_train['GROWTH'].values
        y_val = df_val['GROWTH'].values
        x_train_list = []
        x_val_list = []
        for fea in self.cell_features:
            df_cell = getattr(self, self.cell_df_dict[fea])
            df_x_train = pd.merge(df_train[['CELLNAME']], df_cell, on='CELLNAME', how='left')
            df_x_val = pd.merge(df_val[['CELLNAME']], df_cell, on='CELLNAME', how='left')
            x_train_list.append(df_x_train.drop(['CELLNAME'], axis=1).values)
            x_val_list.append(df_x_val.drop(['CELLNAME'], axis=1).values)
        for drug in ['NSC1', 'NSC2']:
            for fea in self.drug_features:
                df_drug = getattr(self, self.drug_df_dict[fea])
                df_x_train = pd.merge(df_train[[drug]], df_drug, left_on=drug, right_on='NSC', how='left')
                df_x_val = pd.merge(df_val[[drug]], df_drug, left_on=drug, right_on='NSC', how='left')
                x_train_list.append(df_x_train.drop([drug, 'NSC'], axis=1).values)
                x_val_list.append(df_x_val.drop([drug, 'NSC'], axis=1).values)
        return x_train_list, y_train, x_val_list, y_val, df_train, df_val
class ComboDataGenerator(object):
    """Generate training, validation or testing batches from loaded data."""

    def __init__(self, data, partition='train', batch_size=32):
        self.lock = threading.Lock()
        self.data = data
        self.partition = partition
        self.batch_size = batch_size
        # Endless cycle over the row positions belonging to this partition.
        if partition == 'train':
            self.num_data = data.n_train
            self.cycle = cycle(range(data.n_train))
        elif partition == 'val':
            self.num_data = data.n_val
            self.cycle = cycle(range(data.total)[-data.n_val:])
        else:
            raise Exception('Data partition "{}" not recognized.'.format(partition))

    def flow(self):
        """Yield (x_list, y) batches indefinitely."""
        while 1:
            # Guard the shared cycle iterator so concurrent workers draw
            # disjoint index batches.
            with self.lock:
                indices = list(islice(self.cycle, self.batch_size))
            batch = self.data.df_response.iloc[indices, :]
            y = batch['GROWTH'].values
            x_list = []
            for fea in self.data.cell_features:
                df_cell = getattr(self.data, self.data.cell_df_dict[fea])
                merged = pd.merge(batch[['CELLNAME']], df_cell, on='CELLNAME', how='left')
                x_list.append(merged.drop(['CELLNAME'], axis=1).values)
            for drug in ['NSC1', 'NSC2']:
                for fea in self.data.drug_features:
                    df_drug = getattr(self.data, self.data.drug_df_dict[fea])
                    merged = pd.merge(batch[[drug]], df_drug, left_on=drug, right_on='NSC', how='left')
                    x_list.append(merged.drop([drug, 'NSC'], axis=1).values)
            yield x_list, y
def test_generator(loader):
    """Smoke-test ComboDataGenerator by printing the shapes of one batch."""
    x_list, y = next(ComboDataGenerator(loader).flow())
    for x in x_list:
        print(x.shape)
    print(y.shape)
def test_loader(loader):
x_train_list, y_train, x_val_list, y_val = loader.load_data()
print('x_train shapes:')
for x in x_train_list:
print(x.shape)
print('y_train shape:', y_train.shape)
print('x_val shapes:')
for x in x_val_list:
print(x.shape)
print('y_val shape:', y_val.shape)
def r2(y_true, y_pred):
    """Keras metric: coefficient of determination (R^2) of predictions."""
    ss_res = K.sum(K.square(y_true - y_pred))
    ss_tot = K.sum(K.square(y_true - K.mean(y_true)))
    # Epsilon keeps the ratio finite when y_true is constant.
    return 1 - ss_res / (ss_tot + K.epsilon())
def mae(y_true, y_pred):
    """Keras metric: mean absolute error (named wrapper for reports)."""
    error = keras.metrics.mean_absolute_error(y_true, y_pred)
    return error
def evaluate_prediction(y_true, y_pred):
    """Compute regression quality metrics for predictions.

    Returns a dict with mean squared error, mean absolute error, R^2 and
    Pearson correlation.
    """
    return {
        'mse': mean_squared_error(y_true, y_pred),
        'mae': mean_absolute_error(y_true, y_pred),
        'r2': r2_score(y_true, y_pred),
        'corr': pearsonr(y_true, y_pred)[0],
    }
def log_evaluation(metric_outputs, description='Comparing y_true and y_pred:'):
    """Write a metrics dict to the module logger, one line per metric."""
    logger.info(description)
    for name in metric_outputs:
        logger.info(' {}: {:.4f}'.format(name, metric_outputs[name]))
def plot_history(out, history, metric='loss', title=None):
    """Plot train/validation curves for one metric and save them as PNG.

    The file is named '<out>.plot.<metric>.png'.
    """
    val_metric = 'val_{}'.format(metric)
    plt.figure(figsize=(8, 6))
    plt.plot(history.history[metric], marker='o')
    plt.plot(history.history[val_metric], marker='d')
    plt.title(title or 'model {}'.format(metric))
    plt.ylabel(metric)
    plt.xlabel('epoch')
    plt.legend(['train_{}'.format(metric), 'val_{}'.format(metric)], loc='upper center')
    plt.savefig('{}.plot.{}.png'.format(out, metric), bbox_inches='tight')
class LoggingCallback(Callback):
    """Keras callback that routes per-epoch metrics to a logging function."""

    def __init__(self, print_fcn=print):
        Callback.__init__(self)
        self.print_fcn = print_fcn

    def on_epoch_end(self, epoch, logs={}):
        # Render metrics sorted by name for stable, diffable output.
        metrics = ", ".join("%s: %f" % item for item in sorted(logs.items()))
        self.print_fcn("[Epoch: %i] %s" % (epoch, metrics))
class PermanentDropout(Dropout):
    """Dropout that stays active at inference time (MC-dropout style)."""

    def __init__(self, rate, **kwargs):
        super(PermanentDropout, self).__init__(rate, **kwargs)
        # Opt out of Keras' learning-phase switching so dropout is always on.
        self.uses_learning_phase = False

    def call(self, x, mask=None):
        if 0. < self.rate < 1.:
            x = K.dropout(x, self.rate, self._get_noise_shape(x))
        return x
class ModelRecorder(Callback):
    """Keras callback tracking validation loss and keeping a copy of the
    best-performing model seen so far.

    Fixes over the original:
      - keras.models.clone_model() only clones the architecture with freshly
        initialized weights, so the current weights are now copied explicitly;
        previously best_model never actually held the best weights.
      - guards against logs without 'val_loss' (no validation data), which
        made the ``None < float`` comparison raise TypeError.
    """

    def __init__(self, save_all_models=False):
        Callback.__init__(self)
        self.save_all_models = save_all_models
        # Register the custom layer so recorded/saved models can be reloaded.
        get_custom_objects()['PermanentDropout'] = PermanentDropout

    def on_train_begin(self, logs={}):
        self.val_losses = []
        self.best_val_loss = np.Inf
        self.best_model = None

    def on_epoch_end(self, epoch, logs={}):
        val_loss = logs.get('val_loss')
        self.val_losses.append(val_loss)
        if val_loss is not None and val_loss < self.best_val_loss:
            # clone_model() reinitializes weights; transfer them explicitly.
            self.best_model = keras.models.clone_model(self.model)
            self.best_model.set_weights(self.model.get_weights())
            self.best_val_loss = val_loss
def build_feature_model(input_shape, name='', dense_layers=[1000, 1000],
                        activation='relu', residual=False,
                        dropout_rate=0, permanent_dropout=True):
    """Build a dense encoder submodel for one input feature type.

    A stack of Dense layers with optional (permanent) dropout and optional
    residual connections where consecutive layer widths match.
    """
    x_input = Input(shape=input_shape)
    h = x_input
    for width in dense_layers:
        prev = h
        h = Dense(width, activation=activation)(h)
        if dropout_rate > 0:
            dropout_cls = PermanentDropout if permanent_dropout else Dropout
            h = dropout_cls(dropout_rate)(h)
        if residual:
            try:
                # Only possible when input/output widths agree.
                h = keras.layers.add([h, prev])
            except ValueError:
                pass
    return Model(x_input, h, name=name)
def build_model(loader, args, verbose=False):
    """Assemble the full combo model: one shared encoder per feature type,
    concatenated and followed by a dense regression head."""
    dropout_rate = args.dropout
    permanent_dropout = True

    # One reusable encoder per feature *type* (shared between drug slots).
    input_models = {}
    for fea_type, shape in loader.feature_shapes.items():
        encoder = build_feature_model(input_shape=shape, name=fea_type,
                                      dense_layers=args.dense_feature_layers,
                                      dropout_rate=dropout_rate,
                                      permanent_dropout=permanent_dropout)
        if verbose:
            encoder.summary()
        input_models[fea_type] = encoder

    inputs = []
    encoded_inputs = []
    for fea_name, fea_type in loader.input_features.items():
        fea_input = Input(loader.feature_shapes[fea_type], name='input.' + fea_name)
        inputs.append(fea_input)
        encoded_inputs.append(input_models[fea_type](fea_input))

    h = keras.layers.concatenate(encoded_inputs)
    for width in args.dense:
        prev = h
        h = Dense(width, activation=args.activation)(h)
        if dropout_rate > 0:
            dropout_cls = PermanentDropout if permanent_dropout else Dropout
            h = dropout_cls(dropout_rate)(h)
        if args.residual:
            try:
                h = keras.layers.add([h, prev])
            except ValueError:
                pass
    output = Dense(1)(h)
    return Model(inputs, output)
def get_combo_parser():
    """Build the argparse parser for the combo baseline, extended with the
    benchmark's common options via ``combo.common_parser``."""
    parser = argparse.ArgumentParser(
        prog='combo_baseline',
        formatter_class=argparse.ArgumentDefaultsHelpFormatter,
        description='Build neural network based models to predict tumor response to drug pairs.')
    return combo.common_parser(parser)
desc = 'Build neural network based models to predict tumor response to drug pairs.')
gParameters = candle.finalize_parameters(comboBmk)
return gParameters
class Struct:
    """Lightweight attribute bag: exposes keyword arguments as attributes."""

    def __init__(self, **entries):
        for attr_name, attr_value in entries.items():
            setattr(self, attr_name, attr_value)
def run(params):
    """Train the combo drug-response model with the given hyperparameters.

    Builds the data loader, trains one model per cross-validation fold
    (retraining a fold when its best val_loss exceeds ``max_val_loss``),
    writes predictions to a TSV file, and returns the last Keras History.

    Args:
        params: dict of hyperparameters; wrapped into an attribute ``Struct``.
    """
    args = Struct(**params)
    set_seed(args.rng_seed)
    # Filename prefix encodes the hyperparameter choices.
    ext = extension_from_parameters(args)
    prefix = args.save + ext
    logfile = args.logfile if args.logfile else prefix+'.log'
    set_up_logger(logfile, args.verbose)
    logger.info('Params: {}'.format(params))
    loader = ComboDataLoader(seed=args.rng_seed,
                             val_split=args.validation_split,
                             cell_features=args.cell_features,
                             drug_features=args.drug_features,
                             response_url=args.response_url,
                             use_landmark_genes=args.use_landmark_genes,
                             preprocess_rnaseq=args.preprocess_rnaseq,
                             exclude_cells=args.exclude_cells,
                             exclude_drugs=args.exclude_drugs,
                             use_combo_score=args.use_combo_score,
                             cv_partition=args.cv_partition, cv=args.cv)
    train_gen = ComboDataGenerator(loader, batch_size=args.batch_size).flow()
    val_gen = ComboDataGenerator(loader, partition='val', batch_size=args.batch_size).flow()
    train_steps = int(loader.n_train / args.batch_size)
    val_steps = int(loader.n_val / args.batch_size)
    # Built once only to print the architecture summary; the actual model
    # used for training is rebuilt per fold below.
    model = build_model(loader, args, verbose=True)
    model.summary()
    if args.cp:
        model_json = model.to_json()
        with open(prefix+'.model.json', 'w') as f:
            print(model_json, file=f)
    def warmup_scheduler(epoch):
        # NOTE(review): closure reads ``base_lr`` and ``model`` from the
        # enclosing scope; ``base_lr`` is only bound inside the fold loop
        # below — confirm the scheduler is never invoked before compile.
        lr = args.learning_rate or base_lr * args.batch_size/100
        if epoch <= 5:
            # Linear warmup from base_lr to lr over the first 5 epochs.
            K.set_value(model.optimizer.lr, (base_lr * (5-epoch) + lr * epoch) / 5)
        logger.debug('Epoch {}: lr={}'.format(epoch, K.get_value(model.optimizer.lr)))
        return K.get_value(model.optimizer.lr)
    df_pred_list = []
    cv_ext = ''
    cv = args.cv if args.cv > 1 else 1
    fold = 0
    while fold < cv:
        if args.cv > 1:
            logger.info('Cross validation fold {}/{}:'.format(fold+1, cv))
            cv_ext = '.cv{}'.format(fold+1)
        model = build_model(loader, args)
        optimizer = optimizers.deserialize({'class_name': args.optimizer, 'config': {}})
        base_lr = args.base_lr or K.get_value(optimizer.lr)
        if args.learning_rate:
            K.set_value(optimizer.lr, args.learning_rate)
        model.compile(loss=args.loss, optimizer=optimizer, metrics=[mae, r2])
        # Callback setup: each one is appended only if enabled by flags.
        reduce_lr = ReduceLROnPlateau(monitor='val_loss', factor=0.5, patience=5, min_lr=0.00001)
        warmup_lr = LearningRateScheduler(warmup_scheduler)
        checkpointer = ModelCheckpoint(prefix+cv_ext+'.weights.h5', save_best_only=True, save_weights_only=True)
        tensorboard = TensorBoard(log_dir="tb/tb{}{}".format(ext, cv_ext))
        history_logger = LoggingCallback(logger.debug)
        model_recorder = ModelRecorder()
        callbacks = [history_logger, model_recorder]
        if args.reduce_lr:
            callbacks.append(reduce_lr)
        if args.warmup_lr:
            callbacks.append(warmup_lr)
        if args.cp:
            callbacks.append(checkpointer)
        if args.tb:
            callbacks.append(tensorboard)
        if args.gen:
            # Generator-based training path (no in-memory fold data).
            history = model.fit_generator(train_gen, train_steps,
                                          epochs=args.epochs,
                                          callbacks=callbacks,
                                          validation_data=val_gen, validation_steps=val_steps)
        else:
            if args.cv > 1:
                x_train_list, y_train, x_val_list, y_val, df_train, df_val = loader.load_data_cv(fold)
            else:
                x_train_list, y_train, x_val_list, y_val, df_train, df_val = loader.load_data()
            # Baseline sanity check: score of randomly paired validation labels.
            y_shuf = np.random.permutation(y_val)
            log_evaluation(evaluate_prediction(y_val, y_shuf),
                           description='Between random pairs in y_val:')
            history = model.fit(x_train_list, y_train,
                                batch_size=args.batch_size,
                                shuffle=args.shuffle,
                                epochs=args.epochs,
                                callbacks=callbacks,
                                validation_data=(x_val_list, y_val))
        if args.cp:
            # Restore the checkpointed best weights before evaluating.
            model.load_weights(prefix+cv_ext+'.weights.h5')
        if not args.gen:
            y_val_pred = model.predict(x_val_list, batch_size=args.batch_size).flatten()
            scores = evaluate_prediction(y_val, y_val_pred)
            if args.cv > 1 and scores[args.loss] > args.max_val_loss:
                # Fold missed the quality bar: retrain the same fold.
                logger.warn('Best val_loss {} is greater than {}; retrain the model...'.format(scores[args.loss], args.max_val_loss))
                continue
            else:
                fold += 1
            log_evaluation(scores)
            df_val.is_copy = False
            df_val['GROWTH_PRED'] = y_val_pred
            df_val['GROWTH_ERROR'] = y_val_pred - y_val
            df_pred_list.append(df_val)
        if args.cp:
            # Persist the full best model and verify it round-trips.
            model_recorder.best_model.save(prefix+'.model.h5')
            new_model = keras.models.load_model(prefix+'.model.h5')
            new_model.load_weights(prefix+cv_ext+'.weights.h5')
            new_pred = new_model.predict(x_val_list, batch_size=args.batch_size).flatten()
        plot_history(prefix, history, 'loss')
        plot_history(prefix, history, 'r2')
    if K.backend() == 'tensorflow':
        K.clear_session()
    pred_fname = prefix + '.predicted.growth.tsv'
    if args.use_combo_score:
        pred_fname = prefix + '.predicted.score.tsv'
    df_pred = pd.concat(df_pred_list)
    df_pred.to_csv(pred_fname, sep='\t', index=False, float_format='%.4g')
    # Detach log handlers so repeated runs don't duplicate log lines.
    logger.handlers = []
    return history
def main():
    """Script entry point: build the parameter set and launch a run."""
    run(initialize_parameters())
if __name__ == '__main__':
    main()
    # Explicitly release the TF session/graph on exit to free memory and
    # avoid noisy interpreter-teardown errors with the TensorFlow backend.
    if K.backend() == 'tensorflow':
        K.clear_session()
| true | true |
f720c9210c69a182402e3ddf9bc6f6f6a8920ba9 | 659 | py | Python | utils/check_callback.py | jrl-umi3218/mc_naoqi_dcm | 605d2c448bd1254466d7a1f7f7a7c595ef5d8398 | [
"BSD-2-Clause"
] | null | null | null | utils/check_callback.py | jrl-umi3218/mc_naoqi_dcm | 605d2c448bd1254466d7a1f7f7a7c595ef5d8398 | [
"BSD-2-Clause"
] | null | null | null | utils/check_callback.py | jrl-umi3218/mc_naoqi_dcm | 605d2c448bd1254466d7a1f7f7a7c595ef5d8398 | [
"BSD-2-Clause"
] | null | null | null | # Disactivate safety reflexes
# First, go to http://pepper.local/advanced/#/settings to enable the deactivation
import qi
import sys

# Robot endpoint (same hard-coded values the original script connected to).
ROBOT_IP = "127.0.0.1"
ROBOT_PORT = 9559

# Connect to the NAOqi session.
session = qi.Session()
try:
    session.connect("tcp://" + ROBOT_IP + ":" + str(ROBOT_PORT))
except RuntimeError:
    # Bug fix: the original referenced undefined ``args.ip``/``args.port``
    # here, raising a NameError instead of printing the intended message.
    print("Can't connect to Naoqi at ip \"" + ROBOT_IP + "\" on port " + str(ROBOT_PORT) + ".\n"
          "Please check that the robot is reachable.")
    sys.exit(1)
# Access the custom DCM module.
mcnaoqidcm_service = session.service("MCNAOqiDCM")
# Check if the callback is connected to the DCM loop. print() with a single
# argument behaves identically on Python 2 and 3; the original used a
# Python-2-only print statement here, which is a SyntaxError on Python 3.
print("Is callback connected to DCM: " + str(mcnaoqidcm_service.isPreProccessConnected()))
| 31.380952 | 94 | 0.710167 |
Session()
try:
session.connect("tcp://127.0.0.1:9559")
except RuntimeError:
print ("Can't connect to Naoqi at ip \"" + args.ip + "\" on port " + str(args.port) +".\n"
"Please check your script arguments. Run with -h option for help.")
sys.exit(1)
# Access the module
mcnaoqidcm_service = session.service("MCNAOqiDCM")
# Check if the callback is connected to DCM loop
print "Is callback connected to DCM: " + str(mcnaoqidcm_service.isPreProccessConnected())
| false | true |
f720c93c4cd30b631b5aa4846951a414fc4befea | 8,078 | py | Python | nets/mobilenet/mobilenet_v2.py | Popcorn-sugar/Deep_v2 | 23c25f74e36016658558e690890499bc7fd2aeb2 | [
"MIT"
] | null | null | null | nets/mobilenet/mobilenet_v2.py | Popcorn-sugar/Deep_v2 | 23c25f74e36016658558e690890499bc7fd2aeb2 | [
"MIT"
] | null | null | null | nets/mobilenet/mobilenet_v2.py | Popcorn-sugar/Deep_v2 | 23c25f74e36016658558e690890499bc7fd2aeb2 | [
"MIT"
] | null | null | null | # Copyright 2018 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Implementation of Mobilenet V2.
Architecture: https://arxiv.org/abs/1801.04381
The base model gives 72.2% accuracy on ImageNet, with 300MMadds,
3.4 M parameters.
"""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import copy
import functools
import tensorflow as tf
from nets.mobilenet import conv_blocks as ops
from nets.mobilenet import mobilenet as lib
import tf_slim as slim
op = lib.op
expand_input = ops.expand_input_by_factor
# pyformat: disable
# Architecture: https://arxiv.org/abs/1801.04381
V2_DEF = dict(
defaults={
# Note: these parameters of batch norm affect the architecture
# that's why they are here and not in training_scope.
(slim.batch_norm,): {'center': True, 'scale': True},
(slim.conv2d, slim.fully_connected, slim.separable_conv2d): {
'normalizer_fn': slim.batch_norm, 'activation_fn': tf.nn.relu6
},
(ops.expanded_conv,): {
'expansion_size': expand_input(6),
'split_expansion': 1,
'normalizer_fn': slim.batch_norm,
'residual': True
},
(slim.conv2d, slim.separable_conv2d): {'padding': 'SAME'}
},
spec=[
op(slim.conv2d, stride=2, num_outputs=32, kernel_size=[3, 3]),
op(ops.expanded_conv,
expansion_size=expand_input(1, divisible_by=1),
num_outputs=16),
op(ops.expanded_conv, stride=2, num_outputs=24),
op(ops.expanded_conv, stride=1, num_outputs=24),
op(ops.expanded_conv, stride=2, num_outputs=32),
op(ops.expanded_conv, stride=1, num_outputs=32),
op(ops.expanded_conv, stride=1, num_outputs=32),
op(ops.expanded_conv, stride=2, num_outputs=64),
op(ops.expanded_conv, stride=1, num_outputs=64),
op(ops.expanded_conv, stride=1, num_outputs=64),
op(ops.expanded_conv, stride=1, num_outputs=64),
op(ops.expanded_conv, stride=1, num_outputs=96),
op(ops.expanded_conv, stride=1, num_outputs=96),
op(ops.expanded_conv, stride=1, num_outputs=96),
op(ops.expanded_conv, stride=2, num_outputs=160),
op(ops.expanded_conv, stride=1, num_outputs=160),
op(ops.expanded_conv, stride=1, num_outputs=160),
op(ops.expanded_conv, stride=1, num_outputs=320),
op(slim.conv2d, stride=1, kernel_size=[1, 1], num_outputs=1280)
],
)
# pyformat: enable
@slim.add_arg_scope
def mobilenet(input_tensor,
              num_classes=1001,
              depth_multiplier=1.0,
              scope='MobilenetV2',
              conv_defs=None,
              finegrain_classification_mode=False,
              min_depth=None,
              divisible_by=None,
              activation_fn=None,
              **kwargs):
  """Creates mobilenet V2 network.

  Inference mode is created by default. To create training use training_scope
  below.

     with tf.contrib.slim.arg_scope(mobilenet_v2.training_scope()):
       logits, endpoints = mobilenet_v2.mobilenet(input_tensor)

  Args:
    input_tensor: The input tensor
    num_classes: number of classes
    depth_multiplier: The multiplier applied to scale number of
      channels in each layer. Note: this is called depth multiplier in the
      paper but the name is kept for consistency with slim's model builder.
    scope: Scope of the operator
    conv_defs: Allows to override default conv def.
    finegrain_classification_mode: When set to True, the model
      will keep the last layer large even for small multipliers. Following
      https://arxiv.org/abs/1801.04381
      suggests that it improves performance for ImageNet-type of problems.
      *Note* ignored if final_endpoint makes the builder exit earlier.
    min_depth: If provided, will ensure that all layers will have that
      many channels after application of depth multiplier.
    divisible_by: If provided will ensure that all layers # channels
      will be divisible by this number.
    activation_fn: Activation function to use, defaults to tf.nn.relu6 if not
      specified.
    **kwargs: passed directly to mobilenet.mobilenet:
      prediction_fn- what prediction function to use.
      reuse-: whether to reuse variables (if reuse set to true, scope
      must be given).

  Returns:
    logits/endpoints pair

  Raises:
    ValueError: On invalid arguments
  """
  if conv_defs is None:
    conv_defs = V2_DEF
  if 'multiplier' in kwargs:
    raise ValueError('mobilenetv2 doesn\'t support generic '
                     'multiplier parameter use "depth_multiplier" instead.')
  if finegrain_classification_mode:
    # Deep-copy so the module-level V2_DEF is never mutated.
    conv_defs = copy.deepcopy(conv_defs)
    if depth_multiplier < 1:
      # Pre-scale the final 1x1 conv up so that the later depth_multiplier
      # application leaves it at its original (large) width.
      # NOTE(review): this leaves ``num_outputs`` as a float — presumably the
      # downstream builder truncates/accepts it; confirm.
      conv_defs['spec'][-1].params['num_outputs'] /= depth_multiplier
  if activation_fn:
    # Override the default relu6 on all conv/fc layers (copy first).
    conv_defs = copy.deepcopy(conv_defs)
    defaults = conv_defs['defaults']
    conv_defaults = (
        defaults[(slim.conv2d, slim.fully_connected, slim.separable_conv2d)])
    conv_defaults['activation_fn'] = activation_fn
  depth_args = {}
  # NB: do not set depth_args unless they are provided to avoid overriding
  # whatever default depth_multiplier might have thanks to arg_scope.
  if min_depth is not None:
    depth_args['min_depth'] = min_depth
  if divisible_by is not None:
    depth_args['divisible_by'] = divisible_by
  with slim.arg_scope((lib.depth_multiplier,), **depth_args):
    return lib.mobilenet(
        input_tensor,
        num_classes=num_classes,
        conv_defs=conv_defs,
        scope=scope,
        multiplier=depth_multiplier,
        **kwargs)
mobilenet.default_image_size = 224
def wrapped_partial(func, *args, **kwargs):
    """Like ``functools.partial`` but copies ``func``'s metadata
    (``__name__``, ``__doc__``, ...) onto the returned wrapper."""
    bound = functools.partial(func, *args, **kwargs)
    functools.update_wrapper(bound, func)
    return bound
# Wrappers for mobilenet v2 with depth-multipliers. Be noticed that
# 'finegrain_classification_mode' is set to True, which means the embedding
# layer will not be shrinked when given a depth-multiplier < 1.0.
mobilenet_v2_140 = wrapped_partial(mobilenet, depth_multiplier=1.4)
mobilenet_v2_050 = wrapped_partial(mobilenet, depth_multiplier=0.50,
finegrain_classification_mode=True)
mobilenet_v2_035 = wrapped_partial(mobilenet, depth_multiplier=0.35,
finegrain_classification_mode=True)
@slim.add_arg_scope
def mobilenet_base(input_tensor, depth_multiplier=1.0, **kwargs):
  """Builds only the MobilenetV2 feature extractor (no pooling, no logits)."""
  return mobilenet(
      input_tensor,
      depth_multiplier=depth_multiplier,
      base_only=True,
      **kwargs)
def training_scope(**kwargs):
  """Defines MobilenetV2 training scope.

  Usage:
     with tf.contrib.slim.arg_scope(mobilenet_v2.training_scope()):
       logits, endpoints = mobilenet_v2.mobilenet(input_tensor)

  Thin wrapper around ``mobilenet.training_scope``; supported keyword
  arguments are weight_decay, stddev (negative means Xavier init),
  dropout_keep_prob and bn_decay.

  Returns:
    An `arg_scope` to use for the mobilenet v2 model.
  """
  scope = lib.training_scope(**kwargs)
  return scope
__all__ = ['training_scope', 'mobilenet_base', 'mobilenet', 'V2_DEF']
| 37.225806 | 80 | 0.694974 |
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import copy
import functools
import tensorflow as tf
from nets.mobilenet import conv_blocks as ops
from nets.mobilenet import mobilenet as lib
import tf_slim as slim
op = lib.op
expand_input = ops.expand_input_by_factor
V2_DEF = dict(
defaults={
(slim.batch_norm,): {'center': True, 'scale': True},
(slim.conv2d, slim.fully_connected, slim.separable_conv2d): {
'normalizer_fn': slim.batch_norm, 'activation_fn': tf.nn.relu6
},
(ops.expanded_conv,): {
'expansion_size': expand_input(6),
'split_expansion': 1,
'normalizer_fn': slim.batch_norm,
'residual': True
},
(slim.conv2d, slim.separable_conv2d): {'padding': 'SAME'}
},
spec=[
op(slim.conv2d, stride=2, num_outputs=32, kernel_size=[3, 3]),
op(ops.expanded_conv,
expansion_size=expand_input(1, divisible_by=1),
num_outputs=16),
op(ops.expanded_conv, stride=2, num_outputs=24),
op(ops.expanded_conv, stride=1, num_outputs=24),
op(ops.expanded_conv, stride=2, num_outputs=32),
op(ops.expanded_conv, stride=1, num_outputs=32),
op(ops.expanded_conv, stride=1, num_outputs=32),
op(ops.expanded_conv, stride=2, num_outputs=64),
op(ops.expanded_conv, stride=1, num_outputs=64),
op(ops.expanded_conv, stride=1, num_outputs=64),
op(ops.expanded_conv, stride=1, num_outputs=64),
op(ops.expanded_conv, stride=1, num_outputs=96),
op(ops.expanded_conv, stride=1, num_outputs=96),
op(ops.expanded_conv, stride=1, num_outputs=96),
op(ops.expanded_conv, stride=2, num_outputs=160),
op(ops.expanded_conv, stride=1, num_outputs=160),
op(ops.expanded_conv, stride=1, num_outputs=160),
op(ops.expanded_conv, stride=1, num_outputs=320),
op(slim.conv2d, stride=1, kernel_size=[1, 1], num_outputs=1280)
],
)
# pyformat: enable
@slim.add_arg_scope
def mobilenet(input_tensor,
num_classes=1001,
depth_multiplier=1.0,
scope='MobilenetV2',
conv_defs=None,
finegrain_classification_mode=False,
min_depth=None,
divisible_by=None,
activation_fn=None,
**kwargs):
if conv_defs is None:
conv_defs = V2_DEF
if 'multiplier' in kwargs:
raise ValueError('mobilenetv2 doesn\'t support generic '
'multiplier parameter use "depth_multiplier" instead.')
if finegrain_classification_mode:
conv_defs = copy.deepcopy(conv_defs)
if depth_multiplier < 1:
conv_defs['spec'][-1].params['num_outputs'] /= depth_multiplier
if activation_fn:
conv_defs = copy.deepcopy(conv_defs)
defaults = conv_defs['defaults']
conv_defaults = (
defaults[(slim.conv2d, slim.fully_connected, slim.separable_conv2d)])
conv_defaults['activation_fn'] = activation_fn
depth_args = {}
if min_depth is not None:
depth_args['min_depth'] = min_depth
if divisible_by is not None:
depth_args['divisible_by'] = divisible_by
with slim.arg_scope((lib.depth_multiplier,), **depth_args):
return lib.mobilenet(
input_tensor,
num_classes=num_classes,
conv_defs=conv_defs,
scope=scope,
multiplier=depth_multiplier,
**kwargs)
mobilenet.default_image_size = 224
def wrapped_partial(func, *args, **kwargs):
partial_func = functools.partial(func, *args, **kwargs)
functools.update_wrapper(partial_func, func)
return partial_func
mobilenet_v2_140 = wrapped_partial(mobilenet, depth_multiplier=1.4)
mobilenet_v2_050 = wrapped_partial(mobilenet, depth_multiplier=0.50,
finegrain_classification_mode=True)
mobilenet_v2_035 = wrapped_partial(mobilenet, depth_multiplier=0.35,
finegrain_classification_mode=True)
@slim.add_arg_scope
def mobilenet_base(input_tensor, depth_multiplier=1.0, **kwargs):
return mobilenet(input_tensor,
depth_multiplier=depth_multiplier,
base_only=True, **kwargs)
def training_scope(**kwargs):
return lib.training_scope(**kwargs)
__all__ = ['training_scope', 'mobilenet_base', 'mobilenet', 'V2_DEF']
| true | true |
f720cb08684941936d653c433957364d390d8967 | 3,246 | py | Python | mt/preprocess/1_process_raw.py | salvacarrion/nmt-continual-learning | 302147ac9c270f3341a68a72c803c457f05ff37b | [
"MIT"
] | 1 | 2021-05-26T11:35:09.000Z | 2021-05-26T11:35:09.000Z | mt/preprocess/1_process_raw.py | salvacarrion/nmt-continual-learning | 302147ac9c270f3341a68a72c803c457f05ff37b | [
"MIT"
] | 1 | 2021-05-26T11:36:24.000Z | 2021-05-26T11:36:24.000Z | mt/preprocess/1_process_raw.py | salvacarrion/nmt-continual-learning | 302147ac9c270f3341a68a72c803c457f05ff37b | [
"MIT"
] | null | null | null | import os
import pandas as pd
from pathlib import Path
import numpy as np
from mt import RAW_PATH
from mt import utils
SUFFLE = True
CONSTRAINED = True
TR_DATA_PATH = "/home/salva/Documents/Programming/Datasets/scielo/originals/scielo-gma/scielo-gma"
TR_RAW_FILES = ["es-en-gma-biological.csv", "es-en-gma-health.csv", "fr-en-gma-health.csv",
"pt-en-gma-biological.csv", "pt-en-gma-health.csv"]
TS_DATA_PATH = "/home/salva/Documents/Programming/Datasets/scielo/originals/testset-gma/testset_gma"
TS_RAW_FILES = ["test-gma-en2es-biological.csv", "test-gma-en2es-health.csv", "test-gma-en2fr-health.csv",
"test-gma-en2pt-biological.csv", "test-gma-en2pt-health.csv", "test-gma-es2en-biological.csv",
"test-gma-es2en-health.csv", "test-gma-fr2en-health.csv", "test-gma-pt2en-biological.csv",
"test-gma-pt2en-health.csv"]
# Create path if doesn't exists
path = Path(RAW_PATH)
path.mkdir(parents=True, exist_ok=True)
# Process splits train/test files
for split in ["train", "test"]:
# Select split to process
if split == "train":
print("Processing training files...")
DATA_PATH = TR_DATA_PATH
RAW_FILES = TR_RAW_FILES
istrain = True
elif split == "test":
print("Processing test files...")
DATA_PATH = TS_DATA_PATH
RAW_FILES = TS_RAW_FILES
istrain = False
else:
raise ValueError("Invalid split name")
# Process raw files
for fname in RAW_FILES:
# Read file
print(f"Reading file... ({fname})")
filename = os.path.join(DATA_PATH, fname)
df = pd.read_csv(filename)
# Limit dataset
domain = utils.get_domain(fname)
SRC_LANG, TRG_LANG = utils.get_langs(fname, istrain=istrain)
# Clean dataset (basic)
total_old = len(df)
df = utils.preprocess_dataset(df, src_col=SRC_LANG, trg_col=TRG_LANG)
# Shuffle dataset
if SUFFLE:
np.random.seed(123)
np.random.shuffle(df.values)
if CONSTRAINED and istrain:
if domain == "health" and "es" in {SRC_LANG, TRG_LANG}:
max_size = 123597 # Biological rows
print(f"Limiting size to {max_size}")
df = df[:max_size]
elif domain == "health" and "pt" in {SRC_LANG, TRG_LANG}:
max_size = 120301 # Biological rows
print(f"Limiting size to {max_size}")
df = df[:max_size]
# Stats
total_doctypes = df['doctype'].value_counts()
removed = total_old - len(df)
print(f"Stats for: {fname} **************************")
print(f"\t- Documents: {len(set(df['docid']))}")
print(f"\t- Sentences: {len(df)}")
print("\t\t- Removed: {} ({:.2f}%)".format(removed, removed / total_old * 100))
print("\t- Titles/Abstracts: {}/{} ({:.2f}%)".format(total_doctypes['title'], total_doctypes['text'],
total_doctypes['title'] / total_doctypes['text'] * 100))
# Save data
df.to_csv(os.path.join(RAW_PATH, fname), index=False)
print("File saved!")
print("")
print("Done!")
| 35.282609 | 117 | 0.594886 | import os
import pandas as pd
from pathlib import Path
import numpy as np
from mt import RAW_PATH
from mt import utils
SUFFLE = True
CONSTRAINED = True
TR_DATA_PATH = "/home/salva/Documents/Programming/Datasets/scielo/originals/scielo-gma/scielo-gma"
TR_RAW_FILES = ["es-en-gma-biological.csv", "es-en-gma-health.csv", "fr-en-gma-health.csv",
"pt-en-gma-biological.csv", "pt-en-gma-health.csv"]
TS_DATA_PATH = "/home/salva/Documents/Programming/Datasets/scielo/originals/testset-gma/testset_gma"
TS_RAW_FILES = ["test-gma-en2es-biological.csv", "test-gma-en2es-health.csv", "test-gma-en2fr-health.csv",
"test-gma-en2pt-biological.csv", "test-gma-en2pt-health.csv", "test-gma-es2en-biological.csv",
"test-gma-es2en-health.csv", "test-gma-fr2en-health.csv", "test-gma-pt2en-biological.csv",
"test-gma-pt2en-health.csv"]
path = Path(RAW_PATH)
path.mkdir(parents=True, exist_ok=True)
# Process splits train/test files
for split in ["train", "test"]:
# Select split to process
if split == "train":
print("Processing training files...")
DATA_PATH = TR_DATA_PATH
RAW_FILES = TR_RAW_FILES
istrain = True
elif split == "test":
print("Processing test files...")
DATA_PATH = TS_DATA_PATH
RAW_FILES = TS_RAW_FILES
istrain = False
else:
raise ValueError("Invalid split name")
# Process raw files
for fname in RAW_FILES:
# Read file
print(f"Reading file... ({fname})")
filename = os.path.join(DATA_PATH, fname)
df = pd.read_csv(filename)
# Limit dataset
domain = utils.get_domain(fname)
SRC_LANG, TRG_LANG = utils.get_langs(fname, istrain=istrain)
# Clean dataset (basic)
total_old = len(df)
df = utils.preprocess_dataset(df, src_col=SRC_LANG, trg_col=TRG_LANG)
# Shuffle dataset
if SUFFLE:
np.random.seed(123)
np.random.shuffle(df.values)
if CONSTRAINED and istrain:
if domain == "health" and "es" in {SRC_LANG, TRG_LANG}:
max_size = 123597 # Biological rows
print(f"Limiting size to {max_size}")
df = df[:max_size]
elif domain == "health" and "pt" in {SRC_LANG, TRG_LANG}:
max_size = 120301 # Biological rows
print(f"Limiting size to {max_size}")
df = df[:max_size]
# Stats
total_doctypes = df['doctype'].value_counts()
removed = total_old - len(df)
print(f"Stats for: {fname} **************************")
print(f"\t- Documents: {len(set(df['docid']))}")
print(f"\t- Sentences: {len(df)}")
print("\t\t- Removed: {} ({:.2f}%)".format(removed, removed / total_old * 100))
print("\t- Titles/Abstracts: {}/{} ({:.2f}%)".format(total_doctypes['title'], total_doctypes['text'],
total_doctypes['title'] / total_doctypes['text'] * 100))
# Save data
df.to_csv(os.path.join(RAW_PATH, fname), index=False)
print("File saved!")
print("")
print("Done!")
| true | true |
f720cbcab58f05b66ace66127442ad6b2998f33d | 2,069 | py | Python | botnet/modules/lib/cache.py | admdev8/botnet-2 | 2fd43237e628869eb34d8e7a6747da6d71c1192c | [
"MIT"
] | 69 | 2015-02-24T19:24:23.000Z | 2022-02-23T08:04:53.000Z | botnet/modules/lib/cache.py | admdev8/botnet-2 | 2fd43237e628869eb34d8e7a6747da6d71c1192c | [
"MIT"
] | 10 | 2017-06-28T21:08:29.000Z | 2022-01-26T07:46:02.000Z | botnet/modules/lib/cache.py | admdev8/botnet-2 | 2fd43237e628869eb34d8e7a6747da6d71c1192c | [
"MIT"
] | 39 | 2015-11-19T10:07:21.000Z | 2022-03-30T10:56:24.000Z | """
Contains cache implementations which can be used by the modules, for example
to cache results acquired from various online APIs.
"""
import datetime
import hashlib
def get_md5(string):
    """Return the hexadecimal MD5 digest of a string."""
    digest = hashlib.md5(string.encode('utf-8'))
    return digest.hexdigest()
class BaseCache(object):
    """Cache interface; by itself behaves as a no-op (nothing is stored)."""

    def __init__(self, default_timeout=300):
        # Timeout in seconds used by ``set`` when none is given explicitly.
        self.default_timeout = default_timeout

    def set(self, key, value, timeout=None):
        """Store ``value`` under ``key``. Returns True on success, False on
        error. The base implementation discards the value."""
        return True

    def get(self, key):
        """Return the cached value for ``key`` or None when absent."""
        return None
class MemoryCache(BaseCache):
    """Simple in-memory cache. 100% thread unsafety guaranteed.

    default_timeout: timeout used by the set method [seconds].
    """

    def __init__(self, default_timeout=300):
        super().__init__(default_timeout)
        # Maps hashed key -> (expiry datetime, value).
        self._data = {}

    def _prepare_key(self, key):
        """Prepares a key before using it (hashing keeps keys uniform)."""
        return get_md5(key)

    def _clean(self):
        """Removes expired values."""
        # Compute "now" once instead of per entry (also keeps the sweep
        # consistent across the whole scan).
        now = datetime.datetime.now()
        for key in self._data.copy().keys():
            try:
                expires, _value = self._data[key]
                if expires < now:
                    self._data.pop(key)
            except KeyError:
                # Entry vanished while scanning; nothing to do.
                pass

    def set(self, key, value, timeout=None):
        """Store ``value`` under ``key`` for ``timeout`` seconds
        (``default_timeout`` when None). Returns True."""
        self._clean()
        key = self._prepare_key(key)
        if timeout is None:
            timeout = self.default_timeout
        expires = datetime.datetime.now() + datetime.timedelta(seconds=timeout)
        self._data[key] = (expires, value)
        return True

    def get(self, key):
        """Return the value for ``key``, or None when missing or expired."""
        key = self._prepare_key(key)
        try:
            expires, value = self._data[key]
        except KeyError:
            return None
        if expires > datetime.datetime.now():
            return value
        # Fix: drop the expired entry immediately instead of leaving it in
        # memory until the next set() call happens to trigger _clean().
        self._data.pop(key, None)
        return None
| 26.87013 | 80 | 0.581924 |
import datetime
import hashlib
def get_md5(string):
m = hashlib.md5()
m.update(string.encode('utf-8'))
return m.hexdigest()
class BaseCache(object):
def __init__(self, default_timeout=300):
self.default_timeout = default_timeout
def set(self, key, value, timeout=None):
return True
def get(self, key):
return None
class MemoryCache(BaseCache):
def __init__(self, default_timeout=300):
super().__init__(default_timeout)
self._data = {}
def _prepare_key(self, key):
return get_md5(key)
def _clean(self):
for key in self._data.copy().keys():
try:
expires, value = self._data[key]
if expires < datetime.datetime.now():
self._data.pop(key)
except KeyError:
pass
def set(self, key, value, timeout=None):
self._clean()
key = self._prepare_key(key)
if timeout is None:
timeout = self.default_timeout
expires = datetime.datetime.now() + datetime.timedelta(seconds=timeout)
self._data[key] = (expires, value)
return True
def get(self, key):
try:
key = self._prepare_key(key)
expires, value = self._data[key]
if expires > datetime.datetime.now():
return value
else:
return None
except KeyError:
return None
| true | true |
f720cc9a775ee8a5289c1096d9e20c36d79908d3 | 15,229 | py | Python | src/main.py | Steffuu/tgMensaBotDD | 04bca6ce839d5fb040e0e6232163f4343bcb85fb | [
"MIT"
] | null | null | null | src/main.py | Steffuu/tgMensaBotDD | 04bca6ce839d5fb040e0e6232163f4343bcb85fb | [
"MIT"
] | null | null | null | src/main.py | Steffuu/tgMensaBotDD | 04bca6ce839d5fb040e0e6232163f4343bcb85fb | [
"MIT"
] | null | null | null | #!/usr/bin/env python3
# -*- encoding: utf-8 -*-
from telegram.ext import Updater, CommandHandler, MessageHandler, Filters, InlineQueryHandler
import telegram as tg
import requests
import json
import os
import io
import time
import logging
from datetime import timedelta
import translate
import random
import praw
REDDIT_BOT_ID = os.environ['REDDIT_BOT_ID']
REDDIT_BOT_SECRET = os.environ['REDDIT_BOT_SECRET']
REDDIT_USER_AGENT = os.environ['REDDIT_USER_AGENT']
USER_AGENT_BROWSER = 'Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/80.0.3987.132 Safari/537.36'
royalTitles = ["Lé", "Baron", "König", "Archlord", "Genius", "Ritter", "Curry", "Burger", "Mc", "Doktor", "Gentoomaster", "Chef", "Lead Developer"]
firstFrag = ["Schm", "J", "Hans-J", "K", "G", "Gr", "B", "Str", "Kr", "Rask"]
secondFrag = ["oerg", "öck", "öhhhrk", "öhrp", "egor", "oeg", "ock"]
thirdFrag = ["inger", "erino", "aroni", "us", "sell", "topus", "thulu", "tain", "rid", "odil", "ette", "nikov"]
nobleAnnex = ["I.", "II.", "III.", "Royale", "dem Allmächtigen", "dem Weisen", "dem hochgradig Intelligenten", "dem Unendlichen", "dem Allwissenden", "dem Gentoobändiger", "dem Meisterinformatiker"]
wisdoms = ["Linux ist voll doof!", "Ich stehe immer um 7.00 Uhr auf!", "Tut schön viel Frischkäse in die Nudelsoße!", "Mensen um 11.00 Uhr ist eine super Sache!", "Ich habe WinRar gekauft!", "Für einen längeren XP-Supportzeitraum!", "Fasst meinen Laptopbildschirm an!", "Natürlich code ich dieses Feature für euch, ganz ohne Pull Request!", "Maxime ist ein toller Papa!", "Hirtenkäsepizza ist die beste!", "Sauerkraut ist doch ekelhaft!", "Mein Lieblingsbrowser ist ja der Internet Explorer!", "Rechtschreibfehler in Kommentaren? Voll okay!", "Party? Warum nicht bei mir zu Hause?", "Irgendwas mit dynamisch Parameter injecten!", "Wie war das mit den Speisezeiten?", "Ich kaufe nur bei Nvidia!", "Wer braucht schon Open Source...", "KöckOS? Kommt noch diese Woche raus!", "Die besten Witze sind Deine-Mutter-Witze!", "Mein Lieblings-OS ist iOS!", "Ein Halloumiburger ist eine eigenständige Mahlzeit!", "Ich kaufe mir ein MacBook!", "Ich fange wieder mit Medieninformatik an!", "Ich liebe Ubuntu!", "Verschlüsselung ist doch Unsinn!", "Machen wir alle ne gemeinsame WG auf?"]
haes = ["HÄ?", "VALORANT?", "WIE", "WANN", "WO", "Geller muss erst noch zu Ende essen!", "???", "*Random Katzenbild*", "Erstmal Valorant!", "ICH HASSE EUCH ALLE", "HÄÄÄ", "ICH ARBEITE", "ICH HASSE DEN", "FUCK YOU", "WIRKLICH", "BITTE", "Natürlich ist das gelb!", "Es gibt Kuchen!", "Wir haben wieder viel zu viel Lasagne!", "Oke", "WAS", "WAS MEINST DU", "WAS WILLST DU DENN JETZT SCHON WIEDER", "Alter", "Wirst schon sehen", "Denk nach du Schwamm", "Stop", "NICHT COOL", "TROLL NICHT RUM", "Uff", "AAAAARGH", "Kann den jemand kicken?", "DU HAST NUR ANGST VOR MIR", "EKELHAFT", "ICH HASSE ALLES", "WOFÜR", "ICH BIN IMMER SO", "KUCHEN", "LASAGNE", "SCHANDE", "WARUM ICH", "ICH LIEBE ARBEITEN", "ICH HASSE UNPÜNKTLICHKEIT", "IDIOT", "HEY", "WO SEID IHR", "WAS SONST", "KIBA", "HAHA", "VERSTEHT IHR DAS NICHT", "SEID IHR DUMM ODER WAS", "WTF", "RED DEUTSCH MIT MIR", "OMG", "LOL", ":)", "MIR IST LANGWEILIG", "ALS OB IHR ALLE SCHON SCHLAFT", "HALLO", "WEIß ICH NICHT", "WER DENKT SICH DAS AUS", "ICH SPRING LIEBER AUS DEM FENSTER", "NE"]
class NotifyUserException(Exception):
    """Raised when an error message must be propagated to the chat user."""
def start(update, context):
    """Handler for /start: greet the chat with the bot's motto."""
    chat_id = update.message.chat_id
    context.bot.send_message(chat_id=chat_id, text="Reichenbach is never an option!")
def echoText(update, context):
    """Echo any incoming text message back to the same chat."""
    incoming = update.message
    context.bot.send_message(chat_id=incoming.chat_id, text=incoming.text)
def echoSticker(update, context):
    """Echo any incoming sticker back to the same chat."""
    received = update.message.sticker
    context.bot.send_sticker(chat_id=update.message.chat_id, sticker=received)
def mensa(update, context):
    """Handler for /mensa [offset]: post the canteen menu for a day.

    The optional first argument is an integer day offset relative to the
    message date. Vegetarian/vegan meals are sent bold, others italic.
    """
    params = context.args
    if len(params) < 1:
        daysToAdd = 0
    else:
        try:
            daysToAdd = int(params[0])
        except ValueError:
            context.bot.send_message(chat_id=update.message.chat_id, text="The first and only parameter has to be an integer value. Aborting.")
            return
    day = update.message.date.date() + timedelta(days=daysToAdd)
    # Canteen id 79 is the hard-coded Dresden mensa on OpenMensa.
    url = "https://openmensa.org/api/v2/canteens/79/days/" + day.strftime("%Y-%m-%d") + "/meals"
    resp = requests.get(url)
    if not resp.ok:
        context.bot.send_message(chat_id=update.message.chat_id, text="I failed miserably. Disgrace!")
        return
    jsonData = json.loads(resp.content)
    for elem in jsonData:
        mealNotes = elem["notes"]
        if "vegetarisch" in mealNotes or "vegan" in mealNotes:
            # Bold (*...*) marks vegetarian/vegan meals.
            context.bot.send_message(chat_id=update.message.chat_id, text="*" + elem["name"] + "*", parse_mode="Markdown")
        else:
            # Italic (_..._) marks everything else.
            context.bot.send_message(chat_id=update.message.chat_id, text="_" + elem["name"] + "_", parse_mode="Markdown")
def andre(update, context):
context.bot.send_message(chat_id=update.message.chat_id, text="Höhöhö Reichenbach!")
def leon(update, context):
joke = dadJoke()
context.bot.send_message(chat_id=update.message.chat_id, text=joke)
def loen(update, context):
joke = dadJoke()
translator = translate.Translator(from_lang='en', to_lang='de')
translatedJoke = translator.translate(joke)
context.bot.send_message(chat_id=update.message.chat_id, text=translatedJoke)
def dadJoke():
headers = {'Accept': 'text/plain '}
resp = requests.get("https://icanhazdadjoke.com/", headers=headers)
if not resp.ok:
return "I failed miserably. Disgrace!"
return resp.text
def georg(update, context):
context.bot.send_message(chat_id=update.message.chat_id, text="https://wiki.archlinux.org/index.php/Installation_guide")
def maxime(update, context):
context.bot.send_sticker(chat_id=update.message.chat_id, sticker="CAADBQADfAMAAukKyAPfAAFRgAuYdNoWBA")
def andrey(update, context):
context.bot.send_message(chat_id=update.message.chat_id, text="11.00 Bois. Yeef!")
def steffuu(update, context):
context.bot.send_message(chat_id=update.message.chat_id, text=random.choice(haes))
def getXkcd(id, rand):
resp = requests.get("https://xkcd.com/info.0.json")
if not resp.ok:
raise NotifyUserException("I failed miserably. Disgrace!")
jsonData = json.loads(resp.content)
upperLimit = jsonData["num"]
if rand:
id = random.randint(1, upperLimit)
elif id > upperLimit:
raise NotifyUserException("Id not in range. Maximum id currently is " + str(upperLimit) + ".")
resp = requests.get("https://xkcd.com/" + str(id) + "/info.0.json")
if not resp.ok:
raise NotifyUserException("I failed miserably. Disgrace!")
jsonData = json.loads(resp.content)
return (id, jsonData["img"], jsonData["title"])
def xkcd(update, context):
params = context.args
rand = False
id = 0
if len(params) < 1:
rand = True
else:
try:
id = int(params[0])
except ValueError:
context.bot.send_message(chat_id=update.message.chat_id, text="The first and only parameter has to be a positive integer value greater than 0. Aborting.")
return
if id < 1:
context.bot.send_message(chat_id=update.message.chat_id, text="The first and only parameter has to be a positive integer value greater than 0. Aborting.")
return
try:
xkcd = getXkcd(id, rand)
except NotifyUserException as error:
context.bot.send_message(chat_id=update.message.chat_id, text=str(error))
return
context.bot.send_photo(chat_id=update.message.chat_id, photo=xkcd[1], caption=str(xkcd[0]) + " - " + xkcd[2])
def decision(update, context):
headers = {'Accept': 'text/plain '}
resp = requests.get("https://yesno.wtf/api/", headers=headers)
if not resp.ok:
raise NotifyUserException("oof")
data = json.loads(resp.text)
context.bot.send_animation(chat_id=update.message.chat_id, animation=data["image"], caption=data["answer"])
def subredditImg(subreddit, offset=0, count=5):
imageFileEndings = [".png", ".jpg", ".jpeg", ".webp", ".gif"]
reddit = praw.Reddit(client_id=REDDIT_BOT_ID, client_secret=REDDIT_BOT_SECRET, user_agent=REDDIT_USER_AGENT)
images = []
for post in reddit.subreddit(subreddit).hot(limit=count):
for ending in imageFileEndings:
if str(post.url).endswith(ending):
images.append(post.url)
return images
def r(update, context):
params = context.args
offset = 0
if len(params) < 1:
context.bot.send_message(chat_id=update.message.chat_id, text="The first parameter has to be a string identifying the requested subreddit. Aborting.")
return
subreddit = params[0]
if len(params) > 1:
try:
offset = int(params[1])
except ValueError:
context.bot.send_message(chat_id=update.message.chat_id, text="The second parameter has to be a positive integer value. Aborting.")
return
if offset < 0:
context.bot.send_message(chat_id=update.message.chat_id, text="The second parameter has to be a positive integer value. Aborting.")
return
try:
images = subredditImg(subreddit)
except Exception:
context.bot.send_message(chat_id=update.message.chat_id, text="Something went wrong internally. I am deeply sorry.")
return
if len(images) == 0:
context.bot.send_message(chat_id=update.message.chat_id, text="There are no images in the top 5 posts.")
return
for image in images:
context.bot.send_photo(chat_id=update.message.chat_id, photo=image)
def cat(update, context):
context.bot.send_photo(
chat_id=update.message.chat_id,
photo="https://thiscatdoesnotexist.com?time=" + str(time.time()) + str(random.randint(1, 1024))
)
def horse(update, context):
context.bot.send_photo(
chat_id=update.message.chat_id,
photo="https://thishorsedoesnotexist.com?time=" + str(time.time()) + str(random.randint(1, 1024))
)
def person(update, context):
resp = requests.get("https://thispersondoesnotexist.com/image?time=" + str(time.time()) + str(random.randint(1, 1024)), headers={'User-Agent': 'USER_AGENT_BROWSER'})
if not resp.ok:
context.bot.send_message(chat_id=update.message.chat_id, text="Something went wrong internally. I am deeply sorry.")
return
with io.BytesIO(resp.content) as buf:
context.bot.send_photo(chat_id=update.message.chat_id, photo=buf)
def wisdom(update, context):
wisdom = createWisdomString()
context.bot.send_message(chat_id=update.message.chat_id, text=wisdom)
def createWisdomString():
optionalNoble = None
optionalThird = None
optionalAnnex = None
if bool(random.getrandbits(1)):
optionalNoble = random.choice(royalTitles)
if bool(random.getrandbits(1)):
optionalThird = random.choice(thirdFrag)
if bool(random.getrandbits(1)):
optionalAnnex = random.choice(nobleAnnex)
mainBody = random.choice(firstFrag) + random.choice(secondFrag)
output = "Die heutige Weisheit von "
if optionalNoble:
output += optionalNoble + " " + mainBody
else:
output += mainBody
if optionalThird:
output += optionalThird
if optionalAnnex:
output += " " + optionalAnnex
output += ": " + random.choice(wisdoms)
return output
def choose(update, context):
params = context.args
if len(params) < 1:
context.bot.send_message(chat_id=update.message.chat_id, text="You know, I can't choose if there is nothing to choose from. Wise words!")
return
elif len(params) == 1:
context.bot.send_message(chat_id=update.message.chat_id, text="How the hell am I supposed to choose when only value is entered? Gosh.")
return
else:
context.bot.send_message(chat_id=update.message.chat_id, text=random.choice(params) + " shall be my answer!")
def inlineR(update, context):
query = update.inline_query.query
results = []
try:
images = subredditImg(query, count=40)
except Exception:
results.append(tg.InlineQueryResultArticle(0, "No", tg.InputTextMessageContent("No!")))
else:
if len(images) == 0:
results.append(tg.InlineQueryResultArticle(0, "No", "No!", ))
else:
for img in images:
results.append(tg.InlineQueryResultPhoto(img, img, img))
finally:
update.inline_query.answer(results)
def main():
logging.basicConfig(format='%(asctime)s - %(name)s - %(levelname)s - %(message)s', level=logging.INFO)
API_TOKEN = os.environ['TELEGRAM_APITOKEN']
APP_ADDR = os.environ['APP_ADDRESS']
PORT = int(os.environ.get('PORT', '8443'))
updater = Updater(token=API_TOKEN, use_context=True)
startHandler = CommandHandler('start', start)
updater.dispatcher.add_handler(startHandler)
mensaHandler = CommandHandler('mensa', mensa)
updater.dispatcher.add_handler(mensaHandler)
andreHandler = CommandHandler('andre', andre)
updater.dispatcher.add_handler(andreHandler)
leonHandler = CommandHandler('leon', leon)
updater.dispatcher.add_handler(leonHandler)
georgHandler = CommandHandler('georg', georg)
updater.dispatcher.add_handler(georgHandler)
loenHandler = CommandHandler('loen', loen)
updater.dispatcher.add_handler(loenHandler)
maximeHandler = CommandHandler('maxime', maxime)
updater.dispatcher.add_handler(maximeHandler)
andreyHandler = CommandHandler('andrey', andrey)
updater.dispatcher.add_handler(andreyHandler)
steffuuHandler = CommandHandler('steffuu', steffuu)
updater.dispatcher.add_handler(steffuuHandler)
xkcdHandler = CommandHandler('xkcd', xkcd)
updater.dispatcher.add_handler(xkcdHandler)
decisionHandler = CommandHandler('decision', decision)
updater.dispatcher.add_handler(decisionHandler)
redditImgHandler = CommandHandler('r', r)
updater.dispatcher.add_handler(redditImgHandler)
echoHandlerText = MessageHandler(Filters.text, echoText)
updater.dispatcher.add_handler(echoHandlerText)
echoHandlerSticker = MessageHandler(Filters.sticker, echoSticker)
updater.dispatcher.add_handler(echoHandlerSticker)
catHandler = CommandHandler('cat', cat)
updater.dispatcher.add_handler(catHandler)
horseHandler = CommandHandler('horse', horse)
updater.dispatcher.add_handler(horseHandler)
personHandler = CommandHandler('person', person)
updater.dispatcher.add_handler(personHandler)
wisdomHandler = CommandHandler('wisdom', wisdom)
updater.dispatcher.add_handler(wisdomHandler)
chooseHandler = CommandHandler('choose', choose)
updater.dispatcher.add_handler(chooseHandler)
inlineRedditHandler = InlineQueryHandler(inlineR)
updater.dispatcher.add_handler(inlineRedditHandler)
updater.start_webhook(listen="0.0.0.0", port=PORT, url_path=API_TOKEN)
updater.bot.set_webhook(APP_ADDR + API_TOKEN)
updater.idle()
if __name__ == "__main__":
main()
| 40.395225 | 1,074 | 0.690919 |
from telegram.ext import Updater, CommandHandler, MessageHandler, Filters, InlineQueryHandler
import telegram as tg
import requests
import json
import os
import io
import time
import logging
from datetime import timedelta
import translate
import random
import praw
REDDIT_BOT_ID = os.environ['REDDIT_BOT_ID']
REDDIT_BOT_SECRET = os.environ['REDDIT_BOT_SECRET']
REDDIT_USER_AGENT = os.environ['REDDIT_USER_AGENT']
USER_AGENT_BROWSER = 'Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/80.0.3987.132 Safari/537.36'
royalTitles = ["Lé", "Baron", "König", "Archlord", "Genius", "Ritter", "Curry", "Burger", "Mc", "Doktor", "Gentoomaster", "Chef", "Lead Developer"]
firstFrag = ["Schm", "J", "Hans-J", "K", "G", "Gr", "B", "Str", "Kr", "Rask"]
secondFrag = ["oerg", "öck", "öhhhrk", "öhrp", "egor", "oeg", "ock"]
thirdFrag = ["inger", "erino", "aroni", "us", "sell", "topus", "thulu", "tain", "rid", "odil", "ette", "nikov"]
nobleAnnex = ["I.", "II.", "III.", "Royale", "dem Allmächtigen", "dem Weisen", "dem hochgradig Intelligenten", "dem Unendlichen", "dem Allwissenden", "dem Gentoobändiger", "dem Meisterinformatiker"]
wisdoms = ["Linux ist voll doof!", "Ich stehe immer um 7.00 Uhr auf!", "Tut schön viel Frischkäse in die Nudelsoße!", "Mensen um 11.00 Uhr ist eine super Sache!", "Ich habe WinRar gekauft!", "Für einen längeren XP-Supportzeitraum!", "Fasst meinen Laptopbildschirm an!", "Natürlich code ich dieses Feature für euch, ganz ohne Pull Request!", "Maxime ist ein toller Papa!", "Hirtenkäsepizza ist die beste!", "Sauerkraut ist doch ekelhaft!", "Mein Lieblingsbrowser ist ja der Internet Explorer!", "Rechtschreibfehler in Kommentaren? Voll okay!", "Party? Warum nicht bei mir zu Hause?", "Irgendwas mit dynamisch Parameter injecten!", "Wie war das mit den Speisezeiten?", "Ich kaufe nur bei Nvidia!", "Wer braucht schon Open Source...", "KöckOS? Kommt noch diese Woche raus!", "Die besten Witze sind Deine-Mutter-Witze!", "Mein Lieblings-OS ist iOS!", "Ein Halloumiburger ist eine eigenständige Mahlzeit!", "Ich kaufe mir ein MacBook!", "Ich fange wieder mit Medieninformatik an!", "Ich liebe Ubuntu!", "Verschlüsselung ist doch Unsinn!", "Machen wir alle ne gemeinsame WG auf?"]
haes = ["HÄ?", "VALORANT?", "WIE", "WANN", "WO", "Geller muss erst noch zu Ende essen!", "???", "*Random Katzenbild*", "Erstmal Valorant!", "ICH HASSE EUCH ALLE", "HÄÄÄ", "ICH ARBEITE", "ICH HASSE DEN", "FUCK YOU", "WIRKLICH", "BITTE", "Natürlich ist das gelb!", "Es gibt Kuchen!", "Wir haben wieder viel zu viel Lasagne!", "Oke", "WAS", "WAS MEINST DU", "WAS WILLST DU DENN JETZT SCHON WIEDER", "Alter", "Wirst schon sehen", "Denk nach du Schwamm", "Stop", "NICHT COOL", "TROLL NICHT RUM", "Uff", "AAAAARGH", "Kann den jemand kicken?", "DU HAST NUR ANGST VOR MIR", "EKELHAFT", "ICH HASSE ALLES", "WOFÜR", "ICH BIN IMMER SO", "KUCHEN", "LASAGNE", "SCHANDE", "WARUM ICH", "ICH LIEBE ARBEITEN", "ICH HASSE UNPÜNKTLICHKEIT", "IDIOT", "HEY", "WO SEID IHR", "WAS SONST", "KIBA", "HAHA", "VERSTEHT IHR DAS NICHT", "SEID IHR DUMM ODER WAS", "WTF", "RED DEUTSCH MIT MIR", "OMG", "LOL", ":)", "MIR IST LANGWEILIG", "ALS OB IHR ALLE SCHON SCHLAFT", "HALLO", "WEIß ICH NICHT", "WER DENKT SICH DAS AUS", "ICH SPRING LIEBER AUS DEM FENSTER", "NE"]
class NotifyUserException(Exception):
pass
def start(update, context):
context.bot.send_message(chat_id=update.message.chat_id, text="Reichenbach is never an option!")
def echoText(update, context):
context.bot.send_message(chat_id=update.message.chat_id, text=update.message.text)
def echoSticker(update, context):
sticker = update.message.sticker
context.bot.send_sticker(chat_id=update.message.chat_id, sticker=sticker)
def mensa(update, context):
params = context.args
if len(params) < 1:
daysToAdd = 0
else:
try:
daysToAdd = int(params[0])
except ValueError:
context.bot.send_message(chat_id=update.message.chat_id, text="The first and only parameter has to be an integer value. Aborting.")
return
day = update.message.date.date() + timedelta(days=daysToAdd)
url = "https://openmensa.org/api/v2/canteens/79/days/" + day.strftime("%Y-%m-%d") + "/meals"
resp = requests.get(url)
if not resp.ok:
context.bot.send_message(chat_id=update.message.chat_id, text="I failed miserably. Disgrace!")
return
jsonData = json.loads(resp.content)
for elem in jsonData:
mealNotes = elem["notes"]
if "vegetarisch" in mealNotes or "vegan" in mealNotes:
context.bot.send_message(chat_id=update.message.chat_id, text="*" + elem["name"] + "*", parse_mode="Markdown")
else:
context.bot.send_message(chat_id=update.message.chat_id, text="_" + elem["name"] + "_", parse_mode="Markdown")
def andre(update, context):
context.bot.send_message(chat_id=update.message.chat_id, text="Höhöhö Reichenbach!")
def leon(update, context):
joke = dadJoke()
context.bot.send_message(chat_id=update.message.chat_id, text=joke)
def loen(update, context):
joke = dadJoke()
translator = translate.Translator(from_lang='en', to_lang='de')
translatedJoke = translator.translate(joke)
context.bot.send_message(chat_id=update.message.chat_id, text=translatedJoke)
def dadJoke():
headers = {'Accept': 'text/plain '}
resp = requests.get("https://icanhazdadjoke.com/", headers=headers)
if not resp.ok:
return "I failed miserably. Disgrace!"
return resp.text
def georg(update, context):
context.bot.send_message(chat_id=update.message.chat_id, text="https://wiki.archlinux.org/index.php/Installation_guide")
def maxime(update, context):
context.bot.send_sticker(chat_id=update.message.chat_id, sticker="CAADBQADfAMAAukKyAPfAAFRgAuYdNoWBA")
def andrey(update, context):
context.bot.send_message(chat_id=update.message.chat_id, text="11.00 Bois. Yeef!")
def steffuu(update, context):
context.bot.send_message(chat_id=update.message.chat_id, text=random.choice(haes))
def getXkcd(id, rand):
resp = requests.get("https://xkcd.com/info.0.json")
if not resp.ok:
raise NotifyUserException("I failed miserably. Disgrace!")
jsonData = json.loads(resp.content)
upperLimit = jsonData["num"]
if rand:
id = random.randint(1, upperLimit)
elif id > upperLimit:
raise NotifyUserException("Id not in range. Maximum id currently is " + str(upperLimit) + ".")
resp = requests.get("https://xkcd.com/" + str(id) + "/info.0.json")
if not resp.ok:
raise NotifyUserException("I failed miserably. Disgrace!")
jsonData = json.loads(resp.content)
return (id, jsonData["img"], jsonData["title"])
def xkcd(update, context):
params = context.args
rand = False
id = 0
if len(params) < 1:
rand = True
else:
try:
id = int(params[0])
except ValueError:
context.bot.send_message(chat_id=update.message.chat_id, text="The first and only parameter has to be a positive integer value greater than 0. Aborting.")
return
if id < 1:
context.bot.send_message(chat_id=update.message.chat_id, text="The first and only parameter has to be a positive integer value greater than 0. Aborting.")
return
try:
xkcd = getXkcd(id, rand)
except NotifyUserException as error:
context.bot.send_message(chat_id=update.message.chat_id, text=str(error))
return
context.bot.send_photo(chat_id=update.message.chat_id, photo=xkcd[1], caption=str(xkcd[0]) + " - " + xkcd[2])
def decision(update, context):
headers = {'Accept': 'text/plain '}
resp = requests.get("https://yesno.wtf/api/", headers=headers)
if not resp.ok:
raise NotifyUserException("oof")
data = json.loads(resp.text)
context.bot.send_animation(chat_id=update.message.chat_id, animation=data["image"], caption=data["answer"])
def subredditImg(subreddit, offset=0, count=5):
imageFileEndings = [".png", ".jpg", ".jpeg", ".webp", ".gif"]
reddit = praw.Reddit(client_id=REDDIT_BOT_ID, client_secret=REDDIT_BOT_SECRET, user_agent=REDDIT_USER_AGENT)
images = []
for post in reddit.subreddit(subreddit).hot(limit=count):
for ending in imageFileEndings:
if str(post.url).endswith(ending):
images.append(post.url)
return images
def r(update, context):
params = context.args
offset = 0
if len(params) < 1:
context.bot.send_message(chat_id=update.message.chat_id, text="The first parameter has to be a string identifying the requested subreddit. Aborting.")
return
subreddit = params[0]
if len(params) > 1:
try:
offset = int(params[1])
except ValueError:
context.bot.send_message(chat_id=update.message.chat_id, text="The second parameter has to be a positive integer value. Aborting.")
return
if offset < 0:
context.bot.send_message(chat_id=update.message.chat_id, text="The second parameter has to be a positive integer value. Aborting.")
return
try:
images = subredditImg(subreddit)
except Exception:
context.bot.send_message(chat_id=update.message.chat_id, text="Something went wrong internally. I am deeply sorry.")
return
if len(images) == 0:
context.bot.send_message(chat_id=update.message.chat_id, text="There are no images in the top 5 posts.")
return
for image in images:
context.bot.send_photo(chat_id=update.message.chat_id, photo=image)
def cat(update, context):
context.bot.send_photo(
chat_id=update.message.chat_id,
photo="https://thiscatdoesnotexist.com?time=" + str(time.time()) + str(random.randint(1, 1024))
)
def horse(update, context):
context.bot.send_photo(
chat_id=update.message.chat_id,
photo="https://thishorsedoesnotexist.com?time=" + str(time.time()) + str(random.randint(1, 1024))
)
def person(update, context):
resp = requests.get("https://thispersondoesnotexist.com/image?time=" + str(time.time()) + str(random.randint(1, 1024)), headers={'User-Agent': 'USER_AGENT_BROWSER'})
if not resp.ok:
context.bot.send_message(chat_id=update.message.chat_id, text="Something went wrong internally. I am deeply sorry.")
return
with io.BytesIO(resp.content) as buf:
context.bot.send_photo(chat_id=update.message.chat_id, photo=buf)
def wisdom(update, context):
wisdom = createWisdomString()
context.bot.send_message(chat_id=update.message.chat_id, text=wisdom)
def createWisdomString():
optionalNoble = None
optionalThird = None
optionalAnnex = None
if bool(random.getrandbits(1)):
optionalNoble = random.choice(royalTitles)
if bool(random.getrandbits(1)):
optionalThird = random.choice(thirdFrag)
if bool(random.getrandbits(1)):
optionalAnnex = random.choice(nobleAnnex)
mainBody = random.choice(firstFrag) + random.choice(secondFrag)
output = "Die heutige Weisheit von "
if optionalNoble:
output += optionalNoble + " " + mainBody
else:
output += mainBody
if optionalThird:
output += optionalThird
if optionalAnnex:
output += " " + optionalAnnex
output += ": " + random.choice(wisdoms)
return output
def choose(update, context):
params = context.args
if len(params) < 1:
context.bot.send_message(chat_id=update.message.chat_id, text="You know, I can't choose if there is nothing to choose from. Wise words!")
return
elif len(params) == 1:
context.bot.send_message(chat_id=update.message.chat_id, text="How the hell am I supposed to choose when only value is entered? Gosh.")
return
else:
context.bot.send_message(chat_id=update.message.chat_id, text=random.choice(params) + " shall be my answer!")
def inlineR(update, context):
query = update.inline_query.query
results = []
try:
images = subredditImg(query, count=40)
except Exception:
results.append(tg.InlineQueryResultArticle(0, "No", tg.InputTextMessageContent("No!")))
else:
if len(images) == 0:
results.append(tg.InlineQueryResultArticle(0, "No", "No!", ))
else:
for img in images:
results.append(tg.InlineQueryResultPhoto(img, img, img))
finally:
update.inline_query.answer(results)
def main():
logging.basicConfig(format='%(asctime)s - %(name)s - %(levelname)s - %(message)s', level=logging.INFO)
API_TOKEN = os.environ['TELEGRAM_APITOKEN']
APP_ADDR = os.environ['APP_ADDRESS']
PORT = int(os.environ.get('PORT', '8443'))
updater = Updater(token=API_TOKEN, use_context=True)
startHandler = CommandHandler('start', start)
updater.dispatcher.add_handler(startHandler)
mensaHandler = CommandHandler('mensa', mensa)
updater.dispatcher.add_handler(mensaHandler)
andreHandler = CommandHandler('andre', andre)
updater.dispatcher.add_handler(andreHandler)
leonHandler = CommandHandler('leon', leon)
updater.dispatcher.add_handler(leonHandler)
georgHandler = CommandHandler('georg', georg)
updater.dispatcher.add_handler(georgHandler)
loenHandler = CommandHandler('loen', loen)
updater.dispatcher.add_handler(loenHandler)
maximeHandler = CommandHandler('maxime', maxime)
updater.dispatcher.add_handler(maximeHandler)
andreyHandler = CommandHandler('andrey', andrey)
updater.dispatcher.add_handler(andreyHandler)
steffuuHandler = CommandHandler('steffuu', steffuu)
updater.dispatcher.add_handler(steffuuHandler)
xkcdHandler = CommandHandler('xkcd', xkcd)
updater.dispatcher.add_handler(xkcdHandler)
decisionHandler = CommandHandler('decision', decision)
updater.dispatcher.add_handler(decisionHandler)
redditImgHandler = CommandHandler('r', r)
updater.dispatcher.add_handler(redditImgHandler)
echoHandlerText = MessageHandler(Filters.text, echoText)
updater.dispatcher.add_handler(echoHandlerText)
echoHandlerSticker = MessageHandler(Filters.sticker, echoSticker)
updater.dispatcher.add_handler(echoHandlerSticker)
catHandler = CommandHandler('cat', cat)
updater.dispatcher.add_handler(catHandler)
horseHandler = CommandHandler('horse', horse)
updater.dispatcher.add_handler(horseHandler)
personHandler = CommandHandler('person', person)
updater.dispatcher.add_handler(personHandler)
wisdomHandler = CommandHandler('wisdom', wisdom)
updater.dispatcher.add_handler(wisdomHandler)
chooseHandler = CommandHandler('choose', choose)
updater.dispatcher.add_handler(chooseHandler)
inlineRedditHandler = InlineQueryHandler(inlineR)
updater.dispatcher.add_handler(inlineRedditHandler)
updater.start_webhook(listen="0.0.0.0", port=PORT, url_path=API_TOKEN)
updater.bot.set_webhook(APP_ADDR + API_TOKEN)
updater.idle()
if __name__ == "__main__":
main()
| true | true |
f720ccd4ee2f6948386979975d4872da8241f475 | 232 | py | Python | handroll/i18n.py | mblayman/handroll | 42703cf5c969dccd0eb0715402ab84056ab65e22 | [
"BSD-2-Clause"
] | null | null | null | handroll/i18n.py | mblayman/handroll | 42703cf5c969dccd0eb0715402ab84056ab65e22 | [
"BSD-2-Clause"
] | null | null | null | handroll/i18n.py | mblayman/handroll | 42703cf5c969dccd0eb0715402ab84056ab65e22 | [
"BSD-2-Clause"
] | null | null | null | # Copyright (c) 2014, Matt Layman
import gettext
import os
localedir = os.path.join(os.path.abspath(os.path.dirname(__file__)), 'locale')
translate = gettext.translation('handroll', localedir, fallback=True)
_ = translate.gettext
| 25.777778 | 78 | 0.762931 |
import gettext
import os
localedir = os.path.join(os.path.abspath(os.path.dirname(__file__)), 'locale')
translate = gettext.translation('handroll', localedir, fallback=True)
_ = translate.gettext
| true | true |
f720cf1b4711518700b108a7d64fb57a175679e5 | 18,297 | py | Python | neutron/tests/functional/plugins/ml2/drivers/ovn/mech_driver/test_mech_driver.py | huiweics/neutron | 8c7ca776d8cbe967a8bbe773ab38c361414a7068 | [
"Apache-2.0"
] | null | null | null | neutron/tests/functional/plugins/ml2/drivers/ovn/mech_driver/test_mech_driver.py | huiweics/neutron | 8c7ca776d8cbe967a8bbe773ab38c361414a7068 | [
"Apache-2.0"
] | null | null | null | neutron/tests/functional/plugins/ml2/drivers/ovn/mech_driver/test_mech_driver.py | huiweics/neutron | 8c7ca776d8cbe967a8bbe773ab38c361414a7068 | [
"Apache-2.0"
] | null | null | null | # Copyright 2016 Red Hat, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import functools
import mock
from oslo_config import cfg
from oslo_utils import uuidutils
from neutron.common.ovn import constants as ovn_const
from neutron.common.ovn import utils
from neutron.common import utils as n_utils
from neutron.db import ovn_revision_numbers_db as db_rev
from neutron.tests.functional import base
class TestPortBinding(base.TestOVNFunctionalBase):
    """Functional tests for OVN port binding against different chassis.

    Covers binding to a plain OVS chassis, a DPDK chassis that supports
    vhost-user interfaces, and a chassis advertising a netdev datapath
    without the ``dpdkvhostuser`` interface type (which must fall back
    to a regular OVS binding).
    """

    def setUp(self):
        super(TestPortBinding, self).setUp()
        self.ovs_host = 'ovs-host'
        self.dpdk_host = 'dpdk-host'
        self.invalid_dpdk_host = 'invalid-host'
        self.vhu_mode = 'server'
        self.add_fake_chassis(self.ovs_host)
        self.add_fake_chassis(
            self.dpdk_host,
            external_ids={'datapath-type': 'netdev',
                          'iface-types': 'dummy,dummy-internal,dpdkvhostuser'})

        self.add_fake_chassis(
            self.invalid_dpdk_host,
            external_ids={'datapath-type': 'netdev',
                          'iface-types': 'dummy,dummy-internal,geneve,vxlan'})
        self.n1 = self._make_network(self.fmt, 'n1', True)
        res = self._create_subnet(self.fmt, self.n1['network']['id'],
                                  '10.0.0.0/24')
        self.deserialize(self.fmt, res)

    def _create_or_update_port(self, port_id=None, hostname=None):
        """Create a port, or rebind an existing one, to *hostname*.

        :param port_id: id of an existing port to update; ``None`` to
                        create a new port.
        :param hostname: binding host; when falsy on creation the port
                         is left unbound.
        :returns: the port id.
        """
        if port_id is None:
            port_data = {
                'port': {'network_id': self.n1['network']['id'],
                         'tenant_id': self._tenant_id}}

            if hostname:
                port_data['port']['device_id'] = uuidutils.generate_uuid()
                port_data['port']['device_owner'] = 'compute:None'
                port_data['port']['binding:host_id'] = hostname

            port_req = self.new_create_request('ports', port_data, self.fmt)
            port_res = port_req.get_response(self.api)
            p = self.deserialize(self.fmt, port_res)
            port_id = p['port']['id']
        else:
            port_data = {
                'port': {'device_id': uuidutils.generate_uuid(),
                         'device_owner': 'compute:None',
                         'binding:host_id': hostname}}
            port_req = self.new_update_request('ports', port_data, port_id,
                                               self.fmt)
            port_res = port_req.get_response(self.api)
            self.deserialize(self.fmt, port_res)

        return port_id

    def _expected_vhu_vif_details(self, port_id):
        # The vhost-user socket path is derived from the port id, so the
        # expected details can only be built once the port exists.  This
        # helper keeps the two test methods from duplicating the dict.
        return {'port_filter': False,
                'vhostuser_mode': self.vhu_mode,
                'vhostuser_ovs_plug': True,
                'vhostuser_socket': utils.ovn_vhu_sockpath(
                    cfg.CONF.ovn.vhost_sock_dir, port_id)}

    def _verify_vif_details(self, port_id, expected_host_name,
                            expected_vif_type, expected_vif_details):
        """Assert the port's binding host, vif_type and vif_details."""
        port_req = self.new_show_request('ports', port_id)
        port_res = port_req.get_response(self.api)
        p = self.deserialize(self.fmt, port_res)
        self.assertEqual(expected_host_name, p['port']['binding:host_id'])
        self.assertEqual(expected_vif_type, p['port']['binding:vif_type'])
        self.assertEqual(expected_vif_details,
                         p['port']['binding:vif_details'])

    def test_port_binding_create_port(self):
        port_id = self._create_or_update_port(hostname=self.ovs_host)
        self._verify_vif_details(port_id, self.ovs_host, 'ovs',
                                 {'port_filter': True})

        port_id = self._create_or_update_port(hostname=self.dpdk_host)
        self._verify_vif_details(port_id, self.dpdk_host, 'vhostuser',
                                 self._expected_vhu_vif_details(port_id))

        # A netdev chassis without the dpdkvhostuser iface type must
        # fall back to a plain OVS binding.
        port_id = self._create_or_update_port(hostname=self.invalid_dpdk_host)
        self._verify_vif_details(port_id, self.invalid_dpdk_host, 'ovs',
                                 {'port_filter': True})

    def test_port_binding_update_port(self):
        port_id = self._create_or_update_port()
        self._verify_vif_details(port_id, '', 'unbound', {})

        port_id = self._create_or_update_port(port_id=port_id,
                                              hostname=self.ovs_host)
        self._verify_vif_details(port_id, self.ovs_host, 'ovs',
                                 {'port_filter': True})

        port_id = self._create_or_update_port(port_id=port_id,
                                              hostname=self.dpdk_host)
        self._verify_vif_details(port_id, self.dpdk_host, 'vhostuser',
                                 self._expected_vhu_vif_details(port_id))

        # Rebinding to the invalid DPDK chassis reverts to an OVS vif.
        port_id = self._create_or_update_port(port_id=port_id,
                                              hostname=self.invalid_dpdk_host)
        self._verify_vif_details(port_id, self.invalid_dpdk_host, 'ovs',
                                 {'port_filter': True})
class TestPortBindingOverTcp(TestPortBinding):
    """Re-run the port binding tests with the OVSDB connection over TCP."""

    def get_ovsdb_server_protocol(self):
        """Select TCP as the transport for the test OVSDB server."""
        return 'tcp'
# TODO(mjozefcz): This test class hangs during execution.
class TestPortBindingOverSsl(TestPortBinding):
    """Re-run the port binding tests with the OVSDB connection over SSL."""

    def get_ovsdb_server_protocol(self):
        """Select SSL as the transport for the test OVSDB server."""
        return 'ssl'
class TestNetworkMTUUpdate(base.TestOVNFunctionalBase):
    """Check how network MTU changes propagate to OVN subnet DHCP options."""

    def setUp(self):
        super(TestNetworkMTUUpdate, self).setUp()
        self._ovn_client = self.mech_driver._ovn_client
        self.n1 = self._make_network(self.fmt, 'n1', True)
        subnet_res = self._create_subnet(self.fmt, self.n1['network']['id'],
                                         '10.0.0.0/24')
        self.sub = self.deserialize(self.fmt, subnet_res)

    def _subnet_dhcp_mtu(self):
        # Read the MTU currently advertised by the subnet's DHCP options
        # in the OVN northbound database.
        dhcp_opts = (
            self.mech_driver._ovn_client._nb_idl.get_subnet_dhcp_options(
                self.sub['subnet']['id'])
        )
        return int(dhcp_opts['subnet']['options']['mtu'])

    def _update_network_mtu(self, mtu_value):
        # Issue a network update via the API setting only the MTU.
        body = {'network': {'mtu': mtu_value}}
        req = self.new_update_request(
            'networks', body, self.n1['network']['id'], self.fmt)
        req.get_response(self.api)

    def test_update_network_mtu(self):
        new_mtu = self.n1['network']['mtu'] - 100
        # Sanity check: the DHCP options do not yet carry the new value.
        self.assertNotEqual(self._subnet_dhcp_mtu(), new_mtu)
        self._update_network_mtu(new_mtu)
        # The OVN DHCP options must now reflect the updated MTU.
        self.assertEqual(self._subnet_dhcp_mtu(), new_mtu)

    def test_no_update_network_mtu(self):
        unchanged_mtu = self.n1['network']['mtu']
        base_revision = db_rev.get_revision_row(
            self.context,
            self.sub['subnet']['id'])
        self._update_network_mtu(unchanged_mtu)
        second_revision = db_rev.get_revision_row(
            self.context,
            self.sub['subnet']['id'])
        # A no-op MTU update must not bump the subnet's revision row.
        self.assertEqual(
            base_revision.updated_at,
            second_revision.updated_at)
@mock.patch('neutron.plugins.ml2.drivers.ovn.mech_driver.'
            'ovsdb.ovn_client.OVNClient._is_virtual_port_supported',
            lambda *args: True)
class TestVirtualPorts(base.TestOVNFunctionalBase):
    """Tests OVN "virtual" logical switch ports.

    A Neutron port whose fixed IP appears in another port's
    allowed_address_pairs must become an OVN Logical_Switch_Port of type
    virtual, with the owning ("parent") ports recorded in its options, and
    must revert to a regular port once no parent claims its IP anymore.
    """

    def setUp(self):
        super(TestVirtualPorts, self).setUp()
        self._ovn_client = self.mech_driver._ovn_client
        self.n1 = self._make_network(self.fmt, 'n1', True)
        res = self._create_subnet(self.fmt, self.n1['network']['id'],
                                  '10.0.0.0/24')
        self.sub = self.deserialize(self.fmt, res)

    def _create_port(self, fixed_ip=None, allowed_address=None):
        """Create a port on n1, optionally with a fixed IP or address pair."""
        port_data = {
            'port': {'network_id': self.n1['network']['id'],
                     'tenant_id': self._tenant_id}}
        if fixed_ip:
            port_data['port']['fixed_ips'] = [{'ip_address': fixed_ip}]
        if allowed_address:
            port_data['port']['allowed_address_pairs'] = [
                {'ip_address': allowed_address}]
        port_req = self.new_create_request('ports', port_data, self.fmt)
        port_res = port_req.get_response(self.api)
        self.assertEqual(201, port_res.status_int)
        return self.deserialize(self.fmt, port_res)['port']

    def _update_allowed_address_pair(self, port_id, data):
        """Replace the allowed_address_pairs list of the given port."""
        port_data = {
            'port': {'allowed_address_pairs': data}}
        port_req = self.new_update_request('ports', port_data, port_id,
                                           self.fmt)
        port_res = port_req.get_response(self.api)
        self.assertEqual(200, port_res.status_int)
        return self.deserialize(self.fmt, port_res)['port']

    def _set_allowed_address_pair(self, port_id, ip):
        return self._update_allowed_address_pair(port_id, [{'ip_address': ip}])

    def _unset_allowed_address_pair(self, port_id):
        return self._update_allowed_address_pair(port_id, [])

    def _find_port_row(self, port_id):
        """Return the OVN NB Logical_Switch_Port row for port_id, or None."""
        cmd = self.nb_api.db_find_rows(
            'Logical_Switch_Port', ('name', '=', port_id))
        rows = cmd.execute(check_error=True)
        return rows[0] if rows else None

    def _is_ovn_port_type(self, port_id, port_type):
        ovn_vport = self._find_port_row(port_id)
        return port_type == ovn_vport.type

    def _check_port_type(self, port_id, port_type):
        # Wait until the OVN row converges to the expected type; the OVN
        # driver applies updates asynchronously. (Parameter renamed from
        # "type" to avoid shadowing the builtin; all callers pass it
        # positionally.)
        check = functools.partial(self._is_ovn_port_type, port_id, port_type)
        n_utils.wait_until_true(check, timeout=10)

    def test_virtual_port_created_before(self):
        virt_port = self._create_port()
        virt_ip = virt_port['fixed_ips'][0]['ip_address']
        # Create the master port with the VIP address already set in
        # the allowed_address_pairs field
        master = self._create_port(allowed_address=virt_ip)
        # Assert the virt port has the type virtual and master is set
        # as parent
        self._check_port_type(virt_port['id'], ovn_const.LSP_TYPE_VIRTUAL)
        ovn_vport = self._find_port_row(virt_port['id'])
        self.assertEqual(
            virt_ip,
            ovn_vport.options[ovn_const.LSP_OPTIONS_VIRTUAL_IP_KEY])
        self.assertEqual(
            master['id'],
            ovn_vport.options[ovn_const.LSP_OPTIONS_VIRTUAL_PARENTS_KEY])
        # Create the backup parent port
        backup = self._create_port(allowed_address=virt_ip)
        # Assert the virt port now also includes the backup port as a parent
        self._check_port_type(virt_port['id'], ovn_const.LSP_TYPE_VIRTUAL)
        ovn_vport = self._find_port_row(virt_port['id'])
        self.assertEqual(
            virt_ip,
            ovn_vport.options[ovn_const.LSP_OPTIONS_VIRTUAL_IP_KEY])
        self.assertIn(
            master['id'],
            ovn_vport.options[ovn_const.LSP_OPTIONS_VIRTUAL_PARENTS_KEY])
        self.assertIn(
            backup['id'],
            ovn_vport.options[ovn_const.LSP_OPTIONS_VIRTUAL_PARENTS_KEY])

    def test_virtual_port_update_address_pairs(self):
        master = self._create_port()
        backup = self._create_port()
        virt_port = self._create_port()
        virt_ip = virt_port['fixed_ips'][0]['ip_address']
        # Assert the virt port does not yet have the type virtual (no
        # address pairs were set yet)
        self._check_port_type(virt_port['id'], '')
        ovn_vport = self._find_port_row(virt_port['id'])
        self.assertNotIn(ovn_const.LSP_OPTIONS_VIRTUAL_PARENTS_KEY,
                         ovn_vport.options)
        self.assertNotIn(ovn_const.LSP_OPTIONS_VIRTUAL_IP_KEY,
                         ovn_vport.options)
        # Set the virt IP to the allowed address pairs of the master port
        self._set_allowed_address_pair(master['id'], virt_ip)
        # Assert the virt port is now updated
        self._check_port_type(virt_port['id'], ovn_const.LSP_TYPE_VIRTUAL)
        ovn_vport = self._find_port_row(virt_port['id'])
        self.assertEqual(
            virt_ip,
            ovn_vport.options[ovn_const.LSP_OPTIONS_VIRTUAL_IP_KEY])
        self.assertEqual(
            master['id'],
            ovn_vport.options[ovn_const.LSP_OPTIONS_VIRTUAL_PARENTS_KEY])
        # Set the virt IP to the allowed address pairs of the backup port
        self._set_allowed_address_pair(backup['id'], virt_ip)
        # Assert the virt port now includes the backup port as a parent
        self._check_port_type(virt_port['id'], ovn_const.LSP_TYPE_VIRTUAL)
        ovn_vport = self._find_port_row(virt_port['id'])
        self.assertEqual(
            virt_ip,
            ovn_vport.options[ovn_const.LSP_OPTIONS_VIRTUAL_IP_KEY])
        self.assertIn(
            master['id'],
            ovn_vport.options[ovn_const.LSP_OPTIONS_VIRTUAL_PARENTS_KEY])
        self.assertIn(
            backup['id'],
            ovn_vport.options[ovn_const.LSP_OPTIONS_VIRTUAL_PARENTS_KEY])
        # Remove the address pairs from the master port
        self._unset_allowed_address_pair(master['id'])
        # Assert the virt port now only has the backup port as a parent
        self._check_port_type(virt_port['id'], ovn_const.LSP_TYPE_VIRTUAL)
        ovn_vport = self._find_port_row(virt_port['id'])
        self.assertEqual(
            virt_ip,
            ovn_vport.options[ovn_const.LSP_OPTIONS_VIRTUAL_IP_KEY])
        self.assertEqual(
            backup['id'],
            ovn_vport.options[ovn_const.LSP_OPTIONS_VIRTUAL_PARENTS_KEY])
        # Remove the address pairs from the backup port
        self._unset_allowed_address_pair(backup['id'])
        # Assert the virt port is not type virtual anymore and the virtual
        # port options are cleared
        self._check_port_type(virt_port['id'], '')
        ovn_vport = self._find_port_row(virt_port['id'])
        self.assertNotIn(ovn_const.LSP_OPTIONS_VIRTUAL_PARENTS_KEY,
                         ovn_vport.options)
        self.assertNotIn(ovn_const.LSP_OPTIONS_VIRTUAL_IP_KEY,
                         ovn_vport.options)

    def test_virtual_port_created_after(self):
        master = self._create_port(fixed_ip='10.0.0.11')
        backup = self._create_port(fixed_ip='10.0.0.12')
        virt_ip = '10.0.0.55'
        # Set the virt IP to the master and backup ports *before* creating
        # the virtual port
        self._set_allowed_address_pair(master['id'], virt_ip)
        self._set_allowed_address_pair(backup['id'], virt_ip)
        virt_port = self._create_port(fixed_ip=virt_ip)
        # Assert the virtual port has been created with the
        # right type and parents
        ovn_vport = self._find_port_row(virt_port['id'])
        self.assertEqual(ovn_const.LSP_TYPE_VIRTUAL, ovn_vport.type)
        self.assertEqual(
            virt_ip,
            ovn_vport.options[ovn_const.LSP_OPTIONS_VIRTUAL_IP_KEY])
        self.assertIn(
            master['id'],
            ovn_vport.options[ovn_const.LSP_OPTIONS_VIRTUAL_PARENTS_KEY])
        self.assertIn(
            backup['id'],
            ovn_vport.options[ovn_const.LSP_OPTIONS_VIRTUAL_PARENTS_KEY])

    def test_virtual_port_delete_parents(self):
        master = self._create_port()
        backup = self._create_port()
        virt_port = self._create_port()
        virt_ip = virt_port['fixed_ips'][0]['ip_address']
        # Assert the virt port does not yet have the type virtual (no
        # address pairs were set yet)
        ovn_vport = self._find_port_row(virt_port['id'])
        self.assertEqual("", ovn_vport.type)
        self.assertNotIn(ovn_const.LSP_OPTIONS_VIRTUAL_PARENTS_KEY,
                         ovn_vport.options)
        self.assertNotIn(ovn_const.LSP_OPTIONS_VIRTUAL_IP_KEY,
                         ovn_vport.options)
        # Set allowed address pairs to the master and backup ports
        self._set_allowed_address_pair(master['id'], virt_ip)
        self._set_allowed_address_pair(backup['id'], virt_ip)
        # Assert the virtual port is correct
        ovn_vport = self._find_port_row(virt_port['id'])
        self.assertEqual(ovn_const.LSP_TYPE_VIRTUAL, ovn_vport.type)
        self.assertEqual(
            virt_ip,
            ovn_vport.options[ovn_const.LSP_OPTIONS_VIRTUAL_IP_KEY])
        self.assertIn(
            master['id'],
            ovn_vport.options[ovn_const.LSP_OPTIONS_VIRTUAL_PARENTS_KEY])
        self.assertIn(
            backup['id'],
            ovn_vport.options[ovn_const.LSP_OPTIONS_VIRTUAL_PARENTS_KEY])
        # Delete the backup port
        self._delete('ports', backup['id'])
        # Assert the virt port now only has the master port as a parent
        ovn_vport = self._find_port_row(virt_port['id'])
        self.assertEqual(ovn_const.LSP_TYPE_VIRTUAL, ovn_vport.type)
        self.assertEqual(
            virt_ip,
            ovn_vport.options[ovn_const.LSP_OPTIONS_VIRTUAL_IP_KEY])
        self.assertEqual(
            master['id'],
            ovn_vport.options[ovn_const.LSP_OPTIONS_VIRTUAL_PARENTS_KEY])
        # Delete the master port
        self._delete('ports', master['id'])
        # Assert the virt port is not type virtual anymore and the virtual
        # port options are cleared
        ovn_vport = self._find_port_row(virt_port['id'])
        self.assertEqual("", ovn_vport.type)
        self.assertNotIn(ovn_const.LSP_OPTIONS_VIRTUAL_PARENTS_KEY,
                         ovn_vport.options)
        self.assertNotIn(ovn_const.LSP_OPTIONS_VIRTUAL_IP_KEY,
                         ovn_vport.options)
| 42.158986 | 79 | 0.6283 |
import functools
import mock
from oslo_config import cfg
from oslo_utils import uuidutils
from neutron.common.ovn import constants as ovn_const
from neutron.common.ovn import utils
from neutron.common import utils as n_utils
from neutron.db import ovn_revision_numbers_db as db_rev
from neutron.tests.functional import base
class TestPortBinding(base.TestOVNFunctionalBase):
    """Tests ML2/OVN port binding (vif_type / vif_details) per chassis.

    Registers three fake chassis: a plain OVS host, a DPDK host (vhostuser
    capable), and a "DPDK" host whose iface-types lack dpdkvhostuser, then
    checks the vif details the driver computes for ports bound to each.
    """
    def setUp(self):
        super(TestPortBinding, self).setUp()
        self.ovs_host = 'ovs-host'
        self.dpdk_host = 'dpdk-host'
        self.invalid_dpdk_host = 'invalid-host'
        self.vhu_mode = 'server'
        self.add_fake_chassis(self.ovs_host)
        # netdev datapath advertising dpdkvhostuser -> vhostuser binding
        self.add_fake_chassis(
            self.dpdk_host,
            external_ids={'datapath-type': 'netdev',
                          'iface-types': 'dummy,dummy-internal,dpdkvhostuser'})
        # netdev datapath WITHOUT dpdkvhostuser -> should fall back to ovs
        self.add_fake_chassis(
            self.invalid_dpdk_host,
            external_ids={'datapath-type': 'netdev',
                          'iface-types': 'dummy,dummy-internal,geneve,vxlan'})
        self.n1 = self._make_network(self.fmt, 'n1', True)
        res = self._create_subnet(self.fmt, self.n1['network']['id'],
                                  '10.0.0.0/24')
        self.deserialize(self.fmt, res)
    def _create_or_update_port(self, port_id=None, hostname=None):
        """Create a port on n1 (port_id None) or rebind an existing one.

        When ``hostname`` is set, the port is given compute device fields
        and bound to that host. Returns the port id.
        """
        if port_id is None:
            port_data = {
                'port': {'network_id': self.n1['network']['id'],
                         'tenant_id': self._tenant_id}}
            if hostname:
                port_data['port']['device_id'] = uuidutils.generate_uuid()
                port_data['port']['device_owner'] = 'compute:None'
                port_data['port']['binding:host_id'] = hostname
            port_req = self.new_create_request('ports', port_data, self.fmt)
            port_res = port_req.get_response(self.api)
            p = self.deserialize(self.fmt, port_res)
            port_id = p['port']['id']
        else:
            port_data = {
                'port': {'device_id': uuidutils.generate_uuid(),
                         'device_owner': 'compute:None',
                         'binding:host_id': hostname}}
            port_req = self.new_update_request('ports', port_data, port_id,
                                               self.fmt)
            port_res = port_req.get_response(self.api)
            self.deserialize(self.fmt, port_res)
        return port_id
    def _verify_vif_details(self, port_id, expected_host_name,
                            expected_vif_type, expected_vif_details):
        """Fetch the port and assert its binding host/vif_type/vif_details."""
        port_req = self.new_show_request('ports', port_id)
        port_res = port_req.get_response(self.api)
        p = self.deserialize(self.fmt, port_res)
        self.assertEqual(expected_host_name, p['port']['binding:host_id'])
        self.assertEqual(expected_vif_type, p['port']['binding:vif_type'])
        self.assertEqual(expected_vif_details,
                         p['port']['binding:vif_details'])
    def test_port_binding_create_port(self):
        """Binding chosen at create time for OVS, DPDK and non-DPDK hosts."""
        port_id = self._create_or_update_port(hostname=self.ovs_host)
        self._verify_vif_details(port_id, self.ovs_host, 'ovs',
                                 {'port_filter': True})
        port_id = self._create_or_update_port(hostname=self.dpdk_host)
        expected_vif_details = {'port_filter': False,
                                'vhostuser_mode': self.vhu_mode,
                                'vhostuser_ovs_plug': True}
        # Socket path is derived from the port id, so compute it afterwards.
        expected_vif_details['vhostuser_socket'] = (
            utils.ovn_vhu_sockpath(cfg.CONF.ovn.vhost_sock_dir, port_id))
        self._verify_vif_details(port_id, self.dpdk_host, 'vhostuser',
                                 expected_vif_details)
        port_id = self._create_or_update_port(hostname=self.invalid_dpdk_host)
        self._verify_vif_details(port_id, self.invalid_dpdk_host, 'ovs',
                                 {'port_filter': True})
    def test_port_binding_update_port(self):
        """Binding recomputed when an unbound port is moved between hosts."""
        port_id = self._create_or_update_port()
        self._verify_vif_details(port_id, '', 'unbound', {})
        port_id = self._create_or_update_port(port_id=port_id,
                                              hostname=self.ovs_host)
        self._verify_vif_details(port_id, self.ovs_host, 'ovs',
                                 {'port_filter': True})
        port_id = self._create_or_update_port(port_id=port_id,
                                              hostname=self.dpdk_host)
        expected_vif_details = {'port_filter': False,
                                'vhostuser_mode': self.vhu_mode,
                                'vhostuser_ovs_plug': True}
        expected_vif_details['vhostuser_socket'] = (
            utils.ovn_vhu_sockpath(cfg.CONF.ovn.vhost_sock_dir, port_id))
        self._verify_vif_details(port_id, self.dpdk_host, 'vhostuser',
                                 expected_vif_details)
        port_id = self._create_or_update_port(port_id=port_id,
                                              hostname=self.invalid_dpdk_host)
        self._verify_vif_details(port_id, self.invalid_dpdk_host, 'ovs',
                                 {'port_filter': True})
class TestPortBindingOverTcp(TestPortBinding):
    """Re-runs the TestPortBinding suite over a TCP OVSDB connection."""
    def get_ovsdb_server_protocol(self):
        return 'tcp'
class TestPortBindingOverSsl(TestPortBinding):
    """Re-runs the TestPortBinding suite over an SSL OVSDB connection."""
    def get_ovsdb_server_protocol(self):
        return 'ssl'
class TestNetworkMTUUpdate(base.TestOVNFunctionalBase):
    """Tests propagation of network MTU changes to OVN DHCP options."""
    def setUp(self):
        super(TestNetworkMTUUpdate, self).setUp()
        self._ovn_client = self.mech_driver._ovn_client
        self.n1 = self._make_network(self.fmt, 'n1', True)
        res = self._create_subnet(self.fmt, self.n1['network']['id'],
                                  '10.0.0.0/24')
        self.sub = self.deserialize(self.fmt, res)
    def test_update_network_mtu(self):
        """Lowering the network MTU must update the subnet's DHCP options."""
        mtu_value = self.n1['network']['mtu'] - 100
        dhcp_options = (
            self.mech_driver._ovn_client._nb_idl.get_subnet_dhcp_options(
                self.sub['subnet']['id'])
        )
        # Sanity check: the stored DHCP MTU differs from the new value.
        self.assertNotEqual(
            int(dhcp_options['subnet']['options']['mtu']),
            mtu_value)
        data = {'network': {'mtu': mtu_value}}
        req = self.new_update_request(
            'networks', data, self.n1['network']['id'], self.fmt)
        req.get_response(self.api)
        dhcp_options = (
            self.mech_driver._ovn_client._nb_idl.get_subnet_dhcp_options(
                self.sub['subnet']['id'])
        )
        self.assertEqual(
            int(dhcp_options['subnet']['options']['mtu']),
            mtu_value)
    def test_no_update_network_mtu(self):
        """Re-submitting the current MTU must not bump the subnet revision."""
        mtu_value = self.n1['network']['mtu']
        base_revision = db_rev.get_revision_row(
            self.context,
            self.sub['subnet']['id'])
        data = {'network': {'mtu': mtu_value}}
        req = self.new_update_request(
            'networks', data, self.n1['network']['id'], self.fmt)
        req.get_response(self.api)
        second_revision = db_rev.get_revision_row(
            self.context,
            self.sub['subnet']['id'])
        # A no-op update must leave the revision timestamp untouched.
        self.assertEqual(
            base_revision.updated_at,
            second_revision.updated_at)
@mock.patch('neutron.plugins.ml2.drivers.ovn.mech_driver.'
            'ovsdb.ovn_client.OVNClient._is_virtual_port_supported',
            lambda *args: True)
class TestVirtualPorts(base.TestOVNFunctionalBase):
    """Tests OVN "virtual" logical switch ports.

    A Neutron port whose fixed IP appears in another port's
    allowed_address_pairs must become an OVN Logical_Switch_Port of type
    virtual, with the owning ("parent") ports recorded in its options, and
    must revert to a regular port once no parent claims its IP anymore.
    """

    def setUp(self):
        super(TestVirtualPorts, self).setUp()
        self._ovn_client = self.mech_driver._ovn_client
        self.n1 = self._make_network(self.fmt, 'n1', True)
        res = self._create_subnet(self.fmt, self.n1['network']['id'],
                                  '10.0.0.0/24')
        self.sub = self.deserialize(self.fmt, res)

    def _create_port(self, fixed_ip=None, allowed_address=None):
        """Create a port on n1, optionally with a fixed IP or address pair."""
        port_data = {
            'port': {'network_id': self.n1['network']['id'],
                     'tenant_id': self._tenant_id}}
        if fixed_ip:
            port_data['port']['fixed_ips'] = [{'ip_address': fixed_ip}]
        if allowed_address:
            port_data['port']['allowed_address_pairs'] = [
                {'ip_address': allowed_address}]
        port_req = self.new_create_request('ports', port_data, self.fmt)
        port_res = port_req.get_response(self.api)
        self.assertEqual(201, port_res.status_int)
        return self.deserialize(self.fmt, port_res)['port']

    def _update_allowed_address_pair(self, port_id, data):
        """Replace the allowed_address_pairs list of the given port."""
        port_data = {
            'port': {'allowed_address_pairs': data}}
        port_req = self.new_update_request('ports', port_data, port_id,
                                           self.fmt)
        port_res = port_req.get_response(self.api)
        self.assertEqual(200, port_res.status_int)
        return self.deserialize(self.fmt, port_res)['port']

    def _set_allowed_address_pair(self, port_id, ip):
        return self._update_allowed_address_pair(port_id, [{'ip_address': ip}])

    def _unset_allowed_address_pair(self, port_id):
        return self._update_allowed_address_pair(port_id, [])

    def _find_port_row(self, port_id):
        """Return the OVN NB Logical_Switch_Port row for port_id, or None."""
        cmd = self.nb_api.db_find_rows(
            'Logical_Switch_Port', ('name', '=', port_id))
        rows = cmd.execute(check_error=True)
        return rows[0] if rows else None

    def _is_ovn_port_type(self, port_id, port_type):
        ovn_vport = self._find_port_row(port_id)
        return port_type == ovn_vport.type

    def _check_port_type(self, port_id, port_type):
        # Wait until the OVN row converges to the expected type; the OVN
        # driver applies updates asynchronously. (Parameter renamed from
        # "type" to avoid shadowing the builtin; all callers pass it
        # positionally.)
        check = functools.partial(self._is_ovn_port_type, port_id, port_type)
        n_utils.wait_until_true(check, timeout=10)

    def test_virtual_port_created_before(self):
        # VIP port first, then parents claiming its IP via address pairs.
        virt_port = self._create_port()
        virt_ip = virt_port['fixed_ips'][0]['ip_address']
        master = self._create_port(allowed_address=virt_ip)
        # The virt port must become virtual with master as its parent.
        self._check_port_type(virt_port['id'], ovn_const.LSP_TYPE_VIRTUAL)
        ovn_vport = self._find_port_row(virt_port['id'])
        self.assertEqual(
            virt_ip,
            ovn_vport.options[ovn_const.LSP_OPTIONS_VIRTUAL_IP_KEY])
        self.assertEqual(
            master['id'],
            ovn_vport.options[ovn_const.LSP_OPTIONS_VIRTUAL_PARENTS_KEY])
        backup = self._create_port(allowed_address=virt_ip)
        # A second parent must be appended to the virtual-parents option.
        self._check_port_type(virt_port['id'], ovn_const.LSP_TYPE_VIRTUAL)
        ovn_vport = self._find_port_row(virt_port['id'])
        self.assertEqual(
            virt_ip,
            ovn_vport.options[ovn_const.LSP_OPTIONS_VIRTUAL_IP_KEY])
        self.assertIn(
            master['id'],
            ovn_vport.options[ovn_const.LSP_OPTIONS_VIRTUAL_PARENTS_KEY])
        self.assertIn(
            backup['id'],
            ovn_vport.options[ovn_const.LSP_OPTIONS_VIRTUAL_PARENTS_KEY])

    def test_virtual_port_update_address_pairs(self):
        master = self._create_port()
        backup = self._create_port()
        virt_port = self._create_port()
        virt_ip = virt_port['fixed_ips'][0]['ip_address']
        # No address pairs set yet: the port must not be virtual.
        self._check_port_type(virt_port['id'], '')
        ovn_vport = self._find_port_row(virt_port['id'])
        self.assertNotIn(ovn_const.LSP_OPTIONS_VIRTUAL_PARENTS_KEY,
                         ovn_vport.options)
        self.assertNotIn(ovn_const.LSP_OPTIONS_VIRTUAL_IP_KEY,
                         ovn_vport.options)
        # Claim the VIP from the master port.
        self._set_allowed_address_pair(master['id'], virt_ip)
        self._check_port_type(virt_port['id'], ovn_const.LSP_TYPE_VIRTUAL)
        ovn_vport = self._find_port_row(virt_port['id'])
        self.assertEqual(
            virt_ip,
            ovn_vport.options[ovn_const.LSP_OPTIONS_VIRTUAL_IP_KEY])
        self.assertEqual(
            master['id'],
            ovn_vport.options[ovn_const.LSP_OPTIONS_VIRTUAL_PARENTS_KEY])
        # Claim the VIP from the backup port too: both become parents.
        self._set_allowed_address_pair(backup['id'], virt_ip)
        self._check_port_type(virt_port['id'], ovn_const.LSP_TYPE_VIRTUAL)
        ovn_vport = self._find_port_row(virt_port['id'])
        self.assertEqual(
            virt_ip,
            ovn_vport.options[ovn_const.LSP_OPTIONS_VIRTUAL_IP_KEY])
        self.assertIn(
            master['id'],
            ovn_vport.options[ovn_const.LSP_OPTIONS_VIRTUAL_PARENTS_KEY])
        self.assertIn(
            backup['id'],
            ovn_vport.options[ovn_const.LSP_OPTIONS_VIRTUAL_PARENTS_KEY])
        # Drop the master's claim: only the backup parent remains.
        self._unset_allowed_address_pair(master['id'])
        self._check_port_type(virt_port['id'], ovn_const.LSP_TYPE_VIRTUAL)
        ovn_vport = self._find_port_row(virt_port['id'])
        self.assertEqual(
            virt_ip,
            ovn_vport.options[ovn_const.LSP_OPTIONS_VIRTUAL_IP_KEY])
        self.assertEqual(
            backup['id'],
            ovn_vport.options[ovn_const.LSP_OPTIONS_VIRTUAL_PARENTS_KEY])
        # Drop the backup's claim: the port reverts to a regular LSP.
        self._unset_allowed_address_pair(backup['id'])
        self._check_port_type(virt_port['id'], '')
        ovn_vport = self._find_port_row(virt_port['id'])
        self.assertNotIn(ovn_const.LSP_OPTIONS_VIRTUAL_PARENTS_KEY,
                         ovn_vport.options)
        self.assertNotIn(ovn_const.LSP_OPTIONS_VIRTUAL_IP_KEY,
                         ovn_vport.options)

    def test_virtual_port_created_after(self):
        # Parents claim the VIP *before* the VIP port exists.
        master = self._create_port(fixed_ip='10.0.0.11')
        backup = self._create_port(fixed_ip='10.0.0.12')
        virt_ip = '10.0.0.55'
        self._set_allowed_address_pair(master['id'], virt_ip)
        self._set_allowed_address_pair(backup['id'], virt_ip)
        virt_port = self._create_port(fixed_ip=virt_ip)
        # The port must be created directly as virtual with both parents.
        ovn_vport = self._find_port_row(virt_port['id'])
        self.assertEqual(ovn_const.LSP_TYPE_VIRTUAL, ovn_vport.type)
        self.assertEqual(
            virt_ip,
            ovn_vport.options[ovn_const.LSP_OPTIONS_VIRTUAL_IP_KEY])
        self.assertIn(
            master['id'],
            ovn_vport.options[ovn_const.LSP_OPTIONS_VIRTUAL_PARENTS_KEY])
        self.assertIn(
            backup['id'],
            ovn_vport.options[ovn_const.LSP_OPTIONS_VIRTUAL_PARENTS_KEY])

    def test_virtual_port_delete_parents(self):
        master = self._create_port()
        backup = self._create_port()
        virt_port = self._create_port()
        virt_ip = virt_port['fixed_ips'][0]['ip_address']
        # No address pairs yet: not virtual.
        ovn_vport = self._find_port_row(virt_port['id'])
        self.assertEqual("", ovn_vport.type)
        self.assertNotIn(ovn_const.LSP_OPTIONS_VIRTUAL_PARENTS_KEY,
                         ovn_vport.options)
        self.assertNotIn(ovn_const.LSP_OPTIONS_VIRTUAL_IP_KEY,
                         ovn_vport.options)
        # Both parents claim the VIP.
        self._set_allowed_address_pair(master['id'], virt_ip)
        self._set_allowed_address_pair(backup['id'], virt_ip)
        ovn_vport = self._find_port_row(virt_port['id'])
        self.assertEqual(ovn_const.LSP_TYPE_VIRTUAL, ovn_vport.type)
        self.assertEqual(
            virt_ip,
            ovn_vport.options[ovn_const.LSP_OPTIONS_VIRTUAL_IP_KEY])
        self.assertIn(
            master['id'],
            ovn_vport.options[ovn_const.LSP_OPTIONS_VIRTUAL_PARENTS_KEY])
        self.assertIn(
            backup['id'],
            ovn_vport.options[ovn_const.LSP_OPTIONS_VIRTUAL_PARENTS_KEY])
        # Deleting the backup parent leaves only the master.
        self._delete('ports', backup['id'])
        ovn_vport = self._find_port_row(virt_port['id'])
        self.assertEqual(ovn_const.LSP_TYPE_VIRTUAL, ovn_vport.type)
        self.assertEqual(
            virt_ip,
            ovn_vport.options[ovn_const.LSP_OPTIONS_VIRTUAL_IP_KEY])
        self.assertEqual(
            master['id'],
            ovn_vport.options[ovn_const.LSP_OPTIONS_VIRTUAL_PARENTS_KEY])
        # Deleting the last parent clears the virtual type and options.
        self._delete('ports', master['id'])
        ovn_vport = self._find_port_row(virt_port['id'])
        self.assertEqual("", ovn_vport.type)
        self.assertNotIn(ovn_const.LSP_OPTIONS_VIRTUAL_PARENTS_KEY,
                         ovn_vport.options)
        self.assertNotIn(ovn_const.LSP_OPTIONS_VIRTUAL_IP_KEY,
                         ovn_vport.options)
| true | true |
f720cfcd78b89cb225ad9d77d9115e223033a0da | 8,174 | py | Python | tensorflow_federated/python/core/impl/value_utils.py | hieunq95/federated | 15402997ce7fb35d782d715758acf82767206916 | [
"Apache-2.0"
] | 5 | 2019-07-23T14:49:46.000Z | 2022-03-30T13:54:22.000Z | tensorflow_federated/python/core/impl/value_utils.py | hieunq95/federated | 15402997ce7fb35d782d715758acf82767206916 | [
"Apache-2.0"
] | null | null | null | tensorflow_federated/python/core/impl/value_utils.py | hieunq95/federated | 15402997ce7fb35d782d715758acf82767206916 | [
"Apache-2.0"
] | null | null | null | # Lint as: python3
# Copyright 2018, The TensorFlow Federated Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Utilities file for functions with TFF `Value`s as inputs and outputs."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from six.moves import range
from tensorflow_federated.python.common_libs import anonymous_tuple
from tensorflow_federated.python.common_libs import py_typecheck
from tensorflow_federated.python.core.api import computation_types
from tensorflow_federated.python.core.api import placements
from tensorflow_federated.python.core.api import value_base
from tensorflow_federated.python.core.impl import computation_building_blocks
from tensorflow_federated.python.core.impl import intrinsic_defs
from tensorflow_federated.python.core.impl import type_utils
from tensorflow_federated.python.core.impl import value_impl
def zip_two_tuple(input_val, context_stack):
  """Helper function to perform 2-tuple at a time zipping.

  Takes 2-tuple of federated values and returns federated 2-tuple of values.

  Args:
    input_val: 2-tuple TFF `Value` of `NamedTuple` type, whose elements must be
      `FederatedTypes` with the same placement.
    context_stack: The context stack to use, as in `impl.value_impl.to_value`.

  Returns:
    TFF `Value` of `FederatedType` with member of 2-tuple `NamedTuple` type.

  Raises:
    TypeError: If `input_val` is not a named tuple of federated values placed
      at SERVER or CLIENTS.
    ValueError: If `input_val` is not a 2-tuple.
  """
  py_typecheck.check_type(input_val, value_base.Value)
  py_typecheck.check_type(input_val.type_signature,
                          computation_types.NamedTupleType)
  # Validate the arity before indexing element 0 below, so that an empty
  # tuple raises the intended ValueError rather than an index error.
  num_elements = len(anonymous_tuple.to_elements(input_val.type_signature))
  if num_elements != 2:
    raise ValueError('The argument of zip_two_tuple must be a 2-tuple, '
                     'not an {}-tuple'.format(num_elements))
  py_typecheck.check_type(input_val[0].type_signature,
                          computation_types.FederatedType)
  zip_uris = {
      placements.CLIENTS: intrinsic_defs.FEDERATED_ZIP_AT_CLIENTS.uri,
      placements.SERVER: intrinsic_defs.FEDERATED_ZIP_AT_SERVER.uri,
  }
  # Zipping at SERVER yields an all-equal value; at CLIENTS it does not.
  zip_all_equal = {
      placements.CLIENTS: False,
      placements.SERVER: True,
  }
  output_placement = input_val[0].type_signature.placement
  if output_placement not in zip_uris:
    raise TypeError('The argument must have components placed at SERVER or '
                    'CLIENTS')
  output_all_equal_bit = zip_all_equal[output_placement]
  # Both elements must share the output placement.
  for elem in input_val:
    type_utils.check_federated_value_placement(elem, output_placement)
  result_type = computation_types.FederatedType(
      [(name, e.member)
       for name, e in anonymous_tuple.to_elements(input_val.type_signature)],
      output_placement, output_all_equal_bit)

  def _adjust_all_equal_bit(x):
    return computation_types.FederatedType(x.member, x.placement,
                                           output_all_equal_bit)

  # Normalize the all_equal bit of every element to match the output's.
  adjusted_input_type = computation_types.NamedTupleType([
      (k, _adjust_all_equal_bit(v)) if k else _adjust_all_equal_bit(v)
      for k, v in anonymous_tuple.to_elements(input_val.type_signature)
  ])
  intrinsic = value_impl.ValueImpl(
      computation_building_blocks.Intrinsic(
          zip_uris[output_placement],
          computation_types.FunctionType(adjusted_input_type, result_type)),
      context_stack)
  return intrinsic(input_val)
def flatten_first_index(apply_fn, type_to_add, context_stack):
  """Returns a value `(arg -> APPEND(apply_fn(arg[0]), arg[1]))`.

  In the above, `APPEND(a,b)` refers to appending element b to tuple a.

  Constructs a Value of a TFF functional type that:

  1. Takes as argument a 2-element tuple `(x, y)` of TFF type
     `[apply_fn.type_signature.parameter, type_to_add]`.

  2. Transforms the 1st element `x` of this 2-tuple by applying `apply_fn`,
     producing a result `z` that must be a TFF tuple (e.g, as a result of
     flattening `x`).

  3. Leaves the 2nd element `y` of the argument 2-tuple unchanged.

  4. Returns the result of appending the unchanged `y` at the end of the
     tuple `z` returned by `apply_fn`.

  Args:
    apply_fn: TFF `Value` of type_signature `FunctionType`, a function taking
      TFF `Value`s to `Value`s of type `NamedTupleType`.
    type_to_add: 2-tuple specifying name and TFF type of arg[1]. Name can be
      `None` or `string`.
    context_stack: The context stack to use, as in `impl.value_impl.to_value`.

  Returns:
    TFF `Value` of `FunctionType`, taking 2-tuples to N-tuples, which calls
      `apply_fn` on the first index of its argument, appends the second
      index to the resulting (N-1)-tuple, then returns the N-tuple thus
      created.

  Raises:
    ValueError: If `type_to_add` is not a 2-tuple.
  """
  py_typecheck.check_type(apply_fn, value_base.Value)
  py_typecheck.check_type(apply_fn.type_signature,
                          computation_types.FunctionType)
  py_typecheck.check_type(apply_fn.type_signature.result,
                          computation_types.NamedTupleType)
  py_typecheck.check_type(type_to_add, tuple)
  if len(type_to_add) != 2:
    # NOTE: message previously referred to `computationtypes.to_type`; the
    # module is `computation_types`.
    raise ValueError('Please pass a 2-tuple as type_to_add to '
                     'flatten_first_index, with first index name or None '
                     'and second index instance of `computation_types.Type` '
                     'or something convertible to one by '
                     '`computation_types.to_type`.')
  prev_param_type = apply_fn.type_signature.parameter
  # Reference to the (x, y) argument of the lambda being constructed.
  inputs = value_impl.to_value(
      computation_building_blocks.Reference(
          'inputs',
          computation_types.NamedTupleType([prev_param_type, type_to_add])),
      None, context_stack)
  intermediate = apply_fn(inputs[0])
  # Element names come from apply_fn's result type, plus type_to_add's name.
  full_type_spec = anonymous_tuple.to_elements(
      apply_fn.type_signature.result) + [type_to_add]
  named_values = [
      (full_type_spec[k][0], intermediate[k]) for k in range(len(intermediate))
  ] + [(full_type_spec[-1][0], inputs[1])]
  new_elements = value_impl.to_value(
      anonymous_tuple.AnonymousTuple(named_values),
      type_spec=full_type_spec,
      context_stack=context_stack)
  return value_impl.to_value(
      computation_building_blocks.Lambda(
          'inputs', inputs.type_signature,
          value_impl.ValueImpl.get_comp(new_elements)), None, context_stack)
def get_curried(fn):
  """Returns a curried version of function `fn` that takes a parameter tuple.

  For functions `fn` of types <T1,T2,....,Tn> -> U, the result is a function
  of the form T1 -> (T2 -> (T3 -> .... (Tn -> U) ... )).

  NOTE: No attempt is made at avoiding naming conflicts in cases where `fn`
  contains references. The arguments of the curriend function are named `argN`
  with `N` starting at 0.

  Args:
    fn: A value of a functional TFF type.

  Returns:
    A value that represents the curried form of `fn`.
  """
  py_typecheck.check_type(fn, value_base.Value)
  py_typecheck.check_type(fn.type_signature, computation_types.FunctionType)
  py_typecheck.check_type(fn.type_signature.parameter,
                          computation_types.NamedTupleType)
  # One reference per element of fn's parameter tuple, named arg0..argN-1.
  arg_types = anonymous_tuple.to_elements(fn.type_signature.parameter)
  arg_refs = [
      computation_building_blocks.Reference('arg{}'.format(i), arg_type)
      for i, (_, arg_type) in enumerate(arg_types)
  ]
  # Innermost expression: invoke fn on the tuple of all argument references.
  curried = computation_building_blocks.Call(
      value_impl.ValueImpl.get_comp(fn),
      computation_building_blocks.Tuple(arg_refs))
  # Wrap one lambda per argument, innermost (argN-1) first.
  for ref in reversed(arg_refs):
    curried = computation_building_blocks.Lambda(
        ref.name, ref.type_signature, curried)
  return value_impl.ValueImpl(
      curried, value_impl.ValueImpl.get_context_stack(fn))
| 42.572917 | 80 | 0.722535 |
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from six.moves import range
from tensorflow_federated.python.common_libs import anonymous_tuple
from tensorflow_federated.python.common_libs import py_typecheck
from tensorflow_federated.python.core.api import computation_types
from tensorflow_federated.python.core.api import placements
from tensorflow_federated.python.core.api import value_base
from tensorflow_federated.python.core.impl import computation_building_blocks
from tensorflow_federated.python.core.impl import intrinsic_defs
from tensorflow_federated.python.core.impl import type_utils
from tensorflow_federated.python.core.impl import value_impl
def zip_two_tuple(input_val, context_stack):
  """Zips a 2-tuple of federated values into a federated 2-tuple.

  Args:
    input_val: 2-tuple TFF `Value` of `NamedTuple` type whose elements are
      federated values sharing the same placement.
    context_stack: The context stack to use, as in `impl.value_impl.to_value`.

  Returns:
    TFF `Value` of `FederatedType` whose member is a 2-tuple.
  """
  py_typecheck.check_type(input_val, value_base.Value)
  py_typecheck.check_type(input_val.type_signature,
                          computation_types.NamedTupleType)
  py_typecheck.check_type(input_val[0].type_signature,
                          computation_types.FederatedType)
  zip_uris = {
      placements.CLIENTS: intrinsic_defs.FEDERATED_ZIP_AT_CLIENTS.uri,
      placements.SERVER: intrinsic_defs.FEDERATED_ZIP_AT_SERVER.uri,
  }
  # Zipping at SERVER yields an all-equal value; at CLIENTS it does not.
  zip_all_equal = {
      placements.CLIENTS: False,
      placements.SERVER: True,
  }
  output_placement = input_val[0].type_signature.placement
  if output_placement not in zip_uris:
    raise TypeError('The argument must have components placed at SERVER or '
                    'CLIENTS')
  output_all_equal_bit = zip_all_equal[output_placement]
  # Both elements must share the output placement.
  for elem in input_val:
    type_utils.check_federated_value_placement(elem, output_placement)
  num_elements = len(anonymous_tuple.to_elements(input_val.type_signature))
  if num_elements != 2:
    raise ValueError('The argument of zip_two_tuple must be a 2-tuple, '
                     'not an {}-tuple'.format(num_elements))
  result_type = computation_types.FederatedType(
      [(name, e.member)
       for name, e in anonymous_tuple.to_elements(input_val.type_signature)],
      output_placement, output_all_equal_bit)
  def _adjust_all_equal_bit(x):
    # Normalize an element's all_equal bit to match the output's.
    return computation_types.FederatedType(x.member, x.placement,
                                           output_all_equal_bit)
  adjusted_input_type = computation_types.NamedTupleType([
      (k, _adjust_all_equal_bit(v)) if k else _adjust_all_equal_bit(v)
      for k, v in anonymous_tuple.to_elements(input_val.type_signature)
  ])
  intrinsic = value_impl.ValueImpl(
      computation_building_blocks.Intrinsic(
          zip_uris[output_placement],
          computation_types.FunctionType(adjusted_input_type, result_type)),
      context_stack)
  return intrinsic(input_val)
def flatten_first_index(apply_fn, type_to_add, context_stack):
  """Returns a value `(arg -> APPEND(apply_fn(arg[0]), arg[1]))`.

  Builds a TFF function that takes a 2-tuple `(x, y)`, applies `apply_fn`
  to `x` (producing a named tuple), and appends the unchanged `y` to that
  result.

  Args:
    apply_fn: TFF `Value` of `FunctionType` returning a `NamedTupleType`.
    type_to_add: 2-tuple of (name or None, TFF type) describing arg[1].
    context_stack: The context stack to use, as in `impl.value_impl.to_value`.

  Returns:
    TFF `Value` of `FunctionType` taking 2-tuples to N-tuples.
  """
  py_typecheck.check_type(apply_fn, value_base.Value)
  py_typecheck.check_type(apply_fn.type_signature,
                          computation_types.FunctionType)
  py_typecheck.check_type(apply_fn.type_signature.result,
                          computation_types.NamedTupleType)
  py_typecheck.check_type(type_to_add, tuple)
  if len(type_to_add) != 2:
    raise ValueError('Please pass a 2-tuple as type_to_add to '
                     'flatten_first_index, with first index name or None '
                     'and second index instance of `computation_types.Type` '
                     'or something convertible to one by '
                     '`computationtypes.to_type`.')
  prev_param_type = apply_fn.type_signature.parameter
  # Reference to the (x, y) argument of the lambda being constructed.
  inputs = value_impl.to_value(
      computation_building_blocks.Reference(
          'inputs',
          computation_types.NamedTupleType([prev_param_type, type_to_add])),
      None, context_stack)
  intermediate = apply_fn(inputs[0])
  # Element names come from apply_fn's result type, plus type_to_add's name.
  full_type_spec = anonymous_tuple.to_elements(
      apply_fn.type_signature.result) + [type_to_add]
  named_values = [
      (full_type_spec[k][0], intermediate[k]) for k in range(len(intermediate))
  ] + [(full_type_spec[-1][0], inputs[1])]
  new_elements = value_impl.to_value(
      anonymous_tuple.AnonymousTuple(named_values),
      type_spec=full_type_spec,
      context_stack=context_stack)
  return value_impl.to_value(
      computation_building_blocks.Lambda(
          'inputs', inputs.type_signature,
          value_impl.ValueImpl.get_comp(new_elements)), None, context_stack)
def get_curried(fn):
  """Returns a curried version of function `fn` that takes a parameter tuple.

  For `fn` of type <T1,...,Tn> -> U, the result has the form
  T1 -> (T2 -> ... (Tn -> U)). Arguments are named `argN` starting at 0.

  Args:
    fn: A value of a functional TFF type.

  Returns:
    A value that represents the curried form of `fn`.
  """
  py_typecheck.check_type(fn, value_base.Value)
  py_typecheck.check_type(fn.type_signature, computation_types.FunctionType)
  py_typecheck.check_type(fn.type_signature.parameter,
                          computation_types.NamedTupleType)
  param_elements = anonymous_tuple.to_elements(fn.type_signature.parameter)
  # One reference per element of fn's parameter tuple, named arg0..argN-1.
  references = []
  for idx, (_, elem_type) in enumerate(param_elements):
    references.append(
        computation_building_blocks.Reference('arg{}'.format(idx), elem_type))
  # Innermost expression: invoke fn on the tuple of all argument references.
  result = computation_building_blocks.Call(
      value_impl.ValueImpl.get_comp(fn),
      computation_building_blocks.Tuple(references))
  # Wrap one lambda per argument, innermost (argN-1) first.
  for ref in references[::-1]:
    result = computation_building_blocks.Lambda(ref.name, ref.type_signature,
                                                result)
  return value_impl.ValueImpl(result,
                              value_impl.ValueImpl.get_context_stack(fn))
| true | true |
f720d050c37ee3d16536fe8dff1a9deb55d14284 | 5,304 | py | Python | backend/tests/baserow/contrib/database/field/test_number_field_type.py | jacklicn/baserow | 978d9462ededbaa96674a6653028ba19876ea273 | [
"MIT"
] | 1 | 2021-04-13T16:27:58.000Z | 2021-04-13T16:27:58.000Z | backend/tests/baserow/contrib/database/field/test_number_field_type.py | jacklicn/baserow | 978d9462ededbaa96674a6653028ba19876ea273 | [
"MIT"
] | null | null | null | backend/tests/baserow/contrib/database/field/test_number_field_type.py | jacklicn/baserow | 978d9462ededbaa96674a6653028ba19876ea273 | [
"MIT"
] | null | null | null | import pytest
from decimal import Decimal
from baserow.contrib.database.fields.handler import FieldHandler
from baserow.contrib.database.fields.registries import field_type_registry
@pytest.mark.django_db
@pytest.mark.parametrize(
    "expected,field_kwargs",
    [
        (
            [
                9223372036854775807, 100, 100, 101, 0, 0, 0, 0, None, None, None, None,
                None
            ],
            {'number_type': 'INTEGER', 'number_negative': False}
        ),
        (
            [9223372036854775807, 100, 100, 101, -9223372036854775808, -100, -100, -101,
             None, None, None, None, None],
            {'number_type': 'INTEGER', 'number_negative': True}
        ),
        (
            [
                Decimal('9223372036854775807.0'), Decimal('100.0'), Decimal('100.2'),
                Decimal('100.6'), Decimal('0.0'), Decimal('0.0'), Decimal('0.0'),
                Decimal('0.0'), None, None, None, None, None
            ],
            {
                'number_type': 'DECIMAL', 'number_negative': False,
                'number_decimal_places': 1
            }
        ),
        (
            [
                Decimal('9223372036854775807.000'), Decimal('100.000'),
                Decimal('100.220'), Decimal('100.600'),
                Decimal('-9223372036854775808.0'), Decimal('-100.0'),
                Decimal('-100.220'), Decimal('-100.600'), None, None, None, None, None
            ],
            {
                'number_type': 'DECIMAL', 'number_negative': True,
                'number_decimal_places': 3
            }
        )
    ]
)
def test_alter_number_field_column_type(expected, field_kwargs, data_fixture):
    """Converting a text field to a number field converts existing row values."""
    user = data_fixture.create_user()
    table = data_fixture.create_database_table(user=user)
    text_field = data_fixture.create_text_field(table=table, order=1)
    handler = FieldHandler()
    text_field = handler.update_field(user=user, field=text_field, name='Text field')
    model = table.get_model()
    # A mix of valid integers/decimals and garbage strings; the `expected`
    # parameter lists what each should become after conversion.
    raw_values = [
        '9223372036854775807',
        '100',
        '100.22',
        '100.59999',
        '-9223372036854775808',
        '-100',
        '-100.22',
        '-100.5999',
        '100.59.99',
        '-100.59.99',
        '100TEST100.10',
        '!@#$%%^^&&^^%$$',
        '!@#$%%^^5.2&&^^%$$',
    ]
    for raw in raw_values:
        model.objects.create(**{f'field_{text_field.id}': raw})
    # Change the field type to a number and test if the values have been changed.
    number_field = handler.update_field(user=user, field=text_field,
                                        new_type_name='number', **field_kwargs)
    model = table.get_model()
    for row, expected_value in zip(model.objects.all(), expected):
        assert getattr(row, f'field_{number_field.id}') == expected_value
@pytest.mark.django_db
def test_alter_number_field_column_type_negative(data_fixture):
    """Disallowing negatives on a number field zeroes existing negative values."""
    user = data_fixture.create_user()
    table = data_fixture.create_database_table(user=user)
    integer_field = data_fixture.create_number_field(table=table, order=1,
                                                     number_negative=True)
    decimal_field = data_fixture.create_number_field(table=table, order=2,
                                                     number_type='DECIMAL',
                                                     number_negative=True,
                                                     number_decimal_places=2)
    model = table.get_model()
    model.objects.create(**{
        f'field_{integer_field.id}': -10,
        f'field_{decimal_field.id}': Decimal('-10.10'),
    })
    # Flip both fields so that negative numbers are no longer allowed.
    handler = FieldHandler()
    integer_field = handler.update_field(user=user, field=integer_field,
                                         number_negative=False)
    decimal_field = handler.update_field(user=user, field=decimal_field,
                                         number_negative=False)
    row = table.get_model().objects.all()[0]
    assert getattr(row, f'field_{integer_field.id}') == 0
    assert getattr(row, f'field_{decimal_field.id}') == 0.00
@pytest.mark.django_db
def test_import_export_number_field(data_fixture):
    """A number field's settings survive an export/import round trip."""
    source_field = data_fixture.create_number_field(
        name='Number field',
        number_type='DECIMAL',
        number_negative=True,
        number_decimal_places=2,
    )
    field_type = field_type_registry.get_by_model(source_field)
    serialized = field_type.export_serialized(source_field)
    imported_field = field_type.import_serialized(source_field.table, serialized, {})
    assert imported_field.number_type == source_field.number_type
    assert imported_field.number_negative == source_field.number_negative
    assert imported_field.number_decimal_places == source_field.number_decimal_places
| 40.181818 | 88 | 0.601244 | import pytest
from decimal import Decimal
from baserow.contrib.database.fields.handler import FieldHandler
from baserow.contrib.database.fields.registries import field_type_registry
@pytest.mark.django_db
@pytest.mark.parametrize(
"expected,field_kwargs",
[
(
[
9223372036854775807, 100, 100, 101, 0, 0, 0, 0, None, None, None, None,
None
],
{'number_type': 'INTEGER', 'number_negative': False}
),
(
[9223372036854775807, 100, 100, 101, -9223372036854775808, -100, -100, -101,
None, None, None, None, None],
{'number_type': 'INTEGER', 'number_negative': True}
),
(
[
Decimal('9223372036854775807.0'), Decimal('100.0'), Decimal('100.2'),
Decimal('100.6'), Decimal('0.0'), Decimal('0.0'), Decimal('0.0'),
Decimal('0.0'), None, None, None, None, None
],
{
'number_type': 'DECIMAL', 'number_negative': False,
'number_decimal_places': 1
}
),
(
[
Decimal('9223372036854775807.000'), Decimal('100.000'),
Decimal('100.220'), Decimal('100.600'),
Decimal('-9223372036854775808.0'), Decimal('-100.0'),
Decimal('-100.220'), Decimal('-100.600'), None, None, None, None, None
],
{
'number_type': 'DECIMAL', 'number_negative': True,
'number_decimal_places': 3
}
)
]
)
def test_alter_number_field_column_type(expected, field_kwargs, data_fixture):
user = data_fixture.create_user()
table = data_fixture.create_database_table(user=user)
field = data_fixture.create_text_field(table=table, order=1)
handler = FieldHandler()
field = handler.update_field(user=user, field=field, name='Text field')
model = table.get_model()
model.objects.create(**{f'field_{field.id}': '9223372036854775807'})
model.objects.create(**{f'field_{field.id}': '100'})
model.objects.create(**{f'field_{field.id}': '100.22'})
model.objects.create(**{f'field_{field.id}': '100.59999'})
model.objects.create(**{f'field_{field.id}': '-9223372036854775808'})
model.objects.create(**{f'field_{field.id}': '-100'})
model.objects.create(**{f'field_{field.id}': '-100.22'})
model.objects.create(**{f'field_{field.id}': '-100.5999'})
model.objects.create(**{f'field_{field.id}': '100.59.99'})
model.objects.create(**{f'field_{field.id}': '-100.59.99'})
model.objects.create(**{f'field_{field.id}': '100TEST100.10'})
model.objects.create(**{f'field_{field.id}': '!@#$%%^^&&^^%$$'})
model.objects.create(**{f'field_{field.id}': '!@#$%%^^5.2&&^^%$$'})
field = handler.update_field(user=user, field=field, new_type_name='number',
**field_kwargs)
model = table.get_model()
rows = model.objects.all()
for index, row in enumerate(rows):
assert getattr(row, f'field_{field.id}') == expected[index]
@pytest.mark.django_db
def test_alter_number_field_column_type_negative(data_fixture):
    """Disallowing negatives on a number field zeroes existing negative values."""
    user = data_fixture.create_user()
    table = data_fixture.create_database_table(user=user)
    integer_field = data_fixture.create_number_field(table=table, order=1,
                                                     number_negative=True)
    decimal_field = data_fixture.create_number_field(table=table, order=2,
                                                     number_type='DECIMAL',
                                                     number_negative=True,
                                                     number_decimal_places=2)
    model = table.get_model()
    model.objects.create(**{
        f'field_{integer_field.id}': -10,
        f'field_{decimal_field.id}': Decimal('-10.10'),
    })
    # Flip both fields so that negative numbers are no longer allowed.
    handler = FieldHandler()
    integer_field = handler.update_field(user=user, field=integer_field,
                                         number_negative=False)
    decimal_field = handler.update_field(user=user, field=decimal_field,
                                         number_negative=False)
    row = table.get_model().objects.all()[0]
    assert getattr(row, f'field_{integer_field.id}') == 0
    assert getattr(row, f'field_{decimal_field.id}') == 0.00
@pytest.mark.django_db
def test_import_export_number_field(data_fixture):
    """A number field's settings survive an export/import round trip."""
    source_field = data_fixture.create_number_field(
        name='Number field',
        number_type='DECIMAL',
        number_negative=True,
        number_decimal_places=2,
    )
    field_type = field_type_registry.get_by_model(source_field)
    serialized = field_type.export_serialized(source_field)
    imported_field = field_type.import_serialized(source_field.table, serialized, {})
    assert imported_field.number_type == source_field.number_type
    assert imported_field.number_negative == source_field.number_negative
    assert imported_field.number_decimal_places == source_field.number_decimal_places
| true | true |
f720d05559826b7b3e8260bdfa239a1cb56c9a6c | 4,465 | py | Python | generated-libraries/python/netapp/iscsi/iscsi_received_stats_info.py | radekg/netapp-ontap-lib-get | 6445ebb071ec147ea82a486fbe9f094c56c5c40d | [
"MIT"
] | 2 | 2017-03-28T15:31:26.000Z | 2018-08-16T22:15:18.000Z | generated-libraries/python/netapp/iscsi/iscsi_received_stats_info.py | radekg/netapp-ontap-lib-get | 6445ebb071ec147ea82a486fbe9f094c56c5c40d | [
"MIT"
] | null | null | null | generated-libraries/python/netapp/iscsi/iscsi_received_stats_info.py | radekg/netapp-ontap-lib-get | 6445ebb071ec147ea82a486fbe9f094c56c5c40d | [
"MIT"
] | null | null | null | from netapp.netapp_object import NetAppObject
class IscsiReceivedStatsInfo(NetAppObject):
    """
    Counts for PDUs received.
    """
    # Each counter defaults to None ("not reported").  Setters validate
    # non-None values via NetAppObject.validate before storing them.
    # `is not None` (not `!= None`) is used so identity, not __ne__, decides,
    # and falsy counts such as 0 are still stored.
    _data_out = None
    @property
    def data_out(self):
        """
        Count of data out requests.
        """
        return self._data_out
    @data_out.setter
    def data_out(self, val):
        if val is not None:
            self.validate('data_out', val)
            self._data_out = val
    _scsi_task_mgt_cmd = None
    @property
    def scsi_task_mgt_cmd(self):
        """
        Count of SCSI task management commands.
        """
        return self._scsi_task_mgt_cmd
    @scsi_task_mgt_cmd.setter
    def scsi_task_mgt_cmd(self, val):
        if val is not None:
            self.validate('scsi_task_mgt_cmd', val)
            self._scsi_task_mgt_cmd = val
    _login_req = None
    @property
    def login_req(self):
        """
        Count of login requests.
        """
        return self._login_req
    @login_req.setter
    def login_req(self, val):
        if val is not None:
            self.validate('login_req', val)
            self._login_req = val
    _unknown = None
    @property
    def unknown(self):
        """
        Count of unknown PDUs.
        """
        return self._unknown
    @unknown.setter
    def unknown(self, val):
        if val is not None:
            self.validate('unknown', val)
            self._unknown = val
    _nop_out = None
    @property
    def nop_out(self):
        """
        Count of NOP Out.
        """
        return self._nop_out
    @nop_out.setter
    def nop_out(self, val):
        if val is not None:
            self.validate('nop_out', val)
            self._nop_out = val
    _scsi_cmd = None
    @property
    def scsi_cmd(self):
        """
        Count of SCSI commands.
        """
        return self._scsi_cmd
    @scsi_cmd.setter
    def scsi_cmd(self, val):
        if val is not None:
            self.validate('scsi_cmd', val)
            self._scsi_cmd = val
    _snack = None
    @property
    def snack(self):
        """
        Count of SNACK requests.
        """
        return self._snack
    @snack.setter
    def snack(self, val):
        if val is not None:
            self.validate('snack', val)
            self._snack = val
    _text_req = None
    @property
    def text_req(self):
        """
        Count of text requests.
        """
        return self._text_req
    @text_req.setter
    def text_req(self, val):
        if val is not None:
            self.validate('text_req', val)
            self._text_req = val
    _total = None
    @property
    def total(self):
        """
        Total PDUs received.
        """
        return self._total
    @total.setter
    def total(self, val):
        if val is not None:
            self.validate('total', val)
            self._total = val
    _logout_req = None
    @property
    def logout_req(self):
        """
        Count of logout requests.
        """
        return self._logout_req
    @logout_req.setter
    def logout_req(self, val):
        if val is not None:
            self.validate('logout_req', val)
            self._logout_req = val
    @staticmethod
    def get_api_name():
        """API element name for this object."""
        return "iscsi-received-stats-info"
    @staticmethod
    def get_desired_attrs():
        """Attribute names (API spelling) requested from the server."""
        return [
            'data-out',
            'scsi-task-mgt-cmd',
            'login-req',
            'unknown',
            'nop-out',
            'scsi-cmd',
            'snack',
            'text-req',
            'total',
            'logout-req',
        ]
    def describe_properties(self):
        """Per-property metadata: value class, list-ness and requiredness."""
        return {
            'data_out': { 'class': int, 'is_list': False, 'required': 'required' },
            'scsi_task_mgt_cmd': { 'class': int, 'is_list': False, 'required': 'required' },
            'login_req': { 'class': int, 'is_list': False, 'required': 'required' },
            'unknown': { 'class': int, 'is_list': False, 'required': 'required' },
            'nop_out': { 'class': int, 'is_list': False, 'required': 'required' },
            'scsi_cmd': { 'class': int, 'is_list': False, 'required': 'required' },
            'snack': { 'class': int, 'is_list': False, 'required': 'required' },
            'text_req': { 'class': int, 'is_list': False, 'required': 'required' },
            'total': { 'class': int, 'is_list': False, 'required': 'required' },
            'logout_req': { 'class': int, 'is_list': False, 'required': 'required' },
        }
| 26.264706 | 92 | 0.520717 | from netapp.netapp_object import NetAppObject
class IscsiReceivedStatsInfo(NetAppObject):
    """
    Counts for PDUs received.
    """
    # Each counter defaults to None ("not reported").  Setters validate
    # non-None values via NetAppObject.validate before storing them.
    # `is not None` (not `!= None`) is used so identity, not __ne__, decides,
    # and falsy counts such as 0 are still stored.
    _data_out = None
    @property
    def data_out(self):
        """
        Count of data out requests.
        """
        return self._data_out
    @data_out.setter
    def data_out(self, val):
        if val is not None:
            self.validate('data_out', val)
            self._data_out = val
    _scsi_task_mgt_cmd = None
    @property
    def scsi_task_mgt_cmd(self):
        """
        Count of SCSI task management commands.
        """
        return self._scsi_task_mgt_cmd
    @scsi_task_mgt_cmd.setter
    def scsi_task_mgt_cmd(self, val):
        if val is not None:
            self.validate('scsi_task_mgt_cmd', val)
            self._scsi_task_mgt_cmd = val
    _login_req = None
    @property
    def login_req(self):
        """
        Count of login requests.
        """
        return self._login_req
    @login_req.setter
    def login_req(self, val):
        if val is not None:
            self.validate('login_req', val)
            self._login_req = val
    _unknown = None
    @property
    def unknown(self):
        """
        Count of unknown PDUs.
        """
        return self._unknown
    @unknown.setter
    def unknown(self, val):
        if val is not None:
            self.validate('unknown', val)
            self._unknown = val
    _nop_out = None
    @property
    def nop_out(self):
        """
        Count of NOP Out.
        """
        return self._nop_out
    @nop_out.setter
    def nop_out(self, val):
        if val is not None:
            self.validate('nop_out', val)
            self._nop_out = val
    _scsi_cmd = None
    @property
    def scsi_cmd(self):
        """
        Count of SCSI commands.
        """
        return self._scsi_cmd
    @scsi_cmd.setter
    def scsi_cmd(self, val):
        if val is not None:
            self.validate('scsi_cmd', val)
            self._scsi_cmd = val
    _snack = None
    @property
    def snack(self):
        """
        Count of SNACK requests.
        """
        return self._snack
    @snack.setter
    def snack(self, val):
        if val is not None:
            self.validate('snack', val)
            self._snack = val
    _text_req = None
    @property
    def text_req(self):
        """
        Count of text requests.
        """
        return self._text_req
    @text_req.setter
    def text_req(self, val):
        if val is not None:
            self.validate('text_req', val)
            self._text_req = val
    _total = None
    @property
    def total(self):
        """
        Total PDUs received.
        """
        return self._total
    @total.setter
    def total(self, val):
        if val is not None:
            self.validate('total', val)
            self._total = val
    _logout_req = None
    @property
    def logout_req(self):
        """
        Count of logout requests.
        """
        return self._logout_req
    @logout_req.setter
    def logout_req(self, val):
        if val is not None:
            self.validate('logout_req', val)
            self._logout_req = val
    @staticmethod
    def get_api_name():
        """API element name for this object."""
        return "iscsi-received-stats-info"
    @staticmethod
    def get_desired_attrs():
        """Attribute names (API spelling) requested from the server."""
        return [
            'data-out',
            'scsi-task-mgt-cmd',
            'login-req',
            'unknown',
            'nop-out',
            'scsi-cmd',
            'snack',
            'text-req',
            'total',
            'logout-req',
        ]
    def describe_properties(self):
        """Per-property metadata: value class, list-ness and requiredness."""
        return {
            'data_out': { 'class': int, 'is_list': False, 'required': 'required' },
            'scsi_task_mgt_cmd': { 'class': int, 'is_list': False, 'required': 'required' },
            'login_req': { 'class': int, 'is_list': False, 'required': 'required' },
            'unknown': { 'class': int, 'is_list': False, 'required': 'required' },
            'nop_out': { 'class': int, 'is_list': False, 'required': 'required' },
            'scsi_cmd': { 'class': int, 'is_list': False, 'required': 'required' },
            'snack': { 'class': int, 'is_list': False, 'required': 'required' },
            'text_req': { 'class': int, 'is_list': False, 'required': 'required' },
            'total': { 'class': int, 'is_list': False, 'required': 'required' },
            'logout_req': { 'class': int, 'is_list': False, 'required': 'required' },
        }
| true | true |
f720d09b09639cf12c6d88a9b93e2140d324a4fc | 6,209 | py | Python | data-analysis/analyze_E017+020.py | JakobHavtorn/es-rl | 30d81ad908a30e78d03c83d37454dbe8e05d1452 | [
"MIT"
] | 1 | 2021-09-03T17:54:14.000Z | 2021-09-03T17:54:14.000Z | data-analysis/analyze_E017+020.py | JakobHavtorn/es-rl | 30d81ad908a30e78d03c83d37454dbe8e05d1452 | [
"MIT"
] | null | null | null | data-analysis/analyze_E017+020.py | JakobHavtorn/es-rl | 30d81ad908a30e78d03c83d37454dbe8e05d1452 | [
"MIT"
] | null | null | null | import os
from distutils.dir_util import copy_tree
import warnings
import IPython
import matplotlib
import matplotlib.pyplot as plt
import numpy as np
import pandas as pd
import scipy as sp
import torch
from context import utils
import utils.filesystem as fs
import utils.plotting as plot
from utils.data_analysis import invert_signs, load_stats
from utils.misc import get_equal_dicts, length_of_longest
def create_plots(stats_list, keys_to_plot, groups, result_dir, include_val=True):
    """Plot grouped mean/std time series for each monitored key.

    For each key, only the runs whose series reached the maximum observed
    length (i.e. the finished runs) are included.  When `include_val` is True
    and a matching '<key>_val' column exists, the validation series (sparse,
    NaN elsewhere) are appended as extra groups labelled ', validation'.
    One PDF per key is written to `result_dir`.
    """
    n_keys = len(keys_to_plot)
    n_chars = len(str(n_keys))
    f = ' {:' + str(n_chars) + 'd}/{:' + str(n_chars) + 'd} monitored keys plotted'
    groups_org = groups.copy()
    for i_key, k in enumerate(keys_to_plot):
        # Get data and subset only those series that are done (those that
        # reached the maximum observed length).
        groups = groups_org.copy()
        list_of_series = [s[k].tolist() for s in stats_list if k in s]
        list_of_genera = [s['generations'].tolist() for s in stats_list if k in s]
        longest = length_of_longest(list_of_series)
        indices = [i for i, series in enumerate(list_of_series) if len(series) == longest]
        groups = groups[indices]
        list_of_series = [list_of_series[i] for i in indices]
        list_of_genera = [list_of_genera[i] for i in indices]
        # Validation series: x positions are the non-NaN generations.
        if include_val:
            val_k = k[:-4] + '_val'
            list_of_series_val = [s[val_k].tolist() for i, s in enumerate(stats_list)
                                  if val_k in s and i in indices]
            if len(list_of_series_val) != 0:
                list_of_genera.extend(
                    [np.where(~np.isnan(srs))[0].tolist() for srs in list_of_series_val])
                list_of_series_val = [np.array(srs) for srs in list_of_series_val]
                list_of_series.extend(
                    [srs[~np.isnan(srs)].tolist() for srs in list_of_series_val])
                groups_val = np.array([g + ', validation' for g in groups])
                groups = np.append(groups, groups_val)
        # A leftover debug hook (`if k is 'return_val': IPython.embed()`) was
        # removed here: it compared strings with `is` and could not trigger
        # reliably, and an interactive breakpoint does not belong here.
        # Sort all three lists consistently by group label.
        list_of_genera = [x for _, x in sorted(zip(groups.tolist(), list_of_genera))]
        list_of_series = [x for _, x in sorted(zip(groups.tolist(), list_of_series))]
        groups.sort()
        # Plot
        plot.timeseries_mean_grouped(list_of_genera, list_of_series, groups,
                                     xlabel='generations', ylabel=k,
                                     map_labels='supervised')
        if 'return' in k:
            plt.gca().set_ylim(0, 1.5)
        elif 'accuracy' in k:
            plt.gca().set_ylim(0.4, 1)
        plt.savefig(os.path.join(result_dir, k + '-all-series-mean-sd' + '.pdf'),
                    bbox_inches='tight')
        plt.close()
        # Progress indicator: overwrite the same line until the final key.
        end_char = '\n' if i_key + 1 == n_keys else '\r'
        print(f.format(i_key + 1, n_keys), end=end_char)
def get_directories(experiment_id):
    """Collect checkpoint directories for `experiment_id` and prepare outputs.

    Returns a tuple (directories, result_dir, dst_dir): the per-run checkpoint
    folders (monitoring/analysis folders excluded), the local analysis output
    prefix, and the Dropbox destination prefix used for the final copy.
    """
    this_file_dir_local = os.path.dirname(os.path.abspath(__file__))
    package_root_this_file = fs.get_parent(this_file_dir_local, 'es-rl')
    d = os.path.join(package_root_this_file, 'experiments', 'checkpoints', experiment_id)
    directories = [os.path.join(d, di) for di in os.listdir(d)
                   if os.path.isdir(os.path.join(d, di))]
    directories = [d for d in directories if 'monitoring' not in d and 'analysis' not in d]
    # Create result directories (a stray trailing comma previously turned the
    # first mkdir call into a throwaway 1-tuple; it has been removed).
    result_dir = os.path.join(d, str(experiment_id[:4]))
    dst_dir = '/home/jakob/Dropbox/Apps/ShareLaTeX/Master\'s Thesis/graphics/' + experiment_id[:4]
    if not os.path.exists(result_dir + '-bn-analysis'):
        os.mkdir(result_dir + '-bn-analysis')
    if not os.path.exists(result_dir + '-init-analysis'):
        os.mkdir(result_dir + '-init-analysis')
    return directories, result_dir, dst_dir
def load(experiment_id, optimizer):
    """Load each run's stats.csv and group runs by network variant.

    Classification is based on the network class name found in init.log:
    'MNISTNetNoInit' (default init, with BN), 'MNISTNetNoBN' (no BN, with
    Xavier-Glorot) or the full network (counted in both comparisons).

    NOTE(review): iterates the module-level `directories` list populated by
    `get_directories` rather than deriving it from `experiment_id`; confirm
    before reusing outside the __main__ flow below.
    """
    stats_init = []
    stats_bn = []
    groups_init = np.array([])
    groups_bn = np.array([])
    for d in directories:
        try:
            st = pd.read_csv(os.path.join(d, 'stats.csv'))
            with open(os.path.join(d, 'init.log'), 'r') as f:
                s = f.read()
        # Narrowed from a bare `except:` which also swallowed
        # KeyboardInterrupt/SystemExit and genuine bugs.
        except (OSError, pd.errors.ParserError):
            print("None in: " + d)
            continue
        if 'MNISTNetNoInit' in s:
            groups_init = np.append(groups_init, 'Default init' + optimizer)  # Has BN
            stats_init.append(st)
        elif 'MNISTNetNoBN' in s:
            groups_bn = np.append(groups_bn, 'No Batchnorm' + optimizer)  # Has Xavier Glorot
            stats_bn.append(st)
        else:
            groups_bn = np.append(groups_bn, 'Batchnorm' + optimizer)  # Has Xavier Glorot
            groups_init = np.append(groups_init, 'Xavier-Glorot' + optimizer)  # Has BN
            stats_init.append(st)
            stats_bn.append(st)
    return stats_init, stats_bn, groups_init, groups_bn
if __name__ == '__main__':
    # Ignore warnings from matplotlib
    warnings.filterwarnings("ignore", module="matplotlib")
    # Font setting
    matplotlib.rcParams.update({'font.size': 12})
    # Experiment IDs
    experiment_ids = ['E017-bn-init', 'E020-bn-init']
    # Optimizer labels (currently empty suffixes; SGD/ADAM variants kept for reference)
    # optimizers = [', SGD', ', ADAM']
    optimizers = ['', '']
    # Keys to analyze
    keys_to_plot = {'return_unp', 'return_avg', 'accuracy_unp', 'accuracy_avg', 'sigma'}
    # Analyze
    for experiment_id, optimizer in zip(experiment_ids, optimizers):
        # Get directories.  NOTE: `directories` is a module-level name that
        # load() reads as a global, so it must be assigned before load() runs.
        directories, result_dir, dst_dir = get_directories(experiment_id)
        if len(directories) == 0:
            print('No results for {}'.format(experiment_id))
            continue
        # Load data
        stats_init, stats_bn, groups_init, groups_bn = load(experiment_id, optimizer)
        # Flip series signs (see invert_signs), then plot both comparisons
        invert_signs(stats_init)
        invert_signs(stats_bn)
        create_plots(stats_init, keys_to_plot, groups_init, result_dir + '-init-analysis', include_val=True)
        create_plots(stats_bn, keys_to_plot, groups_bn, result_dir + '-bn-analysis', include_val=True)
        # Mirror the generated figures into the thesis graphics folder
        copy_tree(result_dir + '-init-analysis', dst_dir + '-init-analysis')
        copy_tree(result_dir + '-bn-analysis', dst_dir + '-bn-analysis')
| 42.82069 | 133 | 0.639394 | import os
from distutils.dir_util import copy_tree
import warnings
import IPython
import matplotlib
import matplotlib.pyplot as plt
import numpy as np
import pandas as pd
import scipy as sp
import torch
from context import utils
import utils.filesystem as fs
import utils.plotting as plot
from utils.data_analysis import invert_signs, load_stats
from utils.misc import get_equal_dicts, length_of_longest
def create_plots(stats_list, keys_to_plot, groups, result_dir, include_val=True):
    """Plot grouped mean/std time series for each monitored key.

    For each key, only the runs whose series reached the maximum observed
    length (i.e. the finished runs) are included.  When `include_val` is True
    and a matching '<key>_val' column exists, the validation series (sparse,
    NaN elsewhere) are appended as extra groups labelled ', validation'.
    One PDF per key is written to `result_dir`.
    """
    n_keys = len(keys_to_plot)
    n_chars = len(str(n_keys))
    f = ' {:' + str(n_chars) + 'd}/{:' + str(n_chars) + 'd} monitored keys plotted'
    groups_org = groups.copy()
    for i_key, k in enumerate(keys_to_plot):
        # Get data and subset only those series that are done (those that
        # reached the maximum observed length).
        groups = groups_org.copy()
        list_of_series = [s[k].tolist() for s in stats_list if k in s]
        list_of_genera = [s['generations'].tolist() for s in stats_list if k in s]
        longest = length_of_longest(list_of_series)
        indices = [i for i, series in enumerate(list_of_series) if len(series) == longest]
        groups = groups[indices]
        list_of_series = [list_of_series[i] for i in indices]
        list_of_genera = [list_of_genera[i] for i in indices]
        # Validation series: x positions are the non-NaN generations.
        if include_val:
            val_k = k[:-4] + '_val'
            list_of_series_val = [s[val_k].tolist() for i, s in enumerate(stats_list)
                                  if val_k in s and i in indices]
            if len(list_of_series_val) != 0:
                list_of_genera.extend(
                    [np.where(~np.isnan(srs))[0].tolist() for srs in list_of_series_val])
                list_of_series_val = [np.array(srs) for srs in list_of_series_val]
                list_of_series.extend(
                    [srs[~np.isnan(srs)].tolist() for srs in list_of_series_val])
                groups_val = np.array([g + ', validation' for g in groups])
                groups = np.append(groups, groups_val)
        # A leftover debug hook (`if k is 'return_val': IPython.embed()`) was
        # removed here: it compared strings with `is` and could not trigger
        # reliably, and an interactive breakpoint does not belong here.
        # Sort all three lists consistently by group label.
        list_of_genera = [x for _, x in sorted(zip(groups.tolist(), list_of_genera))]
        list_of_series = [x for _, x in sorted(zip(groups.tolist(), list_of_series))]
        groups.sort()
        # Plot
        plot.timeseries_mean_grouped(list_of_genera, list_of_series, groups,
                                     xlabel='generations', ylabel=k,
                                     map_labels='supervised')
        if 'return' in k:
            plt.gca().set_ylim(0, 1.5)
        elif 'accuracy' in k:
            plt.gca().set_ylim(0.4, 1)
        plt.savefig(os.path.join(result_dir, k + '-all-series-mean-sd' + '.pdf'),
                    bbox_inches='tight')
        plt.close()
        # Progress indicator: overwrite the same line until the final key.
        end_char = '\n' if i_key + 1 == n_keys else '\r'
        print(f.format(i_key + 1, n_keys), end=end_char)
def get_directories(experiment_id):
    """Collect checkpoint directories for `experiment_id` and prepare outputs.

    Returns a tuple (directories, result_dir, dst_dir): the per-run checkpoint
    folders (monitoring/analysis folders excluded), the local analysis output
    prefix, and the Dropbox destination prefix used for the final copy.
    """
    this_file_dir_local = os.path.dirname(os.path.abspath(__file__))
    package_root_this_file = fs.get_parent(this_file_dir_local, 'es-rl')
    d = os.path.join(package_root_this_file, 'experiments', 'checkpoints', experiment_id)
    directories = [os.path.join(d, di) for di in os.listdir(d)
                   if os.path.isdir(os.path.join(d, di))]
    directories = [d for d in directories if 'monitoring' not in d and 'analysis' not in d]
    # Create result directories (a stray trailing comma previously turned the
    # first mkdir call into a throwaway 1-tuple; it has been removed).
    result_dir = os.path.join(d, str(experiment_id[:4]))
    dst_dir = '/home/jakob/Dropbox/Apps/ShareLaTeX/Master\'s Thesis/graphics/' + experiment_id[:4]
    if not os.path.exists(result_dir + '-bn-analysis'):
        os.mkdir(result_dir + '-bn-analysis')
    if not os.path.exists(result_dir + '-init-analysis'):
        os.mkdir(result_dir + '-init-analysis')
    return directories, result_dir, dst_dir
def load(experiment_id, optimizer):
    """Load each run's stats.csv and group runs by network variant.

    Classification is based on the network class name found in init.log:
    'MNISTNetNoInit' (default init, with BN), 'MNISTNetNoBN' (no BN, with
    Xavier-Glorot) or the full network (counted in both comparisons).

    NOTE(review): iterates the module-level `directories` list populated by
    `get_directories` rather than deriving it from `experiment_id`; confirm
    before reusing outside the __main__ flow below.
    """
    stats_init = []
    stats_bn = []
    groups_init = np.array([])
    groups_bn = np.array([])
    for d in directories:
        try:
            st = pd.read_csv(os.path.join(d, 'stats.csv'))
            with open(os.path.join(d, 'init.log'), 'r') as f:
                s = f.read()
        # Narrowed from a bare `except:` which also swallowed
        # KeyboardInterrupt/SystemExit and genuine bugs.
        except (OSError, pd.errors.ParserError):
            print("None in: " + d)
            continue
        if 'MNISTNetNoInit' in s:
            groups_init = np.append(groups_init, 'Default init' + optimizer)  # Has BN
            stats_init.append(st)
        elif 'MNISTNetNoBN' in s:
            groups_bn = np.append(groups_bn, 'No Batchnorm' + optimizer)  # Has Xavier Glorot
            stats_bn.append(st)
        else:
            groups_bn = np.append(groups_bn, 'Batchnorm' + optimizer)  # Has Xavier Glorot
            groups_init = np.append(groups_init, 'Xavier-Glorot' + optimizer)  # Has BN
            stats_init.append(st)
            stats_bn.append(st)
    return stats_init, stats_bn, groups_init, groups_bn
if __name__ == '__main__':
    # Ignore warnings from matplotlib
    warnings.filterwarnings("ignore", module="matplotlib")
    # Font setting
    matplotlib.rcParams.update({'font.size': 12})
    # Experiment IDs
    experiment_ids = ['E017-bn-init', 'E020-bn-init']
    # Optimizer labels (currently empty suffixes; SGD/ADAM variants kept for reference)
    # optimizers = [', SGD', ', ADAM']
    optimizers = ['', '']
    # Keys to analyze
    keys_to_plot = {'return_unp', 'return_avg', 'accuracy_unp', 'accuracy_avg', 'sigma'}
    # Analyze
    for experiment_id, optimizer in zip(experiment_ids, optimizers):
        # Get directories.  NOTE: `directories` is a module-level name that
        # load() reads as a global, so it must be assigned before load() runs.
        directories, result_dir, dst_dir = get_directories(experiment_id)
        if len(directories) == 0:
            print('No results for {}'.format(experiment_id))
            continue
        # Load data
        stats_init, stats_bn, groups_init, groups_bn = load(experiment_id, optimizer)
        # Flip series signs (see invert_signs), then plot both comparisons
        invert_signs(stats_init)
        invert_signs(stats_bn)
        create_plots(stats_init, keys_to_plot, groups_init, result_dir + '-init-analysis', include_val=True)
        create_plots(stats_bn, keys_to_plot, groups_bn, result_dir + '-bn-analysis', include_val=True)
        # Mirror the generated figures into the thesis graphics folder
        copy_tree(result_dir + '-init-analysis', dst_dir + '-init-analysis')
        copy_tree(result_dir + '-bn-analysis', dst_dir + '-bn-analysis')
| true | true |
f720d1f5708dbc5ccf4ce7f998568b7bcfcee378 | 686 | py | Python | test/test_relay.py | steinwurf/kodo-simulations-python | f9d9bcce70adf1666cf8bac9f352fbbf640ca783 | [
"BSD-3-Clause"
] | 2 | 2017-12-09T20:41:02.000Z | 2022-01-10T23:23:01.000Z | test/test_relay.py | steinwurf/kodo-simulations-python | f9d9bcce70adf1666cf8bac9f352fbbf640ca783 | [
"BSD-3-Clause"
] | null | null | null | test/test_relay.py | steinwurf/kodo-simulations-python | f9d9bcce70adf1666cf8bac9f352fbbf640ca783 | [
"BSD-3-Clause"
] | 5 | 2016-10-12T12:18:59.000Z | 2022-01-10T23:23:55.000Z | #! /usr/bin/env python
# encoding: utf-8
import sys
sys.path.append('..')
sys.path.append('mock')
import unittest
from mock import Mock
import simulator.relay
class TestPacket(unittest.TestCase):
    """Unit tests for the simulator Relay node."""

    def test_instantiation(self):
        """A relay wires the same id into sender/receiver and keeps the decoder."""
        relay_id = "test_id"
        decoder = Mock(name="decoder_object")
        decoder.block_size = Mock(return_value=100)
        relay = simulator.relay.Relay(relay_id, {}, decoder)
        self.assertEqual(relay.sender.id, relay_id)
        self.assertEqual(relay.receiver.id, relay_id)
        self.assertEqual(relay.receiver.decoder, decoder)
# Allow running this test module directly: `python test_relay.py`.
if __name__ == '__main__':
    unittest.main()
| 23.655172 | 53 | 0.650146 |
import sys
sys.path.append('..')
sys.path.append('mock')
import unittest
from mock import Mock
import simulator.relay
class TestPacket(unittest.TestCase):
    """Unit tests for the simulator Relay node."""

    def test_instantiation(self):
        """A relay wires the same id into sender/receiver and keeps the decoder."""
        relay_id = "test_id"
        decoder = Mock(name="decoder_object")
        decoder.block_size = Mock(return_value=100)
        relay = simulator.relay.Relay(relay_id, {}, decoder)
        self.assertEqual(relay.sender.id, relay_id)
        self.assertEqual(relay.receiver.id, relay_id)
        self.assertEqual(relay.receiver.decoder, decoder)
# Allow running this test module directly: `python test_relay.py`.
if __name__ == '__main__':
    unittest.main()
| true | true |
f720d23a79090927f1bcc5cdbf04f6da46a364cb | 10,513 | py | Python | ui_automation_tests/step_defs/test_open_application.py | uktrade/lite-exporter-frontend | cf42ac37a21236486aa303c8935c44a7eba91ef5 | [
"MIT"
] | 3 | 2019-05-31T06:36:17.000Z | 2020-02-12T16:02:24.000Z | ui_automation_tests/step_defs/test_open_application.py | uktrade/lite-exporter-frontend | cf42ac37a21236486aa303c8935c44a7eba91ef5 | [
"MIT"
] | 33 | 2019-03-28T10:20:14.000Z | 2020-07-16T15:12:43.000Z | ui_automation_tests/step_defs/test_open_application.py | uktrade/lite-exporter-frontend | cf42ac37a21236486aa303c8935c44a7eba91ef5 | [
"MIT"
] | 1 | 2019-05-01T15:52:02.000Z | 2019-05-01T15:52:02.000Z | from pytest_bdd import scenarios, when, then, parsers
import ui_automation_tests.shared.tools.helpers as utils
from ui_automation_tests.pages.generic_application.task_list import TaskListPage
from ui_automation_tests.pages.open_application.country_contract_types import OpenApplicationCountryContractTypes
from ui_automation_tests.pages.open_application.country_contract_types_summary import (
OpenApplicationCountryContractTypesSummaryPage,
)
from ui_automation_tests.pages.exporter_hub_page import ExporterHubPage
from ui_automation_tests.pages.generic_application.ultimate_end_users import GenericApplicationUltimateEndUsers
from ui_automation_tests.shared import functions
from ui_automation_tests.conftest import (
enter_type_of_application,
enter_application_name,
enter_permanent_or_temporary,
choose_open_licence_category,
answer_firearms_question,
)
from ui_automation_tests.pages.apply_for_a_licence_page import ApplyForALicencePage
from ui_automation_tests.pages.open_application.countries import OpenApplicationCountriesPage
from ui_automation_tests.pages.open_application.goods_countries_page import GoodsCountriesPage
from ui_automation_tests.pages.open_application.goods_types import OpenApplicationGoodsTypesPage
from ui_automation_tests.pages.standard_application.goods import StandardApplicationGoodsPage
# Bind every scenario from both feature files to the step definitions below.
scenarios(
    "../features/submit_open_application.feature", "../features/edit_open_application.feature", strict_gherkin=False
)
@then(parsers.parse('I see my goods type added at position "{position}" with a description and a control code'))
def i_see_the_goods_types_list(driver, position, context):
    """The goods type at the given position shows the stored description and control code."""
    page = OpenApplicationGoodsTypesPage(driver)
    displayed_info = page.get_text_of_goods_type_info(int(position))
    for expected in (context.good_description, context.control_code):
        assert expected in displayed_info
@then(parsers.parse("I see a list of the preselected media products"))
def i_see_the_goods_types_list_media_oiel(driver, context):
goods_type_page = OpenApplicationGoodsTypesPage(driver)
goods_types = goods_type_page.get_number_of_goods()
assert len(goods_types) == 7
@then(parsers.parse("I see a list of the preselected cryptographic products"))
def i_see_the_goods_types_list_cryptographic_oiel(driver, context):
goods_type_page = OpenApplicationGoodsTypesPage(driver)
goods_types = goods_type_page.get_number_of_goods()
assert len(goods_types) == 4
@then("I should see a list of countries")
def i_should_see_a_list_of_countries(driver):
application_countries_list = OpenApplicationCountriesPage(driver)
page_countries = application_countries_list.get_countries_names()
assert len(page_countries) == 273
assert "United Kingdom" not in page_countries
@then("I should see a list of all countries that have been preselected")
def i_should_see_a_list_of_countries(driver):
application_countries_list = OpenApplicationCountriesPage(driver)
page_countries = application_countries_list.get_static_destinations_list()
assert len(page_countries) == 273
assert "United Kingdom" not in page_countries
@then("I should see a list of the countries permitted for a cryptographic OIEL")
def i_should_see_a_list_of_countries_cryptographic_oiel(driver):
application_countries_list = OpenApplicationCountriesPage(driver)
page_countries = application_countries_list.get_static_destinations_list()
assert len(page_countries) == 213
assert "United Kingdom" not in page_countries
@then("I should see the UK Continental Shelf as the only permitted destination")
def i_should_see_a_list_of_countries_uk_continental_shelf_oiel(driver):
    """A UK Continental Shelf OIEL has exactly one fixed destination."""
    destinations = OpenApplicationCountriesPage(driver).get_static_destinations_list()
    assert destinations == ["UK Continental Shelf"] or (
        len(destinations) == 1 and destinations[0] == "UK Continental Shelf"
    )
@when(parsers.parse('I select "{country}" from the country list'))
def i_select_country_from_the_country_list(driver, country):
    """Tick *country* and confirm its anchor link becomes visible."""
    OpenApplicationCountriesPage(driver).select_country(country)
    country_anchor = utils.find_element_by_href(driver, "#" + country)
    assert country_anchor.is_displayed()
@when(parsers.parse('I search for country "{country}"'))
def search_for_country(driver, country):
    """Type *country* into the destination search box."""
    countries_page = OpenApplicationCountriesPage(driver)
    countries_page.search_for_country(country)
@then(parsers.parse('only "{country}" is displayed in country list'))
def search_country_result(driver, country):
    """After searching, the visible country list collapses to exactly *country*."""
    visible = OpenApplicationCountriesPage(driver).get_text_of_countries_list()
    assert country == visible, "Country not searched correctly"
@when("I click select all countries")
def select_all_countries(driver):
    """Use the select-all control on the countries page."""
    OpenApplicationCountriesPage(driver).click_select_all()
@then("all checkboxes are selected")
def all_selected(driver):
    """The checked-box count must match the total box count."""
    countries_page = OpenApplicationCountriesPage(driver)
    total = countries_page.get_number_of_checkboxes(checked=False)
    checked = countries_page.get_number_of_checkboxes(checked=True)
    assert total == checked
@when("I select that I want to add the same sectors and contract types to all countries")
def select_yes_to_all_countries_with_the_same_contract_types(driver):
    """Choose the 'same contract types for every destination' radio option."""
    contract_types_page = OpenApplicationCountryContractTypes(driver)
    contract_types_page.select_same_contract_types_for_all_countries_radio_button()
@when("I select contract types for all countries")
def select_contract_types_for_all_countries(driver, context):
    """Tick three named contract types plus 'other' (with details) and submit.

    The chosen types are stashed on *context* so later steps can verify the
    destination summary against them.
    """
    page = OpenApplicationCountryContractTypes(driver)
    context.contract_types = [
        {"id": "Navy", "value": "Navy"},
        {
            "id": "Aircraft-manufacturers,-maintainers-or-operators",
            "value": "Aircraft manufacturers, maintainers or operators",
        },
        {"id": "Pharmaceutical-or-medical", "value": "Pharmaceutical or medical"},
    ]
    for contract_type in context.contract_types:
        page.select_contract_type(contract_type["id"])
    page.select_other_contract_type_and_fill_in_details()
    functions.click_submit(driver)
@then("I should see all countries and the chosen contract types on the destination summary list")
def i_should_see_destinations_summary_countries_contract_types(driver, context):
    """Summary lists all 273 destinations (UK excluded), each carrying every chosen contract type."""
    summary_page = OpenApplicationCountryContractTypesSummaryPage(driver)
    summary = summary_page.get_countries_with_respective_contract_types()
    assert len(summary) == 273
    assert "United Kingdom" not in summary
    for country_entry in summary:
        for chosen in context.contract_types:
            assert chosen["value"] in country_entry[1]
@then(
    "I should see the UK Continental Shelf as the only destination and the chosen contract types on the destination summary list"
)
def i_should_see_destinations_summary_uk_continental_shelf_contract_types(driver, context):
    """Summary lists UK Continental Shelf as the single destination with every chosen contract type."""
    summary_page = OpenApplicationCountryContractTypesSummaryPage(driver)
    summary = summary_page.get_countries_with_respective_contract_types()
    assert len(summary) == 1
    assert summary[0][0] == "UK Continental Shelf"
    for country_entry in summary:
        for chosen in context.contract_types:
            assert chosen["value"] in country_entry[1]
@when(parsers.parse('I "{assign_or_unassign}" all countries to all goods with link'))
def assign_all_with_link(driver, assign_or_unassign):
    """Use the select-all/deselect-all link; saving only happens on assign."""
    page = GoodsCountriesPage(driver)
    if assign_or_unassign != "assign":
        page.deselect_all_link()
    else:
        page.select_all_link()
        page.click_save()
@when("I click Add goods type button")
def click_goods_type_button(driver):
    """Open the add-goods-type form."""
    goods_page = OpenApplicationGoodsTypesPage(driver)
    goods_page.click_add_good_button()
@then(parsers.parse('I see all countries are "{assigned_or_unassigned}" to all goods'))
def see_all_or_no_selected(driver, assigned_or_unassigned):
    """Every country checkbox must match the expected assignment state."""
    page = GoodsCountriesPage(driver)
    if assigned_or_unassigned == "assigned":
        state_ok = page.all_selected()
    else:
        state_ok = page.all_deselected()
    assert state_ok
@when(parsers.parse('I create an open application of a "{export_type}" export type'))  # noqa
def create_open_app(driver, export_type, context):  # noqa
    """Drive the wizard to start a military OIEL with the given export type.

    Step order matters and mirrors the application wizard: licence type ->
    'oiel' application type -> military category -> permanent/temporary ->
    application name -> firearms question.
    """
    ExporterHubPage(driver).click_apply_for_a_licence()
    ApplyForALicencePage(driver).select_licence_type("export_licence")
    functions.click_submit(driver)
    enter_type_of_application(driver, "oiel", context)
    choose_open_licence_category(driver, "military", context)
    enter_permanent_or_temporary(driver, export_type, context)
    enter_application_name(driver, context)
    answer_firearms_question(driver)
@when(parsers.parse('I create an open application for an export licence of the "{licence_type}" licence type'))  # noqa
def create_open_app_of_specific_type(driver, licence_type, context):  # noqa
    """Start an OIEL of the given category.

    Military and UK Continental Shelf flows additionally ask for
    permanent/temporary export and the firearms question.
    """
    has_extra_steps = licence_type in ["military", "uk_continental_shelf"]
    ExporterHubPage(driver).click_apply_for_a_licence()
    ApplyForALicencePage(driver).select_licence_type("export_licence")
    functions.click_submit(driver)
    enter_type_of_application(driver, "oiel", context)
    choose_open_licence_category(driver, licence_type, context)
    if has_extra_steps:
        enter_permanent_or_temporary(driver, "permanent", context)
    enter_application_name(driver, context)
    if has_extra_steps:
        answer_firearms_question(driver)
@when("I click on the add button")
def i_click_on_the_add_button(driver):
    """Open the add-ultimate-recipient form."""
    end_users_page = GenericApplicationUltimateEndUsers(driver)
    end_users_page.click_add_ultimate_recipient_button()
@when("I remove a good type from the application")
def i_remove_a_good_from_the_application(driver):
    """Remove the first goods type via a JavaScript click.

    The JS click avoids obscured/intercepted-element failures that a native
    Selenium click can hit.

    NOTE(review): this uses StandardApplicationGoodsPage to find a *goods
    type* remove link in an open-application feature — confirm the page
    object is intentional rather than a copy-paste from the standard flow.
    """
    remove_good_link = StandardApplicationGoodsPage(driver).find_remove_goods_type_link()
    driver.execute_script("arguments[0].click();", remove_good_link)
@then("no goods types are left on the application")
def no_goods_types_are_left_on_the_application(driver):
    """Verify the goods-type remove link is gone, i.e. no goods types remain.

    Bug fix: the original ``assert (expr, None)`` asserted a two-element
    tuple, which is always truthy, so this step could never fail. The
    intended check (comparing the lookup result against None) is restored.
    """
    assert OpenApplicationGoodsTypesPage(driver).find_remove_goods_type_link() is None
@then(parsers.parse('I cannot see the sections "{sections}"'))  # noqa
def sections_did_not_appear_on_task_list(driver, sections):  # noqa
    """Each comma-separated section name must be absent from the task list."""
    for section_name in sections.split(", "):
        assert TaskListPage(driver).get_section(section_name) is None
| 44.54661 | 129 | 0.799106 | from pytest_bdd import scenarios, when, then, parsers
import ui_automation_tests.shared.tools.helpers as utils
from ui_automation_tests.pages.generic_application.task_list import TaskListPage
from ui_automation_tests.pages.open_application.country_contract_types import OpenApplicationCountryContractTypes
from ui_automation_tests.pages.open_application.country_contract_types_summary import (
OpenApplicationCountryContractTypesSummaryPage,
)
from ui_automation_tests.pages.exporter_hub_page import ExporterHubPage
from ui_automation_tests.pages.generic_application.ultimate_end_users import GenericApplicationUltimateEndUsers
from ui_automation_tests.shared import functions
from ui_automation_tests.conftest import (
enter_type_of_application,
enter_application_name,
enter_permanent_or_temporary,
choose_open_licence_category,
answer_firearms_question,
)
from ui_automation_tests.pages.apply_for_a_licence_page import ApplyForALicencePage
from ui_automation_tests.pages.open_application.countries import OpenApplicationCountriesPage
from ui_automation_tests.pages.open_application.goods_countries_page import GoodsCountriesPage
from ui_automation_tests.pages.open_application.goods_types import OpenApplicationGoodsTypesPage
from ui_automation_tests.pages.standard_application.goods import StandardApplicationGoodsPage
scenarios(
"../features/submit_open_application.feature", "../features/edit_open_application.feature", strict_gherkin=False
)
@then(parsers.parse('I see my goods type added at position "{position}" with a description and a control code'))
def i_see_the_goods_types_list(driver, position, context):
goods_type_page = OpenApplicationGoodsTypesPage(driver)
good_type = goods_type_page.get_text_of_goods_type_info(int(position))
assert context.good_description in good_type
assert context.control_code in good_type
@then(parsers.parse("I see a list of the preselected media products"))
def i_see_the_goods_types_list_media_oiel(driver, context):
goods_type_page = OpenApplicationGoodsTypesPage(driver)
goods_types = goods_type_page.get_number_of_goods()
assert len(goods_types) == 7
@then(parsers.parse("I see a list of the preselected cryptographic products"))
def i_see_the_goods_types_list_cryptographic_oiel(driver, context):
goods_type_page = OpenApplicationGoodsTypesPage(driver)
goods_types = goods_type_page.get_number_of_goods()
assert len(goods_types) == 4
@then("I should see a list of countries")
def i_should_see_a_list_of_countries(driver):
application_countries_list = OpenApplicationCountriesPage(driver)
page_countries = application_countries_list.get_countries_names()
assert len(page_countries) == 273
assert "United Kingdom" not in page_countries
@then("I should see a list of all countries that have been preselected")
def i_should_see_a_list_of_countries(driver):
application_countries_list = OpenApplicationCountriesPage(driver)
page_countries = application_countries_list.get_static_destinations_list()
assert len(page_countries) == 273
assert "United Kingdom" not in page_countries
@then("I should see a list of the countries permitted for a cryptographic OIEL")
def i_should_see_a_list_of_countries_cryptographic_oiel(driver):
application_countries_list = OpenApplicationCountriesPage(driver)
page_countries = application_countries_list.get_static_destinations_list()
assert len(page_countries) == 213
assert "United Kingdom" not in page_countries
@then("I should see the UK Continental Shelf as the only permitted destination")
def i_should_see_a_list_of_countries_uk_continental_shelf_oiel(driver):
application_countries_list = OpenApplicationCountriesPage(driver)
page_countries = application_countries_list.get_static_destinations_list()
assert len(page_countries) == 1
assert page_countries[0] == "UK Continental Shelf"
@when(parsers.parse('I select "{country}" from the country list'))
def i_select_country_from_the_country_list(driver, country):
application_countries_list = OpenApplicationCountriesPage(driver)
application_countries_list.select_country(country)
assert utils.find_element_by_href(driver, "#" + country).is_displayed()
@when(parsers.parse('I search for country "{country}"'))
def search_for_country(driver, country):
OpenApplicationCountriesPage(driver).search_for_country(country)
@then(parsers.parse('only "{country}" is displayed in country list'))
def search_country_result(driver, country):
assert (
country == OpenApplicationCountriesPage(driver).get_text_of_countries_list()
), "Country not searched correctly"
@when("I click select all countries")
def select_all_countries(driver):
page = OpenApplicationCountriesPage(driver)
page.click_select_all()
@then("all checkboxes are selected")
def all_selected(driver):
page = OpenApplicationCountriesPage(driver)
assert page.get_number_of_checkboxes(checked=False) == page.get_number_of_checkboxes(checked=True)
@when("I select that I want to add the same sectors and contract types to all countries")
def select_yes_to_all_countries_with_the_same_contract_types(driver):
OpenApplicationCountryContractTypes(driver).select_same_contract_types_for_all_countries_radio_button()
@when("I select contract types for all countries")
def select_contract_types_for_all_countries(driver, context):
page = OpenApplicationCountryContractTypes(driver)
context.contract_types = [
{"id": "Navy", "value": "Navy"},
{
"id": "Aircraft-manufacturers,-maintainers-or-operators",
"value": "Aircraft manufacturers, maintainers or operators",
},
{"id": "Pharmaceutical-or-medical", "value": "Pharmaceutical or medical"},
]
page.select_contract_type(context.contract_types[0]["id"])
page.select_contract_type(context.contract_types[1]["id"])
page.select_contract_type(context.contract_types[2]["id"])
page.select_other_contract_type_and_fill_in_details()
functions.click_submit(driver)
@then("I should see all countries and the chosen contract types on the destination summary list")
def i_should_see_destinations_summary_countries_contract_types(driver, context):
page = OpenApplicationCountryContractTypesSummaryPage(driver)
countries_and_contract_types = page.get_countries_with_respective_contract_types()
assert len(countries_and_contract_types) == 273
assert "United Kingdom" not in countries_and_contract_types
for country_with_contract_types in countries_and_contract_types:
for contract_type in context.contract_types:
assert contract_type["value"] in country_with_contract_types[1]
@then(
"I should see the UK Continental Shelf as the only destination and the chosen contract types on the destination summary list"
)
def i_should_see_destinations_summary_uk_continental_shelf_contract_types(driver, context):
page = OpenApplicationCountryContractTypesSummaryPage(driver)
countries_and_contract_types = page.get_countries_with_respective_contract_types()
assert len(countries_and_contract_types) == 1
assert countries_and_contract_types[0][0] == "UK Continental Shelf"
for country_with_contract_types in countries_and_contract_types:
for contract_type in context.contract_types:
assert contract_type["value"] in country_with_contract_types[1]
@when(parsers.parse('I "{assign_or_unassign}" all countries to all goods with link'))
def assign_all_with_link(driver, assign_or_unassign):
countries_page = GoodsCountriesPage(driver)
if assign_or_unassign == "assign":
countries_page.select_all_link()
countries_page.click_save()
else:
countries_page.deselect_all_link()
@when("I click Add goods type button")
def click_goods_type_button(driver):
OpenApplicationGoodsTypesPage(driver).click_add_good_button()
@then(parsers.parse('I see all countries are "{assigned_or_unassigned}" to all goods'))
def see_all_or_no_selected(driver, assigned_or_unassigned):
countries_page = GoodsCountriesPage(driver)
if assigned_or_unassigned == "assigned":
assert countries_page.all_selected()
else:
assert countries_page.all_deselected()
@when(parsers.parse('I create an open application of a "{export_type}" export type'))
def create_open_app(driver, export_type, context):
ExporterHubPage(driver).click_apply_for_a_licence()
ApplyForALicencePage(driver).select_licence_type("export_licence")
functions.click_submit(driver)
enter_type_of_application(driver, "oiel", context)
choose_open_licence_category(driver, "military", context)
enter_permanent_or_temporary(driver, export_type, context)
enter_application_name(driver, context)
answer_firearms_question(driver)
@when(parsers.parse('I create an open application for an export licence of the "{licence_type}" licence type'))
def create_open_app_of_specific_type(driver, licence_type, context):
ExporterHubPage(driver).click_apply_for_a_licence()
ApplyForALicencePage(driver).select_licence_type("export_licence")
functions.click_submit(driver)
enter_type_of_application(driver, "oiel", context)
choose_open_licence_category(driver, licence_type, context)
if licence_type in ["military", "uk_continental_shelf"]:
enter_permanent_or_temporary(driver, "permanent", context)
enter_application_name(driver, context)
if licence_type in ["military", "uk_continental_shelf"]:
answer_firearms_question(driver)
@when("I click on the add button")
def i_click_on_the_add_button(driver):
GenericApplicationUltimateEndUsers(driver).click_add_ultimate_recipient_button()
@when("I remove a good type from the application")
def i_remove_a_good_from_the_application(driver):
remove_good_link = StandardApplicationGoodsPage(driver).find_remove_goods_type_link()
driver.execute_script("arguments[0].click();", remove_good_link)
@then("no goods types are left on the application")
def no_goods_types_are_left_on_the_application(driver):
assert (OpenApplicationGoodsTypesPage(driver).find_remove_goods_type_link(), None)
@then(parsers.parse('I cannot see the sections "{sections}"'))
def sections_did_not_appear_on_task_list(driver, sections):
sections = sections.split(", ")
for section in sections:
assert TaskListPage(driver).get_section(section) is None
| true | true |
f720d28d694930288ecc3e99c146b144020f7a87 | 13,442 | py | Python | lib/redis_cache/rediscache.py | eapearson/kb_Metrics | f1c3c8457577060c9c695d6f4cbb7ec8f7fae17f | [
"MIT"
] | null | null | null | lib/redis_cache/rediscache.py | eapearson/kb_Metrics | f1c3c8457577060c9c695d6f4cbb7ec8f7fae17f | [
"MIT"
] | null | null | null | lib/redis_cache/rediscache.py | eapearson/kb_Metrics | f1c3c8457577060c9c695d6f4cbb7ec8f7fae17f | [
"MIT"
] | null | null | null | """
A simple redis-cache interface for storing python objects.
"""
from functools import wraps
import pickle
import json
import hashlib
import redis
import logging
from redis._compat import basestring, unicode
DEFAULT_EXPIRY = 60 * 60 * 24
class RedisConnect(object):
    """Bundle of Redis connection parameters with a connect-and-verify helper.

    Keeping host/port/db/password together makes SimpleCache's connection
    configuration easy to customize.
    """

    def __init__(self, host=None, port=None, db=None, password=None):
        self.host = host or 'localhost'
        self.port = port or 6379
        self.db = db or 0
        self.password = password

    def connect(self):
        """Return a redis.StrictRedis client after verifying connectivity.

        A throwaway client is pinged first because simply constructing a
        client does not contact the server; if the ping fails,
        RedisNoConnException is raised with the (host, port) pair.

        :return: redis.StrictRedis connection object
        """
        probe = redis.StrictRedis(host=self.host, port=self.port, password=self.password)
        try:
            probe.ping()
        except redis.ConnectionError as e:
            raise RedisNoConnException("Failed to create connection to redis",
                                       (self.host,
                                        self.port)
                                       )
        return redis.StrictRedis(host=self.host,
                                 port=self.port,
                                 db=self.db,
                                 password=self.password)
class CacheMissException(Exception):
    """Raised when a requested key is entirely absent from the cache."""
class ExpiredKeyException(Exception):
    """Raised when a key is known to the cache but its value has expired."""
class RedisNoConnException(Exception):
    """Raised when the Redis server cannot be reached (ping failed)."""
class DoNotCache(Exception):
    """Escape hatch for cached functions.

    A decorated function may ``raise DoNotCache(value)`` to have *value*
    returned to the caller without being written to the cache.
    """

    _result = None

    def __init__(self, result):
        # Initialize the Exception base, then stash the payload.
        super(DoNotCache, self).__init__()
        self._result = result

    @property
    def result(self):
        """The value the decorated function wants returned (never cached)."""
        return self._result
class SimpleCache(object):
    """A size-bounded, Redis-backed cache of serialized Python values.

    Values are stored under ``SimpleCache-<namespace>:<key>`` and the raw
    keys are additionally tracked in a companion Redis set
    (``SimpleCache-<namespace>-keys``) so that membership, iteration,
    counting and bulk expiry can be implemented on top of plain key/value
    storage.  If Redis is unreachable at construction time,
    ``self.connection`` is None and callers (see ``cache_it``) are expected
    to degrade gracefully.
    """

    def __init__(self,
                 limit=10000,
                 expire=DEFAULT_EXPIRY,
                 hashkeys=False,
                 host=None,
                 port=None,
                 db=None,
                 password=None,
                 namespace="SimpleCache"):
        self.limit = limit  # max number of entries tracked in the key set
        self.expire = expire  # default time-to-live for stored keys, in seconds
        self.prefix = namespace
        self.host = host
        self.port = port
        self.db = db

        try:
            self.connection = RedisConnect(host=self.host,
                                           port=self.port,
                                           db=self.db,
                                           password=password).connect()
        except RedisNoConnException as e:
            # Redis being down is non-fatal: a None connection signals
            # "cache unavailable" to the decorators built on this class.
            self.connection = None
            pass

        # Should we hash keys? There is a very small risk of collision involved.
        self.hashkeys = hashkeys

    def make_key(self, key):
        """Return the fully namespaced Redis key for *key*."""
        return "SimpleCache-{0}:{1}".format(self.prefix, key)

    def namespace_key(self, namespace):
        """Return a glob pattern matching every key under *namespace*."""
        return self.make_key(namespace + ':*')

    def get_set_name(self):
        """Return the name of the Redis set that tracks this cache's keys."""
        return "SimpleCache-{0}-keys".format(self.prefix)

    def store(self, key, value, expire=None):
        """Store *value* under *key*, evicting arbitrary entries when full.

        Eviction uses SPOP on the tracking set, so which entry goes is
        random. An ``expire`` of None falls back to the instance default;
        an int <= 0 stores without any TTL.

        :param key: key by which to reference datum being stored in Redis
        :param value: actual value being stored under this key
        :param expire: time-to-live (ttl) for this datum
        """
        key = to_unicode(key)
        value = to_unicode(value)
        set_name = self.get_set_name()

        while self.connection.scard(set_name) >= self.limit:
            del_key = self.connection.spop(set_name)
            self.connection.delete(self.make_key(del_key))

        pipe = self.connection.pipeline()
        if expire is None:
            expire = self.expire

        if (isinstance(expire, int) and expire <= 0) or (expire is None):
            pipe.set(self.make_key(key), value)
        else:
            pipe.setex(self.make_key(key), expire, value)

        pipe.sadd(set_name, key)
        pipe.execute()

    def expire_all_in_set(self):
        """Delete every key tracked in this cache's namespace at once.

        Useful when a single change invalidates all data returned by a
        decorated function.

        :return: (total number of keys in the set, number of keys deleted)
        """
        all_members = self.keys()
        keys = [self.make_key(k) for k in all_members]

        with self.connection.pipeline() as pipe:
            pipe.delete(*keys)
            pipe.execute()

        return len(self), len(all_members)

    def expire_namespace(self, namespace):
        """Delete every Redis key matching the given sub-namespace pattern.

        Unlike ``expire_all_in_set`` this scans Redis directly with KEYS,
        so it removes the value keys but does not prune the tracking set.

        :return: (total number of keys in the set, number of keys deleted)
        """
        namespace = self.namespace_key(namespace)
        all_members = list(self.connection.keys(namespace))
        with self.connection.pipeline() as pipe:
            pipe.delete(*all_members)
            pipe.execute()

        return len(self), len(all_members)

    def isexpired(self, key):
        """Return True if *key* has expired, else its remaining ttl in ms.

        NOTE(review): three different key formats are probed here
        ("SimpleCache-<key>", the namespaced key, "<prefix>:<key>") — only
        the ``make_key`` form matches what ``store`` writes; confirm the
        other two probes are intentional fallbacks.

        :param key: key being looked-up in Redis
        :return: bool (True) if expired, or int time-to-live (ttl) value
        """
        ttl = self.connection.pttl("SimpleCache-{0}".format(key))
        if ttl == -2:  # key does not exist under this form; try the namespaced key
            ttl = self.connection.pttl(self.make_key(key))
        elif ttl == -1:  # key exists but has no TTL set
            return True
        if not ttl is None:
            return ttl
        else:
            return self.connection.pttl("{0}:{1}".format(self.prefix, key))

    def store_json(self, key, value, expire=None):
        """Store *value* JSON-encoded under *key*."""
        self.store(key, json.dumps(value), expire)

    def store_pickle(self, key, value, expire=None):
        """Store *value* pickle-encoded under *key*."""
        self.store(key, pickle.dumps(value), expire)

    def get(self, key):
        """Return the raw stored value for *key*.

        Raises CacheMissException if the key was never stored, or
        ExpiredKeyException if it was stored but its value has expired
        (in which case the key is also pruned from the tracking set).
        """
        key = to_unicode(key)
        if key:  # No need to validate membership, which is an O(1) operation, but seems we can do without.
            value = self.connection.get(self.make_key(key))
            if value is None:  # expired key
                if not key in self:  # If key does not exist at all, it is a straight miss.
                    raise CacheMissException

                self.connection.srem(self.get_set_name(), key)
                raise ExpiredKeyException
            else:
                return value

    def mget(self, keys):
        """Return a dict of key/values for the keys that were found.

        Missing/expired keys are silently dropped from the result.

        NOTE(review): the pruning below calls ``srem`` with the *prefixed*
        cache_key, but ``store`` adds the raw key to the set — so this
        removal likely never matches a member; confirm.

        :param keys: array of keys to look up in Redis
        :return: dict of found key/values
        """
        if keys:
            cache_keys = [self.make_key(to_unicode(key)) for key in keys]
            values = self.connection.mget(cache_keys)

            if None in values:
                pipe = self.connection.pipeline()
                for cache_key, value in zip(cache_keys, values):
                    if value is None:  # non-existant or expired key
                        pipe.srem(self.get_set_name(), cache_key)
                pipe.execute()

            return {k: v for (k, v) in zip(keys, values) if v is not None}

    def get_json(self, key):
        """Return the stored value for *key*, parsed from JSON."""
        return json.loads(self.get(key))

    def get_pickle(self, key):
        """Return the stored value for *key*, unpickled."""
        return pickle.loads(self.get(key))

    def mget_json(self, keys):
        """Return a dict of key/values for found keys, each parsed from JSON.

        :param keys: array of keys to look up in Redis
        :return: dict of found key/values with values parsed from JSON format
        """
        d = self.mget(keys)
        if d:
            for key in d.keys():
                d[key] = json.loads(d[key]) if d[key] else None
            return d

    def invalidate(self, key):
        """Remove (invalidate) an item from the cache and its tracking set.

        :param key: key to remove from Redis
        """
        key = to_unicode(key)
        pipe = self.connection.pipeline()
        pipe.srem(self.get_set_name(), key)
        pipe.delete(self.make_key(key))
        pipe.execute()

    def __contains__(self, key):
        # Membership is checked against the tracking set, not the value key,
        # so an expired-but-tracked key still counts as contained.
        return self.connection.sismember(self.get_set_name(), key)

    def __iter__(self):
        # Yields "<prefix>:<key>" strings; an unavailable connection yields
        # an empty iterator rather than raising.
        if not self.connection:
            return iter([])
        return iter(
            ["{0}:{1}".format(self.prefix, x)
             for x in self.connection.smembers(self.get_set_name())
             ])

    def __len__(self):
        return self.connection.scard(self.get_set_name())

    def keys(self):
        """Return the set of raw (un-prefixed) keys tracked by this cache."""
        return self.connection.smembers(self.get_set_name())

    def flush(self):
        """Delete every tracked key and the tracking set itself."""
        keys = list(self.keys())
        keys.append(self.get_set_name())
        with self.connection.pipeline() as pipe:
            pipe.delete(*keys)
            pipe.execute()

    def flush_namespace(self, space):
        """Delete every cached key under *space* and prune the tracking set.

        NOTE(review): ``pipe.srem(setname, *space)`` star-unpacks *space*,
        which for a string argument splats it into individual characters —
        confirm whether *space* is expected to be an iterable of keys here.
        """
        namespace = self.namespace_key(space)
        setname = self.get_set_name()
        keys = list(self.connection.keys(namespace))
        with self.connection.pipeline() as pipe:
            pipe.delete(*keys)
            pipe.srem(setname, *space)
            pipe.execute()

    def get_hash(self, args):
        """Return the cache key for a serialized argument blob.

        With ``hashkeys`` enabled the key is an md5 hex digest (small
        collision risk); otherwise it is the pickled blob itself.

        NOTE(review): ``hashlib.md5`` requires bytes on Python 3, so the
        hashkeys path only works when *args* is bytes (e.g. output of
        ``pickle.dumps``); a JSON-serialized str would raise TypeError —
        confirm behavior on Python 3.
        """
        if self.hashkeys:
            key = hashlib.md5(args).hexdigest()
        else:
            key = pickle.dumps(args)
        return key
def cache_it(limit=10000, expire=DEFAULT_EXPIRY, cache=None,
             use_json=False, namespace=None):
    """Decorator factory that caches a function's results in Redis.

    Arguments and function result must be pickleable (or JSON-encodable
    when ``use_json`` is set). Cache keys are derived from the serialized
    (args, kwargs) pair, prefixed with the function name and, optionally,
    ``namespace``. If Redis is unavailable or any cache operation fails,
    the wrapped function is simply called directly — caching is always
    best-effort. A wrapped function can raise ``DoNotCache(value)`` to
    return *value* without storing it.

    :param limit: maximum number of keys to maintain in the set
    :param expire: period after which an entry in cache is considered expired
    :param cache: SimpleCache object, if created separately
    :param use_json: serialize with JSON instead of pickle
    :param namespace: optional extra prefix for all keys of this function
    :return: decorated function
    """
    cache_ = cache  ## Since python 2.x doesn't have the nonlocal keyword, we need to do this
    expire_ = expire  ## Same here.

    def decorator(function):
        cache, expire = cache_, expire_
        if cache is None:
            # Lazily build a per-module cache with hashed keys.
            cache = SimpleCache(limit, expire, hashkeys=True, namespace=function.__module__)
        elif expire == DEFAULT_EXPIRY:
            # If the expire arg value is the default, set it to None so we store
            # the expire value of the passed cache object
            expire = None

        @wraps(function)
        def func(*args, **kwargs):
            ## Handle cases where caching is down or otherwise not available.
            if cache.connection is None:
                result = function(*args, **kwargs)
                return result

            serializer = json if use_json else pickle
            fetcher = cache.get_json if use_json else cache.get_pickle
            storer = cache.store_json if use_json else cache.store_pickle

            ## Key will be either a md5 hash or just pickle object,
            ## in the form of `function name`:`key`
            key = cache.get_hash(serializer.dumps([args, kwargs]))
            cache_key = '{func_name}:{key}'.format(func_name=function.__name__,
                                                   key=key)

            if namespace:
                cache_key = '{namespace}:{key}'.format(namespace=namespace,
                                                       key=cache_key)

            try:
                return fetcher(cache_key)
            except (ExpiredKeyException, CacheMissException) as e:
                ## Cache miss/expiry: fall through and recompute below.
                pass
            except:
                # Any other failure (connection, serialization, OOM on the
                # Redis side) is logged but never propagated to the caller.
                logging.exception("Unknown redis-simple-cache error. Please check your Redis free space.")

            try:
                result = function(*args, **kwargs)
            except DoNotCache as e:
                # The function opted out of caching for this result.
                result = e.result
            else:
                try:
                    storer(cache_key, result, expire)
                except redis.ConnectionError as e:
                    logging.exception(e)

            return result
        return func
    return decorator
def cache_it_json(limit=10000, expire=DEFAULT_EXPIRY, cache=None, namespace=None):
    """JSON-serializing variant of :func:`cache_it`.

    Arguments and function result must be able to convert to JSON.

    Bug fix: the ``namespace`` argument was previously discarded — the call
    forwarded the literal ``namespace=None`` — so namespaced keys (and
    per-namespace invalidation) never worked for JSON-cached functions.
    It is now forwarded correctly.

    :param limit: maximum number of keys to maintain in the set
    :param expire: period after which an entry in cache is considered expired
    :param cache: SimpleCache object, if created separately
    :param namespace: optional extra prefix for all keys of this function
    :return: decorated function
    """
    return cache_it(limit=limit, expire=expire, use_json=True,
                    cache=cache, namespace=namespace)
def to_unicode(obj, encoding='utf-8'):
    """Coerce byte strings to unicode text; pass every other value through.

    Relies on the ``basestring``/``unicode`` shims imported from
    ``redis._compat`` at the top of the module (Python 2 compatibility).
    Non-string values (ints, lists, None, ...) are returned unchanged.
    """
    if isinstance(obj, basestring):
        if not isinstance(obj, unicode):
            # Decode bytes using the caller-supplied encoding (utf-8 default).
            obj = unicode(obj, encoding)
    return obj
| 34.64433 | 107 | 0.588157 | from functools import wraps
import pickle
import json
import hashlib
import redis
import logging
from redis._compat import basestring, unicode
DEFAULT_EXPIRY = 60 * 60 * 24
class RedisConnect(object):
def __init__(self, host=None, port=None, db=None, password=None):
self.host = host if host else 'localhost'
self.port = port if port else 6379
self.db = db if db else 0
self.password = password
def connect(self):
try:
redis.StrictRedis(host=self.host, port=self.port, password=self.password).ping()
except redis.ConnectionError as e:
raise RedisNoConnException("Failed to create connection to redis",
(self.host,
self.port)
)
return redis.StrictRedis(host=self.host,
port=self.port,
db=self.db,
password=self.password)
class CacheMissException(Exception):
pass
class ExpiredKeyException(Exception):
pass
class RedisNoConnException(Exception):
pass
class DoNotCache(Exception):
_result = None
def __init__(self, result):
super(DoNotCache, self).__init__()
self._result = result
@property
def result(self):
return self._result
class SimpleCache(object):
def __init__(self,
limit=10000,
expire=DEFAULT_EXPIRY,
hashkeys=False,
host=None,
port=None,
db=None,
password=None,
namespace="SimpleCache"):
self.limit = limit
self.expire = expire
self.prefix = namespace
self.host = host
self.port = port
self.db = db
try:
self.connection = RedisConnect(host=self.host,
port=self.port,
db=self.db,
password=password).connect()
except RedisNoConnException as e:
self.connection = None
pass
self.hashkeys = hashkeys
def make_key(self, key):
return "SimpleCache-{0}:{1}".format(self.prefix, key)
def namespace_key(self, namespace):
return self.make_key(namespace + ':*')
def get_set_name(self):
return "SimpleCache-{0}-keys".format(self.prefix)
def store(self, key, value, expire=None):
key = to_unicode(key)
value = to_unicode(value)
set_name = self.get_set_name()
while self.connection.scard(set_name) >= self.limit:
del_key = self.connection.spop(set_name)
self.connection.delete(self.make_key(del_key))
pipe = self.connection.pipeline()
if expire is None:
expire = self.expire
if (isinstance(expire, int) and expire <= 0) or (expire is None):
pipe.set(self.make_key(key), value)
else:
pipe.setex(self.make_key(key), expire, value)
pipe.sadd(set_name, key)
pipe.execute()
def expire_all_in_set(self):
all_members = self.keys()
keys = [self.make_key(k) for k in all_members]
with self.connection.pipeline() as pipe:
pipe.delete(*keys)
pipe.execute()
return len(self), len(all_members)
def expire_namespace(self, namespace):
namespace = self.namespace_key(namespace)
all_members = list(self.connection.keys(namespace))
with self.connection.pipeline() as pipe:
pipe.delete(*all_members)
pipe.execute()
return len(self), len(all_members)
def isexpired(self, key):
ttl = self.connection.pttl("SimpleCache-{0}".format(key))
if ttl == -2:
ttl = self.connection.pttl(self.make_key(key))
elif ttl == -1:
return True
if not ttl is None:
return ttl
else:
return self.connection.pttl("{0}:{1}".format(self.prefix, key))
def store_json(self, key, value, expire=None):
self.store(key, json.dumps(value), expire)
def store_pickle(self, key, value, expire=None):
self.store(key, pickle.dumps(value), expire)
def get(self, key):
key = to_unicode(key)
if key:
value = self.connection.get(self.make_key(key))
if value is None:
if not key in self:
raise CacheMissException
self.connection.srem(self.get_set_name(), key)
raise ExpiredKeyException
else:
return value
def mget(self, keys):
if keys:
cache_keys = [self.make_key(to_unicode(key)) for key in keys]
values = self.connection.mget(cache_keys)
if None in values:
pipe = self.connection.pipeline()
for cache_key, value in zip(cache_keys, values):
if value is None:
pipe.srem(self.get_set_name(), cache_key)
pipe.execute()
return {k: v for (k, v) in zip(keys, values) if v is not None}
def get_json(self, key):
return json.loads(self.get(key))
def get_pickle(self, key):
return pickle.loads(self.get(key))
def mget_json(self, keys):
d = self.mget(keys)
if d:
for key in d.keys():
d[key] = json.loads(d[key]) if d[key] else None
return d
def invalidate(self, key):
key = to_unicode(key)
pipe = self.connection.pipeline()
pipe.srem(self.get_set_name(), key)
pipe.delete(self.make_key(key))
pipe.execute()
def __contains__(self, key):
return self.connection.sismember(self.get_set_name(), key)
def __iter__(self):
if not self.connection:
return iter([])
return iter(
["{0}:{1}".format(self.prefix, x)
for x in self.connection.smembers(self.get_set_name())
])
def __len__(self):
return self.connection.scard(self.get_set_name())
def keys(self):
return self.connection.smembers(self.get_set_name())
def flush(self):
keys = list(self.keys())
keys.append(self.get_set_name())
with self.connection.pipeline() as pipe:
pipe.delete(*keys)
pipe.execute()
def flush_namespace(self, space):
    """Delete every cached entry under namespace *space*.

    NOTE(review): two suspect spots to confirm against a live Redis:
    * ``pipe.delete(*keys)`` raises when the namespace pattern matches
      no keys (redis-py requires at least one argument to DEL);
    * ``pipe.srem(setname, *space)`` unpacks the namespace *string* into
      individual characters -- the intent was almost certainly to remove
      the matched keys (or their set members) instead.
    """
    namespace = self.namespace_key(space)
    setname = self.get_set_name()
    keys = list(self.connection.keys(namespace))
    with self.connection.pipeline() as pipe:
        pipe.delete(*keys)
        pipe.srem(setname, *space)
        pipe.execute()
def get_hash(self, args):
    """Derive the cache-key payload for *args*.

    Returns an md5 hex digest when ``hashkeys`` is enabled (short,
    fixed width); otherwise the raw pickle of the arguments.
    """
    if self.hashkeys:
        return hashlib.md5(args).hexdigest()
    return pickle.dumps(args)
def cache_it(limit=10000, expire=DEFAULT_EXPIRY, cache=None,
             use_json=False, namespace=None):
    """Decorator factory that memoizes a function's results in Redis.

    Args:
        limit: maximum key count for an implicitly created cache.
        expire: TTL in seconds; DEFAULT_EXPIRY defers to the cache's own.
        cache: an existing SimpleCache to reuse; one is created per
            decorated function's module when omitted.
        use_json: serialize results as JSON instead of pickle.
        namespace: optional extra prefix for the generated cache keys.

    Fix: the source had been corrupted -- the ``expire_`` capture and the
    ``def decorator(function):`` line were fused/dropped, leaving the
    returned ``decorator`` undefined.  Reconstructed per upstream.
    """
    # Capture the factory arguments so the inner decorator can rebind
    # them per decorated function without clobbering the closure.
    cache_ = cache
    expire_ = expire

    def decorator(function):
        cache, expire = cache_, expire_
        if cache is None:
            cache = SimpleCache(limit, expire, hashkeys=True,
                                namespace=function.__module__)
        elif expire == DEFAULT_EXPIRY:
            # If the expire arg value is the default, set it to None so we
            # store the expire value of the passed cache object.
            expire = None

        @wraps(function)
        def func(*args, **kwargs):
            ## Handle cases where caching is down or otherwise not available.
            if cache.connection is None:
                result = function(*args, **kwargs)
                return result
            serializer = json if use_json else pickle
            fetcher = cache.get_json if use_json else cache.get_pickle
            storer = cache.store_json if use_json else cache.store_pickle
            ## Key will be either a md5 hash or just pickle object,
            ## in the form of `function name`:`key`
            key = cache.get_hash(serializer.dumps([args, kwargs]))
            cache_key = '{func_name}:{key}'.format(func_name=function.__name__,
                                                   key=key)
            if namespace:
                cache_key = '{namespace}:{key}'.format(namespace=namespace,
                                                       key=cache_key)
            try:
                return fetcher(cache_key)
            except (ExpiredKeyException, CacheMissException):
                ## Expected cache miss: fall through and recompute.
                pass
            except Exception:
                # Narrowed from a bare `except:` so KeyboardInterrupt /
                # SystemExit are no longer swallowed here.
                logging.exception("Unknown redis-simple-cache error. Please check your Redis free space.")
            try:
                result = function(*args, **kwargs)
            except DoNotCache as e:
                # The function opted out of caching for this call.
                result = e.result
            else:
                try:
                    storer(cache_key, result, expire)
                except redis.ConnectionError as e:
                    logging.exception(e)
            return result
        return func
    return decorator
def cache_it_json(limit=10000, expire=DEFAULT_EXPIRY, cache=None, namespace=None):
    """JSON-serializing variant of cache_it().

    Fix: the caller-supplied *namespace* is now forwarded; previously it
    was hard-coded to ``namespace=None``, silently discarding the argument.
    """
    return cache_it(limit=limit, expire=expire, use_json=True,
                    cache=cache, namespace=namespace)
def to_unicode(obj, encoding='utf-8'):
    """Decode *obj* to text if it is a byte string; pass it through otherwise.

    Portable across Python 2 and 3: ``bytes`` is ``str`` on Python 2, so
    byte strings are decoded with *encoding*, while already-decoded text
    and non-string objects are returned unchanged.  Replaces the
    Python-2-only ``basestring``/``unicode`` spelling.
    """
    if isinstance(obj, bytes):
        obj = obj.decode(encoding)
    return obj
| true | true |
f720d329eaad65945f4c82bf41d8502618bb8cd8 | 892 | py | Python | setup.py | msaroufim/spektral | 6881e6650602b2f98b09516f490c185678075bc8 | [
"MIT"
] | 1 | 2020-07-28T09:11:57.000Z | 2020-07-28T09:11:57.000Z | setup.py | msaroufim/spektral | 6881e6650602b2f98b09516f490c185678075bc8 | [
"MIT"
] | null | null | null | setup.py | msaroufim/spektral | 6881e6650602b2f98b09516f490c185678075bc8 | [
"MIT"
] | null | null | null | from setuptools import setup, find_packages
# Read the long description from the README; pin the encoding so the
# build does not depend on the platform's default locale.
with open("README.md", "r", encoding="utf-8") as fh:
    long_description = fh.read()

# Package metadata for PyPI / pip installs.
setup(
    name='spektral',
    version='0.6.0',
    packages=find_packages(),
    install_requires=['tensorflow>=2.1.0',
                      'networkx',
                      'pandas',
                      'lxml',
                      'joblib',
                      'numpy',
                      'scipy',
                      'requests',
                      'scikit-learn'],
    url='https://github.com/danielegrattarola/spektral',
    license='MIT',
    author='Daniele Grattarola',
    author_email='daniele.grattarola@gmail.com',
    description='Graph Neural Networks with Keras and Tensorflow 2.',
    long_description=long_description,
    long_description_content_type="text/markdown",
    classifiers=[
        "Programming Language :: Python :: 3.5"
    ],
)
| 29.733333 | 69 | 0.545964 | from setuptools import setup, find_packages
with open("README.md", "r") as fh:
long_description = fh.read()
setup(
name='spektral',
version='0.6.0',
packages=find_packages(),
install_requires=['tensorflow>=2.1.0',
'networkx',
'pandas',
'lxml',
'joblib',
'numpy',
'scipy',
'requests',
'scikit-learn'],
url='https://github.com/danielegrattarola/spektral',
license='MIT',
author='Daniele Grattarola',
author_email='daniele.grattarola@gmail.com',
description='Graph Neural Networks with Keras and Tensorflow 2.',
long_description=long_description,
long_description_content_type="text/markdown",
classifiers=[
"Programming Language :: Python :: 3.5"
],
)
| true | true |
f720d46cfcd6d92dcd55f520e7ee8bb54e90becb | 1,211 | py | Python | Simple_Cipher/simple_cipher.py | triump0870/Interactive_Programming_Python | 97e0f1f5639aecac683053ed742632db14dc6954 | [
"Apache-2.0"
] | 1 | 2015-06-09T22:40:15.000Z | 2015-06-09T22:40:15.000Z | Simple_Cipher/simple_cipher.py | triump0870/Interactive_Programming_Python | 97e0f1f5639aecac683053ed742632db14dc6954 | [
"Apache-2.0"
] | null | null | null | Simple_Cipher/simple_cipher.py | triump0870/Interactive_Programming_Python | 97e0f1f5639aecac683053ed742632db14dc6954 | [
"Apache-2.0"
] | null | null | null | # Simple Cipher Text Generator
# Rohan Roy - 2nd Nov 2013
import simplegui
import random
# Global Variables
CIPHER = {}  # maps each plaintext character to its random substitute (built by init())
# Every character the cipher can handle.  The trailing `" "` adds the
# double-quote and space characters to the alphabet.
# Fix: the uppercase run was missing 'N' (...LMOPQ...), so any message
# containing 'N' raised a KeyError during encoding.
LETTER = 'abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ1234567890!@#$%&" "'
message = ""  # most recently entered message (set by newmsg())
# Helper Function
def init():
    """Populate CIPHER with a random substitution, one entry per LETTER char."""
    shuffled = list(LETTER)
    random.shuffle(shuffled)
    # Consuming the shuffled pool from the end matches the original
    # letter_list.pop() ordering exactly.
    for plain, secret in zip(LETTER, reversed(shuffled)):
        CIPHER[plain] = secret
# Encoding function
def encode():
    """Print the substitution-cipher encoding of the current global message."""
    emsg = ""
    for ch in message:
        emsg += CIPHER[ch]  # KeyError if ch is not part of LETTER
    print message , " encodes to ",emsg
# Decoding Function
def decode():
    """Print the decoding of the current message via reverse lookup in CIPHER.

    O(len(message) * len(CIPHER)); fine for interactive use -- a reversed
    dict would make it linear.
    """
    dmsg = ""
    for ch in message:
        for key,value in CIPHER.items():
            if ch == value:
                dmsg += key
    print message , " decodes to ", dmsg
# Input Message Function
def newmsg(msg):
    """Input-field handler: remember *msg* and echo it in the output label.

    Fix: the original did ``label1 = label2.set_text(msg)`` -- set_text()
    returns None, so the caption label's handle was clobbered on every
    keystroke.  Only the echo label needs updating.
    """
    global message
    message = msg
    label2.set_text(msg)
# Frames for the program
# NOTE(review): canvas is only 2 px wide -- the frame appears to be used
# purely for its control panel; confirm against CodeSkulptor's
# create_frame(title, canvas_width, canvas_height, control_width).
frame = simplegui.create_frame("SimpleCipher",2,300,300)
frame.add_input("Message:", newmsg,200)
label1 = frame.add_label("Input Message:")
label2 = frame.add_label("",200)  # echoes the message typed into the input field
frame.add_button("Encode",encode)
frame.add_button("Decode",decode)
# Initialization of the program
init()
# Starting of the frame
frame.start()
| 22.849057 | 81 | 0.654005 |
import simplegui
import random
CIPHER = {}
LETTER = 'abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMOPQRSTUVWXYZ1234567890!@#$%&" "'
message = ""
def init():
letter_list = list(LETTER)
random.shuffle(letter_list)
for ch in LETTER:
CIPHER[ch] = letter_list.pop()
def encode():
emsg = ""
for ch in message:
emsg += CIPHER[ch]
print message , " encodes to ",emsg
def decode():
dmsg = ""
for ch in message:
for key,value in CIPHER.items():
if ch == value:
dmsg += key
print message , " decodes to ", dmsg
def newmsg(msg):
global message
message = msg
label1=label2.set_text(msg)
frame = simplegui.create_frame("SimpleCipher",2,300,300)
frame.add_input("Message:", newmsg,200)
label1 = frame.add_label("Input Message:")
label2 = frame.add_label("",200)
frame.add_button("Encode",encode)
frame.add_button("Decode",decode)
init()
frame.start()
| false | true |
f720d5217ca55aacc0922b9a609c312d27b6d596 | 3,175 | py | Python | tests/unit/test_subscribers.py | cclauss/s3transfer | 258c3c69416338f8df307621ec5cefa85c453150 | [
"Apache-2.0"
] | 1 | 2021-05-08T10:43:40.000Z | 2021-05-08T10:43:40.000Z | tests/unit/test_subscribers.py | Saiprasad16/s3transfer | 59e968d05288092948284001710c416677102266 | [
"Apache-2.0"
] | 1 | 2021-04-08T21:25:06.000Z | 2021-04-13T16:36:43.000Z | tests/unit/test_subscribers.py | Saiprasad16/s3transfer | 59e968d05288092948284001710c416677102266 | [
"Apache-2.0"
] | 1 | 2020-12-28T19:16:31.000Z | 2020-12-28T19:16:31.000Z | # Copyright 2016 Amazon.com, Inc. or its affiliates. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the 'License'). You
# may not use this file except in compliance with the License. A copy of
# the License is located at
#
# http://aws.amazon.com/apache2.0/
#
# or in the 'license' file accompanying this file. This file is
# distributed on an 'AS IS' BASIS, WITHOUT WARRANTIES OR CONDITIONS OF
# ANY KIND, either express or implied. See the License for the specific
# language governing permissions and limitations under the License.
from tests import unittest
from s3transfer.exceptions import InvalidSubscriberMethodError
from s3transfer.subscribers import BaseSubscriber
class ExtraMethodsSubscriber(BaseSubscriber):
    """Fixture: valid subscriber carrying an extra, non-hook method."""
    def extra_method(self):
        return 'called extra method'
class NotCallableSubscriber(BaseSubscriber):
    """Fixture: invalid subscriber -- the on_done hook is a string, not callable."""
    on_done = 'foo'
class NoKwargsSubscriber(BaseSubscriber):
    """Fixture: invalid subscriber -- on_done does not accept keyword args."""
    def on_done(self):
        pass
class OverrideMethodSubscriber(BaseSubscriber):
    """Fixture: valid hook override that echoes its kwargs for inspection."""
    def on_queued(self, **kwargs):
        return kwargs
class OverrideConstructorSubscriber(BaseSubscriber):
    """Fixture: valid subscriber with a custom two-argument constructor."""
    def __init__(self, arg1, arg2):
        self.arg1 = arg1
        self.arg2 = arg2
class TestSubscribers(unittest.TestCase):
    """Validation behaviour of BaseSubscriber and its subclasses."""

    def test_can_instantiate_base_subscriber(self):
        try:
            BaseSubscriber()
        except InvalidSubscriberMethodError:
            self.fail('BaseSubscriber should be instantiable')

    def test_can_call_base_subscriber_method(self):
        subscriber = BaseSubscriber()
        try:
            subscriber.on_done(future=None)
        except Exception as e:
            self.fail(
                'Should be able to call base class subscriber method. '
                'instead got: %s' % e)

    def test_subclass_can_have_and_call_additional_methods(self):
        subscriber = ExtraMethodsSubscriber()
        self.assertEqual(subscriber.extra_method(), 'called extra method')

    def test_can_subclass_and_override_method_from_base_subscriber(self):
        subscriber = OverrideMethodSubscriber()
        # Make sure that the overriden method is called
        self.assertEqual(subscriber.on_queued(foo='bar'), {'foo': 'bar'})

    def test_can_subclass_and_override_constructor_from_base_class(self):
        subscriber = OverrideConstructorSubscriber('foo', arg2='bar')
        # Make sure you can create a custom constructor.
        self.assertEqual(subscriber.arg1, 'foo')
        self.assertEqual(subscriber.arg2, 'bar')

    def test_invalid_arguments_in_constructor_of_subclass_subscriber(self):
        # The override constructor should still have validation of
        # constructor args.
        with self.assertRaises(TypeError):
            OverrideConstructorSubscriber()

    def test_not_callable_in_subclass_subscriber_method(self):
        # Fix: assertRaisesRegexp is a deprecated alias removed in
        # Python 3.12; assertRaisesRegex has existed since 3.2.
        with self.assertRaisesRegex(
                InvalidSubscriberMethodError, 'must be callable'):
            NotCallableSubscriber()

    def test_no_kwargs_in_subclass_subscriber_method(self):
        with self.assertRaisesRegex(
                InvalidSubscriberMethodError, 'must accept keyword'):
            NoKwargsSubscriber()
| 35.674157 | 75 | 0.716535 |
from tests import unittest
from s3transfer.exceptions import InvalidSubscriberMethodError
from s3transfer.subscribers import BaseSubscriber
class ExtraMethodsSubscriber(BaseSubscriber):
def extra_method(self):
return 'called extra method'
class NotCallableSubscriber(BaseSubscriber):
on_done = 'foo'
class NoKwargsSubscriber(BaseSubscriber):
def on_done(self):
pass
class OverrideMethodSubscriber(BaseSubscriber):
def on_queued(self, **kwargs):
return kwargs
class OverrideConstructorSubscriber(BaseSubscriber):
def __init__(self, arg1, arg2):
self.arg1 = arg1
self.arg2 = arg2
class TestSubscribers(unittest.TestCase):
def test_can_instantiate_base_subscriber(self):
try:
BaseSubscriber()
except InvalidSubscriberMethodError:
self.fail('BaseSubscriber should be instantiable')
def test_can_call_base_subscriber_method(self):
subscriber = BaseSubscriber()
try:
subscriber.on_done(future=None)
except Exception as e:
self.fail(
'Should be able to call base class subscriber method. '
'instead got: %s' % e)
def test_subclass_can_have_and_call_additional_methods(self):
subscriber = ExtraMethodsSubscriber()
self.assertEqual(subscriber.extra_method(), 'called extra method')
def test_can_subclass_and_override_method_from_base_subscriber(self):
subscriber = OverrideMethodSubscriber()
self.assertEqual(subscriber.on_queued(foo='bar'), {'foo': 'bar'})
def test_can_subclass_and_override_constructor_from_base_class(self):
subscriber = OverrideConstructorSubscriber('foo', arg2='bar')
self.assertEqual(subscriber.arg1, 'foo')
self.assertEqual(subscriber.arg2, 'bar')
def test_invalid_arguments_in_constructor_of_subclass_subscriber(self):
with self.assertRaises(TypeError):
OverrideConstructorSubscriber()
def test_not_callable_in_subclass_subscriber_method(self):
with self.assertRaisesRegexp(
InvalidSubscriberMethodError, 'must be callable'):
NotCallableSubscriber()
def test_no_kwargs_in_subclass_subscriber_method(self):
with self.assertRaisesRegexp(
InvalidSubscriberMethodError, 'must accept keyword'):
NoKwargsSubscriber()
| true | true |
f720d5fe861a06e326fd1453b262a21ad8d73c63 | 233 | py | Python | encapsulation_exercise/restaurant/project/beverage/cold_beverage.py | Veselin-Stoilov/software-university-OOP | 452a77cabf2e7d93f30f629c67c6b22682eb255d | [
"MIT"
] | null | null | null | encapsulation_exercise/restaurant/project/beverage/cold_beverage.py | Veselin-Stoilov/software-university-OOP | 452a77cabf2e7d93f30f629c67c6b22682eb255d | [
"MIT"
] | null | null | null | encapsulation_exercise/restaurant/project/beverage/cold_beverage.py | Veselin-Stoilov/software-university-OOP | 452a77cabf2e7d93f30f629c67c6b22682eb255d | [
"MIT"
] | null | null | null | from encapsulation_exercise.restaurant.project.beverage.beverage import Beverage
class ColdBeverage(Beverage):
def __init__(self, name: str, price: float, milliliters: float):
super().__init__(name, price, milliliters)
| 33.285714 | 80 | 0.76824 | from encapsulation_exercise.restaurant.project.beverage.beverage import Beverage
class ColdBeverage(Beverage):
def __init__(self, name: str, price: float, milliliters: float):
super().__init__(name, price, milliliters)
| true | true |
f720d64ceba2868cd71f12c692ec517b850f2ae3 | 5,655 | py | Python | qiskit/providers/basicaer/statevector_simulator.py | biplab37/qiskit-aakash | e10b204887606f1f75bdfde182bb0c6d0a322c68 | [
"Apache-2.0"
] | 22 | 2019-08-15T04:39:15.000Z | 2022-03-06T05:17:04.000Z | qiskit/providers/basicaer/statevector_simulator.py | biplab37/qiskit-aakash | e10b204887606f1f75bdfde182bb0c6d0a322c68 | [
"Apache-2.0"
] | 2 | 2020-10-26T07:12:12.000Z | 2021-12-09T16:22:51.000Z | qiskit/providers/basicaer/statevector_simulator.py | biplab37/qiskit-aakash | e10b204887606f1f75bdfde182bb0c6d0a322c68 | [
"Apache-2.0"
] | 9 | 2019-09-05T05:33:00.000Z | 2021-10-09T16:04:53.000Z | # -*- coding: utf-8 -*-
# This code is part of Qiskit.
#
# (C) Copyright IBM 2017.
#
# This code is licensed under the Apache License, Version 2.0. You may
# obtain a copy of this license in the LICENSE.txt file in the root directory
# of this source tree or at http://www.apache.org/licenses/LICENSE-2.0.
#
# Any modifications or derivative works of this code must retain this
# copyright notice, and modified files need to carry a notice indicating
# that they have been altered from the originals.
"""Contains a (slow) python statevector simulator.
It simulates the statevector through a quantum circuit. It is exponential in
the number of qubits.
We advise using the c++ simulator or online simulator for larger size systems.
The input is a qobj dictionary and the output is a Result object.
The input qobj to this simulator has no shots, no measures, no reset, no noise.
"""
import logging
from math import log2
from qiskit.util import local_hardware_info
from qiskit.providers.basicaer.exceptions import BasicAerError
from qiskit.providers.models import QasmBackendConfiguration
from .qasm_simulator import QasmSimulatorPy
logger = logging.getLogger(__name__)
class StatevectorSimulatorPy(QasmSimulatorPy):
    """Python statevector simulator."""

    # Largest register that fits in host RAM: each amplitude is a
    # complex128 (16 bytes), so the limit is log2(total_bytes / 16).
    MAX_QUBITS_MEMORY = int(log2(local_hardware_info()['memory'] * (1024 ** 3) / 16))

    # Static backend capabilities advertised to Terra; can be replaced
    # via the `configuration` constructor argument.
    DEFAULT_CONFIGURATION = {
        'backend_name': 'statevector_simulator',
        'backend_version': '1.0.0',
        'n_qubits': min(24, MAX_QUBITS_MEMORY),
        'url': 'https://github.com/Qiskit/qiskit-terra',
        'simulator': True,
        'local': True,
        'conditional': True,
        'open_pulse': False,
        'memory': True,
        'max_shots': 65536,
        'coupling_map': None,
        'description': 'A Python statevector simulator for qobj files',
        'basis_gates': ['u1', 'u2', 'u3', 'cx', 'id', 'snapshot'],
        'gates': [
            {
                'name': 'u1',
                'parameters': ['lambda'],
                'qasm_def': 'gate u1(lambda) q { U(0,0,lambda) q; }'
            },
            {
                'name': 'u2',
                'parameters': ['phi', 'lambda'],
                'qasm_def': 'gate u2(phi,lambda) q { U(pi/2,phi,lambda) q; }'
            },
            {
                'name': 'u3',
                'parameters': ['theta', 'phi', 'lambda'],
                'qasm_def': 'gate u3(theta,phi,lambda) q { U(theta,phi,lambda) q; }'
            },
            {
                'name': 'cx',
                'parameters': ['c', 't'],
                'qasm_def': 'gate cx c,t { CX c,t; }'
            },
            {
                'name': 'id',
                'parameters': ['a'],
                'qasm_def': 'gate id a { U(0,0,0) a; }'
            },
            {
                'name': 'snapshot',
                'parameters': ['slot'],
                'qasm_def': 'gate snapshot(slot) q { TODO }'
            }
        ]
    }

    # Override base class value to return the final state vector
    SHOW_FINAL_STATE = True

    def __init__(self, configuration=None, provider=None):
        """Fall back to DEFAULT_CONFIGURATION when none is supplied."""
        super().__init__(configuration=(
            configuration or QasmBackendConfiguration.from_dict(self.DEFAULT_CONFIGURATION)),
            provider=provider)

    def run(self, qobj, backend_options=None):
        """Run qobj asynchronously.

        Args:
            qobj (Qobj): payload of the experiment
            backend_options (dict): backend options

        Returns:
            BasicAerJob: derived from BaseJob

        Additional Information::

            backend_options: Is a dict of options for the backend. It may contain
                * "initial_statevector": vector_like
                * "chop_threshold": double

            The "initial_statevector" option specifies a custom initial
            statevector for the simulator to be used instead of the all
            zero state. This size of this vector must be correct for the number
            of qubits in all experiments in the qobj.

            The "chop_threshold" option specifies a truncation value for
            setting small values to zero in the output statevector. The default
            value is 1e-15.

        Example::

            backend_options = {
                "initial_statevector": np.array([1, 0, 0, 1j]) / np.sqrt(2),
                "chop_threshold": 1e-15
            }
        """
        return super().run(qobj, backend_options=backend_options)

    def _validate(self, qobj):
        """Semantic validations of the qobj which cannot be done via schemas.
        Some of these may later move to backend schemas.
        1. No shots
        2. No measurements in the middle
        """
        n_qubits = qobj.config.n_qubits
        max_qubits = self.configuration().n_qubits
        # Reject circuits too large for host memory.
        if n_qubits > max_qubits:
            raise BasicAerError('Number of qubits {} '.format(n_qubits) +
                                'is greater than maximum ({}) '.format(max_qubits) +
                                'for "{}".'.format(self.name()))
        # A statevector run is deterministic: force a single shot at the
        # qobj level and on every individual experiment.
        if qobj.config.shots != 1:
            logger.info('"%s" only supports 1 shot. Setting shots=1.',
                        self.name())
            qobj.config.shots = 1
        for experiment in qobj.experiments:
            name = experiment.header.name
            if getattr(experiment.config, 'shots', 1) != 1:
                logger.info('"%s" only supports 1 shot. '
                            'Setting shots=1 for circuit "%s".',
                            self.name(), name)
                experiment.config.shots = 1
| 36.019108 | 93 | 0.567286 |
import logging
from math import log2
from qiskit.util import local_hardware_info
from qiskit.providers.basicaer.exceptions import BasicAerError
from qiskit.providers.models import QasmBackendConfiguration
from .qasm_simulator import QasmSimulatorPy
logger = logging.getLogger(__name__)
class StatevectorSimulatorPy(QasmSimulatorPy):
MAX_QUBITS_MEMORY = int(log2(local_hardware_info()['memory'] * (1024 ** 3) / 16))
DEFAULT_CONFIGURATION = {
'backend_name': 'statevector_simulator',
'backend_version': '1.0.0',
'n_qubits': min(24, MAX_QUBITS_MEMORY),
'url': 'https://github.com/Qiskit/qiskit-terra',
'simulator': True,
'local': True,
'conditional': True,
'open_pulse': False,
'memory': True,
'max_shots': 65536,
'coupling_map': None,
'description': 'A Python statevector simulator for qobj files',
'basis_gates': ['u1', 'u2', 'u3', 'cx', 'id', 'snapshot'],
'gates': [
{
'name': 'u1',
'parameters': ['lambda'],
'qasm_def': 'gate u1(lambda) q { U(0,0,lambda) q; }'
},
{
'name': 'u2',
'parameters': ['phi', 'lambda'],
'qasm_def': 'gate u2(phi,lambda) q { U(pi/2,phi,lambda) q; }'
},
{
'name': 'u3',
'parameters': ['theta', 'phi', 'lambda'],
'qasm_def': 'gate u3(theta,phi,lambda) q { U(theta,phi,lambda) q; }'
},
{
'name': 'cx',
'parameters': ['c', 't'],
'qasm_def': 'gate cx c,t { CX c,t; }'
},
{
'name': 'id',
'parameters': ['a'],
'qasm_def': 'gate id a { U(0,0,0) a; }'
},
{
'name': 'snapshot',
'parameters': ['slot'],
'qasm_def': 'gate snapshot(slot) q { TODO }'
}
]
}
SHOW_FINAL_STATE = True
def __init__(self, configuration=None, provider=None):
super().__init__(configuration=(
configuration or QasmBackendConfiguration.from_dict(self.DEFAULT_CONFIGURATION)),
provider=provider)
def run(self, qobj, backend_options=None):
return super().run(qobj, backend_options=backend_options)
def _validate(self, qobj):
n_qubits = qobj.config.n_qubits
max_qubits = self.configuration().n_qubits
if n_qubits > max_qubits:
raise BasicAerError('Number of qubits {} '.format(n_qubits) +
'is greater than maximum ({}) '.format(max_qubits) +
'for "{}".'.format(self.name()))
if qobj.config.shots != 1:
logger.info('"%s" only supports 1 shot. Setting shots=1.',
self.name())
qobj.config.shots = 1
for experiment in qobj.experiments:
name = experiment.header.name
if getattr(experiment.config, 'shots', 1) != 1:
logger.info('"%s" only supports 1 shot. '
'Setting shots=1 for circuit "%s".',
self.name(), name)
experiment.config.shots = 1
| true | true |
f720d6c78dc5035a3c9b881b6fc3670b51d08456 | 3,919 | py | Python | myprojectenv/lib/python3.5/site-packages/ansible/modules/windows/win_unzip.py | lancerenteria/doFlask | 2d4e242469b108c6c8316ee18a540307497bfb53 | [
"MIT"
] | null | null | null | myprojectenv/lib/python3.5/site-packages/ansible/modules/windows/win_unzip.py | lancerenteria/doFlask | 2d4e242469b108c6c8316ee18a540307497bfb53 | [
"MIT"
] | null | null | null | myprojectenv/lib/python3.5/site-packages/ansible/modules/windows/win_unzip.py | lancerenteria/doFlask | 2d4e242469b108c6c8316ee18a540307497bfb53 | [
"MIT"
] | null | null | null | #!/usr/bin/python
# -*- coding: utf-8 -*-
# (c) 2015, Phil Schwartz <schwartzmx@gmail.com>
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
# this is a windows documentation stub. actual code lives in the .ps1
# file of the same name
# Module support metadata consumed by ansible-doc and release tooling.
ANSIBLE_METADATA = {'metadata_version': '1.0',
                    'status': ['preview'],
                    'supported_by': 'community'}
DOCUMENTATION = r'''
---
module: win_unzip
version_added: "2.0"
short_description: Unzips compressed files and archives on the Windows node
description:
- Unzips compressed files and archives.
- Supports .zip files natively
- Supports other formats supported by the Powershell Community Extensions (PSCX) module (basically everything 7zip supports)
requirements:
- PSCX
options:
src:
description:
- File to be unzipped (provide absolute path)
required: true
dest:
description:
- Destination of zip file (provide absolute path of directory). If it does not exist, the directory will be created.
required: true
rm:
description:
- Remove the zip file, after unzipping
required: no
choices:
- true
- false
- yes
- no
default: false
recurse:
description:
- Recursively expand zipped files within the src file.
required: no
default: false
choices:
- true
- false
- yes
- no
creates:
description:
- If this file or directory exists the specified src will not be extracted.
required: no
default: null
notes:
- For extracting any compression types other than .zip, the PowerShellCommunityExtensions (PSCX) Module is required. This module (in conjunction with PSCX)
has the ability to recursively unzip files within the src zip file provided and also functionality for many other compression types. If the destination
directory does not exist, it will be created before unzipping the file. Specifying rm parameter will force removal of the src file after extraction.
author: Phil Schwartz
'''
EXAMPLES = r'''
# This unzips a library that was downloaded with win_get_url, and removes the file after extraction
# $ ansible -i hosts -m win_unzip -a "src=C:\\LibraryToUnzip.zip dest=C:\\Lib rm=true" all
# Playbook example
# Simple unzip
---
- name: Unzip a bz2 (BZip) file
win_unzip:
src: C:\Users\Phil\Logs.bz2
dest: C:\Users\Phil\OldLogs
creates: C:\Users\Phil\OldLogs
# This playbook example unzips a .zip file and recursively decompresses the contained .gz files and removes all unneeded compressed files after completion.
- name: Unzip ApplicationLogs.zip and decompress all GZipped log files
hosts: all
gather_facts: false
tasks:
- name: Recursively decompress GZ files in ApplicationLogs.zip
win_unzip:
src: C:\Downloads\ApplicationLogs.zip
dest: C:\Application\Logs
recurse: yes
rm: true
# Install PSCX to use for extracting a gz file
- name: Grab PSCX msi
win_get_url:
url: http://download-codeplex.sec.s-msft.com/Download/Release?ProjectName=pscx&DownloadId=923562&FileTime=130585918034470000&Build=20959
dest: C:\pscx.msi
- name: Install PSCX
win_msi:
path: C:\pscx.msi
- name: Unzip gz log
win_unzip:
src: C:\Logs\application-error-logs.gz
dest: C:\ExtractedLogs\application-error-logs
'''
| 32.932773 | 156 | 0.713958 |
ANSIBLE_METADATA = {'metadata_version': '1.0',
'status': ['preview'],
'supported_by': 'community'}
DOCUMENTATION = r'''
---
module: win_unzip
version_added: "2.0"
short_description: Unzips compressed files and archives on the Windows node
description:
- Unzips compressed files and archives.
- Supports .zip files natively
- Supports other formats supported by the Powershell Community Extensions (PSCX) module (basically everything 7zip supports)
requirements:
- PSCX
options:
src:
description:
- File to be unzipped (provide absolute path)
required: true
dest:
description:
- Destination of zip file (provide absolute path of directory). If it does not exist, the directory will be created.
required: true
rm:
description:
- Remove the zip file, after unzipping
required: no
choices:
- true
- false
- yes
- no
default: false
recurse:
description:
- Recursively expand zipped files within the src file.
required: no
default: false
choices:
- true
- false
- yes
- no
creates:
description:
- If this file or directory exists the specified src will not be extracted.
required: no
default: null
notes:
- For extracting any compression types other than .zip, the PowerShellCommunityExtensions (PSCX) Module is required. This module (in conjunction with PSCX)
has the ability to recursively unzip files within the src zip file provided and also functionality for many other compression types. If the destination
directory does not exist, it will be created before unzipping the file. Specifying rm parameter will force removal of the src file after extraction.
author: Phil Schwartz
'''
EXAMPLES = r'''
# This unzips a library that was downloaded with win_get_url, and removes the file after extraction
# $ ansible -i hosts -m win_unzip -a "src=C:\\LibraryToUnzip.zip dest=C:\\Lib rm=true" all
# Playbook example
# Simple unzip
---
- name: Unzip a bz2 (BZip) file
win_unzip:
src: C:\Users\Phil\Logs.bz2
dest: C:\Users\Phil\OldLogs
creates: C:\Users\Phil\OldLogs
# This playbook example unzips a .zip file and recursively decompresses the contained .gz files and removes all unneeded compressed files after completion.
- name: Unzip ApplicationLogs.zip and decompress all GZipped log files
hosts: all
gather_facts: false
tasks:
- name: Recursively decompress GZ files in ApplicationLogs.zip
win_unzip:
src: C:\Downloads\ApplicationLogs.zip
dest: C:\Application\Logs
recurse: yes
rm: true
# Install PSCX to use for extracting a gz file
- name: Grab PSCX msi
win_get_url:
url: http://download-codeplex.sec.s-msft.com/Download/Release?ProjectName=pscx&DownloadId=923562&FileTime=130585918034470000&Build=20959
dest: C:\pscx.msi
- name: Install PSCX
win_msi:
path: C:\pscx.msi
- name: Unzip gz log
win_unzip:
src: C:\Logs\application-error-logs.gz
dest: C:\ExtractedLogs\application-error-logs
'''
| true | true |
f720d7542161f6d3c83a81ed0d3c647a9030afd4 | 259 | py | Python | mmaction/apis/__init__.py | HypnosXC/mmaction2 | a26d5f981449445a5e22a0a60d8b285e06c3dd6e | [
"Apache-2.0"
] | 648 | 2021-06-24T19:33:09.000Z | 2022-03-31T06:27:24.000Z | mmaction/apis/__init__.py | xumingze0308/mmaction2 | 777546f27f8f5a3c83e10d966e2149be2fc9fa31 | [
"Apache-2.0"
] | 98 | 2020-01-21T09:41:30.000Z | 2022-03-12T00:53:06.000Z | mmaction/apis/__init__.py | xumingze0308/mmaction2 | 777546f27f8f5a3c83e10d966e2149be2fc9fa31 | [
"Apache-2.0"
] | 233 | 2020-01-18T03:46:27.000Z | 2022-03-19T03:17:47.000Z | from .inference import inference_recognizer, init_recognizer
from .test import multi_gpu_test, single_gpu_test
from .train import train_model
__all__ = [
'train_model', 'init_recognizer', 'inference_recognizer', 'multi_gpu_test',
'single_gpu_test'
]
| 28.777778 | 79 | 0.791506 | from .inference import inference_recognizer, init_recognizer
from .test import multi_gpu_test, single_gpu_test
from .train import train_model
__all__ = [
'train_model', 'init_recognizer', 'inference_recognizer', 'multi_gpu_test',
'single_gpu_test'
]
| true | true |
f720d77ecc540423a6a6545f9e50c117ad1c08db | 2,579 | py | Python | se3_transformer/model/layers/linear.py | RosettaCommons/RFDesign | b404b8b2c57f89c047529c30259aeeb8f6012b61 | [
"MIT"
] | 45 | 2022-01-12T04:39:36.000Z | 2022-03-25T12:33:36.000Z | se3_transformer/model/layers/linear.py | RosettaCommons/RFDesign | b404b8b2c57f89c047529c30259aeeb8f6012b61 | [
"MIT"
] | 6 | 2022-01-15T16:48:39.000Z | 2022-03-15T16:20:34.000Z | se3_transformer/model/layers/linear.py | RosettaCommons/RFDesign | b404b8b2c57f89c047529c30259aeeb8f6012b61 | [
"MIT"
] | 10 | 2022-01-12T11:28:03.000Z | 2022-03-30T11:36:41.000Z | # Copyright (c) 2021, NVIDIA CORPORATION & AFFILIATES. All rights reserved.
#
# Permission is hereby granted, free of charge, to any person obtaining a
# copy of this software and associated documentation files (the "Software"),
# to deal in the Software without restriction, including without limitation
# the rights to use, copy, modify, merge, publish, distribute, sublicense,
# and/or sell copies of the Software, and to permit persons to whom the
# Software is furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
# THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
# FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
# DEALINGS IN THE SOFTWARE.
#
# SPDX-FileCopyrightText: Copyright (c) 2021 NVIDIA CORPORATION & AFFILIATES
# SPDX-License-Identifier: MIT
from typing import Dict
import numpy as np
import torch
import torch.nn as nn
from torch import Tensor
from se3_transformer.model.fiber import Fiber
class LinearSE3(nn.Module):
    """
    Graph Linear SE(3)-equivariant layer, equivalent to a 1x1 convolution.
    Maps a fiber to a fiber with the same degrees (channels may be different).
    No interaction between degrees, but interaction between channels.

        type-0 features (C_0 channels) ────> Linear(bias=False) ────> type-0 features (C'_0 channels)
        type-1 features (C_1 channels) ────> Linear(bias=False) ────> type-1 features (C'_1 channels)
                                                 :
        type-k features (C_k channels) ────> Linear(bias=False) ────> type-k features (C'_k channels)
    """

    def __init__(self, fiber_in: Fiber, fiber_out: Fiber):
        super().__init__()
        # One weight matrix per output degree, scaled by 1/sqrt(fan_in)
        # so activations keep roughly unit variance at initialization.
        self.weights = nn.ParameterDict({
            str(degree_out): nn.Parameter(
                torch.randn(channels_out, fiber_in[degree_out]) / np.sqrt(fiber_in[degree_out]))
            for degree_out, channels_out in fiber_out
        })

    def forward(self, features: Dict[str, Tensor], *args, **kwargs) -> Dict[str, Tensor]:
        # Fix: the loop unpacked each weight but then re-indexed the
        # ParameterDict, leaving `weight` unused; use the unpacked tensor.
        return {
            degree: weight @ features[degree]
            for degree, weight in self.weights.items()
        }
| 42.983333 | 97 | 0.703761 |
from typing import Dict
import numpy as np
import torch
import torch.nn as nn
from torch import Tensor
from se3_transformer.model.fiber import Fiber
class LinearSE3(nn.Module):
def __init__(self, fiber_in: Fiber, fiber_out: Fiber):
super().__init__()
self.weights = nn.ParameterDict({
str(degree_out): nn.Parameter(
torch.randn(channels_out, fiber_in[degree_out]) / np.sqrt(fiber_in[degree_out]))
for degree_out, channels_out in fiber_out
})
def forward(self, features: Dict[str, Tensor], *args, **kwargs) -> Dict[str, Tensor]:
return {
degree: self.weights[degree] @ features[degree]
for degree, weight in self.weights.items()
}
| true | true |
f720d79b4d6d96c43d1bfceebd505df12ce179cf | 1,524 | py | Python | plotly/validators/streamtube/colorbar/_titlefont.py | gnestor/plotly.py | a8ae062795ddbf9867b8578fe6d9e244948c15ff | [
"MIT"
] | 12 | 2020-04-18T18:10:22.000Z | 2021-12-06T10:11:15.000Z | plotly/validators/streamtube/colorbar/_titlefont.py | gnestor/plotly.py | a8ae062795ddbf9867b8578fe6d9e244948c15ff | [
"MIT"
] | 1 | 2020-12-15T16:56:11.000Z | 2020-12-15T16:56:11.000Z | plotly/validators/streamtube/colorbar/_titlefont.py | gnestor/plotly.py | a8ae062795ddbf9867b8578fe6d9e244948c15ff | [
"MIT"
] | 6 | 2020-04-18T23:07:08.000Z | 2021-11-18T07:53:06.000Z | import _plotly_utils.basevalidators
class TitlefontValidator(_plotly_utils.basevalidators.CompoundValidator):
def __init__(
self,
plotly_name='titlefont',
parent_name='streamtube.colorbar',
**kwargs
):
super(TitlefontValidator, self).__init__(
plotly_name=plotly_name,
parent_name=parent_name,
data_class_str=kwargs.pop('data_class_str', 'Titlefont'),
data_docs=kwargs.pop(
'data_docs', """
color
family
HTML font family - the typeface that will be
applied by the web browser. The web browser
will only be able to apply a font if it is
available on the system which it operates.
Provide multiple font families, separated by
commas, to indicate the preference in which to
apply fonts if they aren't available on the
system. The plotly service (at https://plot.ly
or on-premise) generates images on a server,
where only a select number of fonts are
installed and supported. These include "Arial",
"Balto", "Courier New", "Droid Sans",, "Droid
Serif", "Droid Sans Mono", "Gravitas One", "Old
Standard TT", "Open Sans", "Overpass", "PT Sans
Narrow", "Raleway", "Times New Roman".
size
"""
),
**kwargs
)
| 36.285714 | 73 | 0.557743 | import _plotly_utils.basevalidators
class TitlefontValidator(_plotly_utils.basevalidators.CompoundValidator):
def __init__(
self,
plotly_name='titlefont',
parent_name='streamtube.colorbar',
**kwargs
):
super(TitlefontValidator, self).__init__(
plotly_name=plotly_name,
parent_name=parent_name,
data_class_str=kwargs.pop('data_class_str', 'Titlefont'),
data_docs=kwargs.pop(
'data_docs', """
color
family
HTML font family - the typeface that will be
applied by the web browser. The web browser
will only be able to apply a font if it is
available on the system which it operates.
Provide multiple font families, separated by
commas, to indicate the preference in which to
apply fonts if they aren't available on the
system. The plotly service (at https://plot.ly
or on-premise) generates images on a server,
where only a select number of fonts are
installed and supported. These include "Arial",
"Balto", "Courier New", "Droid Sans",, "Droid
Serif", "Droid Sans Mono", "Gravitas One", "Old
Standard TT", "Open Sans", "Overpass", "PT Sans
Narrow", "Raleway", "Times New Roman".
size
"""
),
**kwargs
)
| true | true |
f720d9caab26b0c898d32c3bc5d19d61e2797724 | 7,527 | py | Python | divvydata/historical_data.py | chrisluedtke/divvy-data-analysis | 441fa9028ed4bb77ad47e8109a8be749ea1d30b1 | [
"MIT"
] | 2 | 2019-02-09T12:54:02.000Z | 2019-02-11T23:02:35.000Z | divvydata/historical_data.py | chrisluedtke/divvy-data-analysis | 441fa9028ed4bb77ad47e8109a8be749ea1d30b1 | [
"MIT"
] | null | null | null | divvydata/historical_data.py | chrisluedtke/divvy-data-analysis | 441fa9028ed4bb77ad47e8109a8be749ea1d30b1 | [
"MIT"
] | null | null | null | """
Pulls data from:
https://www.divvybikes.com/system-data
https://s3.amazonaws.com/divvy-data/tripdata
"""
from io import BytesIO
import os
import re
import requests
from zipfile import ZipFile
from typing import List
from lxml import html
import pandas as pd
from .stations_feed import StationsFeed
STN_DT_FORM = {
'2013': "%m/%d/%Y", # Not labeled for quarters
'2014_Q1Q2': None, # xlsx file
'2014_Q3Q4': "%m/%d/%Y %H:%M",
'2015': None, # no date column and not labeled for quarters
'2016_Q1Q2': "%m/%d/%Y",
'2016_Q3': "%m/%d/%Y",
'2016_Q4': "%m/%d/%Y",
'2017_Q1Q2': "%m/%d/%Y %H:%M:%S",
'2017_Q3Q4': "%m/%d/%Y %H:%M",
}
STN_COL_MAP = {
'latitude': 'lat',
'longitude': 'lon',
'dateCreated': 'online_date',
'online date': 'online_date',
}
RD_DT_FORM = {
'2013': "%Y-%m-%d %H:%M", # Not labeled for quarters
'2014_Q1Q2': "%m/%d/%Y %H:%M",
'2014_Q3': "%m/%d/%Y %H:%M",
'2014_Q4': "%m/%d/%Y %H:%M",
'2015_Q1': "%m/%d/%Y %H:%M",
'2015_Q2': "%m/%d/%Y %H:%M",
'2015': "%m/%d/%Y %H:%M", # Q3 labeled as month integer
'2015_Q4': "%m/%d/%Y %H:%M",
'2016_Q1': "%m/%d/%Y %H:%M",
'2016': "%m/%d/%Y %H:%M", # Q2 labeled as month integer
'2016_Q3': "%m/%d/%Y %H:%M:%S",
'2016_Q4': "%m/%d/%Y %H:%M:%S",
'2017_Q1': "%m/%d/%Y %H:%M:%S",
'2017_Q2': "%m/%d/%Y %H:%M:%S",
'2017_Q3': "%m/%d/%Y %H:%M:%S",
'2017_Q4': "%m/%d/%Y %H:%M",
'2018_Q1': "%Y-%m-%d %H:%M:%S",
'2018_Q2': "%Y-%m-%d %H:%M:%S",
'2018_Q3': "%Y-%m-%d %H:%M:%S",
'2018_Q4': "%Y-%m-%d %H:%M:%S",
}
RD_COL_MAP = {
'01 - Rental Details Rental ID': 'trip_id',
'01 - Rental Details Local Start Time': 'start_time',
'01 - Rental Details Local End Time': 'end_time',
'01 - Rental Details Bike ID': 'bikeid',
'01 - Rental Details Duration In Seconds Uncapped': 'tripduration',
'03 - Rental Start Station ID': 'from_station_id',
'03 - Rental Start Station Name': 'from_station_name',
'02 - Rental End Station ID': 'to_station_id',
'02 - Rental End Station Name': 'to_station_name',
'User Type': 'usertype',
'Member Gender': 'gender',
'05 - Member Details Member Birthday Year': 'birthyear',
'stoptime': 'end_time',
'starttime': 'start_time',
'birthday': 'birthyear',
}
def parse_zip_urls_from_url(url):
r = requests.get(url)
webpage = html.fromstring(r.content)
base_source = 'https://s3.amazonaws.com/divvy-data/tripdata/'
urls = [url for url in set(webpage.xpath('//a/@href'))
if (base_source in url and url.endswith('.zip'))]
return urls
def year_lookup_to_date(yr_lookup: str) -> str:
q_map = {
'Q1': '03-31',
'Q2': '06-30',
'Q3': '09-30',
'Q4': '12-31',
}
yr_l_splt = yr_lookup.split('_')
q = yr_l_splt[-1][-2:]
date = q_map.get(q, '12-31')
date = f'{yr_l_splt[0]}-{date}'
return date
def get_current_stations():
"""Pulls most recent data from Divvy JSON feed.
Necessar because Divvy did not provide 2018 station data.
"""
df = StationsFeed().get_current_data()
cols = ['id', 'stationName', 'latitude', 'longitude',
'totalDocks', 'lastCommunicationTime']
df = df[cols].rename(columns={
'stationName': 'name',
'lastCommunicationTime': 'as_of_date',
'totalDocks': 'dpcapacity'
})
df = df.rename(columns=STN_COL_MAP)
return df
def process_ride_df(z, fpath, year_lookup):
df = (pd.read_csv(z.open(fpath))
.rename(columns=RD_COL_MAP))
df['start_time'] = pd.to_datetime(
df['start_time'],
format=RD_DT_FORM.get(year_lookup, None),
errors='coerce'
)
df['end_time'] = pd.to_datetime(
df['end_time'],
format=RD_DT_FORM.get(year_lookup, None),
errors='coerce'
)
return df
def process_station_df(z, fpath, year_lookup):
if fpath.endswith('.csv'):
df = pd.read_csv(z.open(fpath))
else: # must be '.xlsx'
df = pd.read_excel(z.open(fpath))
df = df.rename(columns=STN_COL_MAP)
df['as_of_date'] = year_lookup_to_date(year_lookup)
df['as_of_date'] = pd.to_datetime(df['as_of_date'])
if 'online_date' in df:
df['online_date'] = pd.to_datetime(
df['online_date'],
format=STN_DT_FORM.get(year_lookup, None),
errors='coerce'
)
return df
def combine_ride_dfs(dfs: List[pd.DataFrame]) -> pd.DataFrame:
dfs = (pd.concat(dfs, ignore_index=True, sort=True)
.sort_values('start_time')
.reset_index(drop=True))
dfs['tripduration'] = (
dfs.tripduration.astype(str).str.replace(',', '').astype(float)
)
cols = ['trip_id', 'bikeid', 'start_time', 'end_time', 'tripduration',
'from_station_id', 'from_station_name', 'to_station_id',
'to_station_name', 'usertype', 'gender', 'birthyear']
dfs = dfs[[col for col in cols if col in dfs]]
return dfs
def combine_station_dfs(dfs: List[pd.DataFrame]) -> pd.DataFrame:
dfs = (pd.concat(dfs, ignore_index=True, sort=True)
.sort_values(['id', 'as_of_date'])
.reset_index(drop=True))
# excludes ['city', 'Unnamed: 7']
cols = ['id', 'name', 'as_of_date', 'lat', 'lon', 'dpcapacity',
'online_date', 'landmark']
dfs = dfs[[col for col in cols if col in dfs]]
return dfs
def get_historical_data(years: List[str], write_to: str = '', rides=True,
stations=True):
"""Gathers and cleans historical Divvy data
write_to: optional local folder path to extract zip files to
returns: (pandas.DataFrame of rides, pandas.DataFrame of stations)
"""
if isinstance(years, str):
years = [years]
ride_dfs = []
station_dfs = []
if not (rides or stations):
return ride_dfs, station_dfs
urls = parse_zip_urls_from_url('https://www.divvybikes.com/system-data')
for url in sorted(urls):
z_fn = url.split('/')[-1]
z_year = re.findall(r'20\d{2}', z_fn)[0]
if z_year not in years:
continue
print(url)
r = requests.get(url)
with ZipFile(BytesIO(r.content)) as z:
if write_to:
write_path = os.path.join(write_to, z_fn.replace('.zip', ''))
z.extractall(write_path)
for fpath in z.namelist():
fn = fpath.split('/')[-1]
if fn.endswith(('.csv', '.xlsx')) and not fn.startswith('.'):
quarter = re.findall('Q[1-4]', fn)
if quarter:
year_lookup = f"{z_year}_{''.join(quarter)}"
else:
year_lookup = z_year
else:
continue
if rides and '_trips_' in fn.lower():
print(fn, year_lookup)
df = process_ride_df(z, fpath, year_lookup)
ride_dfs.append(df)
elif stations and '_stations_' in fn.lower():
print(fn, year_lookup)
df = process_station_df(z, fpath, year_lookup)
station_dfs.append(df)
if rides:
ride_dfs = combine_ride_dfs(ride_dfs)
if stations:
if '2018' in years:
df = get_current_stations()
station_dfs.append(df)
station_dfs = combine_station_dfs(station_dfs)
return ride_dfs, station_dfs
| 29.287938 | 77 | 0.563571 | from io import BytesIO
import os
import re
import requests
from zipfile import ZipFile
from typing import List
from lxml import html
import pandas as pd
from .stations_feed import StationsFeed
STN_DT_FORM = {
'2013': "%m/%d/%Y",
'2014_Q1Q2': None,
'2014_Q3Q4': "%m/%d/%Y %H:%M",
'2015': None,
'2016_Q1Q2': "%m/%d/%Y",
'2016_Q3': "%m/%d/%Y",
'2016_Q4': "%m/%d/%Y",
'2017_Q1Q2': "%m/%d/%Y %H:%M:%S",
'2017_Q3Q4': "%m/%d/%Y %H:%M",
}
STN_COL_MAP = {
'latitude': 'lat',
'longitude': 'lon',
'dateCreated': 'online_date',
'online date': 'online_date',
}
RD_DT_FORM = {
'2013': "%Y-%m-%d %H:%M",
'2014_Q1Q2': "%m/%d/%Y %H:%M",
'2014_Q3': "%m/%d/%Y %H:%M",
'2014_Q4': "%m/%d/%Y %H:%M",
'2015_Q1': "%m/%d/%Y %H:%M",
'2015_Q2': "%m/%d/%Y %H:%M",
'2015': "%m/%d/%Y %H:%M",
'2015_Q4': "%m/%d/%Y %H:%M",
'2016_Q1': "%m/%d/%Y %H:%M",
'2016': "%m/%d/%Y %H:%M",
'2016_Q3': "%m/%d/%Y %H:%M:%S",
'2016_Q4': "%m/%d/%Y %H:%M:%S",
'2017_Q1': "%m/%d/%Y %H:%M:%S",
'2017_Q2': "%m/%d/%Y %H:%M:%S",
'2017_Q3': "%m/%d/%Y %H:%M:%S",
'2017_Q4': "%m/%d/%Y %H:%M",
'2018_Q1': "%Y-%m-%d %H:%M:%S",
'2018_Q2': "%Y-%m-%d %H:%M:%S",
'2018_Q3': "%Y-%m-%d %H:%M:%S",
'2018_Q4': "%Y-%m-%d %H:%M:%S",
}
RD_COL_MAP = {
'01 - Rental Details Rental ID': 'trip_id',
'01 - Rental Details Local Start Time': 'start_time',
'01 - Rental Details Local End Time': 'end_time',
'01 - Rental Details Bike ID': 'bikeid',
'01 - Rental Details Duration In Seconds Uncapped': 'tripduration',
'03 - Rental Start Station ID': 'from_station_id',
'03 - Rental Start Station Name': 'from_station_name',
'02 - Rental End Station ID': 'to_station_id',
'02 - Rental End Station Name': 'to_station_name',
'User Type': 'usertype',
'Member Gender': 'gender',
'05 - Member Details Member Birthday Year': 'birthyear',
'stoptime': 'end_time',
'starttime': 'start_time',
'birthday': 'birthyear',
}
def parse_zip_urls_from_url(url):
r = requests.get(url)
webpage = html.fromstring(r.content)
base_source = 'https://s3.amazonaws.com/divvy-data/tripdata/'
urls = [url for url in set(webpage.xpath('//a/@href'))
if (base_source in url and url.endswith('.zip'))]
return urls
def year_lookup_to_date(yr_lookup: str) -> str:
q_map = {
'Q1': '03-31',
'Q2': '06-30',
'Q3': '09-30',
'Q4': '12-31',
}
yr_l_splt = yr_lookup.split('_')
q = yr_l_splt[-1][-2:]
date = q_map.get(q, '12-31')
date = f'{yr_l_splt[0]}-{date}'
return date
def get_current_stations():
df = StationsFeed().get_current_data()
cols = ['id', 'stationName', 'latitude', 'longitude',
'totalDocks', 'lastCommunicationTime']
df = df[cols].rename(columns={
'stationName': 'name',
'lastCommunicationTime': 'as_of_date',
'totalDocks': 'dpcapacity'
})
df = df.rename(columns=STN_COL_MAP)
return df
def process_ride_df(z, fpath, year_lookup):
df = (pd.read_csv(z.open(fpath))
.rename(columns=RD_COL_MAP))
df['start_time'] = pd.to_datetime(
df['start_time'],
format=RD_DT_FORM.get(year_lookup, None),
errors='coerce'
)
df['end_time'] = pd.to_datetime(
df['end_time'],
format=RD_DT_FORM.get(year_lookup, None),
errors='coerce'
)
return df
def process_station_df(z, fpath, year_lookup):
if fpath.endswith('.csv'):
df = pd.read_csv(z.open(fpath))
else:
df = pd.read_excel(z.open(fpath))
df = df.rename(columns=STN_COL_MAP)
df['as_of_date'] = year_lookup_to_date(year_lookup)
df['as_of_date'] = pd.to_datetime(df['as_of_date'])
if 'online_date' in df:
df['online_date'] = pd.to_datetime(
df['online_date'],
format=STN_DT_FORM.get(year_lookup, None),
errors='coerce'
)
return df
def combine_ride_dfs(dfs: List[pd.DataFrame]) -> pd.DataFrame:
dfs = (pd.concat(dfs, ignore_index=True, sort=True)
.sort_values('start_time')
.reset_index(drop=True))
dfs['tripduration'] = (
dfs.tripduration.astype(str).str.replace(',', '').astype(float)
)
cols = ['trip_id', 'bikeid', 'start_time', 'end_time', 'tripduration',
'from_station_id', 'from_station_name', 'to_station_id',
'to_station_name', 'usertype', 'gender', 'birthyear']
dfs = dfs[[col for col in cols if col in dfs]]
return dfs
def combine_station_dfs(dfs: List[pd.DataFrame]) -> pd.DataFrame:
dfs = (pd.concat(dfs, ignore_index=True, sort=True)
.sort_values(['id', 'as_of_date'])
.reset_index(drop=True))
cols = ['id', 'name', 'as_of_date', 'lat', 'lon', 'dpcapacity',
'online_date', 'landmark']
dfs = dfs[[col for col in cols if col in dfs]]
return dfs
def get_historical_data(years: List[str], write_to: str = '', rides=True,
stations=True):
if isinstance(years, str):
years = [years]
ride_dfs = []
station_dfs = []
if not (rides or stations):
return ride_dfs, station_dfs
urls = parse_zip_urls_from_url('https://www.divvybikes.com/system-data')
for url in sorted(urls):
z_fn = url.split('/')[-1]
z_year = re.findall(r'20\d{2}', z_fn)[0]
if z_year not in years:
continue
print(url)
r = requests.get(url)
with ZipFile(BytesIO(r.content)) as z:
if write_to:
write_path = os.path.join(write_to, z_fn.replace('.zip', ''))
z.extractall(write_path)
for fpath in z.namelist():
fn = fpath.split('/')[-1]
if fn.endswith(('.csv', '.xlsx')) and not fn.startswith('.'):
quarter = re.findall('Q[1-4]', fn)
if quarter:
year_lookup = f"{z_year}_{''.join(quarter)}"
else:
year_lookup = z_year
else:
continue
if rides and '_trips_' in fn.lower():
print(fn, year_lookup)
df = process_ride_df(z, fpath, year_lookup)
ride_dfs.append(df)
elif stations and '_stations_' in fn.lower():
print(fn, year_lookup)
df = process_station_df(z, fpath, year_lookup)
station_dfs.append(df)
if rides:
ride_dfs = combine_ride_dfs(ride_dfs)
if stations:
if '2018' in years:
df = get_current_stations()
station_dfs.append(df)
station_dfs = combine_station_dfs(station_dfs)
return ride_dfs, station_dfs
| true | true |
f720d9f5df4419371640fe5d3822b74acdb36bf0 | 35,757 | py | Python | incidentes/views.py | Alvaruz/ATMS | 962a1967e1654efe4d448891deb7881fa3addf85 | [
"MIT"
] | null | null | null | incidentes/views.py | Alvaruz/ATMS | 962a1967e1654efe4d448891deb7881fa3addf85 | [
"MIT"
] | null | null | null | incidentes/views.py | Alvaruz/ATMS | 962a1967e1654efe4d448891deb7881fa3addf85 | [
"MIT"
] | null | null | null | from django.shortcuts import render, redirect
from django.template import loader
from django.urls import reverse_lazy
from .models import *
from django.http import HttpResponse
from .forms import TicketForm
from django.views.generic import ListView, CreateView, UpdateView, DeleteView
from django.core.paginator import EmptyPage, PageNotAnInteger, Paginator
from django.db import connections
from django.db.models import Count
from django.http import JsonResponse
from django.core import serializers
from datetime import *
from django.utils import timezone
from django.utils.timezone import make_aware
# Create your views here.
def home(request):
    """Render the site landing page."""
    return render(request, "index2.html", context={})
def base(request):
    """Render the base layout template on its own."""
    return render(request, "base.html", context={})
def ticket_list(request):
    """Render the ticket-list template with an empty context."""
    return render(request, "ticket_list.html", context={})
def ticket_home(request):
    """Render the tickets landing page."""
    return render(request, "tickets2.html", context={})
def login(request):
    """Render the login page (no authentication logic lives here)."""
    return render(request, "login.html", context={})
def tickets(request):
    """Render the ticket list ordered newest first.

    The same queryset is exposed under several context keys because the
    template historically reads the list through different names.
    Removed: an unused ``Paginator`` instance and dead commented-out
    pagination code (pagination is handled by ``TicketListView``).
    """
    ticket = Ticket.objects.order_by('-fecha')
    context = {
        'ticket': ticket,
        'categoria': ticket,
        'grupo_destino': ticket,
        'fecha': ticket,
        'estado': ticket,
    }
    # render() is equivalent to loader.get_template(...).render(...) wrapped
    # in an HttpResponse, and is the conventional shortcut.
    return render(request, 'ticket_list.html', context)
def ticket_view(request):
    """Show the ticket creation form; persist it on a valid POST."""
    if request.method != 'POST':
        # Plain GET: present an unbound form.
        return render(request, 'ticket_form.html', {'form': TicketForm()})
    form = TicketForm(request.POST)
    if form.is_valid():
        form.save()
        print("formulario guardado")
        return redirect('tickets')
    # Invalid POST: re-render with the bound form so errors are shown.
    return render(request, 'ticket_form.html', {'form': form})
# Experimental class-based versions of the ticket views
class TicketListView(ListView):
    """Paginated ticket list restricted to one author.

    Removed: the unused class attribute ``listado_tickets`` (it evaluated
    ``Ticket.objects.all()`` at import time for no benefit) and dead
    commented-out pagination code -- ``paginate_by`` already paginates.
    """
    template_name = 'ticket_list.html'
    model = Ticket
    paginate_by = 25

    def get_queryset(self):
        # Only show tickets for the author captured by the URLconf
        # (expects an ``author_id`` URL kwarg).
        queryset = super(TicketListView, self).get_queryset()
        return queryset.filter(author_id=self.kwargs['author_id'])
class TicketAddView(CreateView):
    """Create a new Ticket through ``TicketForm``.

    The previous ``form_valid`` override called ``form.save()`` and then
    ``super().form_valid(form)``, which saves the form again
    (``ModelFormMixin.form_valid`` does ``self.object = form.save()``).
    The override added nothing, so it was removed; ``CreateView`` handles
    saving and the redirect to ``success_url``.
    """
    model = Ticket
    template_name = 'ticket_form2.html'
    form_class = TicketForm
    success_url = reverse_lazy('ticket_list')
def ticket_edit(request, pk):
    """Edit an existing ticket.

    GET renders the form pre-populated from the ticket; POST validates,
    saves and redirects to the ticket list.  Fixed: debug leftovers that
    opened ``wtf.txt`` and called ``f.write(form)`` -- ``write()`` requires
    a ``str``, so every POST raised ``TypeError`` before validation ran.
    """
    ticket = Ticket.objects.get(id=pk)
    if request.method == 'GET':
        form = TicketForm(instance=ticket)
    else:
        form = TicketForm(request.POST, instance=ticket)
        if form.is_valid():
            form.save()
            return redirect('ticket_list')
    # GET, or POST with validation errors: show the form.
    return render(request, 'ticket_form2.html', {'form': form})
class TicketEditView(UpdateView):
    """Update an existing Ticket using the shared ``TicketForm``."""
    model = Ticket
    form_class = TicketForm
    template_name = 'ticket_form2.html'
    paginate_by = 25
    success_url = reverse_lazy('ticket_list')
class TicketDeleteView(DeleteView):
    """Confirm and delete a Ticket, then return to the list view."""
    model = Ticket
    form_class = TicketForm
    template_name = 'ticket_delete2.html'
    success_url = reverse_lazy('ticket_list')
def estadisticas_main(request):
    """Render the statistics landing page."""
    return render(request, 'estadisticas_main.html', context={})
def apimes(request):
    """JSON endpoint: ticket counts grouped by month of ``fecha``.

    Returns a JSON list of ``{"month": ..., "count_items": ...}`` rows.
    NOTE(review): ``QuerySet.extra`` is discouraged in modern Django;
    ``annotate(month=TruncMonth('fecha'))`` is the supported equivalent --
    confirm before migrating.
    """
    # date_trunc_sql builds backend-specific SQL that truncates the
    # timestamp to the first day of its month, so grouping by it yields
    # one row per calendar month.
    data = Ticket.objects.all() \
        .extra(select={'month': connections[Ticket.objects.db].ops.date_trunc_sql('month', 'fecha')}) \
        .values('month') \
        .annotate(count_items=Count('id'))
    # safe=False is required because the top-level JSON value is a list.
    return JsonResponse(list(data), safe=False)
def estadisticas_total(request):
    """Render all-time ticket statistics.

    Counts tickets per category, destination group, state and user, plus
    the aggregate ``usuario`` total, and renders them in
    ``estadisticas_global.html``.  The ~30 copy-pasted ``.count()`` lines
    were collapsed into key -> id mappings; the context keys are unchanged.
    (The original's ``.only(...)`` calls were dropped: they do not affect
    ``COUNT`` queries.)
    """
    qs = Ticket.objects.all()

    # Template context key -> model field id for each dimension.
    categorias = {
        'mantenimiento': 1,
        'vehiculo_mal_estacionado': 2,
        'vehiculo_descompuesto': 3,
        'manifestacion': 4,
        'cierre_de_calle': 5,
        'accidente': 6,
        'obras': 7,
        'obstaculo': 8,
        'congestionamiento': 9,
        'sincronizacion': 10,
        'semaforo_apagado': 11,
        'infracciones': 12,
        'led_foco': 13,
    }
    grupos = {
        'sistemas': 1,
        'redes': 2,
        'pmt_atms': 3,
        'pmt_otros': 4,
        'operadores': 5,
        'tecnicos': 6,
        'administrativa': 7,
        'jefatura': 8,
    }
    estados = {
        'pendiente': 1,
        'cerrado': 2,
        'atendido': 3,
        'vencido': 4,
    }
    usuarios = {
        'atms': 1,
        'jose': 2,
        'emilio': 3,
        'gustavo': 4,
        'elias': 25,
    }

    data = {}
    for nombre, pk in categorias.items():
        data[nombre] = qs.filter(categoria=pk).count()
    for nombre, pk in grupos.items():
        data[nombre] = qs.filter(grupo_destino=pk).count()
    for nombre, pk in estados.items():
        data[nombre] = qs.filter(estado=pk).count()
    for nombre, pk in usuarios.items():
        data[nombre] = qs.filter(usuario=pk).count()
    # Total tickets raised by the listed users.
    data['usuario'] = sum(data[nombre] for nombre in usuarios)

    return render(request, 'estadisticas_global.html', {'data': data})
def estadisticas_mes(request):
    """Render ticket statistics for the current calendar month.

    Counts tickets per category, destination group, state and user for
    the month of today's date, plus per-dimension totals, and renders
    ``estadisticas_mes.html``.  The context keys match the originals.
    NOTE(review): filtering only on ``fecha__month`` mixes the same month
    across different years; add ``fecha__year`` if that is unintended.
    """
    hoy = datetime.now().day
    mes = datetime.now().month
    qs = Ticket.objects.filter(fecha__month=mes)

    # Template context key -> model field id for each dimension.
    categorias = {
        'mantenimiento': 1,
        'vehiculo_mal_estacionado': 2,
        'vehiculo_descompuesto': 3,
        'manifestacion': 4,
        'cierre_de_calle': 5,
        'accidente': 6,
        'obras': 7,
        'obstaculo': 8,
        'congestionamiento': 9,
        'sincronizacion': 10,
        'semaforo_apagado': 11,
        'infracciones': 12,
        'led_foco': 13,
    }
    grupos = {
        'sistemas': 1,
        'redes': 2,
        'pmt_atms': 3,
        'pmt_otros': 4,
        'operadores': 5,
        'tecnicos': 6,
        'administrativa': 7,
        'jefatura': 8,
    }
    estados = {
        'pendiente': 1,
        'cerrado': 2,
        'atendido': 3,
        'vencido': 4,
    }
    usuarios = {
        'atms': 1,
        'jose': 2,
        'emilio': 3,
        'gustavo': 4,
        'elias': 25,
    }

    data = {}
    for nombre, pk in categorias.items():
        data[nombre] = qs.filter(categoria=pk).count()
    for nombre, pk in grupos.items():
        data[nombre] = qs.filter(grupo_destino=pk).count()
    for nombre, pk in estados.items():
        data[nombre] = qs.filter(estado=pk).count()
    for nombre, pk in usuarios.items():
        data[nombre] = qs.filter(usuario=pk).count()

    # Aggregate totals per dimension, matching the original context keys.
    data['categoria'] = sum(data[nombre] for nombre in categorias)
    data['grupo'] = sum(data[nombre] for nombre in grupos)
    data['estado'] = sum(data[nombre] for nombre in estados)
    data['usuario'] = sum(data[nombre] for nombre in usuarios)
    data['hoy'] = hoy
    data['mes'] = mes

    return render(request, 'estadisticas_mes.html', {'data': data})
def estadisticas_dia(request):
    """Render ticket statistics for today (current day and month).

    Counts tickets per category, destination group, state and user for
    today's day-of-month and month, plus per-dimension totals, and
    renders ``estadisticas_dia.html``.  Context keys match the originals.
    NOTE(review): the filter ignores the year, so the same day/month of a
    previous year is included; add ``fecha__year`` if that is unintended.
    """
    hoy = datetime.now().day
    mes = datetime.now().month
    qs = Ticket.objects.filter(fecha__day=hoy, fecha__month=mes)

    # Template context key -> model field id for each dimension.
    categorias = {
        'mantenimiento': 1,
        'vehiculo_mal_estacionado': 2,
        'vehiculo_descompuesto': 3,
        'manifestacion': 4,
        'cierre_de_calle': 5,
        'accidente': 6,
        'obras': 7,
        'obstaculo': 8,
        'congestionamiento': 9,
        'sincronizacion': 10,
        'semaforo_apagado': 11,
        'infracciones': 12,
        'led_foco': 13,
    }
    grupos = {
        'sistemas': 1,
        'redes': 2,
        'pmt_atms': 3,
        'pmt_otros': 4,
        'operadores': 5,
        'tecnicos': 6,
        'administrativa': 7,
        'jefatura': 8,
    }
    estados = {
        'pendiente': 1,
        'cerrado': 2,
        'atendido': 3,
        'vencido': 4,
    }
    usuarios = {
        'atms': 1,
        'jose': 2,
        'emilio': 3,
        'gustavo': 4,
        'elias': 25,
    }

    data = {}
    for nombre, pk in categorias.items():
        data[nombre] = qs.filter(categoria=pk).count()
    for nombre, pk in grupos.items():
        data[nombre] = qs.filter(grupo_destino=pk).count()
    for nombre, pk in estados.items():
        data[nombre] = qs.filter(estado=pk).count()
    for nombre, pk in usuarios.items():
        data[nombre] = qs.filter(usuario=pk).count()

    # Aggregate totals per dimension, matching the original context keys.
    data['categoria'] = sum(data[nombre] for nombre in categorias)
    data['grupo'] = sum(data[nombre] for nombre in grupos)
    data['estado'] = sum(data[nombre] for nombre in estados)
    data['usuario'] = sum(data[nombre] for nombre in usuarios)
    data['hoy'] = hoy
    data['mes'] = mes

    return render(request, 'estadisticas_dia.html', {'data': data})
def comunicaciones_estadisticas_mes(request):
    """Render the communications dashboard with this month's ticket counts.

    Counts tickets of the current month broken down by category,
    destination group and state, plus a total per breakdown, and passes
    everything to ``comunicaciones_estadisticas_mes.html`` as ``data``.
    """
    # NOTE(review): naive datetime, and filtering by fecha__month only (no
    # fecha__year) also counts the same month of previous years — confirm
    # this is intended.
    hoy = datetime.now().day
    mes = datetime.now().month
    base = Ticket.objects.filter(fecha__month=mes)

    # Name -> primary-key maps replace ~40 copy-pasted .filter().count()
    # lines; the resulting context keys are identical to the original.
    categorias = {
        "mantenimiento": 1, "vehiculo_mal_estacionado": 2,
        "vehiculo_descompuesto": 3, "manifestacion": 4,
        "cierre_de_calle": 5, "accidente": 6, "obras": 7,
        "obstaculo": 8, "congestionamiento": 9, "sincronizacion": 10,
        "semaforo_apagado": 11, "infracciones": 12, "led_foco": 13,
    }
    grupos = {
        "sistemas": 1, "redes": 2, "pmt_atms": 3, "pmt_otros": 4,
        "operadores": 5, "tecnicos": 6, "administrativa": 7, "jefatura": 8,
    }
    estados = {"pendiente": 1, "cerrado": 2, "atendido": 3, "vencido": 4}

    data = {}
    for nombre, pk in categorias.items():
        data[nombre] = base.filter(categoria=pk).count()
    for nombre, pk in grupos.items():
        data[nombre] = base.filter(grupo_destino=pk).count()
    for nombre, pk in estados.items():
        data[nombre] = base.filter(estado=pk).count()
    data.update({
        "hoy": hoy,
        "mes": mes,
        "categoria": sum(data[n] for n in categorias),
        "grupo": sum(data[n] for n in grupos),
        "estado": sum(data[n] for n in estados),
    })
    return render(request, 'comunicaciones_estadisticas_mes.html', {'data': data})
def comunicaciones_estadisticas_dia(request):
    """Render the communications dashboard with today's ticket counts.

    Same breakdowns as the monthly view (category / destination group /
    state, plus totals) but restricted to today's day-of-month and month,
    rendered via ``comunicaciones_estadisticas_hoy.html``.
    """
    # NOTE(review): naive datetime, and no fecha__year filter — the same
    # day+month of previous years is also counted; confirm intended.
    hoy = datetime.now().day
    mes = datetime.now().month
    base = Ticket.objects.filter(fecha__day=hoy, fecha__month=mes)

    # Name -> primary-key maps replace the copy-pasted query lines; the
    # resulting context keys are identical to the original.
    categorias = {
        "mantenimiento": 1, "vehiculo_mal_estacionado": 2,
        "vehiculo_descompuesto": 3, "manifestacion": 4,
        "cierre_de_calle": 5, "accidente": 6, "obras": 7,
        "obstaculo": 8, "congestionamiento": 9, "sincronizacion": 10,
        "semaforo_apagado": 11, "infracciones": 12, "led_foco": 13,
    }
    grupos = {
        "sistemas": 1, "redes": 2, "pmt_atms": 3, "pmt_otros": 4,
        "operadores": 5, "tecnicos": 6, "administrativa": 7, "jefatura": 8,
    }
    estados = {"pendiente": 1, "cerrado": 2, "atendido": 3, "vencido": 4}

    data = {}
    for nombre, pk in categorias.items():
        data[nombre] = base.filter(categoria=pk).count()
    for nombre, pk in grupos.items():
        data[nombre] = base.filter(grupo_destino=pk).count()
    for nombre, pk in estados.items():
        data[nombre] = base.filter(estado=pk).count()
    data.update({
        "hoy": hoy,
        "mes": mes,
        "categoria": sum(data[n] for n in categorias),
        "grupo": sum(data[n] for n in grupos),
        "estado": sum(data[n] for n in estados),
    })
    return render(request, 'comunicaciones_estadisticas_hoy.html', {'data': data})
def prensa_estadisticas_mes(request):
    """Render the press dashboard with this month's ticket counts.

    Like the communications views but with a reduced set of categories
    (traffic-related ones only, ids 2-9); rendered via
    ``prensa_estadisticas_mes.html``.
    """
    # NOTE(review): fecha__month without fecha__year also counts the same
    # month of previous years — confirm intended.
    hoy = datetime.now().day
    mes = datetime.now().month
    base = Ticket.objects.filter(fecha__month=mes)

    # Name -> primary-key maps replace the copy-pasted query lines; the
    # resulting context keys are identical to the original.
    categorias = {
        "vehiculo_mal_estacionado": 2, "vehiculo_descompuesto": 3,
        "manifestacion": 4, "cierre_de_calle": 5, "accidente": 6,
        "obras": 7, "obstaculo": 8, "congestionamiento": 9,
    }
    grupos = {
        "sistemas": 1, "redes": 2, "pmt_atms": 3, "pmt_otros": 4,
        "operadores": 5, "tecnicos": 6, "administrativa": 7, "jefatura": 8,
    }
    estados = {"pendiente": 1, "cerrado": 2, "atendido": 3, "vencido": 4}

    data = {}
    for nombre, pk in categorias.items():
        data[nombre] = base.filter(categoria=pk).count()
    for nombre, pk in grupos.items():
        data[nombre] = base.filter(grupo_destino=pk).count()
    for nombre, pk in estados.items():
        data[nombre] = base.filter(estado=pk).count()
    data.update({
        "hoy": hoy,
        "mes": mes,
        "categoria": sum(data[n] for n in categorias),
        "grupo": sum(data[n] for n in grupos),
        "estado": sum(data[n] for n in estados),
    })
    return render(request, 'prensa_estadisticas_mes.html', {'data': data})
# Sin uso (unused)
def prensa_estadisticas_dia(request):
    """Render the press dashboard with today's ticket counts.

    Same reduced category set as the monthly press view (ids 2-9) but
    restricted to today's day-of-month and month; rendered via
    ``prensa_estadisticas_hoy.html``.
    """
    # NOTE(review): no fecha__year filter — the same day+month of previous
    # years is also counted; confirm intended.
    hoy = datetime.now().day
    mes = datetime.now().month
    base = Ticket.objects.filter(fecha__day=hoy, fecha__month=mes)

    # Name -> primary-key maps replace the copy-pasted query lines; the
    # resulting context keys are identical to the original.
    categorias = {
        "vehiculo_mal_estacionado": 2, "vehiculo_descompuesto": 3,
        "manifestacion": 4, "cierre_de_calle": 5, "accidente": 6,
        "obras": 7, "obstaculo": 8, "congestionamiento": 9,
    }
    grupos = {
        "sistemas": 1, "redes": 2, "pmt_atms": 3, "pmt_otros": 4,
        "operadores": 5, "tecnicos": 6, "administrativa": 7, "jefatura": 8,
    }
    estados = {"pendiente": 1, "cerrado": 2, "atendido": 3, "vencido": 4}

    data = {}
    for nombre, pk in categorias.items():
        data[nombre] = base.filter(categoria=pk).count()
    for nombre, pk in grupos.items():
        data[nombre] = base.filter(grupo_destino=pk).count()
    for nombre, pk in estados.items():
        data[nombre] = base.filter(estado=pk).count()
    data.update({
        "hoy": hoy,
        "mes": mes,
        "categoria": sum(data[n] for n in categorias),
        "grupo": sum(data[n] for n in grupos),
        "estado": sum(data[n] for n in estados),
    })
    return render(request, 'prensa_estadisticas_hoy.html', {'data': data})
def global_versus(request):
    """Compare PMT-ATMS (grupo_destino=3) vs PMT-Otros (grupo_destino=4).

    For each of the two groups, counts the total tickets, the expired
    ones (estado=4), and the expired tickets per category, producing the
    same ``<grupo>_<métrica>`` context keys the original emitted.
    """
    # Category name -> primary key for the per-category expired counts.
    categorias = {
        "vehiculo_mal_estacionado": 2, "vehiculo_descompuesto": 3,
        "manifestacion": 4, "cierre_de_calle": 5, "accidente": 6,
        "obras": 7, "obstaculo": 8, "congestionamiento": 9,
        "infracciones_varias": 12,
    }
    grupos = {"pmt_atms": 3, "pmt_otros": 4}

    data = {}
    for nombre, grupo in grupos.items():
        data[nombre + "_total"] = Ticket.objects.filter(grupo_destino=grupo).count()
        data[nombre + "_vencidos"] = Ticket.objects.filter(
            grupo_destino=grupo, estado=4).count()
        for cat_nombre, cat in categorias.items():
            data[nombre + "_" + cat_nombre] = Ticket.objects.filter(
                grupo_destino=grupo, estado=4, categoria=cat).count()
    return render(request, 'global_versus.html', {'data': data})
# Mcal. López
# NOTE(review): the statements below execute at module import time.
# `mcal_lopez` builds a lazy queryset that is never evaluated or used;
# the two `.count()` calls DO hit the database on import and discard the
# result — they look like leftover exploration code and could be removed.
mcal_lopez = Ticket.objects.filter(ubicacion__contains='cal')
Ticket.objects.select_related('grupo_destino').filter(grupo_destino=3).count() # PMT Atms
Ticket.objects.select_related('grupo_destino').filter(grupo_destino=4).count() # PMT Otros | 44.090012 | 228 | 0.711833 | from django.shortcuts import render, redirect
from django.template import loader
from django.urls import reverse_lazy
from .models import *
from django.http import HttpResponse
from .forms import TicketForm
from django.views.generic import ListView, CreateView, UpdateView, DeleteView
from django.core.paginator import EmptyPage, PageNotAnInteger, Paginator
from django.db import connections
from django.db.models import Count
from django.http import JsonResponse
from django.core import serializers
from datetime import *
from django.utils import timezone
from django.utils.timezone import make_aware
def home(request):
    """Serve the site landing page."""
    template = "index2.html"
    return render(request, template, {})
def base(request):
    """Render the bare base layout template."""
    context = {}
    return render(request, "base.html", context)
def ticket_list(request):
    """Render the ticket list template with an empty context."""
    context = {}
    return render(request, "ticket_list.html", context)
def ticket_home(request):
    """Render the tickets home page."""
    template = "tickets2.html"
    return render(request, template, {})
def login(request):
    """Render the login page."""
    context = {}
    return render(request, "login.html", context)
def tickets(request):
    """List all tickets, newest first, via the ticket_list template.

    The original built a ``Paginator(ticket, 25)`` and never used it —
    that dead local is removed; the response is unchanged.
    """
    ticket = Ticket.objects.order_by('-fecha')
    template = loader.get_template('ticket_list.html')
    # All five keys reference the same queryset; kept unchanged so any
    # existing template lookups keep working.
    context = {
        'ticket': ticket,
        'categoria': ticket,
        'grupo_destino': ticket,
        'fecha': ticket,
        'estado': ticket,
    }
    return HttpResponse(template.render(context, request))
def ticket_view(request):
    """Create a ticket from a submitted form, or show a blank form on GET.

    On a valid POST the ticket is saved and the user is redirected to the
    ticket list; an invalid POST re-renders the bound form with errors.
    """
    if request.method != 'POST':
        return render(request, 'ticket_form.html', {'form': TicketForm()})
    form = TicketForm(request.POST)
    if form.is_valid():
        form.save()
        print("formulario guardado")
        return redirect('tickets')
    return render(request, 'ticket_form.html', {'form': form})
class TicketListView(ListView):
    """Paginated ticket list filtered by the ``author_id`` URL kwarg.

    NOTE(review): this block was corrupted in the source — the line
    ``yset(self):`` is the mangled remainder of ``def get_queryset(self):``
    (a SyntaxError as written); the method header is restored here.
    """
    template_name = 'ticket_list.html'
    model = Ticket
    paginate_by = 25
    # Evaluated lazily; kept from the original although nothing in this
    # class reads it.
    listado_tickets = Ticket.objects.all()

    def get_queryset(self):
        # Restrict the default queryset to the author captured in the URL.
        queryset = super(TicketListView, self).get_queryset()
        return queryset.filter(author_id=self.kwargs['author_id'])
class TicketAddView(CreateView):
    """Create a ticket via ``ticket_form2.html``.

    The original overrode ``form_valid`` to call ``form.save()`` and then
    ``super().form_valid(form)``; ``CreateView.form_valid`` saves the form
    itself, so the object was written to the database twice.  The
    redundant override is removed — one save, then redirect to
    ``success_url``, exactly as before.
    """
    model = Ticket
    template_name = 'ticket_form2.html'
    form_class = TicketForm
    success_url = reverse_lazy('ticket_list')
def ticket_edit(request, pk):
    """Edit the ticket identified by *pk*.

    GET renders the form bound to the existing ticket; a valid POST saves
    and redirects to the ticket list; an invalid POST re-renders the form
    with errors.

    Removed leftover debug code from the original: it opened ``wtf.txt``
    and called ``f.write(form)`` with the Form *object* — ``write``
    requires a str, so that line raised TypeError and leaked the open
    file handle — plus a stray ``print(form)``.
    """
    ticket = Ticket.objects.get(id=pk)
    if request.method == 'GET':
        form = TicketForm(instance=ticket)
    else:
        form = TicketForm(request.POST, instance=ticket)
        if form.is_valid():
            form.save()
            return redirect('ticket_list')
    return render(request, 'ticket_form2.html', {'form': form})
class TicketEditView(UpdateView):
    """Edit a ticket through the shared ticket_form2 template."""
    form_class = TicketForm
    model = Ticket
    template_name = 'ticket_form2.html'
    success_url = reverse_lazy('ticket_list')
    # Kept from the original, although it appears unused by an UpdateView.
    paginate_by = 25
class TicketDeleteView(DeleteView):
    """Confirm-and-delete view for a ticket."""
    form_class = TicketForm
    model = Ticket
    template_name = 'ticket_delete2.html'
    success_url = reverse_lazy('ticket_list')
def estadisticas_main(request):
    """Render the statistics landing page."""
    context = {}
    return render(request, 'estadisticas_main.html', context)
def apimes(request):
    # JSON endpoint: returns a list of {month, count_items} rows — tickets
    # grouped by the month of `fecha`, truncated via the active database
    # backend's date_trunc SQL.
    # NOTE(review): QuerySet.extra() is a legacy escape hatch; the modern
    # equivalent is annotate(month=TruncMonth('fecha')) — confirm before
    # migrating, since the raw SQL alias feeds .values('month') below.
    data = Ticket.objects.all() \
        .extra(select={'month': connections[Ticket.objects.db].ops.date_trunc_sql('month', 'fecha')}) \
        .values('month') \
        .annotate(count_items=Count('id'))
    # safe=False because the top-level JSON value is a list, not a dict.
    return JsonResponse(list(data), safe=False)
def estadisticas_total(request):
    """All-time ticket counts by category, destination group, state and user.

    Renders ``estadisticas_global.html`` with one count per breakdown
    value plus a ``usuario`` total; context keys match the original.
    (The original's ``.only(...)`` calls were dropped: they do not affect
    ``.count()`` results.)
    """
    # Name -> primary-key maps replace ~30 copy-pasted .filter().count()
    # lines.
    categorias = {
        "mantenimiento": 1, "vehiculo_mal_estacionado": 2,
        "vehiculo_descompuesto": 3, "manifestacion": 4,
        "cierre_de_calle": 5, "accidente": 6, "obras": 7,
        "obstaculo": 8, "congestionamiento": 9, "sincronizacion": 10,
        "semaforo_apagado": 11, "infracciones": 12, "led_foco": 13,
    }
    grupos = {
        "sistemas": 1, "redes": 2, "pmt_atms": 3, "pmt_otros": 4,
        "operadores": 5, "tecnicos": 6, "administrativa": 7, "jefatura": 8,
    }
    estados = {"pendiente": 1, "cerrado": 2, "atendido": 3, "vencido": 4}
    usuarios = {"atms": 1, "jose": 2, "emilio": 3, "gustavo": 4, "elias": 25}

    data = {}
    for nombre, pk in categorias.items():
        data[nombre] = Ticket.objects.filter(categoria=pk).count()
    for nombre, pk in grupos.items():
        data[nombre] = Ticket.objects.filter(grupo_destino=pk).count()
    for nombre, pk in estados.items():
        data[nombre] = Ticket.objects.filter(estado=pk).count()
    for nombre, pk in usuarios.items():
        data[nombre] = Ticket.objects.filter(usuario=pk).count()
    data["usuario"] = sum(data[n] for n in usuarios)
    return render(request, 'estadisticas_global.html', {'data': data})
def estadisticas_mes(request):
    """Ticket counts for the current month by category/group/state/user.

    Renders ``estadisticas_mes.html``; context keys (including the
    per-breakdown totals ``categoria``/``grupo``/``estado``/``usuario``)
    match the original.
    """
    # NOTE(review): naive datetime, and fecha__month without fecha__year
    # also counts the same month of previous years — confirm intended.
    hoy = datetime.now().day
    mes = datetime.now().month
    base = Ticket.objects.filter(fecha__month=mes)

    # Name -> primary-key maps replace ~30 copy-pasted .filter().count()
    # lines.
    categorias = {
        "mantenimiento": 1, "vehiculo_mal_estacionado": 2,
        "vehiculo_descompuesto": 3, "manifestacion": 4,
        "cierre_de_calle": 5, "accidente": 6, "obras": 7,
        "obstaculo": 8, "congestionamiento": 9, "sincronizacion": 10,
        "semaforo_apagado": 11, "infracciones": 12, "led_foco": 13,
    }
    grupos = {
        "sistemas": 1, "redes": 2, "pmt_atms": 3, "pmt_otros": 4,
        "operadores": 5, "tecnicos": 6, "administrativa": 7, "jefatura": 8,
    }
    estados = {"pendiente": 1, "cerrado": 2, "atendido": 3, "vencido": 4}
    usuarios = {"atms": 1, "jose": 2, "emilio": 3, "gustavo": 4, "elias": 25}

    data = {}
    for nombre, pk in categorias.items():
        data[nombre] = base.filter(categoria=pk).count()
    for nombre, pk in grupos.items():
        data[nombre] = base.filter(grupo_destino=pk).count()
    for nombre, pk in estados.items():
        data[nombre] = base.filter(estado=pk).count()
    for nombre, pk in usuarios.items():
        data[nombre] = base.filter(usuario=pk).count()
    data.update({
        "hoy": hoy,
        "mes": mes,
        "categoria": sum(data[n] for n in categorias),
        "grupo": sum(data[n] for n in grupos),
        "estado": sum(data[n] for n in estados),
        "usuario": sum(data[n] for n in usuarios),
    })
    return render(request, 'estadisticas_mes.html', {'data': data})
def estadisticas_dia(request):
    """Ticket counts for today by category/group/state/user.

    Identical breakdowns to :func:`estadisticas_mes` but restricted to
    today's day-of-month and month; renders ``estadisticas_dia.html``.
    """
    # NOTE(review): no fecha__year filter — the same day+month of previous
    # years is also counted; confirm intended.
    hoy = datetime.now().day
    mes = datetime.now().month
    base = Ticket.objects.filter(fecha__day=hoy, fecha__month=mes)

    # Name -> primary-key maps replace ~30 copy-pasted .filter().count()
    # lines.
    categorias = {
        "mantenimiento": 1, "vehiculo_mal_estacionado": 2,
        "vehiculo_descompuesto": 3, "manifestacion": 4,
        "cierre_de_calle": 5, "accidente": 6, "obras": 7,
        "obstaculo": 8, "congestionamiento": 9, "sincronizacion": 10,
        "semaforo_apagado": 11, "infracciones": 12, "led_foco": 13,
    }
    grupos = {
        "sistemas": 1, "redes": 2, "pmt_atms": 3, "pmt_otros": 4,
        "operadores": 5, "tecnicos": 6, "administrativa": 7, "jefatura": 8,
    }
    estados = {"pendiente": 1, "cerrado": 2, "atendido": 3, "vencido": 4}
    usuarios = {"atms": 1, "jose": 2, "emilio": 3, "gustavo": 4, "elias": 25}

    data = {}
    for nombre, pk in categorias.items():
        data[nombre] = base.filter(categoria=pk).count()
    for nombre, pk in grupos.items():
        data[nombre] = base.filter(grupo_destino=pk).count()
    for nombre, pk in estados.items():
        data[nombre] = base.filter(estado=pk).count()
    for nombre, pk in usuarios.items():
        data[nombre] = base.filter(usuario=pk).count()
    data.update({
        "hoy": hoy,
        "mes": mes,
        "categoria": sum(data[n] for n in categorias),
        "grupo": sum(data[n] for n in grupos),
        "estado": sum(data[n] for n in estados),
        "usuario": sum(data[n] for n in usuarios),
    })
    return render(request, 'estadisticas_dia.html', {'data': data})
def comunicaciones_estadisticas_mes(request):
    """Ticket statistics for the current month (comunicaciones dashboard).

    Counts this month's tickets broken down by category, destination group
    and state, adds per-dimension totals plus the current day/month, and
    renders everything under the 'data' context key in
    'comunicaciones_estadisticas_mes.html'.

    Replaces 25 copy-pasted queryset lines and a 30-entry literal dict with
    data-driven loops; the produced keys and counts are identical.
    """
    hoy = datetime.now().day
    mes = datetime.now().month
    # Base queryset: every ticket created in the current month.
    base = Ticket.objects.filter(fecha__month=mes)
    # Template variable name -> model id, one mapping per breakdown dimension.
    categorias = {
        "mantenimiento": 1,
        "vehiculo_mal_estacionado": 2,
        "vehiculo_descompuesto": 3,
        "manifestacion": 4,
        "cierre_de_calle": 5,
        "accidente": 6,
        "obras": 7,
        "obstaculo": 8,
        "congestionamiento": 9,
        "sincronizacion": 10,
        "semaforo_apagado": 11,
        "infracciones": 12,
        "led_foco": 13,
    }
    grupos = {
        "sistemas": 1,
        "redes": 2,
        "pmt_atms": 3,
        "pmt_otros": 4,
        "operadores": 5,
        "tecnicos": 6,
        "administrativa": 7,
        "jefatura": 8,
    }
    estados = {
        "pendiente": 1,
        "cerrado": 2,
        "atendido": 3,
        "vencido": 4,
    }
    data = {}
    for nombre, pk in categorias.items():
        data[nombre] = base.filter(categoria=pk).count()
    for nombre, pk in grupos.items():
        data[nombre] = base.filter(grupo_destino=pk).count()
    for nombre, pk in estados.items():
        data[nombre] = base.filter(estado=pk).count()
    # Per-dimension totals plus the current day/month for the template.
    data["categoria"] = sum(data[nombre] for nombre in categorias)
    data["grupo"] = sum(data[nombre] for nombre in grupos)
    data["estado"] = sum(data[nombre] for nombre in estados)
    data["hoy"] = hoy
    data["mes"] = mes
    return render(request, 'comunicaciones_estadisticas_mes.html', {'data': data})
def comunicaciones_estadisticas_dia(request):
    """Ticket statistics for the current day (comunicaciones dashboard).

    Same breakdown as comunicaciones_estadisticas_mes but restricted to
    tickets created today (current day of the current month); rendered by
    'comunicaciones_estadisticas_hoy.html'.

    Replaces 25 copy-pasted queryset lines and a 30-entry literal dict with
    data-driven loops; the produced keys and counts are identical.
    """
    hoy = datetime.now().day
    mes = datetime.now().month
    # Base queryset: every ticket created today.
    base = Ticket.objects.filter(fecha__day=hoy, fecha__month=mes)
    # Template variable name -> model id, one mapping per breakdown dimension.
    categorias = {
        "mantenimiento": 1,
        "vehiculo_mal_estacionado": 2,
        "vehiculo_descompuesto": 3,
        "manifestacion": 4,
        "cierre_de_calle": 5,
        "accidente": 6,
        "obras": 7,
        "obstaculo": 8,
        "congestionamiento": 9,
        "sincronizacion": 10,
        "semaforo_apagado": 11,
        "infracciones": 12,
        "led_foco": 13,
    }
    grupos = {
        "sistemas": 1,
        "redes": 2,
        "pmt_atms": 3,
        "pmt_otros": 4,
        "operadores": 5,
        "tecnicos": 6,
        "administrativa": 7,
        "jefatura": 8,
    }
    estados = {
        "pendiente": 1,
        "cerrado": 2,
        "atendido": 3,
        "vencido": 4,
    }
    data = {}
    for nombre, pk in categorias.items():
        data[nombre] = base.filter(categoria=pk).count()
    for nombre, pk in grupos.items():
        data[nombre] = base.filter(grupo_destino=pk).count()
    for nombre, pk in estados.items():
        data[nombre] = base.filter(estado=pk).count()
    # Per-dimension totals plus the current day/month for the template.
    data["categoria"] = sum(data[nombre] for nombre in categorias)
    data["grupo"] = sum(data[nombre] for nombre in grupos)
    data["estado"] = sum(data[nombre] for nombre in estados)
    data["hoy"] = hoy
    data["mes"] = mes
    return render(request, 'comunicaciones_estadisticas_hoy.html', {'data': data})
def prensa_estadisticas_mes(request):
    """Ticket statistics for the current month (prensa dashboard).

    Like the comunicaciones month view but with a reduced category set
    (ids 2-9: street/traffic incidents only); rendered by
    'prensa_estadisticas_mes.html'.

    Replaces 20 copy-pasted queryset lines and a literal dict with
    data-driven loops; the produced keys and counts are identical.
    """
    hoy = datetime.now().day
    mes = datetime.now().month
    # Base queryset: every ticket created in the current month.
    base = Ticket.objects.filter(fecha__month=mes)
    # Template variable name -> model id, one mapping per breakdown dimension.
    categorias = {
        "vehiculo_mal_estacionado": 2,
        "vehiculo_descompuesto": 3,
        "manifestacion": 4,
        "cierre_de_calle": 5,
        "accidente": 6,
        "obras": 7,
        "obstaculo": 8,
        "congestionamiento": 9,
    }
    grupos = {
        "sistemas": 1,
        "redes": 2,
        "pmt_atms": 3,
        "pmt_otros": 4,
        "operadores": 5,
        "tecnicos": 6,
        "administrativa": 7,
        "jefatura": 8,
    }
    estados = {
        "pendiente": 1,
        "cerrado": 2,
        "atendido": 3,
        "vencido": 4,
    }
    data = {}
    for nombre, pk in categorias.items():
        data[nombre] = base.filter(categoria=pk).count()
    for nombre, pk in grupos.items():
        data[nombre] = base.filter(grupo_destino=pk).count()
    for nombre, pk in estados.items():
        data[nombre] = base.filter(estado=pk).count()
    # Per-dimension totals plus the current day/month for the template.
    data["categoria"] = sum(data[nombre] for nombre in categorias)
    data["grupo"] = sum(data[nombre] for nombre in grupos)
    data["estado"] = sum(data[nombre] for nombre in estados)
    data["hoy"] = hoy
    data["mes"] = mes
    return render(request, 'prensa_estadisticas_mes.html', {'data': data})
def prensa_estadisticas_dia(request):
    """Ticket statistics for the current day (prensa dashboard).

    Same reduced breakdown as prensa_estadisticas_mes (category ids 2-9)
    but restricted to tickets created today; rendered by
    'prensa_estadisticas_hoy.html'.

    Replaces 20 copy-pasted queryset lines and a literal dict with
    data-driven loops; the produced keys and counts are identical.
    """
    hoy = datetime.now().day
    mes = datetime.now().month
    # Base queryset: every ticket created today.
    base = Ticket.objects.filter(fecha__day=hoy, fecha__month=mes)
    # Template variable name -> model id, one mapping per breakdown dimension.
    categorias = {
        "vehiculo_mal_estacionado": 2,
        "vehiculo_descompuesto": 3,
        "manifestacion": 4,
        "cierre_de_calle": 5,
        "accidente": 6,
        "obras": 7,
        "obstaculo": 8,
        "congestionamiento": 9,
    }
    grupos = {
        "sistemas": 1,
        "redes": 2,
        "pmt_atms": 3,
        "pmt_otros": 4,
        "operadores": 5,
        "tecnicos": 6,
        "administrativa": 7,
        "jefatura": 8,
    }
    estados = {
        "pendiente": 1,
        "cerrado": 2,
        "atendido": 3,
        "vencido": 4,
    }
    data = {}
    for nombre, pk in categorias.items():
        data[nombre] = base.filter(categoria=pk).count()
    for nombre, pk in grupos.items():
        data[nombre] = base.filter(grupo_destino=pk).count()
    for nombre, pk in estados.items():
        data[nombre] = base.filter(estado=pk).count()
    # Per-dimension totals plus the current day/month for the template.
    data["categoria"] = sum(data[nombre] for nombre in categorias)
    data["grupo"] = sum(data[nombre] for nombre in grupos)
    data["estado"] = sum(data[nombre] for nombre in estados)
    data["hoy"] = hoy
    data["mes"] = mes
    return render(request, 'prensa_estadisticas_hoy.html', {'data': data})
def global_versus(request):
    """Compare PMT-ATMS (grupo_destino=3) against PMT-otros (grupo_destino=4).

    For each group this computes the all-time ticket total, the expired
    (estado=4) total, and the expired count per incident category, producing
    keys such as 'pmt_atms_total', 'pmt_otros_vencidos',
    'pmt_atms_accidente', ...  Rendered by 'global_versus.html'.

    Replaces 22 copy-pasted queryset lines with two nested loops; the
    produced keys and counts are identical.
    """
    # Category ids broken out in the expired-ticket comparison.
    categorias = {
        "vehiculo_mal_estacionado": 2,
        "vehiculo_descompuesto": 3,
        "manifestacion": 4,
        "cierre_de_calle": 5,
        "accidente": 6,
        "obras": 7,
        "obstaculo": 8,
        "congestionamiento": 9,
        "infracciones_varias": 12,
    }
    data = {}
    for etiqueta, grupo in (("pmt_atms", 3), ("pmt_otros", 4)):
        data[etiqueta + "_total"] = Ticket.objects.filter(grupo_destino=grupo).count()
        data[etiqueta + "_vencidos"] = Ticket.objects.filter(grupo_destino=grupo, estado=4).count()
        for nombre, categoria in categorias.items():
            data[etiqueta + "_" + nombre] = Ticket.objects.filter(
                grupo_destino=grupo, estado=4, categoria=categoria).count()
    return render(request, 'global_versus.html', {'data': data})
# NOTE(review): module-level leftovers below the last view function.
# The queryset on the next line is lazy (no DB hit until evaluated), but the
# name 'mcal_lopez' is never used in this file — confirm before removing.
mcal_lopez = Ticket.objects.filter(ubicacion__contains='cal')
# This .count() executes a database query at import time and discards the
# result — looks like leftover debug code; confirm before removing.
Ticket.objects.select_related('grupo_destino').filter(grupo_destino=3).count()
Ticket.objects.select_related('grupo_destino').filter(grupo_destino=4).count() | true | true |
f720da7486a07c56f32fcbde3e8956ad3ccbd326 | 1,830 | py | Python | doc/listings/interstore/webcal.py | jonathanj/mantissa | 53e5502aba23ce99be78b27f923a276593033fe8 | [
"MIT"
] | 6 | 2016-02-17T15:04:53.000Z | 2021-08-20T09:44:10.000Z | doc/listings/interstore/webcal.py | jonathanj/mantissa | 53e5502aba23ce99be78b27f923a276593033fe8 | [
"MIT"
] | 62 | 2015-02-04T23:40:55.000Z | 2021-02-18T19:56:02.000Z | doc/listings/interstore/webcal.py | jonathanj/mantissa | 53e5502aba23ce99be78b27f923a276593033fe8 | [
"MIT"
] | 8 | 2015-11-15T17:26:42.000Z | 2020-12-02T06:36:52.000Z |
from datetime import timedelta
from epsilon.extime import Time
from nevow.page import renderer
from nevow.loaders import stan
from nevow.tags import div
from nevow.athena import LiveElement
from xmantissa.liveform import TEXT_INPUT, LiveForm, Parameter
class CalendarElement(LiveElement):
    """Live widget showing a calendar's appointments and a request form.

    Each appointment line is annotated with its status: rejected (failed is
    set), pending confirmation (no remoteID yet), or confirmed.
    """
    docFactory = stan(div[
        "It's a calendar!",
        div(render="appointments"),
        div(render="appointmentForm")])

    def __init__(self, calendar):
        LiveElement.__init__(self)
        self.calendar = calendar

    @renderer
    def appointments(self, request, tag):
        """Render one div per appointment, annotated with its status."""
        for appt in self.calendar.getAppointments():
            entry = div[
                "Appointment with ",
                appt.withWhomUsername, "@",
                appt.withWhomDomain, " at ",
                appt.when.asHumanly()]
            if appt.failed is not None:
                entry[" (Rejected: ", appt.failed, ")"]
            elif appt.remoteID is None:
                entry[" (Pending confirmation)"]
            tag[entry]
        return tag

    def _requestAppointment(self, whom):
        """LiveForm callback: request an appointment with *whom* (user@domain)."""
        local, domain = whom.split(u"@")
        target = self.calendar.calendarIDFor(local, domain)
        self.calendar.requestAppointmentWith(target, Time() + timedelta(days=2))

    @renderer
    def appointmentForm(self, request, tag):
        """Render the request-an-appointment form."""
        whomParam = Parameter(u"whom", TEXT_INPUT, unicode, u"Whom:",
                              u"The username of the person with whom "
                              u"to create an appointment (user@domain).",
                              None)
        form = LiveForm(self._requestAppointment, [whomParam],
                        "Request An Appointment")
        form.setFragmentParent(self)
        return form
| 29.516129 | 80 | 0.604918 |
from datetime import timedelta
from epsilon.extime import Time
from nevow.page import renderer
from nevow.loaders import stan
from nevow.tags import div
from nevow.athena import LiveElement
from xmantissa.liveform import TEXT_INPUT, LiveForm, Parameter
class CalendarElement(LiveElement):
    """Live widget showing a calendar's appointments and a request form.

    Each appointment line is annotated with its status: rejected (failed is
    set), pending confirmation (no remoteID yet), or confirmed.
    """
    docFactory = stan(div[
        "It's a calendar!",
        div(render="appointments"),
        div(render="appointmentForm")])

    def __init__(self, calendar):
        LiveElement.__init__(self)
        self.calendar = calendar

    @renderer
    def appointments(self, request, tag):
        """Render one div per appointment, annotated with its status."""
        for appt in self.calendar.getAppointments():
            entry = div[
                "Appointment with ",
                appt.withWhomUsername, "@",
                appt.withWhomDomain, " at ",
                appt.when.asHumanly()]
            if appt.failed is not None:
                entry[" (Rejected: ", appt.failed, ")"]
            elif appt.remoteID is None:
                entry[" (Pending confirmation)"]
            tag[entry]
        return tag

    def _requestAppointment(self, whom):
        """LiveForm callback: request an appointment with *whom* (user@domain)."""
        local, domain = whom.split(u"@")
        target = self.calendar.calendarIDFor(local, domain)
        self.calendar.requestAppointmentWith(target, Time() + timedelta(days=2))

    @renderer
    def appointmentForm(self, request, tag):
        """Render the request-an-appointment form."""
        whomParam = Parameter(u"whom", TEXT_INPUT, unicode, u"Whom:",
                              u"The username of the person with whom "
                              u"to create an appointment (user@domain).",
                              None)
        form = LiveForm(self._requestAppointment, [whomParam],
                        "Request An Appointment")
        form.setFragmentParent(self)
        return form
| true | true |
f720da77bf370fc9b4db8eeeefff5308d08c418c | 197 | py | Python | robots/test/strategies/run_tests/tests/test_sharing/test_share/t1.py | memristor/mep2 | bc5cddacba3d740f791f3454b8cb51bda83ce202 | [
"MIT"
] | 5 | 2018-11-27T15:15:00.000Z | 2022-02-10T21:44:13.000Z | robots/test/strategies/run_tests/tests/test_sharing/test_share/t1.py | memristor/mep2 | bc5cddacba3d740f791f3454b8cb51bda83ce202 | [
"MIT"
] | 2 | 2018-10-20T15:48:40.000Z | 2018-11-20T05:11:33.000Z | robots/test/strategies/run_tests/tests/test_sharing/test_share/t1.py | memristor/mep2 | bc5cddacba3d740f791f3454b8cb51bda83ce202 | [
"MIT"
] | 1 | 2020-02-07T12:44:47.000Z | 2020-02-07T12:44:47.000Z | weight=1
# Shared test state, published to the framework under the name 'var1'.
# NOTE(review): _State, _do and sleep are framework-injected globals; their
# exact semantics are assumed here — confirm against the strategy framework.
a=_State('a', name='var1', shared=True)
def run():
	# Step 1: read the shared value, wait, then write 5 into it.
	@_do
	def _():
		print(a.val)
		sleep(10)
		a.val = 5
	# Step 2: read it again, wait, then write 8.
	@_do
	def _():
		print(a.val)
		sleep(10)
		a.val = 8
	# Step 3: final read of the shared value.
	@_do
	def _():
		print(a.val)
| 11.588235 | 39 | 0.563452 | weight=1
# Shared test state, published to the framework under the name 'var1'.
# NOTE(review): _State, _do and sleep are framework-injected globals; their
# exact semantics are assumed here — confirm against the strategy framework.
a=_State('a', name='var1', shared=True)
def run():
	# Step 1: read the shared value, wait, then write 5 into it.
	@_do
	def _():
		print(a.val)
		sleep(10)
		a.val = 5
	# Step 2: read it again, wait, then write 8.
	@_do
	def _():
		print(a.val)
		sleep(10)
		a.val = 8
	# Step 3: final read of the shared value.
	@_do
	def _():
		print(a.val)
| true | true |
f720da93b083e8b08000df92605af508a5009d38 | 2,479 | py | Python | csympy/tests/test_arit.py | shipci/csympy | 6b5a1d7d8a3f9bbe0b983b78a44be90a70db0743 | [
"MIT"
] | null | null | null | csympy/tests/test_arit.py | shipci/csympy | 6b5a1d7d8a3f9bbe0b983b78a44be90a70db0743 | [
"MIT"
] | null | null | null | csympy/tests/test_arit.py | shipci/csympy | 6b5a1d7d8a3f9bbe0b983b78a44be90a70db0743 | [
"MIT"
] | null | null | null | from nose.tools import raises
from csympy import Symbol, Integer, Add, Pow
def test_arit1():
    """Smoke test: mixed Symbol/Integer arithmetic constructs without error."""
    sx = Symbol("x")
    sy = Symbol("y")
    expr = sx + sy
    expr = sx * sy
    expr = Integer(2) * sx
    expr = 2 * sx
    expr = sx + 1
    expr = 1 + sx
def test_arit2():
    """Addition collects like terms and is commutative."""
    sx = Symbol("x")
    sy = Symbol("y")
    assert sx + sx == Integer(2) * sx
    assert sx + sx != Integer(3) * sx
    assert sx + sy == sy + sx
    assert sx + sx == 2 * sx
    assert sx + sx == sx * 2
    assert sx + sx + sx == 3 * sx
    assert sx + sy + sx + sx == 3 * sx + sy
    assert not (sx + sx == 3 * sx)
    assert not (sx + sx != 2 * sx)
@raises(TypeError)
def test_arit3():
    """Multiplying a str by a Symbol must raise TypeError."""
    sx = Symbol("x")
    sy = Symbol("y")
    expr = "x" * sx
def test_arit4():
    """Multiplication collects powers and is commutative."""
    sx = Symbol("x")
    sy = Symbol("y")
    assert sx * sx == sx ** 2
    assert sx * sy == sy * sx
    assert sx * sx * sx == sx ** 3
    assert sx * sy * sx * sx == sx ** 3 * sy
def test_arit5():
    """expand() produces the trinomial without mutating the original Pow."""
    x = Symbol("x")
    y = Symbol("y")
    e = (x+y)**2
    f = e.expand()
    assert e == (x+y)**2
    assert e != x**2 + 2*x*y + y**2
    assert isinstance(e, Pow)
    assert f == x**2 + 2*x*y + y**2
    assert isinstance(f, Add)
def test_arit6():
    """String form of simple sums and products.

    BUG FIX: the original asserts were written as
        assert str(e) == "x + y" or "y + x"
    which always passes, because the right operand of `or` is a non-empty
    (truthy) string.  The intent — accept either operand order — is now
    expressed with a membership test.
    """
    x = Symbol("x")
    y = Symbol("y")
    e = x + y
    assert str(e) in ("x + y", "y + x")
    e = x * y
    assert str(e) in ("x*y", "y*x")
    e = Integer(2)*x
    assert str(e) == "2x"
    e = 2*x
    assert str(e) == "2x"
def test_arit7():
    """Subtraction cancels terms and combines coefficients."""
    x = Symbol("x")
    y = Symbol("y")
    assert x - x == 0
    assert x - y != y - x
    assert 2*x - x == x
    assert 3*x - x == 2*x
    assert 2*x*y - x*y == x*y
def test_arit8():
    """Powers with symbolic exponents combine and cancel."""
    x = Symbol("x")
    y = Symbol("y")
    z = Symbol("z")
    assert x**y * x**x == x**(x+y)
    assert x**y * x**x * x**z == x**(x+y+z)
    assert x**y - x**y == 0
    assert x**2 / x == x
    assert y*x**2 / (x*y) == x
    assert (2 * x**3 * y**2 * z)**3 / 8 == x**9 * y**6 * z**3
    assert (2*y**(-2*x**2)) * (3*y**(2*x**2)) == 6
def test_expand1():
    """expand() distributes squared binomials, including rational coefficients."""
    a = Symbol("x")
    b = Symbol("y")
    c = Symbol("z")
    assert ((2*a + b)**2).expand() == 4*a**2 + 4*a*b + b**2
    assert (a**2)**3 == a**6
    assert ((2*a**2 + 3*b)**2).expand() == 4*a**4 + 12*a**2*b + 9*b**2
    assert ((2*a/3 + b/4)**2).expand() == 4*a**2/9 + a*b/3 + b**2/16
def test_arit9():
    """Reciprocals compare structurally."""
    a = Symbol("x")
    b = Symbol("y")
    assert 1/a == 1/a
    assert 1/a != 1/b
def test_expand2():
    """(1/(y*z) - y*z) * y*z expands to 1 - (y*z)**2."""
    b = Symbol("y")
    c = Symbol("z")
    assert ((1/(b*c) - b*c)*b*c).expand() == 1 - (b*c)**2
def test_expand3():
    """Expansion of a product involving an inverse term."""
    a = Symbol("x")
    b = Symbol("y")
    assert ((1/(a*b) - a*b + 2)*(1 + a*b)).expand() == 3 + 1/(a*b) + a*b - (a*b)**2
| 21.938053 | 79 | 0.449375 | from nose.tools import raises
from csympy import Symbol, Integer, Add, Pow
def test_arit1():
    """Smoke test: mixed Symbol/Integer arithmetic constructs without error."""
    sx = Symbol("x")
    sy = Symbol("y")
    expr = sx + sy
    expr = sx * sy
    expr = Integer(2) * sx
    expr = 2 * sx
    expr = sx + 1
    expr = 1 + sx
def test_arit2():
    """Addition collects like terms and is commutative."""
    sx = Symbol("x")
    sy = Symbol("y")
    assert sx + sx == Integer(2) * sx
    assert sx + sx != Integer(3) * sx
    assert sx + sy == sy + sx
    assert sx + sx == 2 * sx
    assert sx + sx == sx * 2
    assert sx + sx + sx == 3 * sx
    assert sx + sy + sx + sx == 3 * sx + sy
    assert not (sx + sx == 3 * sx)
    assert not (sx + sx != 2 * sx)
@raises(TypeError)
def test_arit3():
    """Multiplying a str by a Symbol must raise TypeError."""
    sx = Symbol("x")
    sy = Symbol("y")
    expr = "x" * sx
def test_arit4():
    """Multiplication collects powers and is commutative."""
    sx = Symbol("x")
    sy = Symbol("y")
    assert sx * sx == sx ** 2
    assert sx * sy == sy * sx
    assert sx * sx * sx == sx ** 3
    assert sx * sy * sx * sx == sx ** 3 * sy
def test_arit5():
    """expand() produces the trinomial without mutating the original Pow."""
    x = Symbol("x")
    y = Symbol("y")
    e = (x+y)**2
    f = e.expand()
    assert e == (x+y)**2
    assert e != x**2 + 2*x*y + y**2
    assert isinstance(e, Pow)
    assert f == x**2 + 2*x*y + y**2
    assert isinstance(f, Add)
def test_arit6():
    """String form of simple sums and products.

    BUG FIX: the original asserts were written as
        assert str(e) == "x + y" or "y + x"
    which always passes, because the right operand of `or` is a non-empty
    (truthy) string.  The intent — accept either operand order — is now
    expressed with a membership test.
    """
    x = Symbol("x")
    y = Symbol("y")
    e = x + y
    assert str(e) in ("x + y", "y + x")
    e = x * y
    assert str(e) in ("x*y", "y*x")
    e = Integer(2)*x
    assert str(e) == "2x"
    e = 2*x
    assert str(e) == "2x"
def test_arit7():
    """Subtraction cancels terms and combines coefficients."""
    x = Symbol("x")
    y = Symbol("y")
    assert x - x == 0
    assert x - y != y - x
    assert 2*x - x == x
    assert 3*x - x == 2*x
    assert 2*x*y - x*y == x*y
def test_arit8():
    """Powers with symbolic exponents combine and cancel."""
    x = Symbol("x")
    y = Symbol("y")
    z = Symbol("z")
    assert x**y * x**x == x**(x+y)
    assert x**y * x**x * x**z == x**(x+y+z)
    assert x**y - x**y == 0
    assert x**2 / x == x
    assert y*x**2 / (x*y) == x
    assert (2 * x**3 * y**2 * z)**3 / 8 == x**9 * y**6 * z**3
    assert (2*y**(-2*x**2)) * (3*y**(2*x**2)) == 6
def test_expand1():
    """expand() distributes squared binomials, including rational coefficients."""
    a = Symbol("x")
    b = Symbol("y")
    c = Symbol("z")
    assert ((2*a + b)**2).expand() == 4*a**2 + 4*a*b + b**2
    assert (a**2)**3 == a**6
    assert ((2*a**2 + 3*b)**2).expand() == 4*a**4 + 12*a**2*b + 9*b**2
    assert ((2*a/3 + b/4)**2).expand() == 4*a**2/9 + a*b/3 + b**2/16
def test_arit9():
    """Reciprocals compare structurally."""
    a = Symbol("x")
    b = Symbol("y")
    assert 1/a == 1/a
    assert 1/a != 1/b
def test_expand2():
    """(1/(y*z) - y*z) * y*z expands to 1 - (y*z)**2."""
    b = Symbol("y")
    c = Symbol("z")
    assert ((1/(b*c) - b*c)*b*c).expand() == 1 - (b*c)**2
def test_expand3():
    """Expansion of a product involving an inverse term."""
    a = Symbol("x")
    b = Symbol("y")
    assert ((1/(a*b) - a*b + 2)*(1 + a*b)).expand() == 3 + 1/(a*b) + a*b - (a*b)**2
| true | true |
f720db2bca4a842dab5f8a8604fb53fae21bea7f | 2,309 | py | Python | epytope/Data/pssms/smmpmbec/mat/B_07_02_9.py | christopher-mohr/epytope | 8ac9fe52c0b263bdb03235a5a6dffcb72012a4fd | [
"BSD-3-Clause"
] | 7 | 2021-02-01T18:11:28.000Z | 2022-01-31T19:14:07.000Z | epytope/Data/pssms/smmpmbec/mat/B_07_02_9.py | christopher-mohr/epytope | 8ac9fe52c0b263bdb03235a5a6dffcb72012a4fd | [
"BSD-3-Clause"
] | 22 | 2021-01-02T15:25:23.000Z | 2022-03-14T11:32:53.000Z | epytope/Data/pssms/smmpmbec/mat/B_07_02_9.py | christopher-mohr/epytope | 8ac9fe52c0b263bdb03235a5a6dffcb72012a4fd | [
"BSD-3-Clause"
] | 4 | 2021-05-28T08:50:38.000Z | 2022-03-14T11:45:32.000Z | B_07_02_9 = {0: {'A': -0.332, 'C': 0.186, 'E': 0.544, 'D': 0.788, 'G': 0.214, 'F': -0.118, 'I': -0.161, 'H': -0.257, 'K': -0.244, 'M': -0.332, 'L': -0.105, 'N': 0.105, 'Q': 0.294, 'P': 0.58, 'S': -0.286, 'R': -0.62, 'T': 0.187, 'W': -0.114, 'V': -0.03, 'Y': -0.3}, 1: {'A': -0.604, 'C': 0.467, 'E': 0.468, 'D': 0.371, 'G': 0.128, 'F': 0.243, 'I': -0.242, 'H': 0.497, 'K': 0.244, 'M': -0.104, 'L': -0.131, 'N': 0.214, 'Q': 0.225, 'P': -2.038, 'S': 0.048, 'R': 0.36, 'T': -0.158, 'W': 0.467, 'V': -0.685, 'Y': 0.228}, 2: {'A': -0.307, 'C': 0.286, 'E': 0.256, 'D': 0.166, 'G': 0.217, 'F': 0.278, 'I': -0.015, 'H': -0.187, 'K': 0.072, 'M': -0.472, 'L': 0.03, 'N': 0.139, 'Q': -0.062, 'P': 0.399, 'S': -0.001, 'R': -0.829, 'T': 0.069, 'W': -0.071, 'V': 0.113, 'Y': -0.081}, 3: {'A': -0.077, 'C': 0.126, 'E': 0.127, 'D': 0.16, 'G': -0.091, 'F': 0.053, 'I': 0.146, 'H': -0.09, 'K': -0.069, 'M': -0.051, 'L': 0.038, 'N': 0.037, 'Q': -0.16, 'P': -0.047, 'S': -0.026, 'R': -0.081, 'T': 0.094, 'W': -0.175, 'V': 0.079, 'Y': 0.006}, 4: {'A': -0.129, 'C': -0.105, 'E': 0.445, 'D': 0.273, 'G': -0.12, 'F': 0.172, 'I': 0.218, 'H': -0.303, 'K': 0.061, 'M': -0.098, 'L': 0.138, 'N': -0.076, 'Q': 0.002, 'P': -0.135, 'S': -0.123, 'R': -0.267, 'T': -0.098, 'W': 0.058, 'V': 0.082, 'Y': 0.006}, 5: {'A': 0.025, 'C': 0.217, 'E': 0.317, 'D': 0.199, 'G': -0.291, 'F': -0.017, 'I': 0.113, 'H': -0.156, 'K': -0.035, 'M': -0.068, 'L': 0.119, 'N': -0.059, 'Q': 0.093, 'P': 0.185, 'S': -0.085, 'R': -0.472, 'T': -0.283, 'W': -0.109, 'V': 0.128, 'Y': 0.178}, 6: {'A': -0.233, 'C': 0.164, 'E': 0.335, 'D': 0.37, 'G': -0.26, 'F': 0.046, 'I': -0.003, 'H': -0.073, 'K': 0.132, 'M': -0.124, 'L': -0.129, 'N': -0.154, 'Q': -0.006, 'P': 0.15, 'S': -0.292, 'R': -0.299, 'T': -0.136, 'W': 0.376, 'V': -0.059, 'Y': 0.196}, 7: {'A': -0.654, 'C': 0.213, 'E': -0.076, 'D': 0.111, 'G': 0.084, 'F': 0.191, 'I': 0.094, 'H': 0.284, 'K': 0.362, 'M': 0.048, 'L': 0.063, 'N': 0.223, 
'Q': -0.058, 'P': -0.543, 'S': -0.449, 'R': 0.158, 'T': -0.193, 'W': 0.222, 'V': -0.299, 'Y': 0.22}, 8: {'A': -0.341, 'C': 0.351, 'E': 0.445, 'D': 0.805, 'G': 0.754, 'F': -0.779, 'I': -0.736, 'H': 0.007, 'K': 0.417, 'M': -1.109, 'L': -1.214, 'N': 0.775, 'Q': 0.172, 'P': 0.786, 'S': 0.332, 'R': 0.306, 'T': -0.204, 'W': -0.245, 'V': -0.699, 'Y': 0.178}, -1: {'con': 5.45316}} | 2,309 | 2,309 | 0.395409 | B_07_02_9 = {0: {'A': -0.332, 'C': 0.186, 'E': 0.544, 'D': 0.788, 'G': 0.214, 'F': -0.118, 'I': -0.161, 'H': -0.257, 'K': -0.244, 'M': -0.332, 'L': -0.105, 'N': 0.105, 'Q': 0.294, 'P': 0.58, 'S': -0.286, 'R': -0.62, 'T': 0.187, 'W': -0.114, 'V': -0.03, 'Y': -0.3}, 1: {'A': -0.604, 'C': 0.467, 'E': 0.468, 'D': 0.371, 'G': 0.128, 'F': 0.243, 'I': -0.242, 'H': 0.497, 'K': 0.244, 'M': -0.104, 'L': -0.131, 'N': 0.214, 'Q': 0.225, 'P': -2.038, 'S': 0.048, 'R': 0.36, 'T': -0.158, 'W': 0.467, 'V': -0.685, 'Y': 0.228}, 2: {'A': -0.307, 'C': 0.286, 'E': 0.256, 'D': 0.166, 'G': 0.217, 'F': 0.278, 'I': -0.015, 'H': -0.187, 'K': 0.072, 'M': -0.472, 'L': 0.03, 'N': 0.139, 'Q': -0.062, 'P': 0.399, 'S': -0.001, 'R': -0.829, 'T': 0.069, 'W': -0.071, 'V': 0.113, 'Y': -0.081}, 3: {'A': -0.077, 'C': 0.126, 'E': 0.127, 'D': 0.16, 'G': -0.091, 'F': 0.053, 'I': 0.146, 'H': -0.09, 'K': -0.069, 'M': -0.051, 'L': 0.038, 'N': 0.037, 'Q': -0.16, 'P': -0.047, 'S': -0.026, 'R': -0.081, 'T': 0.094, 'W': -0.175, 'V': 0.079, 'Y': 0.006}, 4: {'A': -0.129, 'C': -0.105, 'E': 0.445, 'D': 0.273, 'G': -0.12, 'F': 0.172, 'I': 0.218, 'H': -0.303, 'K': 0.061, 'M': -0.098, 'L': 0.138, 'N': -0.076, 'Q': 0.002, 'P': -0.135, 'S': -0.123, 'R': -0.267, 'T': -0.098, 'W': 0.058, 'V': 0.082, 'Y': 0.006}, 5: {'A': 0.025, 'C': 0.217, 'E': 0.317, 'D': 0.199, 'G': -0.291, 'F': -0.017, 'I': 0.113, 'H': -0.156, 'K': -0.035, 'M': -0.068, 'L': 0.119, 'N': -0.059, 'Q': 0.093, 'P': 0.185, 'S': -0.085, 'R': -0.472, 'T': -0.283, 'W': -0.109, 'V': 0.128, 'Y': 0.178}, 6: {'A': -0.233, 'C': 0.164, 'E': 0.335, 'D': 0.37, 'G': -0.26, 
'F': 0.046, 'I': -0.003, 'H': -0.073, 'K': 0.132, 'M': -0.124, 'L': -0.129, 'N': -0.154, 'Q': -0.006, 'P': 0.15, 'S': -0.292, 'R': -0.299, 'T': -0.136, 'W': 0.376, 'V': -0.059, 'Y': 0.196}, 7: {'A': -0.654, 'C': 0.213, 'E': -0.076, 'D': 0.111, 'G': 0.084, 'F': 0.191, 'I': 0.094, 'H': 0.284, 'K': 0.362, 'M': 0.048, 'L': 0.063, 'N': 0.223, 'Q': -0.058, 'P': -0.543, 'S': -0.449, 'R': 0.158, 'T': -0.193, 'W': 0.222, 'V': -0.299, 'Y': 0.22}, 8: {'A': -0.341, 'C': 0.351, 'E': 0.445, 'D': 0.805, 'G': 0.754, 'F': -0.779, 'I': -0.736, 'H': 0.007, 'K': 0.417, 'M': -1.109, 'L': -1.214, 'N': 0.775, 'Q': 0.172, 'P': 0.786, 'S': 0.332, 'R': 0.306, 'T': -0.204, 'W': -0.245, 'V': -0.699, 'Y': 0.178}, -1: {'con': 5.45316}} | true | true |
f720dbb912a33f6df1fac7c953a783e5d94e86e3 | 13,329 | py | Python | SourceControlMgmt/SourceControlMgmt.py | tigelane/ACI-Simplified-GUI-Management | f2c3d27375421a75de0f5b9bbdc645c380549f05 | [
"MIT"
] | null | null | null | SourceControlMgmt/SourceControlMgmt.py | tigelane/ACI-Simplified-GUI-Management | f2c3d27375421a75de0f5b9bbdc645c380549f05 | [
"MIT"
] | 14 | 2020-02-14T23:47:50.000Z | 2020-03-04T20:16:29.000Z | SourceControlMgmt/SourceControlMgmt.py | IGNW/devnet-create-2020 | 1eea17891a6cd1fedc265605a7b06378542762bb | [
"MIT"
] | 1 | 2021-07-06T14:42:55.000Z | 2021-07-06T14:42:55.000Z | from pathlib import Path
from datetime import datetime
import shutil
import subprocess
import yaml
import requests
class SCMCredentialValidationError(Exception):
    """Raised when the supplied credentials cannot access the target repo."""
class SCMCloneRepoError(Exception):
    """Raised when a git clone of the remote repository fails."""
class SCMCreateBranchError(Exception):
    """Raised when a new local branch cannot be created."""
class SCMWriteFileError(Exception):
    """Raised when a file cannot be written into the local repo clone."""
class SCMPushDataError(Exception):
    """Raised when the add/commit/push sequence to the remote fails."""
class SCMDeleteRepoError(Exception):
    """Raised when the local clone of the repo cannot be deleted."""
class SCMGraphQLError(Exception):
    """Raised when the GitHub GraphQL API reports errors in a response."""
class SourceControlMgmt():
    """Thin wrapper around the git CLI and the GitHub GraphQL API.

    Drives a simple workflow against a single GitHub repository: validate
    credentials, clone the repo, create a branch, write a file, commit and
    push it, and open a pull request.  The GitHub repo ID is resolved via
    GraphQL at construction time, so valid credentials are required to
    instantiate this class.
    """

    def __init__(self, username=None, password=None, friendly_name=None, email=None, repo_name=None, repo_owner=None):
        self.username = username
        self.password = password
        self.friendly_name = friendly_name
        self.email = email
        self.repo_path = None
        self.repo_name = repo_name
        self.filename = None
        self.branch_name = None
        self.full_file_path = None
        self.relative_file_path = None
        self.existing_branches = {}
        self.git_hub_graphql_api = 'https://api.github.com/graphql'
        self.github_repo_id = None
        # Default the repo owner to the authenticated user when not given.
        self.repo_owner = self.username if not repo_owner else repo_owner
        self.get_github_repo_id()

        # These attributes are legitimately empty until later workflow steps.
        exceptions = ['repo_path', 'filename', 'branch_name', 'full_file_path', 'relative_file_path', 'existing_branches']
        if not all(vars(self).values()):
            missing_values = [k for k, v in vars(self).items() if not v and k not in exceptions]
            if missing_values:
                raise TypeError(f"All values must have data. The following attributes are empty: {missing_values}")

    def validate_scm_creds(self):
        """
        Verify user credentials will return the HEAD

        git ls-remote https://<user>:<password>@github.com/IGNW/pge-aci-epgs/ HEAD
        """
        results = subprocess.run(['git', 'ls-remote', f'https://{self.username}:{self.password}@github.com/{self.repo_owner}/{self.repo_name}/', 'HEAD'],
                                 stdout=subprocess.PIPE,
                                 stderr=subprocess.PIPE,
                                 check=False)
        # A successful ls-remote prints "<sha>\tHEAD" on stdout.
        if results.returncode == 0 and b"HEAD" in results.stdout:
            return True

        raise SCMCredentialValidationError("The supplied credentials do not provide access to the given repo")

    def clone_private_repo(self, directory=None):
        """
        Clone the repo into the directory specified

        git clone https://<user>:<password>@github.com/IGNW/pge-aci-epgs /tmp/pge-aci-epgs
        """
        if directory is None:
            raise TypeError('Must pass a value for the directory into this function')

        # If the directory is a string, convert it to a PathLib object
        if isinstance(directory, str):
            d = Path(directory)
        elif isinstance(directory, Path):
            d = directory
        else:
            # Previously an unsupported type fell through and triggered a
            # confusing NameError on the next line; fail fast instead.
            raise TypeError('directory must be a str or pathlib.Path')

        self.repo_path = d / self.repo_name

        if self.repo_path.exists() is True and self.repo_path.is_dir() is True:
            # Delete any stale clone so we always start from a fresh checkout.
            print('Directory exists and is being deleted')
            shutil.rmtree(self.repo_path)

        results = subprocess.run(['git', 'clone', f'https://{self.username}:{self.password}@github.com/{self.repo_owner}/{self.repo_name}/', f'{self.repo_path}'],
                                 stdout=subprocess.PIPE,
                                 stderr=subprocess.PIPE,
                                 check=False)

        # The git clone writes to stderr instead of stdout
        expected_string = f"Cloning into '{self.repo_path}'...\n"
        encoded_expected_string = expected_string.encode()
        if (results.returncode == 0 and
                encoded_expected_string == results.stderr and
                self.repo_path.exists() is True and
                self.repo_path.is_dir() is True):
            return True
        else:
            raise SCMCloneRepoError("The repo could not be cloned")

    def create_new_branch_in_repo(self, branch_name=None):
        """
        Create New Branch in existing repo

        cd /tmp/pge-aci-epgs
        git checkout -b NEW_TEST_BRANCH_NAME1
        """
        if not branch_name:
            raise TypeError('You must pass a branch name into this function')
        else:
            self.branch_name = branch_name

        if self.repo_path and self.repo_path.exists() is True and self.repo_path.is_dir() is True:
            results = subprocess.run(["git", "checkout", "-b", branch_name], cwd=self.repo_path, stdout=subprocess.PIPE, stderr=subprocess.PIPE, check=False)
        else:
            raise SCMCreateBranchError('You must have a repo cloned before trying to create a branch')

        # git prints the branch-switch confirmation on stderr.
        expected_results = f"Switched to a new branch '{self.branch_name}'\n"
        if results.returncode == 0 and expected_results.encode() == results.stderr:
            return True
        else:
            raise SCMCreateBranchError("A new branch was not able to be created")

    def write_data_to_file_in_repo(self, data, file_path=None, file_name=None, append_timestamp=False, as_yaml=False):
        """
        Write the data to a file in the repo

        :param data: payload to write; must be a dict when ``as_yaml`` is True.
        :param file_path: folder (relative to the repo root) to write into.
        :param file_name: base name of the file to create.
        :param append_timestamp: when True, a YYYYmmdd-HHMMSS stamp is
            inserted before the file extension.
        :param as_yaml: serialize ``data`` as YAML instead of writing it raw.
        """
        if file_path is None:
            raise TypeError('Must pass a string with the folder name of where the file will be stored into this function')
        if as_yaml and not isinstance(data, dict):
            raise TypeError('Must pass a dictionary to this function')
        # if 'schema' not in data.keys() and 'epgname' not in data.keys():
        #     raise ValueError('Must be a properly formatted aci dictionary object to use this function')

        now = datetime.now()
        str_now = now.strftime("%Y%m%d-%H%M%S")
        if append_timestamp:
            # Keep the extension (if any) after the timestamp.
            file_parts = file_name.split('.')
            if len(file_parts) > 1:
                self.filename = f"{file_parts[0]}-{str_now}.{file_parts[1]}"
            else:
                self.filename = f"{file_name}-{str_now}"
        else:
            self.filename = f"{file_name}"

        if self.repo_path and self.repo_path.exists() is True and self.repo_path.is_dir() is True:
            self.full_dir_path = self.repo_path / f"{file_path}"
            self.full_file_path = self.full_dir_path / self.filename
            self.relative_file_path = f'{file_path}/{self.filename}' if file_path else f'{self.filename}'
            if self.full_file_path.exists():
                raise SCMWriteFileError(f'This file already exists in the repo: {self.full_file_path}')
            elif not self.full_dir_path.exists():
                raise SCMWriteFileError('The path provided to save the file in does not exist')
            else:
                if as_yaml:
                    with open(self.full_file_path, 'w') as outfile:
                        yaml.dump(data, outfile, explicit_start=True, explicit_end=True, default_flow_style=False)
                else:
                    with open(self.full_file_path, 'w') as outfile:
                        outfile.write(data)
        else:
            raise SCMWriteFileError('You must have a repo cloned before trying to create a file')

        if self.full_file_path.exists():
            return True
        else:
            raise SCMWriteFileError('Was not able to write the file to the filesystem')

    def push_data_to_remote_repo(self):
        """
        Commit the changes and push the branch to master
        """
        if self.repo_path and self.repo_path.exists() is True and self.repo_path.is_dir() is True:
            results = subprocess.run(["git", "add", f"{self.relative_file_path}"],
                                     cwd=self.repo_path, stdout=subprocess.PIPE,
                                     stderr=subprocess.PIPE, check=False)
            if results.returncode != 0:
                raise SCMPushDataError(f"something bad happened while adding the file. returncode: {results.returncode} stderr: {results.stderr}")

            # Supply identity on the command line so no global git config is needed.
            command = ["git", "-c", f"user.name='{self.username}'", "-c", f"user.email='{self.email}'", "commit", "-m", "Adding file to repo from python"]
            results = subprocess.run(command, cwd=self.repo_path, stdout=subprocess.PIPE, stderr=subprocess.PIPE, check=False)
            if results.returncode != 0:
                raise SCMPushDataError(f"something bad happened while commiting the changes. returncode: {results.returncode} stderr: {results.stderr}")

            dest = f'https://{self.username}:{self.password}@github.com/{self.repo_owner}/{self.repo_name}/'
            src = f'{self.branch_name}'
            results = subprocess.run(['git', 'push', dest, src], cwd=self.repo_path, stdout=subprocess.PIPE, stderr=subprocess.PIPE, check=False)
            if results.returncode != 0:
                print('dest:', dest)
                print('src:', src)
                raise SCMPushDataError(f"something bad happened while pushing the branch. "
                                       f"returncode: {results.returncode} stderr: {results.stderr} "
                                       f"repo: {self.repo_name} branch: {self.branch_name}")
            else:
                return True
        else:
            raise SCMPushDataError("An undefined error occured while attempting to push the data")

    def delete_local_copy_of_repo(self):
        """
        Delete the local repo when action is completed
        """
        try:
            shutil.rmtree(self.repo_path)
            return True
        except Exception as e:
            raise SCMDeleteRepoError(f"An error occured while attempting to delete the repo. {type(e)} {e}")

    def _gql_query(self, query=None, vars=None):
        """
        Helper function to call the GraphQL enpoint in GitHub
        """
        if query is None:
            raise TypeError("A GraphQL query is required to run this function")
        headers = {"Authorization": f"token {self.password}"}
        request = requests.post(self.git_hub_graphql_api, json={'query': query, 'variables': vars}, headers=headers)
        try:
            data = request.json()
            if data['data'].get("errors"):
                error = data['data']['errors']
                raise SCMGraphQLError(f"An error in GraphQL occured. See the following for more info: {error}")
            else:
                return data
        except Exception as e:
            # Dump what we can for debugging, then let the caller see the error.
            print(e)
            print(type(e))
            print(dir(e))
            print(request)
            raise

    def get_github_repo_id(self):
        """
        Takes the github user id and repo name and gets the github internal id
        """
        query = """
            query RepoIDQuery($repo_name: String!, $owner: String!) {
              repository(name: $repo_name, owner: $owner) {
                id
              }
            }
        """

        variables = {
            "repo_name": self.repo_name,
            "owner": self.repo_owner
        }
        response = self._gql_query(query=query, vars=variables)
        self.github_repo_id = response['data']['repository']['id']

    def create_git_hub_pull_request(self, destination_branch=None, source_branch=None, title=None, body=None):
        """
        Create a Pull Request in GitHub
        Takes 2 branch names, title, body, and the repo ID
        """
        if destination_branch is None or source_branch is None:
            raise TypeError("Must have a source and destination branch to create a Pull Request")

        mutation = """
            mutation MyMutation($repo_id: String!, $dest_branch: String!, $src_branch: String!, $title: String!, $body: String!) {
              __typename
              createPullRequest(input: {repositoryId: $repo_id,
                                        baseRefName: $dest_branch,
                                        headRefName: $src_branch,
                                        title: $title,
                                        body: $body}) {
                pullRequest {
                  number,
                  url
                }
              }
            }
        """

        variables = {
            "repo_id": self.github_repo_id,
            "dest_branch": destination_branch,
            "src_branch": source_branch,
            "title": title,
            "body": body
        }
        data = self._gql_query(query=mutation, vars=variables)
        return data

    def get_all_current_branches(self):
        """
        Pull the last 10 branches and ref ID's from a github repo
        """
        query = """
            query BranchQuery($repo_name: String!, $owner: String!) {
              repository(name: $repo_name, owner: $owner) {
                name
                nameWithOwner
                refs(refPrefix: "refs/heads/", last: 10) {
                  totalCount
                  nodes {
                    id
                    name
                  }
                }
              }
            }
        """

        variables = {
            "owner": self.repo_owner,
            "repo_name": self.repo_name
        }
        data = self._gql_query(query=query, vars=variables)

        # Cache branch name -> GraphQL ref ID (avoid shadowing builtin ``id``).
        for ref in data['data']['repository']['refs']['nodes']:
            ref_id = ref['id']
            name = ref['name']
            self.existing_branches[name] = ref_id
| 38.082857 | 162 | 0.579038 | from pathlib import Path
from datetime import datetime
import shutil
import subprocess
import yaml
import requests
class SCMCredentialValidationError(Exception):
pass
class SCMCloneRepoError(Exception):
pass
class SCMCreateBranchError(Exception):
pass
class SCMWriteFileError(Exception):
pass
class SCMPushDataError(Exception):
pass
class SCMDeleteRepoError(Exception):
pass
class SCMGraphQLError(Exception):
pass
class SourceControlMgmt():
def __init__(self, username=None, password=None, friendly_name=None, email=None, repo_name=None, repo_owner=None):
self.username = username
self.password = password
self.friendly_name = friendly_name
self.email = email
self.repo_path = None
self.repo_name = repo_name
self.filename = None
self.branch_name = None
self.full_file_path = None
self.relative_file_path = None
self.existing_branches = {}
self.git_hub_graphql_api = 'https://api.github.com/graphql'
self.github_repo_id = None
self.repo_owner = self.username if not repo_owner else repo_owner
self.get_github_repo_id()
exceptions = ['repo_path', 'filename', 'branch_name', 'full_file_path', 'relative_file_path', 'existing_branches']
if not all(vars(self).values()):
missing_values = [k for k, v in vars(self).items() if not v and k not in exceptions]
if missing_values:
raise TypeError(f"All values must have data. The following attributes are empty: {missing_values}")
def validate_scm_creds(self):
results = subprocess.run(['git', 'ls-remote', f'https://{self.username}:{self.password}@github.com/{self.repo_owner}/{self.repo_name}/', 'HEAD'],
stdout=subprocess.PIPE,
stderr=subprocess.PIPE,
check=False)
if results.returncode == 0 and b"HEAD" in results.stdout:
return True
raise SCMCredentialValidationError("The supplied credentials do not provide access to the given repo")
def clone_private_repo(self, directory=None):
if directory is None:
raise TypeError('Must pass a value for the directory into this function')
if isinstance(directory, str):
d = Path(directory)
elif isinstance(directory, Path):
d = directory
self.repo_path = d / self.repo_name
if self.repo_path.exists() is True and self.repo_path.is_dir() is True:
print('Directory exists and is being deleted')
shutil.rmtree(self.repo_path)
results = subprocess.run(['git', 'clone', f'https://{self.username}:{self.password}@github.com/{self.repo_owner}/{self.repo_name}/', f'{self.repo_path}'],
stdout=subprocess.PIPE,
stderr=subprocess.PIPE,
check=False)
expected_string = f"Cloning into '{self.repo_path}'...\n"
encoded_expected_string = expected_string.encode()
if (results.returncode == 0 and
encoded_expected_string == results.stderr and
self.repo_path.exists() is True and
self.repo_path.is_dir() is True):
return True
else:
raise SCMCloneRepoError("The repo could not be cloned")
def create_new_branch_in_repo(self, branch_name=None):
if not branch_name:
raise TypeError('You must pass a branch name into this function')
else:
self.branch_name = branch_name
if self.repo_path and self.repo_path.exists() is True and self.repo_path.is_dir() is True:
results = subprocess.run(["git", "checkout", "-b", branch_name], cwd=self.repo_path, stdout=subprocess.PIPE, stderr=subprocess.PIPE, check=False)
else:
raise SCMCreateBranchError('You must have a repo cloned before trying to create a branch')
expected_results = f"Switched to a new branch '{self.branch_name}'\n"
if results.returncode == 0 and expected_results.encode() == results.stderr:
return True
else:
raise SCMCreateBranchError("A new branch was not able to be created")
def write_data_to_file_in_repo(self, data, file_path=None, file_name=None, append_timestamp=False, as_yaml=False):
if file_path is None:
raise TypeError('Must pass a string with the folder name of where the file will be stored into this function')
if as_yaml and not isinstance(data, dict):
raise TypeError('Must pass a dictionary to this function')
now = datetime.now()
str_now = now.strftime("%Y%m%d-%H%M%S")
if append_timestamp:
file_parts = file_name.split('.')
if len(file_parts) > 1:
self.filename = f"{file_parts[0]}-{str_now}.{file_parts[1]}"
else:
self.filename = f"{file_name}-{str_now}"
else:
self.filename = f"{file_name}"
if self.repo_path and self.repo_path.exists() is True and self.repo_path.is_dir() is True:
self.full_dir_path = self.repo_path / f"{file_path}"
self.full_file_path = self.full_dir_path / self.filename
self.relative_file_path = f'{file_path}/{self.filename}' if file_path else f'{self.filename}'
if self.full_file_path.exists():
raise SCMWriteFileError(f'This file already exists in the repo: {self.full_file_path}')
elif not self.full_dir_path.exists():
raise SCMWriteFileError('The path provided to save the file in does not exist')
else:
if as_yaml:
with open(self.full_file_path, 'w') as outfile:
yaml.dump(data, outfile, explicit_start=True, explicit_end=True, default_flow_style=False)
else:
with open(self.full_file_path, 'w') as outfile:
outfile.write(data)
else:
raise SCMWriteFileError('You must have a repo cloned before trying to create a file')
if self.full_file_path.exists():
return True
else:
raise SCMWriteFileError('Was not able to write the file to the filesystem')
def push_data_to_remote_repo(self):
if self.repo_path and self.repo_path.exists() is True and self.repo_path.is_dir() is True:
results = subprocess.run(["git", "add", f"{self.relative_file_path}"],
cwd=self.repo_path, stdout=subprocess.PIPE,
stderr=subprocess.PIPE, check=False)
if results.returncode != 0:
raise SCMPushDataError(f"something bad happened while adding the file. returncode: {results.returncode} stderr: {results.stderr}")
command = ["git", "-c", f"user.name='{self.username}'", "-c", f"user.email='{self.email}'", "commit", "-m", "Adding file to repo from python"]
results = subprocess.run(command, cwd=self.repo_path, stdout=subprocess.PIPE, stderr=subprocess.PIPE, check=False)
if results.returncode != 0:
raise SCMPushDataError(f"something bad happened while commiting the changes. returncode: {results.returncode} stderr: {results.stderr}")
dest = f'https://{self.username}:{self.password}@github.com/{self.repo_owner}/{self.repo_name}/'
src = f'{self.branch_name}'
results = subprocess.run(['git', 'push', dest, src], cwd=self.repo_path, stdout=subprocess.PIPE, stderr=subprocess.PIPE, check=False)
if results.returncode != 0:
print('dest:', dest)
print('src:', src)
raise SCMPushDataError(f"something bad happened while pushing the branch. "
f"returncode: {results.returncode} stderr: {results.stderr} "
f"repo: {self.repo_name} branch: {self.branch_name}")
else:
return True
else:
raise SCMPushDataError("An undefined error occured while attempting to push the data")
def delete_local_copy_of_repo(self):
try:
shutil.rmtree(self.repo_path)
return True
except Exception as e:
raise SCMDeleteRepoError(f"An error occured while attempting to delete the repo. {type(e)} {e}")
def _gql_query(self, query=None, vars=None):
if query is None:
raise TypeError("A GraphQL query is required to run this function")
headers = {"Authorization": f"token {self.password}"}
request = requests.post(self.git_hub_graphql_api, json={'query': query, 'variables': vars}, headers=headers)
try:
data = request.json()
if data['data'].get("errors"):
error = data['data']['errors']
raise SCMGraphQLError(f"An error in GraphQL occured. See the following for more info: {error}")
else:
return data
except Exception as e:
print(e)
print(type(e))
print(dir(e))
print(request)
raise
def get_github_repo_id(self):
query = """
query RepoIDQuery($repo_name: String!, $owner: String!) {
repository(name: $repo_name, owner: $owner) {
id
}
}
"""
variables = {
"repo_name": self.repo_name,
"owner": self.repo_owner
}
response = self._gql_query(query=query, vars=variables)
self.github_repo_id = response['data']['repository']['id']
def create_git_hub_pull_request(self, destination_branch=None, source_branch=None, title=None, body=None):
if destination_branch is None or source_branch is None:
raise TypeError("Must have a source and destination branch to create a Pull Request")
mutation = """
mutation MyMutation($repo_id: String!, $dest_branch: String!, $src_branch: String!, $title: String!, $body: String!) {
__typename
createPullRequest(input: {repositoryId: $repo_id,
baseRefName: $dest_branch,
headRefName: $src_branch,
title: $title,
body: $body}) {
pullRequest {
number,
url
}
}
}
"""
variables = {
"repo_id": self.github_repo_id,
"dest_branch": destination_branch,
"src_branch": source_branch,
"title": title,
"body": body
}
data = self._gql_query(query=mutation, vars=variables)
return data
def get_all_current_branches(self):
query = """
query BranchQuery($repo_name: String!, $owner: String!) {
repository(name: $repo_name, owner: $owner) {
name
nameWithOwner
refs(refPrefix: "refs/heads/", last: 10) {
totalCount
nodes {
id
name
}
}
}
}
"""
variables = {
"owner": self.repo_owner,
"repo_name": self.repo_name
}
data = self._gql_query(query=query, vars=variables)
for ref in data['data']['repository']['refs']['nodes']:
id = ref['id']
name = ref['name']
self.existing_branches[name] = id
| true | true |
f720dc83e899603cde1322429190880fb730dec1 | 682 | py | Python | recommendation/recommendation/apps/films/migrations/0003_auto_20200314_0357.py | WillionLei/recommendation | 49fd28a47574877a91458201b21ec2a80409bb5f | [
"MIT"
] | null | null | null | recommendation/recommendation/apps/films/migrations/0003_auto_20200314_0357.py | WillionLei/recommendation | 49fd28a47574877a91458201b21ec2a80409bb5f | [
"MIT"
] | null | null | null | recommendation/recommendation/apps/films/migrations/0003_auto_20200314_0357.py | WillionLei/recommendation | 49fd28a47574877a91458201b21ec2a80409bb5f | [
"MIT"
] | null | null | null | # -*- coding: utf-8 -*-
# Generated by Django 1.11.11 on 2020-03-14 03:57
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
    """Add the ``charge`` and ``fcomment`` columns to the ``film`` model."""

    dependencies = [
        ('films', '0002_film'),
    ]

    operations = [
        migrations.AddField(
            model_name='film',
            name='charge',
            # Pricing tier: 0 = free ('免费'), 1 = members ('会员'), 2 = paid ('付费').
            field=models.SmallIntegerField(choices=[(0, '免费'), (1, '会员'), (2, '付费')], default=0, verbose_name='费用'),
        ),
        migrations.AddField(
            model_name='film',
            name='fcomment',
            # Optional free-text description, up to 200 characters; nullable.
            field=models.CharField(max_length=200, null=True, verbose_name='描述信息'),
        ),
    ]
| 26.230769 | 116 | 0.577713 |
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('films', '0002_film'),
]
operations = [
migrations.AddField(
model_name='film',
name='charge',
field=models.SmallIntegerField(choices=[(0, '免费'), (1, '会员'), (2, '付费')], default=0, verbose_name='费用'),
),
migrations.AddField(
model_name='film',
name='fcomment',
field=models.CharField(max_length=200, null=True, verbose_name='描述信息'),
),
]
| true | true |
f720dca24b37afd8444ce644acfa3b1e0c6ddc1c | 197 | py | Python | pola/tests/commands/test_send_ai_pics_stats.py | rodkiewicz/pola-backend | e26df1cea07b43c8b4272739234b7e78e2ce08c9 | [
"BSD-3-Clause"
] | 30 | 2015-08-13T01:05:36.000Z | 2022-01-22T03:02:50.000Z | pola/tests/commands/test_send_ai_pics_stats.py | rodkiewicz/pola-backend | e26df1cea07b43c8b4272739234b7e78e2ce08c9 | [
"BSD-3-Clause"
] | 1,428 | 2015-10-08T07:38:26.000Z | 2022-03-31T08:36:08.000Z | pola/tests/commands/test_send_ai_pics_stats.py | rodkiewicz/pola-backend | e26df1cea07b43c8b4272739234b7e78e2ce08c9 | [
"BSD-3-Clause"
] | 13 | 2015-12-27T22:35:25.000Z | 2022-02-01T15:55:58.000Z | from unittest import TestCase
from django.core.management import call_command
class SendAiPicsStatsTestCase(TestCase):
    """Smoke test for the ``send_ai_pics_stats`` management command."""

    def test_run_command(self):
        # The command must complete without raising; no output is asserted.
        call_command('send_ai_pics_stats')
| 21.888889 | 47 | 0.796954 | from unittest import TestCase
from django.core.management import call_command
class SendAiPicsStatsTestCase(TestCase):
def test_run_command(self):
call_command('send_ai_pics_stats')
| true | true |
f720dd6581d8165827d17d912cf9df585404c27b | 148 | py | Python | src/mkdv/runners/runner_python.py | fvutils/sim-mk | 271b4374a21785ab1b22fac333e423b5febb6a81 | [
"Apache-2.0"
] | null | null | null | src/mkdv/runners/runner_python.py | fvutils/sim-mk | 271b4374a21785ab1b22fac333e423b5febb6a81 | [
"Apache-2.0"
] | null | null | null | src/mkdv/runners/runner_python.py | fvutils/sim-mk | 271b4374a21785ab1b22fac333e423b5febb6a81 | [
"Apache-2.0"
] | null | null | null | '''
Created on Nov 16, 2021
@author: mballance
'''
from mkdv.runners.runner import Runner
class RunnerPython(Runner):
def __init__(self): | 14.8 | 38 | 0.702703 | '''
Created on Nov 16, 2021
@author: mballance
'''
from mkdv.runners.runner import Runner
class RunnerPython(Runner):
def __init__(self): | false | true |
f720de11464a36f7cc26d40b9c9c173b3751a6c4 | 6,695 | py | Python | tests/kafkatest/tests/core/fetch_from_follower_test.py | heyingquan13/kafka | 620ada9888f82756d6ed0eabe96bb9b54518b378 | [
"Apache-2.0"
] | 35 | 2016-09-22T22:53:14.000Z | 2020-02-13T15:12:21.000Z | tests/kafkatest/tests/core/fetch_from_follower_test.py | heyingquan13/kafka | 620ada9888f82756d6ed0eabe96bb9b54518b378 | [
"Apache-2.0"
] | 27 | 2022-02-07T21:53:02.000Z | 2022-03-15T20:38:46.000Z | tests/kafkatest/tests/core/fetch_from_follower_test.py | heyingquan13/kafka | 620ada9888f82756d6ed0eabe96bb9b54518b378 | [
"Apache-2.0"
] | 88 | 2016-11-27T02:16:11.000Z | 2020-02-28T05:10:26.000Z | # Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import time
from collections import defaultdict
from ducktape.mark import matrix
from ducktape.mark.resource import cluster
from kafkatest.services.console_consumer import ConsoleConsumer
from kafkatest.services.kafka import KafkaService, quorum
from kafkatest.services.monitor.jmx import JmxTool
from kafkatest.services.verifiable_producer import VerifiableProducer
from kafkatest.services.zookeeper import ZookeeperService
from kafkatest.tests.produce_consume_validate import ProduceConsumeValidateTest
from kafkatest.utils import is_int
class FetchFromFollowerTest(ProduceConsumeValidateTest):
    """System test for KIP-392 style fetch-from-follower.

    Brings up a 3-broker cluster where each broker sits in its own rack and
    the rack-aware replica selector is enabled, then verifies via JMX that a
    consumer whose ``client.rack`` matches a follower's rack is steered to
    that follower as its preferred read replica.
    """

    # Fully-qualified class name of the rack-aware replica selector under test.
    RACK_AWARE_REPLICA_SELECTOR = "org.apache.kafka.common.replica.RackAwareReplicaSelector"
    # Short metadata refresh so the consumer learns the preferred replica quickly.
    METADATA_MAX_AGE_MS = 3000

    def __init__(self, test_context):
        super(FetchFromFollowerTest, self).__init__(test_context=test_context)
        self.jmx_tool = JmxTool(test_context, jmx_poll_ms=100)
        self.topic = "test_topic"
        # ZooKeeper is only started when the test runs in ZK (non-KRaft) mode.
        self.zk = ZookeeperService(test_context, num_nodes=1) if quorum.for_test(test_context) == quorum.zk else None
        # One partition replicated to all three brokers; each broker is
        # assigned a distinct rack so every follower is in a different rack
        # than the leader.
        self.kafka = KafkaService(test_context,
                                  num_nodes=3,
                                  zk=self.zk,
                                  topics={
                                      self.topic: {
                                          "partitions": 1,
                                          "replication-factor": 3,
                                          "configs": {"min.insync.replicas": 1}},
                                  },
                                  server_prop_overrides=[
                                      ["replica.selector.class", self.RACK_AWARE_REPLICA_SELECTOR]
                                  ],
                                  per_node_server_prop_overrides={
                                      1: [("broker.rack", "rack-a")],
                                      2: [("broker.rack", "rack-b")],
                                      3: [("broker.rack", "rack-c")]
                                  },
                                  controller_num_nodes_override=1)

        self.producer_throughput = 1000
        self.num_producers = 1
        self.num_consumers = 1

    def min_cluster_size(self):
        # Reserve two extra nodes per producer/consumer on top of the base size.
        return super(FetchFromFollowerTest, self).min_cluster_size() + self.num_producers * 2 + self.num_consumers * 2

    def setUp(self):
        if self.zk:
            self.zk.start()
        self.kafka.start()

    @cluster(num_nodes=9)
    @matrix(metadata_quorum=quorum.all_non_upgrade)
    def test_consumer_preferred_read_replica(self, metadata_quorum=quorum.zk):
        """
        This test starts up brokers with "broker.rack" and "replica.selector.class" configurations set. The replica
        selector is set to the rack-aware implementation. One of the brokers has a different rack than the other two.
        We then use a console consumer with the "client.rack" set to the same value as the differing broker. After
        producing some records, we verify that the client has been informed of the preferred replica and that all the
        records are properly consumed.
        """

        # Find the leader, configure consumer to be on a different rack
        leader_node = self.kafka.leader(self.topic, 0)
        leader_idx = self.kafka.idx(leader_node)
        non_leader_idx = 2 if leader_idx != 2 else 1
        non_leader_rack = "rack-b" if leader_idx != 2 else "rack-a"

        self.logger.debug("Leader %d %s" % (leader_idx, leader_node))
        self.logger.debug("Non-Leader %d %s" % (non_leader_idx, non_leader_rack))

        self.producer = VerifiableProducer(self.test_context, self.num_producers, self.kafka, self.topic,
                                           throughput=self.producer_throughput)
        # client.rack matches a follower's rack, so the broker should steer
        # this consumer's fetches to that follower.
        self.consumer = ConsoleConsumer(self.test_context, self.num_consumers, self.kafka, self.topic,
                                        client_id="console-consumer", group_id="test-consumer-group-1",
                                        consumer_timeout_ms=60000, message_validator=is_int,
                                        consumer_properties={"client.rack": non_leader_rack, "metadata.max.age.ms": self.METADATA_MAX_AGE_MS})

        # Start up and let some data get produced
        self.start_producer_and_consumer()
        time.sleep(self.METADATA_MAX_AGE_MS * 2. / 1000)

        consumer_node = self.consumer.nodes[0]
        consumer_idx = self.consumer.idx(consumer_node)
        # The consumer exposes its current preferred read replica per
        # topic-partition via this fetch-manager JMX metric.
        read_replica_attribute = "preferred-read-replica"
        read_replica_mbean = "kafka.consumer:type=consumer-fetch-manager-metrics,client-id=%s,topic=%s,partition=%d" % \
                             ("console-consumer", self.topic, 0)
        self.jmx_tool.jmx_object_names = [read_replica_mbean]
        self.jmx_tool.jmx_attributes = [read_replica_attribute]
        self.jmx_tool.start_jmx_tool(consumer_idx, consumer_node)

        # Wait for at least one interval of "metadata.max.age.ms"
        time.sleep(self.METADATA_MAX_AGE_MS * 2. / 1000)

        # Read the JMX output
        self.jmx_tool.read_jmx_output(consumer_idx, consumer_node)
        # Tally how often each broker ID was reported as the preferred replica.
        all_captured_preferred_read_replicas = defaultdict(int)
        self.logger.debug(self.jmx_tool.jmx_stats)
        for ts, data in self.jmx_tool.jmx_stats[0].items():
            for k, v in data.items():
                if k.endswith(read_replica_attribute):
                    all_captured_preferred_read_replicas[int(v)] += 1

        self.logger.debug("Saw the following preferred read replicas %s",
                          dict(all_captured_preferred_read_replicas.items()))

        # The rack-matched follower must have shown up at least once.
        assert all_captured_preferred_read_replicas[non_leader_idx] > 0, \
            "Expected to see broker %d (%s) as a preferred replica" % (non_leader_idx, non_leader_rack)

        # Validate consumed messages
        self.stop_producer_and_consumer()
        self.validate()
| 49.592593 | 142 | 0.64003 |
import time
from collections import defaultdict
from ducktape.mark import matrix
from ducktape.mark.resource import cluster
from kafkatest.services.console_consumer import ConsoleConsumer
from kafkatest.services.kafka import KafkaService, quorum
from kafkatest.services.monitor.jmx import JmxTool
from kafkatest.services.verifiable_producer import VerifiableProducer
from kafkatest.services.zookeeper import ZookeeperService
from kafkatest.tests.produce_consume_validate import ProduceConsumeValidateTest
from kafkatest.utils import is_int
class FetchFromFollowerTest(ProduceConsumeValidateTest):
RACK_AWARE_REPLICA_SELECTOR = "org.apache.kafka.common.replica.RackAwareReplicaSelector"
METADATA_MAX_AGE_MS = 3000
def __init__(self, test_context):
super(FetchFromFollowerTest, self).__init__(test_context=test_context)
self.jmx_tool = JmxTool(test_context, jmx_poll_ms=100)
self.topic = "test_topic"
self.zk = ZookeeperService(test_context, num_nodes=1) if quorum.for_test(test_context) == quorum.zk else None
self.kafka = KafkaService(test_context,
num_nodes=3,
zk=self.zk,
topics={
self.topic: {
"partitions": 1,
"replication-factor": 3,
"configs": {"min.insync.replicas": 1}},
},
server_prop_overrides=[
["replica.selector.class", self.RACK_AWARE_REPLICA_SELECTOR]
],
per_node_server_prop_overrides={
1: [("broker.rack", "rack-a")],
2: [("broker.rack", "rack-b")],
3: [("broker.rack", "rack-c")]
},
controller_num_nodes_override=1)
self.producer_throughput = 1000
self.num_producers = 1
self.num_consumers = 1
def min_cluster_size(self):
return super(FetchFromFollowerTest, self).min_cluster_size() + self.num_producers * 2 + self.num_consumers * 2
def setUp(self):
if self.zk:
self.zk.start()
self.kafka.start()
@cluster(num_nodes=9)
@matrix(metadata_quorum=quorum.all_non_upgrade)
def test_consumer_preferred_read_replica(self, metadata_quorum=quorum.zk):
leader_node = self.kafka.leader(self.topic, 0)
leader_idx = self.kafka.idx(leader_node)
non_leader_idx = 2 if leader_idx != 2 else 1
non_leader_rack = "rack-b" if leader_idx != 2 else "rack-a"
self.logger.debug("Leader %d %s" % (leader_idx, leader_node))
self.logger.debug("Non-Leader %d %s" % (non_leader_idx, non_leader_rack))
self.producer = VerifiableProducer(self.test_context, self.num_producers, self.kafka, self.topic,
throughput=self.producer_throughput)
self.consumer = ConsoleConsumer(self.test_context, self.num_consumers, self.kafka, self.topic,
client_id="console-consumer", group_id="test-consumer-group-1",
consumer_timeout_ms=60000, message_validator=is_int,
consumer_properties={"client.rack": non_leader_rack, "metadata.max.age.ms": self.METADATA_MAX_AGE_MS})
self.start_producer_and_consumer()
time.sleep(self.METADATA_MAX_AGE_MS * 2. / 1000)
consumer_node = self.consumer.nodes[0]
consumer_idx = self.consumer.idx(consumer_node)
read_replica_attribute = "preferred-read-replica"
read_replica_mbean = "kafka.consumer:type=consumer-fetch-manager-metrics,client-id=%s,topic=%s,partition=%d" % \
("console-consumer", self.topic, 0)
self.jmx_tool.jmx_object_names = [read_replica_mbean]
self.jmx_tool.jmx_attributes = [read_replica_attribute]
self.jmx_tool.start_jmx_tool(consumer_idx, consumer_node)
time.sleep(self.METADATA_MAX_AGE_MS * 2. / 1000)
self.jmx_tool.read_jmx_output(consumer_idx, consumer_node)
all_captured_preferred_read_replicas = defaultdict(int)
self.logger.debug(self.jmx_tool.jmx_stats)
for ts, data in self.jmx_tool.jmx_stats[0].items():
for k, v in data.items():
if k.endswith(read_replica_attribute):
all_captured_preferred_read_replicas[int(v)] += 1
self.logger.debug("Saw the following preferred read replicas %s",
dict(all_captured_preferred_read_replicas.items()))
assert all_captured_preferred_read_replicas[non_leader_idx] > 0, \
"Expected to see broker %d (%s) as a preferred replica" % (non_leader_idx, non_leader_rack)
self.stop_producer_and_consumer()
self.validate()
| true | true |
f720def8adc18a066172259ff0e5e88e433e15c0 | 39,628 | py | Python | python/dgl/distributed/graph_partition_book.py | hoangdzung/dgl | f7ce267164118a0526dd2f42f3baf799bb59d6b7 | [
"Apache-2.0"
] | 1 | 2021-08-18T11:54:42.000Z | 2021-08-18T11:54:42.000Z | python/dgl/distributed/graph_partition_book.py | amorehead/dgl | 738b75f41e5d3229e5ccda52d76e1297d7b0520d | [
"Apache-2.0"
] | null | null | null | python/dgl/distributed/graph_partition_book.py | amorehead/dgl | 738b75f41e5d3229e5ccda52d76e1297d7b0520d | [
"Apache-2.0"
] | 1 | 2021-11-28T09:16:55.000Z | 2021-11-28T09:16:55.000Z | """Define graph partition book."""
import pickle
from abc import ABC
import numpy as np
from .. import backend as F
from ..base import NID, EID
from .. import utils
from .shared_mem_utils import _to_shared_mem, _get_ndata_path, _get_edata_path, DTYPE_DICT
from .._ffi.ndarray import empty_shared_mem
from ..ndarray import exist_shared_mem_array
from .id_map import IdMap
def _move_metadata_to_shared_mem(graph_name, num_nodes, num_edges, part_id,
                                 num_partitions, node_map, edge_map, is_range_part):
    '''Serialize the partition book's metadata into shared memory.

    The stored tensors are later consumed by ``_get_shared_mem_metadata`` to
    reconstruct a graph partition book in another process.

    Parameters
    ----------
    graph_name : str
        The name of the graph; used to derive the shared-memory paths.
    num_nodes : int
        The total number of nodes.
    num_edges : int
        The total number of edges.
    part_id : int
        The partition ID.
    num_partitions : int
        The number of physical partitions generated for the graph.
    node_map : Tensor
        Mapping information from node IDs to partitions. With range
        partitioning this holds the serialized partition ranges.
    edge_map : Tensor
        Mapping information from edge IDs to partitions. With range
        partitioning this holds the serialized partition ranges.
    is_range_part : bool
        Whether this is a range partition; readers need it to know how to
        deserialize ``node_map`` and ``edge_map``.

    Returns
    -------
    (Tensor, Tensor, Tensor)
        The shared-memory tensors holding the serialized metadata header,
        the node map, and the edge map, respectively.
    '''
    # Fixed 7-field header; _get_shared_mem_metadata relies on this layout.
    header = [int(is_range_part), num_nodes, num_edges, num_partitions, part_id,
              len(node_map), len(edge_map)]
    shared_meta = _to_shared_mem(F.tensor(header), _get_ndata_path(graph_name, 'meta'))
    shared_node_map = _to_shared_mem(node_map, _get_ndata_path(graph_name, 'node_map'))
    shared_edge_map = _to_shared_mem(edge_map, _get_edata_path(graph_name, 'edge_map'))
    return shared_meta, shared_node_map, shared_edge_map
def _get_shared_mem_metadata(graph_name):
    '''Deserialize the metadata of a graph from shared memory.

    The server serializes a graph's metadata into shared memory (see
    ``_move_metadata_to_shared_mem``); the client reads it back here.

    Parameters
    ----------
    graph_name : str
        The name of the graph; used to derive the shared-memory paths.

    Returns
    -------
    (bool, int, int, Tensor, Tensor)
        Whether it is range partitioning, the partition ID, the number of
        partitions, the serialized node map, and the serialized edge map.
    '''
    dtype = DTYPE_DICT[F.int64]
    # The header has 7 int64 fields: is_range_part, num_nodes, num_edges,
    # num_partitions, part_id, len(node_map), len(edge_map).
    meta_arr = empty_shared_mem(_get_ndata_path(graph_name, 'meta'), False, (7,), dtype)
    meta = F.asnumpy(F.zerocopy_from_dlpack(meta_arr.to_dlpack()))
    is_range_part, _, _, num_partitions, part_id, node_map_len, edge_map_len = meta

    # The maps are stored as flat int64 arrays whose lengths come from the header.
    node_arr = empty_shared_mem(_get_ndata_path(graph_name, 'node_map'), False,
                                (node_map_len,), dtype)
    node_map = F.zerocopy_from_dlpack(node_arr.to_dlpack())
    edge_arr = empty_shared_mem(_get_edata_path(graph_name, 'edge_map'), False,
                                (edge_map_len,), dtype)
    edge_map = F.zerocopy_from_dlpack(edge_arr.to_dlpack())
    return is_range_part, part_id, num_partitions, node_map, edge_map
def get_shared_mem_partition_book(graph_name, graph_part):
    '''Reconstruct a graph partition book from shared memory.

    A graph partition book of a specific graph can be serialized to shared
    memory; this rebuilds it on the reader side.

    Parameters
    ----------
    graph_name : str
        The name of the graph.
    graph_part : DGLGraph
        The graph structure of a partition.

    Returns
    -------
    GraphPartitionBook or None
        A partition book for this partition, or None if nothing has been
        published to shared memory yet.
    '''
    if not exist_shared_mem_array(_get_ndata_path(graph_name, 'meta')):
        return None
    is_range_part, part_id, num_parts, node_map_data, edge_map_data = \
        _get_shared_mem_metadata(graph_name)
    if is_range_part != 1:
        return BasicPartitionBook(part_id, num_parts, node_map_data, edge_map_data,
                                  graph_part)
    # Range partitioning: the maps were pickled into a byte list and stored as
    # an int array. Reverse that process; entries are ordered by type ID, so
    # the enumeration index recovers each node/edge type's ID.
    node_map, ntypes = {}, {}
    typed_nid_ranges = pickle.loads(bytes(F.asnumpy(node_map_data).tolist()))
    for type_id, (ntype, nid_range) in enumerate(typed_nid_ranges):
        ntypes[ntype] = type_id
        node_map[ntype] = nid_range
    edge_map, etypes = {}, {}
    typed_eid_ranges = pickle.loads(bytes(F.asnumpy(edge_map_data).tolist()))
    for type_id, (etype, eid_range) in enumerate(typed_eid_ranges):
        etypes[etype] = type_id
        edge_map[etype] = eid_range
    return RangePartitionBook(part_id, num_parts, node_map, edge_map, ntypes, etypes)
class GraphPartitionBook(ABC):
    """The base class of the graph partition book.

    For distributed training, a graph is partitioned into multiple parts and is
    loaded in multiple machines. The partition book contains all necessary
    information to locate nodes and edges in the cluster:

    * the number of partitions,
    * the partition ID that a node or edge belongs to,
    * the node IDs and the edge IDs that a partition has,
    * the local IDs of nodes and edges in a partition.

    Two classes implement ``GraphPartitionBook``: ``BasicPartitionBook`` stores
    the mapping between every individual node/edge ID and partition ID on every
    machine, which usually consumes a lot of memory, while ``RangePartitionBook``
    calculates the mapping from small metadata because nodes/edges are relabeled
    so IDs of the same partition fall in a contiguous range. ``RangePartitionBook``
    is usually preferred.

    A graph partition book is constructed automatically when a graph is
    partitioned and is loaded together with a graph partition. See
    :py:meth:`~dgl.distributed.partition.partition_graph`,
    :py:meth:`~dgl.distributed.partition.load_partition` and
    :py:meth:`~dgl.distributed.partition.load_partition_book` for more details.
    """
    def shared_memory(self, graph_name):
        """Move the partition book to shared memory.

        Parameters
        ----------
        graph_name : str
            The graph name. This name is used to read the partition book from
            shared memory in another process.
        """
    def num_partitions(self):
        """Return the number of partitions.

        Returns
        -------
        int
            number of partitions
        """
    def metadata(self):
        """Return the partition meta data.

        The meta data includes the machine ID and the number of nodes and
        edges of each partition, e.g.::

            [{'machine_id': 0, 'num_nodes': 3000, 'num_edges': 5000},
             {'machine_id': 1, 'num_nodes': 2000, 'num_edges': 4888}, ...]

        Returns
        -------
        list[dict[str, any]]
            Meta data of each partition.
        """
    def nid2partid(self, nids, ntype):
        """Map global node IDs to partition IDs.

        Parameters
        ----------
        nids : tensor
            global node IDs
        ntype : str
            The node type

        Returns
        -------
        tensor
            partition IDs
        """
    def eid2partid(self, eids, etype):
        """Map global edge IDs to partition IDs.

        Parameters
        ----------
        eids : tensor
            global edge IDs
        etype : str
            The edge type

        Returns
        -------
        tensor
            partition IDs
        """
    def partid2nids(self, partid, ntype):
        """Map a partition ID to its global node IDs.

        Parameters
        ----------
        partid : int
            partition ID
        ntype : str
            The node type

        Returns
        -------
        tensor
            node IDs
        """
    def partid2eids(self, partid, etype):
        """Map a partition ID to its global edge IDs.

        Parameters
        ----------
        partid : int
            partition ID
        etype : str
            The edge type

        Returns
        -------
        tensor
            edge IDs
        """
    def nid2localnid(self, nids, partid, ntype):
        """Get local node IDs within the given partition.

        Parameters
        ----------
        nids : tensor
            global node IDs
        partid : int
            partition ID
        ntype : str
            The node type

        Returns
        -------
        tensor
            local node IDs
        """
    def eid2localeid(self, eids, partid, etype):
        """Get the local edge IDs within the given partition.

        Parameters
        ----------
        eids : tensor
            global edge IDs
        partid : int
            partition ID
        etype : str
            The edge type

        Returns
        -------
        tensor
            local edge IDs
        """
    @property
    def partid(self):
        """Get the current partition ID.

        Returns
        -------
        int
            The partition ID of the current machine.
        """
    @property
    def ntypes(self):
        """Get the list of node types."""
    @property
    def etypes(self):
        """Get the list of edge types."""
    def map_to_per_ntype(self, ids):
        """Map homogeneous node IDs to type-wise IDs and node types.

        Parameters
        ----------
        ids : tensor
            Homogeneous node IDs.

        Returns
        -------
        (tensor, tensor)
            node type IDs and type-wise node IDs.
        """
    def map_to_per_etype(self, ids):
        """Map homogeneous edge IDs to type-wise IDs and edge types.

        Parameters
        ----------
        ids : tensor
            Homogeneous edge IDs.

        Returns
        -------
        (tensor, tensor)
            edge type IDs and type-wise edge IDs.
        """
    def map_to_homo_nid(self, ids, ntype):
        """Map type-wise node IDs and a type to homogeneous node IDs.

        Parameters
        ----------
        ids : tensor
            Type-wise node IDs
        ntype : str
            node type

        Returns
        -------
        Tensor
            Homogeneous node IDs.
        """
    def map_to_homo_eid(self, ids, etype):
        """Map type-wise edge IDs and a type to homogeneous edge IDs.

        Parameters
        ----------
        ids : tensor
            Type-wise edge IDs
        etype : str
            edge type

        Returns
        -------
        Tensor
            Homogeneous edge IDs.
        """
class BasicPartitionBook(GraphPartitionBook):
    """Partition book that stores an explicit per-ID mapping.

    The partition book maintains the mapping of every single node ID and edge
    ID to a partition ID. This is very flexible at the cost of large memory
    consumption. On a large graph, the mapping consumes significant memory and
    this partition book is not recommended.

    Only supports homogeneous graphs (node type '_N' and edge type '_E').

    Parameters
    ----------
    part_id : int
        partition ID of current partition book
    num_parts : int
        number of total partitions
    node_map : tensor
        global node ID mapping to partition ID
    edge_map : tensor
        global edge ID mapping to partition ID
    part_graph : DGLGraph
        The graph partition structure.
    """
    def __init__(self, part_id, num_parts, node_map, edge_map, part_graph):
        assert part_id >= 0, 'part_id cannot be a negative number.'
        assert num_parts > 0, 'num_parts must be greater than zero.'
        self._part_id = int(part_id)
        self._num_partitions = int(num_parts)
        self._nid2partid = F.tensor(node_map)
        assert F.dtype(self._nid2partid) == F.int64, \
            'the node map must be stored in an integer array'
        self._eid2partid = F.tensor(edge_map)
        assert F.dtype(self._eid2partid) == F.int64, \
            'the edge map must be stored in an integer array'
        # Get meta data of the partition book: per-partition node/edge counts.
        self._partition_meta_data = []
        _, nid_count = np.unique(F.asnumpy(self._nid2partid), return_counts=True)
        _, eid_count = np.unique(F.asnumpy(self._eid2partid), return_counts=True)
        for partid in range(self._num_partitions):
            part_info = {}
            part_info['machine_id'] = partid
            part_info['num_nodes'] = int(nid_count[partid])
            part_info['num_edges'] = int(eid_count[partid])
            self._partition_meta_data.append(part_info)
        # Get partid2nids: argsort groups node IDs by partition, so consecutive
        # slices of `sorted_nid` (sized by `nid_count`) are each partition's nodes.
        self._partid2nids = []
        sorted_nid = F.tensor(np.argsort(F.asnumpy(self._nid2partid)))
        start = 0
        for offset in nid_count:
            part_nids = sorted_nid[start:start+offset]
            start += offset
            self._partid2nids.append(part_nids)
        # Get partid2eids the same way for edges.
        self._partid2eids = []
        sorted_eid = F.tensor(np.argsort(F.asnumpy(self._eid2partid)))
        start = 0
        for offset in eid_count:
            part_eids = sorted_eid[start:start+offset]
            start += offset
            self._partid2eids.append(part_eids)
        # Get nidg2l: a dense global-to-local node ID table, built only for the
        # local partition (remote entries stay None).
        self._nidg2l = [None] * self._num_partitions
        global_id = part_graph.ndata[NID]
        max_global_id = np.amax(F.asnumpy(global_id))
        # TODO(chao): support int32 index
        g2l = F.zeros((max_global_id+1), F.int64, F.context(global_id))
        g2l = F.scatter_row(g2l, global_id, F.arange(0, len(global_id)))
        self._nidg2l[self._part_id] = g2l
        # Get eidg2l: the same global-to-local table for edges.
        self._eidg2l = [None] * self._num_partitions
        global_id = part_graph.edata[EID]
        max_global_id = np.amax(F.asnumpy(global_id))
        # TODO(chao): support int32 index
        g2l = F.zeros((max_global_id+1), F.int64, F.context(global_id))
        g2l = F.scatter_row(g2l, global_id, F.arange(0, len(global_id)))
        self._eidg2l[self._part_id] = g2l
        # node size and edge size of the local partition
        self._edge_size = len(self.partid2eids(self._part_id))
        self._node_size = len(self.partid2nids(self._part_id))
    def shared_memory(self, graph_name):
        """Move data to shared memory."""
        # is_range_part=False: the raw per-ID maps are stored directly.
        self._meta, self._nid2partid, self._eid2partid = _move_metadata_to_shared_mem(
            graph_name, self._num_nodes(), self._num_edges(), self._part_id, self._num_partitions,
            self._nid2partid, self._eid2partid, False)
    def num_partitions(self):
        """Return the number of partitions."""
        return self._num_partitions
    def metadata(self):
        """Return the partition meta data."""
        return self._partition_meta_data
    def _num_nodes(self, ntype='_N'):
        """The total number of nodes."""
        assert ntype == '_N', 'Base partition book only supports homogeneous graph.'
        return len(self._nid2partid)
    def _num_edges(self, etype='_E'):
        """The total number of edges."""
        assert etype == '_E', 'Base partition book only supports homogeneous graph.'
        return len(self._eid2partid)
    def map_to_per_ntype(self, ids):
        """Map global homogeneous node IDs to (node type IDs, per-type IDs).

        The graph is homogeneous, so type IDs are all zero and per-type IDs
        equal the input IDs.
        """
        return F.zeros((len(ids),), F.int32, F.cpu()), ids
    def map_to_per_etype(self, ids):
        """Map global homogeneous edge IDs to (edge type IDs, per-type IDs).

        The graph is homogeneous, so type IDs are all zero and per-type IDs
        equal the input IDs.
        """
        return F.zeros((len(ids),), F.int32, F.cpu()), ids
    def map_to_homo_nid(self, ids, ntype):
        """Map per-node-type IDs to global node IDs (identity for '_N')."""
        assert ntype == '_N', 'Base partition book only supports homogeneous graph.'
        return ids
    def map_to_homo_eid(self, ids, etype):
        """Map per-edge-type IDs to global edge IDs (identity for '_E')."""
        assert etype == '_E', 'Base partition book only supports homogeneous graph.'
        return ids
    def nid2partid(self, nids, ntype='_N'):
        """Map global node IDs to partition IDs."""
        assert ntype == '_N', 'Base partition book only supports homogeneous graph.'
        return F.gather_row(self._nid2partid, nids)
    def eid2partid(self, eids, etype='_E'):
        """Map global edge IDs to partition IDs."""
        assert etype == '_E', 'Base partition book only supports homogeneous graph.'
        return F.gather_row(self._eid2partid, eids)
    def partid2nids(self, partid, ntype='_N'):
        """Map a partition ID to its global node IDs."""
        assert ntype == '_N', 'Base partition book only supports homogeneous graph.'
        return self._partid2nids[partid]
    def partid2eids(self, partid, etype='_E'):
        """Map a partition ID to its global edge IDs."""
        assert etype == '_E', 'Base partition book only supports homogeneous graph.'
        return self._partid2eids[partid]
    def nid2localnid(self, nids, partid, ntype='_N'):
        """Get local node IDs within the given partition.

        Only the local partition's table exists; asking about a remote
        partition raises.
        """
        assert ntype == '_N', 'Base partition book only supports homogeneous graph.'
        if partid != self._part_id:
            raise RuntimeError('Now GraphPartitionBook does not support \
                getting remote tensor of nid2localnid.')
        return F.gather_row(self._nidg2l[partid], nids)
    def eid2localeid(self, eids, partid, etype='_E'):
        """Get the local edge IDs within the given partition.

        Only the local partition's table exists; asking about a remote
        partition raises.
        """
        assert etype == '_E', 'Base partition book only supports homogeneous graph.'
        if partid != self._part_id:
            raise RuntimeError('Now GraphPartitionBook does not support \
                getting remote tensor of eid2localeid.')
        return F.gather_row(self._eidg2l[partid], eids)
    @property
    def partid(self):
        """Get the current partition ID."""
        return self._part_id
    @property
    def ntypes(self):
        """Get the list of node types."""
        return ['_N']
    @property
    def etypes(self):
        """Get the list of edge types."""
        return ['_E']
class RangePartitionBook(GraphPartitionBook):
    """Partition book based on contiguous per-partition ID ranges.

    This partition book is used if the nodes and edges of a graph partition are
    assigned contiguous IDs. It uses a very small amount of memory to store the
    partition information, so it is usually preferred over
    ``BasicPartitionBook``.

    Parameters
    ----------
    part_id : int
        partition ID of current partition book
    num_parts : int
        number of total partitions
    node_map : dict[str, Tensor]
        Global node ID ranges within partitions for each node type. The key is
        the node type name. The value is a tensor of shape :math:`(K, 2)`,
        where :math:`K` is the number of partitions; each row holds the
        starting and ending IDs of that type in a partition.
    edge_map : dict[str, Tensor]
        Global edge ID ranges within partitions for each edge type, in the
        same :math:`(K, 2)` layout as ``node_map``.
    ntypes : dict[str, int]
        map ntype strings to ntype IDs.
    etypes : dict[str, int]
        map etype strings to etype IDs.
    """
    def __init__(self, part_id, num_parts, node_map, edge_map, ntypes, etypes):
        assert part_id >= 0, 'part_id cannot be a negative number.'
        assert num_parts > 0, 'num_parts must be greater than zero.'
        self._partid = part_id
        self._num_partitions = num_parts
        # Invert the name->ID dicts into lists indexed by type ID.
        self._ntypes = [None] * len(ntypes)
        self._etypes = [None] * len(etypes)
        for ntype in ntypes:
            ntype_id = ntypes[ntype]
            self._ntypes[ntype_id] = ntype
        assert all([ntype is not None for ntype in self._ntypes]), \
            "The node types have invalid IDs."
        for etype in etypes:
            etype_id = etypes[etype]
            self._etypes[etype_id] = etype
        assert all([etype is not None for etype in self._etypes]), \
            "The edge types have invalid IDs."
        # This stores the node ID ranges for each node type in each partition.
        # The key is the node type; the value is a NumPy matrix with two columns,
        # in which each row is the start/end of the node ID range in a partition.
        # The node IDs are global IDs in the homogeneous representation.
        self._typed_nid_range = {}
        # This stores the node ID map for per-node-type IDs in each partition.
        # The key is the node type; the value is a NumPy vector whose entry i
        # is the cumulative node count (i.e. last per-type ID) up to partition i.
        self._typed_max_node_ids = {}
        max_node_map = np.zeros((num_parts,), dtype=np.int64)
        for key in node_map:
            if not isinstance(node_map[key], np.ndarray):
                node_map[key] = F.asnumpy(node_map[key])
            assert node_map[key].shape == (num_parts, 2)
            self._typed_nid_range[key] = node_map[key]
            # This is used for per-node-type lookup.
            self._typed_max_node_ids[key] = np.cumsum(self._typed_nid_range[key][:, 1]
                                                      - self._typed_nid_range[key][:, 0])
            # This is used for homogeneous node ID lookup.
            max_node_map = np.maximum(self._typed_nid_range[key][:, 1], max_node_map)
        # This is a vector with the last (homogeneous) node ID in each partition.
        self._max_node_ids = max_node_map
        # Similar to _typed_nid_range, but for edges.
        self._typed_eid_range = {}
        # Similar to _typed_max_node_ids, but for edges.
        self._typed_max_edge_ids = {}
        max_edge_map = np.zeros((num_parts,), dtype=np.int64)
        for key in edge_map:
            if not isinstance(edge_map[key], np.ndarray):
                edge_map[key] = F.asnumpy(edge_map[key])
            assert edge_map[key].shape == (num_parts, 2)
            self._typed_eid_range[key] = edge_map[key]
            # This is used for per-edge-type lookup.
            self._typed_max_edge_ids[key] = np.cumsum(self._typed_eid_range[key][:, 1]
                                                      - self._typed_eid_range[key][:, 0])
            # This is used for homogeneous edge ID lookup.
            max_edge_map = np.maximum(self._typed_eid_range[key][:, 1], max_edge_map)
        # Similar to _max_node_ids, but for edges.
        self._max_edge_ids = max_edge_map
        # Map functions from homogeneous node/edge IDs to (type ID, per-type ID).
        self._nid_map = IdMap(self._typed_nid_range)
        self._eid_map = IdMap(self._typed_eid_range)
        # Get meta data of the partition book: per-partition node/edge counts
        # derived from consecutive differences of the cumulative ID vectors.
        self._partition_meta_data = []
        for partid in range(self._num_partitions):
            nrange_start = max_node_map[partid - 1] if partid > 0 else 0
            nrange_end = max_node_map[partid]
            num_nodes = nrange_end - nrange_start
            erange_start = max_edge_map[partid - 1] if partid > 0 else 0
            erange_end = max_edge_map[partid]
            num_edges = erange_end - erange_start
            part_info = {}
            part_info['machine_id'] = partid
            part_info['num_nodes'] = int(num_nodes)
            part_info['num_edges'] = int(num_edges)
            self._partition_meta_data.append(part_info)
    def shared_memory(self, graph_name):
        """Move data to shared memory."""
        # We need to store the nid ranges and eid ranges of different types in
        # the order defined by type IDs, so the reader can recover the type IDs
        # from list positions. The pickled bytes are expanded into a list of
        # ints so they can be stored as an int tensor in shared memory.
        nid_range = [None] * len(self.ntypes)
        for i, ntype in enumerate(self.ntypes):
            nid_range[i] = (ntype, self._typed_nid_range[ntype])
        nid_range_pickle = pickle.dumps(nid_range)
        nid_range_pickle = [e for e in nid_range_pickle]
        eid_range = [None] * len(self.etypes)
        for i, etype in enumerate(self.etypes):
            eid_range[i] = (etype, self._typed_eid_range[etype])
        eid_range_pickle = pickle.dumps(eid_range)
        eid_range_pickle = [e for e in eid_range_pickle]
        self._meta = _move_metadata_to_shared_mem(graph_name,
                                                  0, # We don't need to provide the number of nodes
                                                  0, # We don't need to provide the number of edges
                                                  self._partid, self._num_partitions,
                                                  F.tensor(nid_range_pickle),
                                                  F.tensor(eid_range_pickle),
                                                  True)
    def num_partitions(self):
        """Return the number of partitions."""
        return self._num_partitions
    def _num_nodes(self, ntype='_N'):
        """The total number of nodes, overall or of a particular type."""
        if ntype == '_N':
            return int(self._max_node_ids[-1])
        else:
            return int(self._typed_max_node_ids[ntype][-1])
    def _num_edges(self, etype='_E'):
        """The total number of edges, overall or of a particular type."""
        if etype == '_E':
            return int(self._max_edge_ids[-1])
        else:
            return int(self._typed_max_edge_ids[etype][-1])
    def metadata(self):
        """Return the partition meta data."""
        return self._partition_meta_data
    def map_to_per_ntype(self, ids):
        """Map global homogeneous node IDs to (node type IDs, per-type IDs)."""
        return self._nid_map(ids)
    def map_to_per_etype(self, ids):
        """Map global homogeneous edge IDs to (edge type IDs, per-type IDs)."""
        return self._eid_map(ids)
    def map_to_homo_nid(self, ids, ntype):
        """Map per-node-type IDs to global node IDs in the homogeneous format.

        Computes each ID's offset from the end of its partition's per-type
        range, then applies that offset to the end of the corresponding
        homogeneous range.
        """
        ids = utils.toindex(ids).tousertensor()
        partids = self.nid2partid(ids, ntype)
        typed_max_nids = F.zerocopy_from_numpy(self._typed_max_node_ids[ntype])
        end_diff = F.gather_row(typed_max_nids, partids) - ids
        typed_nid_range = F.zerocopy_from_numpy(self._typed_nid_range[ntype][:, 1])
        return F.gather_row(typed_nid_range, partids) - end_diff
    def map_to_homo_eid(self, ids, etype):
        """Map per-edge-type IDs to global edge IDs in the homogeneous format.

        Same end-offset computation as :meth:`map_to_homo_nid`, for edges.
        """
        ids = utils.toindex(ids).tousertensor()
        partids = self.eid2partid(ids, etype)
        typed_max_eids = F.zerocopy_from_numpy(self._typed_max_edge_ids[etype])
        end_diff = F.gather_row(typed_max_eids, partids) - ids
        typed_eid_range = F.zerocopy_from_numpy(self._typed_eid_range[etype][:, 1])
        return F.gather_row(typed_eid_range, partids) - end_diff
    def nid2partid(self, nids, ntype='_N'):
        """Map global node IDs to partition IDs via binary search on the
        cumulative per-partition ID bounds."""
        nids = utils.toindex(nids)
        if ntype == '_N':
            ret = np.searchsorted(self._max_node_ids, nids.tonumpy(), side='right')
        else:
            ret = np.searchsorted(self._typed_max_node_ids[ntype], nids.tonumpy(), side='right')
        ret = utils.toindex(ret)
        return ret.tousertensor()
    def eid2partid(self, eids, etype='_E'):
        """Map global edge IDs to partition IDs via binary search on the
        cumulative per-partition ID bounds."""
        eids = utils.toindex(eids)
        if etype == '_E':
            ret = np.searchsorted(self._max_edge_ids, eids.tonumpy(), side='right')
        else:
            ret = np.searchsorted(self._typed_max_edge_ids[etype], eids.tonumpy(), side='right')
        ret = utils.toindex(ret)
        return ret.tousertensor()
    def partid2nids(self, partid, ntype='_N'):
        """Map a partition ID to its global node IDs (a contiguous range)."""
        # TODO do we need to cache it?
        if ntype == '_N':
            start = self._max_node_ids[partid - 1] if partid > 0 else 0
            end = self._max_node_ids[partid]
            return F.arange(start, end)
        else:
            start = self._typed_max_node_ids[ntype][partid - 1] if partid > 0 else 0
            end = self._typed_max_node_ids[ntype][partid]
            return F.arange(start, end)
    def partid2eids(self, partid, etype='_E'):
        """Map a partition ID to its global edge IDs (a contiguous range)."""
        # TODO do we need to cache it?
        if etype == '_E':
            start = self._max_edge_ids[partid - 1] if partid > 0 else 0
            end = self._max_edge_ids[partid]
            return F.arange(start, end)
        else:
            start = self._typed_max_edge_ids[etype][partid - 1] if partid > 0 else 0
            end = self._typed_max_edge_ids[etype][partid]
            return F.arange(start, end)
    def nid2localnid(self, nids, partid, ntype='_N'):
        """Get local node IDs within the given partition.

        Local IDs are just global IDs minus the partition's range start; only
        the local partition is supported.
        """
        if partid != self._partid:
            raise RuntimeError('Now RangePartitionBook does not support \
                getting remote tensor of nid2localnid.')
        nids = utils.toindex(nids)
        nids = nids.tousertensor()
        if ntype == '_N':
            start = self._max_node_ids[partid - 1] if partid > 0 else 0
        else:
            start = self._typed_max_node_ids[ntype][partid - 1] if partid > 0 else 0
        return nids - int(start)
    def eid2localeid(self, eids, partid, etype='_E'):
        """Get the local edge IDs within the given partition.

        Local IDs are just global IDs minus the partition's range start; only
        the local partition is supported.
        """
        if partid != self._partid:
            raise RuntimeError('Now RangePartitionBook does not support \
                getting remote tensor of eid2localeid.')
        eids = utils.toindex(eids)
        eids = eids.tousertensor()
        if etype == '_E':
            start = self._max_edge_ids[partid - 1] if partid > 0 else 0
        else:
            start = self._typed_max_edge_ids[etype][partid - 1] if partid > 0 else 0
        return eids - int(start)
    @property
    def partid(self):
        """Get the current partition ID."""
        return self._partid
    @property
    def ntypes(self):
        """Get the list of node types."""
        return self._ntypes
    @property
    def etypes(self):
        """Get the list of edge types."""
        return self._etypes
# Prefixes identifying which entity kind a partition policy addresses; a full
# policy string is '<prefix>:<type name>', e.g. 'node:_N' or 'edge:_E'.
NODE_PART_POLICY = 'node'
EDGE_PART_POLICY = 'edge'
class PartitionPolicy(object):
    """This defines a partition policy for a distributed tensor or distributed embedding.

    When DGL shards tensors and stores them in a cluster of machines, it requires
    partition policies that map rows of the tensors to machines in the cluster.

    Although an arbitrary partition policy can be defined, DGL currently supports
    two partition policies for mapping nodes and edges to machines. To define a
    partition policy from a graph partition book, users need to specify the policy
    name ('node' or 'edge').

    Parameters
    ----------
    policy_str : str
        Partition policy name, e.g., 'edge:_E' or 'node:_N'.
    partition_book : GraphPartitionBook
        A graph partition book
    """
    def __init__(self, policy_str, partition_book):
        splits = policy_str.split(':')
        if len(splits) == 1:
            # Normalize the short forms 'node'/'edge' to the canonical
            # '<policy>:<type>' form with the default homogeneous type name.
            assert policy_str in (EDGE_PART_POLICY, NODE_PART_POLICY), \
                'policy_str must contain \'edge\' or \'node\'.'
            if NODE_PART_POLICY == policy_str:
                policy_str = NODE_PART_POLICY + ":_N"
            else:
                policy_str = EDGE_PART_POLICY + ":_E"
        self._policy_str = policy_str
        self._part_id = partition_book.partid
        self._partition_book = partition_book

    def _is_node_policy(self):
        """Whether this policy addresses node data ('node:<ntype>').

        Matches the prefix explicitly rather than with a substring test so an
        edge type whose name happens to contain 'node' is not misclassified.
        """
        return self._policy_str.startswith(NODE_PART_POLICY + ':')

    def _is_edge_policy(self):
        """Whether this policy addresses edge data ('edge:<etype>')."""
        return self._policy_str.startswith(EDGE_PART_POLICY + ':')

    def _entity_type(self):
        """The node/edge type name encoded after the policy prefix."""
        return self._policy_str.split(':', 1)[1]

    @property
    def policy_str(self):
        """Get the policy name

        Returns
        -------
        str
            The name of the partition policy.
        """
        return self._policy_str

    @property
    def part_id(self):
        """Get partition ID

        Returns
        -------
        int
            The partition ID
        """
        return self._part_id

    @property
    def partition_book(self):
        """Get partition book

        Returns
        -------
        GraphPartitionBook
            The graph partition book
        """
        return self._partition_book

    def get_data_name(self, name):
        """Get HeteroDataName"""
        return HeteroDataName(self._is_node_policy(), self._entity_type(), name)

    def to_local(self, id_tensor):
        """Mapping global ID to local ID.

        Parameters
        ----------
        id_tensor : tensor
            Global ID tensor

        Return
        ------
        tensor
            local ID tensor
        """
        if self._is_edge_policy():
            return self._partition_book.eid2localeid(id_tensor, self._part_id,
                                                     self._entity_type())
        elif self._is_node_policy():
            return self._partition_book.nid2localnid(id_tensor, self._part_id,
                                                     self._entity_type())
        else:
            raise RuntimeError('Cannot support policy: %s ' % self._policy_str)

    def to_partid(self, id_tensor):
        """Mapping global ID to partition ID.

        Parameters
        ----------
        id_tensor : tensor
            Global ID tensor

        Return
        ------
        tensor
            partition ID
        """
        if self._is_edge_policy():
            return self._partition_book.eid2partid(id_tensor, self._entity_type())
        elif self._is_node_policy():
            return self._partition_book.nid2partid(id_tensor, self._entity_type())
        else:
            raise RuntimeError('Cannot support policy: %s ' % self._policy_str)

    def get_part_size(self):
        """Get data size of current partition.

        Returns
        -------
        int
            data size
        """
        if self._is_edge_policy():
            return len(self._partition_book.partid2eids(self._part_id,
                                                        self._entity_type()))
        elif self._is_node_policy():
            return len(self._partition_book.partid2nids(self._part_id,
                                                        self._entity_type()))
        else:
            raise RuntimeError('Cannot support policy: %s ' % self._policy_str)

    def get_size(self):
        """Get the full size of the data.

        Returns
        -------
        int
            data size
        """
        if self._is_edge_policy():
            return self._partition_book._num_edges(self._entity_type())
        elif self._is_node_policy():
            return self._partition_book._num_nodes(self._entity_type())
        else:
            raise RuntimeError('Cannot support policy: %s ' % self._policy_str)
class NodePartitionPolicy(PartitionPolicy):
    '''Partition policy for node data of a particular node type.'''
    def __init__(self, partition_book, ntype='_N'):
        super().__init__('{}:{}'.format(NODE_PART_POLICY, ntype), partition_book)
class EdgePartitionPolicy(PartitionPolicy):
    '''Partition policy for edge data of a particular edge type.'''
    def __init__(self, partition_book, etype='_E'):
        super().__init__('{}:{}'.format(EDGE_PART_POLICY, etype), partition_book)
class HeteroDataName(object):
    '''The data name in a heterogeneous graph.

    A unique data name has three components:

    * whether it is node data or edge data,
    * the node/edge type,
    * the name of the data.

    Parameters
    ----------
    is_node : bool
        Indicate whether it's node data or edge data.
    entity_type : str
        The type of the node/edge.
    data_name : str
        The name of the data.
    '''
    def __init__(self, is_node, entity_type, data_name):
        self.policy_str = NODE_PART_POLICY if is_node else EDGE_PART_POLICY
        self.policy_str = self.policy_str + ':' + entity_type
        self.data_name = data_name

    def is_node(self):
        '''Is this the name of node data

        Checks the policy prefix explicitly (rather than as a substring) so an
        entity type whose name contains 'node' cannot be misclassified.
        '''
        return self.policy_str.startswith(NODE_PART_POLICY + ':')

    def is_edge(self):
        '''Is this the name of edge data
        '''
        return self.policy_str.startswith(EDGE_PART_POLICY + ':')

    def get_type(self):
        '''The type of the node/edge.

        This is only meaningful in a heterogeneous graph.
        In a homogeneous graph, type is '_N' for a node and '_E' for an edge.
        '''
        # Everything after the 'node:'/'edge:' prefix, without assuming the
        # prefix length.
        return self.policy_str.split(':', 1)[1]

    def get_name(self):
        '''The name of the data.
        '''
        return self.data_name

    def __str__(self):
        '''The full name of the data.

        The full name is used as the key in the KVStore.
        '''
        return self.policy_str + ':' + self.data_name
def parse_hetero_data_name(name):
    '''Parse a data name and create a ``HeteroDataName``.

    The data name has three ':'-separated fields: whether it is node or edge
    data, the node/edge type, and the actual data name.

    Parameters
    ----------
    name : str
        The data name, as produced by ``HeteroDataName.__str__``.

    Returns
    -------
    HeteroDataName

    Raises
    ------
    AssertionError
        If the name does not have the 'node'/'edge' prefix or lacks the type
        and data-name fields.
    '''
    # Split at most twice so a data name that itself contains ':' still
    # round-trips through HeteroDataName.__str__ / parse_hetero_data_name.
    names = name.split(':', 2)
    assert len(names) == 3, '{} is not a valid heterograph data name'.format(name)
    assert names[0] in (NODE_PART_POLICY, EDGE_PART_POLICY), \
        '{} is not a valid heterograph data name'.format(name)
    return HeteroDataName(names[0] == NODE_PART_POLICY, names[1], names[2])
| 35.540807 | 100 | 0.609266 |
import pickle
from abc import ABC
import numpy as np
from .. import backend as F
from ..base import NID, EID
from .. import utils
from .shared_mem_utils import _to_shared_mem, _get_ndata_path, _get_edata_path, DTYPE_DICT
from .._ffi.ndarray import empty_shared_mem
from ..ndarray import exist_shared_mem_array
from .id_map import IdMap
def _move_metadata_to_shared_mem(graph_name, num_nodes, num_edges, part_id,
                                 num_partitions, node_map, edge_map, is_range_part):
    '''Move the metadata of a partition book into shared memory.

    The packed metadata tensor and the node/edge maps are copied into
    named shared-memory arrays keyed by ``graph_name`` so that other
    processes on the same machine can attach to them.

    Returns
    -------
    tuple of Tensor
        Shared-memory copies of (metadata, node_map, edge_map).
    '''
    # Seven int64 fields in a fixed order; _get_shared_mem_metadata
    # relies on this exact layout when unpacking.
    meta = _to_shared_mem(F.tensor([int(is_range_part), num_nodes, num_edges,
                                    num_partitions, part_id,
                                    len(node_map), len(edge_map)]),
                          _get_ndata_path(graph_name, 'meta'))
    node_map = _to_shared_mem(node_map, _get_ndata_path(graph_name, 'node_map'))
    edge_map = _to_shared_mem(edge_map, _get_edata_path(graph_name, 'edge_map'))
    return meta, node_map, edge_map
def _get_shared_mem_metadata(graph_name):
    '''Attach to the shared-memory metadata of a graph.

    Reads back the arrays created by ``_move_metadata_to_shared_mem`` and
    returns ``(is_range_part, part_id, num_partitions, node_map, edge_map)``.
    '''
    # The metadata packs 7 int64 fields; see _move_metadata_to_shared_mem
    # for the field order.
    shape = (7,)
    dtype = F.int64
    dtype = DTYPE_DICT[dtype]
    data = empty_shared_mem(_get_ndata_path(graph_name, 'meta'), False, shape, dtype)
    dlpack = data.to_dlpack()
    meta = F.asnumpy(F.zerocopy_from_dlpack(dlpack))
    is_range_part, _, _, num_partitions, part_id, node_map_len, edge_map_len = meta
    # Attach to the node map and edge map using the lengths recorded in
    # the metadata; these stay zero-copy views over shared memory.
    data = empty_shared_mem(_get_ndata_path(graph_name, 'node_map'), False, (node_map_len,), dtype)
    dlpack = data.to_dlpack()
    node_map = F.zerocopy_from_dlpack(dlpack)
    data = empty_shared_mem(_get_edata_path(graph_name, 'edge_map'), False, (edge_map_len,), dtype)
    dlpack = data.to_dlpack()
    edge_map = F.zerocopy_from_dlpack(dlpack)
    return is_range_part, part_id, num_partitions, node_map, edge_map
def get_shared_mem_partition_book(graph_name, graph_part):
    '''Get a partition book from shared memory, or None if absent.

    Parameters
    ----------
    graph_name : str
        The name under which the metadata was shared.
    graph_part :
        The local partition structure, used only for BasicPartitionBook.

    Returns
    -------
    GraphPartitionBook or None
        A RangePartitionBook or BasicPartitionBook depending on how the
        book was originally stored.
    '''
    if not exist_shared_mem_array(_get_ndata_path(graph_name, 'meta')):
        return None
    is_range_part, part_id, num_parts, node_map_data, edge_map_data = \
        _get_shared_mem_metadata(graph_name)
    if is_range_part == 1:
        # For a range partition book, the maps were pickled into byte
        # tensors; unpickle them back into per-type ID ranges.
        node_map = {}
        ntypes = {}
        node_map_data = pickle.loads(bytes(F.asnumpy(node_map_data).tolist()))
        for i, (ntype, nid_range) in enumerate(node_map_data):
            ntypes[ntype] = i
            node_map[ntype] = nid_range
        edge_map = {}
        etypes = {}
        edge_map_data = pickle.loads(bytes(F.asnumpy(edge_map_data).tolist()))
        for i, (etype, eid_range) in enumerate(edge_map_data):
            etypes[etype] = i
            edge_map[etype] = eid_range
        return RangePartitionBook(part_id, num_parts, node_map, edge_map, ntypes, etypes)
    else:
        return BasicPartitionBook(part_id, num_parts, node_map_data, edge_map_data, graph_part)
class GraphPartitionBook(ABC):
    '''The base class of a graph partition book.

    Defect fixed: the method definitions below had no bodies at all
    (their docstring-only bodies were stripped), which is a SyntaxError.
    Each method now carries a docstring as its body again; subclasses
    (BasicPartitionBook, RangePartitionBook) provide the real logic.
    '''
    def shared_memory(self, graph_name):
        '''Move the partition book to shared memory under ``graph_name``.'''
    def num_partitions(self):
        '''Return the number of partitions.'''
    def metadata(self):
        '''Return the partition metadata (one dict per partition).'''
    def nid2partid(self, nids, ntype):
        '''Map global node IDs to partition IDs.'''
    def eid2partid(self, eids, etype):
        '''Map global edge IDs to partition IDs.'''
    def partid2nids(self, partid, ntype):
        '''Return the global node IDs owned by a partition.'''
    def partid2eids(self, partid, etype):
        '''Return the global edge IDs owned by a partition.'''
    def nid2localnid(self, nids, partid, ntype):
        '''Map global node IDs to local node IDs within a partition.'''
    def eid2localeid(self, eids, partid, etype):
        '''Map global edge IDs to local edge IDs within a partition.'''
    @property
    def partid(self):
        '''The ID of the local partition.'''
    @property
    def ntypes(self):
        '''The list of node types.'''
    @property
    def etypes(self):
        '''The list of edge types.'''
    def map_to_per_ntype(self, ids):
        '''Convert homogeneous node IDs to (type ID, per-type ID) pairs.'''
    def map_to_per_etype(self, ids):
        '''Convert homogeneous edge IDs to (type ID, per-type ID) pairs.'''
    def map_to_homo_nid(self, ids, ntype):
        '''Convert per-type node IDs to homogeneous node IDs.'''
    def map_to_homo_eid(self, ids, etype):
        '''Convert per-type edge IDs to homogeneous edge IDs.'''
class BasicPartitionBook(GraphPartitionBook):
    '''A partition book that stores an explicit partition ID per node/edge.

    Only supports homogeneous graphs (node type '_N', edge type '_E').

    Parameters
    ----------
    part_id : int
        The ID of the local partition.
    num_parts : int
        The total number of partitions.
    node_map : tensor-like
        Global node ID -> partition ID.
    edge_map : tensor-like
        Global edge ID -> partition ID.
    part_graph : DGLGraph
        The local partition structure; its NID/EID data give the
        global IDs of the local nodes/edges.
    '''
    def __init__(self, part_id, num_parts, node_map, edge_map, part_graph):
        assert part_id >= 0, 'part_id cannot be a negative number.'
        assert num_parts > 0, 'num_parts must be greater than zero.'
        self._part_id = int(part_id)
        self._num_partitions = int(num_parts)
        self._nid2partid = F.tensor(node_map)
        assert F.dtype(self._nid2partid) == F.int64, \
            'the node map must be stored in an integer array'
        self._eid2partid = F.tensor(edge_map)
        assert F.dtype(self._eid2partid) == F.int64, \
            'the edge map must be stored in an integer array'
        # Per-partition metadata: node/edge counts come from counting how
        # many IDs map to each partition.
        self._partition_meta_data = []
        _, nid_count = np.unique(F.asnumpy(self._nid2partid), return_counts=True)
        _, eid_count = np.unique(F.asnumpy(self._eid2partid), return_counts=True)
        for partid in range(self._num_partitions):
            part_info = {}
            part_info['machine_id'] = partid
            part_info['num_nodes'] = int(nid_count[partid])
            part_info['num_edges'] = int(eid_count[partid])
            self._partition_meta_data.append(part_info)
        # partid -> global node IDs: argsort groups node IDs by their
        # partition; consecutive slices of the sorted order are partitions.
        self._partid2nids = []
        sorted_nid = F.tensor(np.argsort(F.asnumpy(self._nid2partid)))
        start = 0
        for offset in nid_count:
            part_nids = sorted_nid[start:start+offset]
            start += offset
            self._partid2nids.append(part_nids)
        # partid -> global edge IDs, built the same way.
        self._partid2eids = []
        sorted_eid = F.tensor(np.argsort(F.asnumpy(self._eid2partid)))
        start = 0
        for offset in eid_count:
            part_eids = sorted_eid[start:start+offset]
            start += offset
            self._partid2eids.append(part_eids)
        # Global -> local node ID table; only the local partition's table
        # is materialized (other entries stay None).
        self._nidg2l = [None] * self._num_partitions
        global_id = part_graph.ndata[NID]
        max_global_id = np.amax(F.asnumpy(global_id))
        g2l = F.zeros((max_global_id+1), F.int64, F.context(global_id))
        g2l = F.scatter_row(g2l, global_id, F.arange(0, len(global_id)))
        self._nidg2l[self._part_id] = g2l
        # Global -> local edge ID table, same scheme.
        self._eidg2l = [None] * self._num_partitions
        global_id = part_graph.edata[EID]
        max_global_id = np.amax(F.asnumpy(global_id))
        g2l = F.zeros((max_global_id+1), F.int64, F.context(global_id))
        g2l = F.scatter_row(g2l, global_id, F.arange(0, len(global_id)))
        self._eidg2l[self._part_id] = g2l
        # Cache the sizes of the local partition.
        self._edge_size = len(self.partid2eids(self._part_id))
        self._node_size = len(self.partid2nids(self._part_id))
    def shared_memory(self, graph_name):
        '''Move the partition book metadata and maps to shared memory.'''
        self._meta, self._nid2partid, self._eid2partid = _move_metadata_to_shared_mem(
            graph_name, self._num_nodes(), self._num_edges(), self._part_id, self._num_partitions,
            self._nid2partid, self._eid2partid, False)
    def num_partitions(self):
        '''Return the number of partitions.'''
        return self._num_partitions
    def metadata(self):
        '''Return the per-partition metadata dicts.'''
        return self._partition_meta_data
    def _num_nodes(self, ntype='_N'):
        '''The total number of nodes in the whole graph.'''
        assert ntype == '_N', 'Base partition book only supports homogeneous graph.'
        return len(self._nid2partid)
    def _num_edges(self, etype='_E'):
        '''The total number of edges in the whole graph.'''
        assert etype == '_E', 'Base partition book only supports homogeneous graph.'
        return len(self._eid2partid)
    def map_to_per_ntype(self, ids):
        '''Homogeneous graph: type IDs are all zero, per-type IDs are unchanged.'''
        return F.zeros((len(ids),), F.int32, F.cpu()), ids
    def map_to_per_etype(self, ids):
        '''Homogeneous graph: type IDs are all zero, per-type IDs are unchanged.'''
        return F.zeros((len(ids),), F.int32, F.cpu()), ids
    def map_to_homo_nid(self, ids, ntype):
        '''Homogeneous graph: per-type IDs are already homogeneous IDs.'''
        assert ntype == '_N', 'Base partition book only supports homogeneous graph.'
        return ids
    def map_to_homo_eid(self, ids, etype):
        '''Homogeneous graph: per-type IDs are already homogeneous IDs.'''
        assert etype == '_E', 'Base partition book only supports homogeneous graph.'
        return ids
    def nid2partid(self, nids, ntype='_N'):
        '''Map global node IDs to partition IDs by table lookup.'''
        assert ntype == '_N', 'Base partition book only supports homogeneous graph.'
        return F.gather_row(self._nid2partid, nids)
    def eid2partid(self, eids, etype='_E'):
        '''Map global edge IDs to partition IDs by table lookup.'''
        assert etype == '_E', 'Base partition book only supports homogeneous graph.'
        return F.gather_row(self._eid2partid, eids)
    def partid2nids(self, partid, ntype='_N'):
        '''Return the global node IDs owned by a partition.'''
        assert ntype == '_N', 'Base partition book only supports homogeneous graph.'
        return self._partid2nids[partid]
    def partid2eids(self, partid, etype='_E'):
        '''Return the global edge IDs owned by a partition.'''
        assert etype == '_E', 'Base partition book only supports homogeneous graph.'
        return self._partid2eids[partid]
    def nid2localnid(self, nids, partid, ntype='_N'):
        '''Map global node IDs to local IDs; only valid for the local partition.'''
        assert ntype == '_N', 'Base partition book only supports homogeneous graph.'
        if partid != self._part_id:
            raise RuntimeError('Now GraphPartitionBook does not support \
                getting remote tensor of nid2localnid.')
        return F.gather_row(self._nidg2l[partid], nids)
    def eid2localeid(self, eids, partid, etype='_E'):
        '''Map global edge IDs to local IDs; only valid for the local partition.'''
        assert etype == '_E', 'Base partition book only supports homogeneous graph.'
        if partid != self._part_id:
            raise RuntimeError('Now GraphPartitionBook does not support \
                getting remote tensor of eid2localeid.')
        return F.gather_row(self._eidg2l[partid], eids)
    @property
    def partid(self):
        '''The ID of the local partition.'''
        return self._part_id
    @property
    def ntypes(self):
        '''The single node type of a homogeneous graph.'''
        return ['_N']
    @property
    def etypes(self):
        '''The single edge type of a homogeneous graph.'''
        return ['_E']
class RangePartitionBook(GraphPartitionBook):
    '''A partition book for graphs partitioned into contiguous ID ranges.

    Each partition owns a contiguous range of node/edge IDs per type, so
    membership can be answered with binary search instead of a full map.

    Parameters
    ----------
    part_id : int
        The ID of the local partition.
    num_parts : int
        The total number of partitions.
    node_map : dict[str, (num_parts, 2) array]
        Per node type: the [start, end) homogeneous ID range in each partition.
    edge_map : dict[str, (num_parts, 2) array]
        Per edge type: the [start, end) homogeneous ID range in each partition.
    ntypes : dict[str, int]
        Node type name -> node type ID.
    etypes : dict[str, int]
        Edge type name -> edge type ID.
    '''
    def __init__(self, part_id, num_parts, node_map, edge_map, ntypes, etypes):
        assert part_id >= 0, 'part_id cannot be a negative number.'
        assert num_parts > 0, 'num_parts must be greater than zero.'
        self._partid = part_id
        self._num_partitions = num_parts
        # Invert the type-name -> type-ID dicts into ID-indexed lists.
        self._ntypes = [None] * len(ntypes)
        self._etypes = [None] * len(etypes)
        for ntype in ntypes:
            ntype_id = ntypes[ntype]
            self._ntypes[ntype_id] = ntype
        assert all([ntype is not None for ntype in self._ntypes]), \
            "The node types have invalid IDs."
        for etype in etypes:
            etype_id = etypes[etype]
            self._etypes[etype_id] = etype
        assert all([etype is not None for etype in self._etypes]), \
            "The edge types have invalid IDs."
        # Per-type node ID ranges and cumulative per-type counts; the
        # cumulative sums enable binary search in nid2partid.
        self._typed_nid_range = {}
        self._typed_max_node_ids = {}
        max_node_map = np.zeros((num_parts,), dtype=np.int64)
        for key in node_map:
            if not isinstance(node_map[key], np.ndarray):
                node_map[key] = F.asnumpy(node_map[key])
            assert node_map[key].shape == (num_parts, 2)
            self._typed_nid_range[key] = node_map[key]
            self._typed_max_node_ids[key] = np.cumsum(self._typed_nid_range[key][:, 1]
                                                      - self._typed_nid_range[key][:, 0])
            max_node_map = np.maximum(self._typed_nid_range[key][:, 1], max_node_map)
        # Per-partition upper bounds of homogeneous node IDs.
        self._max_node_ids = max_node_map
        # Same bookkeeping for edges.
        self._typed_eid_range = {}
        self._typed_max_edge_ids = {}
        max_edge_map = np.zeros((num_parts,), dtype=np.int64)
        for key in edge_map:
            if not isinstance(edge_map[key], np.ndarray):
                edge_map[key] = F.asnumpy(edge_map[key])
            assert edge_map[key].shape == (num_parts, 2)
            self._typed_eid_range[key] = edge_map[key]
            self._typed_max_edge_ids[key] = np.cumsum(self._typed_eid_range[key][:, 1]
                                                      - self._typed_eid_range[key][:, 0])
            max_edge_map = np.maximum(self._typed_eid_range[key][:, 1], max_edge_map)
        self._max_edge_ids = max_edge_map
        # Homogeneous-ID -> (type, per-type-ID) converters.
        self._nid_map = IdMap(self._typed_nid_range)
        self._eid_map = IdMap(self._typed_eid_range)
        # Per-partition metadata derived from the cumulative bounds.
        self._partition_meta_data = []
        for partid in range(self._num_partitions):
            nrange_start = max_node_map[partid - 1] if partid > 0 else 0
            nrange_end = max_node_map[partid]
            num_nodes = nrange_end - nrange_start
            erange_start = max_edge_map[partid - 1] if partid > 0 else 0
            erange_end = max_edge_map[partid]
            num_edges = erange_end - erange_start
            part_info = {}
            part_info['machine_id'] = partid
            part_info['num_nodes'] = int(num_nodes)
            part_info['num_edges'] = int(num_edges)
            self._partition_meta_data.append(part_info)
    def shared_memory(self, graph_name):
        '''Move the partition book to shared memory.

        The per-type ID ranges are pickled and stored as byte tensors
        because shared memory only holds flat numeric arrays.
        '''
        nid_range = [None] * len(self.ntypes)
        for i, ntype in enumerate(self.ntypes):
            nid_range[i] = (ntype, self._typed_nid_range[ntype])
        nid_range_pickle = pickle.dumps(nid_range)
        nid_range_pickle = [e for e in nid_range_pickle]
        eid_range = [None] * len(self.etypes)
        for i, etype in enumerate(self.etypes):
            eid_range[i] = (etype, self._typed_eid_range[etype])
        eid_range_pickle = pickle.dumps(eid_range)
        eid_range_pickle = [e for e in eid_range_pickle]
        self._meta = _move_metadata_to_shared_mem(graph_name,
                                                  0,  # We don't need to provide the number of nodes
                                                  0, # We don't need to provide the number of edges
                                                  self._partid, self._num_partitions,
                                                  F.tensor(nid_range_pickle),
                                                  F.tensor(eid_range_pickle),
                                                  True)
    def num_partitions(self):
        '''Return the number of partitions.'''
        return self._num_partitions
    def _num_nodes(self, ntype='_N'):
        '''The total number of nodes (overall or for one node type).'''
        if ntype == '_N':
            return int(self._max_node_ids[-1])
        else:
            return int(self._typed_max_node_ids[ntype][-1])
    def _num_edges(self, etype='_E'):
        '''The total number of edges (overall or for one edge type).'''
        if etype == '_E':
            return int(self._max_edge_ids[-1])
        else:
            return int(self._typed_max_edge_ids[etype][-1])
    def metadata(self):
        '''Return the per-partition metadata dicts.'''
        return self._partition_meta_data
    def map_to_per_ntype(self, ids):
        '''Convert homogeneous node IDs to (type ID, per-type ID).'''
        return self._nid_map(ids)
    def map_to_per_etype(self, ids):
        '''Convert homogeneous edge IDs to (type ID, per-type ID).'''
        return self._eid_map(ids)
    def map_to_homo_nid(self, ids, ntype):
        '''Convert per-type node IDs back to homogeneous node IDs.'''
        ids = utils.toindex(ids).tousertensor()
        partids = self.nid2partid(ids, ntype)
        typed_max_nids = F.zerocopy_from_numpy(self._typed_max_node_ids[ntype])
        # Distance from the end of the per-type range in that partition …
        end_diff = F.gather_row(typed_max_nids, partids) - ids
        typed_nid_range = F.zerocopy_from_numpy(self._typed_nid_range[ntype][:, 1])
        # … applied to the end of the homogeneous range gives the homo ID.
        return F.gather_row(typed_nid_range, partids) - end_diff
    def map_to_homo_eid(self, ids, etype):
        '''Convert per-type edge IDs back to homogeneous edge IDs.'''
        ids = utils.toindex(ids).tousertensor()
        partids = self.eid2partid(ids, etype)
        typed_max_eids = F.zerocopy_from_numpy(self._typed_max_edge_ids[etype])
        end_diff = F.gather_row(typed_max_eids, partids) - ids
        typed_eid_range = F.zerocopy_from_numpy(self._typed_eid_range[etype][:, 1])
        return F.gather_row(typed_eid_range, partids) - end_diff
    def nid2partid(self, nids, ntype='_N'):
        '''Map node IDs to partition IDs by binary search over range bounds.'''
        nids = utils.toindex(nids)
        if ntype == '_N':
            ret = np.searchsorted(self._max_node_ids, nids.tonumpy(), side='right')
        else:
            ret = np.searchsorted(self._typed_max_node_ids[ntype], nids.tonumpy(), side='right')
        ret = utils.toindex(ret)
        return ret.tousertensor()
    def eid2partid(self, eids, etype='_E'):
        '''Map edge IDs to partition IDs by binary search over range bounds.'''
        eids = utils.toindex(eids)
        if etype == '_E':
            ret = np.searchsorted(self._max_edge_ids, eids.tonumpy(), side='right')
        else:
            ret = np.searchsorted(self._typed_max_edge_ids[etype], eids.tonumpy(), side='right')
        ret = utils.toindex(ret)
        return ret.tousertensor()
    def partid2nids(self, partid, ntype='_N'):
        '''Return the contiguous node ID range owned by a partition.'''
        if ntype == '_N':
            start = self._max_node_ids[partid - 1] if partid > 0 else 0
            end = self._max_node_ids[partid]
            return F.arange(start, end)
        else:
            start = self._typed_max_node_ids[ntype][partid - 1] if partid > 0 else 0
            end = self._typed_max_node_ids[ntype][partid]
            return F.arange(start, end)
    def partid2eids(self, partid, etype='_E'):
        '''Return the contiguous edge ID range owned by a partition.'''
        if etype == '_E':
            start = self._max_edge_ids[partid - 1] if partid > 0 else 0
            end = self._max_edge_ids[partid]
            return F.arange(start, end)
        else:
            start = self._typed_max_edge_ids[etype][partid - 1] if partid > 0 else 0
            end = self._typed_max_edge_ids[etype][partid]
            return F.arange(start, end)
    def nid2localnid(self, nids, partid, ntype='_N'):
        '''Map node IDs to local IDs; only valid for the local partition.'''
        if partid != self._partid:
            raise RuntimeError('Now RangePartitionBook does not support \
                getting remote tensor of nid2localnid.')
        nids = utils.toindex(nids)
        nids = nids.tousertensor()
        # Local ID is just the offset from the partition's range start.
        if ntype == '_N':
            start = self._max_node_ids[partid - 1] if partid > 0 else 0
        else:
            start = self._typed_max_node_ids[ntype][partid - 1] if partid > 0 else 0
        return nids - int(start)
    def eid2localeid(self, eids, partid, etype='_E'):
        '''Map edge IDs to local IDs; only valid for the local partition.'''
        if partid != self._partid:
            raise RuntimeError('Now RangePartitionBook does not support \
                getting remote tensor of eid2localeid.')
        eids = utils.toindex(eids)
        eids = eids.tousertensor()
        if etype == '_E':
            start = self._max_edge_ids[partid - 1] if partid > 0 else 0
        else:
            start = self._typed_max_edge_ids[etype][partid - 1] if partid > 0 else 0
        return eids - int(start)
    @property
    def partid(self):
        '''The ID of the local partition.'''
        return self._partid
    @property
    def ntypes(self):
        '''The node types, indexed by node type ID.'''
        return self._ntypes
    @property
    def etypes(self):
        '''The edge types, indexed by edge type ID.'''
        return self._etypes
# Prefixes that distinguish node data from edge data in policy strings
# and in KVStore data names (format: '<policy>:<type>[:<name>]').
NODE_PART_POLICY = 'node'
EDGE_PART_POLICY = 'edge'
class PartitionPolicy(object):
    '''The policy describing how a piece of data is partitioned.

    Parameters
    ----------
    policy_str : str
        Either 'node'/'edge' (defaults to type '_N'/'_E') or a fully
        qualified '<policy>:<type>' string.
    partition_book : GraphPartitionBook
        The partition book the policy delegates ID mapping to.
    '''
    def __init__(self, policy_str, partition_book):
        splits = policy_str.split(':')
        if len(splits) == 1:
            # Bare 'node'/'edge': normalize to the homogeneous type.
            assert policy_str in (EDGE_PART_POLICY, NODE_PART_POLICY), \
                'policy_str must contain \'edge\' or \'node\'.'
            if NODE_PART_POLICY == policy_str:
                policy_str = NODE_PART_POLICY + ":_N"
            else:
                policy_str = EDGE_PART_POLICY + ":_E"
        self._policy_str = policy_str
        self._part_id = partition_book.partid
        self._partition_book = partition_book
    @property
    def policy_str(self):
        '''The normalized '<policy>:<type>' string.'''
        return self._policy_str
    @property
    def part_id(self):
        '''The local partition ID.'''
        return self._part_id
    @property
    def partition_book(self):
        '''The underlying partition book.'''
        return self._partition_book
    def get_data_name(self, name):
        '''Build the HeteroDataName for a piece of data under this policy.'''
        is_node = NODE_PART_POLICY in self._policy_str
        # [5:] strips the 5-character 'node:'/'edge:' prefix.
        return HeteroDataName(is_node, self._policy_str[5:], name)
    def to_local(self, id_tensor):
        '''Map global IDs to IDs local to this partition.'''
        if EDGE_PART_POLICY in self._policy_str:
            return self._partition_book.eid2localeid(id_tensor, self._part_id, self._policy_str[5:])
        elif NODE_PART_POLICY in self._policy_str:
            return self._partition_book.nid2localnid(id_tensor, self._part_id, self._policy_str[5:])
        else:
            raise RuntimeError('Cannot support policy: %s ' % self._policy_str)
    def to_partid(self, id_tensor):
        '''Map global IDs to the partition IDs that own them.'''
        if EDGE_PART_POLICY in self._policy_str:
            return self._partition_book.eid2partid(id_tensor, self._policy_str[5:])
        elif NODE_PART_POLICY in self._policy_str:
            return self._partition_book.nid2partid(id_tensor, self._policy_str[5:])
        else:
            raise RuntimeError('Cannot support policy: %s ' % self._policy_str)
    def get_part_size(self):
        '''The number of nodes/edges of this type in the local partition.'''
        if EDGE_PART_POLICY in self._policy_str:
            return len(self._partition_book.partid2eids(self._part_id, self._policy_str[5:]))
        elif NODE_PART_POLICY in self._policy_str:
            return len(self._partition_book.partid2nids(self._part_id, self._policy_str[5:]))
        else:
            raise RuntimeError('Cannot support policy: %s ' % self._policy_str)
    def get_size(self):
        '''The total number of nodes/edges of this type in the graph.'''
        if EDGE_PART_POLICY in self._policy_str:
            return self._partition_book._num_edges(self._policy_str[5:])
        elif NODE_PART_POLICY in self._policy_str:
            return self._partition_book._num_nodes(self._policy_str[5:])
        else:
            raise RuntimeError('Cannot support policy: %s ' % self._policy_str)
class NodePartitionPolicy(PartitionPolicy):
    '''Partition policy for node data of one node type.'''
    def __init__(self, partition_book, ntype='_N'):
        # Delegate to the generic policy with a fully qualified string.
        policy = ':'.join((NODE_PART_POLICY, ntype))
        super(NodePartitionPolicy, self).__init__(policy, partition_book)
class EdgePartitionPolicy(PartitionPolicy):
    '''Partition policy for edge data of one edge type.'''
    def __init__(self, partition_book, etype='_E'):
        # Delegate to the generic policy with a fully qualified string.
        policy = ':'.join((EDGE_PART_POLICY, etype))
        super(EdgePartitionPolicy, self).__init__(policy, partition_book)
class HeteroDataName(object):
    '''The data name in a heterogeneous graph.

    A unique data name has three components: whether it is node or edge
    data, the node/edge type, and the name of the data.
    '''
    def __init__(self, is_node, entity_type, data_name):
        self.policy_str = NODE_PART_POLICY if is_node else EDGE_PART_POLICY
        self.policy_str = self.policy_str + ':' + entity_type
        self.data_name = data_name
    def is_node(self):
        '''Whether this names node data.'''
        return NODE_PART_POLICY in self.policy_str
    def is_edge(self):
        '''Whether this names edge data.'''
        return EDGE_PART_POLICY in self.policy_str
    def get_type(self):
        '''The node/edge type ('_N'/'_E' in a homogeneous graph).'''
        # [5:] strips the 5-character 'node:'/'edge:' prefix.
        return self.policy_str[5:]
    def get_name(self):
        '''The bare name of the data.'''
        return self.data_name
    def __str__(self):
        '''The full name of the data, used as the key in the KVStore.'''
        return self.policy_str + ':' + self.data_name
def parse_hetero_data_name(name):
    '''Parse a data name and create a HeteroDataName.

    The name has three ':'-separated fields: node/edge policy, the
    node/edge type, and the actual data name.
    '''
    names = name.split(':')
    assert len(names) == 3, '{} is not a valid heterograph data name'.format(name)
    assert names[0] in (NODE_PART_POLICY, EDGE_PART_POLICY), \
        '{} is not a valid heterograph data name'.format(name)
    return HeteroDataName(names[0] == NODE_PART_POLICY, names[1], names[2])
| true | true |
f720df0b58abbc375a8a7a17d5d8da4f91638bcc | 53,237 | py | Python | ecl/tests/unit/test_resource.py | keiichi-hikita/eclsdk | c43afb982fd54eb1875cdc22d46044644d804c4a | [
"Apache-2.0"
] | 5 | 2017-04-07T06:23:04.000Z | 2019-11-19T00:52:34.000Z | ecl/tests/unit/test_resource.py | keiichi-hikita/eclsdk | c43afb982fd54eb1875cdc22d46044644d804c4a | [
"Apache-2.0"
] | 16 | 2018-09-12T11:14:40.000Z | 2021-04-19T09:02:44.000Z | ecl/tests/unit/test_resource.py | keiichi-hikita/eclsdk | c43afb982fd54eb1875cdc22d46044644d804c4a | [
"Apache-2.0"
] | 14 | 2017-05-11T14:26:26.000Z | 2021-07-14T14:00:06.000Z | # Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import copy
import json
import os
from keystoneauth1 import session
import mock
import requests
from testtools import matchers
from ecl import exceptions
from ecl import format
from ecl import resource
from ecl.tests.unit import base
from ecl import utils
# Canned identifiers and attribute values shared by the test cases below.
fake_parent = 'robert'
fake_name = 'rey'
fake_id = 99
fake_attr1 = 'lana'
fake_attr2 = 'del'
# Resource/collection body keys and URL pieces for the fake REST endpoint.
fake_resource = 'fake'
fake_resources = 'fakes'
fake_arguments = {'parent_name': fake_parent}
fake_base_path = '/fakes/%(parent_name)s/data'
fake_path = '/fakes/rey/data'
# A full server-side representation of one fake resource instance.
fake_data = {'id': fake_id,
             'enabled': True,
             'name': fake_name,
             'parent': fake_parent,
             'attr1': fake_attr1,
             'attr2': fake_attr2,
             'status': None}
fake_body = {fake_resource: fake_data}
class FakeParent(resource.Resource):
    """Minimal parent resource whose identifier is its ``name``."""
    id_attribute = "name"
    name = resource.prop('name')
class FakeResource(resource.Resource):
    """Fake REST resource used throughout these tests.

    Enables every CRUD operation and declares props in several forms
    (typed, plain, and aliased) so each code path can be exercised.
    """
    resource_key = fake_resource
    resources_key = fake_resources
    base_path = fake_base_path
    allow_create = allow_retrieve = allow_update = True
    allow_delete = allow_list = allow_head = True
    enabled = resource.prop('enabled', type=format.BoolStr)
    name = resource.prop('name')
    parent = resource.prop('parent_name')
    first = resource.prop('attr1')
    second = resource.prop('attr2')
    third = resource.prop('attr3', alias='attr_three')
    status = resource.prop('status')
class FakeResourceNoKeys(FakeResource):
    """Same as FakeResource but without resource/resources body keys."""
    resource_key = None
    resources_key = None
class PropTests(base.TestCase):
    """Tests for the ``resource.prop`` descriptor."""
    def test_with_alias_and_type(self):
        """A prop read through its alias is converted by ``type``."""
        class Test(resource.Resource):
            attr = resource.prop("attr1", alias="attr2", type=bool)
        t = Test(attrs={"attr2": 500})
        # Don't test with assertTrue because 500 evaluates to True.
        # Need to test that bool(500) happened and attr2 *is* True.
        self.assertIs(t.attr, True)
    def test_defaults(self):
        """Props fall back to their declared default when unset."""
        new_default = "new_default"
        class Test(resource.Resource):
            attr1 = resource.prop("attr1")
            attr2 = resource.prop("attr2", default=new_default)
        t = Test()
        self.assertIsNone(t.attr1)
        self.assertEqual(new_default, t.attr2)
        # When the default value is passed in, it is left untouched.
        # Check that attr2 is literally the same object we set as default.
        t.attr2 = new_default
        self.assertIs(new_default, t.attr2)
        not_default = 'not default'
        t2 = Test({'attr2': not_default})
        self.assertEqual(not_default, t2.attr2)
        # Assert that if the default is passed in, it overrides the previously
        # set value (bug #1425996)
        t2.attr2 = new_default
        self.assertEqual(new_default, t2.attr2)
    def test_get_without_instance(self):
        """Accessing a prop on the class itself yields None."""
        self.assertIsNone(FakeResource.name)
    def test_set_ValueError(self):
        """An unconvertible value raises ValueError at assignment."""
        class Test(resource.Resource):
            attr = resource.prop("attr", type=int)
        t = Test()
        def should_raise():
            t.attr = "this is not an int"
        self.assertThat(should_raise, matchers.raises(ValueError))
    def test_set_TypeError(self):
        """A type whose constructor rejects the value raises TypeError."""
        class Type(object):
            def __init__(self):
                pass
        class Test(resource.Resource):
            attr = resource.prop("attr", type=Type)
        t = Test()
        def should_raise():
            t.attr = "this type takes no args"
        self.assertThat(should_raise, matchers.raises(TypeError))
    def test_resource_type(self):
        """Props typed as a Resource wrap plain IDs into instances."""
        class FakestResource(resource.Resource):
            shortstop = resource.prop("shortstop", type=FakeResource)
            third_base = resource.prop("third_base", type=FakeResource)
        sot = FakestResource()
        id1 = "Ernie Banks"
        id2 = "Ron Santo"
        # Assigning bare IDs produces FakeResource instances on read.
        sot.shortstop = id1
        sot.third_base = id2
        resource1 = FakeResource.new(id=id1)
        self.assertEqual(resource1, sot.shortstop)
        self.assertEqual(id1, sot.shortstop.id)
        self.assertEqual(FakeResource, type(sot.shortstop))
        resource2 = FakeResource.new(id=id2)
        self.assertEqual(resource2, sot.third_base)
        self.assertEqual(id2, sot.third_base.id)
        self.assertEqual(FakeResource, type(sot.third_base))
        # Assigning full instances keeps them as-is.
        sot2 = FakestResource()
        sot2.shortstop = resource1
        sot2.third_base = resource2
        self.assertEqual(resource1, sot2.shortstop)
        self.assertEqual(id1, sot2.shortstop.id)
        self.assertEqual(FakeResource, type(sot2.shortstop))
        self.assertEqual(resource2, sot2.third_base)
        self.assertEqual(id2, sot2.third_base.id)
        self.assertEqual(FakeResource, type(sot2.third_base))
        # IDs arriving in a response body are also wrapped.
        body = {
            "shortstop": id1,
            "third_base": id2
        }
        sot3 = FakestResource(body)
        self.assertEqual(FakeResource({"id": id1}), sot3.shortstop)
        self.assertEqual(FakeResource({"id": id2}), sot3.third_base)
    def test_set_alias_same_name(self):
        """An alias equal to the attribute name still stores the value."""
        class Test(resource.Resource):
            attr = resource.prop("something", alias="attr")
        val = "hey"
        args = {"something": val}
        sot = Test(args)
        self.assertEqual(val, sot._attrs["something"])
        self.assertEqual(val, sot.attr)
    def test_property_is_none(self):
        """A typed prop does not coerce an explicit None."""
        class Test(resource.Resource):
            attr = resource.prop("something", type=dict)
        args = {"something": None}
        sot = Test(args)
        self.assertIsNone(sot._attrs["something"])
        self.assertIsNone(sot.attr)
class HeaderTests(base.TestCase):
    """Tests for the ``resource.header`` descriptor."""
    class Test(resource.Resource):
        # Fake resource mapping three headers to attributes.
        base_path = "/ramones"
        service = "punk"
        allow_create = True
        allow_update = True
        hey = resource.header("vocals")
        ho = resource.header("guitar")
        letsgo = resource.header("bass")
    def test_get(self):
        """A stored header is readable; missing headers read as None."""
        val = "joey"
        args = {"vocals": val}
        sot = HeaderTests.Test({'headers': args})
        self.assertEqual(val, sot.hey)
        self.assertIsNone(sot.ho)
        self.assertIsNone(sot.letsgo)
    def test_set_new(self):
        """Setting a previously absent header marks the resource dirty."""
        args = {"vocals": "joey", "bass": "deedee"}
        sot = HeaderTests.Test({'headers': args})
        sot._reset_dirty()
        sot.ho = "johnny"
        self.assertEqual("johnny", sot.ho)
        self.assertTrue(sot.is_dirty)
    def test_set_old(self):
        """Overwriting an existing header marks the resource dirty."""
        args = {"vocals": "joey", "bass": "deedee"}
        sot = HeaderTests.Test({'headers': args})
        sot._reset_dirty()
        sot.letsgo = "cj"
        self.assertEqual("cj", sot.letsgo)
        self.assertTrue(sot.is_dirty)
    def test_set_brand_new(self):
        """Setting a header on an empty resource creates the headers dict."""
        sot = HeaderTests.Test({'headers': {}})
        sot._reset_dirty()
        sot.ho = "johnny"
        self.assertEqual("johnny", sot.ho)
        self.assertTrue(sot.is_dirty)
        self.assertEqual({'headers': {"guitar": "johnny"}}, sot)
    def test_1428342(self):
        """Regression: case-insensitive header dicts must not break reads."""
        sot = HeaderTests.Test({'headers':
                                requests.structures.CaseInsensitiveDict()})
        self.assertIsNone(sot.hey)
    def test_create_update_headers(self):
        """Dirty headers are sent on both create (POST) and update (PUT)."""
        sot = HeaderTests.Test()
        sot._reset_dirty()
        sot.ho = "johnny"
        sot.letsgo = "deedee"
        response = mock.Mock()
        response_body = {'id': 1}
        response.json = mock.Mock(return_value=response_body)
        response.headers = None
        sess = mock.Mock()
        sess.post = mock.Mock(return_value=response)
        sess.put = mock.Mock(return_value=response)
        sot.create(sess)
        headers = {'guitar': 'johnny', 'bass': 'deedee'}
        sess.post.assert_called_with(HeaderTests.Test.base_path,
                                     endpoint_filter=HeaderTests.Test.service,
                                     headers=headers,
                                     json={})
        sot['id'] = 1
        sot.letsgo = "cj"
        headers = {'guitar': 'johnny', 'bass': 'cj'}
        sot.update(sess)
        sess.put.assert_called_with('ramones/1',
                                    endpoint_filter=HeaderTests.Test.service,
                                    headers=headers,
                                    json={})
class ResourceTests(base.TestCase):
    def setUp(self):
        """Build a mocked keystoneauth session with an empty filter."""
        super(ResourceTests, self).setUp()
        self.session = mock.Mock(spec=session.Session)
        self.session.get_filter = mock.Mock(return_value={})
    def assertCalledURL(self, method, url):
        """Assert that the mocked HTTP ``method`` was last called with ``url``."""
        # call_args gives a tuple of *args and tuple of **kwargs.
        # Check that the first arg in *args (the URL) has our url.
        self.assertEqual(method.call_args[0][0], url)
    def test_empty_id(self):
        """get() on a resource without an id populates it from the response."""
        resp = mock.Mock()
        resp.json = mock.Mock(return_value=fake_body)
        self.session.get.return_value = resp
        obj = FakeResource.new(**fake_arguments)
        self.assertEqual(obj, obj.get(self.session))
        # Both item access and prop access must see the fetched values.
        self.assertEqual(fake_id, obj.id)
        self.assertEqual(fake_name, obj['name'])
        self.assertEqual(fake_attr1, obj['attr1'])
        self.assertEqual(fake_attr2, obj['attr2'])
        self.assertEqual(fake_name, obj.name)
        self.assertEqual(fake_attr1, obj.first)
        self.assertEqual(fake_attr2, obj.second)
    def test_not_allowed(self):
        """Every operation raises MethodNotSupported when its flag is off."""
        class Nope(resource.Resource):
            allow_create = allow_retrieve = allow_update = False
            allow_delete = allow_list = allow_head = False
        nope = Nope()
        def cant_create():
            nope.create_by_id(1, 2)
        def cant_retrieve():
            nope.get_data_by_id(1, 2)
        def cant_update():
            nope.update_by_id(1, 2, 3)
        def cant_delete():
            nope.delete_by_id(1, 2)
        def cant_list():
            # list() is a generator, so it must be iterated to raise.
            for i in nope.list(1):
                pass
        def cant_head():
            nope.head_data_by_id(1, 2)
        self.assertThat(cant_create,
                        matchers.raises(exceptions.MethodNotSupported))
        self.assertThat(cant_retrieve,
                        matchers.raises(exceptions.MethodNotSupported))
        self.assertThat(cant_update,
                        matchers.raises(exceptions.MethodNotSupported))
        self.assertThat(cant_delete,
                        matchers.raises(exceptions.MethodNotSupported))
        self.assertThat(cant_list,
                        matchers.raises(exceptions.MethodNotSupported))
        self.assertThat(cant_head,
                        matchers.raises(exceptions.MethodNotSupported))
    def _test_create_by_id(self, key, response_value, response_body,
                           attrs, json_body, response_headers=None):
        """Exercise create_by_id for all id/path_args combinations.

        Without a resource_id a POST to the base path is expected;
        with one, a PUT to base_path/id. path_args are interpolated
        into the base path in both cases.
        """
        class FakeResource2(FakeResource):
            resource_key = key
            service = "my_service"
        response = mock.Mock()
        response.json = mock.Mock(return_value=response_body)
        response.headers = response_headers
        expected_resp = response_value.copy()
        if response_headers:
            expected_resp.update({'headers': response_headers})
        sess = mock.Mock()
        sess.put = mock.Mock(return_value=response)
        sess.post = mock.Mock(return_value=response)
        # No id, no path args -> POST to the raw base path.
        resp = FakeResource2.create_by_id(sess, attrs)
        self.assertEqual(expected_resp, resp)
        sess.post.assert_called_with(FakeResource2.base_path,
                                     endpoint_filter=FakeResource2.service,
                                     json=json_body)
        # With an id -> PUT to base_path/id.
        r_id = "my_id"
        resp = FakeResource2.create_by_id(sess, attrs, resource_id=r_id)
        self.assertEqual(response_value, resp)
        sess.put.assert_called_with(
            utils.urljoin(FakeResource2.base_path, r_id),
            endpoint_filter=FakeResource2.service,
            json=json_body)
        # With path args -> POST to the interpolated base path.
        path_args = {"parent_name": "my_name"}
        resp = FakeResource2.create_by_id(sess, attrs, path_args=path_args)
        self.assertEqual(response_value, resp)
        sess.post.assert_called_with(FakeResource2.base_path % path_args,
                                     endpoint_filter=FakeResource2.service,
                                     json=json_body)
        # Both id and path args -> PUT to interpolated base_path/id.
        resp = FakeResource2.create_by_id(sess, attrs, resource_id=r_id,
                                          path_args=path_args)
        self.assertEqual(response_value, resp)
        sess.put.assert_called_with(
            utils.urljoin(FakeResource2.base_path % path_args, r_id),
            endpoint_filter=FakeResource2.service,
            json=json_body)
    def test_create_without_resource_key(self):
        """Without a resource_key the attrs are posted unwrapped."""
        key = None
        response_value = {"a": 1, "b": 2, "c": 3}
        response_body = response_value
        attrs = response_value
        json_body = attrs
        self._test_create_by_id(key, response_value, response_body,
                                attrs, json_body)
    def test_create_with_response_headers(self):
        """Response headers are merged into the returned dict on create."""
        key = None
        response_value = {"a": 1, "b": 2, "c": 3}
        response_body = response_value
        response_headers = {'location': 'foo'}
        attrs = response_value.copy()
        json_body = attrs
        self._test_create_by_id(key, response_value, response_body,
                                attrs, json_body,
                                response_headers=response_headers)
def test_create_with_resource_key(self):
key = "my_key"
response_value = {"a": 1, "b": 2, "c": 3}
response_body = {key: response_value}
attrs = response_body
json_body = {key: attrs}
self._test_create_by_id(key, response_value, response_body,
attrs, json_body)
    def _test_get_data_by_id(self, key, response_value, response_body):
        """Exercise get_data_by_id with and without path args.

        Expects a GET to base_path/id, with path args interpolated into
        the base path when provided.
        """
        class FakeResource2(FakeResource):
            resource_key = key
            service = "my_service"
        response = mock.Mock()
        response.json = mock.Mock(return_value=response_body)
        sess = mock.Mock()
        sess.get = mock.Mock(return_value=response)
        r_id = "my_id"
        resp = FakeResource2.get_data_by_id(sess, resource_id=r_id)
        self.assertEqual(response_value, resp)
        sess.get.assert_called_with(
            utils.urljoin(FakeResource2.base_path, r_id),
            endpoint_filter=FakeResource2.service)
        path_args = {"parent_name": "my_name"}
        resp = FakeResource2.get_data_by_id(sess, resource_id=r_id,
                                            path_args=path_args)
        self.assertEqual(response_value, resp)
        sess.get.assert_called_with(
            utils.urljoin(FakeResource2.base_path % path_args, r_id),
            endpoint_filter=FakeResource2.service)
    def test_get_data_without_resource_key(self):
        """Without a resource_key the raw response body is returned."""
        key = None
        response_value = {"a": 1, "b": 2, "c": 3}
        response_body = response_value
        self._test_get_data_by_id(key, response_value, response_body)
    def test_get_data_with_resource_key(self):
        """With a resource_key the body is unwrapped from under the key."""
        key = "my_key"
        response_value = {"a": 1, "b": 2, "c": 3}
        response_body = {key: response_value}
        self._test_get_data_by_id(key, response_value, response_body)
    def _test_head_data_by_id(self, key, response_value):
        """Exercise head_data_by_id with and without path args.

        Expects a HEAD to base_path/id with an empty Accept header; the
        response headers come back under a 'headers' key.
        """
        class FakeResource2(FakeResource):
            resource_key = key
            service = "my_service"
        response = mock.Mock()
        response.headers = response_value
        sess = mock.Mock()
        sess.head = mock.Mock(return_value=response)
        r_id = "my_id"
        resp = FakeResource2.head_data_by_id(sess, resource_id=r_id)
        self.assertEqual({'headers': response_value}, resp)
        headers = {'Accept': ''}
        sess.head.assert_called_with(
            utils.urljoin(FakeResource2.base_path, r_id),
            endpoint_filter=FakeResource2.service,
            headers=headers)
        path_args = {"parent_name": "my_name"}
        resp = FakeResource2.head_data_by_id(sess, resource_id=r_id,
                                             path_args=path_args)
        self.assertEqual({'headers': response_value}, resp)
        headers = {'Accept': ''}
        sess.head.assert_called_with(
            utils.urljoin(FakeResource2.base_path % path_args, r_id),
            endpoint_filter=FakeResource2.service,
            headers=headers)
    def test_head_data_without_resource_key(self):
        """HEAD result does not depend on a resource_key being unset."""
        key = None
        response_value = {"key1": "value1", "key2": "value2"}
        self._test_head_data_by_id(key, response_value)
    def test_head_data_with_resource_key(self):
        """HEAD result does not depend on a resource_key being set."""
        key = "my_key"
        response_value = {"key1": "value1", "key2": "value2"}
        self._test_head_data_by_id(key, response_value)
    def _test_update_by_id(self, key, response_value, response_body,
                           attrs, json_body, response_headers=None):
        """Exercise update_by_id (PATCH) with and without path args."""
        class FakeResource2(FakeResource):
            # patch_update makes update use HTTP PATCH instead of PUT.
            patch_update = True
            resource_key = key
            service = "my_service"
        response = mock.Mock()
        response.json = mock.Mock(return_value=response_body)
        response.headers = response_headers
        expected_resp = response_value.copy()
        if response_headers:
            expected_resp.update({'headers': response_headers})
        sess = mock.Mock()
        sess.patch = mock.Mock(return_value=response)
        r_id = "my_id"
        resp = FakeResource2.update_by_id(sess, r_id, attrs)
        self.assertEqual(expected_resp, resp)
        sess.patch.assert_called_with(
            utils.urljoin(FakeResource2.base_path, r_id),
            endpoint_filter=FakeResource2.service,
            json=json_body)
        path_args = {"parent_name": "my_name"}
        resp = FakeResource2.update_by_id(sess, r_id, attrs,
                                          path_args=path_args)
        self.assertEqual(expected_resp, resp)
        sess.patch.assert_called_with(
            utils.urljoin(FakeResource2.base_path % path_args, r_id),
            endpoint_filter=FakeResource2.service,
            json=json_body)
def test_update_without_resource_key(self):
key = None
response_value = {"a": 1, "b": 2, "c": 3}
response_body = response_value
attrs = response_value
json_body = attrs
self._test_update_by_id(key, response_value, response_body,
attrs, json_body)
def test_update_with_resource_key(self):
key = "my_key"
response_value = {"a": 1, "b": 2, "c": 3}
response_body = {key: response_value}
attrs = response_value
json_body = {key: attrs}
self._test_update_by_id(key, response_value, response_body,
attrs, json_body)
def test_update_with_response_headers(self):
key = "my_key"
response_value = {"a": 1, "b": 2, "c": 3}
response_body = {key: response_value}
response_headers = {'location': 'foo'}
attrs = response_value.copy()
json_body = {key: attrs}
self._test_update_by_id(key, response_value, response_body,
attrs, json_body,
response_headers=response_headers)
def test_delete_by_id(self):
class FakeResource2(FakeResource):
service = "my_service"
sess = mock.Mock()
sess.delete = mock.Mock(return_value=None)
r_id = "my_id"
resp = FakeResource2.delete_by_id(sess, r_id)
self.assertIsNone(resp)
headers = {'Accept': ''}
sess.delete.assert_called_with(
utils.urljoin(FakeResource2.base_path, r_id),
endpoint_filter=FakeResource2.service,
headers=headers)
path_args = {"parent_name": "my_name"}
resp = FakeResource2.delete_by_id(sess, r_id, path_args=path_args)
self.assertIsNone(resp)
headers = {'Accept': ''}
sess.delete.assert_called_with(
utils.urljoin(FakeResource2.base_path % path_args, r_id),
endpoint_filter=FakeResource2.service,
headers=headers)
def test_create(self):
resp = mock.Mock()
resp.json = mock.Mock(return_value=fake_body)
resp.headers = {'location': 'foo'}
self.session.post = mock.Mock(return_value=resp)
# Create resource with subset of attributes in order to
# verify create refreshes all attributes from response.
obj = FakeResource.new(parent_name=fake_parent,
name=fake_name,
enabled=True,
attr1=fake_attr1)
self.assertEqual(obj, obj.create(self.session))
self.assertFalse(obj.is_dirty)
last_req = self.session.post.call_args[1]["json"][
FakeResource.resource_key]
self.assertEqual(4, len(last_req))
self.assertTrue(last_req['enabled'])
self.assertEqual(fake_parent, last_req['parent_name'])
self.assertEqual(fake_name, last_req['name'])
self.assertEqual(fake_attr1, last_req['attr1'])
self.assertTrue(obj['enabled'])
self.assertEqual(fake_name, obj['name'])
self.assertEqual(fake_parent, obj['parent_name'])
self.assertEqual(fake_attr1, obj['attr1'])
self.assertEqual(fake_attr2, obj['attr2'])
self.assertIsNone(obj['status'])
self.assertTrue(obj.enabled)
self.assertEqual(fake_id, obj.id)
self.assertEqual(fake_name, obj.name)
self.assertEqual(fake_parent, obj.parent_name)
self.assertEqual(fake_parent, obj.parent)
self.assertEqual(fake_attr1, obj.first)
self.assertEqual(fake_attr1, obj.attr1)
self.assertEqual(fake_attr2, obj.second)
self.assertEqual(fake_attr2, obj.attr2)
self.assertIsNone(obj.status)
self.assertEqual('foo', obj.location)
def test_get(self):
resp = mock.Mock()
resp.json = mock.Mock(return_value=fake_body)
resp.headers = {'location': 'foo'}
self.session.get = mock.Mock(return_value=resp)
# Create resource with subset of attributes in order to
# verify get refreshes all attributes from response.
obj = FakeResource.from_id(str(fake_id))
obj['parent_name'] = fake_parent
self.assertEqual(obj, obj.get(self.session))
# Check that the proper URL is being built.
self.assertCalledURL(self.session.get,
os.path.join(fake_base_path % fake_arguments,
str(fake_id))[1:])
self.assertTrue(obj['enabled'])
self.assertEqual(fake_name, obj['name'])
self.assertEqual(fake_parent, obj['parent_name'])
self.assertEqual(fake_attr1, obj['attr1'])
self.assertEqual(fake_attr2, obj['attr2'])
self.assertIsNone(obj['status'])
self.assertTrue(obj.enabled)
self.assertEqual(fake_id, obj.id)
self.assertEqual(fake_name, obj.name)
self.assertEqual(fake_parent, obj.parent_name)
self.assertEqual(fake_parent, obj.parent)
self.assertEqual(fake_attr1, obj.first)
self.assertEqual(fake_attr1, obj.attr1)
self.assertEqual(fake_attr2, obj.second)
self.assertEqual(fake_attr2, obj.attr2)
self.assertIsNone(obj.status)
self.assertIsNone(obj.location)
def test_get_by_id(self):
resp = mock.Mock()
resp.json = mock.Mock(return_value=fake_body)
self.session.get = mock.Mock(return_value=resp)
obj = FakeResource.get_by_id(self.session, fake_id,
path_args=fake_arguments)
# Check that the proper URL is being built.
self.assertCalledURL(self.session.get,
os.path.join(fake_base_path % fake_arguments,
str(fake_id))[1:])
self.assertEqual(fake_id, obj.id)
self.assertEqual(fake_name, obj['name'])
self.assertEqual(fake_attr1, obj['attr1'])
self.assertEqual(fake_attr2, obj['attr2'])
self.assertEqual(fake_name, obj.name)
self.assertEqual(fake_attr1, obj.first)
self.assertEqual(fake_attr2, obj.second)
def test_get_by_id_with_headers(self):
header1 = "fake-value1"
header2 = "fake-value2"
headers = {"header1": header1,
"header2": header2}
resp = mock.Mock(headers=headers)
resp.json = mock.Mock(return_value=fake_body)
self.session.get = mock.Mock(return_value=resp)
class FakeResource2(FakeResource):
header1 = resource.header("header1")
header2 = resource.header("header2")
obj = FakeResource2.get_by_id(self.session, fake_id,
path_args=fake_arguments,
include_headers=True)
self.assertCalledURL(self.session.get,
os.path.join(fake_base_path % fake_arguments,
str(fake_id))[1:])
self.assertEqual(fake_id, obj.id)
self.assertEqual(fake_name, obj['name'])
self.assertEqual(fake_attr1, obj['attr1'])
self.assertEqual(fake_attr2, obj['attr2'])
self.assertEqual(header1, obj['headers']['header1'])
self.assertEqual(header2, obj['headers']['header2'])
self.assertEqual(fake_name, obj.name)
self.assertEqual(fake_attr1, obj.first)
self.assertEqual(fake_attr2, obj.second)
self.assertEqual(header1, obj.header1)
self.assertEqual(header2, obj.header2)
def test_head_by_id(self):
class FakeResource2(FakeResource):
header1 = resource.header("header1")
header2 = resource.header("header2")
resp = mock.Mock(headers={"header1": "one", "header2": "two"})
self.session.head = mock.Mock(return_value=resp)
obj = FakeResource2.head_by_id(self.session, fake_id,
path_args=fake_arguments)
self.assertCalledURL(self.session.head,
os.path.join(fake_base_path % fake_arguments,
str(fake_id))[1:])
self.assertEqual('one', obj['headers']['header1'])
self.assertEqual('two', obj['headers']['header2'])
self.assertEqual('one', obj.header1)
self.assertEqual('two', obj.header2)
def test_patch_update(self):
class FakeResourcePatch(FakeResource):
patch_update = True
resp = mock.Mock()
resp.json = mock.Mock(return_value=fake_body)
resp.headers = {'location': 'foo'}
self.session.patch = mock.Mock(return_value=resp)
# Create resource with subset of attributes in order to
# verify update refreshes all attributes from response.
obj = FakeResourcePatch.new(id=fake_id, parent_name=fake_parent,
name=fake_name, attr1=fake_attr1)
self.assertTrue(obj.is_dirty)
self.assertEqual(obj, obj.update(self.session))
self.assertFalse(obj.is_dirty)
self.assertCalledURL(self.session.patch,
os.path.join(fake_base_path % fake_arguments,
str(fake_id))[1:])
last_req = self.session.patch.call_args[1]["json"][
FakeResource.resource_key]
self.assertEqual(3, len(last_req))
self.assertEqual(fake_parent, last_req['parent_name'])
self.assertEqual(fake_name, last_req['name'])
self.assertEqual(fake_attr1, last_req['attr1'])
self.assertTrue(obj['enabled'])
self.assertEqual(fake_name, obj['name'])
self.assertEqual(fake_parent, obj['parent_name'])
self.assertEqual(fake_attr1, obj['attr1'])
self.assertEqual(fake_attr2, obj['attr2'])
self.assertIsNone(obj['status'])
self.assertTrue(obj.enabled)
self.assertEqual(fake_id, obj.id)
self.assertEqual(fake_name, obj.name)
self.assertEqual(fake_parent, obj.parent_name)
self.assertEqual(fake_parent, obj.parent)
self.assertEqual(fake_attr1, obj.first)
self.assertEqual(fake_attr1, obj.attr1)
self.assertEqual(fake_attr2, obj.second)
self.assertEqual(fake_attr2, obj.attr2)
self.assertIsNone(obj.status)
self.assertEqual('foo', obj.location)
def test_put_update(self):
class FakeResourcePut(FakeResource):
# This is False by default, but explicit for this test.
patch_update = False
resp = mock.Mock()
resp.json = mock.Mock(return_value=fake_body)
resp.headers = {'location': 'foo'}
self.session.put = mock.Mock(return_value=resp)
# Create resource with subset of attributes in order to
# verify update refreshes all attributes from response.
obj = FakeResourcePut.new(id=fake_id, parent_name=fake_parent,
name=fake_name, attr1=fake_attr1)
self.assertTrue(obj.is_dirty)
self.assertEqual(obj, obj.update(self.session))
self.assertFalse(obj.is_dirty)
self.assertCalledURL(self.session.put,
os.path.join(fake_base_path % fake_arguments,
str(fake_id))[1:])
last_req = self.session.put.call_args[1]["json"][
FakeResource.resource_key]
self.assertEqual(3, len(last_req))
self.assertEqual(fake_parent, last_req['parent_name'])
self.assertEqual(fake_name, last_req['name'])
self.assertEqual(fake_attr1, last_req['attr1'])
self.assertTrue(obj['enabled'])
self.assertEqual(fake_name, obj['name'])
self.assertEqual(fake_parent, obj['parent_name'])
self.assertEqual(fake_attr1, obj['attr1'])
self.assertEqual(fake_attr2, obj['attr2'])
self.assertIsNone(obj['status'])
self.assertTrue(obj.enabled)
self.assertEqual(fake_id, obj.id)
self.assertEqual(fake_name, obj.name)
self.assertEqual(fake_parent, obj.parent_name)
self.assertEqual(fake_parent, obj.parent)
self.assertEqual(fake_attr1, obj.first)
self.assertEqual(fake_attr1, obj.attr1)
self.assertEqual(fake_attr2, obj.second)
self.assertEqual(fake_attr2, obj.attr2)
self.assertIsNone(obj.status)
self.assertEqual('foo', obj.location)
def test_update_early_exit(self):
obj = FakeResource()
obj._dirty = [] # Bail out early if there's nothing to update.
self.assertIsNone(obj.update("session"))
def test_update_no_id_attribute(self):
obj = FakeResource.existing(id=1, attr="value1",
parent_name=fake_parent)
obj.first = "value2" # Make it dirty
obj.update_by_id = mock.Mock(return_value=dict())
# If no id_attribute is returned in the update response, make sure
# we handle the resulting KeyError.
self.assertEqual(obj, obj.update("session"))
def test_delete(self):
obj = FakeResource({"id": fake_id, "parent_name": fake_parent})
obj.delete(self.session)
self.assertCalledURL(self.session.delete,
os.path.join(fake_base_path % fake_arguments,
str(fake_id))[1:])
def _test_list(self, resource_class):
results = [fake_data.copy(), fake_data.copy(), fake_data.copy()]
for i in range(len(results)):
results[i]['id'] = fake_id + i
if resource_class.resources_key is not None:
body = {resource_class.resources_key:
self._get_expected_results()}
sentinel = {resource_class.resources_key: []}
else:
body = self._get_expected_results()
sentinel = []
resp1 = mock.Mock()
resp1.json = mock.Mock(return_value=body)
resp2 = mock.Mock()
resp2.json = mock.Mock(return_value=sentinel)
self.session.get.side_effect = [resp1, resp2]
objs = list(resource_class.list(self.session, path_args=fake_arguments,
paginated=True))
params = {'limit': 3, 'marker': results[-1]['id']}
self.assertEqual(params, self.session.get.call_args[1]['params'])
self.assertEqual(3, len(objs))
for obj in objs:
self.assertIn(obj.id, range(fake_id, fake_id + 3))
self.assertEqual(fake_name, obj['name'])
self.assertEqual(fake_name, obj.name)
self.assertIsInstance(obj, FakeResource)
def _get_expected_results(self):
results = [fake_data.copy(), fake_data.copy(), fake_data.copy()]
for i in range(len(results)):
results[i]['id'] = fake_id + i
return results
def test_list_keyed_resource(self):
self._test_list(FakeResource)
def test_list_non_keyed_resource(self):
self._test_list(FakeResourceNoKeys)
def _test_list_call_count(self, paginated):
# Test that we've only made one call to receive all data
results = [fake_data.copy(), fake_data.copy(), fake_data.copy()]
resp = mock.Mock()
resp.json = mock.Mock(return_value={fake_resources: results})
attrs = {"get.return_value": resp}
session = mock.Mock(**attrs)
list(FakeResource.list(session, params={'limit': len(results) + 1},
path_args=fake_arguments,
paginated=paginated))
# Ensure we only made one call to complete this.
self.assertEqual(1, session.get.call_count)
def test_list_bail_out(self):
# When we get less data than limit, make sure we made one call
self._test_list_call_count(True)
def test_list_nonpaginated(self):
# When we call with paginated=False, make sure we made one call
self._test_list_call_count(False)
def test_determine_limit(self):
full_page = [fake_data.copy(), fake_data.copy(), fake_data.copy()]
last_page = [fake_data.copy()]
session = mock.Mock()
session.get = mock.Mock()
full_response = mock.Mock()
response_body = {FakeResource.resources_key: full_page}
full_response.json = mock.Mock(return_value=response_body)
last_response = mock.Mock()
response_body = {FakeResource.resources_key: last_page}
last_response.json = mock.Mock(return_value=response_body)
pages = [full_response, full_response, last_response]
session.get.side_effect = pages
# Don't specify a limit. Resource.list will determine the limit
# is 3 based on the first `full_page`.
results = list(FakeResource.list(session, path_args=fake_arguments,
paginated=True))
self.assertEqual(session.get.call_count, len(pages))
self.assertEqual(len(full_page + full_page + last_page), len(results))
def test_empty_list(self):
page = []
session = mock.Mock()
session.get = mock.Mock()
full_response = mock.Mock()
response_body = {FakeResource.resources_key: page}
full_response.json = mock.Mock(return_value=response_body)
pages = [full_response]
session.get.side_effect = pages
results = list(FakeResource.list(session, path_args=fake_arguments,
paginated=True))
self.assertEqual(session.get.call_count, len(pages))
self.assertEqual(len(page), len(results))
def test_attrs_name(self):
obj = FakeResource()
self.assertIsNone(obj.name)
del obj.name
def test_to_dict(self):
kwargs = {
'enabled': True,
'name': 'FOO',
'parent': 'dad',
'attr1': 'BAR',
'attr2': ['ZOO', 'BAZ'],
'status': 'Active',
'headers': {
'key': 'value'
}
}
obj = FakeResource(kwargs)
res = obj.to_dict()
self.assertIsInstance(res, dict)
self.assertTrue(res['enabled'])
self.assertEqual('FOO', res['name'])
self.assertEqual('dad', res['parent'])
self.assertEqual('BAR', res['attr1'])
self.assertEqual(['ZOO', 'BAZ'], res['attr2'])
self.assertEqual('Active', res['status'])
self.assertNotIn('headers', res)
def test_composite_attr_happy(self):
obj = FakeResource.existing(**{'attr3': '3'})
try:
self.assertEqual('3', obj.third)
except AttributeError:
self.fail("third was not found as expected")
def test_composite_attr_fallback(self):
obj = FakeResource.existing(**{'attr_three': '3'})
try:
self.assertEqual('3', obj.third)
except AttributeError:
self.fail("third was not found in fallback as expected")
def test_id_del(self):
class Test(resource.Resource):
id_attribute = "my_id"
attrs = {"my_id": 100}
t = Test(attrs=attrs)
self.assertEqual(attrs["my_id"], t.id)
del t.id
self.assertTrue(Test.id_attribute not in t._attrs)
def test_from_name_with_name(self):
name = "Ernie Banks"
obj = FakeResource.from_name(name)
self.assertEqual(name, obj.name)
def test_from_id_with_name(self):
name = "Sandy Koufax"
obj = FakeResource.from_id(name)
self.assertEqual(name, obj.id)
def test_from_id_with_object(self):
name = "Mickey Mantle"
obj = FakeResource.new(name=name)
new_obj = FakeResource.from_id(obj)
self.assertIs(new_obj, obj)
self.assertEqual(obj.name, new_obj.name)
def test_from_id_with_bad_value(self):
def should_raise():
FakeResource.from_id(3.14)
self.assertThat(should_raise, matchers.raises(ValueError))
def test_dirty_list(self):
class Test(resource.Resource):
attr = resource.prop("attr")
# Check if dirty after setting by prop
sot1 = Test()
self.assertFalse(sot1.is_dirty)
sot1.attr = 1
self.assertTrue(sot1.is_dirty)
# Check if dirty after setting by mapping
sot2 = Test()
sot2["attr"] = 1
self.assertTrue(sot1.is_dirty)
# Check if dirty after creation
sot3 = Test({"attr": 1})
self.assertTrue(sot3.is_dirty)
def test_update_attrs(self):
class Test(resource.Resource):
moe = resource.prop("the-attr")
larry = resource.prop("the-attr2")
curly = resource.prop("the-attr3", type=int)
shemp = resource.prop("the-attr4")
value1 = "one"
value2 = "two"
value3 = "3"
value4 = "fore"
value5 = "fiver"
sot = Test({"the-attr": value1})
sot.update_attrs({"the-attr2": value2, "notprop": value4})
self.assertTrue(sot.is_dirty)
self.assertEqual(value1, sot.moe)
self.assertEqual(value1, sot["the-attr"])
self.assertEqual(value2, sot.larry)
self.assertEqual(value4, sot.notprop)
sot._reset_dirty()
sot.update_attrs(curly=value3)
self.assertTrue(sot.is_dirty)
self.assertEqual(int, type(sot.curly))
self.assertEqual(int(value3), sot.curly)
sot._reset_dirty()
sot.update_attrs(**{"the-attr4": value5})
self.assertTrue(sot.is_dirty)
self.assertEqual(value5, sot.shemp)
def test_get_id(self):
class Test(resource.Resource):
pass
ID = "an id"
res = Test({"id": ID})
self.assertEqual(ID, resource.Resource.get_id(ID))
self.assertEqual(ID, resource.Resource.get_id(res))
def test_convert_ids(self):
class TestResourceFoo(resource.Resource):
pass
class TestResourceBar(resource.Resource):
pass
resfoo = TestResourceFoo({'id': 'FAKEFOO'})
resbar = TestResourceBar({'id': 'FAKEBAR'})
self.assertIsNone(resource.Resource.convert_ids(None))
attrs = {
'key1': 'value1'
}
self.assertEqual(attrs, resource.Resource.convert_ids(attrs))
attrs = {
'foo': resfoo,
'bar': resbar,
'other': 'whatever',
}
res = resource.Resource.convert_ids(attrs)
self.assertEqual('FAKEFOO', res['foo'])
self.assertEqual('FAKEBAR', res['bar'])
self.assertEqual('whatever', res['other'])
    def test_repr(self):
        """repr() output should eval back to an object with equal state."""
        fr = FakeResource()
        fr._loaded = False
        fr.first = "hey"
        fr.second = "hi"
        fr.third = "nah"
        the_repr = repr(fr)
        # Strip the module prefix so eval() can resolve FakeResource in
        # this test module's namespace.
        the_repr = the_repr.replace('ecl.tests.unit.test_resource.', '')
        # NOTE: eval of a repr is acceptable here only because the input is
        # produced by this test itself, never by external data.
        result = eval(the_repr)
        self.assertEqual(fr._loaded, result._loaded)
        self.assertEqual(fr.first, result.first)
        self.assertEqual(fr.second, result.second)
        self.assertEqual(fr.third, result.third)
def test_id_attribute(self):
faker = FakeResource(fake_data)
self.assertEqual(fake_id, faker.id)
faker.id_attribute = 'name'
self.assertEqual(fake_name, faker.id)
faker.id_attribute = 'attr1'
self.assertEqual(fake_attr1, faker.id)
faker.id_attribute = 'attr2'
self.assertEqual(fake_attr2, faker.id)
faker.id_attribute = 'id'
self.assertEqual(fake_id, faker.id)
def test_name_attribute(self):
class Person_ES(resource.Resource):
name_attribute = "nombre"
nombre = resource.prop('nombre')
name = "Brian"
args = {'nombre': name}
person = Person_ES(args)
self.assertEqual(name, person.nombre)
self.assertEqual(name, person.name)
new_name = "Julien"
person.name = new_name
self.assertEqual(new_name, person.nombre)
self.assertEqual(new_name, person.name)
    def test_boolstr_prop(self):
        """BoolStr-typed props read as booleans and reject invalid strings."""
        faker = FakeResource(fake_data)
        self.assertTrue(faker.enabled)
        self.assertTrue(faker['enabled'])
        # Flip the stored value directly and confirm both access paths see it.
        faker._attrs['enabled'] = False
        self.assertFalse(faker.enabled)
        self.assertFalse(faker['enabled'])
        # Assigning a non-boolean string should fail fast with ValueError.
        def set_invalid():
            faker.enabled = 'INVALID'
        self.assertRaises(ValueError, set_invalid)
class ResourceMapping(base.TestCase):
    """Tests for the mapping protocol (dict-like behavior) of Resource."""

    def test__getitem(self):
        """Item access reads from the underlying attrs dict."""
        value = 10

        class Test(resource.Resource):
            attr = resource.prop("attr")

        t = Test(attrs={"attr": value})
        self.assertEqual(value, t["attr"])

    def test__setitem__existing_item_changed(self):
        """Assigning a new key stores the value and marks it dirty."""
        class Test(resource.Resource):
            pass

        t = Test()
        key = "attr"
        value = 1
        t[key] = value
        self.assertEqual(value, t._attrs[key])
        self.assertTrue(key in t._dirty)

    def test__setitem__existing_item_unchanged(self):
        """Re-assigning the same value must not dirty the key."""
        class Test(resource.Resource):
            pass

        key = "attr"
        value = 1
        t = Test(attrs={key: value})
        t._reset_dirty()  # Clear dirty list so this checks as unchanged.
        t[key] = value
        self.assertEqual(value, t._attrs[key])
        self.assertTrue(key not in t._dirty)

    def test__setitem__new_item(self):
        """Assigning a previously-absent key stores it and dirties it."""
        class Test(resource.Resource):
            pass

        t = Test()
        key = "attr"
        value = 1
        t[key] = value
        self.assertEqual(value, t._attrs[key])
        self.assertTrue(key in t._dirty)

    def test__delitem__(self):
        """Deleting a key removes the value but records the key as dirty."""
        class Test(resource.Resource):
            pass

        key = "attr"
        value = 1
        t = Test(attrs={key: value})
        del t[key]
        self.assertTrue(key not in t._attrs)
        self.assertTrue(key in t._dirty)

    def test__len__(self):
        """len() reflects the number of stored attributes."""
        class Test(resource.Resource):
            pass

        attrs = {"a": 1, "b": 2, "c": 3}
        t = Test(attrs=attrs)
        self.assertEqual(len(attrs.keys()), len(t))

    def test__iter__(self):
        """Iteration yields attribute keys, like a dict."""
        class Test(resource.Resource):
            pass

        attrs = {"a": 1, "b": 2, "c": 3}
        t = Test(attrs=attrs)
        for attr in t:
            self.assertEqual(attrs[attr], t[attr])

    def _test_resource_serialization(self, session_method, resource_method):
        """Ensure Resource-typed attrs are converted before JSON encoding.

        The fake transport call json.dumps()-es the outgoing body and fails
        the test if a raw Resource object is still present in it.
        """
        attr_type = resource.Resource

        class Test(resource.Resource):
            allow_create = True
            attr = resource.prop("attr", type=attr_type)

        the_id = 123
        sot = Test()
        sot.attr = resource.Resource({"id": the_id})
        self.assertEqual(attr_type, type(sot.attr))

        def fake_call(*args, **kwargs):
            attrs = kwargs["json"]
            try:
                json.dumps(attrs)
            except TypeError as e:
                self.fail("Unable to serialize _attrs: %s" % e)
            resp = mock.Mock()
            resp.json = mock.Mock(return_value=attrs)
            return resp

        session = mock.Mock()
        setattr(session, session_method, mock.Mock(side_effect=fake_call))
        # BUG FIX: the original invoked session.create_by_id /
        # session.update_by_id on the *mock* session, which merely records
        # a call — fake_call never ran and the test asserted nothing.
        # Dispatch through the resource class so the serialization path
        # is actually exercised.
        if resource_method == "create_by_id":
            Test.create_by_id(session, sot._attrs)
        elif resource_method == "update_by_id":
            Test.update_by_id(session, None, sot._attrs)

    def test_create_serializes_resource_types(self):
        self._test_resource_serialization("post", "create_by_id")

    def test_update_serializes_resource_types(self):
        self._test_resource_serialization("patch", "update_by_id")
class FakeResponse(object):
    """Minimal stand-in for an HTTP response object.

    Only the json() accessor used by Resource is implemented.
    """
    def __init__(self, response):
        # The pre-decoded body handed back from json().
        self.body = response
    def json(self):
        """Return the canned body, mimicking requests.Response.json()."""
        return self.body
class TestFind(base.TestCase):
    """Tests for Resource.find: lookup by id, by name, and failure modes."""

    NAME = 'matrix'
    ID = 'Fishburne'
    PROP = 'attribute2'

    def setUp(self):
        super(TestFind, self).setUp()
        self.mock_session = mock.Mock()
        self.mock_get = mock.Mock()
        self.mock_session.get = self.mock_get
        self.matrix = {'id': self.ID, 'name': self.NAME, 'prop': self.PROP}

    def test_name(self):
        # The id probe 404s first; the follow-up name search succeeds.
        self.mock_get.side_effect = [
            exceptions.NotFoundException(),
            FakeResponse({FakeResource.resources_key: [self.matrix]})
        ]
        result = FakeResource.find(self.mock_session, self.NAME,
                                   path_args=fake_arguments)
        self.assertEqual(self.NAME, result.name)
        self.assertEqual(self.PROP, result.prop)

    def test_id(self):
        self.mock_get.side_effect = [
            FakeResponse({FakeResource.resource_key: self.matrix})
        ]
        result = FakeResource.find(self.mock_session, self.ID,
                                   path_args=fake_arguments)
        self.assertEqual(self.ID, result.id)
        self.assertEqual(self.PROP, result.prop)
        path = "fakes/" + fake_parent + "/data/" + self.ID
        self.mock_get.assert_any_call(path, endpoint_filter=None)

    def test_id_no_retrieve(self):
        self.mock_get.side_effect = [
            FakeResponse({FakeResource.resources_key: [self.matrix]})
        ]

        class NoRetrieveResource(FakeResource):
            allow_retrieve = False

        result = NoRetrieveResource.find(self.mock_session, self.ID,
                                         path_args=fake_arguments)
        self.assertEqual(self.ID, result.id)
        self.assertEqual(self.PROP, result.prop)

    def test_dups(self):
        dupe = self.matrix.copy()
        dupe['id'] = 'different'
        self.mock_get.side_effect = [
            # Raise a 404 first so we get out of the ID search and into name.
            exceptions.NotFoundException(),
            FakeResponse({FakeResource.resources_key: [self.matrix, dupe]})
        ]
        self.assertRaises(exceptions.DuplicateResource, FakeResource.find,
                          self.mock_session, self.NAME)

    def test_id_attribute_find(self):
        floater = {'ip_address': "127.0.0.1", 'prop': self.PROP}
        self.mock_get.side_effect = [
            FakeResponse({FakeResource.resource_key: floater})
        ]
        # BUG FIX: restore the class attribute even if an assertion fails
        # mid-test, so later tests see the default again; the original also
        # assigned id_attribute twice in a row.
        self.addCleanup(setattr, FakeResource, 'id_attribute', 'id')
        FakeResource.id_attribute = 'ip_address'
        result = FakeResource.find(self.mock_session, "127.0.0.1",
                                   path_args=fake_arguments)
        self.assertEqual("127.0.0.1", result.id)
        self.assertEqual(self.PROP, result.prop)
        # BUG FIX: the original used mock_get.called_once_with(...), which
        # is not an assertion at all — it silently creates a child mock.
        # Assert the id-lookup URL the same way test_id does.
        path = "fakes/" + fake_parent + "/data/127.0.0.1"
        self.mock_get.assert_any_call(path, endpoint_filter=None)

    def test_nada(self):
        self.mock_get.side_effect = [
            exceptions.NotFoundException(),
            FakeResponse({FakeResource.resources_key: []})
        ]
        self.assertIsNone(FakeResource.find(self.mock_session, self.NAME))

    def test_no_name(self):
        self.mock_get.side_effect = [
            exceptions.NotFoundException(),
            FakeResponse({FakeResource.resources_key: [self.matrix]})
        ]
        # BUG FIX: the original left name_attribute = None on the shared
        # FakeResource class, leaking the mutation into every later test.
        original = FakeResource.name_attribute
        self.addCleanup(setattr, FakeResource, 'name_attribute', original)
        FakeResource.name_attribute = None
        self.assertIsNone(FakeResource.find(self.mock_session, self.NAME))

    def test_nada_not_ignored(self):
        self.mock_get.side_effect = [
            exceptions.NotFoundException(),
            FakeResponse({FakeResource.resources_key: []})
        ]
        self.assertRaises(exceptions.ResourceNotFound, FakeResource.find,
                          self.mock_session, self.NAME, ignore_missing=False)
class TestWaitForStatus(base.TestCase):
    """Tests for the resource.wait_for_status polling helper."""

    def setUp(self):
        super(TestWaitForStatus, self).setUp()
        self.sess = mock.Mock()
        # FIX: canned responses were previously built in __init__; setUp is
        # the conventional per-test fixture hook and keeps construction in
        # one place.
        self.build = FakeResponse(self.body_with_status(fake_body, 'BUILD'))
        self.active = FakeResponse(self.body_with_status(fake_body, 'ACTIVE'))
        self.error = FakeResponse(self.body_with_status(fake_body, 'ERROR'))

    def body_with_status(self, body, status):
        """Return a deep copy of *body* with its status field replaced."""
        body_copy = copy.deepcopy(body)
        body_copy[fake_resource]['status'] = status
        return body_copy

    def test_wait_for_status_nothing(self):
        # Already in the desired status: no GET should be issued at all.
        self.sess.get = mock.Mock()
        sot = FakeResource.new(**fake_data)
        sot.status = 'ACTIVE'
        self.assertEqual(sot, resource.wait_for_status(
            self.sess, sot, 'ACTIVE', [], 1, 2))
        self.assertEqual([], self.sess.get.call_args_list)

    def test_wait_for_status(self):
        # One BUILD poll followed by ACTIVE terminates successfully.
        self.sess.get = mock.Mock()
        self.sess.get.side_effect = [self.build, self.active]
        sot = FakeResource.new(**fake_data)
        self.assertEqual(sot, resource.wait_for_status(
            self.sess, sot, 'ACTIVE', [], 1, 2))

    def test_wait_for_status_timeout(self):
        # Never leaving BUILD within the wait budget raises ResourceTimeout.
        self.sess.get = mock.Mock()
        self.sess.get.side_effect = [self.build, self.build]
        sot = FakeResource.new(**fake_data)
        self.assertRaises(exceptions.ResourceTimeout, resource.wait_for_status,
                          self.sess, sot, 'ACTIVE', ['ERROR'], 1, 2)

    def test_wait_for_status_failures(self):
        # Hitting a listed failure status raises ResourceFailure.
        self.sess.get = mock.Mock()
        self.sess.get.side_effect = [self.build, self.error]
        sot = FakeResource.new(**fake_data)
        self.assertRaises(exceptions.ResourceFailure, resource.wait_for_status,
                          self.sess, sot, 'ACTIVE', ['ERROR'], 1, 2)

    def test_wait_for_status_no_status(self):
        # A resource class with no status prop cannot be polled.
        class FakeResourceNoStatus(resource.Resource):
            allow_retrieve = True

        sot = FakeResourceNoStatus.new(id=123)
        self.assertRaises(AttributeError, resource.wait_for_status,
                          self.sess, sot, 'ACTIVE', ['ERROR'], 1, 2)
class TestWaitForDelete(base.TestCase):
    """Tests for the resource.wait_for_delete polling helper."""

    def test_wait_for_delete(self):
        """Polling stops once get() raises NotFoundException."""
        session = mock.Mock()
        target = FakeResource.new(**fake_data)
        # First poll still finds the resource; second raises 404 -> done.
        target.get = mock.Mock(side_effect=[target,
                                            exceptions.NotFoundException()])
        self.assertEqual(target,
                         resource.wait_for_delete(session, target, 1, 2))

    def test_wait_for_delete_fail(self):
        """A resource that never disappears times out."""
        session = mock.Mock()
        target = FakeResource.new(**fake_data)
        target.get = mock.Mock(return_value=target)
        self.assertRaises(exceptions.ResourceTimeout,
                          resource.wait_for_delete, session, target, 1, 2)
| 34.704694 | 79 | 0.608148 |
import copy
import json
import os
from keystoneauth1 import session
import mock
import requests
from testtools import matchers
from ecl import exceptions
from ecl import format
from ecl import resource
from ecl.tests.unit import base
from ecl import utils
# Shared fixture values used by the fake resource classes and tests below.
fake_parent = 'robert'
fake_name = 'rey'
fake_id = 99
fake_attr1 = 'lana'
fake_attr2 = 'del'
# Singular/plural body keys under which FakeResource bodies are nested.
fake_resource = 'fake'
fake_resources = 'fakes'
# URL template arguments and paths.
# NOTE(review): fake_path interpolates fake_name ('rey'), not fake_parent
# ('robert') as fake_arguments does — confirm this mismatch is intentional.
fake_arguments = {'parent_name': fake_parent}
fake_base_path = '/fakes/%(parent_name)s/data'
fake_path = '/fakes/rey/data'
# A fully-populated attribute payload for one fake resource...
fake_data = {'id': fake_id,
             'enabled': True,
             'name': fake_name,
             'parent': fake_parent,
             'attr1': fake_attr1,
             'attr2': fake_attr2,
             'status': None}
# ...and the same payload wrapped under its resource key, as a server would
# return it.
fake_body = {fake_resource: fake_data}
class FakeParent(resource.Resource):
    """Minimal parent resource identified by its name rather than an id."""
    id_attribute = "name"
    name = resource.prop('name')
class FakeResource(resource.Resource):
    """Fully-featured fake resource used throughout these tests."""
    resource_key = fake_resource
    resources_key = fake_resources
    base_path = fake_base_path
    # Every operation is permitted so all CRUD code paths can be exercised.
    allow_create = allow_retrieve = allow_update = True
    allow_delete = allow_list = allow_head = True
    enabled = resource.prop('enabled', type=format.BoolStr)
    name = resource.prop('name')
    # 'parent' is exposed locally but travels under the wire name
    # 'parent_name'.
    parent = resource.prop('parent_name')
    first = resource.prop('attr1')
    second = resource.prop('attr2')
    # 'third' falls back to the alias key 'attr_three' when 'attr3' is
    # absent from the body.
    third = resource.prop('attr3', alias='attr_three')
    status = resource.prop('status')
class FakeResourceNoKeys(FakeResource):
    """Variant of FakeResource whose bodies are not nested under keys."""
    resource_key = None
    resources_key = None
class PropTests(base.TestCase):
def test_with_alias_and_type(self):
class Test(resource.Resource):
attr = resource.prop("attr1", alias="attr2", type=bool)
t = Test(attrs={"attr2": 500})
# Need to test that bool(500) happened and attr2 *is* True.
self.assertIs(t.attr, True)
def test_defaults(self):
new_default = "new_default"
class Test(resource.Resource):
attr1 = resource.prop("attr1")
attr2 = resource.prop("attr2", default=new_default)
t = Test()
self.assertIsNone(t.attr1)
self.assertEqual(new_default, t.attr2)
# When the default value is passed in, it is left untouched.
# Check that attr2 is literally the same object we set as default.
t.attr2 = new_default
self.assertIs(new_default, t.attr2)
not_default = 'not default'
t2 = Test({'attr2': not_default})
self.assertEqual(not_default, t2.attr2)
# Assert that if the default is passed in, it overrides the previously
# set value (bug #1425996)
t2.attr2 = new_default
self.assertEqual(new_default, t2.attr2)
def test_get_without_instance(self):
self.assertIsNone(FakeResource.name)
def test_set_ValueError(self):
class Test(resource.Resource):
attr = resource.prop("attr", type=int)
t = Test()
def should_raise():
t.attr = "this is not an int"
self.assertThat(should_raise, matchers.raises(ValueError))
def test_set_TypeError(self):
class Type(object):
def __init__(self):
pass
class Test(resource.Resource):
attr = resource.prop("attr", type=Type)
t = Test()
def should_raise():
t.attr = "this type takes no args"
self.assertThat(should_raise, matchers.raises(TypeError))
def test_resource_type(self):
class FakestResource(resource.Resource):
shortstop = resource.prop("shortstop", type=FakeResource)
third_base = resource.prop("third_base", type=FakeResource)
sot = FakestResource()
id1 = "Ernie Banks"
id2 = "Ron Santo"
sot.shortstop = id1
sot.third_base = id2
resource1 = FakeResource.new(id=id1)
self.assertEqual(resource1, sot.shortstop)
self.assertEqual(id1, sot.shortstop.id)
self.assertEqual(FakeResource, type(sot.shortstop))
resource2 = FakeResource.new(id=id2)
self.assertEqual(resource2, sot.third_base)
self.assertEqual(id2, sot.third_base.id)
self.assertEqual(FakeResource, type(sot.third_base))
sot2 = FakestResource()
sot2.shortstop = resource1
sot2.third_base = resource2
self.assertEqual(resource1, sot2.shortstop)
self.assertEqual(id1, sot2.shortstop.id)
self.assertEqual(FakeResource, type(sot2.shortstop))
self.assertEqual(resource2, sot2.third_base)
self.assertEqual(id2, sot2.third_base.id)
self.assertEqual(FakeResource, type(sot2.third_base))
body = {
"shortstop": id1,
"third_base": id2
}
sot3 = FakestResource(body)
self.assertEqual(FakeResource({"id": id1}), sot3.shortstop)
self.assertEqual(FakeResource({"id": id2}), sot3.third_base)
def test_set_alias_same_name(self):
# A prop whose alias equals its own attribute name must still store
# and read the value under the underlying key.
class Test(resource.Resource):
attr = resource.prop("something", alias="attr")
val = "hey"
args = {"something": val}
sot = Test(args)
self.assertEqual(val, sot._attrs["something"])
self.assertEqual(val, sot.attr)
def test_property_is_none(self):
# An explicit None must be preserved, not coerced through the prop
# type (dict) into an empty value.
class Test(resource.Resource):
attr = resource.prop("something", type=dict)
args = {"something": None}
sot = Test(args)
self.assertIsNone(sot._attrs["something"])
self.assertIsNone(sot.attr)
# Tests for resource.header props: header values live under the 'headers'
# key of the resource's attrs and participate in dirty tracking just like
# regular props.
class HeaderTests(base.TestCase):
class Test(resource.Resource):
base_path = "/ramones"
service = "punk"
allow_create = True
allow_update = True
hey = resource.header("vocals")
ho = resource.header("guitar")
letsgo = resource.header("bass")
def test_get(self):
# Present headers are readable via the prop; absent ones are None.
val = "joey"
args = {"vocals": val}
sot = HeaderTests.Test({'headers': args})
self.assertEqual(val, sot.hey)
self.assertIsNone(sot.ho)
self.assertIsNone(sot.letsgo)
def test_set_new(self):
# Setting a header that was not present marks the resource dirty.
args = {"vocals": "joey", "bass": "deedee"}
sot = HeaderTests.Test({'headers': args})
sot._reset_dirty()
sot.ho = "johnny"
self.assertEqual("johnny", sot.ho)
self.assertTrue(sot.is_dirty)
def test_set_old(self):
# Overwriting an existing header also marks the resource dirty.
args = {"vocals": "joey", "bass": "deedee"}
sot = HeaderTests.Test({'headers': args})
sot._reset_dirty()
sot.letsgo = "cj"
self.assertEqual("cj", sot.letsgo)
self.assertTrue(sot.is_dirty)
def test_set_brand_new(self):
# Setting a header on an empty headers dict stores it under the
# underlying header name ('guitar'), not the prop name ('ho').
sot = HeaderTests.Test({'headers': {}})
sot._reset_dirty()
sot.ho = "johnny"
self.assertEqual("johnny", sot.ho)
self.assertTrue(sot.is_dirty)
self.assertEqual({'headers': {"guitar": "johnny"}}, sot)
def test_1428342(self):
# Regression test (bug 1428342): header access must work when the
# 'headers' value is a requests CaseInsensitiveDict, not a dict.
sot = HeaderTests.Test({'headers':
requests.structures.CaseInsensitiveDict()})
self.assertIsNone(sot.hey)
def test_create_update_headers(self):
# Dirty header values must be sent as HTTP headers on both the
# create (POST) and update (PUT) requests.
sot = HeaderTests.Test()
sot._reset_dirty()
sot.ho = "johnny"
sot.letsgo = "deedee"
response = mock.Mock()
response_body = {'id': 1}
response.json = mock.Mock(return_value=response_body)
response.headers = None
sess = mock.Mock()
sess.post = mock.Mock(return_value=response)
sess.put = mock.Mock(return_value=response)
sot.create(sess)
headers = {'guitar': 'johnny', 'bass': 'deedee'}
sess.post.assert_called_with(HeaderTests.Test.base_path,
endpoint_filter=HeaderTests.Test.service,
headers=headers,
json={})
sot['id'] = 1
sot.letsgo = "cj"
headers = {'guitar': 'johnny', 'bass': 'cj'}
sot.update(sess)
sess.put.assert_called_with('ramones/1',
endpoint_filter=HeaderTests.Test.service,
headers=headers,
json={})
# CRUD behavior of the Resource base class against a mocked Session.
class ResourceTests(base.TestCase):
def setUp(self):
super(ResourceTests, self).setUp()
# Constrain the mock to the real Session spec so that typos in
# method names fail loudly instead of auto-creating attributes.
self.session = mock.Mock(spec=session.Session)
self.session.get_filter = mock.Mock(return_value={})
def assertCalledURL(self, method, url):
# call_args gives a tuple of *args and tuple of **kwargs.
# Check that the first arg in *args (the URL) has our url.
self.assertEqual(method.call_args[0][0], url)
def test_empty_id(self):
# get() on a resource built from path args alone must populate all
# attributes from the response body.
resp = mock.Mock()
resp.json = mock.Mock(return_value=fake_body)
self.session.get.return_value = resp
obj = FakeResource.new(**fake_arguments)
self.assertEqual(obj, obj.get(self.session))
self.assertEqual(fake_id, obj.id)
self.assertEqual(fake_name, obj['name'])
self.assertEqual(fake_attr1, obj['attr1'])
self.assertEqual(fake_attr2, obj['attr2'])
self.assertEqual(fake_name, obj.name)
self.assertEqual(fake_attr1, obj.first)
self.assertEqual(fake_attr2, obj.second)
def test_not_allowed(self):
# Every CRUD entry point must raise MethodNotSupported when the
# corresponding allow_* flag is False.
class Nope(resource.Resource):
allow_create = allow_retrieve = allow_update = False
allow_delete = allow_list = allow_head = False
nope = Nope()
def cant_create():
nope.create_by_id(1, 2)
def cant_retrieve():
nope.get_data_by_id(1, 2)
def cant_update():
nope.update_by_id(1, 2, 3)
def cant_delete():
nope.delete_by_id(1, 2)
def cant_list():
for i in nope.list(1):
pass
def cant_head():
nope.head_data_by_id(1, 2)
self.assertThat(cant_create,
matchers.raises(exceptions.MethodNotSupported))
self.assertThat(cant_retrieve,
matchers.raises(exceptions.MethodNotSupported))
self.assertThat(cant_update,
matchers.raises(exceptions.MethodNotSupported))
self.assertThat(cant_delete,
matchers.raises(exceptions.MethodNotSupported))
self.assertThat(cant_list,
matchers.raises(exceptions.MethodNotSupported))
self.assertThat(cant_head,
matchers.raises(exceptions.MethodNotSupported))
def _test_create_by_id(self, key, response_value, response_body,
attrs, json_body, response_headers=None):
# Exercise create_by_id over all four URL variants: POST to the
# collection, PUT to a specific id, and both again with path args
# interpolated into base_path.
class FakeResource2(FakeResource):
resource_key = key
service = "my_service"
response = mock.Mock()
response.json = mock.Mock(return_value=response_body)
response.headers = response_headers
expected_resp = response_value.copy()
if response_headers:
# Response headers are folded into the result under 'headers'.
expected_resp.update({'headers': response_headers})
sess = mock.Mock()
sess.put = mock.Mock(return_value=response)
sess.post = mock.Mock(return_value=response)
resp = FakeResource2.create_by_id(sess, attrs)
self.assertEqual(expected_resp, resp)
sess.post.assert_called_with(FakeResource2.base_path,
endpoint_filter=FakeResource2.service,
json=json_body)
# With an explicit resource id the create becomes a PUT to that id.
r_id = "my_id"
resp = FakeResource2.create_by_id(sess, attrs, resource_id=r_id)
self.assertEqual(response_value, resp)
sess.put.assert_called_with(
utils.urljoin(FakeResource2.base_path, r_id),
endpoint_filter=FakeResource2.service,
json=json_body)
path_args = {"parent_name": "my_name"}
resp = FakeResource2.create_by_id(sess, attrs, path_args=path_args)
self.assertEqual(response_value, resp)
sess.post.assert_called_with(FakeResource2.base_path % path_args,
endpoint_filter=FakeResource2.service,
json=json_body)
resp = FakeResource2.create_by_id(sess, attrs, resource_id=r_id,
path_args=path_args)
self.assertEqual(response_value, resp)
sess.put.assert_called_with(
utils.urljoin(FakeResource2.base_path % path_args, r_id),
endpoint_filter=FakeResource2.service,
json=json_body)
def test_create_without_resource_key(self):
# Without a resource_key the body is sent/returned unwrapped.
key = None
response_value = {"a": 1, "b": 2, "c": 3}
response_body = response_value
attrs = response_value
json_body = attrs
self._test_create_by_id(key, response_value, response_body,
attrs, json_body)
def test_create_with_response_headers(self):
key = None
response_value = {"a": 1, "b": 2, "c": 3}
response_body = response_value
response_headers = {'location': 'foo'}
attrs = response_value.copy()
json_body = attrs
self._test_create_by_id(key, response_value, response_body,
attrs, json_body,
response_headers=response_headers)
def test_create_with_resource_key(self):
# With a resource_key the request/response bodies are nested under
# that key.
key = "my_key"
response_value = {"a": 1, "b": 2, "c": 3}
response_body = {key: response_value}
attrs = response_body
json_body = {key: attrs}
self._test_create_by_id(key, response_value, response_body,
attrs, json_body)
def _test_get_data_by_id(self, key, response_value, response_body):
# get_data_by_id must GET the per-id URL, with and without path
# args interpolated into base_path.
class FakeResource2(FakeResource):
resource_key = key
service = "my_service"
response = mock.Mock()
response.json = mock.Mock(return_value=response_body)
sess = mock.Mock()
sess.get = mock.Mock(return_value=response)
r_id = "my_id"
resp = FakeResource2.get_data_by_id(sess, resource_id=r_id)
self.assertEqual(response_value, resp)
sess.get.assert_called_with(
utils.urljoin(FakeResource2.base_path, r_id),
endpoint_filter=FakeResource2.service)
path_args = {"parent_name": "my_name"}
resp = FakeResource2.get_data_by_id(sess, resource_id=r_id,
path_args=path_args)
self.assertEqual(response_value, resp)
sess.get.assert_called_with(
utils.urljoin(FakeResource2.base_path % path_args, r_id),
endpoint_filter=FakeResource2.service)
def test_get_data_without_resource_key(self):
key = None
response_value = {"a": 1, "b": 2, "c": 3}
response_body = response_value
self._test_get_data_by_id(key, response_value, response_body)
def test_get_data_with_resource_key(self):
key = "my_key"
response_value = {"a": 1, "b": 2, "c": 3}
response_body = {key: response_value}
self._test_get_data_by_id(key, response_value, response_body)
def _test_head_data_by_id(self, key, response_value):
# HEAD returns only headers; they are wrapped under the 'headers'
# key, and the request carries an empty Accept header.
class FakeResource2(FakeResource):
resource_key = key
service = "my_service"
response = mock.Mock()
response.headers = response_value
sess = mock.Mock()
sess.head = mock.Mock(return_value=response)
r_id = "my_id"
resp = FakeResource2.head_data_by_id(sess, resource_id=r_id)
self.assertEqual({'headers': response_value}, resp)
headers = {'Accept': ''}
sess.head.assert_called_with(
utils.urljoin(FakeResource2.base_path, r_id),
endpoint_filter=FakeResource2.service,
headers=headers)
path_args = {"parent_name": "my_name"}
resp = FakeResource2.head_data_by_id(sess, resource_id=r_id,
path_args=path_args)
self.assertEqual({'headers': response_value}, resp)
headers = {'Accept': ''}
sess.head.assert_called_with(
utils.urljoin(FakeResource2.base_path % path_args, r_id),
endpoint_filter=FakeResource2.service,
headers=headers)
def test_head_data_without_resource_key(self):
key = None
response_value = {"key1": "value1", "key2": "value2"}
self._test_head_data_by_id(key, response_value)
def test_head_data_with_resource_key(self):
key = "my_key"
response_value = {"key1": "value1", "key2": "value2"}
self._test_head_data_by_id(key, response_value)
def _test_update_by_id(self, key, response_value, response_body,
attrs, json_body, response_headers=None):
# update_by_id with patch_update=True must PATCH the per-id URL,
# with and without path args interpolated into base_path.
class FakeResource2(FakeResource):
patch_update = True
resource_key = key
service = "my_service"
response = mock.Mock()
response.json = mock.Mock(return_value=response_body)
response.headers = response_headers
expected_resp = response_value.copy()
if response_headers:
# Response headers are folded into the result under 'headers'.
expected_resp.update({'headers': response_headers})
sess = mock.Mock()
sess.patch = mock.Mock(return_value=response)
r_id = "my_id"
resp = FakeResource2.update_by_id(sess, r_id, attrs)
self.assertEqual(expected_resp, resp)
sess.patch.assert_called_with(
utils.urljoin(FakeResource2.base_path, r_id),
endpoint_filter=FakeResource2.service,
json=json_body)
path_args = {"parent_name": "my_name"}
resp = FakeResource2.update_by_id(sess, r_id, attrs,
path_args=path_args)
self.assertEqual(expected_resp, resp)
sess.patch.assert_called_with(
utils.urljoin(FakeResource2.base_path % path_args, r_id),
endpoint_filter=FakeResource2.service,
json=json_body)
def test_update_without_resource_key(self):
key = None
response_value = {"a": 1, "b": 2, "c": 3}
response_body = response_value
attrs = response_value
json_body = attrs
self._test_update_by_id(key, response_value, response_body,
attrs, json_body)
def test_update_with_resource_key(self):
key = "my_key"
response_value = {"a": 1, "b": 2, "c": 3}
response_body = {key: response_value}
attrs = response_value
json_body = {key: attrs}
self._test_update_by_id(key, response_value, response_body,
attrs, json_body)
def test_update_with_response_headers(self):
key = "my_key"
response_value = {"a": 1, "b": 2, "c": 3}
response_body = {key: response_value}
response_headers = {'location': 'foo'}
attrs = response_value.copy()
json_body = {key: attrs}
self._test_update_by_id(key, response_value, response_body,
attrs, json_body,
response_headers=response_headers)
def test_delete_by_id(self):
# DELETE returns no body; like HEAD it sends an empty Accept.
class FakeResource2(FakeResource):
service = "my_service"
sess = mock.Mock()
sess.delete = mock.Mock(return_value=None)
r_id = "my_id"
resp = FakeResource2.delete_by_id(sess, r_id)
self.assertIsNone(resp)
headers = {'Accept': ''}
sess.delete.assert_called_with(
utils.urljoin(FakeResource2.base_path, r_id),
endpoint_filter=FakeResource2.service,
headers=headers)
path_args = {"parent_name": "my_name"}
resp = FakeResource2.delete_by_id(sess, r_id, path_args=path_args)
self.assertIsNone(resp)
headers = {'Accept': ''}
sess.delete.assert_called_with(
utils.urljoin(FakeResource2.base_path % path_args, r_id),
endpoint_filter=FakeResource2.service,
headers=headers)
def test_create(self):
resp = mock.Mock()
resp.json = mock.Mock(return_value=fake_body)
resp.headers = {'location': 'foo'}
self.session.post = mock.Mock(return_value=resp)
# Create resource with subset of attributes in order to
# verify create refreshes all attributes from response.
obj = FakeResource.new(parent_name=fake_parent,
name=fake_name,
enabled=True,
attr1=fake_attr1)
self.assertEqual(obj, obj.create(self.session))
self.assertFalse(obj.is_dirty)
# Only the four dirty attributes should be in the request body.
last_req = self.session.post.call_args[1]["json"][
FakeResource.resource_key]
self.assertEqual(4, len(last_req))
self.assertTrue(last_req['enabled'])
self.assertEqual(fake_parent, last_req['parent_name'])
self.assertEqual(fake_name, last_req['name'])
self.assertEqual(fake_attr1, last_req['attr1'])
self.assertTrue(obj['enabled'])
self.assertEqual(fake_name, obj['name'])
self.assertEqual(fake_parent, obj['parent_name'])
self.assertEqual(fake_attr1, obj['attr1'])
self.assertEqual(fake_attr2, obj['attr2'])
self.assertIsNone(obj['status'])
self.assertTrue(obj.enabled)
self.assertEqual(fake_id, obj.id)
self.assertEqual(fake_name, obj.name)
self.assertEqual(fake_parent, obj.parent_name)
self.assertEqual(fake_parent, obj.parent)
self.assertEqual(fake_attr1, obj.first)
self.assertEqual(fake_attr1, obj.attr1)
self.assertEqual(fake_attr2, obj.second)
self.assertEqual(fake_attr2, obj.attr2)
self.assertIsNone(obj.status)
# The 'location' response header is captured on create.
self.assertEqual('foo', obj.location)
def test_get(self):
resp = mock.Mock()
resp.json = mock.Mock(return_value=fake_body)
resp.headers = {'location': 'foo'}
self.session.get = mock.Mock(return_value=resp)
# Create resource with subset of attributes in order to
# verify get refreshes all attributes from response.
obj = FakeResource.from_id(str(fake_id))
obj['parent_name'] = fake_parent
self.assertEqual(obj, obj.get(self.session))
# Check that the proper URL is being built.
self.assertCalledURL(self.session.get,
os.path.join(fake_base_path % fake_arguments,
str(fake_id))[1:])
self.assertTrue(obj['enabled'])
self.assertEqual(fake_name, obj['name'])
self.assertEqual(fake_parent, obj['parent_name'])
self.assertEqual(fake_attr1, obj['attr1'])
self.assertEqual(fake_attr2, obj['attr2'])
self.assertIsNone(obj['status'])
self.assertTrue(obj.enabled)
self.assertEqual(fake_id, obj.id)
self.assertEqual(fake_name, obj.name)
self.assertEqual(fake_parent, obj.parent_name)
self.assertEqual(fake_parent, obj.parent)
self.assertEqual(fake_attr1, obj.first)
self.assertEqual(fake_attr1, obj.attr1)
self.assertEqual(fake_attr2, obj.second)
self.assertEqual(fake_attr2, obj.attr2)
self.assertIsNone(obj.status)
# Plain get() does not capture response headers.
self.assertIsNone(obj.location)
def test_get_by_id(self):
resp = mock.Mock()
resp.json = mock.Mock(return_value=fake_body)
self.session.get = mock.Mock(return_value=resp)
obj = FakeResource.get_by_id(self.session, fake_id,
path_args=fake_arguments)
# Check that the proper URL is being built.
self.assertCalledURL(self.session.get,
os.path.join(fake_base_path % fake_arguments,
str(fake_id))[1:])
self.assertEqual(fake_id, obj.id)
self.assertEqual(fake_name, obj['name'])
self.assertEqual(fake_attr1, obj['attr1'])
self.assertEqual(fake_attr2, obj['attr2'])
self.assertEqual(fake_name, obj.name)
self.assertEqual(fake_attr1, obj.first)
self.assertEqual(fake_attr2, obj.second)
def test_get_by_id_with_headers(self):
# include_headers=True folds response headers into the resource
# under 'headers' and exposes them via header props.
header1 = "fake-value1"
header2 = "fake-value2"
headers = {"header1": header1,
"header2": header2}
resp = mock.Mock(headers=headers)
resp.json = mock.Mock(return_value=fake_body)
self.session.get = mock.Mock(return_value=resp)
class FakeResource2(FakeResource):
header1 = resource.header("header1")
header2 = resource.header("header2")
obj = FakeResource2.get_by_id(self.session, fake_id,
path_args=fake_arguments,
include_headers=True)
self.assertCalledURL(self.session.get,
os.path.join(fake_base_path % fake_arguments,
str(fake_id))[1:])
self.assertEqual(fake_id, obj.id)
self.assertEqual(fake_name, obj['name'])
self.assertEqual(fake_attr1, obj['attr1'])
self.assertEqual(fake_attr2, obj['attr2'])
self.assertEqual(header1, obj['headers']['header1'])
self.assertEqual(header2, obj['headers']['header2'])
self.assertEqual(fake_name, obj.name)
self.assertEqual(fake_attr1, obj.first)
self.assertEqual(fake_attr2, obj.second)
self.assertEqual(header1, obj.header1)
self.assertEqual(header2, obj.header2)
def test_head_by_id(self):
# head_by_id populates only the headers dict and header props.
class FakeResource2(FakeResource):
header1 = resource.header("header1")
header2 = resource.header("header2")
resp = mock.Mock(headers={"header1": "one", "header2": "two"})
self.session.head = mock.Mock(return_value=resp)
obj = FakeResource2.head_by_id(self.session, fake_id,
path_args=fake_arguments)
self.assertCalledURL(self.session.head,
os.path.join(fake_base_path % fake_arguments,
str(fake_id))[1:])
self.assertEqual('one', obj['headers']['header1'])
self.assertEqual('two', obj['headers']['header2'])
self.assertEqual('one', obj.header1)
self.assertEqual('two', obj.header2)
def test_patch_update(self):
# With patch_update=True, update() issues a PATCH and refreshes all
# attributes from the response.
class FakeResourcePatch(FakeResource):
patch_update = True
resp = mock.Mock()
resp.json = mock.Mock(return_value=fake_body)
resp.headers = {'location': 'foo'}
self.session.patch = mock.Mock(return_value=resp)
# Create resource with subset of attributes in order to
# verify update refreshes all attributes from response.
obj = FakeResourcePatch.new(id=fake_id, parent_name=fake_parent,
name=fake_name, attr1=fake_attr1)
self.assertTrue(obj.is_dirty)
self.assertEqual(obj, obj.update(self.session))
self.assertFalse(obj.is_dirty)
self.assertCalledURL(self.session.patch,
os.path.join(fake_base_path % fake_arguments,
str(fake_id))[1:])
# The id itself must not be part of the request body.
last_req = self.session.patch.call_args[1]["json"][
FakeResource.resource_key]
self.assertEqual(3, len(last_req))
self.assertEqual(fake_parent, last_req['parent_name'])
self.assertEqual(fake_name, last_req['name'])
self.assertEqual(fake_attr1, last_req['attr1'])
self.assertTrue(obj['enabled'])
self.assertEqual(fake_name, obj['name'])
self.assertEqual(fake_parent, obj['parent_name'])
self.assertEqual(fake_attr1, obj['attr1'])
self.assertEqual(fake_attr2, obj['attr2'])
self.assertIsNone(obj['status'])
self.assertTrue(obj.enabled)
self.assertEqual(fake_id, obj.id)
self.assertEqual(fake_name, obj.name)
self.assertEqual(fake_parent, obj.parent_name)
self.assertEqual(fake_parent, obj.parent)
self.assertEqual(fake_attr1, obj.first)
self.assertEqual(fake_attr1, obj.attr1)
self.assertEqual(fake_attr2, obj.second)
self.assertEqual(fake_attr2, obj.attr2)
self.assertIsNone(obj.status)
self.assertEqual('foo', obj.location)
def test_put_update(self):
class FakeResourcePut(FakeResource):
# This is False by default, but explicit for this test.
patch_update = False
resp = mock.Mock()
resp.json = mock.Mock(return_value=fake_body)
resp.headers = {'location': 'foo'}
self.session.put = mock.Mock(return_value=resp)
# Create resource with subset of attributes in order to
# verify update refreshes all attributes from response.
obj = FakeResourcePut.new(id=fake_id, parent_name=fake_parent,
name=fake_name, attr1=fake_attr1)
self.assertTrue(obj.is_dirty)
self.assertEqual(obj, obj.update(self.session))
self.assertFalse(obj.is_dirty)
self.assertCalledURL(self.session.put,
os.path.join(fake_base_path % fake_arguments,
str(fake_id))[1:])
last_req = self.session.put.call_args[1]["json"][
FakeResource.resource_key]
self.assertEqual(3, len(last_req))
self.assertEqual(fake_parent, last_req['parent_name'])
self.assertEqual(fake_name, last_req['name'])
self.assertEqual(fake_attr1, last_req['attr1'])
self.assertTrue(obj['enabled'])
self.assertEqual(fake_name, obj['name'])
self.assertEqual(fake_parent, obj['parent_name'])
self.assertEqual(fake_attr1, obj['attr1'])
self.assertEqual(fake_attr2, obj['attr2'])
self.assertIsNone(obj['status'])
self.assertTrue(obj.enabled)
self.assertEqual(fake_id, obj.id)
self.assertEqual(fake_name, obj.name)
self.assertEqual(fake_parent, obj.parent_name)
self.assertEqual(fake_parent, obj.parent)
self.assertEqual(fake_attr1, obj.first)
self.assertEqual(fake_attr1, obj.attr1)
self.assertEqual(fake_attr2, obj.second)
self.assertEqual(fake_attr2, obj.attr2)
self.assertIsNone(obj.status)
self.assertEqual('foo', obj.location)
def test_update_early_exit(self):
obj = FakeResource()
obj._dirty = [] # Bail out early if there's nothing to update.
self.assertIsNone(obj.update("session"))
def test_update_no_id_attribute(self):
# update() must still succeed when the id is not among the dirty
# attributes sent to update_by_id.
obj = FakeResource.existing(id=1, attr="value1",
parent_name=fake_parent)
obj.first = "value2"
obj.update_by_id = mock.Mock(return_value=dict())
self.assertEqual(obj, obj.update("session"))
def test_delete(self):
obj = FakeResource({"id": fake_id, "parent_name": fake_parent})
obj.delete(self.session)
# Check that the proper URL is being built.
self.assertCalledURL(self.session.delete,
os.path.join(fake_base_path % fake_arguments,
str(fake_id))[1:])
def _test_list(self, resource_class):
# Paginated list: the first GET returns a full page, the second an
# empty sentinel page, which ends iteration.
results = [fake_data.copy(), fake_data.copy(), fake_data.copy()]
for i in range(len(results)):
results[i]['id'] = fake_id + i
if resource_class.resources_key is not None:
body = {resource_class.resources_key:
self._get_expected_results()}
sentinel = {resource_class.resources_key: []}
else:
body = self._get_expected_results()
sentinel = []
resp1 = mock.Mock()
resp1.json = mock.Mock(return_value=body)
resp2 = mock.Mock()
resp2.json = mock.Mock(return_value=sentinel)
self.session.get.side_effect = [resp1, resp2]
objs = list(resource_class.list(self.session, path_args=fake_arguments,
paginated=True))
# The follow-up request must page from the last seen id.
params = {'limit': 3, 'marker': results[-1]['id']}
self.assertEqual(params, self.session.get.call_args[1]['params'])
self.assertEqual(3, len(objs))
for obj in objs:
self.assertIn(obj.id, range(fake_id, fake_id + 3))
self.assertEqual(fake_name, obj['name'])
self.assertEqual(fake_name, obj.name)
self.assertIsInstance(obj, FakeResource)
def _get_expected_results(self):
# Three copies of fake_data with consecutive ids.
results = [fake_data.copy(), fake_data.copy(), fake_data.copy()]
for i in range(len(results)):
results[i]['id'] = fake_id + i
return results
def test_list_keyed_resource(self):
self._test_list(FakeResource)
def test_list_non_keyed_resource(self):
self._test_list(FakeResourceNoKeys)
def _test_list_call_count(self, paginated):
# When fewer results than the limit come back, list() must not
# request another page regardless of pagination mode.
results = [fake_data.copy(), fake_data.copy(), fake_data.copy()]
resp = mock.Mock()
resp.json = mock.Mock(return_value={fake_resources: results})
attrs = {"get.return_value": resp}
session = mock.Mock(**attrs)
list(FakeResource.list(session, params={'limit': len(results) + 1},
path_args=fake_arguments,
paginated=paginated))
# Ensure we only made one call to complete this.
self.assertEqual(1, session.get.call_count)
def test_list_bail_out(self):
# When we get less data than limit, make sure we made one call
self._test_list_call_count(True)
def test_list_nonpaginated(self):
# When we call with paginated=False, make sure we made one call
self._test_list_call_count(False)
def test_determine_limit(self):
# list() with no explicit limit keeps paging until a short page.
full_page = [fake_data.copy(), fake_data.copy(), fake_data.copy()]
last_page = [fake_data.copy()]
session = mock.Mock()
session.get = mock.Mock()
full_response = mock.Mock()
response_body = {FakeResource.resources_key: full_page}
full_response.json = mock.Mock(return_value=response_body)
last_response = mock.Mock()
response_body = {FakeResource.resources_key: last_page}
last_response.json = mock.Mock(return_value=response_body)
pages = [full_response, full_response, last_response]
session.get.side_effect = pages
# Don't specify a limit. Resource.list will determine the limit
results = list(FakeResource.list(session, path_args=fake_arguments,
paginated=True))
self.assertEqual(session.get.call_count, len(pages))
self.assertEqual(len(full_page + full_page + last_page), len(results))
def test_empty_list(self):
# An empty first page yields no results and no extra requests.
page = []
session = mock.Mock()
session.get = mock.Mock()
full_response = mock.Mock()
response_body = {FakeResource.resources_key: page}
full_response.json = mock.Mock(return_value=response_body)
pages = [full_response]
session.get.side_effect = pages
results = list(FakeResource.list(session, path_args=fake_arguments,
paginated=True))
self.assertEqual(session.get.call_count, len(pages))
self.assertEqual(len(page), len(results))
def test_attrs_name(self):
# Unset name reads as None and can be deleted without error.
obj = FakeResource()
self.assertIsNone(obj.name)
del obj.name
def test_to_dict(self):
# to_dict() returns a plain dict of attrs, excluding 'headers'.
kwargs = {
'enabled': True,
'name': 'FOO',
'parent': 'dad',
'attr1': 'BAR',
'attr2': ['ZOO', 'BAZ'],
'status': 'Active',
'headers': {
'key': 'value'
}
}
obj = FakeResource(kwargs)
res = obj.to_dict()
self.assertIsInstance(res, dict)
self.assertTrue(res['enabled'])
self.assertEqual('FOO', res['name'])
self.assertEqual('dad', res['parent'])
self.assertEqual('BAR', res['attr1'])
self.assertEqual(['ZOO', 'BAZ'], res['attr2'])
self.assertEqual('Active', res['status'])
self.assertNotIn('headers', res)
def test_composite_attr_happy(self):
# A composite prop resolves from its primary source attribute.
obj = FakeResource.existing(**{'attr3': '3'})
try:
self.assertEqual('3', obj.third)
except AttributeError:
self.fail("third was not found as expected")
def test_composite_attr_fallback(self):
# ...and falls back to its alternate attribute when needed.
obj = FakeResource.existing(**{'attr_three': '3'})
try:
self.assertEqual('3', obj.third)
except AttributeError:
self.fail("third was not found in fallback as expected")
def test_id_del(self):
# Deleting .id removes the configured id_attribute from _attrs.
class Test(resource.Resource):
id_attribute = "my_id"
attrs = {"my_id": 100}
t = Test(attrs=attrs)
self.assertEqual(attrs["my_id"], t.id)
del t.id
self.assertTrue(Test.id_attribute not in t._attrs)
def test_from_name_with_name(self):
name = "Ernie Banks"
obj = FakeResource.from_name(name)
self.assertEqual(name, obj.name)
def test_from_id_with_name(self):
# from_id() with a plain string treats it as the id.
name = "Sandy Koufax"
obj = FakeResource.from_id(name)
self.assertEqual(name, obj.id)
def test_from_id_with_object(self):
# from_id() with an existing resource returns it unchanged.
name = "Mickey Mantle"
obj = FakeResource.new(name=name)
new_obj = FakeResource.from_id(obj)
self.assertIs(new_obj, obj)
self.assertEqual(obj.name, new_obj.name)
def test_from_id_with_bad_value(self):
# Anything other than a string or Resource is rejected.
def should_raise():
FakeResource.from_id(3.14)
self.assertThat(should_raise, matchers.raises(ValueError))
def test_dirty_list(self):
    """Dirty tracking: attribute assignment, item assignment, and
    constructor arguments must each mark the resource dirty.
    """
    class Test(resource.Resource):
        attr = resource.prop("attr")

    # Attribute assignment.
    sot1 = Test()
    self.assertFalse(sot1.is_dirty)
    sot1.attr = 1
    self.assertTrue(sot1.is_dirty)

    # Item assignment. Bug fix: the original asserted sot1.is_dirty
    # here (already dirty from above), so this path was never tested.
    sot2 = Test()
    sot2["attr"] = 1
    self.assertTrue(sot2.is_dirty)

    # Constructor arguments.
    sot3 = Test({"attr": 1})
    self.assertTrue(sot3.is_dirty)
def test_update_attrs(self):
# update_attrs accepts a mapping and/or kwargs, handles both prop
# names and raw attribute keys, applies prop type coercion, and
# marks the resource dirty.
class Test(resource.Resource):
moe = resource.prop("the-attr")
larry = resource.prop("the-attr2")
curly = resource.prop("the-attr3", type=int)
shemp = resource.prop("the-attr4")
value1 = "one"
value2 = "two"
value3 = "3"
value4 = "fore"
value5 = "fiver"
sot = Test({"the-attr": value1})
sot.update_attrs({"the-attr2": value2, "notprop": value4})
self.assertTrue(sot.is_dirty)
self.assertEqual(value1, sot.moe)
self.assertEqual(value1, sot["the-attr"])
self.assertEqual(value2, sot.larry)
self.assertEqual(value4, sot.notprop)
sot._reset_dirty()
# Kwargs addressed by prop name are coerced through the prop type.
sot.update_attrs(curly=value3)
self.assertTrue(sot.is_dirty)
self.assertEqual(int, type(sot.curly))
self.assertEqual(int(value3), sot.curly)
sot._reset_dirty()
sot.update_attrs(**{"the-attr4": value5})
self.assertTrue(sot.is_dirty)
self.assertEqual(value5, sot.shemp)
def test_get_id(self):
# get_id accepts either a raw id or a resource instance.
class Test(resource.Resource):
pass
ID = "an id"
res = Test({"id": ID})
self.assertEqual(ID, resource.Resource.get_id(ID))
self.assertEqual(ID, resource.Resource.get_id(res))
def test_convert_ids(self):
# convert_ids flattens Resource values in a mapping to their ids,
# leaving other values (and None input) untouched.
class TestResourceFoo(resource.Resource):
pass
class TestResourceBar(resource.Resource):
pass
resfoo = TestResourceFoo({'id': 'FAKEFOO'})
resbar = TestResourceBar({'id': 'FAKEBAR'})
self.assertIsNone(resource.Resource.convert_ids(None))
attrs = {
'key1': 'value1'
}
self.assertEqual(attrs, resource.Resource.convert_ids(attrs))
attrs = {
'foo': resfoo,
'bar': resbar,
'other': 'whatever',
}
res = resource.Resource.convert_ids(attrs)
self.assertEqual('FAKEFOO', res['foo'])
self.assertEqual('FAKEBAR', res['bar'])
self.assertEqual('whatever', res['other'])
def test_repr(self):
# repr() must round-trip: eval'ing it rebuilds an equal resource.
fr = FakeResource()
fr._loaded = False
fr.first = "hey"
fr.second = "hi"
fr.third = "nah"
the_repr = repr(fr)
# Strip the module prefix so eval can resolve FakeResource locally.
the_repr = the_repr.replace('ecl.tests.unit.test_resource.', '')
result = eval(the_repr)
self.assertEqual(fr._loaded, result._loaded)
self.assertEqual(fr.first, result.first)
self.assertEqual(fr.second, result.second)
self.assertEqual(fr.third, result.third)
def test_id_attribute(self):
# .id always reflects whichever attribute id_attribute names.
faker = FakeResource(fake_data)
self.assertEqual(fake_id, faker.id)
faker.id_attribute = 'name'
self.assertEqual(fake_name, faker.id)
faker.id_attribute = 'attr1'
self.assertEqual(fake_attr1, faker.id)
faker.id_attribute = 'attr2'
self.assertEqual(fake_attr2, faker.id)
faker.id_attribute = 'id'
self.assertEqual(fake_id, faker.id)
def test_name_attribute(self):
# .name proxies to whichever attribute name_attribute names, for
# both reads and writes.
class Person_ES(resource.Resource):
name_attribute = "nombre"
nombre = resource.prop('nombre')
name = "Brian"
args = {'nombre': name}
person = Person_ES(args)
self.assertEqual(name, person.nombre)
self.assertEqual(name, person.name)
new_name = "Julien"
person.name = new_name
self.assertEqual(new_name, person.nombre)
self.assertEqual(new_name, person.name)
def test_boolstr_prop(self):
# Boolean-string props read as real booleans and reject values
# that are not recognized boolean representations.
faker = FakeResource(fake_data)
self.assertTrue(faker.enabled)
self.assertTrue(faker['enabled'])
faker._attrs['enabled'] = False
self.assertFalse(faker.enabled)
self.assertFalse(faker['enabled'])
def set_invalid():
faker.enabled = 'INVALID'
self.assertRaises(ValueError, set_invalid)
# Tests for the Mapping protocol on Resource: __getitem__/__setitem__/
# __delitem__/__len__/__iter__ over _attrs, with dirty tracking on
# mutation.
class ResourceMapping(base.TestCase):
def test__getitem(self):
value = 10
class Test(resource.Resource):
attr = resource.prop("attr")
t = Test(attrs={"attr": value})
self.assertEqual(value, t["attr"])
def test__setitem__existing_item_changed(self):
# Setting a key stores the value and marks the key dirty.
class Test(resource.Resource):
pass
t = Test()
key = "attr"
value = 1
t[key] = value
self.assertEqual(value, t._attrs[key])
self.assertTrue(key in t._dirty)
def test__setitem__existing_item_unchanged(self):
# Re-assigning the same value must NOT mark the key dirty.
class Test(resource.Resource):
pass
key = "attr"
value = 1
t = Test(attrs={key: value})
t._reset_dirty()
t[key] = value
self.assertEqual(value, t._attrs[key])
self.assertTrue(key not in t._dirty)
def test__setitem__new_item(self):
class Test(resource.Resource):
pass
t = Test()
key = "attr"
value = 1
t[key] = value
self.assertEqual(value, t._attrs[key])
self.assertTrue(key in t._dirty)
def test__delitem__(self):
# Deleting a key removes it from _attrs but records it as dirty so
# the deletion can be propagated on update.
class Test(resource.Resource):
pass
key = "attr"
value = 1
t = Test(attrs={key: value})
del t[key]
self.assertTrue(key not in t._attrs)
self.assertTrue(key in t._dirty)
def test__len__(self):
class Test(resource.Resource):
pass
attrs = {"a": 1, "b": 2, "c": 3}
t = Test(attrs=attrs)
self.assertEqual(len(attrs.keys()), len(t))
def test__iter__(self):
class Test(resource.Resource):
pass
attrs = {"a": 1, "b": 2, "c": 3}
t = Test(attrs=attrs)
for attr in t:
self.assertEqual(attrs[attr], t[attr])
def _test_resource_serialization(self, session_method, resource_method):
    """Verify Resource-typed attribute values serialize to JSON when
    sent in a create/update request body.

    The session's HTTP method is stubbed so it json.dumps() the
    request body and fails the test if that raises TypeError.
    """
    attr_type = resource.Resource

    class Test(resource.Resource):
        allow_create = True
        allow_update = True  # required so the update_by_id path is permitted
        attr = resource.prop("attr", type=attr_type)

    the_id = 123
    sot = Test()
    sot.attr = resource.Resource({"id": the_id})
    self.assertEqual(attr_type, type(sot.attr))

    def fake_call(*args, **kwargs):
        attrs = kwargs["json"]
        try:
            json.dumps(attrs)
        except TypeError as e:
            self.fail("Unable to serialize _attrs: %s" % e)
        resp = mock.Mock()
        resp.json = mock.Mock(return_value=attrs)
        return resp

    session = mock.Mock()
    setattr(session, session_method, mock.Mock(side_effect=fake_call))
    # Bug fix: the original invoked create_by_id/update_by_id on the
    # *mock session* itself, which only records a no-op call -- the
    # resource code and fake_call never ran, so nothing was verified.
    # Call the classmethods on the resource class instead.
    if resource_method == "create_by_id":
        Test.create_by_id(session, sot._attrs)
    elif resource_method == "update_by_id":
        Test.update_by_id(session, None, sot._attrs)
def test_create_serializes_resource_types(self):
# POST body must be JSON-serializable even with Resource attrs.
self._test_resource_serialization("post", "create_by_id")
def test_update_serializes_resource_types(self):
# PATCH body must be JSON-serializable even with Resource attrs.
self._test_resource_serialization("patch", "update_by_id")
class FakeResponse(object):
    """Minimal stand-in for an HTTP response object.

    Stores an arbitrary payload and returns it verbatim from json(),
    mirroring the subset of the requests Response API the find() tests
    need.
    """

    def __init__(self, response):
        # Keep the payload under the attribute name callers may inspect.
        self.body = response

    def json(self):
        """Return the stored payload, as requests.Response.json() would."""
        return self.body
# Tests for Resource.find(): lookup by id first, then fall back to
# listing and matching on the name attribute.
class TestFind(base.TestCase):
NAME = 'matrix'
ID = 'Fishburne'
PROP = 'attribute2'
def setUp(self):
super(TestFind, self).setUp()
self.mock_session = mock.Mock()
self.mock_get = mock.Mock()
self.mock_session.get = self.mock_get
# Canned resource body shared by the individual tests below.
self.matrix = {'id': self.ID, 'name': self.NAME, 'prop': self.PROP}
def test_name(self):
# The id lookup 404s, so find() falls back to a name-filtered list.
self.mock_get.side_effect = [
exceptions.NotFoundException(),
FakeResponse({FakeResource.resources_key: [self.matrix]})
]
result = FakeResource.find(self.mock_session, self.NAME,
path_args=fake_arguments)
self.assertEqual(self.NAME, result.name)
self.assertEqual(self.PROP, result.prop)
def test_id(self):
self.mock_get.side_effect = [
FakeResponse({FakeResource.resource_key: self.matrix})
]
result = FakeResource.find(self.mock_session, self.ID,
path_args=fake_arguments)
self.assertEqual(self.ID, result.id)
self.assertEqual(self.PROP, result.prop)
path = "fakes/" + fake_parent + "/data/" + self.ID
self.mock_get.assert_any_call(path, endpoint_filter=None)
def test_id_no_retrieve(self):
self.mock_get.side_effect = [
FakeResponse({FakeResource.resources_key: [self.matrix]})
]
class NoRetrieveResource(FakeResource):
allow_retrieve = False
result = NoRetrieveResource.find(self.mock_session, self.ID,
path_args=fake_arguments)
self.assertEqual(self.ID, result.id)
self.assertEqual(self.PROP, result.prop)
def test_dups(self):
dupe = self.matrix.copy()
dupe['id'] = 'different'
self.mock_get.side_effect = [
exceptions.NotFoundException(),
FakeResponse({FakeResource.resources_key: [self.matrix, dupe]})
]
self.assertRaises(exceptions.DuplicateResource, FakeResource.find,
self.mock_session, self.NAME)
def test_id_attribute_find(self):
floater = {'ip_address': "127.0.0.1", 'prop': self.PROP}
self.mock_get.side_effect = [
FakeResponse({FakeResource.resource_key: floater})
]
FakeResource.id_attribute = 'ip_address'
FakeResource.id_attribute = 'ip_address'
result = FakeResource.find(self.mock_session, "127.0.0.1",
path_args=fake_arguments)
self.assertEqual("127.0.0.1", result.id)
self.assertEqual(self.PROP, result.prop)
FakeResource.id_attribute = 'id'
p = {'ip_address': "127.0.0.1"}
path = fake_path + "?limit=2"
self.mock_get.called_once_with(path, params=p, endpoint_filter=None)
def test_nada(self):
self.mock_get.side_effect = [
exceptions.NotFoundException(),
FakeResponse({FakeResource.resources_key: []})
]
self.assertIsNone(FakeResource.find(self.mock_session, self.NAME))
def test_no_name(self):
self.mock_get.side_effect = [
exceptions.NotFoundException(),
FakeResponse({FakeResource.resources_key: [self.matrix]})
]
FakeResource.name_attribute = None
self.assertIsNone(FakeResource.find(self.mock_session, self.NAME))
def test_nada_not_ignored(self):
self.mock_get.side_effect = [
exceptions.NotFoundException(),
FakeResponse({FakeResource.resources_key: []})
]
self.assertRaises(exceptions.ResourceNotFound, FakeResource.find,
self.mock_session, self.NAME, ignore_missing=False)
class TestWaitForStatus(base.TestCase):
    """Tests for resource.wait_for_status polling behavior."""

    def setUp(self):
        super(TestWaitForStatus, self).setUp()
        self.sess = mock.Mock()
        # Fixtures moved here from an overridden __init__: TestCase
        # subclasses should build per-test state in setUp, not __init__.
        # Behavior is unchanged -- each test still sees fresh responses.
        self.build = FakeResponse(self.body_with_status(fake_body, 'BUILD'))
        self.active = FakeResponse(self.body_with_status(fake_body, 'ACTIVE'))
        self.error = FakeResponse(self.body_with_status(fake_body, 'ERROR'))

    def body_with_status(self, body, status):
        """Return a deep copy of *body* with its status field replaced."""
        body_copy = copy.deepcopy(body)
        body_copy[fake_resource]['status'] = status
        return body_copy

    def test_wait_for_status_nothing(self):
        # Resource already in the desired state: no GET may be issued.
        self.sess.get = mock.Mock()
        sot = FakeResource.new(**fake_data)
        sot.status = 'ACTIVE'
        self.assertEqual(sot, resource.wait_for_status(
            self.sess, sot, 'ACTIVE', [], 1, 2))
        self.assertEqual([], self.sess.get.call_args_list)

    def test_wait_for_status(self):
        # BUILD on the first poll, ACTIVE on the second: wait succeeds.
        self.sess.get = mock.Mock()
        self.sess.get.side_effect = [self.build, self.active]
        sot = FakeResource.new(**fake_data)
        self.assertEqual(sot, resource.wait_for_status(
            self.sess, sot, 'ACTIVE', [], 1, 2))

    def test_wait_for_status_timeout(self):
        # Status never leaves BUILD within the allotted polls: timeout.
        self.sess.get = mock.Mock()
        self.sess.get.side_effect = [self.build, self.build]
        sot = FakeResource.new(**fake_data)
        self.assertRaises(exceptions.ResourceTimeout, resource.wait_for_status,
                          self.sess, sot, 'ACTIVE', ['ERROR'], 1, 2)

    def test_wait_for_status_failures(self):
        # Transition into a listed failure state raises ResourceFailure.
        self.sess.get = mock.Mock()
        self.sess.get.side_effect = [self.build, self.error]
        sot = FakeResource.new(**fake_data)
        self.assertRaises(exceptions.ResourceFailure, resource.wait_for_status,
                          self.sess, sot, 'ACTIVE', ['ERROR'], 1, 2)

    def test_wait_for_status_no_status(self):
        # A resource without a status attribute cannot be waited on.
        class FakeResourceNoStatus(resource.Resource):
            allow_retrieve = True

        sot = FakeResourceNoStatus.new(id=123)
        self.assertRaises(AttributeError, resource.wait_for_status,
                          self.sess, sot, 'ACTIVE', ['ERROR'], 1, 2)
class TestWaitForDelete(base.TestCase):
    """Tests for resource.wait_for_delete polling behavior."""

    def test_wait_for_delete(self):
        session = mock.Mock()
        sot = FakeResource.new(**fake_data)
        # First poll still finds the resource; the second raises 404,
        # which wait_for_delete treats as "successfully gone".
        sot.get = mock.Mock()
        sot.get.side_effect = [
            sot,
            exceptions.NotFoundException()]
        self.assertEqual(sot, resource.wait_for_delete(session, sot, 1, 2))

    def test_wait_for_delete_fail(self):
        session = mock.Mock()
        sot = FakeResource.new(**fake_data)
        # The resource never disappears, so the wait must time out.
        sot.get = mock.Mock(return_value=sot)
        self.assertRaises(exceptions.ResourceTimeout, resource.wait_for_delete,
                          session, sot, 1, 2)
| true | true |
f720df1f8976d6666a660d614734f5c3010f2b3d | 5,980 | py | Python | deep-learning-for-image-processing-master/pytorch_object_detection/train_coco_dataset/network_files/boxes.py | zpwithme/zzzzpppp | 0f5df647f1e9d6cb8c01b3fc7df25ee543714af3 | [
"MIT"
] | null | null | null | deep-learning-for-image-processing-master/pytorch_object_detection/train_coco_dataset/network_files/boxes.py | zpwithme/zzzzpppp | 0f5df647f1e9d6cb8c01b3fc7df25ee543714af3 | [
"MIT"
] | null | null | null | deep-learning-for-image-processing-master/pytorch_object_detection/train_coco_dataset/network_files/boxes.py | zpwithme/zzzzpppp | 0f5df647f1e9d6cb8c01b3fc7df25ee543714af3 | [
"MIT"
] | 2 | 2021-06-26T16:53:38.000Z | 2021-08-29T22:16:20.000Z | import torch
from typing import Tuple
from torch import Tensor
import torchvision
def nms(boxes, scores, iou_threshold):
    # type: (Tensor, Tensor, float) -> Tensor
    """Suppress overlapping detections via non-maximum suppression.

    Boxes are given as ``(x1, y1, x2, y2)``.  Starting from the highest
    scoring box, any remaining box whose IoU with an already-kept box
    exceeds ``iou_threshold`` is discarded.

    Args:
        boxes (Tensor[N, 4]): candidate boxes.
        scores (Tensor[N]): confidence score for each box.
        iou_threshold (float): overlap above which a box is suppressed.

    Returns:
        Tensor: int64 indices of the kept boxes, sorted by decreasing score.
    """
    # Delegate to torchvision's registered C++/CUDA kernel.
    return torch.ops.torchvision.nms(boxes, scores, iou_threshold)
def batched_nms(boxes, scores, idxs, iou_threshold):
    # type: (Tensor, Tensor, Tensor, float) -> Tensor
    """Category-aware non-maximum suppression.

    Boxes carrying different ``idxs`` values (category or feature level)
    never suppress each other.

    Args:
        boxes (Tensor[N, 4]): boxes in ``(x1, y1, x2, y2)`` format.
        scores (Tensor[N]): confidence score for each box.
        idxs (Tensor[N]): category/level index for each box.
        iou_threshold (float): overlap above which a box is suppressed.

    Returns:
        Tensor: int64 indices of the kept boxes, sorted by decreasing score.
    """
    if boxes.numel() == 0:
        return torch.empty((0,), dtype=torch.int64, device=boxes.device)
    # Strategy: run a single NMS pass, but first shift every box by a
    # per-category offset larger than any coordinate, so boxes from
    # different categories can never overlap.
    largest_coordinate = boxes.max()
    # ``idxs.to(boxes)`` only matches dtype/device of ``boxes``.
    per_box_offset = idxs.to(boxes) * (largest_coordinate + 1)
    shifted_boxes = boxes + per_box_offset[:, None]
    return nms(shifted_boxes, scores, iou_threshold)
def remove_small_boxes(boxes, min_size):
    # type: (Tensor, float) -> Tensor
    """Return indices of boxes whose width and height both reach ``min_size``.

    Args:
        boxes (Tensor[N, 4]): boxes in ``(x1, y1, x2, y2)`` format.
        min_size (float): minimum side length to keep a box.

    Returns:
        Tensor: indices of the boxes with both sides >= ``min_size``.
    """
    widths = boxes[:, 2] - boxes[:, 0]
    heights = boxes[:, 3] - boxes[:, 1]
    # Both dimensions must meet the threshold.
    big_enough = (widths >= min_size) & (heights >= min_size)
    return torch.where(big_enough)[0]
def clip_boxes_to_image(boxes, size):
    # type: (Tensor, Tuple[int, int]) -> Tensor
    """
    Clip boxes so that they lie inside an image of size `size`.
    Out-of-bounds coordinates are moved onto the image border.
    Arguments:
        boxes (Tensor[N, 4]): boxes in (x1, y1, x2, y2) format
        size (Tuple[height, width]): size of the image
    Returns:
        clipped_boxes (Tensor[N, 4])
    """
    dim = boxes.dim()
    boxes_x = boxes[..., 0::2]  # x1, x2
    boxes_y = boxes[..., 1::2]  # y1, y2
    height, width = size
    # Tracing branch: emulate clamp with elementwise max/min against scalar
    # tensors -- presumably for export compatibility; TODO confirm.
    if torchvision._is_tracing():
        boxes_x = torch.max(boxes_x, torch.tensor(0, dtype=boxes.dtype, device=boxes.device))
        boxes_x = torch.min(boxes_x, torch.tensor(width, dtype=boxes.dtype, device=boxes.device))
        boxes_y = torch.max(boxes_y, torch.tensor(0, dtype=boxes.dtype, device=boxes.device))
        boxes_y = torch.min(boxes_y, torch.tensor(height, dtype=boxes.dtype, device=boxes.device))
    else:
        boxes_x = boxes_x.clamp(min=0, max=width)  # restrict x to [0, width]
        boxes_y = boxes_y.clamp(min=0, max=height)  # restrict y to [0, height]
    # Interleave x/y back into the original (x1, y1, x2, y2) layout.
    clipped_boxes = torch.stack((boxes_x, boxes_y), dim=dim)
    return clipped_boxes.reshape(boxes.shape)
def box_area(boxes):
    """Compute the area of each bounding box.

    Args:
        boxes (Tensor[N, 4]): boxes in ``(x1, y1, x2, y2)`` format.

    Returns:
        Tensor[N]: width * height for each box.
    """
    widths = boxes[:, 2] - boxes[:, 0]
    heights = boxes[:, 3] - boxes[:, 1]
    return widths * heights
def box_iou(boxes1, boxes2):
    """Return pairwise intersection-over-union (Jaccard index) of boxes.

    Both inputs use ``(x1, y1, x2, y2)`` format.

    Args:
        boxes1 (Tensor[N, 4])
        boxes2 (Tensor[M, 4])

    Returns:
        Tensor[N, M]: IoU for every pair drawn from boxes1 x boxes2.
    """
    def _area(b):
        # (x2 - x1) * (y2 - y1) per box.
        return (b[:, 2] - b[:, 0]) * (b[:, 3] - b[:, 1])

    area1 = _area(boxes1)
    area2 = _area(boxes2)
    # Broadcasting yields the [N, M, 2] corner grids of the intersections.
    top_left = torch.max(boxes1[:, None, :2], boxes2[:, :2])
    bottom_right = torch.min(boxes1[:, None, 2:], boxes2[:, 2:])
    extents = (bottom_right - top_left).clamp(min=0)  # [N, M, 2]
    intersection = extents[..., 0] * extents[..., 1]  # [N, M]
    return intersection / (area1[:, None] + area2 - intersection)
| 32.857143 | 98 | 0.634783 | import torch
from typing import Tuple
from torch import Tensor
import torchvision
def nms(boxes, scores, iou_threshold):
    """Run torchvision's non-maximum suppression kernel.

    Keeps the highest-scoring boxes, discarding any box whose IoU with an
    already-kept box exceeds ``iou_threshold``.  Boxes are (x1, y1, x2, y2).
    Returns int64 indices of the kept boxes.
    """
    return torch.ops.torchvision.nms(boxes, scores, iou_threshold)
def batched_nms(boxes, scores, idxs, iou_threshold):
    """Category-aware NMS: boxes with different ``idxs`` never suppress
    each other.  Returns int64 indices of the kept boxes."""
    if boxes.numel() == 0:
        return torch.empty((0,), dtype=torch.int64, device=boxes.device)
    # Offset each box by a per-category amount larger than any coordinate,
    # so boxes of different categories cannot overlap in one NMS pass.
    max_coordinate = boxes.max()
    offsets = idxs.to(boxes) * (max_coordinate + 1)  # .to() matches dtype/device
    boxes_for_nms = boxes + offsets[:, None]
    keep = nms(boxes_for_nms, scores, iou_threshold)
    return keep
def remove_small_boxes(boxes, min_size):
    """Return indices of boxes whose width and height are both >= min_size.

    Args:
        boxes (Tensor[N, 4]): boxes in (x1, y1, x2, y2) format.
        min_size (float): minimum side length to keep a box.

    Returns:
        Tensor: indices of the surviving boxes.
    """
    ws, hs = boxes[:, 2] - boxes[:, 0], boxes[:, 3] - boxes[:, 1]
    # BUG FIX: this line was truncated to "ical_and(...)" (the leading
    # "keep = torch.log" was lost), leaving invalid syntax.  Restored to
    # match the intact copy of this function earlier in the file.
    keep = torch.logical_and(torch.ge(ws, min_size), torch.ge(hs, min_size))
    keep = torch.where(keep)[0]
    return keep
def clip_boxes_to_image(boxes, size):
    """Clip (x1, y1, x2, y2) boxes to lie inside an image of (height, width)
    ``size``; out-of-bounds coordinates are moved onto the border."""
    dim = boxes.dim()
    boxes_x = boxes[..., 0::2]  # x1, x2
    boxes_y = boxes[..., 1::2]  # y1, y2
    height, width = size
    # Tracing branch emulates clamp with max/min against scalar tensors --
    # presumably for export compatibility; TODO confirm.
    if torchvision._is_tracing():
        boxes_x = torch.max(boxes_x, torch.tensor(0, dtype=boxes.dtype, device=boxes.device))
        boxes_x = torch.min(boxes_x, torch.tensor(width, dtype=boxes.dtype, device=boxes.device))
        boxes_y = torch.max(boxes_y, torch.tensor(0, dtype=boxes.dtype, device=boxes.device))
        boxes_y = torch.min(boxes_y, torch.tensor(height, dtype=boxes.dtype, device=boxes.device))
    else:
        boxes_x = boxes_x.clamp(min=0, max=width)  # restrict x to [0, width]
        boxes_y = boxes_y.clamp(min=0, max=height)  # restrict y to [0, height]
    # Interleave x/y back into the original (x1, y1, x2, y2) layout.
    clipped_boxes = torch.stack((boxes_x, boxes_y), dim=dim)
    return clipped_boxes.reshape(boxes.shape)
def box_area(boxes):
    """Return (x2 - x1) * (y2 - y1) for each (x1, y1, x2, y2) box."""
    return (boxes[:, 2] - boxes[:, 0]) * (boxes[:, 3] - boxes[:, 1])
def box_iou(boxes1, boxes2):
    """Return the [N, M] pairwise IoU matrix for two sets of
    (x1, y1, x2, y2) boxes."""
    area1 = box_area(boxes1)
    area2 = box_area(boxes2)
    # Broadcasting over [:, None] yields [N, M, 2] intersection corners.
    lt = torch.max(boxes1[:, None, :2], boxes2[:, :2])  # left-top
    rb = torch.min(boxes1[:, None, 2:], boxes2[:, 2:])  # right-bottom
    wh = (rb - lt).clamp(min=0)  # zero size when boxes do not overlap
    inter = wh[:, :, 0] * wh[:, :, 1]
    iou = inter / (area1[:, None] + area2 - inter)
    return iou
| true | true |
f720dfa2212e24646fbef26faa5e5bdf2d802ce4 | 14,811 | py | Python | PyObjCTest/test_nsgraphics.py | linuxfood/pyobjc-framework-Cocoa-test | 3475890f165ab26a740f13d5afe4c62b4423a140 | [
"MIT"
] | null | null | null | PyObjCTest/test_nsgraphics.py | linuxfood/pyobjc-framework-Cocoa-test | 3475890f165ab26a740f13d5afe4c62b4423a140 | [
"MIT"
] | null | null | null | PyObjCTest/test_nsgraphics.py | linuxfood/pyobjc-framework-Cocoa-test | 3475890f165ab26a740f13d5afe4c62b4423a140 | [
"MIT"
] | null | null | null | import AppKit
import objc
from PyObjCTools.TestSupport import TestCase, min_os_level
class TestNSGraphics(TestCase):
def testConstants(self):
self.assertEqual(AppKit.NSCompositeClear, 0)
self.assertEqual(AppKit.NSCompositeCopy, 1)
self.assertEqual(AppKit.NSCompositeSourceOver, 2)
self.assertEqual(AppKit.NSCompositeSourceIn, 3)
self.assertEqual(AppKit.NSCompositeSourceOut, 4)
self.assertEqual(AppKit.NSCompositeSourceAtop, 5)
self.assertEqual(AppKit.NSCompositeDestinationOver, 6)
self.assertEqual(AppKit.NSCompositeDestinationIn, 7)
self.assertEqual(AppKit.NSCompositeDestinationOut, 8)
self.assertEqual(AppKit.NSCompositeDestinationAtop, 9)
self.assertEqual(AppKit.NSCompositeXOR, 10)
self.assertEqual(AppKit.NSCompositePlusDarker, 11)
self.assertEqual(AppKit.NSCompositeHighlight, 12)
self.assertEqual(AppKit.NSCompositePlusLighter, 13)
self.assertEqual(AppKit.NSCompositeMultiply, 14)
self.assertEqual(AppKit.NSCompositeScreen, 15)
self.assertEqual(AppKit.NSCompositeOverlay, 16)
self.assertEqual(AppKit.NSCompositeDarken, 17)
self.assertEqual(AppKit.NSCompositeLighten, 18)
self.assertEqual(AppKit.NSCompositeColorDodge, 19)
self.assertEqual(AppKit.NSCompositeColorBurn, 20)
self.assertEqual(AppKit.NSCompositeSoftLight, 21)
self.assertEqual(AppKit.NSCompositeHardLight, 22)
self.assertEqual(AppKit.NSCompositeDifference, 23)
self.assertEqual(AppKit.NSCompositeExclusion, 24)
self.assertEqual(AppKit.NSCompositeHue, 25)
self.assertEqual(AppKit.NSCompositeSaturation, 26)
self.assertEqual(AppKit.NSCompositeColor, 27)
self.assertEqual(AppKit.NSCompositeLuminosity, 28)
self.assertEqual(AppKit.NSCompositingOperationClear, 0)
self.assertEqual(AppKit.NSCompositingOperationCopy, 1)
self.assertEqual(AppKit.NSCompositingOperationSourceOver, 2)
self.assertEqual(AppKit.NSCompositingOperationSourceIn, 3)
self.assertEqual(AppKit.NSCompositingOperationSourceOut, 4)
self.assertEqual(AppKit.NSCompositingOperationSourceAtop, 5)
self.assertEqual(AppKit.NSCompositingOperationDestinationOver, 6)
self.assertEqual(AppKit.NSCompositingOperationDestinationIn, 7)
self.assertEqual(AppKit.NSCompositingOperationDestinationOut, 8)
self.assertEqual(AppKit.NSCompositingOperationDestinationAtop, 9)
self.assertEqual(AppKit.NSCompositingOperationXOR, 10)
self.assertEqual(AppKit.NSCompositingOperationPlusDarker, 11)
self.assertEqual(AppKit.NSCompositingOperationHighlight, 12)
self.assertEqual(AppKit.NSCompositingOperationPlusLighter, 13)
self.assertEqual(AppKit.NSCompositingOperationMultiply, 14)
self.assertEqual(AppKit.NSCompositingOperationScreen, 15)
self.assertEqual(AppKit.NSCompositingOperationOverlay, 16)
self.assertEqual(AppKit.NSCompositingOperationDarken, 17)
self.assertEqual(AppKit.NSCompositingOperationLighten, 18)
self.assertEqual(AppKit.NSCompositingOperationColorDodge, 19)
self.assertEqual(AppKit.NSCompositingOperationColorBurn, 20)
self.assertEqual(AppKit.NSCompositingOperationSoftLight, 21)
self.assertEqual(AppKit.NSCompositingOperationHardLight, 22)
self.assertEqual(AppKit.NSCompositingOperationDifference, 23)
self.assertEqual(AppKit.NSCompositingOperationExclusion, 24)
self.assertEqual(AppKit.NSCompositingOperationHue, 25)
self.assertEqual(AppKit.NSCompositingOperationSaturation, 26)
self.assertEqual(AppKit.NSCompositingOperationColor, 27)
self.assertEqual(AppKit.NSCompositingOperationLuminosity, 28)
self.assertEqual(AppKit.NSBackingStoreRetained, 0)
self.assertEqual(AppKit.NSBackingStoreNonretained, 1)
self.assertEqual(AppKit.NSBackingStoreBuffered, 2)
self.assertEqual(AppKit.NSWindowAbove, 1)
self.assertEqual(AppKit.NSWindowBelow, -1)
self.assertEqual(AppKit.NSWindowOut, 0)
self.assertEqual(AppKit.NSFocusRingOnly, 0)
self.assertEqual(AppKit.NSFocusRingBelow, 1)
self.assertEqual(AppKit.NSFocusRingAbove, 2)
self.assertEqual(AppKit.NSFocusRingTypeDefault, 0)
self.assertEqual(AppKit.NSFocusRingTypeNone, 1)
self.assertEqual(AppKit.NSFocusRingTypeExterior, 2)
self.assertIsInstance(AppKit.NSCalibratedWhiteColorSpace, str)
self.assertIsInstance(AppKit.NSCalibratedBlackColorSpace, str)
self.assertIsInstance(AppKit.NSCalibratedRGBColorSpace, str)
self.assertIsInstance(AppKit.NSDeviceWhiteColorSpace, str)
self.assertIsInstance(AppKit.NSDeviceBlackColorSpace, str)
self.assertIsInstance(AppKit.NSDeviceRGBColorSpace, str)
self.assertIsInstance(AppKit.NSDeviceCMYKColorSpace, str)
self.assertIsInstance(AppKit.NSNamedColorSpace, str)
self.assertIsInstance(AppKit.NSPatternColorSpace, str)
self.assertIsInstance(AppKit.NSCustomColorSpace, str)
self.assertIsInstance(AppKit.NSWhite, float)
self.assertIsInstance(AppKit.NSLightGray, float)
self.assertIsInstance(AppKit.NSDarkGray, float)
self.assertIsInstance(AppKit.NSBlack, float)
self.assertIsInstance(AppKit.NSDeviceResolution, str)
self.assertIsInstance(AppKit.NSDeviceColorSpaceName, str)
self.assertIsInstance(AppKit.NSDeviceBitsPerSample, str)
self.assertIsInstance(AppKit.NSDeviceIsScreen, str)
self.assertIsInstance(AppKit.NSDeviceIsPrinter, str)
self.assertIsInstance(AppKit.NSDeviceSize, str)
self.assertEqual(AppKit.NSAnimationEffectDisappearingItemDefault, 0)
self.assertEqual(AppKit.NSAnimationEffectPoof, 10)
self.assertEqual(AppKit.NSDisplayGamutSRGB, 1)
self.assertEqual(AppKit.NSDisplayGamutP3, 2)
    def testFunctions(self):
        """Exercise the NSGraphics free functions and their bridge metadata.

        Drawing calls are routed into an offscreen bitmap context so they can
        run headless; the previous current context is restored afterwards.
        """
        app = AppKit.NSApplication.sharedApplication()  # noqa: F841
        # Depth/colorspace helpers: check bridge metadata, then call them.
        self.assertArgHasType(AppKit.NSBestDepth, 4, b"o^" + objc._C_NSBOOL)
        self.assertArgIsBOOL(AppKit.NSBestDepth, 3)
        d, e = AppKit.NSBestDepth(AppKit.NSDeviceRGBColorSpace, 8, 32, False, None)
        self.assertIsInstance(d, int)
        self.assertIsInstance(e, bool)
        self.assertResultIsBOOL(AppKit.NSPlanarFromDepth)
        self.assertIsInstance(AppKit.NSPlanarFromDepth(0), bool)
        self.assertIsInstance(AppKit.NSColorSpaceFromDepth(0), str)
        self.assertIsInstance(AppKit.NSBitsPerSampleFromDepth(0), int)
        self.assertIsInstance(AppKit.NSBitsPerPixelFromDepth(0), int)
        self.assertIsInstance(
            AppKit.NSNumberOfColorComponents(AppKit.NSDeviceRGBColorSpace), int
        )
        v = AppKit.NSAvailableWindowDepths()
        self.assertIsInstance(v, tuple)
        self.assertNotEqual(len(v), 0)
        self.assertIsInstance(v[0], int)
        # Build a 255x255 RGBA bitmap to serve as an offscreen drawing target.
        img = AppKit.NSBitmapImageRep.alloc().initWithBitmapDataPlanes_pixelsWide_pixelsHigh_bitsPerSample_samplesPerPixel_hasAlpha_isPlanar_colorSpaceName_bitmapFormat_bytesPerRow_bitsPerPixel_(  # noqa: B950
            None, 255, 255, 8, 4, True, False, AppKit.NSCalibratedRGBColorSpace, 0, 0, 0
        )
        context = AppKit.NSGraphicsContext.graphicsContextWithBitmapImageRep_(img)
        current = AppKit.NSGraphicsContext.currentContext()
        try:
            # All drawing below targets the offscreen bitmap context.
            AppKit.NSGraphicsContext.setCurrentContext_(context)
            AppKit.NSRectFill(((0, 0), (1, 2)))
            self.assertArgSizeInArg(AppKit.NSRectFillList, 0, 1)
            AppKit.NSRectFillList([((0, 0), (1, 2)), ((10, 50), (9, 9))], 2)
            self.assertArgSizeInArg(AppKit.NSRectFillListWithGrays, 0, 2)
            self.assertArgSizeInArg(AppKit.NSRectFillListWithGrays, 1, 2)
            AppKit.NSRectFillListWithGrays(
                [((0, 0), (1, 2)), ((10, 50), (9, 9))], (0.5, 0.6), 2
            )
            self.assertArgSizeInArg(AppKit.NSRectFillListWithColors, 0, 2)
            self.assertArgSizeInArg(AppKit.NSRectFillListWithColors, 1, 2)
            AppKit.NSRectFillListWithColors(
                [((0, 0), (1, 2)), ((10, 50), (9, 9))],
                (AppKit.NSColor.blueColor(), AppKit.NSColor.redColor()),
                2,
            )
            AppKit.NSRectFillUsingOperation(
                ((0, 0), (1, 2)), AppKit.NSCompositeSourceOver
            )
            self.assertArgSizeInArg(AppKit.NSRectFillListUsingOperation, 0, 1)
            AppKit.NSRectFillListUsingOperation(
                [((0, 0), (1, 2)), ((10, 50), (9, 9))], 2, AppKit.NSCompositeSourceOver
            )
            self.assertArgSizeInArg(AppKit.NSRectFillListWithColorsUsingOperation, 0, 2)
            self.assertArgSizeInArg(AppKit.NSRectFillListWithColorsUsingOperation, 1, 2)
            AppKit.NSRectFillListWithColorsUsingOperation(
                [((0, 0), (1, 2)), ((10, 50), (9, 9))],
                (AppKit.NSColor.blueColor(), AppKit.NSColor.redColor()),
                2,
                AppKit.NSCompositeSourceOver,
            )
            # Frame/clip/bezel drawing helpers.
            AppKit.NSFrameRect(((5, 5), (20, 30)))
            AppKit.NSFrameRectWithWidth(((5, 5), (20, 30)), 4)
            AppKit.NSFrameRectWithWidthUsingOperation(
                ((5, 5), (20, 30)), 4, AppKit.NSCompositeSourceOver
            )
            AppKit.NSRectClip(((5, 5), (200, 200)))
            self.assertArgSizeInArg(AppKit.NSRectClipList, 0, 1)
            AppKit.NSRectClipList([((5, 5), (200, 200)), ((50, 50), (90, 100))], 2)
            # Reading a pixel requires a current (bitmap) context.
            color = AppKit.NSReadPixel((5, 5))
            self.assertIsInstance(color, AppKit.NSColor)
            self.assertArgSizeInArg(AppKit.NSDrawTiledRects, 2, 4)
            self.assertArgSizeInArg(AppKit.NSDrawTiledRects, 3, 4)
            self.assertArgIsIn(AppKit.NSDrawTiledRects, 2)
            self.assertArgIsIn(AppKit.NSDrawTiledRects, 3)
            AppKit.NSDrawTiledRects(
                ((10, 10), (50, 50)),
                ((15, 15), (10, 10)),
                [AppKit.NSMinXEdge, AppKit.NSMaxXEdge],
                [0.8, 0.9],
                2,
            )
            AppKit.NSDrawGrayBezel(((0, 0), (10, 10)), ((0, 0), (50, 50)))
            AppKit.NSDrawGroove(((0, 0), (10, 10)), ((0, 0), (50, 50)))
            AppKit.NSDrawWhiteBezel(((0, 0), (10, 10)), ((0, 0), (50, 50)))
            AppKit.NSDrawButton(((0, 0), (10, 10)), ((0, 0), (50, 50)))
            AppKit.NSEraseRect(((0, 0), (10, 10)))
            AppKit.NSCopyBits(0, ((10, 10), (50, 50)), (50, 50))
            AppKit.NSHighlightRect(((10, 10), (50, 50)))
            AppKit.NSDrawDarkBezel(((0, 0), (10, 10)), ((0, 0), (50, 50)))
            AppKit.NSDrawLightBezel(((0, 0), (10, 10)), ((0, 0), (50, 50)))
            AppKit.NSDottedFrameRect(((10, 10), (50, 50)))
            AppKit.NSDrawWindowBackground(((10, 10), (50, 50)))
        finally:
            # Always restore whatever context was current before the test.
            AppKit.NSGraphicsContext.setCurrentContext_(current)
        AppKit.NSSetFocusRingStyle(AppKit.NSFocusRingAbove)
        self.assertArgIsOut(AppKit.NSGetWindowServerMemory, 1)
        self.assertArgIsOut(AppKit.NSGetWindowServerMemory, 2)
        self.assertArgIsOut(AppKit.NSGetWindowServerMemory, 3)
        r = AppKit.NSGetWindowServerMemory(0, None, None, None)
        self.assertIsInstance(r[0], int)
        self.assertIsInstance(r[1], int)
        self.assertIsInstance(r[2], int)
        self.assertArgSizeInArg(AppKit.NSDrawColorTiledRects, 2, 4)
        self.assertArgSizeInArg(AppKit.NSDrawColorTiledRects, 3, 4)
        self.assertArgIsIn(AppKit.NSDrawColorTiledRects, 2)
        self.assertArgIsIn(AppKit.NSDrawColorTiledRects, 3)
        AppKit.NSDrawColorTiledRects(
            ((10, 10), (50, 50)),
            ((15, 15), (10, 10)),
            [AppKit.NSMinXEdge, AppKit.NSMaxXEdge],
            [AppKit.NSColor.redColor(), AppKit.NSColor.blueColor()],
            2,
        )
        # self.assertArgIsBOOL(AppKit.NSDrawBitmap, 7)
        # self.assertArgIsBOOL(AppKit.NSDrawBitmap, 8)
        # AppKit.NSDrawBitmap(((0, 0), (10, 10)), 10, 20, 8, 4, 32, 40, False, True,
        # AppKit.NSDeviceRGBColorSpace, [' '*4*10*20, '', '', '', ''])
        # Window enumeration helpers with out-parameters.
        self.assertArgSizeInArg(AppKit.NSWindowList, 1, 0)
        self.assertArgIsOut(AppKit.NSWindowList, 1)
        v = AppKit.NSWindowList(5, None)
        self.assertIsInstance(v, tuple)
        self.assertEqual(len(v), 5)
        self.assertIsInstance(v[0], int)
        self.assertArgIsOut(AppKit.NSCountWindowsForContext, 1)
        v = AppKit.NSCountWindowsForContext(1, None)
        self.assertIsInstance(v, int)
        self.assertArgIsOut(AppKit.NSWindowListForContext, 2)
        self.assertArgSizeInArg(AppKit.NSWindowListForContext, 2, 1)
        v = AppKit.NSWindowListForContext(0, 5, None)
        self.assertIsInstance(v, tuple)
        self.assertEqual(len(v), 5)
        self.assertIsInstance(v[0], int)
        AppKit.NSBeep()
        count = AppKit.NSCountWindows(None)
        self.assertIsInstance(count, int)
        # These can raise objc.error depending on the environment; the test
        # only requires that the functions are callable.
        try:
            AppKit.NSDisableScreenUpdates()
        except objc.error:
            pass
        try:
            AppKit.NSEnableScreenUpdates()
        except objc.error:
            pass
        self.assertArgIsSEL(AppKit.NSShowAnimationEffect, 4, b"v@:^v")
        self.assertArgHasType(AppKit.NSShowAnimationEffect, 5, b"^v")
        try:
            AppKit.NSShowAnimationEffect(
                AppKit.NSAnimationEffectPoof, (10, 10), (20, 30), None, None, None
            )
        except objc.error:
            pass
@min_os_level("10.5")
def testConstants10_5(self):
self.assertEqual(AppKit.NSColorRenderingIntentDefault, 0)
self.assertEqual(AppKit.NSColorRenderingIntentAbsoluteColorimetric, 1)
self.assertEqual(AppKit.NSColorRenderingIntentRelativeColorimetric, 2)
self.assertEqual(AppKit.NSColorRenderingIntentPerceptual, 3)
self.assertEqual(AppKit.NSColorRenderingIntentSaturation, 4)
self.assertEqual(AppKit.NSImageInterpolationDefault, 0)
self.assertEqual(AppKit.NSImageInterpolationNone, 1)
self.assertEqual(AppKit.NSImageInterpolationLow, 2)
self.assertEqual(AppKit.NSImageInterpolationHigh, 3)
@min_os_level("10.6")
def testConstants10_6(self):
self.assertEqual(AppKit.NSWindowDepthTwentyfourBitRGB, 0x208)
self.assertEqual(AppKit.NSWindowDepthSixtyfourBitRGB, 0x210)
self.assertEqual(AppKit.NSWindowDepthOnehundredtwentyeightBitRGB, 0x220)
self.assertEqual(AppKit.NSImageInterpolationMedium, 4)
AppKit.NSApplication.sharedApplication()
| 47.932039 | 209 | 0.667207 | import AppKit
import objc
from PyObjCTools.TestSupport import TestCase, min_os_level
class TestNSGraphics(TestCase):
def testConstants(self):
self.assertEqual(AppKit.NSCompositeClear, 0)
self.assertEqual(AppKit.NSCompositeCopy, 1)
self.assertEqual(AppKit.NSCompositeSourceOver, 2)
self.assertEqual(AppKit.NSCompositeSourceIn, 3)
self.assertEqual(AppKit.NSCompositeSourceOut, 4)
self.assertEqual(AppKit.NSCompositeSourceAtop, 5)
self.assertEqual(AppKit.NSCompositeDestinationOver, 6)
self.assertEqual(AppKit.NSCompositeDestinationIn, 7)
self.assertEqual(AppKit.NSCompositeDestinationOut, 8)
self.assertEqual(AppKit.NSCompositeDestinationAtop, 9)
self.assertEqual(AppKit.NSCompositeXOR, 10)
self.assertEqual(AppKit.NSCompositePlusDarker, 11)
self.assertEqual(AppKit.NSCompositeHighlight, 12)
self.assertEqual(AppKit.NSCompositePlusLighter, 13)
self.assertEqual(AppKit.NSCompositeMultiply, 14)
self.assertEqual(AppKit.NSCompositeScreen, 15)
self.assertEqual(AppKit.NSCompositeOverlay, 16)
self.assertEqual(AppKit.NSCompositeDarken, 17)
self.assertEqual(AppKit.NSCompositeLighten, 18)
self.assertEqual(AppKit.NSCompositeColorDodge, 19)
self.assertEqual(AppKit.NSCompositeColorBurn, 20)
self.assertEqual(AppKit.NSCompositeSoftLight, 21)
self.assertEqual(AppKit.NSCompositeHardLight, 22)
self.assertEqual(AppKit.NSCompositeDifference, 23)
self.assertEqual(AppKit.NSCompositeExclusion, 24)
self.assertEqual(AppKit.NSCompositeHue, 25)
self.assertEqual(AppKit.NSCompositeSaturation, 26)
self.assertEqual(AppKit.NSCompositeColor, 27)
self.assertEqual(AppKit.NSCompositeLuminosity, 28)
self.assertEqual(AppKit.NSCompositingOperationClear, 0)
self.assertEqual(AppKit.NSCompositingOperationCopy, 1)
self.assertEqual(AppKit.NSCompositingOperationSourceOver, 2)
self.assertEqual(AppKit.NSCompositingOperationSourceIn, 3)
self.assertEqual(AppKit.NSCompositingOperationSourceOut, 4)
self.assertEqual(AppKit.NSCompositingOperationSourceAtop, 5)
self.assertEqual(AppKit.NSCompositingOperationDestinationOver, 6)
self.assertEqual(AppKit.NSCompositingOperationDestinationIn, 7)
self.assertEqual(AppKit.NSCompositingOperationDestinationOut, 8)
self.assertEqual(AppKit.NSCompositingOperationDestinationAtop, 9)
self.assertEqual(AppKit.NSCompositingOperationXOR, 10)
self.assertEqual(AppKit.NSCompositingOperationPlusDarker, 11)
self.assertEqual(AppKit.NSCompositingOperationHighlight, 12)
self.assertEqual(AppKit.NSCompositingOperationPlusLighter, 13)
self.assertEqual(AppKit.NSCompositingOperationMultiply, 14)
self.assertEqual(AppKit.NSCompositingOperationScreen, 15)
self.assertEqual(AppKit.NSCompositingOperationOverlay, 16)
self.assertEqual(AppKit.NSCompositingOperationDarken, 17)
self.assertEqual(AppKit.NSCompositingOperationLighten, 18)
self.assertEqual(AppKit.NSCompositingOperationColorDodge, 19)
self.assertEqual(AppKit.NSCompositingOperationColorBurn, 20)
self.assertEqual(AppKit.NSCompositingOperationSoftLight, 21)
self.assertEqual(AppKit.NSCompositingOperationHardLight, 22)
self.assertEqual(AppKit.NSCompositingOperationDifference, 23)
self.assertEqual(AppKit.NSCompositingOperationExclusion, 24)
self.assertEqual(AppKit.NSCompositingOperationHue, 25)
self.assertEqual(AppKit.NSCompositingOperationSaturation, 26)
self.assertEqual(AppKit.NSCompositingOperationColor, 27)
self.assertEqual(AppKit.NSCompositingOperationLuminosity, 28)
self.assertEqual(AppKit.NSBackingStoreRetained, 0)
self.assertEqual(AppKit.NSBackingStoreNonretained, 1)
self.assertEqual(AppKit.NSBackingStoreBuffered, 2)
self.assertEqual(AppKit.NSWindowAbove, 1)
self.assertEqual(AppKit.NSWindowBelow, -1)
self.assertEqual(AppKit.NSWindowOut, 0)
self.assertEqual(AppKit.NSFocusRingOnly, 0)
self.assertEqual(AppKit.NSFocusRingBelow, 1)
self.assertEqual(AppKit.NSFocusRingAbove, 2)
self.assertEqual(AppKit.NSFocusRingTypeDefault, 0)
self.assertEqual(AppKit.NSFocusRingTypeNone, 1)
self.assertEqual(AppKit.NSFocusRingTypeExterior, 2)
self.assertIsInstance(AppKit.NSCalibratedWhiteColorSpace, str)
self.assertIsInstance(AppKit.NSCalibratedBlackColorSpace, str)
self.assertIsInstance(AppKit.NSCalibratedRGBColorSpace, str)
self.assertIsInstance(AppKit.NSDeviceWhiteColorSpace, str)
self.assertIsInstance(AppKit.NSDeviceBlackColorSpace, str)
self.assertIsInstance(AppKit.NSDeviceRGBColorSpace, str)
self.assertIsInstance(AppKit.NSDeviceCMYKColorSpace, str)
self.assertIsInstance(AppKit.NSNamedColorSpace, str)
self.assertIsInstance(AppKit.NSPatternColorSpace, str)
self.assertIsInstance(AppKit.NSCustomColorSpace, str)
self.assertIsInstance(AppKit.NSWhite, float)
self.assertIsInstance(AppKit.NSLightGray, float)
self.assertIsInstance(AppKit.NSDarkGray, float)
self.assertIsInstance(AppKit.NSBlack, float)
self.assertIsInstance(AppKit.NSDeviceResolution, str)
self.assertIsInstance(AppKit.NSDeviceColorSpaceName, str)
self.assertIsInstance(AppKit.NSDeviceBitsPerSample, str)
self.assertIsInstance(AppKit.NSDeviceIsScreen, str)
self.assertIsInstance(AppKit.NSDeviceIsPrinter, str)
self.assertIsInstance(AppKit.NSDeviceSize, str)
self.assertEqual(AppKit.NSAnimationEffectDisappearingItemDefault, 0)
self.assertEqual(AppKit.NSAnimationEffectPoof, 10)
self.assertEqual(AppKit.NSDisplayGamutSRGB, 1)
self.assertEqual(AppKit.NSDisplayGamutP3, 2)
def testFunctions(self):
    """Exercise the AppKit graphics C functions and their PyObjC argument metadata.

    Pattern throughout: assert* metadata checks (assertArgIsBOOL,
    assertArgSizeInArg, assertArgIsOut, ...) verify the bridge signatures,
    then each function is actually invoked with toy values to prove it is
    callable and returns the expected Python types.
    """
    app = AppKit.NSApplication.sharedApplication()
    self.assertArgHasType(AppKit.NSBestDepth, 4, b"o^" + objc._C_NSBOOL)
    self.assertArgIsBOOL(AppKit.NSBestDepth, 3)
    d, e = AppKit.NSBestDepth(AppKit.NSDeviceRGBColorSpace, 8, 32, False, None)
    self.assertIsInstance(d, int)
    self.assertIsInstance(e, bool)
    self.assertResultIsBOOL(AppKit.NSPlanarFromDepth)
    self.assertIsInstance(AppKit.NSPlanarFromDepth(0), bool)
    self.assertIsInstance(AppKit.NSColorSpaceFromDepth(0), str)
    self.assertIsInstance(AppKit.NSBitsPerSampleFromDepth(0), int)
    self.assertIsInstance(AppKit.NSBitsPerPixelFromDepth(0), int)
    self.assertIsInstance(
        AppKit.NSNumberOfColorComponents(AppKit.NSDeviceRGBColorSpace), int
    )
    v = AppKit.NSAvailableWindowDepths()
    self.assertIsInstance(v, tuple)
    self.assertNotEqual(len(v), 0)
    self.assertIsInstance(v[0], int)
    # Build an in-memory bitmap so the drawing functions below have a live
    # graphics context to render into (no on-screen window needed).
    img = AppKit.NSBitmapImageRep.alloc().initWithBitmapDataPlanes_pixelsWide_pixelsHigh_bitsPerSample_samplesPerPixel_hasAlpha_isPlanar_colorSpaceName_bitmapFormat_bytesPerRow_bitsPerPixel_(
        None, 255, 255, 8, 4, True, False, AppKit.NSCalibratedRGBColorSpace, 0, 0, 0
    )
    context = AppKit.NSGraphicsContext.graphicsContextWithBitmapImageRep_(img)
    current = AppKit.NSGraphicsContext.currentContext()
    try:
        # Everything inside this try draws into the bitmap context; the
        # finally clause restores whatever context was current before.
        AppKit.NSGraphicsContext.setCurrentContext_(context)
        AppKit.NSRectFill(((0, 0), (1, 2)))
        self.assertArgSizeInArg(AppKit.NSRectFillList, 0, 1)
        AppKit.NSRectFillList([((0, 0), (1, 2)), ((10, 50), (9, 9))], 2)
        self.assertArgSizeInArg(AppKit.NSRectFillListWithGrays, 0, 2)
        self.assertArgSizeInArg(AppKit.NSRectFillListWithGrays, 1, 2)
        AppKit.NSRectFillListWithGrays(
            [((0, 0), (1, 2)), ((10, 50), (9, 9))], (0.5, 0.6), 2
        )
        self.assertArgSizeInArg(AppKit.NSRectFillListWithColors, 0, 2)
        self.assertArgSizeInArg(AppKit.NSRectFillListWithColors, 1, 2)
        AppKit.NSRectFillListWithColors(
            [((0, 0), (1, 2)), ((10, 50), (9, 9))],
            (AppKit.NSColor.blueColor(), AppKit.NSColor.redColor()),
            2,
        )
        AppKit.NSRectFillUsingOperation(
            ((0, 0), (1, 2)), AppKit.NSCompositeSourceOver
        )
        self.assertArgSizeInArg(AppKit.NSRectFillListUsingOperation, 0, 1)
        AppKit.NSRectFillListUsingOperation(
            [((0, 0), (1, 2)), ((10, 50), (9, 9))], 2, AppKit.NSCompositeSourceOver
        )
        self.assertArgSizeInArg(AppKit.NSRectFillListWithColorsUsingOperation, 0, 2)
        self.assertArgSizeInArg(AppKit.NSRectFillListWithColorsUsingOperation, 1, 2)
        AppKit.NSRectFillListWithColorsUsingOperation(
            [((0, 0), (1, 2)), ((10, 50), (9, 9))],
            (AppKit.NSColor.blueColor(), AppKit.NSColor.redColor()),
            2,
            AppKit.NSCompositeSourceOver,
        )
        AppKit.NSFrameRect(((5, 5), (20, 30)))
        AppKit.NSFrameRectWithWidth(((5, 5), (20, 30)), 4)
        AppKit.NSFrameRectWithWidthUsingOperation(
            ((5, 5), (20, 30)), 4, AppKit.NSCompositeSourceOver
        )
        AppKit.NSRectClip(((5, 5), (200, 200)))
        self.assertArgSizeInArg(AppKit.NSRectClipList, 0, 1)
        AppKit.NSRectClipList([((5, 5), (200, 200)), ((50, 50), (90, 100))], 2)
        # NSReadPixel reads back from the context we just drew into.
        color = AppKit.NSReadPixel((5, 5))
        self.assertIsInstance(color, AppKit.NSColor)
        self.assertArgSizeInArg(AppKit.NSDrawTiledRects, 2, 4)
        self.assertArgSizeInArg(AppKit.NSDrawTiledRects, 3, 4)
        self.assertArgIsIn(AppKit.NSDrawTiledRects, 2)
        self.assertArgIsIn(AppKit.NSDrawTiledRects, 3)
        AppKit.NSDrawTiledRects(
            ((10, 10), (50, 50)),
            ((15, 15), (10, 10)),
            [AppKit.NSMinXEdge, AppKit.NSMaxXEdge],
            [0.8, 0.9],
            2,
        )
        AppKit.NSDrawGrayBezel(((0, 0), (10, 10)), ((0, 0), (50, 50)))
        AppKit.NSDrawGroove(((0, 0), (10, 10)), ((0, 0), (50, 50)))
        AppKit.NSDrawWhiteBezel(((0, 0), (10, 10)), ((0, 0), (50, 50)))
        AppKit.NSDrawButton(((0, 0), (10, 10)), ((0, 0), (50, 50)))
        AppKit.NSEraseRect(((0, 0), (10, 10)))
        AppKit.NSCopyBits(0, ((10, 10), (50, 50)), (50, 50))
        AppKit.NSHighlightRect(((10, 10), (50, 50)))
        AppKit.NSDrawDarkBezel(((0, 0), (10, 10)), ((0, 0), (50, 50)))
        AppKit.NSDrawLightBezel(((0, 0), (10, 10)), ((0, 0), (50, 50)))
        AppKit.NSDottedFrameRect(((10, 10), (50, 50)))
        AppKit.NSDrawWindowBackground(((10, 10), (50, 50)))
    finally:
        AppKit.NSGraphicsContext.setCurrentContext_(current)
    AppKit.NSSetFocusRingStyle(AppKit.NSFocusRingAbove)
    self.assertArgIsOut(AppKit.NSGetWindowServerMemory, 1)
    self.assertArgIsOut(AppKit.NSGetWindowServerMemory, 2)
    self.assertArgIsOut(AppKit.NSGetWindowServerMemory, 3)
    r = AppKit.NSGetWindowServerMemory(0, None, None, None)
    self.assertIsInstance(r[0], int)
    self.assertIsInstance(r[1], int)
    self.assertIsInstance(r[2], int)
    self.assertArgSizeInArg(AppKit.NSDrawColorTiledRects, 2, 4)
    self.assertArgSizeInArg(AppKit.NSDrawColorTiledRects, 3, 4)
    self.assertArgIsIn(AppKit.NSDrawColorTiledRects, 2)
    self.assertArgIsIn(AppKit.NSDrawColorTiledRects, 3)
    AppKit.NSDrawColorTiledRects(
        ((10, 10), (50, 50)),
        ((15, 15), (10, 10)),
        [AppKit.NSMinXEdge, AppKit.NSMaxXEdge],
        [AppKit.NSColor.redColor(), AppKit.NSColor.blueColor()],
        2,
    )
    self.assertArgSizeInArg(AppKit.NSWindowList, 1, 0)
    self.assertArgIsOut(AppKit.NSWindowList, 1)
    v = AppKit.NSWindowList(5, None)
    self.assertIsInstance(v, tuple)
    self.assertEqual(len(v), 5)
    self.assertIsInstance(v[0], int)
    self.assertArgIsOut(AppKit.NSCountWindowsForContext, 1)
    v = AppKit.NSCountWindowsForContext(1, None)
    self.assertIsInstance(v, int)
    self.assertArgIsOut(AppKit.NSWindowListForContext, 2)
    self.assertArgSizeInArg(AppKit.NSWindowListForContext, 2, 1)
    v = AppKit.NSWindowListForContext(0, 5, None)
    self.assertIsInstance(v, tuple)
    self.assertEqual(len(v), 5)
    self.assertIsInstance(v[0], int)
    AppKit.NSBeep()
    count = AppKit.NSCountWindows(None)
    self.assertIsInstance(count, int)
    # These calls may legitimately fail depending on the window-server
    # session the tests run in, so objc.error is tolerated.
    try:
        AppKit.NSDisableScreenUpdates()
    except objc.error:
        pass
    try:
        AppKit.NSEnableScreenUpdates()
    except objc.error:
        pass
    self.assertArgIsSEL(AppKit.NSShowAnimationEffect, 4, b"v@:^v")
    self.assertArgHasType(AppKit.NSShowAnimationEffect, 5, b"^v")
    try:
        AppKit.NSShowAnimationEffect(
            AppKit.NSAnimationEffectPoof, (10, 10), (20, 30), None, None, None
        )
    except objc.error:
        pass
@min_os_level("10.5")
def testConstants10_5(self):
    """Verify the values of constants introduced in the 10.5 SDK."""
    # Table-driven form: each entry maps a constant name to its expected value.
    expected = (
        ("NSColorRenderingIntentDefault", 0),
        ("NSColorRenderingIntentAbsoluteColorimetric", 1),
        ("NSColorRenderingIntentRelativeColorimetric", 2),
        ("NSColorRenderingIntentPerceptual", 3),
        ("NSColorRenderingIntentSaturation", 4),
        ("NSImageInterpolationDefault", 0),
        ("NSImageInterpolationNone", 1),
        ("NSImageInterpolationLow", 2),
        ("NSImageInterpolationHigh", 3),
    )
    for name, value in expected:
        self.assertEqual(getattr(AppKit, name), value)
@min_os_level("10.6")
def testConstants10_6(self):
    """Verify the values of constants introduced in the 10.6 SDK."""
    # Table-driven form: each entry maps a constant name to its expected value.
    expected = (
        ("NSWindowDepthTwentyfourBitRGB", 0x208),
        ("NSWindowDepthSixtyfourBitRGB", 0x210),
        ("NSWindowDepthOnehundredtwentyeightBitRGB", 0x220),
        ("NSImageInterpolationMedium", 4),
    )
    for name, value in expected:
        self.assertEqual(getattr(AppKit, name), value)
AppKit.NSApplication.sharedApplication()
| true | true |
f720dfbd8a87908f745dd0e7e519b11314b25551 | 2,649 | py | Python | zExtraLearning/MLPrep/tf2.0/NbExtracts/23tf2_0_mirrored_strategy.py | talk2sunil83/UpgradLearning | 70c4f993c68ce5030e9df0edd15004bbb9fc71e7 | [
"Apache-2.0"
] | null | null | null | zExtraLearning/MLPrep/tf2.0/NbExtracts/23tf2_0_mirrored_strategy.py | talk2sunil83/UpgradLearning | 70c4f993c68ce5030e9df0edd15004bbb9fc71e7 | [
"Apache-2.0"
] | null | null | null | zExtraLearning/MLPrep/tf2.0/NbExtracts/23tf2_0_mirrored_strategy.py | talk2sunil83/UpgradLearning | 70c4f993c68ce5030e9df0edd15004bbb9fc71e7 | [
"Apache-2.0"
] | null | null | null | # -*- coding: utf-8 -*-
"""TF2.0 Mirrored Strategy.ipynb
Automatically generated by Colaboratory.
Original file is located at
https://colab.research.google.com/drive/1e7_N_vVQGyfa3Wz9ND0smWnnsHsQUs_k
"""
# Commented out IPython magic to ensure Python compatibility.
from tensorflow.keras.models import Model
from tensorflow.keras.layers import Input, Conv2D, Dense, Flatten, Dropout, GlobalMaxPooling2D, MaxPooling2D, BatchNormalization
import matplotlib.pyplot as plt
import numpy as np
import tensorflow as tf
print(tf.__version__)
# additional imports
# Load in the data (CIFAR-10: 50k train / 10k test 32x32 RGB images, 10 classes)
cifar10 = tf.keras.datasets.cifar10
(x_train, y_train), (x_test, y_test) = cifar10.load_data()
# Scale pixel values from [0, 255] into [0, 1]
x_train, x_test = x_train / 255.0, x_test / 255.0
# Labels come back shaped (N, 1); flatten to (N,) for sparse CE loss
y_train, y_test = y_train.flatten(), y_test.flatten()
print("x_train.shape:", x_train.shape)
print("y_train.shape", y_train.shape)
# number of classes
K = len(set(y_train))
print("number of classes:", K)
# Build the model using the functional API
def create_model(input_shape=None, num_classes=None):
    """Build the CIFAR-10 CNN: three Conv-BN x2 + MaxPool stages, then a dense head.

    Args:
        input_shape: shape of a single input image. Defaults to the shape of
            the module-level training data (backward compatible with the
            original zero-argument call).
        num_classes: size of the softmax output layer. Defaults to the
            module-level class count ``K``.

    Returns:
        An uncompiled ``tf.keras.Model``.
    """
    # Fall back to the notebook's globals so existing create_model() calls
    # behave exactly as before, while allowing standalone reuse.
    if input_shape is None:
        input_shape = x_train[0].shape
    if num_classes is None:
        num_classes = K

    i = Input(shape=input_shape)
    x = Conv2D(32, (3, 3), activation='relu', padding='same')(i)
    x = BatchNormalization()(x)
    x = Conv2D(32, (3, 3), activation='relu', padding='same')(x)
    x = BatchNormalization()(x)
    x = MaxPooling2D((2, 2))(x)
    x = Conv2D(64, (3, 3), activation='relu', padding='same')(x)
    x = BatchNormalization()(x)
    x = Conv2D(64, (3, 3), activation='relu', padding='same')(x)
    x = BatchNormalization()(x)
    x = MaxPooling2D((2, 2))(x)
    x = Conv2D(128, (3, 3), activation='relu', padding='same')(x)
    x = BatchNormalization()(x)
    x = Conv2D(128, (3, 3), activation='relu', padding='same')(x)
    x = BatchNormalization()(x)
    x = MaxPooling2D((2, 2))(x)
    x = Flatten()(x)
    x = Dropout(0.2)(x)
    x = Dense(1024, activation='relu')(x)
    x = Dropout(0.2)(x)
    x = Dense(num_classes, activation='softmax')(x)
    model = Model(i, x)
    return model
# Replicate the model across all visible GPUs (or fall back to one device).
strategy = tf.distribute.MirroredStrategy()
# strategy = tf.distribute.experimental.CentralStorageStrategy()
print(f'Number of devices: {strategy.num_replicas_in_sync}')
# Model creation and compilation must happen inside the strategy scope so
# that the variables are created as mirrored variables.
with strategy.scope():
    model = create_model()
    model.compile(loss='sparse_categorical_crossentropy',
                  optimizer='adam',
                  metrics=['accuracy'])
# Fit
r = model.fit(x_train, y_train, validation_data=(x_test, y_test), epochs=5)
# Leftover notebook sanity checks: samples per batch at the default batch
# size of 32 -- 50000 train samples / 391 steps, 10000 test / 79 steps.
50000/391
10000/79
# Compare this to non-distributed training
model2 = create_model()
model2.compile(loss='sparse_categorical_crossentropy',
               optimizer='adam',
               metrics=['accuracy'])
r = model2.fit(x_train, y_train, validation_data=(x_test, y_test), epochs=5)
| 29.10989 | 128 | 0.678369 |
from tensorflow.keras.models import Model
from tensorflow.keras.layers import Input, Conv2D, Dense, Flatten, Dropout, GlobalMaxPooling2D, MaxPooling2D, BatchNormalization
import matplotlib.pyplot as plt
import numpy as np
import tensorflow as tf
print(tf.__version__)
# Load CIFAR-10 (50k train / 10k test 32x32 RGB images, 10 classes)
cifar10 = tf.keras.datasets.cifar10
(x_train, y_train), (x_test, y_test) = cifar10.load_data()
# Scale pixel values from [0, 255] into [0, 1]
x_train, x_test = x_train / 255.0, x_test / 255.0
# Labels come back shaped (N, 1); flatten to (N,) for sparse CE loss
y_train, y_test = y_train.flatten(), y_test.flatten()
print("x_train.shape:", x_train.shape)
print("y_train.shape", y_train.shape)
# Number of distinct classes in the label set
K = len(set(y_train))
print("number of classes:", K)
def create_model():
    """Build the CIFAR-10 CNN: three (Conv-BN x2 + MaxPool) stages, then a dense head."""
    inputs = Input(shape=x_train[0].shape)
    x = inputs
    # Each stage doubles the filter count and halves the spatial resolution,
    # producing the exact same layer sequence as writing it out by hand.
    for filters in (32, 64, 128):
        for _ in range(2):
            x = Conv2D(filters, (3, 3), activation='relu', padding='same')(x)
            x = BatchNormalization()(x)
        x = MaxPooling2D((2, 2))(x)
    x = Flatten()(x)
    x = Dropout(0.2)(x)
    x = Dense(1024, activation='relu')(x)
    x = Dropout(0.2)(x)
    outputs = Dense(K, activation='softmax')(x)
    return Model(inputs, outputs)
# Replicate the model across all visible GPUs (or fall back to one device).
strategy = tf.distribute.MirroredStrategy()
print(f'Number of devices: {strategy.num_replicas_in_sync}')
# Variables must be created inside the strategy scope to be mirrored.
with strategy.scope():
    model = create_model()
    model.compile(loss='sparse_categorical_crossentropy',
                  optimizer='adam',
                  metrics=['accuracy'])
r = model.fit(x_train, y_train, validation_data=(x_test, y_test), epochs=5)
# Leftover notebook sanity checks: samples per step at the default batch
# size of 32 -- 50000 train samples / 391 steps, 10000 test / 79 steps.
50000/391
10000/79
# Baseline: identical model trained without a distribution strategy.
model2 = create_model()
model2.compile(loss='sparse_categorical_crossentropy',
               optimizer='adam',
               metrics=['accuracy'])
r = model2.fit(x_train, y_train, validation_data=(x_test, y_test), epochs=5)
| true | true |
f720e314a25973213209e088a8ac815f6b5568fc | 20,043 | py | Python | src/pregame.py | the5thEmperor/lykos | 62cc7694ec24eb0c177dfd25db79725a092a57fa | [
"BSD-2-Clause"
] | null | null | null | src/pregame.py | the5thEmperor/lykos | 62cc7694ec24eb0c177dfd25db79725a092a57fa | [
"BSD-2-Clause"
] | null | null | null | src/pregame.py | the5thEmperor/lykos | 62cc7694ec24eb0c177dfd25db79725a092a57fa | [
"BSD-2-Clause"
] | null | null | null | from collections import defaultdict, Counter
from datetime import datetime, timedelta
import threading
import itertools
import random
import time
import math
import re
from src.containers import UserDict, UserSet
from src.decorators import COMMANDS, command, event_listener, handle_error
from src.functions import get_players
from src.warnings import decrement_stasis
from src.messages import messages
from src.events import Event
from src.cats import Wolfchat, All
from src import channels
import botconfig
# Guards the shared token bucket used to rate-limit !wait below.
WAIT_LOCK = threading.RLock()
WAIT_TOKENS = 0  # current token-bucket balance; refilled over time in wait()
WAIT_LAST = 0  # timestamp of the last token-bucket refill
LAST_START = UserDict() # type: UserDict[users.User, List[datetime, int]]
LAST_WAIT = UserDict() # type: UserDict[users.User, datetime]
START_VOTES = UserSet() # type: UserSet[users.User]
RESTART_TRIES = 0 # type: int
MAX_RETRIES = 3 # constant: not a setting
@command("wait", playing=True, phases=("join",))
def wait(var, wrapper, message):
    """Increase the wait time until !start can be used.

    Rate limited per-user (var.WAIT_RATE_LIMIT seconds between uses) and
    globally via a token bucket (var.WAIT_TB_DELAY seconds per token, up to
    var.WAIT_TB_BURST tokens). Each successful use pushes var.CAN_START_TIME
    forward by var.EXTRA_WAIT seconds.
    """
    if wrapper.target is not channels.Main:
        return
    # (Removed an unused ``pl = get_players()`` lookup here.)
    with WAIT_LOCK:
        global WAIT_TOKENS, WAIT_LAST
        # Refill the token bucket proportionally to elapsed time, capped at
        # the configured burst size.
        wait_check_time = time.time()
        WAIT_TOKENS += (wait_check_time - WAIT_LAST) / var.WAIT_TB_DELAY
        WAIT_LAST = wait_check_time
        WAIT_TOKENS = min(WAIT_TOKENS, var.WAIT_TB_BURST)

        now = datetime.now()
        if ((LAST_WAIT and wrapper.source in LAST_WAIT and LAST_WAIT[wrapper.source] +
                timedelta(seconds=var.WAIT_RATE_LIMIT) > now) or WAIT_TOKENS < 1):
            wrapper.pm(messages["command_ratelimited"])
            return

        LAST_WAIT[wrapper.source] = now
        WAIT_TOKENS -= 1
        # If the clock already passed CAN_START_TIME, restart the window from
        # "now" instead of stacking onto a time in the past.
        if now > var.CAN_START_TIME:
            var.CAN_START_TIME = now + timedelta(seconds=var.EXTRA_WAIT)
        else:
            var.CAN_START_TIME += timedelta(seconds=var.EXTRA_WAIT)
        wrapper.send(messages["wait_time_increase"].format(wrapper.source, var.EXTRA_WAIT))
@command("fwait", flag="w", phases=("join",))
def fwait(var, wrapper, message):
    """Force an increase (or decrease) in wait time. Can be used with a number of seconds to wait.

    Without an argument, adds var.EXTRA_WAIT seconds. The adjustment is
    clamped to +/- 15 minutes per invocation. Not rate limited (admin flag).
    """
    # (Removed an unused ``pl = get_players()`` lookup here.)
    msg = re.split(" +", message.strip(), 1)[0]
    # Accept a signed integer argument; anything else falls back to the default.
    if msg and (msg.isdigit() or (msg[0] == "-" and msg[1:].isdigit())):
        extra = int(msg)
    else:
        extra = var.EXTRA_WAIT

    now = datetime.now()
    extra = max(-900, min(900, extra))  # clamp to +/- 15 minutes
    # If the clock already passed CAN_START_TIME, restart the window from
    # "now" instead of adjusting a time in the past.
    if now > var.CAN_START_TIME:
        var.CAN_START_TIME = now + timedelta(seconds=extra)
    else:
        var.CAN_START_TIME += timedelta(seconds=extra)

    if extra >= 0:
        wrapper.send(messages["forced_wait_time_increase"].format(wrapper.source, abs(extra)))
    else:
        wrapper.send(messages["forced_wait_time_decrease"].format(wrapper.source, abs(extra)))
@command("start", phases=("none", "join"))
def start_cmd(var, wrapper, message):
    """Start a game of Werewolf."""
    # Only honor the command when issued in the main channel.
    if wrapper.target is not channels.Main:
        return
    start(var, wrapper)
@command("fstart", flag="S", phases=("join",))
def fstart(var, wrapper, message):
    """Force the game to start immediately."""
    # Announce first, then retarget the wrapper to the main channel so that
    # start() (and its output) operates there regardless of where the admin
    # issued the command.
    channels.Main.send(messages["fstart_success"].format(wrapper.source))
    wrapper.target = channels.Main
    start(var, wrapper, forced=True)
@command("retract", phases=("day", "join"))
def retract(var, wrapper, message):
    """Take back your vote during the day (for whom to lynch)."""
    player = wrapper.source
    if player not in get_players() or player in var.DISCONNECTED:
        return

    with var.GRAVEYARD_LOCK, var.WARNING_LOCK:
        # This module only handles the join-phase (!start vote) retraction.
        if var.PHASE != "join":
            return
        if player not in START_VOTES:
            wrapper.pm(messages["start_novote"])
            return

        START_VOTES.discard(player)
        wrapper.send(messages["start_retract"].format(player))

        # Last start vote gone: the expiry timer has nothing left to do.
        if not START_VOTES:
            var.TIMERS.pop("start_votes")[0].cancel()
@event_listener("del_player")
def on_del_player(evt, var, player, all_roles, death_triggers):
    """During the join phase, drop a departing player's pending !start vote."""
    if var.PHASE != "join":
        return
    with var.WARNING_LOCK:
        START_VOTES.discard(player)
        # With no votes left, the expiry timer serves no purpose.
        if not START_VOTES and "start_votes" in var.TIMERS:
            var.TIMERS.pop("start_votes")[0].cancel()
def start(var, wrapper, *, forced=False, restart=""):
    """Start (or restart) a game: validate preconditions, pick a game mode,
    assign roles, and kick off the first day/night transition.

    Args:
        var: game state module.
        wrapper: message wrapper; its source is the invoking user and its
            target the main channel.
        forced: bypass rate limiting, start-vote thresholds and wait timers
            (used by !fstart).
        restart: non-empty game mode name when re-rolling a game that failed
            to start; skips the join-phase validation.

    Fixes relative to the previous revision:
    - the max-players branch called ``wrapper.send.send(...)`` which raised
      AttributeError instead of sending the message;
    - ``random.sample`` was called on a set, which is a TypeError on
      Python 3.11+; the set is now converted to a list first.
    """
    if (not forced and LAST_START and wrapper.source in LAST_START and
            LAST_START[wrapper.source][0] + timedelta(seconds=var.START_RATE_LIMIT) >
            datetime.now() and not restart):
        LAST_START[wrapper.source][1] += 1
        wrapper.source.send(messages["command_ratelimited"])
        return

    if restart:
        global RESTART_TRIES
        RESTART_TRIES += 1
    if RESTART_TRIES > MAX_RETRIES:
        from src.wolfgame import stop_game
        stop_game(var, abort=True)
        return

    if not restart:
        LAST_START[wrapper.source] = [datetime.now(), 1]

    villagers = get_players()
    vils = set(villagers)

    if not restart:
        if var.PHASE == "none":
            wrapper.source.send(messages["no_game_running"])
            return
        if var.PHASE != "join":
            wrapper.source.send(messages["werewolf_already_running"])
            return
        if wrapper.source not in villagers and not forced:
            return

        now = datetime.now()
        var.GAME_START_TIME = now  # Only used for the idler checker
        dur = int((var.CAN_START_TIME - now).total_seconds())
        if dur > 0 and not forced:
            wrapper.send(messages["please_wait"].format(dur))
            return

        if len(villagers) < var.MIN_PLAYERS:
            wrapper.send(messages["not_enough_players"].format(wrapper.source, var.MIN_PLAYERS))
            return

        if len(villagers) > var.MAX_PLAYERS:
            # was: wrapper.send.send(...) -- AttributeError on this path
            wrapper.send(messages["max_players"].format(wrapper.source, var.MAX_PLAYERS))
            return

        with var.WARNING_LOCK:
            if not forced and wrapper.source in START_VOTES:
                wrapper.pm(messages["start_already_voted"])
                return

            start_votes_required = min(math.ceil(len(villagers) * var.START_VOTES_SCALE), var.START_VOTES_MAX)
            if not forced and len(START_VOTES) < start_votes_required:
                # If there's only one more vote required, start the game immediately.
                # Checked here to make sure that a player that has already voted can't
                # vote again for the final start.
                if len(START_VOTES) < start_votes_required - 1:
                    START_VOTES.add(wrapper.source)
                    remaining_votes = start_votes_required - len(START_VOTES)
                    wrapper.send(messages["start_voted"].format(wrapper.source, remaining_votes))

                    # If this was the first vote
                    if len(START_VOTES) == 1:
                        t = threading.Timer(60, expire_start_votes, (var, wrapper.target))
                        var.TIMERS["start_votes"] = (t, time.time(), 60)
                        t.daemon = True
                        t.start()

                    return

        if not var.FGAMED:
            votes = {} #key = gamemode, not hostmask
            for gamemode in var.GAMEMODE_VOTES.values():
                if len(villagers) >= var.GAME_MODES[gamemode][1] and len(villagers) <= var.GAME_MODES[gamemode][2]:
                    votes[gamemode] = votes.get(gamemode, 0) + 1
            voted = [gamemode for gamemode in votes if votes[gamemode] == max(votes.values()) and votes[gamemode] >= len(villagers)/2]
            if voted:
                from src.wolfgame import cgamemode
                cgamemode(random.choice(voted))
            else:
                possiblegamemodes = []
                numvotes = 0
                for gamemode, num in votes.items():
                    if len(villagers) < var.GAME_MODES[gamemode][1] or len(villagers) > var.GAME_MODES[gamemode][2] or var.GAME_MODES[gamemode][3] == 0:
                        continue
                    possiblegamemodes += [gamemode] * num
                    numvotes += num
                # Seed "None" entries proportional to the non-voting players
                # so a fully random mode can win the draw.
                if len(villagers) - numvotes > 0:
                    possiblegamemodes += [None] * ((len(villagers) - numvotes) // 2)
                # check if we go with a voted mode or a random mode
                gamemode = random.choice(possiblegamemodes)
                if gamemode is None:
                    possiblegamemodes = []
                    for gamemode in var.GAME_MODES.keys() - var.DISABLED_GAMEMODES:
                        if len(villagers) >= var.GAME_MODES[gamemode][1] and len(villagers) <= var.GAME_MODES[gamemode][2] and var.GAME_MODES[gamemode][3] > 0:
                            possiblegamemodes += [gamemode] * var.GAME_MODES[gamemode][3]
                    gamemode = random.choice(possiblegamemodes)
                from src.wolfgame import cgamemode
                cgamemode(gamemode)
    else:
        # Restarting: reuse the mode the aborted game was using.
        from src.wolfgame import cgamemode
        cgamemode(restart)

    var.GAME_ID = time.time()  # restart reaper timer

    from src.wolfgame import chk_win_conditions  # TODO: Move that into its own postgame module
    event = Event("role_attribution", {"addroles": Counter()})
    if event.dispatch(var, chk_win_conditions, villagers):
        addroles = event.data["addroles"]
        strip = lambda x: re.sub(r"\(.*\)", "", x)
        lv = len(villagers)
        roles = []
        for num, rolelist in var.CURRENT_GAMEMODE.ROLE_GUIDE.items():
            if num <= lv:
                roles.extend(rolelist)
        defroles = Counter(strip(x) for x in roles)
        # "-role" entries in the guide subtract previously-added copies.
        for role, count in list(defroles.items()):
            if role[0] == "-":
                srole = role[1:]
                defroles[srole] -= count
                del defroles[role]
                if defroles[srole] == 0:
                    del defroles[srole]
        if not defroles:
            wrapper.send(messages["no_settings_defined"].format(wrapper.source, lv))
            return
        for role, num in defroles.items():
            addroles[role] = max(addroles.get(role, num), len(var.FORCE_ROLES.get(role, ())))
        if sum([addroles[r] for r in addroles if r not in var.CURRENT_GAMEMODE.SECONDARY_ROLES]) > lv:
            wrapper.send(messages["too_many_roles"])
            return
        for role in All:
            addroles.setdefault(role, 0)
    else:
        addroles = event.data["addroles"]

    # convert roleset aliases into the appropriate roles
    possible_rolesets = [Counter()]
    roleset_roles = defaultdict(int)
    var.CURRENT_GAMEMODE.ACTIVE_ROLE_SETS = {}
    for role, amt in list(addroles.items()):
        # not a roleset? add a fixed amount of them
        if role not in var.CURRENT_GAMEMODE.ROLE_SETS:
            for pr in possible_rolesets:
                pr[role] += amt
            continue
        # if a roleset, ensure we don't try to expose the roleset name in !stats or future attribution
        # but do keep track of the sets in use so we can have !stats reflect proper information
        var.CURRENT_GAMEMODE.ACTIVE_ROLE_SETS[role] = amt
        del addroles[role]
        # init !stats with all 0s so that it can number things properly; the keys need to exist in the Counter
        # across every possible roleset so that !stats works right
        rs = Counter(var.CURRENT_GAMEMODE.ROLE_SETS[role])
        for r in rs:
            for pr in possible_rolesets:
                pr[r] += 0
        toadd = random.sample(list(rs.elements()), amt)
        for r in toadd:
            addroles[r] += 1
            roleset_roles[r] += 1
        add_rolesets = []
        temp_rolesets = []
        for c in itertools.combinations(rs.elements(), amt):
            add_rolesets.append(Counter(c))
        for pr in possible_rolesets:
            for ar in add_rolesets:
                temp = Counter(pr)
                temp.update(ar)
                temp_rolesets.append(temp)
        possible_rolesets = temp_rolesets

    if var.ORIGINAL_SETTINGS and not restart:  # Custom settings
        need_reset = True
        wvs = sum(addroles[r] for r in Wolfchat)
        if len(villagers) < (sum(addroles.values()) - sum(addroles[r] for r in var.CURRENT_GAMEMODE.SECONDARY_ROLES)):
            wrapper.send(messages["too_few_players_custom"])
        elif not wvs and var.CURRENT_GAMEMODE.name != "villagergame":
            wrapper.send(messages["need_one_wolf"])
        elif wvs > (len(villagers) / 2):
            wrapper.send(messages["too_many_wolves"])
        else:
            need_reset = False

        if need_reset:
            from src.wolfgame import reset_settings
            reset_settings()
            wrapper.send(messages["default_reset"])
            var.PHASE = "join"
            return

    if var.ADMIN_TO_PING is not None and not restart:
        # Disable !join and !start while an admin ping is pending.
        for decor in (COMMANDS["join"] + COMMANDS["start"]):
            decor(_command_disabled)

    var.ROLES.clear()
    var.MAIN_ROLES.clear()
    var.NIGHT_COUNT = 0
    var.DAY_COUNT = 0
    var.FINAL_ROLES.clear()
    var.EXTRA_WOLVES = 0
    var.DEADCHAT_PLAYERS.clear()
    var.SPECTATING_WOLFCHAT.clear()
    var.SPECTATING_DEADCHAT.clear()

    for role in All:
        var.ROLES[role] = UserSet()
    var.ROLES[var.DEFAULT_ROLE] = UserSet()
    # Players with a forced main role are removed from the random pool.
    for role, ps in var.FORCE_ROLES.items():
        if role not in var.CURRENT_GAMEMODE.SECONDARY_ROLES.keys():
            vils.difference_update(ps)

    for role, count in addroles.items():
        if role in var.CURRENT_GAMEMODE.SECONDARY_ROLES:
            var.ROLES[role] = (None,) * count
            continue  # We deal with those later, see below
        to_add = set()

        if role in var.FORCE_ROLES:
            if len(var.FORCE_ROLES[role]) > count:
                channels.Main.send(messages["error_frole_too_many"].format(role))
                return
            for user in var.FORCE_ROLES[role]:
                # If multiple main roles were forced, only first one is put in MAIN_ROLES
                if user not in var.MAIN_ROLES:
                    var.MAIN_ROLES[user] = role
                    var.ORIGINAL_MAIN_ROLES[user] = role
                to_add.add(user)
                count -= 1

        # list() needed: random.sample no longer accepts sets (Python 3.11+)
        selected = random.sample(list(vils), count)
        for x in selected:
            var.MAIN_ROLES[x] = role
            var.ORIGINAL_MAIN_ROLES[x] = role
            vils.remove(x)
        var.ROLES[role].update(selected)
        var.ROLES[role].update(to_add)

    var.ROLES[var.DEFAULT_ROLE].update(vils)
    for x in vils:
        var.MAIN_ROLES[x] = var.DEFAULT_ROLE
        var.ORIGINAL_MAIN_ROLES[x] = var.DEFAULT_ROLE
    if vils:
        for pr in possible_rolesets:
            pr[var.DEFAULT_ROLE] += len(vils)

    # Collapse possible_rolesets into var.ROLE_STATS
    # which is a FrozenSet[FrozenSet[Tuple[str, int]]]
    possible_rolesets_set = set()
    event = Event("reconfigure_stats", {"new": []})
    for pr in possible_rolesets:
        event.data["new"] = [pr]
        event.dispatch(var, pr, "start")
        for v in event.data["new"]:
            if min(v.values()) >= 0:
                possible_rolesets_set.add(frozenset(v.items()))
    var.ROLE_STATS = frozenset(possible_rolesets_set)

    # Now for the secondary roles
    for role, dfn in var.CURRENT_GAMEMODE.SECONDARY_ROLES.items():
        count = len(var.ROLES[role])
        var.ROLES[role] = UserSet()
        if role in var.FORCE_ROLES:
            ps = var.FORCE_ROLES[role]
            var.ROLES[role].update(ps)
            count -= len(ps)
        # Don't do anything further if this secondary role was forced on enough players already
        if count <= 0:
            continue
        possible = get_players(dfn)
        if len(possible) < count:
            wrapper.send(messages["not_enough_targets"].format(role))
            if var.ORIGINAL_SETTINGS:
                from src.wolfgame import reset_settings
                var.ROLES.clear()
                var.ROLES["person"] = UserSet(var.ALL_PLAYERS)
                reset_settings()
                wrapper.send(messages["default_reset"])
                var.PHASE = "join"
                return
            else:
                wrapper.send(messages["role_skipped"])
                continue
        var.ROLES[role].update(x for x in random.sample(possible, count))

    with var.WARNING_LOCK:  # cancel timers
        for name in ("join", "join_pinger", "start_votes"):
            if name in var.TIMERS:
                var.TIMERS[name][0].cancel()
                del var.TIMERS[name]

    var.LAST_STATS = None
    var.LAST_TIME = None

    for role, players in var.ROLES.items():
        for player in players:
            evt = Event("new_role", {"messages": [], "role": role, "in_wolfchat": False}, inherit_from=None)
            evt.dispatch(var, player, None)

    if not restart:
        gamemode = var.CURRENT_GAMEMODE.name
        if gamemode == "villagergame":
            gamemode = "default"

        # Alert the players to option changes they may not be aware of
        # All keys begin with gso_* (game start options)
        options = []
        if var.ORIGINAL_SETTINGS.get("ROLE_REVEAL") is not None:
            # Keys used here: gso_rr_on, gso_rr_team, gso_rr_off
            options.append(messages["gso_rr_{0}".format(var.ROLE_REVEAL)])
        if var.ORIGINAL_SETTINGS.get("STATS_TYPE") is not None:
            # Keys used here: gso_st_default, gso_st_accurate, gso_st_team, gso_st_disabled
            options.append(messages["gso_st_{0}".format(var.STATS_TYPE)])
        if var.ORIGINAL_SETTINGS.get("ABSTAIN_ENABLED") is not None or var.ORIGINAL_SETTINGS.get("LIMIT_ABSTAIN") is not None:
            if var.ABSTAIN_ENABLED and var.LIMIT_ABSTAIN:
                options.append(messages["gso_abs_rest"])
            elif var.ABSTAIN_ENABLED:
                options.append(messages["gso_abs_unrest"])
            else:
                options.append(messages["gso_abs_none"])

        key = "welcome_simple"
        if options:
            key = "welcome_options"
        wrapper.send(messages[key].format(villagers, gamemode, options))
        wrapper.target.mode("+m")

    var.ORIGINAL_ROLES.clear()
    for role, players in var.ROLES.items():
        var.ORIGINAL_ROLES[role] = players.copy()

    var.DAY_TIMEDELTA = timedelta(0)
    var.NIGHT_TIMEDELTA = timedelta(0)
    var.DAY_START_TIME = datetime.now()
    var.NIGHT_START_TIME = datetime.now()
    var.LAST_PING = None

    if restart:
        var.PHASE = "join"  # allow transition_* to run properly if game was restarted on first night

    if not var.START_WITH_DAY:
        from src.wolfgame import transition_night
        var.GAMEPHASE = "day"  # gamephase needs to be the thing we're transitioning from
        transition_night()
    else:
        from src.wolfgame import transition_day
        var.FIRST_DAY = True
        var.GAMEPHASE = "night"
        transition_day()

    decrement_stasis()

    if not (botconfig.DEBUG_MODE and var.DISABLE_DEBUG_MODE_REAPER):
        # DEATH TO IDLERS!
        from src.wolfgame import reaper
        reapertimer = threading.Thread(None, reaper, args=(wrapper.client, var.GAME_ID))
        reapertimer.daemon = True
        reapertimer.start()
def _command_disabled(var, wrapper, message):
    # Replacement handler installed over !join/!start in start() while an
    # admin ping is pending; it only reports that the command is disabled.
    wrapper.send(messages["command_disabled_admin"])
@handle_error
def expire_start_votes(var, channel):
    """Timer callback: clear all pending !start votes once the 60-second voting window lapses."""
    # Should never happen as the timer is removed on game start, but just to be safe
    if var.PHASE != "join":
        return
    with var.WARNING_LOCK:
        START_VOTES.clear()
        channel.send(messages["start_expired"])
@event_listener("reset")
def on_reset(evt, var):
    """Clear all pregame state when the game state is reset.

    Bug fix: this previously zeroed the MAX_RETRIES constant instead of the
    RESTART_TRIES counter. After the first reset, any game restart would
    make RESTART_TRIES (1) exceed MAX_RETRIES (0) and immediately abort the
    game. Reset the counter and leave the constant alone.
    """
    global RESTART_TRIES, WAIT_TOKENS, WAIT_LAST
    LAST_START.clear()
    LAST_WAIT.clear()
    START_VOTES.clear()
    RESTART_TRIES = 0
    WAIT_TOKENS = 0
    WAIT_LAST = 0
| 39.3 | 159 | 0.612533 | from collections import defaultdict, Counter
from datetime import datetime, timedelta
import threading
import itertools
import random
import time
import math
import re
from src.containers import UserDict, UserSet
from src.decorators import COMMANDS, command, event_listener, handle_error
from src.functions import get_players
from src.warnings import decrement_stasis
from src.messages import messages
from src.events import Event
from src.cats import Wolfchat, All
from src import channels
import botconfig
# Guards the shared token bucket used to rate-limit !wait below.
WAIT_LOCK = threading.RLock()
WAIT_TOKENS = 0  # current token-bucket balance; refilled over time in wait()
WAIT_LAST = 0  # timestamp of the last token-bucket refill
LAST_START = UserDict()  # per-user !start rate limiting: user -> [datetime, count]
LAST_WAIT = UserDict()  # per-user !wait rate limiting: user -> datetime
START_VOTES = UserSet()  # players currently voting to start the game
RESTART_TRIES = 0  # consecutive automatic restart attempts
MAX_RETRIES = 3  # constant: not a setting
@command("wait", playing=True, phases=("join",))
def wait(var, wrapper, message):
    """Increase the wait time until !start can be used."""
    if wrapper.target is not channels.Main:
        return
    pl = get_players()
    with WAIT_LOCK:
        global WAIT_TOKENS, WAIT_LAST
        # Refill the token bucket proportionally to elapsed time, capped at
        # the configured burst size.
        wait_check_time = time.time()
        WAIT_TOKENS += (wait_check_time - WAIT_LAST) / var.WAIT_TB_DELAY
        WAIT_LAST = wait_check_time
        WAIT_TOKENS = min(WAIT_TOKENS, var.WAIT_TB_BURST)
        now = datetime.now()
        # Reject when the user is inside their personal cooldown or the
        # shared bucket is empty.
        if ((LAST_WAIT and wrapper.source in LAST_WAIT and LAST_WAIT[wrapper.source] +
                timedelta(seconds=var.WAIT_RATE_LIMIT) > now) or WAIT_TOKENS < 1):
            wrapper.pm(messages["command_ratelimited"])
            return
        LAST_WAIT[wrapper.source] = now
        WAIT_TOKENS -= 1
        # Restart the window from "now" if CAN_START_TIME is already past.
        if now > var.CAN_START_TIME:
            var.CAN_START_TIME = now + timedelta(seconds=var.EXTRA_WAIT)
        else:
            var.CAN_START_TIME += timedelta(seconds=var.EXTRA_WAIT)
        wrapper.send(messages["wait_time_increase"].format(wrapper.source, var.EXTRA_WAIT))
@command("fwait", flag="w", phases=("join",))
def fwait(var, wrapper, message):
    """Force an increase (or decrease) in wait time; accepts an optional signed number of seconds."""
    pl = get_players()
    msg = re.split(" +", message.strip(), 1)[0]
    # Accept a signed integer argument; anything else uses the default delta.
    if msg and (msg.isdigit() or (msg[0] == "-" and msg[1:].isdigit())):
        extra = int(msg)
    else:
        extra = var.EXTRA_WAIT
    now = datetime.now()
    extra = max(-900, min(900, extra))  # clamp to +/- 15 minutes
    # Restart the window from "now" if CAN_START_TIME is already past.
    if now > var.CAN_START_TIME:
        var.CAN_START_TIME = now + timedelta(seconds=extra)
    else:
        var.CAN_START_TIME += timedelta(seconds=extra)
    if extra >= 0:
        wrapper.send(messages["forced_wait_time_increase"].format(wrapper.source, abs(extra)))
    else:
        wrapper.send(messages["forced_wait_time_decrease"].format(wrapper.source, abs(extra)))
@command("start", phases=("none", "join"))
def start_cmd(var, wrapper, message):
    """Start a game of Werewolf (only honored in the main channel)."""
    if wrapper.target is channels.Main:
        start(var, wrapper)
@command("fstart", flag="S", phases=("join",))
def fstart(var, wrapper, message):
    """Force the game to start immediately."""
    # Announce first, then retarget to the main channel so start() runs there.
    channels.Main.send(messages["fstart_success"].format(wrapper.source))
    wrapper.target = channels.Main
    start(var, wrapper, forced=True)
@command("retract", phases=("day", "join"))
def retract(var, wrapper, message):
    """Take back your vote (here: the join-phase !start vote)."""
    if wrapper.source not in get_players() or wrapper.source in var.DISCONNECTED:
        return
    with var.GRAVEYARD_LOCK, var.WARNING_LOCK:
        if var.PHASE == "join":
            if wrapper.source not in START_VOTES:
                wrapper.pm(messages["start_novote"])
            else:
                START_VOTES.discard(wrapper.source)
                wrapper.send(messages["start_retract"].format(wrapper.source))
                # Last start vote gone: cancel the expiry timer.
                if not START_VOTES:
                    var.TIMERS["start_votes"][0].cancel()
                    del var.TIMERS["start_votes"]
@event_listener("del_player")
def on_del_player(evt, var, player, all_roles, death_triggers):
    """During the join phase, drop a departing player's pending !start vote."""
    if var.PHASE == "join":
        with var.WARNING_LOCK:
            START_VOTES.discard(player)
            # Cancel the expiry timer once no start votes remain.
            if not START_VOTES and "start_votes" in var.TIMERS:
                var.TIMERS["start_votes"][0].cancel()
                del var.TIMERS["start_votes"]
def start(var, wrapper, *, forced=False, restart=""):
    """Attempt to start (or restart) a game.

    Validates rate limits, player counts and pending !start votes, picks a
    game mode (vote-based unless one was forced), performs role attribution,
    then transitions into the first night (or day when START_WITH_DAY).

    ``forced`` bypasses the vote/wait checks (admin !fstart); ``restart``
    carries the game mode name when the game is being restarted.
    """
    # Per-user rate limit on !start attempts (does not apply to restarts).
    if (not forced and LAST_START and wrapper.source in LAST_START and
            LAST_START[wrapper.source][0] + timedelta(seconds=var.START_RATE_LIMIT) >
            datetime.now() and not restart):
        LAST_START[wrapper.source][1] += 1
        wrapper.source.send(messages["command_ratelimited"])
        return
    if restart:
        global RESTART_TRIES
        RESTART_TRIES += 1
    if RESTART_TRIES > MAX_RETRIES:
        from src.wolfgame import stop_game
        stop_game(var, abort=True)
        return
    if not restart:
        LAST_START[wrapper.source] = [datetime.now(), 1]
    villagers = get_players()
    vils = set(get_players())
    if not restart:
        if var.PHASE == "none":
            wrapper.source.send(messages["no_game_running"])
            return
        if var.PHASE != "join":
            wrapper.source.send(messages["werewolf_already_running"])
            return
        if wrapper.source not in villagers and not forced:
            return
        now = datetime.now()
        var.GAME_START_TIME = now
        dur = int((var.CAN_START_TIME - now).total_seconds())
        if dur > 0 and not forced:
            wrapper.send(messages["please_wait"].format(dur))
            return
        if len(villagers) < var.MIN_PLAYERS:
            wrapper.send(messages["not_enough_players"].format(wrapper.source, var.MIN_PLAYERS))
            return
        if len(villagers) > var.MAX_PLAYERS:
            # Bug fix: this previously called wrapper.send.send(...), which
            # raised AttributeError instead of reporting the player cap.
            wrapper.send(messages["max_players"].format(wrapper.source, var.MAX_PLAYERS))
            return
        with var.WARNING_LOCK:
            if not forced and wrapper.source in START_VOTES:
                wrapper.pm(messages["start_already_voted"])
                return
            start_votes_required = min(math.ceil(len(villagers) * var.START_VOTES_SCALE), var.START_VOTES_MAX)
            if not forced and len(START_VOTES) < start_votes_required:
                # Checked here to make sure that a player that has already voted can't
                # push the count over the threshold by voting again.
                if len(START_VOTES) < start_votes_required - 1:
                    START_VOTES.add(wrapper.source)
                    remaining_votes = start_votes_required - len(START_VOTES)
                    wrapper.send(messages["start_voted"].format(wrapper.source, remaining_votes))
                    # First vote: votes expire after 60s unless the threshold is met.
                    if len(START_VOTES) == 1:
                        t = threading.Timer(60, expire_start_votes, (var, wrapper.target))
                        var.TIMERS["start_votes"] = (t, time.time(), 60)
                        t.daemon = True
                        t.start()
                    return
        if not var.FGAMED:
            # Tally game mode votes that are valid for the current player count.
            votes = {}
            for gamemode in var.GAMEMODE_VOTES.values():
                if len(villagers) >= var.GAME_MODES[gamemode][1] and len(villagers) <= var.GAME_MODES[gamemode][2]:
                    votes[gamemode] = votes.get(gamemode, 0) + 1
            # A mode wins outright with at least half the players behind it.
            voted = [gamemode for gamemode in votes if votes[gamemode] == max(votes.values()) and votes[gamemode] >= len(villagers)/2]
            if voted:
                from src.wolfgame import cgamemode
                cgamemode(random.choice(voted))
            else:
                # Weighted random pick among eligible voted modes, with a chance
                # proportional to non-voters of falling back to the default pool.
                possiblegamemodes = []
                numvotes = 0
                for gamemode, num in votes.items():
                    if len(villagers) < var.GAME_MODES[gamemode][1] or len(villagers) > var.GAME_MODES[gamemode][2] or var.GAME_MODES[gamemode][3] == 0:
                        continue
                    possiblegamemodes += [gamemode] * num
                    numvotes += num
                if len(villagers) - numvotes > 0:
                    possiblegamemodes += [None] * ((len(villagers) - numvotes) // 2)
                gamemode = random.choice(possiblegamemodes)
                if gamemode is None:
                    possiblegamemodes = []
                    for gamemode in var.GAME_MODES.keys() - var.DISABLED_GAMEMODES:
                        if len(villagers) >= var.GAME_MODES[gamemode][1] and len(villagers) <= var.GAME_MODES[gamemode][2] and var.GAME_MODES[gamemode][3] > 0:
                            possiblegamemodes += [gamemode] * var.GAME_MODES[gamemode][3]
                    gamemode = random.choice(possiblegamemodes)
                from src.wolfgame import cgamemode
                cgamemode(gamemode)
    else:
        # Restarting: reuse the game mode that was active before.
        from src.wolfgame import cgamemode
        cgamemode(restart)
    var.GAME_ID = time.time()
    from src.wolfgame import chk_win_conditions
    event = Event("role_attribution", {"addroles": Counter()})
    if event.dispatch(var, chk_win_conditions, villagers):
        addroles = event.data["addroles"]
        strip = lambda x: re.sub(r"\(.*\)", "", x)
        lv = len(villagers)
        roles = []
        for num, rolelist in var.CURRENT_GAMEMODE.ROLE_GUIDE.items():
            if num <= lv:
                roles.extend(rolelist)
        defroles = Counter(strip(x) for x in roles)
        # "-rolename" entries subtract previously-added copies of that role.
        for role, count in list(defroles.items()):
            if role[0] == "-":
                srole = role[1:]
                defroles[srole] -= count
                del defroles[role]
                if defroles[srole] == 0:
                    del defroles[srole]
        if not defroles:
            wrapper.send(messages["no_settings_defined"].format(wrapper.source, lv))
            return
        for role, num in defroles.items():
            addroles[role] = max(addroles.get(role, num), len(var.FORCE_ROLES.get(role, ())))
        if sum(addroles[r] for r in addroles if r not in var.CURRENT_GAMEMODE.SECONDARY_ROLES) > lv:
            wrapper.send(messages["too_many_roles"])
            return
        for role in All:
            addroles.setdefault(role, 0)
    else:
        addroles = event.data["addroles"]
    # Expand role-set aggregates into concrete roles, tracking every possible
    # expansion so that !stats can report accurate information later.
    possible_rolesets = [Counter()]
    roleset_roles = defaultdict(int)
    var.CURRENT_GAMEMODE.ACTIVE_ROLE_SETS = {}
    for role, amt in list(addroles.items()):
        if role not in var.CURRENT_GAMEMODE.ROLE_SETS:
            for pr in possible_rolesets:
                pr[role] += amt
            continue
        # but do keep track of the sets in use so we can have !stats reflect proper information
        var.CURRENT_GAMEMODE.ACTIVE_ROLE_SETS[role] = amt
        del addroles[role]
        # init !stats with all 0s so that it can number things properly; the keys need to exist in the Counter
        # across every possible roleset so that !stats works right
        rs = Counter(var.CURRENT_GAMEMODE.ROLE_SETS[role])
        for r in rs:
            for pr in possible_rolesets:
                pr[r] += 0
        toadd = random.sample(list(rs.elements()), amt)
        for r in toadd:
            addroles[r] += 1
            roleset_roles[r] += 1
        add_rolesets = []
        temp_rolesets = []
        for c in itertools.combinations(rs.elements(), amt):
            add_rolesets.append(Counter(c))
        for pr in possible_rolesets:
            for ar in add_rolesets:
                temp = Counter(pr)
                temp.update(ar)
                temp_rolesets.append(temp)
        possible_rolesets = temp_rolesets
    if var.ORIGINAL_SETTINGS and not restart:  # Custom settings
        need_reset = True
        wvs = sum(addroles[r] for r in Wolfchat)
        if len(villagers) < (sum(addroles.values()) - sum(addroles[r] for r in var.CURRENT_GAMEMODE.SECONDARY_ROLES)):
            wrapper.send(messages["too_few_players_custom"])
        elif not wvs and var.CURRENT_GAMEMODE.name != "villagergame":
            wrapper.send(messages["need_one_wolf"])
        elif wvs > (len(villagers) / 2):
            wrapper.send(messages["too_many_wolves"])
        else:
            need_reset = False
        if need_reset:
            from src.wolfgame import reset_settings
            reset_settings()
            wrapper.send(messages["default_reset"])
            var.PHASE = "join"
            return
    if var.ADMIN_TO_PING is not None and not restart:
        # Disable !join and !start once the game is underway.
        for decor in (COMMANDS["join"] + COMMANDS["start"]):
            decor(_command_disabled)
    var.ROLES.clear()
    var.MAIN_ROLES.clear()
    var.NIGHT_COUNT = 0
    var.DAY_COUNT = 0
    var.FINAL_ROLES.clear()
    var.EXTRA_WOLVES = 0
    var.DEADCHAT_PLAYERS.clear()
    var.SPECTATING_WOLFCHAT.clear()
    var.SPECTATING_DEADCHAT.clear()
    for role in All:
        var.ROLES[role] = UserSet()
    var.ROLES[var.DEFAULT_ROLE] = UserSet()
    for role, ps in var.FORCE_ROLES.items():
        if role not in var.CURRENT_GAMEMODE.SECONDARY_ROLES.keys():
            vils.difference_update(ps)
    for role, count in addroles.items():
        if role in var.CURRENT_GAMEMODE.SECONDARY_ROLES:
            var.ROLES[role] = (None,) * count
            continue  # We deal with those later, see below
        to_add = set()
        if role in var.FORCE_ROLES:
            if len(var.FORCE_ROLES[role]) > count:
                channels.Main.send(messages["error_frole_too_many"].format(role))
                return
            for user in var.FORCE_ROLES[role]:
                # If multiple main roles were forced, only first one is put in MAIN_ROLES
                if user not in var.MAIN_ROLES:
                    var.MAIN_ROLES[user] = role
                    var.ORIGINAL_MAIN_ROLES[user] = role
                to_add.add(user)
                count -= 1
        # random.sample requires a sequence (sampling a set is deprecated in
        # Python 3.9 and removed in 3.11), hence the list() conversion.
        selected = random.sample(list(vils), count)
        for x in selected:
            var.MAIN_ROLES[x] = role
            var.ORIGINAL_MAIN_ROLES[x] = role
            vils.remove(x)
        var.ROLES[role].update(selected)
        var.ROLES[role].update(to_add)
    var.ROLES[var.DEFAULT_ROLE].update(vils)
    for x in vils:
        var.MAIN_ROLES[x] = var.DEFAULT_ROLE
        var.ORIGINAL_MAIN_ROLES[x] = var.DEFAULT_ROLE
    if vils:
        for pr in possible_rolesets:
            pr[var.DEFAULT_ROLE] += len(vils)
    # Collapse possible_rolesets into var.ROLE_STATS
    # which is a FrozenSet[FrozenSet[Tuple[str, int]]]
    possible_rolesets_set = set()
    event = Event("reconfigure_stats", {"new": []})
    for pr in possible_rolesets:
        event.data["new"] = [pr]
        event.dispatch(var, pr, "start")
        for v in event.data["new"]:
            if min(v.values()) >= 0:
                possible_rolesets_set.add(frozenset(v.items()))
    var.ROLE_STATS = frozenset(possible_rolesets_set)
    # Now for the secondary roles
    for role, dfn in var.CURRENT_GAMEMODE.SECONDARY_ROLES.items():
        count = len(var.ROLES[role])
        var.ROLES[role] = UserSet()
        if role in var.FORCE_ROLES:
            ps = var.FORCE_ROLES[role]
            var.ROLES[role].update(ps)
            count -= len(ps)
        # Don't do anything further if this secondary role was forced on enough players already
        if count <= 0:
            continue
        possible = get_players(dfn)
        if len(possible) < count:
            wrapper.send(messages["not_enough_targets"].format(role))
            if var.ORIGINAL_SETTINGS:
                from src.wolfgame import reset_settings
                var.ROLES.clear()
                var.ROLES["person"] = UserSet(var.ALL_PLAYERS)
                reset_settings()
                wrapper.send(messages["default_reset"])
                var.PHASE = "join"
                return
            else:
                wrapper.send(messages["role_skipped"])
                continue
        var.ROLES[role].update(random.sample(possible, count))
    with var.WARNING_LOCK:
        for name in ("join", "join_pinger", "start_votes"):
            if name in var.TIMERS:
                var.TIMERS[name][0].cancel()
                del var.TIMERS[name]
    var.LAST_STATS = None
    var.LAST_TIME = None
    for role, players in var.ROLES.items():
        for player in players:
            evt = Event("new_role", {"messages": [], "role": role, "in_wolfchat": False}, inherit_from=None)
            evt.dispatch(var, player, None)
    if not restart:
        gamemode = var.CURRENT_GAMEMODE.name
        if gamemode == "villagergame":
            gamemode = "default"
        options = []
        if var.ORIGINAL_SETTINGS.get("ROLE_REVEAL") is not None:
            options.append(messages["gso_rr_{0}".format(var.ROLE_REVEAL)])
        if var.ORIGINAL_SETTINGS.get("STATS_TYPE") is not None:
            options.append(messages["gso_st_{0}".format(var.STATS_TYPE)])
        if var.ORIGINAL_SETTINGS.get("ABSTAIN_ENABLED") is not None or var.ORIGINAL_SETTINGS.get("LIMIT_ABSTAIN") is not None:
            if var.ABSTAIN_ENABLED and var.LIMIT_ABSTAIN:
                options.append(messages["gso_abs_rest"])
            elif var.ABSTAIN_ENABLED:
                options.append(messages["gso_abs_unrest"])
            else:
                options.append(messages["gso_abs_none"])
        key = "welcome_simple"
        if options:
            key = "welcome_options"
        wrapper.send(messages[key].format(villagers, gamemode, options))
        wrapper.target.mode("+m")
    var.ORIGINAL_ROLES.clear()
    for role, players in var.ROLES.items():
        var.ORIGINAL_ROLES[role] = players.copy()
    var.DAY_TIMEDELTA = timedelta(0)
    var.NIGHT_TIMEDELTA = timedelta(0)
    var.DAY_START_TIME = datetime.now()
    var.NIGHT_START_TIME = datetime.now()
    var.LAST_PING = None
    if restart:
        var.PHASE = "join"
    if not var.START_WITH_DAY:
        from src.wolfgame import transition_night
        var.GAMEPHASE = "day"
        transition_night()
    else:
        from src.wolfgame import transition_day
        var.FIRST_DAY = True
        var.GAMEPHASE = "night"
        transition_day()
    decrement_stasis()
    if not (botconfig.DEBUG_MODE and var.DISABLE_DEBUG_MODE_REAPER):
        # DEATH TO IDLERS!
        from src.wolfgame import reaper
        reapertimer = threading.Thread(None, reaper, args=(wrapper.client, var.GAME_ID))
        reapertimer.daemon = True
        reapertimer.start()
def _command_disabled(var, wrapper, message):
    """Replacement handler installed over !join/!start once the game begins."""
    wrapper.send(messages["command_disabled_admin"])
@handle_error
def expire_start_votes(var, channel):
    """Timer callback: clear all pending !start votes after the timeout."""
    # Should never happen as the timer is removed on game start, but just to be safe
    if var.PHASE != "join":
        return
    with var.WARNING_LOCK:
        START_VOTES.clear()
        channel.send(messages["start_expired"])
@event_listener("reset")
def on_reset(evt, var):
    """Clear all module-level pregame state when the game state is reset."""
    global MAX_RETRIES, WAIT_TOKENS, WAIT_LAST
    # Drop all per-player bookkeeping.
    for tracker in (LAST_START, LAST_WAIT, START_VOTES):
        tracker.clear()
    # Reset the scalar counters.
    MAX_RETRIES = 0
    WAIT_TOKENS = 0
    WAIT_LAST = 0
| true | true |
f720e349ea77eb354bef27e43be8e0b0f558fa43 | 3,840 | py | Python | wes_service/util.py | SamarthVP/workflow-service | a4a557ca17a38c1e8642983c2d3af6b6325da0f8 | [
"Apache-2.0"
] | 2 | 2020-02-14T18:41:08.000Z | 2020-02-17T06:56:10.000Z | wes_service/util.py | Sage-Bionetworks/workflow-service | 8b5dc0afe9ea0972014cdf48a693ee6f893cfe5e | [
"Apache-2.0"
] | 9 | 2021-03-31T19:32:52.000Z | 2022-02-26T23:21:38.000Z | wes_service/util.py | Sage-Bionetworks/workflow-service | 8b5dc0afe9ea0972014cdf48a693ee6f893cfe5e | [
"Apache-2.0"
] | 2 | 2020-02-12T23:21:35.000Z | 2020-06-02T14:50:31.000Z | import tempfile
import json
import os
import logging
from six import itervalues, iterlists
import connexion
from werkzeug.utils import secure_filename
def visit(d, op):
    """Recursively call op(d) for all list subelements and dictionary 'values' that d may have.

    ``op`` is applied to ``d`` itself first, then depth-first to every list
    element and every dict value. Dict keys are not visited.
    """
    op(d)
    if isinstance(d, list):
        for item in d:
            visit(item, op)
    elif isinstance(d, dict):
        # Python 3: d.values() replaces the six.itervalues() compatibility shim.
        for value in d.values():
            visit(value, op)
class WESBackend(object):
    """Stores and retrieves options. Intended to be inherited."""
    def __init__(self, opts):
        """Parse and store options as a list of tuples.

        Each option is expected to look like ``key=value``; only the first
        ``=`` splits, so values may themselves contain ``=``.
        """
        self.pairs = []
        for o in opts if opts else []:
            k, v = o.split("=", 1)
            self.pairs.append((k, v))
    def getopt(self, p, default=None):
        """Returns the first option value stored that matches p or default."""
        for k, v in self.pairs:
            if k == p:
                return v
        return default
    def getoptlist(self, p):
        """Returns all option values stored that match p as a list."""
        optlist = []
        for k, v in self.pairs:
            if k == p:
                optlist.append(v)
        return optlist
    def log_for_run(self, run_id, message):
        """Log a message tagged with the workflow run id."""
        logging.info("Workflow %s: %s", run_id, message)
    def collect_attachments(self, run_id=None):
        """Stage uploaded files and parse form fields of the current request.

        Must be called inside a request context (reads ``connexion.request``).
        ``workflow_attachment`` uploads are written into a fresh temporary
        directory; JSON-valued fields are decoded.

        Returns a ``(tempdir, body)`` tuple. Raises ValueError on malformed
        parameters or a missing/invalid ``workflow_url``/``workflow_params``.
        """
        tempdir = tempfile.mkdtemp()
        body = {}
        has_attachments = False
        for k, ls in iterlists(connexion.request.files):
            try:
                for v in ls:
                    if k == "workflow_attachment":
                        # Sanitize each path component so uploads stay inside tempdir.
                        sp = v.filename.split("/")
                        fn = []
                        for p in sp:
                            if p not in ("", ".", ".."):
                                fn.append(secure_filename(p))
                        dest = os.path.join(tempdir, *fn)
                        if not os.path.isdir(os.path.dirname(dest)):
                            os.makedirs(os.path.dirname(dest))
                        self.log_for_run(run_id, "Staging attachment '%s' to '%s'" % (v.filename, dest))
                        v.save(dest)
                        has_attachments = True
                        body[k] = "file://%s" % tempdir  # Reference to temp working dir.
                    elif k in ("workflow_params", "tags", "workflow_engine_parameters"):
                        content = v.read()
                        body[k] = json.loads(content.decode("utf-8"))
                    else:
                        body[k] = v.read().decode()
            except Exception as e:
                raise ValueError("Error reading parameter '%s': %s" % (k, e))
        for k, ls in iterlists(connexion.request.form):
            try:
                for v in ls:
                    if not v:
                        continue
                    if k in ("workflow_params", "tags", "workflow_engine_parameters"):
                        body[k] = json.loads(v)
                    else:
                        body[k] = v
            except Exception as e:
                raise ValueError("Error reading parameter '%s': %s" % (k, e))
        if "workflow_url" in body:
            if ":" not in body["workflow_url"]:
                # A relative workflow_url can only point at a staged attachment.
                if not has_attachments:
                    raise ValueError("Relative 'workflow_url' but missing 'workflow_attachment'")
                body["workflow_url"] = "file://%s" % os.path.join(tempdir, secure_filename(body["workflow_url"]))
            self.log_for_run(run_id, "Using workflow_url '%s'" % body.get("workflow_url"))
        else:
            raise ValueError("Missing 'workflow_url' in submission")
        if "workflow_params" not in body:
            raise ValueError("Missing 'workflow_params' in submission")
        return tempdir, body
| 38.019802 | 113 | 0.507552 | import tempfile
import json
import os
import logging
from six import itervalues, iterlists
import connexion
from werkzeug.utils import secure_filename
def visit(d, op):
op(d)
if isinstance(d, list):
for i in d:
visit(i, op)
elif isinstance(d, dict):
for i in itervalues(d):
visit(i, op)
class WESBackend(object):
def __init__(self, opts):
self.pairs = []
for o in opts if opts else []:
k, v = o.split("=", 1)
self.pairs.append((k, v))
def getopt(self, p, default=None):
for k, v in self.pairs:
if k == p:
return v
return default
def getoptlist(self, p):
optlist = []
for k, v in self.pairs:
if k == p:
optlist.append(v)
return optlist
def log_for_run(self, run_id, message):
logging.info("Workflow %s: %s", run_id, message)
def collect_attachments(self, run_id=None):
tempdir = tempfile.mkdtemp()
body = {}
has_attachments = False
for k, ls in iterlists(connexion.request.files):
try:
for v in ls:
if k == "workflow_attachment":
sp = v.filename.split("/")
fn = []
for p in sp:
if p not in ("", ".", ".."):
fn.append(secure_filename(p))
dest = os.path.join(tempdir, *fn)
if not os.path.isdir(os.path.dirname(dest)):
os.makedirs(os.path.dirname(dest))
self.log_for_run(run_id, "Staging attachment '%s' to '%s'" % (v.filename, dest))
v.save(dest)
has_attachments = True
body[k] = "file://%s" % tempdir
elif k in ("workflow_params", "tags", "workflow_engine_parameters"):
content = v.read()
body[k] = json.loads(content.decode("utf-8"))
else:
body[k] = v.read().decode()
except Exception as e:
raise ValueError("Error reading parameter '%s': %s" % (k, e))
for k, ls in iterlists(connexion.request.form):
try:
for v in ls:
if not v:
continue
if k in ("workflow_params", "tags", "workflow_engine_parameters"):
body[k] = json.loads(v)
else:
body[k] = v
except Exception as e:
raise ValueError("Error reading parameter '%s': %s" % (k, e))
if "workflow_url" in body:
if ":" not in body["workflow_url"]:
if not has_attachments:
raise ValueError("Relative 'workflow_url' but missing 'workflow_attachment'")
body["workflow_url"] = "file://%s" % os.path.join(tempdir, secure_filename(body["workflow_url"]))
self.log_for_run(run_id, "Using workflow_url '%s'" % body.get("workflow_url"))
else:
raise ValueError("Missing 'workflow_url' in submission")
if "workflow_params" not in body:
raise ValueError("Missing 'workflow_params' in submission")
return tempdir, body
| true | true |
f720e41f86ef851d3645b1502f4b7c42729748ba | 27,550 | py | Python | autosklearn/smbo.py | a1rb4Ck/auto-sklearn | cdf48b82632927ec56c8c14258c0bfc4c6b2e7d1 | [
"BSD-3-Clause"
] | null | null | null | autosklearn/smbo.py | a1rb4Ck/auto-sklearn | cdf48b82632927ec56c8c14258c0bfc4c6b2e7d1 | [
"BSD-3-Clause"
] | null | null | null | autosklearn/smbo.py | a1rb4Ck/auto-sklearn | cdf48b82632927ec56c8c14258c0bfc4c6b2e7d1 | [
"BSD-3-Clause"
] | null | null | null | import json
import os
import time
import traceback
import warnings
import numpy as np
import pynisher
from smac.facade.smac_facade import SMAC
from smac.optimizer.objective import average_cost
from smac.runhistory.runhistory import RunHistory
from smac.runhistory.runhistory2epm import RunHistory2EPM4Cost
from smac.scenario.scenario import Scenario
from smac.tae.execute_ta_run import StatusType
from smac.optimizer import pSMAC
import autosklearn.metalearning
from autosklearn.constants import MULTILABEL_CLASSIFICATION, \
BINARY_CLASSIFICATION, TASK_TYPES_TO_STRING, CLASSIFICATION_TASKS, \
REGRESSION_TASKS, MULTICLASS_CLASSIFICATION, REGRESSION
from autosklearn.metalearning.mismbo import suggest_via_metalearning
from autosklearn.data.abstract_data_manager import AbstractDataManager
from autosklearn.data.competition_data_manager import CompetitionDataManager
from autosklearn.evaluation import ExecuteTaFuncWithQueue, WORST_POSSIBLE_RESULT
from autosklearn.util import get_logger
from autosklearn.metalearning.metalearning.meta_base import MetaBase
from autosklearn.metalearning.metafeatures.metafeatures import \
calculate_all_metafeatures_with_labels, calculate_all_metafeatures_encoded_labels
# Metafeatures skipped for classification datasets: landmarking features
# (they fit models, which is expensive) and PCA-derived features.
EXCLUDE_META_FEATURES_CLASSIFICATION = {
    'Landmark1NN',
    'LandmarkDecisionNodeLearner',
    'LandmarkDecisionTree',
    'LandmarkLDA',
    'LandmarkNaiveBayes',
    'PCAFractionOfComponentsFor95PercentVariance',
    'PCAKurtosisFirstPC',
    'PCASkewnessFirstPC',
    'PCA'
}
# Metafeatures skipped for regression datasets: the classification exclusions
# plus every class-based metafeature, which is undefined without class labels.
EXCLUDE_META_FEATURES_REGRESSION = {
    'Landmark1NN',
    'LandmarkDecisionNodeLearner',
    'LandmarkDecisionTree',
    'LandmarkLDA',
    'LandmarkNaiveBayes',
    'PCAFractionOfComponentsFor95PercentVariance',
    'PCAKurtosisFirstPC',
    'PCASkewnessFirstPC',
    'NumberOfClasses',
    'ClassOccurences',
    'ClassProbabilityMin',
    'ClassProbabilityMax',
    'ClassProbabilityMean',
    'ClassProbabilitySTD',
    'ClassEntropy',
    'LandmarkRandomNodeLearner',
    'PCA',
}
# dataset helpers
def load_data(dataset_info, backend, max_mem=None):
    """Return a data manager, preferring the backend's cached copy.

    Falls back to constructing a CompetitionDataManager from
    ``dataset_info`` when the backend has nothing stored (IOError),
    optionally capping its memory usage at ``max_mem`` megabytes.
    """
    try:
        datamanager = backend.load_datamanager()
    except IOError:
        # Datamanager probably doesn't exist
        datamanager = None
    if datamanager is not None:
        return datamanager
    if max_mem is None:
        return CompetitionDataManager(dataset_info)
    return CompetitionDataManager(dataset_info, max_memory_in_mb=max_mem)
# metalearning helpers
def _calculate_metafeatures(data_feat_type, data_info_task, basename,
                            x_train, y_train, watcher, logger):
    """Compute dataset metafeatures on the raw training data.

    Only runs for the supported task types; otherwise returns None.
    Timing is recorded on ``watcher`` under 'CalculateMetafeatures'.
    Returns the metafeature result object with non-METAFEATURE entries
    (helper values) removed, or None.
    """
    # == Calculate metafeatures
    task_name = 'CalculateMetafeatures'
    watcher.start_task(task_name)
    # Idiomatic boolean comprehension (was: True if x in ['categorical'] else False).
    categorical = [feat_type.lower() == 'categorical'
                   for feat_type in data_feat_type]
    EXCLUDE_META_FEATURES = EXCLUDE_META_FEATURES_CLASSIFICATION \
        if data_info_task in CLASSIFICATION_TASKS else EXCLUDE_META_FEATURES_REGRESSION
    if data_info_task in [MULTICLASS_CLASSIFICATION, BINARY_CLASSIFICATION,
                          MULTILABEL_CLASSIFICATION, REGRESSION]:
        logger.info('Start calculating metafeatures for %s', basename)
        result = calculate_all_metafeatures_with_labels(
            x_train, y_train, categorical=categorical,
            dataset_name=basename,
            dont_calculate=EXCLUDE_META_FEATURES, )
        # Drop helper values; only keep actual metafeatures.
        for key in list(result.metafeature_values.keys()):
            if result.metafeature_values[key].type_ != 'METAFEATURE':
                del result.metafeature_values[key]
    else:
        result = None
        logger.info('Metafeatures not calculated')
    watcher.stop_task(task_name)
    logger.info(
        'Calculating Metafeatures (categorical attributes) took %5.2f',
        watcher.wall_elapsed(task_name))
    return result
def _calculate_metafeatures_encoded(basename, x_train, y_train, watcher,
                                    task, logger):
    """Compute metafeatures on the one-hot-encoded training data.

    All columns are treated as numerical (categorical=[False]*n) because the
    data is already encoded. Timing is recorded on ``watcher`` under
    'CalculateMetafeaturesEncoded'. Returns the result object with
    non-METAFEATURE helper entries removed.
    """
    EXCLUDE_META_FEATURES = EXCLUDE_META_FEATURES_CLASSIFICATION \
        if task in CLASSIFICATION_TASKS else EXCLUDE_META_FEATURES_REGRESSION
    task_name = 'CalculateMetafeaturesEncoded'
    watcher.start_task(task_name)
    result = calculate_all_metafeatures_encoded_labels(
        x_train, y_train, categorical=[False] * x_train.shape[1],
        dataset_name=basename, dont_calculate=EXCLUDE_META_FEATURES)
    # Drop helper values; only keep actual metafeatures.
    for key in list(result.metafeature_values.keys()):
        if result.metafeature_values[key].type_ != 'METAFEATURE':
            del result.metafeature_values[key]
    watcher.stop_task(task_name)
    logger.info(
        'Calculating Metafeatures (encoded attributes) took %5.2fsec',
        watcher.wall_elapsed(task_name))
    return result
def _get_metalearning_configurations(meta_base, basename, metric,
                                     configuration_space,
                                     task,
                                     initial_configurations_via_metalearning,
                                     is_sparse,
                                     watcher, logger):
    """Query the metalearning database for warm-start configurations.

    Best-effort: any failure is logged and an empty list is returned, since
    metalearning is an optimization rather than a requirement. Timing is
    recorded on ``watcher`` under 'InitialConfigurations'.
    """
    task_name = 'InitialConfigurations'
    watcher.start_task(task_name)
    try:
        suggestions = suggest_via_metalearning(
            meta_base, basename, metric,
            task,
            is_sparse == 1,
            initial_configurations_via_metalearning
        )
    except Exception as exc:
        # Log and fall back to no suggestions instead of aborting the run.
        logger.error("Error getting metalearning configurations!")
        logger.error(str(exc))
        logger.error(traceback.format_exc())
        suggestions = []
    watcher.stop_task(task_name)
    return suggestions
def _print_debug_info_of_init_configuration(initial_configurations, basename,
time_for_task, logger, watcher):
logger.debug('Initial Configurations: (%d)' % len(initial_configurations))
for initial_configuration in initial_configurations:
logger.debug(initial_configuration)
logger.debug('Looking for initial configurations took %5.2fsec',
watcher.wall_elapsed('InitialConfigurations'))
logger.info(
'Time left for %s after finding initial configurations: %5.2fsec',
basename, time_for_task - watcher.wall_elapsed(basename))
def get_smac_object(
    scenario_dict,
    seed,
    ta,
    backend,
    metalearning_configurations,
    runhistory,
):
    """Build the default SMAC facade object for the optimization run.

    Wires the scenario (including pSMAC input globs for shared mode), the
    target-algorithm runner ``ta``, the metalearning warm-start
    configurations, and a RunHistory2EPM4Cost transformer.
    """
    # In shared mode, read run histories from every parallel SMAC instance.
    scenario_dict['input_psmac_dirs'] = backend.get_smac_output_glob(
        smac_run_id=seed if not scenario_dict['shared-model'] else '*',
    )
    scenario = Scenario(scenario_dict)
    if len(metalearning_configurations) > 0:
        # Always evaluate the default configuration first.
        default_config = scenario.cs.get_default_configuration()
        initial_configurations = [default_config] + metalearning_configurations
    else:
        initial_configurations = None
    rh2EPM = RunHistory2EPM4Cost(
        num_params=len(scenario.cs.get_hyperparameters()),
        scenario=scenario,
        success_states=[
            StatusType.SUCCESS,
            StatusType.MEMOUT,
            StatusType.TIMEOUT,
            # As long as we don't have a model for crashes yet!
            StatusType.CRASHED,
        ],
        impute_censored_data=False,
        impute_state=None,
    )
    return SMAC(
        scenario=scenario,
        rng=seed,
        runhistory2epm=rh2EPM,
        tae_runner=ta,
        initial_configurations=initial_configurations,
        runhistory=runhistory,
        run_id=seed,
    )
class AutoMLSMBO(object):
    def __init__(self, config_space, dataset_name,
                 backend,
                 total_walltime_limit,
                 func_eval_time_limit,
                 memory_limit,
                 metric,
                 watcher, start_num_run=1,
                 data_memory_limit=None,
                 num_metalearning_cfgs=25,
                 config_file=None,
                 seed=1,
                 metadata_directory=None,
                 resampling_strategy='holdout',
                 resampling_strategy_args=None,
                 shared_mode=False,
                 include_estimators=None,
                 exclude_estimators=None,
                 include_preprocessors=None,
                 exclude_preprocessors=None,
                 disable_file_output=False,
                 std_scores=False,
                 smac_scenario_args=None,
                 get_smac_object_callback=None):
        """Store SMBO configuration: dataset info, budgets, resampling,
        estimator/preprocessor restrictions, and SMAC customization hooks.

        ``dataset_name`` may be either a dataset identifier or an already
        constructed data manager (see ``reset_data_manager``). Time limits
        are coerced to int seconds; a dataset-specific logger is created.
        """
        super(AutoMLSMBO, self).__init__()
        # data related
        self.dataset_name = dataset_name
        self.datamanager = None
        self.metric = metric
        self.task = None
        self.backend = backend
        # the configuration space
        self.config_space = config_space
        # Evaluation
        self.resampling_strategy = resampling_strategy
        if resampling_strategy_args is None:
            resampling_strategy_args = {}
        self.resampling_strategy_args = resampling_strategy_args
        # and a bunch of useful limits
        self.total_walltime_limit = int(total_walltime_limit)
        self.func_eval_time_limit = int(func_eval_time_limit)
        self.memory_limit = memory_limit
        self.data_memory_limit = data_memory_limit
        self.watcher = watcher
        self.num_metalearning_cfgs = num_metalearning_cfgs
        self.config_file = config_file
        self.seed = seed
        self.metadata_directory = metadata_directory
        self.start_num_run = start_num_run
        self.shared_mode = shared_mode
        self.include_estimators = include_estimators
        self.exclude_estimators = exclude_estimators
        self.include_preprocessors = include_preprocessors
        self.exclude_preprocessors = exclude_preprocessors
        self.disable_file_output = disable_file_output
        self.std_scores = std_scores
        self.smac_scenario_args = smac_scenario_args
        self.get_smac_object_callback = get_smac_object_callback
        logger_name = '%s(%d):%s' % (self.__class__.__name__, self.seed,
                                     ":" + dataset_name if dataset_name is
                                     not None else "")
        self.logger = get_logger(logger_name)
    def _send_warnings_to_log(self, message, category, filename, lineno,
                              file=None, line=None):
        """Redirect a Python warning to this object's debug log.

        Matches the signature of ``warnings.showwarning`` so it can be
        installed as a replacement inside a ``warnings.catch_warnings`` block.
        """
        self.logger.debug('%s:%s: %s:%s', filename, lineno, category.__name__,
                          message)
def reset_data_manager(self, max_mem=None):
if max_mem is None:
max_mem = self.data_memory_limit
if self.datamanager is not None:
del self.datamanager
if isinstance(self.dataset_name, AbstractDataManager):
self.datamanager = self.dataset_name
else:
self.datamanager = load_data(self.dataset_name,
self.backend,
max_mem=max_mem)
self.task = self.datamanager.info['task']
    def collect_metalearning_suggestions(self, meta_base):
        """Return warm-start configurations suggested by metalearning.

        Queries ``meta_base`` for up to ``self.num_metalearning_cfgs``
        configurations and logs timing/debug information about them.
        """
        metalearning_configurations = _get_metalearning_configurations(
            meta_base=meta_base,
            basename=self.dataset_name,
            metric=self.metric,
            configuration_space=self.config_space,
            task=self.task,
            is_sparse=self.datamanager.info['is_sparse'],
            initial_configurations_via_metalearning=self.num_metalearning_cfgs,
            watcher=self.watcher,
            logger=self.logger)
        _print_debug_info_of_init_configuration(
            metalearning_configurations,
            self.dataset_name,
            self.total_walltime_limit,
            self.logger,
            self.watcher)
        return metalearning_configurations
    def _calculate_metafeatures(self):
        """Compute raw-data metafeatures, routing warnings into the log."""
        with warnings.catch_warnings():
            # Metafeature computation can be noisy; capture warnings as debug logs.
            warnings.showwarning = self._send_warnings_to_log
            meta_features = _calculate_metafeatures(
                data_feat_type=self.datamanager.feat_type,
                data_info_task=self.datamanager.info['task'],
                x_train=self.datamanager.data['X_train'],
                y_train=self.datamanager.data['Y_train'],
                basename=self.dataset_name,
                watcher=self.watcher,
                logger=self.logger)
        return meta_features
    def _calculate_metafeatures_with_limits(self, time_limit):
        """Best-effort metafeature computation under memory/time limits.

        Runs ``_calculate_metafeatures`` inside a pynisher-limited subprocess
        (at least 1 second of wall time). Returns None on any failure.
        """
        res = None
        time_limit = max(time_limit, 1)
        try:
            safe_mf = pynisher.enforce_limits(mem_in_mb=self.memory_limit,
                                              wall_time_in_s=int(time_limit),
                                              grace_period_in_s=30,
                                              logger=self.logger)(
                self._calculate_metafeatures)
            res = safe_mf()
        except Exception as e:
            # Metafeatures are optional; log the failure and continue without them.
            self.logger.error('Error getting metafeatures: %s', str(e))
        return res
    def _calculate_metafeatures_encoded(self):
        """Compute encoded-data metafeatures, routing warnings into the log."""
        with warnings.catch_warnings():
            # Metafeature computation can be noisy; capture warnings as debug logs.
            warnings.showwarning = self._send_warnings_to_log
            meta_features_encoded = _calculate_metafeatures_encoded(
                self.dataset_name,
                self.datamanager.data['X_train'],
                self.datamanager.data['Y_train'],
                self.watcher,
                self.datamanager.info['task'],
                self.logger)
        return meta_features_encoded
    def _calculate_metafeatures_encoded_with_limits(self, time_limit):
        """Best-effort encoded metafeature computation under resource limits.

        Runs ``_calculate_metafeatures_encoded`` inside a pynisher-limited
        subprocess (at least 1 second of wall time). Returns None on failure.
        """
        res = None
        time_limit = max(time_limit, 1)
        try:
            safe_mf = pynisher.enforce_limits(mem_in_mb=self.memory_limit,
                                              wall_time_in_s=int(time_limit),
                                              grace_period_in_s=30,
                                              logger=self.logger)(
                self._calculate_metafeatures_encoded)
            res = safe_mf()
        except Exception as e:
            # Metafeatures are optional; log the failure and continue without them.
            self.logger.error('Error getting metafeatures (encoded) : %s',
                              str(e))
        return res
    def run_smbo(self):
        """Run Bayesian optimization with SMAC over the configuration space.

        Loads the data manager, collects metalearning warm-start
        configurations, builds the target-algorithm runner and the SMAC
        scenario (honoring ``smac_scenario_args`` overrides), optimizes until
        the wallclock budget is exhausted, and returns the resulting
        ``(runhistory, trajectory)`` pair.
        """
        self.watcher.start_task('SMBO')
        # == first things first: load the datamanager
        self.reset_data_manager()
        # == Initialize non-SMBO stuff
        # first create a scenario
        seed = self.seed
        self.config_space.seed(seed)
        # NOTE(review): num_params is computed but not used below.
        num_params = len(self.config_space.get_hyperparameters())
        # allocate a run history
        num_run = self.start_num_run
        # Initialize some SMAC dependencies
        metalearning_configurations = self.get_metalearning_suggestions()
        # Partial-CV strategies need one SMAC instance per fold.
        if self.resampling_strategy in ['partial-cv',
                                        'partial-cv-iterative-fit']:
            num_folds = self.resampling_strategy_args['folds']
            instances = [[json.dumps({'task_id': self.dataset_name,
                                      'fold': fold_number})]
                         for fold_number in range(num_folds)]
        else:
            instances = [[json.dumps({'task_id': self.dataset_name})]]
        # TODO rebuild target algorithm to be it's own target algorithm
        # evaluator, which takes into account that a run can be killed prior
        # to the model being fully fitted; thus putting intermediate results
        # into a queue and querying them once the time is over
        exclude = dict()
        include = dict()
        if self.include_preprocessors is not None and \
                self.exclude_preprocessors is not None:
            raise ValueError('Cannot specify include_preprocessors and '
                             'exclude_preprocessors.')
        elif self.include_preprocessors is not None:
            include['preprocessor'] = self.include_preprocessors
        elif self.exclude_preprocessors is not None:
            exclude['preprocessor'] = self.exclude_preprocessors
        if self.include_estimators is not None and \
                self.exclude_estimators is not None:
            raise ValueError('Cannot specify include_estimators and '
                             'exclude_estimators.')
        elif self.include_estimators is not None:
            if self.task in CLASSIFICATION_TASKS:
                include['classifier'] = self.include_estimators
            elif self.task in REGRESSION_TASKS:
                include['regressor'] = self.include_estimators
            else:
                raise ValueError(self.task)
        elif self.exclude_estimators is not None:
            if self.task in CLASSIFICATION_TASKS:
                exclude['classifier'] = self.exclude_estimators
            elif self.task in REGRESSION_TASKS:
                exclude['regressor'] = self.exclude_estimators
            else:
                raise ValueError(self.task)
        ta = ExecuteTaFuncWithQueue(backend=self.backend,
                                    autosklearn_seed=seed,
                                    resampling_strategy=self.resampling_strategy,
                                    initial_num_run=num_run,
                                    logger=self.logger,
                                    include=include,
                                    exclude=exclude,
                                    metric=self.metric,
                                    memory_limit=self.memory_limit,
                                    disable_file_output=self.disable_file_output,
                                    std_scores=self.std_scores,
                                    **self.resampling_strategy_args)
        # Budget remaining after startup work (5s safety margin).
        startup_time = self.watcher.wall_elapsed(self.dataset_name)
        total_walltime_limit = self.total_walltime_limit - startup_time - 5
        scenario_dict = {
            'abort_on_first_run_crash': False,
            'cs': self.config_space,
            'cutoff_time': self.func_eval_time_limit,
            'deterministic': 'true',
            'instances': instances,
            'memory_limit': self.memory_limit,
            'output-dir':
                self.backend.get_smac_output_directory(),
            'run_obj': 'quality',
            'shared-model': self.shared_mode,
            'wallclock_limit': total_walltime_limit,
            'cost_for_crash': WORST_POSSIBLE_RESULT,
        }
        if self.smac_scenario_args is not None:
            # These arguments are managed internally and may not be overridden.
            for arg in [
                'abort_on_first_run_crash',
                'cs',
                'deterministic',
                'instances',
                'output-dir',
                'run_obj',
                'shared-model',
                'cost_for_crash',
            ]:
                if arg in self.smac_scenario_args:
                    self.logger.warning('Cannot override scenario argument %s, '
                                        'will ignore this.', arg)
                    del self.smac_scenario_args[arg]
            # These may be overridden, but warn so the user notices.
            for arg in [
                'cutoff_time',
                'memory_limit',
                'wallclock_limit',
            ]:
                if arg in self.smac_scenario_args:
                    self.logger.warning(
                        'Overriding scenario argument %s: %s with value %s',
                        arg,
                        scenario_dict[arg],
                        self.smac_scenario_args[arg]
                    )
            scenario_dict.update(self.smac_scenario_args)
        runhistory = RunHistory(aggregate_func=average_cost)
        smac_args = {
            'scenario_dict': scenario_dict,
            'seed': seed,
            'ta': ta,
            'backend': self.backend,
            'metalearning_configurations': metalearning_configurations,
            'runhistory': runhistory,
        }
        # Allow callers to substitute a custom SMAC construction.
        if self.get_smac_object_callback is not None:
            smac = self.get_smac_object_callback(**smac_args)
        else:
            smac = get_smac_object(**smac_args)
        smac.optimize()
        # Patch SMAC to read in data from parallel runs after the last
        # function evaluation
        if self.shared_mode:
            pSMAC.read(
                run_history=smac.solver.runhistory,
                output_dirs=smac.solver.scenario.input_psmac_dirs,
                configuration_space=smac.solver.config_space,
                logger=smac.solver.logger,
            )
        self.runhistory = smac.solver.runhistory
        self.trajectory = smac.solver.intensifier.traj_logger.trajectory
        return self.runhistory, self.trajectory
def get_metalearning_suggestions(self):
# == METALEARNING suggestions
# we start by evaluating the defaults on the full dataset again
# and add the suggestions from metalearning behind it
if self.num_metalearning_cfgs > 0:
# If metadata directory is None, use default
if self.metadata_directory is None:
metalearning_directory = os.path.dirname(
autosklearn.metalearning.__file__)
# There is no multilabel data in OpenML
if self.task == MULTILABEL_CLASSIFICATION:
meta_task = BINARY_CLASSIFICATION
else:
meta_task = self.task
metadata_directory = os.path.join(
metalearning_directory, 'files',
'%s_%s_%s' % (self.metric, TASK_TYPES_TO_STRING[meta_task],
'sparse' if self.datamanager.info['is_sparse']
else 'dense'))
self.metadata_directory = metadata_directory
# If metadata directory is specified by user,
# then verify that it exists.
else:
if not os.path.exists(self.metadata_directory):
raise ValueError('The specified metadata directory \'%s\' '
'does not exist!' % self.metadata_directory)
else:
# There is no multilabel data in OpenML
if self.task == MULTILABEL_CLASSIFICATION:
meta_task = BINARY_CLASSIFICATION
else:
meta_task = self.task
metadata_directory = os.path.join(
self.metadata_directory,
'%s_%s_%s' % (self.metric, TASK_TYPES_TO_STRING[meta_task],
'sparse' if self.datamanager.info['is_sparse']
else 'dense'))
# Check that the metadata directory has the correct
# subdirectory needed for this dataset.
if os.path.basename(metadata_directory) not in \
os.listdir(self.metadata_directory):
raise ValueError('The specified metadata directory '
'\'%s\' does not have the correct '
'subdirectory \'%s\'' %
(self.metadata_directory,
os.path.basename(metadata_directory))
)
self.metadata_directory = metadata_directory
if os.path.exists(self.metadata_directory):
self.logger.info('Metadata directory: %s',
self.metadata_directory)
meta_base = MetaBase(self.config_space, self.metadata_directory)
metafeature_calculation_time_limit = int(
self.total_walltime_limit / 4)
metafeature_calculation_start_time = time.time()
meta_features = self._calculate_metafeatures_with_limits(
metafeature_calculation_time_limit)
metafeature_calculation_end_time = time.time()
metafeature_calculation_time_limit = \
metafeature_calculation_time_limit - (
metafeature_calculation_end_time -
metafeature_calculation_start_time)
if metafeature_calculation_time_limit < 1:
self.logger.warning(
'Time limit for metafeature calculation less '
'than 1 seconds (%f). Skipping calculation '
'of metafeatures for encoded dataset.',
metafeature_calculation_time_limit)
meta_features_encoded = None
else:
with warnings.catch_warnings():
warnings.showwarning = self._send_warnings_to_log
self.datamanager.perform1HotEncoding()
meta_features_encoded = \
self._calculate_metafeatures_encoded_with_limits(
metafeature_calculation_time_limit)
# In case there is a problem calculating the encoded meta-features
if meta_features is None:
if meta_features_encoded is not None:
meta_features = meta_features_encoded
else:
if meta_features_encoded is not None:
meta_features.metafeature_values.update(
meta_features_encoded.metafeature_values)
if meta_features is not None:
meta_base.add_dataset(self.dataset_name, meta_features)
# Do mean imputation of the meta-features - should be done specific
# for each prediction model!
all_metafeatures = meta_base.get_metafeatures(
features=list(meta_features.keys()))
all_metafeatures.fillna(all_metafeatures.mean(),
inplace=True)
with warnings.catch_warnings():
warnings.showwarning = self._send_warnings_to_log
metalearning_configurations = self.collect_metalearning_suggestions(
meta_base)
if metalearning_configurations is None:
metalearning_configurations = []
self.reset_data_manager()
self.logger.info('%s', meta_features)
# Convert meta-features into a dictionary because the scenario
# expects a dictionary
meta_features_dict = {}
for dataset, series in all_metafeatures.iterrows():
meta_features_dict[dataset] = series.values
meta_features_list = []
for meta_feature_name in all_metafeatures.columns:
meta_features_list.append(
meta_features[meta_feature_name].value)
meta_features_list = np.array(meta_features_list).reshape(
(1, -1))
self.logger.info(list(meta_features_dict.keys()))
else:
meta_features = None
self.logger.warning('Could not find meta-data directory %s' %
metadata_directory)
else:
meta_features = None
if meta_features is None:
meta_features_list = []
metalearning_configurations = []
return metalearning_configurations
| 41.742424 | 92 | 0.59147 | import json
import os
import time
import traceback
import warnings
import numpy as np
import pynisher
from smac.facade.smac_facade import SMAC
from smac.optimizer.objective import average_cost
from smac.runhistory.runhistory import RunHistory
from smac.runhistory.runhistory2epm import RunHistory2EPM4Cost
from smac.scenario.scenario import Scenario
from smac.tae.execute_ta_run import StatusType
from smac.optimizer import pSMAC
import autosklearn.metalearning
from autosklearn.constants import MULTILABEL_CLASSIFICATION, \
BINARY_CLASSIFICATION, TASK_TYPES_TO_STRING, CLASSIFICATION_TASKS, \
REGRESSION_TASKS, MULTICLASS_CLASSIFICATION, REGRESSION
from autosklearn.metalearning.mismbo import suggest_via_metalearning
from autosklearn.data.abstract_data_manager import AbstractDataManager
from autosklearn.data.competition_data_manager import CompetitionDataManager
from autosklearn.evaluation import ExecuteTaFuncWithQueue, WORST_POSSIBLE_RESULT
from autosklearn.util import get_logger
from autosklearn.metalearning.metalearning.meta_base import MetaBase
from autosklearn.metalearning.metafeatures.metafeatures import \
calculate_all_metafeatures_with_labels, calculate_all_metafeatures_encoded_labels
EXCLUDE_META_FEATURES_CLASSIFICATION = {
'Landmark1NN',
'LandmarkDecisionNodeLearner',
'LandmarkDecisionTree',
'LandmarkLDA',
'LandmarkNaiveBayes',
'PCAFractionOfComponentsFor95PercentVariance',
'PCAKurtosisFirstPC',
'PCASkewnessFirstPC',
'PCA'
}
EXCLUDE_META_FEATURES_REGRESSION = {
'Landmark1NN',
'LandmarkDecisionNodeLearner',
'LandmarkDecisionTree',
'LandmarkLDA',
'LandmarkNaiveBayes',
'PCAFractionOfComponentsFor95PercentVariance',
'PCAKurtosisFirstPC',
'PCASkewnessFirstPC',
'NumberOfClasses',
'ClassOccurences',
'ClassProbabilityMin',
'ClassProbabilityMax',
'ClassProbabilityMean',
'ClassProbabilitySTD',
'ClassEntropy',
'LandmarkRandomNodeLearner',
'PCA',
}
def load_data(dataset_info, backend, max_mem=None):
    """Return a data manager, preferring the one cached by *backend*.

    Falls back to constructing a ``CompetitionDataManager`` from
    ``dataset_info`` when the backend has nothing stored or the stored
    manager cannot be read.
    """
    try:
        datamanager = backend.load_datamanager()
    except IOError:
        # Treat an unreadable cache the same as a missing one.
        datamanager = None
    if datamanager is not None:
        return datamanager
    # No cached manager: build one, optionally capping memory usage.
    if max_mem is None:
        return CompetitionDataManager(dataset_info)
    return CompetitionDataManager(dataset_info, max_memory_in_mb=max_mem)
# metalearning helpers
def _calculate_metafeatures(data_feat_type, data_info_task, basename,
                            x_train, y_train, watcher, logger):
    """Compute dataset meta-features on the raw (label) data.

    Returns the result object produced by
    ``calculate_all_metafeatures_with_labels`` with every entry whose
    ``type_`` is not ``'METAFEATURE'`` removed, or ``None`` for task types
    that are not handled (anything outside multiclass/binary/multilabel
    classification and regression).  Timing is recorded on *watcher*
    under the task name ``'CalculateMetafeatures'``.
    """
    # == Calculate metafeatures
    task_name = 'CalculateMetafeatures'
    watcher.start_task(task_name)
    # One boolean per feature: True iff the declared feature type is
    # 'categorical' (case-insensitive).
    categorical = [True if feat_type.lower() in ['categorical'] else False
                   for feat_type in data_feat_type]
    # Classification and regression exclude different meta-feature sets.
    EXCLUDE_META_FEATURES = EXCLUDE_META_FEATURES_CLASSIFICATION \
        if data_info_task in CLASSIFICATION_TASKS else EXCLUDE_META_FEATURES_REGRESSION
    if data_info_task in [MULTICLASS_CLASSIFICATION, BINARY_CLASSIFICATION,
                          MULTILABEL_CLASSIFICATION, REGRESSION]:
        logger.info('Start calculating metafeatures for %s', basename)
        result = calculate_all_metafeatures_with_labels(
            x_train, y_train, categorical=categorical,
            dataset_name=basename,
            dont_calculate=EXCLUDE_META_FEATURES, )
        # Keep only plain meta-feature values; iterate over a copy of the
        # keys because entries are deleted while looping.
        for key in list(result.metafeature_values.keys()):
            if result.metafeature_values[key].type_ != 'METAFEATURE':
                del result.metafeature_values[key]
    else:
        result = None
        logger.info('Metafeatures not calculated')
    watcher.stop_task(task_name)
    logger.info(
        'Calculating Metafeatures (categorical attributes) took %5.2f',
        watcher.wall_elapsed(task_name))
    return result
def _calculate_metafeatures_encoded(basename, x_train, y_train, watcher,
                                    task, logger):
    """Compute meta-features on the one-hot-encoded data.

    All columns are treated as numerical (``categorical=[False] * n``),
    which matches data that has already been one-hot encoded.  Entries
    whose ``type_`` is not ``'METAFEATURE'`` are dropped from the result.
    Timing is recorded on *watcher* under ``'CalculateMetafeaturesEncoded'``.
    """
    # Classification and regression exclude different meta-feature sets.
    EXCLUDE_META_FEATURES = EXCLUDE_META_FEATURES_CLASSIFICATION \
        if task in CLASSIFICATION_TASKS else EXCLUDE_META_FEATURES_REGRESSION
    task_name = 'CalculateMetafeaturesEncoded'
    watcher.start_task(task_name)
    result = calculate_all_metafeatures_encoded_labels(
        x_train, y_train, categorical=[False] * x_train.shape[1],
        dataset_name=basename, dont_calculate=EXCLUDE_META_FEATURES)
    # Iterate over a copy of the keys because entries are deleted in place.
    for key in list(result.metafeature_values.keys()):
        if result.metafeature_values[key].type_ != 'METAFEATURE':
            del result.metafeature_values[key]
    watcher.stop_task(task_name)
    logger.info(
        'Calculating Metafeatures (encoded attributes) took %5.2fsec',
        watcher.wall_elapsed(task_name))
    return result
def _get_metalearning_configurations(meta_base, basename, metric,
                                     configuration_space,
                                     task,
                                     initial_configurations_via_metalearning,
                                     is_sparse,
                                     watcher, logger):
    """Ask the meta-learner for promising starting configurations.

    Delegates to ``suggest_via_metalearning``; any failure is logged
    (message plus traceback) and an empty list is returned instead of
    propagating, so meta-learning problems never abort the optimization.
    Timing is recorded on *watcher* under ``'InitialConfigurations'``.

    NOTE(review): the ``configuration_space`` parameter is currently
    unused in this function body.
    """
    task_name = 'InitialConfigurations'
    watcher.start_task(task_name)
    try:
        metalearning_configurations = suggest_via_metalearning(
            meta_base, basename, metric,
            task,
            is_sparse == 1,
            initial_configurations_via_metalearning
        )
    except Exception as e:
        # Meta-learning is best-effort: log and fall back to no suggestions.
        logger.error("Error getting metalearning configurations!")
        logger.error(str(e))
        logger.error(traceback.format_exc())
        metalearning_configurations = []
    watcher.stop_task(task_name)
    return metalearning_configurations
def _print_debug_info_of_init_configuration(initial_configurations, basename,
time_for_task, logger, watcher):
logger.debug('Initial Configurations: (%d)' % len(initial_configurations))
for initial_configuration in initial_configurations:
logger.debug(initial_configuration)
logger.debug('Looking for initial configurations took %5.2fsec',
watcher.wall_elapsed('InitialConfigurations'))
logger.info(
'Time left for %s after finding initial configurations: %5.2fsec',
basename, time_for_task - watcher.wall_elapsed(basename))
def get_smac_object(
        scenario_dict,
        seed,
        ta,
        backend,
        metalearning_configurations,
        runhistory,
):
    """Build the default SMAC optimizer used by auto-sklearn.

    The scenario dict is extended with the pSMAC input directories
    (a glob over all runs when 'shared-model' is set, otherwise only this
    seed's output directory).  When meta-learning configurations exist,
    the configuration space's default configuration is prepended to them
    as the initial design; otherwise SMAC's own default behaviour is used.
    """
    scenario_dict['input_psmac_dirs'] = backend.get_smac_output_glob(
        smac_run_id=seed if not scenario_dict['shared-model'] else '*',
    )
    scenario = Scenario(scenario_dict)
    if len(metalearning_configurations) > 0:
        default_config = scenario.cs.get_default_configuration()
        initial_configurations = [default_config] + metalearning_configurations
    else:
        # None lets SMAC pick its own initial design.
        initial_configurations = None
    # Translate the run history into training data for the cost model;
    # memout/timeout/crashed runs still count as (penalized) successes.
    rh2EPM = RunHistory2EPM4Cost(
        num_params=len(scenario.cs.get_hyperparameters()),
        scenario=scenario,
        success_states=[
            StatusType.SUCCESS,
            StatusType.MEMOUT,
            StatusType.TIMEOUT,
            # As long as we don't have a model for crashes yet!
            StatusType.CRASHED,
        ],
        impute_censored_data=False,
        impute_state=None,
    )
    return SMAC(
        scenario=scenario,
        rng=seed,
        runhistory2epm=rh2EPM,
        tae_runner=ta,
        initial_configurations=initial_configurations,
        runhistory=runhistory,
        run_id=seed,
    )
class AutoMLSMBO(object):
def __init__(self, config_space, dataset_name,
backend,
total_walltime_limit,
func_eval_time_limit,
memory_limit,
metric,
watcher, start_num_run=1,
data_memory_limit=None,
num_metalearning_cfgs=25,
config_file=None,
seed=1,
metadata_directory=None,
resampling_strategy='holdout',
resampling_strategy_args=None,
shared_mode=False,
include_estimators=None,
exclude_estimators=None,
include_preprocessors=None,
exclude_preprocessors=None,
disable_file_output=False,
std_scores=False,
smac_scenario_args=None,
get_smac_object_callback=None):
super(AutoMLSMBO, self).__init__()
self.dataset_name = dataset_name
self.datamanager = None
self.metric = metric
self.task = None
self.backend = backend
self.config_space = config_space
self.resampling_strategy = resampling_strategy
if resampling_strategy_args is None:
resampling_strategy_args = {}
self.resampling_strategy_args = resampling_strategy_args
self.total_walltime_limit = int(total_walltime_limit)
self.func_eval_time_limit = int(func_eval_time_limit)
self.memory_limit = memory_limit
self.data_memory_limit = data_memory_limit
self.watcher = watcher
self.num_metalearning_cfgs = num_metalearning_cfgs
self.config_file = config_file
self.seed = seed
self.metadata_directory = metadata_directory
self.start_num_run = start_num_run
self.shared_mode = shared_mode
self.include_estimators = include_estimators
self.exclude_estimators = exclude_estimators
self.include_preprocessors = include_preprocessors
self.exclude_preprocessors = exclude_preprocessors
self.disable_file_output = disable_file_output
self.std_scores = std_scores
self.smac_scenario_args = smac_scenario_args
self.get_smac_object_callback = get_smac_object_callback
logger_name = '%s(%d):%s' % (self.__class__.__name__, self.seed,
":" + dataset_name if dataset_name is
not None else "")
self.logger = get_logger(logger_name)
def _send_warnings_to_log(self, message, category, filename, lineno,
file=None, line=None):
self.logger.debug('%s:%s: %s:%s', filename, lineno, category.__name__,
message)
def reset_data_manager(self, max_mem=None):
if max_mem is None:
max_mem = self.data_memory_limit
if self.datamanager is not None:
del self.datamanager
if isinstance(self.dataset_name, AbstractDataManager):
self.datamanager = self.dataset_name
else:
self.datamanager = load_data(self.dataset_name,
self.backend,
max_mem=max_mem)
self.task = self.datamanager.info['task']
def collect_metalearning_suggestions(self, meta_base):
metalearning_configurations = _get_metalearning_configurations(
meta_base=meta_base,
basename=self.dataset_name,
metric=self.metric,
configuration_space=self.config_space,
task=self.task,
is_sparse=self.datamanager.info['is_sparse'],
initial_configurations_via_metalearning=self.num_metalearning_cfgs,
watcher=self.watcher,
logger=self.logger)
_print_debug_info_of_init_configuration(
metalearning_configurations,
self.dataset_name,
self.total_walltime_limit,
self.logger,
self.watcher)
return metalearning_configurations
def _calculate_metafeatures(self):
with warnings.catch_warnings():
warnings.showwarning = self._send_warnings_to_log
meta_features = _calculate_metafeatures(
data_feat_type=self.datamanager.feat_type,
data_info_task=self.datamanager.info['task'],
x_train=self.datamanager.data['X_train'],
y_train=self.datamanager.data['Y_train'],
basename=self.dataset_name,
watcher=self.watcher,
logger=self.logger)
return meta_features
    def _calculate_metafeatures_with_limits(self, time_limit):
        """Run meta-feature calculation under memory and wall-time limits.

        Uses pynisher to enforce ``self.memory_limit`` (MB) and
        *time_limit* seconds (clamped to at least 1 second, with a 30s
        grace period).  Returns the meta-features, or ``None`` if the
        limited call fails for any reason.
        """
        res = None
        time_limit = max(time_limit, 1)
        try:
            safe_mf = pynisher.enforce_limits(mem_in_mb=self.memory_limit,
                                              wall_time_in_s=int(time_limit),
                                              grace_period_in_s=30,
                                              logger=self.logger)(
                self._calculate_metafeatures)
            res = safe_mf()
        except Exception as e:
            # Best-effort: a failed calculation must not abort the run.
            self.logger.error('Error getting metafeatures: %s', str(e))
        return res
def _calculate_metafeatures_encoded(self):
with warnings.catch_warnings():
warnings.showwarning = self._send_warnings_to_log
meta_features_encoded = _calculate_metafeatures_encoded(
self.dataset_name,
self.datamanager.data['X_train'],
self.datamanager.data['Y_train'],
self.watcher,
self.datamanager.info['task'],
self.logger)
return meta_features_encoded
def _calculate_metafeatures_encoded_with_limits(self, time_limit):
res = None
time_limit = max(time_limit, 1)
try:
safe_mf = pynisher.enforce_limits(mem_in_mb=self.memory_limit,
wall_time_in_s=int(time_limit),
grace_period_in_s=30,
logger=self.logger)(
self._calculate_metafeatures_encoded)
res = safe_mf()
except Exception as e:
self.logger.error('Error getting metafeatures (encoded) : %s',
str(e))
return res
def run_smbo(self):
self.watcher.start_task('SMBO')
self.reset_data_manager()
seed = self.seed
self.config_space.seed(seed)
num_params = len(self.config_space.get_hyperparameters())
num_run = self.start_num_run
metalearning_configurations = self.get_metalearning_suggestions()
if self.resampling_strategy in ['partial-cv',
'partial-cv-iterative-fit']:
num_folds = self.resampling_strategy_args['folds']
instances = [[json.dumps({'task_id': self.dataset_name,
'fold': fold_number})]
for fold_number in range(num_folds)]
else:
instances = [[json.dumps({'task_id': self.dataset_name})]]
# evaluator, which takes into account that a run can be killed prior
# to the model being fully fitted; thus putting intermediate results
# into a queue and querying them once the time is over
exclude = dict()
include = dict()
if self.include_preprocessors is not None and \
self.exclude_preprocessors is not None:
raise ValueError('Cannot specify include_preprocessors and '
'exclude_preprocessors.')
elif self.include_preprocessors is not None:
include['preprocessor'] = self.include_preprocessors
elif self.exclude_preprocessors is not None:
exclude['preprocessor'] = self.exclude_preprocessors
if self.include_estimators is not None and \
self.exclude_estimators is not None:
raise ValueError('Cannot specify include_estimators and '
'exclude_estimators.')
elif self.include_estimators is not None:
if self.task in CLASSIFICATION_TASKS:
include['classifier'] = self.include_estimators
elif self.task in REGRESSION_TASKS:
include['regressor'] = self.include_estimators
else:
raise ValueError(self.task)
elif self.exclude_estimators is not None:
if self.task in CLASSIFICATION_TASKS:
exclude['classifier'] = self.exclude_estimators
elif self.task in REGRESSION_TASKS:
exclude['regressor'] = self.exclude_estimators
else:
raise ValueError(self.task)
ta = ExecuteTaFuncWithQueue(backend=self.backend,
autosklearn_seed=seed,
resampling_strategy=self.resampling_strategy,
initial_num_run=num_run,
logger=self.logger,
include=include,
exclude=exclude,
metric=self.metric,
memory_limit=self.memory_limit,
disable_file_output=self.disable_file_output,
std_scores=self.std_scores,
**self.resampling_strategy_args)
startup_time = self.watcher.wall_elapsed(self.dataset_name)
total_walltime_limit = self.total_walltime_limit - startup_time - 5
scenario_dict = {
'abort_on_first_run_crash': False,
'cs': self.config_space,
'cutoff_time': self.func_eval_time_limit,
'deterministic': 'true',
'instances': instances,
'memory_limit': self.memory_limit,
'output-dir':
self.backend.get_smac_output_directory(),
'run_obj': 'quality',
'shared-model': self.shared_mode,
'wallclock_limit': total_walltime_limit,
'cost_for_crash': WORST_POSSIBLE_RESULT,
}
if self.smac_scenario_args is not None:
for arg in [
'abort_on_first_run_crash',
'cs',
'deterministic',
'instances',
'output-dir',
'run_obj',
'shared-model',
'cost_for_crash',
]:
if arg in self.smac_scenario_args:
self.logger.warning('Cannot override scenario argument %s, '
'will ignore this.', arg)
del self.smac_scenario_args[arg]
for arg in [
'cutoff_time',
'memory_limit',
'wallclock_limit',
]:
if arg in self.smac_scenario_args:
self.logger.warning(
'Overriding scenario argument %s: %s with value %s',
arg,
scenario_dict[arg],
self.smac_scenario_args[arg]
)
scenario_dict.update(self.smac_scenario_args)
runhistory = RunHistory(aggregate_func=average_cost)
smac_args = {
'scenario_dict': scenario_dict,
'seed': seed,
'ta': ta,
'backend': self.backend,
'metalearning_configurations': metalearning_configurations,
'runhistory': runhistory,
}
if self.get_smac_object_callback is not None:
smac = self.get_smac_object_callback(**smac_args)
else:
smac = get_smac_object(**smac_args)
smac.optimize()
# Patch SMAC to read in data from parallel runs after the last
# function evaluation
if self.shared_mode:
pSMAC.read(
run_history=smac.solver.runhistory,
output_dirs=smac.solver.scenario.input_psmac_dirs,
configuration_space=smac.solver.config_space,
logger=smac.solver.logger,
)
self.runhistory = smac.solver.runhistory
self.trajectory = smac.solver.intensifier.traj_logger.trajectory
return self.runhistory, self.trajectory
def get_metalearning_suggestions(self):
# == METALEARNING suggestions
# we start by evaluating the defaults on the full dataset again
# and add the suggestions from metalearning behind it
if self.num_metalearning_cfgs > 0:
# If metadata directory is None, use default
if self.metadata_directory is None:
metalearning_directory = os.path.dirname(
autosklearn.metalearning.__file__)
# There is no multilabel data in OpenML
if self.task == MULTILABEL_CLASSIFICATION:
meta_task = BINARY_CLASSIFICATION
else:
meta_task = self.task
metadata_directory = os.path.join(
metalearning_directory, 'files',
'%s_%s_%s' % (self.metric, TASK_TYPES_TO_STRING[meta_task],
'sparse' if self.datamanager.info['is_sparse']
else 'dense'))
self.metadata_directory = metadata_directory
# If metadata directory is specified by user,
# then verify that it exists.
else:
if not os.path.exists(self.metadata_directory):
raise ValueError('The specified metadata directory \'%s\' '
'does not exist!' % self.metadata_directory)
else:
# There is no multilabel data in OpenML
if self.task == MULTILABEL_CLASSIFICATION:
meta_task = BINARY_CLASSIFICATION
else:
meta_task = self.task
metadata_directory = os.path.join(
self.metadata_directory,
'%s_%s_%s' % (self.metric, TASK_TYPES_TO_STRING[meta_task],
'sparse' if self.datamanager.info['is_sparse']
else 'dense'))
# Check that the metadata directory has the correct
# subdirectory needed for this dataset.
if os.path.basename(metadata_directory) not in \
os.listdir(self.metadata_directory):
raise ValueError('The specified metadata directory '
'\'%s\' does not have the correct '
'subdirectory \'%s\'' %
(self.metadata_directory,
os.path.basename(metadata_directory))
)
self.metadata_directory = metadata_directory
if os.path.exists(self.metadata_directory):
self.logger.info('Metadata directory: %s',
self.metadata_directory)
meta_base = MetaBase(self.config_space, self.metadata_directory)
metafeature_calculation_time_limit = int(
self.total_walltime_limit / 4)
metafeature_calculation_start_time = time.time()
meta_features = self._calculate_metafeatures_with_limits(
metafeature_calculation_time_limit)
metafeature_calculation_end_time = time.time()
metafeature_calculation_time_limit = \
metafeature_calculation_time_limit - (
metafeature_calculation_end_time -
metafeature_calculation_start_time)
if metafeature_calculation_time_limit < 1:
self.logger.warning(
'Time limit for metafeature calculation less '
'than 1 seconds (%f). Skipping calculation '
'of metafeatures for encoded dataset.',
metafeature_calculation_time_limit)
meta_features_encoded = None
else:
with warnings.catch_warnings():
warnings.showwarning = self._send_warnings_to_log
self.datamanager.perform1HotEncoding()
meta_features_encoded = \
self._calculate_metafeatures_encoded_with_limits(
metafeature_calculation_time_limit)
# In case there is a problem calculating the encoded meta-features
if meta_features is None:
if meta_features_encoded is not None:
meta_features = meta_features_encoded
else:
if meta_features_encoded is not None:
meta_features.metafeature_values.update(
meta_features_encoded.metafeature_values)
if meta_features is not None:
meta_base.add_dataset(self.dataset_name, meta_features)
# Do mean imputation of the meta-features - should be done specific
# for each prediction model!
all_metafeatures = meta_base.get_metafeatures(
features=list(meta_features.keys()))
all_metafeatures.fillna(all_metafeatures.mean(),
inplace=True)
with warnings.catch_warnings():
warnings.showwarning = self._send_warnings_to_log
metalearning_configurations = self.collect_metalearning_suggestions(
meta_base)
if metalearning_configurations is None:
metalearning_configurations = []
self.reset_data_manager()
self.logger.info('%s', meta_features)
# Convert meta-features into a dictionary because the scenario
# expects a dictionary
meta_features_dict = {}
for dataset, series in all_metafeatures.iterrows():
meta_features_dict[dataset] = series.values
meta_features_list = []
for meta_feature_name in all_metafeatures.columns:
meta_features_list.append(
meta_features[meta_feature_name].value)
meta_features_list = np.array(meta_features_list).reshape(
(1, -1))
self.logger.info(list(meta_features_dict.keys()))
else:
meta_features = None
self.logger.warning('Could not find meta-data directory %s' %
metadata_directory)
else:
meta_features = None
if meta_features is None:
meta_features_list = []
metalearning_configurations = []
return metalearning_configurations
| true | true |
f720e4b13eef675ed79b1d8f5021f8b090a3e097 | 3,223 | py | Python | harbor/datadog_checks/harbor/config_models/defaults.py | codylerum/integrations-core | aee18148cebf5026099abde7bc218d3ba8d2e75c | [
"BSD-3-Clause"
] | null | null | null | harbor/datadog_checks/harbor/config_models/defaults.py | codylerum/integrations-core | aee18148cebf5026099abde7bc218d3ba8d2e75c | [
"BSD-3-Clause"
] | null | null | null | harbor/datadog_checks/harbor/config_models/defaults.py | codylerum/integrations-core | aee18148cebf5026099abde7bc218d3ba8d2e75c | [
"BSD-3-Clause"
] | null | null | null | # (C) Datadog, Inc. 2021-present
# All rights reserved
# Licensed under a 3-clause BSD style license (see LICENSE)
from datadog_checks.base.utils.models.fields import get_default_field_value
def shared_proxy(field, value):
return get_default_field_value(field, value)
def shared_service(field, value):
return get_default_field_value(field, value)
def shared_skip_proxy(field, value):
return False
def shared_timeout(field, value):
    """Default request timeout, in seconds, for the shared config section."""
    default_seconds = 10
    return default_seconds
def instance_allow_redirects(field, value):
return True
def instance_auth_token(field, value):
return get_default_field_value(field, value)
def instance_auth_type(field, value):
    """Default HTTP authentication scheme for an instance."""
    default_scheme = 'basic'
    return default_scheme
def instance_aws_host(field, value):
return get_default_field_value(field, value)
def instance_aws_region(field, value):
return get_default_field_value(field, value)
def instance_aws_service(field, value):
return get_default_field_value(field, value)
def instance_connect_timeout(field, value):
return get_default_field_value(field, value)
def instance_disable_generic_tags(field, value):
return False
def instance_empty_default_hostname(field, value):
return False
def instance_extra_headers(field, value):
return get_default_field_value(field, value)
def instance_headers(field, value):
return get_default_field_value(field, value)
def instance_kerberos_auth(field, value):
return 'disabled'
def instance_kerberos_cache(field, value):
return get_default_field_value(field, value)
def instance_kerberos_delegate(field, value):
return False
def instance_kerberos_force_initiate(field, value):
return False
def instance_kerberos_hostname(field, value):
return get_default_field_value(field, value)
def instance_kerberos_keytab(field, value):
return get_default_field_value(field, value)
def instance_kerberos_principal(field, value):
return get_default_field_value(field, value)
def instance_log_requests(field, value):
return False
def instance_min_collection_interval(field, value):
return 15
def instance_ntlm_domain(field, value):
return get_default_field_value(field, value)
def instance_persist_connections(field, value):
return False
def instance_proxy(field, value):
return get_default_field_value(field, value)
def instance_read_timeout(field, value):
return get_default_field_value(field, value)
def instance_service(field, value):
return get_default_field_value(field, value)
def instance_skip_proxy(field, value):
return False
def instance_tags(field, value):
return get_default_field_value(field, value)
def instance_timeout(field, value):
return 10
def instance_tls_ca_cert(field, value):
return get_default_field_value(field, value)
def instance_tls_cert(field, value):
return get_default_field_value(field, value)
def instance_tls_ignore_warning(field, value):
return False
def instance_tls_private_key(field, value):
return get_default_field_value(field, value)
def instance_tls_use_host_header(field, value):
return False
def instance_tls_verify(field, value):
return True
def instance_use_legacy_auth_encoding(field, value):
return True
| 20.018634 | 75 | 0.779398 |
from datadog_checks.base.utils.models.fields import get_default_field_value
def shared_proxy(field, value):
return get_default_field_value(field, value)
def shared_service(field, value):
return get_default_field_value(field, value)
def shared_skip_proxy(field, value):
return False
def shared_timeout(field, value):
return 10
def instance_allow_redirects(field, value):
return True
def instance_auth_token(field, value):
return get_default_field_value(field, value)
def instance_auth_type(field, value):
return 'basic'
def instance_aws_host(field, value):
return get_default_field_value(field, value)
def instance_aws_region(field, value):
return get_default_field_value(field, value)
def instance_aws_service(field, value):
return get_default_field_value(field, value)
def instance_connect_timeout(field, value):
return get_default_field_value(field, value)
def instance_disable_generic_tags(field, value):
return False
def instance_empty_default_hostname(field, value):
return False
def instance_extra_headers(field, value):
return get_default_field_value(field, value)
def instance_headers(field, value):
return get_default_field_value(field, value)
def instance_kerberos_auth(field, value):
return 'disabled'
def instance_kerberos_cache(field, value):
return get_default_field_value(field, value)
def instance_kerberos_delegate(field, value):
return False
def instance_kerberos_force_initiate(field, value):
return False
def instance_kerberos_hostname(field, value):
return get_default_field_value(field, value)
def instance_kerberos_keytab(field, value):
return get_default_field_value(field, value)
def instance_kerberos_principal(field, value):
return get_default_field_value(field, value)
def instance_log_requests(field, value):
return False
def instance_min_collection_interval(field, value):
return 15
def instance_ntlm_domain(field, value):
return get_default_field_value(field, value)
def instance_persist_connections(field, value):
return False
def instance_proxy(field, value):
return get_default_field_value(field, value)
def instance_read_timeout(field, value):
return get_default_field_value(field, value)
def instance_service(field, value):
return get_default_field_value(field, value)
def instance_skip_proxy(field, value):
return False
def instance_tags(field, value):
return get_default_field_value(field, value)
def instance_timeout(field, value):
return 10
def instance_tls_ca_cert(field, value):
return get_default_field_value(field, value)
def instance_tls_cert(field, value):
return get_default_field_value(field, value)
def instance_tls_ignore_warning(field, value):
return False
def instance_tls_private_key(field, value):
return get_default_field_value(field, value)
def instance_tls_use_host_header(field, value):
return False
def instance_tls_verify(field, value):
return True
def instance_use_legacy_auth_encoding(field, value):
return True
| true | true |
f720e4c382207e660d60cd12f08779e19473e3fd | 6,078 | py | Python | extras/mako/lookup.py | konker/pysmsd | ecf7583ca27e0f5e762154ae4e0a5b5601d53fba | [
"MIT"
] | 1 | 2017-09-02T06:48:02.000Z | 2017-09-02T06:48:02.000Z | extras/mako/lookup.py | sizzlelab/pysmsd | b670018fb421229591784faacdc19ec95d49f907 | [
"MIT"
] | null | null | null | extras/mako/lookup.py | sizzlelab/pysmsd | b670018fb421229591784faacdc19ec95d49f907 | [
"MIT"
] | null | null | null | # lookup.py
# Copyright (C) 2006, 2007, 2008 Michael Bayer mike_mp@zzzcomputing.com
#
# This module is part of Mako and is released under
# the MIT License: http://www.opensource.org/licenses/mit-license.php
import os, stat, posixpath, re
from mako import exceptions, util
from mako.template import Template
# Fall back to the no-op shim when the interpreter was built without
# thread support.  Only ImportError should trigger the fallback; the
# original bare "except:" would also have swallowed KeyboardInterrupt
# and SystemExit.
try:
    import threading
except ImportError:
    import dummy_threading as threading
class TemplateCollection(object):
    """Base class for a source of templates addressed by URI.

    Subclasses implement :meth:`get_template`; the URI/filename
    adjustment hooks below are identity operations by default.
    """

    def has_template(self, uri):
        """Return True if a template exists for the given ``uri``."""
        try:
            self.get_template(uri)
            return True
        except exceptions.TemplateLookupException:
            # Fix: the original used the Python-2-only form
            # "except X, e:" and never used the bound exception; the
            # bare clause works on both Python 2.6+ and Python 3.
            return False

    def get_template(self, uri, relativeto=None):
        """Return the Template for ``uri``; subclasses must override."""
        raise NotImplementedError()

    def filename_to_uri(self, uri, filename):
        """convert the given filename to a uri relative to this TemplateCollection."""
        return uri

    def adjust_uri(self, uri, filename):
        """adjust the given uri based on the calling filename.

        when this method is called from the runtime, the 'filename' parameter
        is taken directly to the 'filename' attribute of the calling
        template. Therefore a custom TemplateCollection subclass can place any string
        identifier desired in the "filename" parameter of the Template objects it constructs
        and have them come back here."""
        return uri
class TemplateLookup(TemplateCollection):
    """Filesystem-backed TemplateCollection.

    URIs are resolved by searching ``directories`` in order; compiled
    templates are cached in memory, either unbounded (``collection_size``
    of -1) or in an LRU cache of the given size.  A mutex serializes
    compilation so concurrent threads do not duplicate work.
    """

    def __init__(self, directories=None, module_directory=None, filesystem_checks=True,
                 collection_size=-1, format_exceptions=False, error_handler=None,
                 disable_unicode=False, output_encoding=None, encoding_errors='strict',
                 cache_type=None, cache_dir=None, cache_url=None, modulename_callable=None,
                 default_filters=None, buffer_filters=None, imports=None,
                 input_encoding=None, preprocessor=None):
        # ``buffer_filters`` previously defaulted to a shared mutable list
        # (``[]``); use a ``None`` sentinel so each instance gets its own
        # list (backward compatible: the effective default is still []).
        if buffer_filters is None:
            buffer_filters = []
        if isinstance(directories, basestring):
            # Accept a single directory string as a convenience.
            directories = [directories]
        self.directories = [posixpath.normpath(d) for d in directories or []]
        self.module_directory = module_directory
        self.modulename_callable = modulename_callable
        self.filesystem_checks = filesystem_checks
        self.collection_size = collection_size
        # Keyword arguments forwarded to every Template constructed by this lookup.
        self.template_args = {
            'format_exceptions': format_exceptions,
            'error_handler': error_handler,
            'disable_unicode': disable_unicode,
            'output_encoding': output_encoding,
            'encoding_errors': encoding_errors,
            'input_encoding': input_encoding,
            'module_directory': module_directory,
            'cache_type': cache_type,
            'cache_dir': cache_dir or module_directory,
            'cache_url': cache_url,
            'default_filters': default_filters,
            'buffer_filters': buffer_filters,
            'imports': imports,
            'preprocessor': preprocessor,
        }
        if collection_size == -1:
            # Unbounded caches.
            self.__collection = {}
            self._uri_cache = {}
        else:
            # Bounded caches with least-recently-used eviction.
            self.__collection = util.LRUCache(collection_size)
            self._uri_cache = util.LRUCache(collection_size)
        self._mutex = threading.Lock()

    def get_template(self, uri):
        """Return the template for ``uri``, loading and caching it on a miss.

        When ``filesystem_checks`` is enabled, cached entries are revalidated
        against the source file's modification time before being returned.

        :raises exceptions.TopLevelLookupException: if no directory contains
            a file matching ``uri``.
        """
        try:
            if self.filesystem_checks:
                return self.__check(uri, self.__collection[uri])
            else:
                return self.__collection[uri]
        except KeyError:
            # Cache miss: strip leading slashes and probe each directory in order.
            u = re.sub(r'^\/+', '', uri)
            for dir in self.directories:
                srcfile = posixpath.normpath(posixpath.join(dir, u))
                if os.path.exists(srcfile):
                    return self.__load(srcfile, uri)
            else:
                raise exceptions.TopLevelLookupException("Cant locate template for uri '%s'" % uri)

    def adjust_uri(self, uri, relativeto):
        """adjust the given uri based on the calling filename."""
        if uri[0] != '/':
            if relativeto is not None:
                # Relative uri: resolve against the calling template's directory.
                return posixpath.join(posixpath.dirname(relativeto), uri)
            else:
                return '/' + uri
        else:
            return uri

    def filename_to_uri(self, filename):
        """Return (and memoize) the uri corresponding to ``filename``.

        NOTE: unlike the base class this intentionally takes only
        ``filename``; the runtime invokes it with this signature.
        """
        try:
            return self._uri_cache[filename]
        except KeyError:
            value = self.__relativeize(filename)
            self._uri_cache[filename] = value
            return value

    def __relativeize(self, filename):
        """return the portion of a filename that is 'relative' to the directories in this lookup."""
        filename = posixpath.normpath(filename)
        for dir in self.directories:
            # NOTE(review): plain prefix match -- '/a/b2/x' also matches
            # directory '/a/b'; kept as-is for compatibility.
            if filename[0:len(dir)] == dir:
                return filename[len(dir):]
        else:
            return None

    def __load(self, filename, uri):
        """Compile ``filename`` into a Template and cache it under ``uri``."""
        self._mutex.acquire()
        try:
            try:
                # try returning from collection one more time in case concurrent thread already loaded
                return self.__collection[uri]
            except KeyError:
                pass
            try:
                self.__collection[uri] = Template(
                    uri=uri,
                    filename=posixpath.normpath(filename),
                    lookup=self,
                    module_filename=(self.modulename_callable is not None and self.modulename_callable(filename, uri) or None),
                    **self.template_args)
                return self.__collection[uri]
            except:
                # Evict the (possibly partial) entry so a later call can retry.
                self.__collection.pop(uri, None)
                raise
        finally:
            self._mutex.release()

    def __check(self, uri, template):
        """Revalidate a cached template against the filesystem, reloading if stale."""
        if template.filename is None:
            return template
        if not os.path.exists(template.filename):
            self.__collection.pop(uri, None)
            raise exceptions.TemplateLookupException("Cant locate template for uri '%s'" % uri)
        elif template.module._modified_time < os.stat(template.filename)[stat.ST_MTIME]:
            # Source changed on disk; drop the cached entry and recompile.
            self.__collection.pop(uri, None)
            return self.__load(template.filename, uri)
        else:
            return template

    def put_string(self, uri, text):
        """Compile ``text`` and place the resulting template at ``uri``."""
        self.__collection[uri] = Template(text, lookup=self, uri=uri, **self.template_args)

    def put_template(self, uri, template):
        """Place an already-constructed template at ``uri``."""
        self.__collection[uri] = template
| 44.364964 | 482 | 0.636723 |
import os, stat, posixpath, re
from mako import exceptions, util
from mako.template import Template
try:
import threading
except:
import dummy_threading as threading
class TemplateCollection(object):
def has_template(self, uri):
try:
self.get_template(uri)
return True
except exceptions.TemplateLookupException, e:
return False
def get_template(self, uri, relativeto=None):
raise NotImplementedError()
def filename_to_uri(self, uri, filename):
"""convert the given filename to a uri relative to this TemplateCollection."""
return uri
def adjust_uri(self, uri, filename):
"""adjust the given uri based on the calling filename.
when this method is called from the runtime, the 'filename' parameter
is taken directly to the 'filename' attribute of the calling
template. Therefore a custom TemplateCollection subclass can place any string
identifier desired in the "filename" parameter of the Template objects it constructs
and have them come back here."""
return uri
class TemplateLookup(TemplateCollection):
def __init__(self, directories=None, module_directory=None, filesystem_checks=True, collection_size=-1, format_exceptions=False,
error_handler=None, disable_unicode=False, output_encoding=None, encoding_errors='strict', cache_type=None, cache_dir=None, cache_url=None,
modulename_callable=None, default_filters=None, buffer_filters=[], imports=None, input_encoding=None, preprocessor=None):
if isinstance(directories, basestring):
directories = [directories]
self.directories = [posixpath.normpath(d) for d in directories or []]
self.module_directory = module_directory
self.modulename_callable = modulename_callable
self.filesystem_checks = filesystem_checks
self.collection_size = collection_size
self.template_args = {'format_exceptions':format_exceptions, 'error_handler':error_handler, 'disable_unicode':disable_unicode, 'output_encoding':output_encoding, 'encoding_errors':encoding_errors, 'input_encoding':input_encoding, 'module_directory':module_directory, 'cache_type':cache_type, 'cache_dir':cache_dir or module_directory, 'cache_url':cache_url, 'default_filters':default_filters, 'buffer_filters':buffer_filters, 'imports':imports, 'preprocessor':preprocessor}
if collection_size == -1:
self.__collection = {}
self._uri_cache = {}
else:
self.__collection = util.LRUCache(collection_size)
self._uri_cache = util.LRUCache(collection_size)
self._mutex = threading.Lock()
def get_template(self, uri):
try:
if self.filesystem_checks:
return self.__check(uri, self.__collection[uri])
else:
return self.__collection[uri]
except KeyError:
u = re.sub(r'^\/+', '', uri)
for dir in self.directories:
srcfile = posixpath.normpath(posixpath.join(dir, u))
if os.path.exists(srcfile):
return self.__load(srcfile, uri)
else:
raise exceptions.TopLevelLookupException("Cant locate template for uri '%s'" % uri)
def adjust_uri(self, uri, relativeto):
"""adjust the given uri based on the calling filename."""
if uri[0] != '/':
if relativeto is not None:
return posixpath.join(posixpath.dirname(relativeto), uri)
else:
return '/' + uri
else:
return uri
def filename_to_uri(self, filename):
try:
return self._uri_cache[filename]
except KeyError:
value = self.__relativeize(filename)
self._uri_cache[filename] = value
return value
def __relativeize(self, filename):
"""return the portion of a filename that is 'relative' to the directories in this lookup."""
filename = posixpath.normpath(filename)
for dir in self.directories:
if filename[0:len(dir)] == dir:
return filename[len(dir):]
else:
return None
def __load(self, filename, uri):
self._mutex.acquire()
try:
try:
return self.__collection[uri]
except KeyError:
pass
try:
self.__collection[uri] = Template(uri=uri, filename=posixpath.normpath(filename), lookup=self, module_filename=(self.modulename_callable is not None and self.modulename_callable(filename, uri) or None), **self.template_args)
return self.__collection[uri]
except:
self.__collection.pop(uri, None)
raise
finally:
self._mutex.release()
def __check(self, uri, template):
if template.filename is None:
return template
if not os.path.exists(template.filename):
self.__collection.pop(uri, None)
raise exceptions.TemplateLookupException("Cant locate template for uri '%s'" % uri)
elif template.module._modified_time < os.stat(template.filename)[stat.ST_MTIME]:
self.__collection.pop(uri, None)
return self.__load(template.filename, uri)
else:
return template
def put_string(self, uri, text):
self.__collection[uri] = Template(text, lookup=self, uri=uri, **self.template_args)
def put_template(self, uri, template):
self.__collection[uri] = template
| false | true |
f720e54b8a4add55c8bb4945dbfdd8f7cd946e00 | 790 | py | Python | st2common/st2common/exceptions/ssh.py | kkkanil/st2 | 07cd195d7a6e177a37dd019e5c9ab8329259d0fa | [
"Apache-2.0"
] | null | null | null | st2common/st2common/exceptions/ssh.py | kkkanil/st2 | 07cd195d7a6e177a37dd019e5c9ab8329259d0fa | [
"Apache-2.0"
] | 15 | 2021-02-11T22:58:54.000Z | 2021-08-06T18:03:47.000Z | st2common/st2common/exceptions/ssh.py | kkkanil/st2 | 07cd195d7a6e177a37dd019e5c9ab8329259d0fa | [
"Apache-2.0"
] | 1 | 2021-07-10T15:02:29.000Z | 2021-07-10T15:02:29.000Z | # Copyright 2020 The StackStorm Authors.
# Copyright 2019 Extreme Networks, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# Names exported on ``from ... import *``.  NoHostsConnectedToException is
# defined in this module but was omitted here; include it so star-imports
# expose the full public surface.
__all__ = [
    'InvalidCredentialsException',
    'NoHostsConnectedToException',
]
class InvalidCredentialsException(Exception):
    """Signals that the supplied SSH credentials were rejected."""
    pass
class NoHostsConnectedToException(Exception):
    """Signals that a connection could not be established to any target host."""
    pass
| 29.259259 | 74 | 0.764557 |
__all__ = [
'InvalidCredentialsException'
]
class InvalidCredentialsException(Exception):
pass
class NoHostsConnectedToException(Exception):
pass
| true | true |
f720e5c38c523665abca1c94ba91d51a3d76168c | 18,992 | py | Python | flytekit/common/launch_plan.py | tnsetting/flytekit | 4782264ffbc4bfdbaabe7a789a9ad76cb7e5499e | [
"Apache-2.0"
] | null | null | null | flytekit/common/launch_plan.py | tnsetting/flytekit | 4782264ffbc4bfdbaabe7a789a9ad76cb7e5499e | [
"Apache-2.0"
] | null | null | null | flytekit/common/launch_plan.py | tnsetting/flytekit | 4782264ffbc4bfdbaabe7a789a9ad76cb7e5499e | [
"Apache-2.0"
] | null | null | null | import datetime as _datetime
import logging as _logging
import uuid as _uuid
import six as _six
from deprecated import deprecated as _deprecated
from flytekit.common import interface as _interface
from flytekit.common import nodes as _nodes
from flytekit.common import promise as _promises
from flytekit.common import sdk_bases as _sdk_bases
from flytekit.common import workflow_execution as _workflow_execution
from flytekit.common.core import identifier as _identifier
from flytekit.common.exceptions import scopes as _exception_scopes
from flytekit.common.exceptions import user as _user_exceptions
from flytekit.common.mixins import hash as _hash_mixin
from flytekit.common.mixins import launchable as _launchable_mixin
from flytekit.common.mixins import registerable as _registerable
from flytekit.common.types import helpers as _type_helpers
from flytekit.configuration import auth as _auth_config
from flytekit.configuration import sdk as _sdk_config
from flytekit.engines.flyte import engine as _flyte_engine
from flytekit.models import common as _common_models
from flytekit.models import execution as _execution_models
from flytekit.models import interface as _interface_models
from flytekit.models import launch_plan as _launch_plan_models
from flytekit.models import literals as _literal_models
from flytekit.models import schedule as _schedule_model
from flytekit.models.core import identifier as _identifier_model
from flytekit.models.core import workflow as _workflow_models
class SdkLaunchPlan(
    _launchable_mixin.LaunchableEntity,
    _registerable.HasDependencies,
    _registerable.RegisterableEntity,
    _launch_plan_models.LaunchPlanSpec,
    metaclass=_sdk_bases.ExtendedSdkType,
):
    """SDK-side representation of a Flyte launch plan.

    Wraps the IDL ``LaunchPlanSpec`` model and adds registration, fetching
    and launching behavior on top of it.  ``_id`` and ``_interface`` are
    populated only when the object is registered or fetched from Admin.
    """

    def __init__(self, *args, **kwargs):
        super(SdkLaunchPlan, self).__init__(*args, **kwargs)
        # Set all the attributes we expect this class to have
        self._id = None
        # The interface is not set explicitly unless fetched in an engine context
        self._interface = None

    @classmethod
    def promote_from_model(cls, model) -> "SdkLaunchPlan":
        """
        Build an SdkLaunchPlan from a raw IDL model, wrapping each default
        input as a named SDK Input object.

        :param flytekit.models.launch_plan.LaunchPlanSpec model:
        :rtype: SdkLaunchPlan
        """
        return cls(
            workflow_id=_identifier.Identifier.promote_from_model(model.workflow_id),
            default_inputs=_interface_models.ParameterMap(
                {
                    k: _promises.Input.promote_from_model(v).rename_and_return_reference(k)
                    for k, v in _six.iteritems(model.default_inputs.parameters)
                }
            ),
            fixed_inputs=model.fixed_inputs,
            entity_metadata=model.entity_metadata,
            labels=model.labels,
            annotations=model.annotations,
            auth_role=model.auth_role,
            raw_output_data_config=model.raw_output_data_config,
        )

    @_exception_scopes.system_entry_point
    def register(self, project, domain, name, version):
        """
        Register this launch plan with Admin under the given identifier and
        return the stringified id.

        :param Text project:
        :param Text domain:
        :param Text name:
        :param Text version:
        """
        self.validate()
        id_to_register = _identifier.Identifier(
            _identifier_model.ResourceType.LAUNCH_PLAN, project, domain, name, version
        )
        client = _flyte_engine.get_client()
        try:
            client.create_launch_plan(id_to_register, self)
        except _user_exceptions.FlyteEntityAlreadyExistsException:
            # Registration is idempotent: an existing identical entity is fine.
            pass
        self._id = id_to_register
        return str(self.id)

    @classmethod
    @_exception_scopes.system_entry_point
    def fetch(cls, project, domain, name, version=None):
        """
        This function uses the engine loader to call create a hydrated task from Admin.
        :param Text project:
        :param Text domain:
        :param Text name:
        :param Text version: [Optional] If not set, the SDK will fetch the active launch plan for the given project,
            domain, and name.
        :rtype: SdkLaunchPlan
        """
        from flytekit.common import workflow as _workflow

        launch_plan_id = _identifier.Identifier(
            _identifier_model.ResourceType.LAUNCH_PLAN, project, domain, name, version
        )
        if launch_plan_id.version:
            lp = _flyte_engine.get_client().get_launch_plan(launch_plan_id)
        else:
            # No version given: resolve whichever launch plan is currently active.
            named_entity_id = _common_models.NamedEntityIdentifier(
                launch_plan_id.project, launch_plan_id.domain, launch_plan_id.name
            )
            lp = _flyte_engine.get_client().get_active_launch_plan(named_entity_id)
        sdk_lp = cls.promote_from_model(lp.spec)
        sdk_lp._id = lp.id
        # TODO: Add a test for this, and this function as a whole
        # The interface is not part of the spec; derive it from the underlying workflow.
        wf_id = sdk_lp.workflow_id
        lp_wf = _workflow.SdkWorkflow.fetch(wf_id.project, wf_id.domain, wf_id.name, wf_id.version)
        sdk_lp._interface = lp_wf.interface
        sdk_lp._has_registered = True
        return sdk_lp

    @_exception_scopes.system_entry_point
    def serialize(self):
        """
        Unlike the SdkWorkflow serialize call, nothing special needs to be done here.
        :rtype: flyteidl.admin.launch_plan_pb2.LaunchPlanSpec
        """
        return self.to_flyte_idl()

    @property
    def id(self):
        """
        :rtype: flytekit.common.core.identifier.Identifier
        """
        return self._id

    @property
    def is_scheduled(self):
        """
        True when this launch plan carries a cron or fixed-rate schedule.
        :rtype: bool
        """
        if self.entity_metadata.schedule.cron_expression:
            return True
        elif self.entity_metadata.schedule.rate and self.entity_metadata.schedule.rate.value:
            return True
        else:
            return False

    @property
    def auth_role(self):
        """
        The auth role fixed on the spec, or, failing that, the one from
        configuration (falling back to the deprecated `role` setting).
        :rtype: flytekit.models.common.AuthRole
        """
        fixed_auth = super(SdkLaunchPlan, self).auth_role
        if fixed_auth is not None and (
            fixed_auth.assumable_iam_role is not None or fixed_auth.kubernetes_service_account is not None
        ):
            return fixed_auth

        assumable_iam_role = _auth_config.ASSUMABLE_IAM_ROLE.get()
        kubernetes_service_account = _auth_config.KUBERNETES_SERVICE_ACCOUNT.get()

        if not (assumable_iam_role or kubernetes_service_account):
            _logging.warning(
                "Using deprecated `role` from config. Please update your config to use `assumable_iam_role` instead"
            )
            assumable_iam_role = _sdk_config.ROLE.get()
        return _common_models.AuthRole(
            assumable_iam_role=assumable_iam_role, kubernetes_service_account=kubernetes_service_account,
        )

    @property
    def workflow_id(self):
        """
        :rtype: flytekit.common.core.identifier.Identifier
        """
        return self._workflow_id

    @property
    def interface(self):
        """
        The interface is not technically part of the admin.LaunchPlanSpec in the IDL, however the workflow ID is, and
        from the workflow ID, fetch will fill in the interface. This is nice because then you can __call__ the=
        object and get a node.
        :rtype: flytekit.common.interface.TypedInterface
        """
        return self._interface

    @property
    def resource_type(self):
        """
        Integer from _identifier.ResourceType enum
        :rtype: int
        """
        return _identifier_model.ResourceType.LAUNCH_PLAN

    @property
    def entity_type_text(self):
        """
        :rtype: Text
        """
        return "Launch Plan"

    @property
    def raw_output_data_config(self):
        """
        :rtype: flytekit.models.common.RawOutputDataConfig
        """
        raw_output_data_config = super(SdkLaunchPlan, self).raw_output_data_config
        if raw_output_data_config is not None and raw_output_data_config.output_location_prefix != "":
            return raw_output_data_config

        # If it was not set explicitly then let's use the value found in the configuration.
        return _common_models.RawOutputDataConfig(_auth_config.RAW_OUTPUT_DATA_PREFIX.get())

    @_exception_scopes.system_entry_point
    def validate(self):
        # TODO: Validate workflow is satisfied
        pass

    @_exception_scopes.system_entry_point
    def update(self, state):
        """
        Set the ACTIVE/INACTIVE state of this (already registered) launch plan.
        :param int state: Enum value from flytekit.models.launch_plan.LaunchPlanState
        """
        if not self.id:
            raise _user_exceptions.FlyteAssertion(
                "Failed to update launch plan because the launch plan's ID is not set. Please call register to fetch "
                "or register the identifier first"
            )
        return _flyte_engine.get_client().update_launch_plan(self.id, state)

    def _python_std_input_map_to_literal_map(self, inputs):
        """
        :param dict[Text,Any] inputs: A dictionary of Python standard inputs that will be type-checked and compiled
            to a LiteralMap
        :rtype: flytekit.models.literals.LiteralMap
        """
        return _type_helpers.pack_python_std_map_to_literal_map(
            inputs,
            {k: user_input.sdk_type for k, user_input in _six.iteritems(self.default_inputs.parameters) if k in inputs},
        )

    @_deprecated(reason="Use launch_with_literals instead", version="0.9.0")
    def execute_with_literals(
        self,
        project,
        domain,
        literal_inputs,
        name=None,
        notification_overrides=None,
        label_overrides=None,
        annotation_overrides=None,
    ):
        """
        Deprecated.
        """
        return self.launch_with_literals(
            project, domain, literal_inputs, name, notification_overrides, label_overrides, annotation_overrides,
        )

    @_exception_scopes.system_entry_point
    def launch_with_literals(
        self,
        project,
        domain,
        literal_inputs,
        name=None,
        notification_overrides=None,
        label_overrides=None,
        annotation_overrides=None,
    ):
        """
        Executes the launch plan and returns the execution identifier.  This version of execution is meant for when
        you already have a LiteralMap of inputs.

        :param Text project:
        :param Text domain:
        :param flytekit.models.literals.LiteralMap literal_inputs: Inputs to the execution.
        :param Text name: [Optional] If specified, an execution will be created with this name.  Note: the name must
            be unique within the context of the project and domain.
        :param list[flytekit.common.notifications.Notification] notification_overrides: [Optional] If specified, these
            are the notifications that will be honored for this execution.  An empty list signals to disable all
            notifications.
        :param flytekit.models.common.Labels label_overrides:
        :param flytekit.models.common.Annotations annotation_overrides:
        :rtype: flytekit.common.workflow_execution.SdkWorkflowExecution
        """
        # Kubernetes requires names starting with an alphabet for some resources.
        name = name or "f" + _uuid.uuid4().hex[:19]
        # An explicitly empty list means "disable all notifications" (distinct from None).
        disable_all = notification_overrides == []
        if disable_all:
            notification_overrides = None
        else:
            notification_overrides = _execution_models.NotificationList(notification_overrides or [])
            disable_all = None

        client = _flyte_engine.get_client()
        try:
            exec_id = client.create_execution(
                project,
                domain,
                name,
                _execution_models.ExecutionSpec(
                    self.id,
                    _execution_models.ExecutionMetadata(
                        _execution_models.ExecutionMetadata.ExecutionMode.MANUAL,
                        "sdk",  # TODO: get principle
                        0,  # TODO: Detect nesting
                    ),
                    notifications=notification_overrides,
                    disable_all=disable_all,
                    labels=label_overrides,
                    annotations=annotation_overrides,
                ),
                literal_inputs,
            )
        except _user_exceptions.FlyteEntityAlreadyExistsException:
            # Re-launch with the same name: return a handle to the existing execution.
            exec_id = _identifier.WorkflowExecutionIdentifier(project, domain, name)
        execution = client.get_execution(exec_id)
        return _workflow_execution.SdkWorkflowExecution.promote_from_model(execution)

    @_exception_scopes.system_entry_point
    def __call__(self, *args, **input_map):
        """
        Bind this launch plan as a node inside a workflow definition.

        :param list[T] args: Do not specify.  Kwargs only are supported for this function.
        :param dict[Text,T] input_map: Map of inputs.  Can be statically defined or OutputReference links.
        :rtype: flytekit.common.nodes.SdkNode
        """
        if len(args) > 0:
            raise _user_exceptions.FlyteAssertion(
                "When adding a launchplan as a node in a workflow, all inputs must be specified with kwargs only.  We "
                "detected {} positional args.".format(len(args))
            )

        # Take the default values from the launch plan
        default_inputs = {k: v.sdk_default for k, v in _six.iteritems(self.default_inputs.parameters) if not v.required}
        default_inputs.update(input_map)

        bindings, upstream_nodes = self.interface.create_bindings_for_inputs(default_inputs)

        return _nodes.SdkNode(
            id=None,
            metadata=_workflow_models.NodeMetadata("", _datetime.timedelta(), _literal_models.RetryStrategy(0)),
            bindings=sorted(bindings, key=lambda b: b.var),
            upstream_nodes=upstream_nodes,
            sdk_launch_plan=self,
        )

    def __repr__(self):
        """
        :rtype: Text
        """
        return "SdkLaunchPlan(ID: {} Interface: {} WF ID: {})".format(self.id, self.interface, self.workflow_id)
# The difference between this and the SdkLaunchPlan class is that this runnable class is supposed to only be used for
# launch plans loaded alongside the current Python interpreter.
class SdkRunnableLaunchPlan(_hash_mixin.HashOnReferenceMixin, SdkLaunchPlan):
    """Launch plan bound to a workflow loaded in this Python interpreter.

    Unlike SdkLaunchPlan, instances are constructed directly from local code;
    the fetch/promote entry points are therefore intentionally disabled.
    """

    def __init__(
        self,
        sdk_workflow,
        default_inputs=None,
        fixed_inputs=None,
        role=None,
        schedule=None,
        notifications=None,
        labels=None,
        annotations=None,
        auth_role=None,
        raw_output_data_config=None,
    ):
        """
        :param flytekit.common.local_workflow.SdkRunnableWorkflow sdk_workflow:
        :param dict[Text,flytekit.common.promise.Input] default_inputs:
        :param dict[Text,Any] fixed_inputs: These inputs will be fixed and not need to be set when executing this
            launch plan.
        :param Text role: Deprecated. IAM role to execute this launch plan with.
        :param flytekit.models.schedule.Schedule: Schedule to apply to this workflow.
        :param list[flytekit.models.common.Notification]: List of notifications to apply to this launch plan.
        :param flytekit.models.common.Labels labels: Any custom kubernetes labels to apply to workflows executed by this
            launch plan.
        :param flytekit.models.common.Annotations annotations: Any custom kubernetes annotations to apply to workflows
            executed by this launch plan.
            Any custom kubernetes annotations to apply to workflows executed by this launch plan.
        :param flytekit.models.common.Authrole auth_role: The auth method with which to execute the workflow.
        :param flytekit.models.common.RawOutputDataConfig raw_output_data_config: Config for offloading data
        """
        if role and auth_role:
            raise ValueError("Cannot set both role and auth. Role is deprecated, use auth instead.")

        fixed_inputs = fixed_inputs or {}
        default_inputs = default_inputs or {}

        if role:
            # Legacy path: wrap the deprecated `role` into an AuthRole.
            auth_role = _common_models.AuthRole(assumable_iam_role=role)

        # The constructor for SdkLaunchPlan sets the id to None anyways so we don't bother passing in an ID. The ID
        # should be set in one of three places,
        # 1) When the object is registered (in the code above)
        # 2) By the dynamic task code after this runnable object has already been __call__'ed. The SdkNode produced
        #    maintains a link to this object and will set the ID according to the configuration variables present.
        # 3) When SdkLaunchPlan.fetch() is run
        super(SdkRunnableLaunchPlan, self).__init__(
            None,
            _launch_plan_models.LaunchPlanMetadata(
                schedule=schedule or _schedule_model.Schedule(""), notifications=notifications or [],
            ),
            _interface_models.ParameterMap(default_inputs),
            _type_helpers.pack_python_std_map_to_literal_map(
                fixed_inputs,
                {
                    k: _type_helpers.get_sdk_type_from_literal_type(var.type)
                    for k, var in _six.iteritems(sdk_workflow.interface.inputs)
                    if k in fixed_inputs
                },
            ),
            labels or _common_models.Labels({}),
            annotations or _common_models.Annotations({}),
            auth_role,
            raw_output_data_config or _common_models.RawOutputDataConfig(""),
        )
        # The launch plan's inputs are the (possibly defaulted) user inputs; outputs mirror the workflow's.
        self._interface = _interface.TypedInterface(
            {k: v.var for k, v in _six.iteritems(default_inputs)}, sdk_workflow.interface.outputs,
        )
        self._upstream_entities = {sdk_workflow}
        self._sdk_workflow = sdk_workflow

    @classmethod
    def from_flyte_idl(cls, _):
        # Deliberately unsupported for the runnable (local-code) variant.
        raise _user_exceptions.FlyteAssertion(
            "An SdkRunnableLaunchPlan must be created from a reference to local Python code only."
        )

    @classmethod
    def promote_from_model(cls, model):
        # Deliberately unsupported for the runnable (local-code) variant.
        raise _user_exceptions.FlyteAssertion(
            "An SdkRunnableLaunchPlan must be created from a reference to local Python code only."
        )

    @classmethod
    @_exception_scopes.system_entry_point
    def fetch(cls, project, domain, name, version=None):
        """
        This function uses the engine loader to call create a hydrated task from Admin.
        :param Text project:
        :param Text domain:
        :param Text name:
        :param Text version:
        :rtype: SdkRunnableLaunchPlan
        """
        raise _user_exceptions.FlyteAssertion(
            "An SdkRunnableLaunchPlan must be created from a reference to local Python code only."
        )

    @property
    def workflow_id(self):
        """
        :rtype: flytekit.common.core.identifier.Identifier
        """
        return self._sdk_workflow.id

    def __repr__(self):
        """
        :rtype: Text
        """
        return "SdkRunnableLaunchPlan(ID: {} Interface: {} WF ID: {})".format(self.id, self.interface, self.workflow_id)
| 40.15222 | 120 | 0.662647 | import datetime as _datetime
import logging as _logging
import uuid as _uuid
import six as _six
from deprecated import deprecated as _deprecated
from flytekit.common import interface as _interface
from flytekit.common import nodes as _nodes
from flytekit.common import promise as _promises
from flytekit.common import sdk_bases as _sdk_bases
from flytekit.common import workflow_execution as _workflow_execution
from flytekit.common.core import identifier as _identifier
from flytekit.common.exceptions import scopes as _exception_scopes
from flytekit.common.exceptions import user as _user_exceptions
from flytekit.common.mixins import hash as _hash_mixin
from flytekit.common.mixins import launchable as _launchable_mixin
from flytekit.common.mixins import registerable as _registerable
from flytekit.common.types import helpers as _type_helpers
from flytekit.configuration import auth as _auth_config
from flytekit.configuration import sdk as _sdk_config
from flytekit.engines.flyte import engine as _flyte_engine
from flytekit.models import common as _common_models
from flytekit.models import execution as _execution_models
from flytekit.models import interface as _interface_models
from flytekit.models import launch_plan as _launch_plan_models
from flytekit.models import literals as _literal_models
from flytekit.models import schedule as _schedule_model
from flytekit.models.core import identifier as _identifier_model
from flytekit.models.core import workflow as _workflow_models
class SdkLaunchPlan(
_launchable_mixin.LaunchableEntity,
_registerable.HasDependencies,
_registerable.RegisterableEntity,
_launch_plan_models.LaunchPlanSpec,
metaclass=_sdk_bases.ExtendedSdkType,
):
def __init__(self, *args, **kwargs):
super(SdkLaunchPlan, self).__init__(*args, **kwargs)
self._id = None
self._interface = None
@classmethod
def promote_from_model(cls, model) -> "SdkLaunchPlan":
return cls(
workflow_id=_identifier.Identifier.promote_from_model(model.workflow_id),
default_inputs=_interface_models.ParameterMap(
{
k: _promises.Input.promote_from_model(v).rename_and_return_reference(k)
for k, v in _six.iteritems(model.default_inputs.parameters)
}
),
fixed_inputs=model.fixed_inputs,
entity_metadata=model.entity_metadata,
labels=model.labels,
annotations=model.annotations,
auth_role=model.auth_role,
raw_output_data_config=model.raw_output_data_config,
)
@_exception_scopes.system_entry_point
def register(self, project, domain, name, version):
self.validate()
id_to_register = _identifier.Identifier(
_identifier_model.ResourceType.LAUNCH_PLAN, project, domain, name, version
)
client = _flyte_engine.get_client()
try:
client.create_launch_plan(id_to_register, self)
except _user_exceptions.FlyteEntityAlreadyExistsException:
pass
self._id = id_to_register
return str(self.id)
@classmethod
@_exception_scopes.system_entry_point
def fetch(cls, project, domain, name, version=None):
from flytekit.common import workflow as _workflow
launch_plan_id = _identifier.Identifier(
_identifier_model.ResourceType.LAUNCH_PLAN, project, domain, name, version
)
if launch_plan_id.version:
lp = _flyte_engine.get_client().get_launch_plan(launch_plan_id)
else:
named_entity_id = _common_models.NamedEntityIdentifier(
launch_plan_id.project, launch_plan_id.domain, launch_plan_id.name
)
lp = _flyte_engine.get_client().get_active_launch_plan(named_entity_id)
sdk_lp = cls.promote_from_model(lp.spec)
sdk_lp._id = lp.id
wf_id = sdk_lp.workflow_id
lp_wf = _workflow.SdkWorkflow.fetch(wf_id.project, wf_id.domain, wf_id.name, wf_id.version)
sdk_lp._interface = lp_wf.interface
sdk_lp._has_registered = True
return sdk_lp
@_exception_scopes.system_entry_point
def serialize(self):
return self.to_flyte_idl()
@property
def id(self):
return self._id
@property
def is_scheduled(self):
if self.entity_metadata.schedule.cron_expression:
return True
elif self.entity_metadata.schedule.rate and self.entity_metadata.schedule.rate.value:
return True
else:
return False
@property
def auth_role(self):
fixed_auth = super(SdkLaunchPlan, self).auth_role
if fixed_auth is not None and (
fixed_auth.assumable_iam_role is not None or fixed_auth.kubernetes_service_account is not None
):
return fixed_auth
assumable_iam_role = _auth_config.ASSUMABLE_IAM_ROLE.get()
kubernetes_service_account = _auth_config.KUBERNETES_SERVICE_ACCOUNT.get()
if not (assumable_iam_role or kubernetes_service_account):
_logging.warning(
"Using deprecated `role` from config. Please update your config to use `assumable_iam_role` instead"
)
assumable_iam_role = _sdk_config.ROLE.get()
return _common_models.AuthRole(
assumable_iam_role=assumable_iam_role, kubernetes_service_account=kubernetes_service_account,
)
@property
def workflow_id(self):
return self._workflow_id
@property
def interface(self):
return self._interface
@property
def resource_type(self):
return _identifier_model.ResourceType.LAUNCH_PLAN
@property
def entity_type_text(self):
return "Launch Plan"
@property
def raw_output_data_config(self):
raw_output_data_config = super(SdkLaunchPlan, self).raw_output_data_config
if raw_output_data_config is not None and raw_output_data_config.output_location_prefix != "":
return raw_output_data_config
return _common_models.RawOutputDataConfig(_auth_config.RAW_OUTPUT_DATA_PREFIX.get())
@_exception_scopes.system_entry_point
def validate(self):
# TODO: Validate workflow is satisfied
pass
@_exception_scopes.system_entry_point
def update(self, state):
if not self.id:
raise _user_exceptions.FlyteAssertion(
"Failed to update launch plan because the launch plan's ID is not set. Please call register to fetch "
"or register the identifier first"
)
return _flyte_engine.get_client().update_launch_plan(self.id, state)
def _python_std_input_map_to_literal_map(self, inputs):
return _type_helpers.pack_python_std_map_to_literal_map(
inputs,
{k: user_input.sdk_type for k, user_input in _six.iteritems(self.default_inputs.parameters) if k in inputs},
)
@_deprecated(reason="Use launch_with_literals instead", version="0.9.0")
def execute_with_literals(
self,
project,
domain,
literal_inputs,
name=None,
notification_overrides=None,
label_overrides=None,
annotation_overrides=None,
):
return self.launch_with_literals(
project, domain, literal_inputs, name, notification_overrides, label_overrides, annotation_overrides,
)
@_exception_scopes.system_entry_point
def launch_with_literals(
    self,
    project,
    domain,
    literal_inputs,
    name=None,
    notification_overrides=None,
    label_overrides=None,
    annotation_overrides=None,
):
    """Launch an execution of this launch plan with already-packed literal inputs.

    :param project: project in which to create the execution
    :param domain: domain in which to create the execution
    :param literal_inputs: inputs already converted to a literal map
    :param name: optional execution name; a random "f"-prefixed name is
        generated when omitted
    :param notification_overrides: list of notifications; an explicitly empty
        list disables all notifications for this execution
    :param label_overrides: optional labels for the execution
    :param annotation_overrides: optional annotations for the execution
    :return: the created (or pre-existing, if the name already exists)
        execution promoted to an SDK workflow execution object
    """
    # Generate a unique execution name when the caller did not supply one.
    name = name or "f" + _uuid.uuid4().hex[:19]
    # An explicitly-empty list (as opposed to None) means "disable all notifications".
    disable_all = notification_overrides == []
    if disable_all:
        notification_overrides = None
    else:
        notification_overrides = _execution_models.NotificationList(notification_overrides or [])
        disable_all = None
    client = _flyte_engine.get_client()
    try:
        exec_id = client.create_execution(
            project,
            domain,
            name,
            _execution_models.ExecutionSpec(
                self.id,
                _execution_models.ExecutionMetadata(
                    _execution_models.ExecutionMetadata.ExecutionMode.MANUAL,
                    "sdk",
                    0,
                ),
                notifications=notification_overrides,
                disable_all=disable_all,
                labels=label_overrides,
                annotations=annotation_overrides,
            ),
            literal_inputs,
        )
    except _user_exceptions.FlyteEntityAlreadyExistsException:
        # An execution with this name already exists -- reuse its identifier.
        exec_id = _identifier.WorkflowExecutionIdentifier(project, domain, name)
    execution = client.get_execution(exec_id)
    return _workflow_execution.SdkWorkflowExecution.promote_from_model(execution)
@_exception_scopes.system_entry_point
def __call__(self, *args, **input_map):
    """Use this launch plan as a node inside a workflow definition.

    All inputs must be supplied as keyword arguments; non-required parameters
    fall back to their declared defaults.  Returns the resulting SdkNode.
    """
    if args:
        raise _user_exceptions.FlyteAssertion(
            "When adding a launchplan as a node in a workflow, all inputs must be specified with kwargs only. We "
            "detected {} positional args.".format(len(args))
        )
    # Start from the defaults of all optional parameters, then let the
    # caller's keyword arguments override them.
    merged_inputs = {
        param_name: param.sdk_default
        for param_name, param in _six.iteritems(self.default_inputs.parameters)
        if not param.required
    }
    merged_inputs.update(input_map)
    bindings, upstream_nodes = self.interface.create_bindings_for_inputs(merged_inputs)
    return _nodes.SdkNode(
        id=None,
        metadata=_workflow_models.NodeMetadata("", _datetime.timedelta(), _literal_models.RetryStrategy(0)),
        bindings=sorted(bindings, key=lambda binding: binding.var),
        upstream_nodes=upstream_nodes,
        sdk_launch_plan=self,
    )
def __repr__(self):
    """Debug representation showing id, interface and backing workflow id."""
    return "SdkLaunchPlan(ID: {} Interface: {} WF ID: {})".format(self.id, self.interface, self.workflow_id)
class SdkRunnableLaunchPlan(_hash_mixin.HashOnReferenceMixin, SdkLaunchPlan):
    """A launch plan constructed locally around an in-memory SDK workflow.

    Unlike the base :class:`SdkLaunchPlan`, instances can only be built from
    local Python code: ``from_flyte_idl``, ``promote_from_model`` and
    ``fetch`` all deliberately raise.
    """

    def __init__(
        self,
        sdk_workflow,
        default_inputs=None,
        fixed_inputs=None,
        role=None,
        schedule=None,
        notifications=None,
        labels=None,
        annotations=None,
        auth_role=None,
        raw_output_data_config=None,
    ):
        """
        :param sdk_workflow: the local SDK workflow this launch plan wraps
        :param default_inputs: overridable parameters, keyed by input name
        :param fixed_inputs: plain-Python values fixed for every execution;
            packed into literals using the workflow's declared input types
        :param role: deprecated -- bare assumable IAM role; use ``auth_role``
        :param schedule: optional schedule; defaults to an empty schedule
        :param notifications: optional list of notification models
        :param labels: optional labels model
        :param annotations: optional annotations model
        :param auth_role: auth configuration for executions of this launch plan
        :param raw_output_data_config: where raw output data should be written
        :raises ValueError: if both ``role`` and ``auth_role`` are supplied
        """
        if role and auth_role:
            raise ValueError("Cannot set both role and auth. Role is deprecated, use auth instead.")
        fixed_inputs = fixed_inputs or {}
        default_inputs = default_inputs or {}
        if role:
            # Legacy path: wrap the bare IAM role string in the newer AuthRole model.
            auth_role = _common_models.AuthRole(assumable_iam_role=role)

        # The id (first positional argument below, passed as None here)
        # should be set in one of three places,
        # 1) When the object is registered (in the code above)
        # 2) By the dynamic task code after this runnable object has already been __call__'ed. The SdkNode produced
        # NOTE(review): this comment appears truncated in the source -- item 3) is missing.
        super(SdkRunnableLaunchPlan, self).__init__(
            None,
            _launch_plan_models.LaunchPlanMetadata(
                schedule=schedule or _schedule_model.Schedule(""), notifications=notifications or [],
            ),
            _interface_models.ParameterMap(default_inputs),
            _type_helpers.pack_python_std_map_to_literal_map(
                fixed_inputs,
                {
                    k: _type_helpers.get_sdk_type_from_literal_type(var.type)
                    for k, var in _six.iteritems(sdk_workflow.interface.inputs)
                    if k in fixed_inputs
                },
            ),
            labels or _common_models.Labels({}),
            annotations or _common_models.Annotations({}),
            auth_role,
            raw_output_data_config or _common_models.RawOutputDataConfig(""),
        )
        # The public interface exposes the default (overridable) inputs plus
        # the wrapped workflow's outputs.
        self._interface = _interface.TypedInterface(
            {k: v.var for k, v in _six.iteritems(default_inputs)}, sdk_workflow.interface.outputs,
        )
        self._upstream_entities = {sdk_workflow}
        self._sdk_workflow = sdk_workflow

    @classmethod
    def from_flyte_idl(cls, _):
        """Deliberately unsupported for this class; always raises."""
        raise _user_exceptions.FlyteAssertion(
            "An SdkRunnableLaunchPlan must be created from a reference to local Python code only."
        )

    @classmethod
    def promote_from_model(cls, model):
        """Deliberately unsupported for this class; always raises."""
        raise _user_exceptions.FlyteAssertion(
            "An SdkRunnableLaunchPlan must be created from a reference to local Python code only."
        )

    @classmethod
    @_exception_scopes.system_entry_point
    def fetch(cls, project, domain, name, version=None):
        """Deliberately unsupported for this class; always raises."""
        raise _user_exceptions.FlyteAssertion(
            "An SdkRunnableLaunchPlan must be created from a reference to local Python code only."
        )

    @property
    def workflow_id(self):
        """Return the id of the wrapped SDK workflow."""
        return self._sdk_workflow.id

    def __repr__(self):
        """Debug representation showing id, interface and workflow id."""
        return "SdkRunnableLaunchPlan(ID: {} Interface: {} WF ID: {})".format(self.id, self.interface, self.workflow_id)
| true | true |
f720e5c62f21e8d5ff58e6fa829b2e05a1daba2e | 3,614 | py | Python | model_v2/synthetic_data.py | suchir/passenger_screening_algorithm_challenge | 65e3e3ce1889e9a100f6b9b6a53fe5c785a84612 | [
"MIT"
] | 7 | 2018-02-05T01:57:30.000Z | 2019-06-25T08:00:40.000Z | model_v2/synthetic_data.py | suchir/passenger_screening_algorithm_challenge | 65e3e3ce1889e9a100f6b9b6a53fe5c785a84612 | [
"MIT"
] | 1 | 2018-05-07T15:28:29.000Z | 2018-05-07T15:28:29.000Z | model_v2/synthetic_data.py | suchir/passenger_screening_algorithm_challenge | 65e3e3ce1889e9a100f6b9b6a53fe5c785a84612 | [
"MIT"
] | 3 | 2018-05-16T03:50:44.000Z | 2018-08-20T12:40:58.000Z | from common.caching import read_input_dir, cached, read_log_dir
from common.dataio import get_aps_data_hdf5, get_passenger_clusters, get_data
from . import dataio
from collections import defaultdict
import numpy as np
import skimage.transform
import skimage.io
import skimage.color
import glob
import os
import tqdm
import h5py
import pickle
import imageio
import math
import time
import subprocess
import json
@cached(version=0)
def generate_random_models(n_models):
    """Create ``n_models`` random MakeHuman model (.mhm) files.

    Scans the hand-made passenger ``.mhm`` files to learn, for every modifier,
    the [min, max] range of observed values, then writes ``0.mhm`` ..
    ``n_models-1.mhm`` with each modifier drawn uniformly from its observed
    range.  The numpy RNG is seeded, so output is deterministic.
    """
    with read_input_dir('makehuman/passengers'):
        # Observed [min, max] per modifier name across all example files.
        ranges = defaultdict(lambda: [float('inf'), float('-inf')])
        for path in glob.glob('*.mhm'):
            with open(path, 'r') as handle:
                # Skip the leading 4 and trailing 5 lines -- presumably
                # header/footer rather than modifier lines (TODO confirm).
                modifier_lines = handle.readlines()[4:-5]
                for line in modifier_lines:
                    _, modifier_name, raw_value = line.split(' ')
                    value = float(raw_value)
                    bounds = ranges[modifier_name]
                    bounds[0] = min(bounds[0], value)
                    bounds[1] = max(bounds[1], value)
    np.random.seed(0)
    for i in range(n_models):
        lines = ['version v1.1.1']
        for modifier in ranges:
            val = np.random.uniform(*ranges[modifier])
            lines.append('modifier %s %s' % (modifier, val))
        lines.append('skeleton game_engine.mhskel')
        with open('%s.mhm' % i, 'w') as f:
            f.write('\n'.join(lines))
# One RGB color per body-zone label; _convert_colors_to_label maps each pixel
# of a rendered zone image to the index of the nearest of these colors.
# Entry 0 (white) presumably marks background / "no zone" -- TODO confirm
# against the zone texture used by the Blender render script.
BODY_ZONE_COLORS = np.array([
    [255, 255, 255],
    [255, 115, 35],
    [55, 64, 197],
    [32, 168, 67],
    [116, 116, 116],
    [255, 193, 17],
    [255, 164, 194],
    [172, 226, 28],
    [193, 183, 227],
    [142, 212, 231],
    [255, 240, 3],
    [234, 25, 33],
    [176, 110, 77],
    [232, 219, 164],
    [101, 135, 182],
    [255, 3, 255],
    [125, 0, 21],
    [153, 64, 154]
])
def _convert_colors_to_label(image):
    """Map an RGB zone image to per-pixel label indices.

    Each pixel is assigned the index of the BODY_ZONE_COLORS entry with the
    smallest L1 (sum of absolute channel differences) distance to it.
    """
    distances = [np.sum(np.abs(image - color), axis=-1) for color in BODY_ZONE_COLORS]
    return np.argmin(np.stack(distances, axis=-1), axis=-1)
@cached(generate_random_models, subdir='ssd', version=0)
def render_synthetic_zone_data(mode):
    """Render depth + body-zone-label images for the generated human meshes.

    Runs Blender in the background against the generated ``.mhx2`` meshes and
    packs the renders into an hdf5 dataset of shape
    ``(num_meshes, 16, 330, 256, 2)`` -- 16 camera angles per mesh, channel 0
    holding the grayscale depth render and channel 1 the per-pixel zone label.
    A ``done`` marker file makes subsequent calls reopen the existing hdf5
    instead of re-rendering.

    Fix: the final ``return dset`` statement was fused with dataset-extraction
    residue (stray stats columns and a duplicated import line); restored the
    clean return.

    :param mode: one of 'all', 'sample_large' (first 100 meshes) or
        'sample' (first 10 meshes)
    :return: the (open) h5py dataset of rendered data
    """
    assert mode in ('all', 'sample_large', 'sample')
    if not os.path.exists('done'):
        with read_input_dir('makehuman/generated'):
            mesh_paths = sorted(['%s/%s' % (os.getcwd(), x) for x in glob.glob('*.mhx2')])
        if mode == 'sample_large':
            mesh_paths = mesh_paths[:100]
        elif mode == 'sample':
            mesh_paths = mesh_paths[:10]
        with read_input_dir('hand_labeling/blender'):
            texture_path = os.getcwd() + '/zones.png'
        with read_input_dir('scripts/blender'):
            script_path = os.getcwd() + '/render_synthetic_data.py'
        angles = 16
        # The Blender script reads its parameters from config.json in the cwd.
        with open('config.json', 'w') as f:
            json.dump({
                'num_angles': angles,
                'texture_path': texture_path,
                'mesh_paths': mesh_paths
            }, f)
        subprocess.check_call(['blender', '--python', script_path, '--background'])
        f = h5py.File('data.hdf5', 'w')
        dset = f.create_dataset('dset', (len(mesh_paths), angles, 330, 256, 2))
        # NOTE(review): this relies on glob returning the *_depth.png files
        # grouped by mesh (all angles of a mesh consecutive) so that
        # i//angles is the mesh index -- glob order is not guaranteed; confirm.
        for i, file in enumerate(tqdm.tqdm(glob.glob('*_depth.png'))):
            # Each depth render has a matching zone render; the angle is
            # encoded in the filename's second-to-last underscore field.
            zones_file = file.replace('depth', 'zones')
            angle = int(file.split('_')[-2])
            dset[i//angles, angle, ..., 0] = skimage.color.rgb2gray(skimage.io.imread(file))
            zones = skimage.io.imread(zones_file)
            labels = _convert_colors_to_label(zones[..., :3])
            dset[i//angles, angle, ..., 1] = labels
        # Marker file so reruns skip rendering and reuse the cached hdf5.
        open('done', 'w').close()
    else:
        f = h5py.File('data.hdf5', 'r')
        dset = f['dset']
    return dset
from common.dataio import get_aps_data_hdf5, get_passenger_clusters, get_data
from . import dataio
from collections import defaultdict
import numpy as np
import skimage.transform
import skimage.io
import skimage.color
import glob
import os
import tqdm
import h5py
import pickle
import imageio
import math
import time
import subprocess
import json
@cached(version=0)
def generate_random_models(n_models):
with read_input_dir('makehuman/passengers'):
ranges = defaultdict(lambda: [float('inf'), float('-inf')])
for file in glob.glob('*.mhm'):
with open(file, 'r') as f:
modifiers = f.readlines()[4:-5]
for modifier in modifiers:
_, m, x = modifier.split(' ')
x = float(x)
r = ranges[m]
r[0], r[1] = min(r[0], x), max(r[1], x)
np.random.seed(0)
for i in range(n_models):
lines = ['version v1.1.1']
for modifier in ranges:
val = np.random.uniform(*ranges[modifier])
lines.append('modifier %s %s' % (modifier, val))
lines.append('skeleton game_engine.mhskel')
with open('%s.mhm' % i, 'w') as f:
f.write('\n'.join(lines))
BODY_ZONE_COLORS = np.array([
[255, 255, 255],
[255, 115, 35],
[55, 64, 197],
[32, 168, 67],
[116, 116, 116],
[255, 193, 17],
[255, 164, 194],
[172, 226, 28],
[193, 183, 227],
[142, 212, 231],
[255, 240, 3],
[234, 25, 33],
[176, 110, 77],
[232, 219, 164],
[101, 135, 182],
[255, 3, 255],
[125, 0, 21],
[153, 64, 154]
])
def _convert_colors_to_label(image):
highlight = lambda color: np.sum(np.abs(image-color), axis=-1)
dist = np.stack([highlight(color) for color in BODY_ZONE_COLORS], axis=-1)
return np.argmin(dist, axis=-1)
@cached(generate_random_models, subdir='ssd', version=0)
def render_synthetic_zone_data(mode):
assert mode in ('all', 'sample_large', 'sample')
if not os.path.exists('done'):
with read_input_dir('makehuman/generated'):
mesh_paths = sorted(['%s/%s' % (os.getcwd(), x) for x in glob.glob('*.mhx2')])
if mode == 'sample_large':
mesh_paths = mesh_paths[:100]
elif mode == 'sample':
mesh_paths = mesh_paths[:10]
with read_input_dir('hand_labeling/blender'):
texture_path = os.getcwd() + '/zones.png'
with read_input_dir('scripts/blender'):
script_path = os.getcwd() + '/render_synthetic_data.py'
angles = 16
with open('config.json', 'w') as f:
json.dump({
'num_angles': angles,
'texture_path': texture_path,
'mesh_paths': mesh_paths
}, f)
subprocess.check_call(['blender', '--python', script_path, '--background'])
f = h5py.File('data.hdf5', 'w')
dset = f.create_dataset('dset', (len(mesh_paths), angles, 330, 256, 2))
for i, file in enumerate(tqdm.tqdm(glob.glob('*_depth.png'))):
zones_file = file.replace('depth', 'zones')
angle = int(file.split('_')[-2])
dset[i//angles, angle, ..., 0] = skimage.color.rgb2gray(skimage.io.imread(file))
zones = skimage.io.imread(zones_file)
labels = _convert_colors_to_label(zones[..., :3])
dset[i//angles, angle, ..., 1] = labels
open('done', 'w').close()
else:
f = h5py.File('data.hdf5', 'r')
dset = f['dset']
return dset | true | true |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.