hexsha stringlengths 40 40 | size int64 2 1.02M | ext stringclasses 10
values | lang stringclasses 1
value | max_stars_repo_path stringlengths 4 245 | max_stars_repo_name stringlengths 6 130 | max_stars_repo_head_hexsha stringlengths 40 40 | max_stars_repo_licenses listlengths 1 10 | max_stars_count int64 1 191k ⌀ | max_stars_repo_stars_event_min_datetime stringlengths 24 24 ⌀ | max_stars_repo_stars_event_max_datetime stringlengths 24 24 ⌀ | max_issues_repo_path stringlengths 4 245 | max_issues_repo_name stringlengths 6 130 | max_issues_repo_head_hexsha stringlengths 40 40 | max_issues_repo_licenses listlengths 1 10 | max_issues_count int64 1 67k ⌀ | max_issues_repo_issues_event_min_datetime stringlengths 24 24 ⌀ | max_issues_repo_issues_event_max_datetime stringlengths 24 24 ⌀ | max_forks_repo_path stringlengths 4 245 | max_forks_repo_name stringlengths 6 130 | max_forks_repo_head_hexsha stringlengths 40 40 | max_forks_repo_licenses listlengths 1 10 | max_forks_count int64 1 105k ⌀ | max_forks_repo_forks_event_min_datetime stringlengths 24 24 ⌀ | max_forks_repo_forks_event_max_datetime stringlengths 24 24 ⌀ | content stringlengths 2 1.02M | avg_line_length float64 1 417k | max_line_length int64 1 987k | alphanum_fraction float64 0 1 | content_no_comment stringlengths 0 1.01M | is_comment_constant_removed bool 1
class | is_sharp_comment_removed bool 1
class |
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
f71b9fe025bede004ce7e9ec58b828318f04188f | 15,536 | py | Python | src/api/block.py | BluCodeGH/Amulet-Map-Editor | 7d1d2243fc29095b3cffe8aa4979235444ba6738 | [
"MIT"
] | 1 | 2020-08-26T22:42:16.000Z | 2020-08-26T22:42:16.000Z | src/api/block.py | BluCodeGH/Amulet-Map-Editor | 7d1d2243fc29095b3cffe8aa4979235444ba6738 | [
"MIT"
] | null | null | null | src/api/block.py | BluCodeGH/Amulet-Map-Editor | 7d1d2243fc29095b3cffe8aa4979235444ba6738 | [
"MIT"
] | null | null | null | from __future__ import annotations
import copy
from sys import getsizeof
import re
from typing import Dict, Iterable, List, Tuple, Union, overload
from api.errors import InvalidBlockException
from utils import Int
class Block:
    """
    Handle data about various blockstates and allow extra blocks to be
    created and interacted with.

    .. important::
        Creating version specific block objects via the ``Block()`` constructor
        instead of using :meth:`api.world.World.get_block_instance` is
        supported but not encouraged. To avoid possible caveats of doing this,
        make sure to either only instantiate blocks with Amulet blockstate
        data or use :meth:`api.world.World.get_block_instance` instead.

    Examples of creating a Block object with extra blocks:

    >>> stone = Block(blockstate="minecraft:stone")
    >>> water_level_1 = Block(blockstate="minecraft:water[level=1]")
    >>> stone_with_extra_block = stone + water_level_1
    >>> repr(stone_with_extra_block)
    'Block(minecraft:stone, minecraft:water[level=1])'

    Creating a new Block object using the namespace and base_name:

    >>> granite = Block(namespace="minecraft", base_name="granite")

    Adding another layer of extra blocks (operands are never modified):

    >>> stone_water_granite = stone_with_extra_block + granite
    >>> repr(stone_water_granite)
    'Block(minecraft:stone, minecraft:water[level=1], minecraft:granite)'

    Removing an extra block from all layers (every occurrence is removed):

    >>> stone_granite = stone_water_granite - water_level_1
    >>> repr(stone_granite)
    'Block(minecraft:stone, minecraft:granite)'

    Removing a specific layer:

    >>> oak_log_axis_x = Block(blockstate="minecraft:oak_log[axis=x]")
    >>> tall = stone_water_granite + water_level_1 + oak_log_axis_x
    >>> repr(tall.remove_layer(0))
    'Block(minecraft:stone, minecraft:granite, minecraft:water[level=1], minecraft:oak_log[axis=x])'
    """

    # Reduces memory footprint: Block objects can exist in very large numbers.
    __slots__ = (
        "_namespace",
        "_base_name",
        "_properties",
        "_extra_blocks",
        "_blockstate",
    )

    # Matches "namespace:base_name[prop=value...]". The first property pair is
    # captured directly; the remainder is parsed with `parameters_regex`.
    blockstate_regex = re.compile(
        r"(?:(?P<namespace>[a-z0-9_.-]+):)?(?P<base_name>[a-z0-9/._-]+)(?:\[(?P<property_name>[a-z0-9_]+)=(?P<property_value>[a-z0-9_]+)(?P<properties>.*)\])?"
    )

    parameters_regex = re.compile(r"(?:,(?P<name>[a-z0-9_]+)=(?P<value>[a-z0-9_]+))")

    def __init__(
        self,
        blockstate: str = None,
        namespace: str = None,
        base_name: str = None,
        properties: Dict[str, Union[str, bool, int]] = None,
        extra_blocks: Union[Block, Iterable[Block]] = None,
    ):
        """
        Create a Block from a blockstate string or its individual parts.

        :param blockstate: Full blockstate string (e.g. ``minecraft:stone``)
        :param namespace: Namespace of the block (e.g. ``minecraft``)
        :param base_name: Base name of the block (e.g. ``stone``)
        :param properties: Mapping of blockstate properties
        :param extra_blocks: A Block or iterable of Blocks layered on top
        """
        self._blockstate = blockstate
        self._namespace = namespace
        self._base_name = base_name

        # A block built from namespace/base_name with no explicit properties
        # gets an empty property mapping (distinguishes "unparsed" from "none").
        if namespace is not None and base_name is not None and properties is None:
            properties = {}

        self._properties = properties
        self._extra_blocks = ()
        if extra_blocks:
            if isinstance(extra_blocks, Block):
                extra_blocks = [extra_blocks]
            self._extra_blocks = tuple(extra_blocks)

        if blockstate:
            # Normalize the supplied blockstate string (sorted properties).
            self._gen_blockstate()

    @property
    def namespace(self) -> str:
        """
        The namespace of the blockstate represented by the Block object (IE: `minecraft`)

        :return: The namespace of the blockstate
        """
        if self._namespace is None:
            self._parse_blockstate_string()
        return self._namespace

    @property
    def base_name(self) -> str:
        """
        The base name of the blockstate represented by the Block object (IE: `stone`, `dirt`)

        :return: The base name of the blockstate
        """
        if self._base_name is None:
            self._parse_blockstate_string()
        return self._base_name

    @property
    def properties(self) -> Dict[str, Union[str, bool, int]]:
        """
        The mapping of properties of the blockstate represented by the Block object (IE: `{"level": "1"}`)

        :return: A dictionary of the properties of the blockstate
        """
        if self._properties is None:
            self._parse_blockstate_string()
        # Deep-copy so callers cannot mutate our internal state.
        return copy.deepcopy(self._properties)

    @property
    def blockstate(self) -> str:
        """
        The full blockstate string of the blockstate represented by the Block object (IE: `minecraft:stone`, `minecraft:oak_log[axis=x]`)

        :return: The blockstate string
        """
        if self._blockstate is None:
            self._gen_blockstate()
        return self._blockstate

    @property
    def extra_blocks(self) -> Union[Tuple, Tuple[Block]]:
        """
        Returns a tuple of the extra blocks contained in the Block instance

        :return: A tuple of Block objects
        """
        return self._extra_blocks

    def _gen_blockstate(self):
        """Regenerate the canonical blockstate string (sorted property keys)."""
        self._blockstate = f"{self.namespace}:{self.base_name}"
        if self.properties:
            props = [f"{key}={value}" for key, value in sorted(self.properties.items())]
            self._blockstate = f"{self._blockstate}[{','.join(props)}]"

    @staticmethod
    def parse_blockstate_string(blockstate: str) -> Tuple[str, str, Dict[str, str]]:
        """
        Split a blockstate string into its parts.

        :param blockstate: Blockstate string, e.g. ``minecraft:oak_log[axis=x]``
        :return: Tuple of (namespace, base_name, sorted property mapping);
            the namespace defaults to ``minecraft`` when absent
        """
        match = Block.blockstate_regex.match(blockstate)
        namespace = match.group("namespace") or "minecraft"
        base_name = match.group("base_name")

        if match.group("property_name") is not None:
            properties = {match.group("property_name"): match.group("property_value")}
        else:
            properties = {}

        properties_string = match.group("properties")
        if properties_string is not None:
            # Use a distinct loop variable: the original rebound `match`,
            # shadowing the outer regex match object.
            for prop_match in Block.parameters_regex.finditer(properties_string):
                properties[prop_match.group("name")] = prop_match.group("value")

        return namespace, base_name, {k: v for k, v in sorted(properties.items())}

    def _parse_blockstate_string(self):
        """Populate namespace/base_name/properties from the blockstate string."""
        self._namespace, self._base_name, self._properties = self.parse_blockstate_string(
            self._blockstate
        )

    def __str__(self) -> str:
        """
        :return: The base blockstate string of the Block object
        """
        return self.blockstate

    def __repr__(self) -> str:
        """
        :return: The base blockstate string of the Block object along with the blockstate strings of included extra blocks
        """
        return f"Block({', '.join([str(b) for b in (self, *self.extra_blocks)])})"

    def __len__(self):
        """Number of layers: the base block plus every extra block."""
        return len(self._extra_blocks) + 1

    def _compare_extra_blocks(self, other: Block) -> bool:
        """Return True when both Blocks carry equal extra blocks in order."""
        if len(self.extra_blocks) != len(other.extra_blocks):
            return False

        if len(self.extra_blocks) == 0:
            return True

        for our_extra_block, their_extra_block in zip(
            self.extra_blocks, other.extra_blocks
        ):
            if our_extra_block != their_extra_block:
                return False

        return True

    def __eq__(self, other: Block) -> bool:
        """
        Checks the equality of this Block object to another Block object

        :param other: The Block object to check against
        :return: True if the Blocks objects are equal, False otherwise
        """
        if self.__class__ != other.__class__:
            # Let Python try the reflected comparison instead of hard False.
            return NotImplemented

        return self.blockstate == other.blockstate and self._compare_extra_blocks(other)

    def __hash__(self) -> int:
        """
        Hashes the Block object

        :return: A hash of the Block object
        """
        current_hash = hash(self.blockstate)

        if self.extra_blocks:
            current_hash = current_hash + hash(self.extra_blocks)

        return current_hash

    @staticmethod
    def _without_nesting(block: Block) -> Block:
        """
        Return ``block`` itself when it has no extra blocks (avoids creating
        extra objects/references), otherwise a copy of just its base
        blockstate so extra blocks never nest.
        """
        if len(block.extra_blocks) == 0:
            return block
        return Block(
            namespace=block.namespace,
            base_name=block.base_name,
            properties=block.properties,
        )

    def __add__(self, other: Block) -> Block:
        """
        Allows for other Block objects to be added to this Block object's ``extra_blocks``

        :param other: The Block object to add to the end of this Block object's `extra_blocks`
        :return: A new Block object with the same data but with an additional Block at the end of ``extra_blocks``
        """
        if not isinstance(other, Block):
            return NotImplemented

        # Flatten `other` so its own extra blocks become siblings, not nested.
        other_cpy = Block._without_nesting(other)
        other_extras = [Block._without_nesting(eb) for eb in other.extra_blocks]

        return Block(
            namespace=self.namespace,
            base_name=self.base_name,
            properties=self.properties,
            extra_blocks=[*self.extra_blocks, other_cpy, *other_extras],
        )

    def __sub__(self, other: Block) -> Block:
        """
        Allows for other Block objects to be subtracted from this Block object's ``extra_blocks``

        :param other: The Block object to subtract from this Block objects' ``extra_blocks``
        :return: A new Block object without any instances of the subtracted block in ``extra_blocks``
        """
        if not isinstance(other, Block):
            return NotImplemented

        other_cpy = Block._without_nesting(other)
        other_extras = [Block._without_nesting(eb) for eb in other.extra_blocks]

        # Sets are unordered, so a set subtraction wouldn't reliably keep layer
        # order; filter sequentially instead, dropping every occurrence of a
        # block slated for removal.
        extra_blocks_to_remove = (other_cpy, *other_extras)
        new_extras = [eb for eb in self.extra_blocks if eb not in extra_blocks_to_remove]

        return Block(
            namespace=self.namespace,
            base_name=self.base_name,
            properties=self.properties,
            extra_blocks=new_extras,
        )

    def remove_layer(self, layer: int) -> Block:
        """
        Removes the Block object from the specified layer and returns the resulting new Block object

        :param layer: The layer of extra block to remove
        :return: A new instance of Block with the same data but with the extra block at specified layer removed
        :raises `InvalidBlockException`: Raised when you remove the base block from a Block with no other extra blocks
        """
        if (
            layer == 0
            and len(self.extra_blocks) > 0
            and layer <= len(self.extra_blocks)
        ):
            # Removing the base: promote the first extra block to be the base.
            new_base = self._extra_blocks[0]
            return Block(
                namespace=new_base.namespace,
                base_name=new_base.base_name,
                properties=new_base.properties,
                extra_blocks=[*self._extra_blocks[1:]],
            )
        elif layer > len(self.extra_blocks):
            raise InvalidBlockException("You cannot remove a non-existant layer")
        elif layer == 0:
            raise InvalidBlockException(
                "Removing the base block with no extra blocks is not supported"
            )

        # Layer N corresponds to extra_blocks[N - 1].
        return Block(
            namespace=self.namespace,
            base_name=self.base_name,
            properties=self.properties,
            extra_blocks=[*self.extra_blocks[: layer - 1], *self.extra_blocks[layer:]],
        )

    def __sizeof__(self):
        """Approximate memory footprint including all extra blocks."""
        size = (
            getsizeof(self.namespace)
            + getsizeof(self.base_name)
            + getsizeof(self.properties)
            + getsizeof(self.blockstate)
        )
        for eb in self.extra_blocks:
            size += getsizeof(eb)
        return size
class BlockManager:
    """
    Class to handle the mappings between Block objects and their index-based internal IDs
    """

    def __init__(self):
        """
        Creates a new BlockManager object
        """
        # Forward (index -> Block) and reverse (Block -> index) lookups are
        # kept in sync by get_add_block().
        self._index_to_block: List[Block] = []
        self._block_to_index_map: Dict[Block, int] = {}

    def __len__(self):
        """Number of registered blocks."""
        return len(self._index_to_block)

    def __contains__(self, item: Block) -> bool:
        """Return True when the Block has been registered."""
        return item in self._block_to_index_map

    @overload
    def __getitem__(self, item: Block) -> int:
        ...

    @overload
    def __getitem__(self, item: Int) -> Block:
        ...

    def __getitem__(self, item):
        """
        If a Block object is passed to this function, it'll return the internal ID/index of the
        blockstate. If an int is given, this method will return the Block object at that specified index.

        :param item: The Block object or int to get the mapping data of
        :return: An int if a Block object was supplied, a Block object if an int was supplied
        :raises KeyError: If the Block or index is not present in the manager
        """
        try:
            if isinstance(item, Block):
                return self._block_to_index_map[item]

            return self._index_to_block[item]
        except (KeyError, IndexError) as err:
            # Chain the original lookup failure for better diagnostics.
            raise KeyError(
                f"There is no {item} in the BlockManager. "
                f"You might want to use the `add_block` function for your blocks before accessing them."
            ) from err

    def get_add_block(self, block: Block) -> int:
        """
        Adds a Block object to the internal Block object/ID mappings. If the Block already exists in the mappings,
        then the existing ID is returned

        :param block: The Block to add to the manager
        :return: The internal ID of the Block
        """
        if block in self._block_to_index_map:
            return self._block_to_index_map[block]

        self._block_to_index_map[block] = i = len(self._block_to_index_map)
        self._index_to_block.append(block)

        return i
| 35.149321 | 159 | 0.613092 | from __future__ import annotations
import copy
from sys import getsizeof
import re
from typing import Dict, Iterable, List, Tuple, Union, overload
from api.errors import InvalidBlockException
from utils import Int
class Block:
    """
    Represent a blockstate (namespace, base name and properties) together with
    an ordered tuple of "extra" blocks layered on top of the base block.

    Instances parse lazily: a Block may be built from a full blockstate string
    or from its individual parts, and the missing representation is generated
    on first access.
    """

    # Keep per-instance memory low; many Block objects may exist at once.
    __slots__ = (
        "_namespace",
        "_base_name",
        "_properties",
        "_extra_blocks",
        "_blockstate",
    )

    blockstate_regex = re.compile(
        r"(?:(?P<namespace>[a-z0-9_.-]+):)?(?P<base_name>[a-z0-9/._-]+)(?:\[(?P<property_name>[a-z0-9_]+)=(?P<property_value>[a-z0-9_]+)(?P<properties>.*)\])?"
    )
    parameters_regex = re.compile(r"(?:,(?P<name>[a-z0-9_]+)=(?P<value>[a-z0-9_]+))")

    def __init__(
        self,
        blockstate: str = None,
        namespace: str = None,
        base_name: str = None,
        properties: Dict[str, Union[str, bool, int]] = None,
        extra_blocks: Union[Block, Iterable[Block]] = None,
    ):
        """Build a Block from a blockstate string or its components."""
        self._blockstate = blockstate
        self._namespace = namespace
        self._base_name = base_name
        # Namespaced blocks created without properties get an empty mapping.
        if namespace is not None and base_name is not None and properties is None:
            properties = {}
        self._properties = properties
        if extra_blocks:
            if isinstance(extra_blocks, Block):
                extra_blocks = [extra_blocks]
            self._extra_blocks = tuple(extra_blocks)
        else:
            self._extra_blocks = ()
        if blockstate:
            self._gen_blockstate()

    @property
    def namespace(self) -> str:
        """Namespace of the blockstate, e.g. ``minecraft``."""
        if self._namespace is None:
            self._parse_blockstate_string()
        return self._namespace

    @property
    def base_name(self) -> str:
        """Base name of the blockstate, e.g. ``stone``."""
        if self._base_name is None:
            self._parse_blockstate_string()
        return self._base_name

    @property
    def properties(self) -> Dict[str, Union[str, bool, int]]:
        """Deep copy of the blockstate's property mapping."""
        if self._properties is None:
            self._parse_blockstate_string()
        return copy.deepcopy(self._properties)

    @property
    def blockstate(self) -> str:
        """Full blockstate string, e.g. ``minecraft:oak_log[axis=x]``."""
        if self._blockstate is None:
            self._gen_blockstate()
        return self._blockstate

    @property
    def extra_blocks(self) -> Union[Tuple, Tuple[Block]]:
        """Tuple of extra Block objects layered on this one."""
        return self._extra_blocks

    def _gen_blockstate(self):
        # Canonical form: namespace:base_name[k1=v1,...] with sorted keys.
        state = f"{self.namespace}:{self.base_name}"
        if self.properties:
            joined = ",".join(
                f"{key}={value}" for key, value in sorted(self.properties.items())
            )
            state = f"{state}[{joined}]"
        self._blockstate = state

    @staticmethod
    def parse_blockstate_string(blockstate: str) -> Tuple[str, str, Dict[str, str]]:
        """Split a blockstate string into (namespace, base_name, properties)."""
        parsed = Block.blockstate_regex.match(blockstate)
        namespace = parsed.group("namespace") or "minecraft"
        base_name = parsed.group("base_name")
        if parsed.group("property_name") is None:
            properties = {}
        else:
            properties = {parsed.group("property_name"): parsed.group("property_value")}
        remainder = parsed.group("properties")
        if remainder is not None:
            for pair in Block.parameters_regex.finditer(remainder):
                properties[pair.group("name")] = pair.group("value")
        return namespace, base_name, dict(sorted(properties.items()))

    def _parse_blockstate_string(self):
        self._namespace, self._base_name, self._properties = self.parse_blockstate_string(
            self._blockstate
        )

    def __str__(self) -> str:
        """Base blockstate string only."""
        return self.blockstate

    def __repr__(self) -> str:
        """Base blockstate plus every extra block's blockstate."""
        return f"Block({', '.join([str(b) for b in (self, *self.extra_blocks)])})"

    def __len__(self):
        return len(self._extra_blocks) + 1

    def _compare_extra_blocks(self, other: Block) -> bool:
        # Equal length and pairwise-equal extra blocks, in order.
        return len(self.extra_blocks) == len(other.extra_blocks) and all(
            mine == theirs
            for mine, theirs in zip(self.extra_blocks, other.extra_blocks)
        )

    def __eq__(self, other: Block) -> bool:
        """Blocks are equal when blockstate and extra blocks both match."""
        if self.__class__ != other.__class__:
            return False
        return self.blockstate == other.blockstate and self._compare_extra_blocks(other)

    def __hash__(self) -> int:
        """Hash combining the blockstate and any extra blocks."""
        if self.extra_blocks:
            return hash(self.blockstate) + hash(self.extra_blocks)
        return hash(self.blockstate)

    def __add__(self, other: Block) -> Block:
        """Return a new Block with ``other`` (flattened) appended to extra_blocks."""
        if not isinstance(other, Block):
            return NotImplemented

        def flatten(block: Block) -> Block:
            # Reuse blocks carrying no extras; otherwise copy just the base.
            if len(block.extra_blocks) == 0:
                return block
            return Block(
                namespace=block.namespace,
                base_name=block.base_name,
                properties=block.properties,
            )

        appended = [flatten(other)] + [flatten(eb) for eb in other.extra_blocks]
        return Block(
            namespace=self.namespace,
            base_name=self.base_name,
            properties=self.properties,
            extra_blocks=[*self.extra_blocks, *appended],
        )

    def __sub__(self, other: Block) -> Block:
        """Return a new Block with every occurrence of ``other`` removed."""
        if not isinstance(other, Block):
            return NotImplemented

        def flatten(block: Block) -> Block:
            if len(block.extra_blocks) == 0:
                return block
            return Block(
                namespace=block.namespace,
                base_name=block.base_name,
                properties=block.properties,
            )

        removals = (flatten(other), *[flatten(eb) for eb in other.extra_blocks])
        # Preserve layer order; a set difference would not guarantee it.
        kept = [eb for eb in self.extra_blocks if eb not in removals]
        return Block(
            namespace=self.namespace,
            base_name=self.base_name,
            properties=self.properties,
            extra_blocks=kept,
        )

    def remove_layer(self, layer: int) -> Block:
        """Return a new Block with the given layer removed (0 is the base)."""
        extras = self.extra_blocks
        if layer > len(extras):
            raise InvalidBlockException("You cannot remove a non-existant layer")
        if layer == 0:
            if not extras:
                raise InvalidBlockException(
                    "Removing the base block with no extra blocks is not supported"
                )
            # Promote the first extra block to be the new base.
            promoted = extras[0]
            return Block(
                namespace=promoted.namespace,
                base_name=promoted.base_name,
                properties=promoted.properties,
                extra_blocks=[*extras[1:]],
            )
        return Block(
            namespace=self.namespace,
            base_name=self.base_name,
            properties=self.properties,
            extra_blocks=[*extras[: layer - 1], *extras[layer:]],
        )

    def __sizeof__(self):
        """Approximate footprint including every extra block."""
        return (
            getsizeof(self.namespace)
            + getsizeof(self.base_name)
            + getsizeof(self.properties)
            + getsizeof(self.blockstate)
            + sum(getsizeof(eb) for eb in self.extra_blocks)
        )
class BlockManager:
    """Maintain a bidirectional mapping between Block objects and integer IDs."""

    def __init__(self):
        """Create an empty manager."""
        self._index_to_block: List[Block] = []
        self._block_to_index_map: Dict[Block, int] = {}

    def __len__(self):
        return len(self._index_to_block)

    def __contains__(self, item: Block) -> bool:
        return item in self._block_to_index_map

    @overload
    def __getitem__(self, item: Block) -> int:
        ...

    @overload
    def __getitem__(self, item: Int) -> Block:
        ...

    def __getitem__(self, item):
        """Translate a Block to its internal ID, or an int ID back to its Block."""
        try:
            if isinstance(item, Block):
                return self._block_to_index_map[item]
            return self._index_to_block[item]
        except (KeyError, IndexError):
            raise KeyError(
                f"There is no {item} in the BlockManager. "
                f"You might want to use the `add_block` function for your blocks before accessing them."
            )

    def get_add_block(self, block: Block) -> int:
        """Return the ID for ``block``, registering it first if unseen."""
        existing = self._block_to_index_map.get(block)
        if existing is not None:
            return existing
        new_index = len(self._block_to_index_map)
        self._block_to_index_map[block] = new_index
        self._index_to_block.append(block)
        return new_index
| true | true |
f71ba01410b6baed59ee6dcd47fff57fa191f5b4 | 22,545 | py | Python | tests/test_setup.py | PiotrMachowski/core | b9d7d0cae2ccd2d88e90e49cc09e154a27ed809b | [
"Apache-2.0"
] | 3 | 2020-11-27T06:26:27.000Z | 2020-12-09T14:55:16.000Z | tests/test_setup.py | PiotrMachowski/core | b9d7d0cae2ccd2d88e90e49cc09e154a27ed809b | [
"Apache-2.0"
] | 18 | 2021-11-24T06:26:13.000Z | 2022-03-31T06:25:15.000Z | tests/test_setup.py | PiotrMachowski/core | b9d7d0cae2ccd2d88e90e49cc09e154a27ed809b | [
"Apache-2.0"
] | 3 | 2021-11-14T13:29:33.000Z | 2021-12-27T17:05:22.000Z | """Test component/platform setup."""
# pylint: disable=protected-access
import asyncio
import datetime
import threading
from unittest.mock import AsyncMock, Mock, patch
import pytest
import voluptuous as vol
from homeassistant import config_entries, setup
from homeassistant.const import EVENT_COMPONENT_LOADED, EVENT_HOMEASSISTANT_START
from homeassistant.core import callback
from homeassistant.helpers import discovery
from homeassistant.helpers.config_validation import (
PLATFORM_SCHEMA,
PLATFORM_SCHEMA_BASE,
)
from tests.common import (
MockConfigEntry,
MockModule,
MockPlatform,
assert_setup_component,
mock_entity_platform,
mock_integration,
)
@pytest.fixture
def mock_handlers():
    """Mock config flows."""

    class MockFlowHandler(config_entries.ConfigFlow):
        """Define a mock flow handler."""

        VERSION = 1

    # Register the handler for the "comp" domain for the test's duration.
    with patch.dict(config_entries.HANDLERS, {"comp": MockFlowHandler}):
        yield
async def test_validate_component_config(hass):
    """Test validating component configuration."""
    config_schema = vol.Schema({"comp_conf": {"hello": str}}, required=True)
    mock_integration(hass, MockModule("comp_conf", config_schema=config_schema))

    # Every payload that violates the required schema must be rejected.
    for bad_conf in (
        {},
        {"comp_conf": None},
        {"comp_conf": {}},
        {"comp_conf": {"hello": "world", "invalid": "extra"}},
    ):
        with assert_setup_component(0):
            assert not await setup.async_setup_component(hass, "comp_conf", bad_conf)
        hass.data.pop(setup.DATA_SETUP)

    # A fully valid payload passes.
    with assert_setup_component(1):
        assert await setup.async_setup_component(
            hass, "comp_conf", {"comp_conf": {"hello": "world"}}
        )
async def test_validate_platform_config(hass, caplog):
    """Test validating platform configuration."""
    platform_schema = PLATFORM_SCHEMA.extend({"hello": str})
    platform_schema_base = PLATFORM_SCHEMA_BASE.extend({})
    mock_integration(
        hass,
        MockModule("platform_conf", platform_schema_base=platform_schema_base),
    )
    mock_entity_platform(
        hass,
        "platform_conf.whatever",
        MockPlatform(platform_schema=platform_schema),
    )

    # An unknown platform still sets up the component, but no platform loads.
    with assert_setup_component(0):
        assert await setup.async_setup_component(
            hass,
            "platform_conf",
            {"platform_conf": {"platform": "not_existing", "hello": "world"}},
        )
    hass.data.pop(setup.DATA_SETUP)
    hass.config.components.remove("platform_conf")

    # A valid platform config is accepted both as a dict and as a list.
    for valid_conf in (
        {"platform_conf": {"platform": "whatever", "hello": "world"}},
        {"platform_conf": [{"platform": "whatever", "hello": "world"}]},
    ):
        with assert_setup_component(1):
            assert await setup.async_setup_component(hass, "platform_conf", valid_conf)
        hass.data.pop(setup.DATA_SETUP)
        hass.config.components.remove("platform_conf")

    # Any falsey platform config will be ignored (None, {}, etc)
    with assert_setup_component(0) as config:
        assert await setup.async_setup_component(
            hass, "platform_conf", {"platform_conf": None}
        )
        assert "platform_conf" in hass.config.components
        assert not config["platform_conf"]  # empty

        assert await setup.async_setup_component(
            hass, "platform_conf", {"platform_conf": {}}
        )
        assert "platform_conf" in hass.config.components
        assert not config["platform_conf"]  # empty
async def test_validate_platform_config_2(hass, caplog):
    """Test component PLATFORM_SCHEMA_BASE prio over PLATFORM_SCHEMA."""
    platform_schema = PLATFORM_SCHEMA.extend({"hello": str})
    platform_schema_base = PLATFORM_SCHEMA_BASE.extend({"hello": "world"})
    mock_integration(
        hass,
        MockModule(
            "platform_conf",
            platform_schema=platform_schema,
            platform_schema_base=platform_schema_base,
        ),
    )
    mock_entity_platform(
        hass,
        "platform_conf.whatever",
        MockPlatform("whatever", platform_schema=platform_schema),
    )

    # Only the first entry validates: the component-level schema base pins
    # hello to "world", overriding the looser platform schema.
    with assert_setup_component(1):
        assert await setup.async_setup_component(
            hass,
            "platform_conf",
            {
                # pass
                "platform_conf": {"platform": "whatever", "hello": "world"},
                # fail: key hello violates component platform_schema_base
                "platform_conf 2": {"platform": "whatever", "hello": "there"},
            },
        )
async def test_validate_platform_config_3(hass, caplog):
    """Test fallback to component PLATFORM_SCHEMA."""
    component_schema = PLATFORM_SCHEMA_BASE.extend({"hello": str})
    platform_schema = PLATFORM_SCHEMA.extend({"cheers": str, "hello": "world"})
    mock_integration(
        hass, MockModule("platform_conf", platform_schema=component_schema)
    )
    mock_entity_platform(
        hass,
        "platform_conf.whatever",
        MockPlatform("whatever", platform_schema=platform_schema),
    )

    # With no schema base, the component PLATFORM_SCHEMA is the gatekeeper.
    with assert_setup_component(1):
        assert await setup.async_setup_component(
            hass,
            "platform_conf",
            {
                # pass
                "platform_conf": {"platform": "whatever", "hello": "world"},
                # fail: key hello violates component platform_schema
                "platform_conf 2": {"platform": "whatever", "hello": "there"},
            },
        )
async def test_validate_platform_config_4(hass):
    """Test entity_namespace in PLATFORM_SCHEMA."""
    component_schema = PLATFORM_SCHEMA_BASE
    platform_schema = PLATFORM_SCHEMA
    mock_integration(
        hass,
        MockModule("platform_conf", platform_schema_base=component_schema),
    )
    mock_entity_platform(
        hass,
        "platform_conf.whatever",
        MockPlatform(platform_schema=platform_schema),
    )

    # entity_namespace is part of the stock PLATFORM_SCHEMA, so this passes.
    with assert_setup_component(1):
        assert await setup.async_setup_component(
            hass,
            "platform_conf",
            {
                "platform_conf": {
                    "platform": "whatever",
                    "entity_namespace": "yummy",
                }
            },
        )

    hass.data.pop(setup.DATA_SETUP)
    hass.config.components.remove("platform_conf")
async def test_component_not_found(hass):
    """setup_component should not crash if component doesn't exist."""
    # An unknown integration must fail gracefully rather than raise.
    assert await setup.async_setup_component(hass, "non_existing", {}) is False
async def test_component_not_double_initialized(hass):
    """Test we do not set up a component twice."""
    setup_mock = Mock(return_value=True)
    mock_integration(hass, MockModule("comp", setup=setup_mock))

    # First setup invokes the component's setup function...
    assert await setup.async_setup_component(hass, "comp", {})
    assert setup_mock.called

    # ...a second setup is a no-op.
    setup_mock.reset_mock()
    assert await setup.async_setup_component(hass, "comp", {})
    assert not setup_mock.called
async def test_component_not_installed_if_requirement_fails(hass):
    """Component setup should fail if requirement can't install."""
    hass.config.skip_pip = False
    mock_integration(hass, MockModule("comp", requirements=["package==0.0.1"]))

    # Simulate pip failing to install the requirement.
    with patch("homeassistant.util.package.install_package", return_value=False):
        assert not await setup.async_setup_component(hass, "comp", {})

    assert "comp" not in hass.config.components
async def test_component_not_setup_twice_if_loaded_during_other_setup(hass):
    """Test component setup while waiting for lock is not set up twice."""
    calls = []

    async def async_setup(hass, config):
        """Tracking Setup."""
        calls.append(1)

    mock_integration(hass, MockModule("comp", async_setup=async_setup))

    def setup_component():
        """Set up the component from a second thread."""
        setup.setup_component(hass, "comp", {})

    # Race a threaded setup against the async one; only one may run setup.
    thread = threading.Thread(target=setup_component)
    thread.start()
    await setup.async_setup_component(hass, "comp", {})

    await hass.async_add_executor_job(thread.join)
    assert len(calls) == 1
async def test_component_not_setup_missing_dependencies(hass):
    """Test we do not set up a component if not all dependencies loaded."""
    deps = ["maybe_existing"]
    mock_integration(hass, MockModule("comp", dependencies=deps))

    # Unknown dependency -> setup fails.
    assert not await setup.async_setup_component(hass, "comp", {})
    assert "comp" not in hass.config.components

    hass.data.pop(setup.DATA_SETUP)

    # Once the dependency exists, setup succeeds.
    mock_integration(hass, MockModule("comp2", dependencies=deps))
    mock_integration(hass, MockModule("maybe_existing"))

    assert await setup.async_setup_component(hass, "comp2", {})
async def test_component_failing_setup(hass):
    """Test component that fails setup."""
    # A setup function returning False means the component failed to load.
    mock_integration(hass, MockModule("comp", setup=lambda hass, config: False))

    assert not await setup.async_setup_component(hass, "comp", {})
    assert "comp" not in hass.config.components
async def test_component_exception_setup(hass):
    """Test component that raises exception during setup."""

    def exception_setup(hass, config):
        """Raise exception."""
        raise Exception("fail!")

    mock_integration(hass, MockModule("comp", setup=exception_setup))

    # The exception is swallowed and reported as a failed setup.
    assert not await setup.async_setup_component(hass, "comp", {})
    assert "comp" not in hass.config.components
async def test_component_setup_with_validation_and_dependency(hass):
    """Test all config is passed to dependencies."""

    def config_check_setup(hass, config):
        """Test that config is passed in."""
        if config.get("comp_a", {}).get("valid", False):
            return True
        raise Exception(f"Config not passed in: {config}")

    platform = MockPlatform()

    # platform_a depends on comp_a; both verify they received the full config.
    mock_integration(hass, MockModule("comp_a", setup=config_check_setup))
    mock_integration(
        hass,
        MockModule("platform_a", setup=config_check_setup, dependencies=["comp_a"]),
    )
    mock_entity_platform(hass, "switch.platform_a", platform)

    await setup.async_setup_component(
        hass,
        "switch",
        {"comp_a": {"valid": True}, "switch": {"platform": "platform_a"}},
    )
    await hass.async_block_till_done()
    assert "comp_a" in hass.config.components
async def test_platform_specific_config_validation(hass):
    """Test platform that specifies config."""
    platform_schema = PLATFORM_SCHEMA.extend({"valid": True}, extra=vol.PREVENT_EXTRA)
    mock_setup = Mock(spec_set=True)
    mock_entity_platform(
        hass,
        "switch.platform_a",
        MockPlatform(platform_schema=platform_schema, setup_platform=mock_setup),
    )

    # Unknown key -> schema rejects the platform, setup never runs.
    with assert_setup_component(0, "switch"):
        assert await setup.async_setup_component(
            hass,
            "switch",
            {"switch": {"platform": "platform_a", "invalid": True}},
        )
        await hass.async_block_till_done()
        assert mock_setup.call_count == 0

    hass.data.pop(setup.DATA_SETUP)
    hass.config.components.remove("switch")

    # Extra key beyond PREVENT_EXTRA -> rejected as well.
    with assert_setup_component(0):
        assert await setup.async_setup_component(
            hass,
            "switch",
            {
                "switch": {
                    "platform": "platform_a",
                    "valid": True,
                    "invalid_extra": True,
                }
            },
        )
        await hass.async_block_till_done()
        assert mock_setup.call_count == 0

    hass.data.pop(setup.DATA_SETUP)
    hass.config.components.remove("switch")

    # Fully valid config -> platform setup runs exactly once.
    with assert_setup_component(1, "switch"):
        assert await setup.async_setup_component(
            hass,
            "switch",
            {"switch": {"platform": "platform_a", "valid": True}},
        )
        await hass.async_block_till_done()
        assert mock_setup.call_count == 1
async def test_disable_component_if_invalid_return(hass):
    """Test that a component is not loaded when setup returns None or False."""
    # setup returning None is treated as an invalid result -> component disabled.
    mock_integration(
        hass, MockModule("disabled_component", setup=lambda hass, config: None)
    )
    assert not await setup.async_setup_component(hass, "disabled_component", {})
    assert "disabled_component" not in hass.config.components
    hass.data.pop(setup.DATA_SETUP)
    # setup returning False explicitly signals failure -> component disabled.
    mock_integration(
        hass,
        MockModule("disabled_component", setup=lambda hass, config: False),
    )
    assert not await setup.async_setup_component(hass, "disabled_component", {})
    assert "disabled_component" not in hass.config.components
    hass.data.pop(setup.DATA_SETUP)
    # setup returning True -> component is loaded.
    mock_integration(
        hass, MockModule("disabled_component", setup=lambda hass, config: True)
    )
    assert await setup.async_setup_component(hass, "disabled_component", {})
    assert "disabled_component" in hass.config.components
async def test_all_work_done_before_start(hass):
    """Test that all discovered setup work finishes before the start event fires."""
    call_order = []
    async def component1_setup(hass, config):
        """Set up mock component that discovers two more components."""
        await discovery.async_discover(
            hass, "test_component2", {}, "test_component2", {}
        )
        await discovery.async_discover(
            hass, "test_component3", {}, "test_component3", {}
        )
        return True
    def component_track_setup(hass, config):
        """Set up mock component, recording that setup ran (1)."""
        call_order.append(1)
        return True
    mock_integration(hass, MockModule("test_component1", async_setup=component1_setup))
    mock_integration(hass, MockModule("test_component2", setup=component_track_setup))
    mock_integration(hass, MockModule("test_component3", setup=component_track_setup))
    @callback
    def track_start(event):
        """Record that the start event fired (2)."""
        call_order.append(2)
    hass.bus.async_listen_once(EVENT_HOMEASSISTANT_START, track_start)
    hass.add_job(setup.async_setup_component(hass, "test_component1", {}))
    await hass.async_block_till_done()
    await hass.async_start()
    # Both discovered setups (1, 1) must precede the start event (2).
    assert call_order == [1, 1, 2]
async def test_component_warn_slow_setup(hass):
    """Test that a slow-setup warning timer is scheduled and later cancelled."""
    mock_integration(hass, MockModule("test_component1"))
    with patch.object(hass.loop, "call_later") as mock_call:
        result = await setup.async_setup_component(hass, "test_component1", {})
        assert result
        assert mock_call.called
        assert len(mock_call.mock_calls) == 3
        # First scheduled call is the slow-setup warning with the right delay.
        timeout, logger_method = mock_call.mock_calls[0][1][:2]
        assert timeout == setup.SLOW_SETUP_WARNING
        assert logger_method == setup._LOGGER.warning
        # The timer is cancelled once setup completes in time.
        assert mock_call().cancel.called
async def test_platform_no_warn_slow(hass):
    """Test that no slow-setup timer is scheduled for a platform component."""
    mock_integration(
        hass, MockModule("test_component1", platform_schema=PLATFORM_SCHEMA)
    )
    with patch.object(hass.loop, "call_later") as mock_call:
        result = await setup.async_setup_component(hass, "test_component1", {})
        assert result
        assert len(mock_call.mock_calls) == 0
async def test_platform_error_slow_setup(hass, caplog):
    """Don't block startup more than SLOW_SETUP_MAX_WAIT."""
    with patch.object(setup, "SLOW_SETUP_MAX_WAIT", 0.1):
        called = []
        async def async_setup(*args):
            """Set up mock component that takes longer than the max wait."""
            called.append(1)
            await asyncio.sleep(2)
        mock_integration(hass, MockModule("test_component1", async_setup=async_setup))
        result = await setup.async_setup_component(hass, "test_component1", {})
        # Setup was attempted once, but aborted and logged after 0.1 s.
        assert len(called) == 1
        assert not result
        assert "test_component1 is taking longer than 0.1 seconds" in caplog.text
async def test_when_setup_already_loaded(hass):
    """Test async_when_setup fires once per registration, and immediately if loaded."""
    calls = []
    async def mock_callback(hass, component):
        """Record the component the callback fired for."""
        calls.append(component)
    setup.async_when_setup(hass, "test", mock_callback)
    await hass.async_block_till_done()
    assert calls == []
    hass.config.components.add("test")
    hass.bus.async_fire(EVENT_COMPONENT_LOADED, {"component": "test"})
    await hass.async_block_till_done()
    assert calls == ["test"]
    # Event listener should be gone: a second LOADED event must not re-fire.
    hass.bus.async_fire(EVENT_COMPONENT_LOADED, {"component": "test"})
    await hass.async_block_till_done()
    assert calls == ["test"]
    # Should be called right away since the component is already loaded.
    setup.async_when_setup(hass, "test", mock_callback)
    await hass.async_block_till_done()
    assert calls == ["test", "test"]
async def test_async_when_setup_or_start_already_loaded(hass):
    """Test async_when_setup_or_start fires on load, or at latest on start."""
    calls = []
    async def mock_callback(hass, component):
        """Record the component the callback fired for."""
        calls.append(component)
    setup.async_when_setup_or_start(hass, "test", mock_callback)
    await hass.async_block_till_done()
    assert calls == []
    hass.config.components.add("test")
    hass.bus.async_fire(EVENT_COMPONENT_LOADED, {"component": "test"})
    await hass.async_block_till_done()
    assert calls == ["test"]
    # Event listener should be gone after the first fire.
    hass.bus.async_fire(EVENT_COMPONENT_LOADED, {"component": "test"})
    await hass.async_block_till_done()
    assert calls == ["test"]
    # Should be called right away for an already-loaded component.
    setup.async_when_setup_or_start(hass, "test", mock_callback)
    await hass.async_block_till_done()
    assert calls == ["test", "test"]
    # A never-loaded component only fires when Home Assistant starts.
    setup.async_when_setup_or_start(hass, "not_loaded", mock_callback)
    await hass.async_block_till_done()
    assert calls == ["test", "test"]
    hass.bus.async_fire(EVENT_HOMEASSISTANT_START)
    await hass.async_block_till_done()
    assert calls == ["test", "test", "not_loaded"]
async def test_setup_import_blows_up(hass):
    """Test that we handle it correctly when importing an integration raises."""
    with patch(
        "homeassistant.loader.Integration.get_component", side_effect=ImportError
    ):
        # The ImportError must be swallowed and reported as setup failure.
        assert not await setup.async_setup_component(hass, "sun", {})
async def test_parallel_entry_setup(hass, mock_handlers):
    """Test config entries are set up in parallel."""
    MockConfigEntry(domain="comp", data={"value": 1}).add_to_hass(hass)
    MockConfigEntry(domain="comp", data={"value": 2}).add_to_hass(hass)
    calls = []
    async def mock_async_setup_entry(hass, entry):
        """Mock setting up an entry, yielding once mid-setup."""
        calls.append(entry.data["value"])
        await asyncio.sleep(0)
        calls.append(entry.data["value"])
        return True
    mock_integration(
        hass,
        MockModule(
            "comp",
            async_setup_entry=mock_async_setup_entry,
        ),
    )
    mock_entity_platform(hass, "config_flow.comp", None)
    await setup.async_setup_component(hass, "comp", {})
    # Interleaved order [1, 2, 1, 2] proves both entries ran concurrently;
    # sequential setup would have produced [1, 1, 2, 2].
    assert calls == [1, 2, 1, 2]
async def test_integration_disabled(hass, caplog):
    """Test we can disable an integration via its manifest."""
    disabled_reason = "Dependency contains code that breaks Home Assistant"
    mock_integration(
        hass,
        MockModule("test_component1", partial_manifest={"disabled": disabled_reason}),
    )
    result = await setup.async_setup_component(hass, "test_component1", {})
    assert not result
    # The manifest's disabled reason must surface in the log output.
    assert disabled_reason in caplog.text
async def test_async_get_loaded_integrations(hass):
    """Test we can enumerate loaded integrations."""
    # Platform entries ("domain.platform") are reduced to the platform's
    # own integration name; plain domains are kept as-is.
    hass.config.components.add("notbase")
    hass.config.components.add("switch")
    hass.config.components.add("notbase.switch")
    hass.config.components.add("myintegration")
    hass.config.components.add("device_tracker")
    hass.config.components.add("device_tracker.other")
    hass.config.components.add("myintegration.light")
    assert setup.async_get_loaded_integrations(hass) == {
        "other",
        "switch",
        "notbase",
        "myintegration",
        "device_tracker",
    }
async def test_integration_no_setup(hass, caplog):
    """Test we fail integration setup without setup functions."""
    # setup=False suppresses the MockModule default setup method entirely.
    mock_integration(
        hass,
        MockModule("test_integration_without_setup", setup=False),
    )
    result = await setup.async_setup_component(
        hass, "test_integration_without_setup", {}
    )
    assert not result
    assert "No setup or config entry setup function defined" in caplog.text
async def test_integration_only_setup_entry(hass):
    """Test an integration with only a config entry setup method still loads."""
    mock_integration(
        hass,
        MockModule(
            "test_integration_only_entry",
            setup=False,
            async_setup_entry=AsyncMock(return_value=True),
        ),
    )
    assert await setup.async_setup_component(hass, "test_integration_only_entry", {})
async def test_async_start_setup(hass):
    """Test setup started context manager keeps track of setup times."""
    with setup.async_start_setup(hass, ["august"]):
        assert isinstance(
            hass.data[setup.DATA_SETUP_STARTED]["august"], datetime.datetime
        )
        # A re-entrant setup of the same domain gets a "_2" suffixed slot.
        with setup.async_start_setup(hass, ["august"]):
            assert isinstance(
                hass.data[setup.DATA_SETUP_STARTED]["august_2"], datetime.datetime
            )
    # On exit the started markers are cleared and only the primary domain's
    # elapsed time is recorded.
    assert "august" not in hass.data[setup.DATA_SETUP_STARTED]
    assert isinstance(hass.data[setup.DATA_SETUP_TIME]["august"], datetime.timedelta)
    assert "august_2" not in hass.data[setup.DATA_SETUP_TIME]
async def test_async_start_setup_platforms(hass):
    """Test setup started context manager keeps track of setup times for platforms."""
    with setup.async_start_setup(hass, ["sensor.august"]):
        assert isinstance(
            hass.data[setup.DATA_SETUP_STARTED]["sensor.august"], datetime.datetime
        )
    # Elapsed time is attributed to the integration ("august"),
    # not the platform domain ("sensor").
    assert "august" not in hass.data[setup.DATA_SETUP_STARTED]
    assert isinstance(hass.data[setup.DATA_SETUP_TIME]["august"], datetime.timedelta)
    assert "sensor" not in hass.data[setup.DATA_SETUP_TIME]
| 32.485591 | 87 | 0.668751 |
import asyncio
import datetime
import threading
from unittest.mock import AsyncMock, Mock, patch
import pytest
import voluptuous as vol
from homeassistant import config_entries, setup
from homeassistant.const import EVENT_COMPONENT_LOADED, EVENT_HOMEASSISTANT_START
from homeassistant.core import callback
from homeassistant.helpers import discovery
from homeassistant.helpers.config_validation import (
PLATFORM_SCHEMA,
PLATFORM_SCHEMA_BASE,
)
from tests.common import (
MockConfigEntry,
MockModule,
MockPlatform,
assert_setup_component,
mock_entity_platform,
mock_integration,
)
@pytest.fixture
def mock_handlers():
class MockFlowHandler(config_entries.ConfigFlow):
VERSION = 1
with patch.dict(config_entries.HANDLERS, {"comp": MockFlowHandler}):
yield
async def test_validate_component_config(hass):
config_schema = vol.Schema({"comp_conf": {"hello": str}}, required=True)
mock_integration(hass, MockModule("comp_conf", config_schema=config_schema))
with assert_setup_component(0):
assert not await setup.async_setup_component(hass, "comp_conf", {})
hass.data.pop(setup.DATA_SETUP)
with assert_setup_component(0):
assert not await setup.async_setup_component(
hass, "comp_conf", {"comp_conf": None}
)
hass.data.pop(setup.DATA_SETUP)
with assert_setup_component(0):
assert not await setup.async_setup_component(
hass, "comp_conf", {"comp_conf": {}}
)
hass.data.pop(setup.DATA_SETUP)
with assert_setup_component(0):
assert not await setup.async_setup_component(
hass,
"comp_conf",
{"comp_conf": {"hello": "world", "invalid": "extra"}},
)
hass.data.pop(setup.DATA_SETUP)
with assert_setup_component(1):
assert await setup.async_setup_component(
hass, "comp_conf", {"comp_conf": {"hello": "world"}}
)
async def test_validate_platform_config(hass, caplog):
platform_schema = PLATFORM_SCHEMA.extend({"hello": str})
platform_schema_base = PLATFORM_SCHEMA_BASE.extend({})
mock_integration(
hass,
MockModule("platform_conf", platform_schema_base=platform_schema_base),
)
mock_entity_platform(
hass,
"platform_conf.whatever",
MockPlatform(platform_schema=platform_schema),
)
with assert_setup_component(0):
assert await setup.async_setup_component(
hass,
"platform_conf",
{"platform_conf": {"platform": "not_existing", "hello": "world"}},
)
hass.data.pop(setup.DATA_SETUP)
hass.config.components.remove("platform_conf")
with assert_setup_component(1):
assert await setup.async_setup_component(
hass,
"platform_conf",
{"platform_conf": {"platform": "whatever", "hello": "world"}},
)
hass.data.pop(setup.DATA_SETUP)
hass.config.components.remove("platform_conf")
with assert_setup_component(1):
assert await setup.async_setup_component(
hass,
"platform_conf",
{"platform_conf": [{"platform": "whatever", "hello": "world"}]},
)
hass.data.pop(setup.DATA_SETUP)
hass.config.components.remove("platform_conf")
with assert_setup_component(0) as config:
assert await setup.async_setup_component(
hass, "platform_conf", {"platform_conf": None}
)
assert "platform_conf" in hass.config.components
assert not config["platform_conf"]
assert await setup.async_setup_component(
hass, "platform_conf", {"platform_conf": {}}
)
assert "platform_conf" in hass.config.components
assert not config["platform_conf"]
async def test_validate_platform_config_2(hass, caplog):
platform_schema = PLATFORM_SCHEMA.extend({"hello": str})
platform_schema_base = PLATFORM_SCHEMA_BASE.extend({"hello": "world"})
mock_integration(
hass,
MockModule(
"platform_conf",
platform_schema=platform_schema,
platform_schema_base=platform_schema_base,
),
)
mock_entity_platform(
hass,
"platform_conf.whatever",
MockPlatform("whatever", platform_schema=platform_schema),
)
with assert_setup_component(1):
assert await setup.async_setup_component(
hass,
"platform_conf",
{
"platform_conf": {"platform": "whatever", "hello": "world"},
"platform_conf 2": {"platform": "whatever", "hello": "there"},
},
)
async def test_validate_platform_config_3(hass, caplog):
component_schema = PLATFORM_SCHEMA_BASE.extend({"hello": str})
platform_schema = PLATFORM_SCHEMA.extend({"cheers": str, "hello": "world"})
mock_integration(
hass, MockModule("platform_conf", platform_schema=component_schema)
)
mock_entity_platform(
hass,
"platform_conf.whatever",
MockPlatform("whatever", platform_schema=platform_schema),
)
with assert_setup_component(1):
assert await setup.async_setup_component(
hass,
"platform_conf",
{
"platform_conf": {"platform": "whatever", "hello": "world"},
"platform_conf 2": {"platform": "whatever", "hello": "there"},
},
)
async def test_validate_platform_config_4(hass):
component_schema = PLATFORM_SCHEMA_BASE
platform_schema = PLATFORM_SCHEMA
mock_integration(
hass,
MockModule("platform_conf", platform_schema_base=component_schema),
)
mock_entity_platform(
hass,
"platform_conf.whatever",
MockPlatform(platform_schema=platform_schema),
)
with assert_setup_component(1):
assert await setup.async_setup_component(
hass,
"platform_conf",
{
"platform_conf": {
"platform": "whatever",
"entity_namespace": "yummy",
}
},
)
hass.data.pop(setup.DATA_SETUP)
hass.config.components.remove("platform_conf")
async def test_component_not_found(hass):
assert await setup.async_setup_component(hass, "non_existing", {}) is False
async def test_component_not_double_initialized(hass):
mock_setup = Mock(return_value=True)
mock_integration(hass, MockModule("comp", setup=mock_setup))
assert await setup.async_setup_component(hass, "comp", {})
assert mock_setup.called
mock_setup.reset_mock()
assert await setup.async_setup_component(hass, "comp", {})
assert not mock_setup.called
async def test_component_not_installed_if_requirement_fails(hass):
hass.config.skip_pip = False
mock_integration(hass, MockModule("comp", requirements=["package==0.0.1"]))
with patch("homeassistant.util.package.install_package", return_value=False):
assert not await setup.async_setup_component(hass, "comp", {})
assert "comp" not in hass.config.components
async def test_component_not_setup_twice_if_loaded_during_other_setup(hass):
result = []
async def async_setup(hass, config):
result.append(1)
mock_integration(hass, MockModule("comp", async_setup=async_setup))
def setup_component():
setup.setup_component(hass, "comp", {})
thread = threading.Thread(target=setup_component)
thread.start()
await setup.async_setup_component(hass, "comp", {})
await hass.async_add_executor_job(thread.join)
assert len(result) == 1
async def test_component_not_setup_missing_dependencies(hass):
deps = ["maybe_existing"]
mock_integration(hass, MockModule("comp", dependencies=deps))
assert not await setup.async_setup_component(hass, "comp", {})
assert "comp" not in hass.config.components
hass.data.pop(setup.DATA_SETUP)
mock_integration(hass, MockModule("comp2", dependencies=deps))
mock_integration(hass, MockModule("maybe_existing"))
assert await setup.async_setup_component(hass, "comp2", {})
async def test_component_failing_setup(hass):
mock_integration(hass, MockModule("comp", setup=lambda hass, config: False))
assert not await setup.async_setup_component(hass, "comp", {})
assert "comp" not in hass.config.components
async def test_component_exception_setup(hass):
def exception_setup(hass, config):
raise Exception("fail!")
mock_integration(hass, MockModule("comp", setup=exception_setup))
assert not await setup.async_setup_component(hass, "comp", {})
assert "comp" not in hass.config.components
async def test_component_setup_with_validation_and_dependency(hass):
def config_check_setup(hass, config):
if config.get("comp_a", {}).get("valid", False):
return True
raise Exception(f"Config not passed in: {config}")
platform = MockPlatform()
mock_integration(hass, MockModule("comp_a", setup=config_check_setup))
mock_integration(
hass,
MockModule("platform_a", setup=config_check_setup, dependencies=["comp_a"]),
)
mock_entity_platform(hass, "switch.platform_a", platform)
await setup.async_setup_component(
hass,
"switch",
{"comp_a": {"valid": True}, "switch": {"platform": "platform_a"}},
)
await hass.async_block_till_done()
assert "comp_a" in hass.config.components
async def test_platform_specific_config_validation(hass):
platform_schema = PLATFORM_SCHEMA.extend({"valid": True}, extra=vol.PREVENT_EXTRA)
mock_setup = Mock(spec_set=True)
mock_entity_platform(
hass,
"switch.platform_a",
MockPlatform(platform_schema=platform_schema, setup_platform=mock_setup),
)
with assert_setup_component(0, "switch"):
assert await setup.async_setup_component(
hass,
"switch",
{"switch": {"platform": "platform_a", "invalid": True}},
)
await hass.async_block_till_done()
assert mock_setup.call_count == 0
hass.data.pop(setup.DATA_SETUP)
hass.config.components.remove("switch")
with assert_setup_component(0):
assert await setup.async_setup_component(
hass,
"switch",
{
"switch": {
"platform": "platform_a",
"valid": True,
"invalid_extra": True,
}
},
)
await hass.async_block_till_done()
assert mock_setup.call_count == 0
hass.data.pop(setup.DATA_SETUP)
hass.config.components.remove("switch")
with assert_setup_component(1, "switch"):
assert await setup.async_setup_component(
hass,
"switch",
{"switch": {"platform": "platform_a", "valid": True}},
)
await hass.async_block_till_done()
assert mock_setup.call_count == 1
async def test_disable_component_if_invalid_return(hass):
mock_integration(
hass, MockModule("disabled_component", setup=lambda hass, config: None)
)
assert not await setup.async_setup_component(hass, "disabled_component", {})
assert "disabled_component" not in hass.config.components
hass.data.pop(setup.DATA_SETUP)
mock_integration(
hass,
MockModule("disabled_component", setup=lambda hass, config: False),
)
assert not await setup.async_setup_component(hass, "disabled_component", {})
assert "disabled_component" not in hass.config.components
hass.data.pop(setup.DATA_SETUP)
mock_integration(
hass, MockModule("disabled_component", setup=lambda hass, config: True)
)
assert await setup.async_setup_component(hass, "disabled_component", {})
assert "disabled_component" in hass.config.components
async def test_all_work_done_before_start(hass):
call_order = []
async def component1_setup(hass, config):
await discovery.async_discover(
hass, "test_component2", {}, "test_component2", {}
)
await discovery.async_discover(
hass, "test_component3", {}, "test_component3", {}
)
return True
def component_track_setup(hass, config):
call_order.append(1)
return True
mock_integration(hass, MockModule("test_component1", async_setup=component1_setup))
mock_integration(hass, MockModule("test_component2", setup=component_track_setup))
mock_integration(hass, MockModule("test_component3", setup=component_track_setup))
@callback
def track_start(event):
call_order.append(2)
hass.bus.async_listen_once(EVENT_HOMEASSISTANT_START, track_start)
hass.add_job(setup.async_setup_component(hass, "test_component1", {}))
await hass.async_block_till_done()
await hass.async_start()
assert call_order == [1, 1, 2]
async def test_component_warn_slow_setup(hass):
mock_integration(hass, MockModule("test_component1"))
with patch.object(hass.loop, "call_later") as mock_call:
result = await setup.async_setup_component(hass, "test_component1", {})
assert result
assert mock_call.called
assert len(mock_call.mock_calls) == 3
timeout, logger_method = mock_call.mock_calls[0][1][:2]
assert timeout == setup.SLOW_SETUP_WARNING
assert logger_method == setup._LOGGER.warning
assert mock_call().cancel.called
async def test_platform_no_warn_slow(hass):
mock_integration(
hass, MockModule("test_component1", platform_schema=PLATFORM_SCHEMA)
)
with patch.object(hass.loop, "call_later") as mock_call:
result = await setup.async_setup_component(hass, "test_component1", {})
assert result
assert len(mock_call.mock_calls) == 0
async def test_platform_error_slow_setup(hass, caplog):
with patch.object(setup, "SLOW_SETUP_MAX_WAIT", 0.1):
called = []
async def async_setup(*args):
called.append(1)
await asyncio.sleep(2)
mock_integration(hass, MockModule("test_component1", async_setup=async_setup))
result = await setup.async_setup_component(hass, "test_component1", {})
assert len(called) == 1
assert not result
assert "test_component1 is taking longer than 0.1 seconds" in caplog.text
async def test_when_setup_already_loaded(hass):
calls = []
async def mock_callback(hass, component):
calls.append(component)
setup.async_when_setup(hass, "test", mock_callback)
await hass.async_block_till_done()
assert calls == []
hass.config.components.add("test")
hass.bus.async_fire(EVENT_COMPONENT_LOADED, {"component": "test"})
await hass.async_block_till_done()
assert calls == ["test"]
hass.bus.async_fire(EVENT_COMPONENT_LOADED, {"component": "test"})
await hass.async_block_till_done()
assert calls == ["test"]
setup.async_when_setup(hass, "test", mock_callback)
await hass.async_block_till_done()
assert calls == ["test", "test"]
async def test_async_when_setup_or_start_already_loaded(hass):
calls = []
async def mock_callback(hass, component):
calls.append(component)
setup.async_when_setup_or_start(hass, "test", mock_callback)
await hass.async_block_till_done()
assert calls == []
hass.config.components.add("test")
hass.bus.async_fire(EVENT_COMPONENT_LOADED, {"component": "test"})
await hass.async_block_till_done()
assert calls == ["test"]
hass.bus.async_fire(EVENT_COMPONENT_LOADED, {"component": "test"})
await hass.async_block_till_done()
assert calls == ["test"]
setup.async_when_setup_or_start(hass, "test", mock_callback)
await hass.async_block_till_done()
assert calls == ["test", "test"]
setup.async_when_setup_or_start(hass, "not_loaded", mock_callback)
await hass.async_block_till_done()
assert calls == ["test", "test"]
hass.bus.async_fire(EVENT_HOMEASSISTANT_START)
await hass.async_block_till_done()
assert calls == ["test", "test", "not_loaded"]
async def test_setup_import_blows_up(hass):
with patch(
"homeassistant.loader.Integration.get_component", side_effect=ImportError
):
assert not await setup.async_setup_component(hass, "sun", {})
async def test_parallel_entry_setup(hass, mock_handlers):
MockConfigEntry(domain="comp", data={"value": 1}).add_to_hass(hass)
MockConfigEntry(domain="comp", data={"value": 2}).add_to_hass(hass)
calls = []
async def mock_async_setup_entry(hass, entry):
calls.append(entry.data["value"])
await asyncio.sleep(0)
calls.append(entry.data["value"])
return True
mock_integration(
hass,
MockModule(
"comp",
async_setup_entry=mock_async_setup_entry,
),
)
mock_entity_platform(hass, "config_flow.comp", None)
await setup.async_setup_component(hass, "comp", {})
assert calls == [1, 2, 1, 2]
async def test_integration_disabled(hass, caplog):
disabled_reason = "Dependency contains code that breaks Home Assistant"
mock_integration(
hass,
MockModule("test_component1", partial_manifest={"disabled": disabled_reason}),
)
result = await setup.async_setup_component(hass, "test_component1", {})
assert not result
assert disabled_reason in caplog.text
async def test_async_get_loaded_integrations(hass):
hass.config.components.add("notbase")
hass.config.components.add("switch")
hass.config.components.add("notbase.switch")
hass.config.components.add("myintegration")
hass.config.components.add("device_tracker")
hass.config.components.add("device_tracker.other")
hass.config.components.add("myintegration.light")
assert setup.async_get_loaded_integrations(hass) == {
"other",
"switch",
"notbase",
"myintegration",
"device_tracker",
}
async def test_integration_no_setup(hass, caplog):
mock_integration(
hass,
MockModule("test_integration_without_setup", setup=False),
)
result = await setup.async_setup_component(
hass, "test_integration_without_setup", {}
)
assert not result
assert "No setup or config entry setup function defined" in caplog.text
async def test_integration_only_setup_entry(hass):
mock_integration(
hass,
MockModule(
"test_integration_only_entry",
setup=False,
async_setup_entry=AsyncMock(return_value=True),
),
)
assert await setup.async_setup_component(hass, "test_integration_only_entry", {})
async def test_async_start_setup(hass):
with setup.async_start_setup(hass, ["august"]):
assert isinstance(
hass.data[setup.DATA_SETUP_STARTED]["august"], datetime.datetime
)
with setup.async_start_setup(hass, ["august"]):
assert isinstance(
hass.data[setup.DATA_SETUP_STARTED]["august_2"], datetime.datetime
)
assert "august" not in hass.data[setup.DATA_SETUP_STARTED]
assert isinstance(hass.data[setup.DATA_SETUP_TIME]["august"], datetime.timedelta)
assert "august_2" not in hass.data[setup.DATA_SETUP_TIME]
async def test_async_start_setup_platforms(hass):
with setup.async_start_setup(hass, ["sensor.august"]):
assert isinstance(
hass.data[setup.DATA_SETUP_STARTED]["sensor.august"], datetime.datetime
)
assert "august" not in hass.data[setup.DATA_SETUP_STARTED]
assert isinstance(hass.data[setup.DATA_SETUP_TIME]["august"], datetime.timedelta)
assert "sensor" not in hass.data[setup.DATA_SETUP_TIME]
| true | true |
f71ba0d3b326ae8b8647aa96334c5b3c71a2f678 | 865 | py | Python | epycom/univariate/__init__.py | ICRC-BME/epycom | 5bfa3fb9020f04536b7a08382533c8abf56ca85f | [
"Apache-2.0"
] | null | null | null | epycom/univariate/__init__.py | ICRC-BME/epycom | 5bfa3fb9020f04536b7a08382533c8abf56ca85f | [
"Apache-2.0"
] | 1 | 2020-10-22T19:10:57.000Z | 2020-10-22T21:09:02.000Z | epycom/univariate/__init__.py | ICRC-BME/epycom | 5bfa3fb9020f04536b7a08382533c8abf56ca85f | [
"Apache-2.0"
] | 1 | 2021-02-24T10:07:32.000Z | 2021-02-24T10:07:32.000Z | from .signal_stats import compute_signal_stats, SignalStats
from .hjorth_mobility import compute_hjorth_mobility, HjorthMobility
from .hjorth_complexity import compute_hjorth_complexity, HjorthComplexity
from .lyapunov_exponent import compute_lyapunov_exponent, LyapunovExponent
from .power_spectral_entropy import compute_pse, PowerSpectralEntropy
from .modulation_index import compute_mi_count, ModulationIndex
from .mean_vector_length import compute_mvl_count, MeanVectorLength
from .phase_locking_value import compute_plv_count, PhaseLockingValue
from .arr import compute_arr, AutoregressiveResidualModulation
from .shannon_entropy import compute_shanon_entropy, ShannonEntropy
from .approximate_entropy import (compute_approximate_entropy,
ApproximateEntropy)
from .sample_entropy import compute_sample_entropy, SampleEntropy
| 61.785714 | 74 | 0.861272 | from .signal_stats import compute_signal_stats, SignalStats
from .hjorth_mobility import compute_hjorth_mobility, HjorthMobility
from .hjorth_complexity import compute_hjorth_complexity, HjorthComplexity
from .lyapunov_exponent import compute_lyapunov_exponent, LyapunovExponent
from .power_spectral_entropy import compute_pse, PowerSpectralEntropy
from .modulation_index import compute_mi_count, ModulationIndex
from .mean_vector_length import compute_mvl_count, MeanVectorLength
from .phase_locking_value import compute_plv_count, PhaseLockingValue
from .arr import compute_arr, AutoregressiveResidualModulation
from .shannon_entropy import compute_shanon_entropy, ShannonEntropy
from .approximate_entropy import (compute_approximate_entropy,
ApproximateEntropy)
from .sample_entropy import compute_sample_entropy, SampleEntropy
| true | true |
f71ba1197905e6582410e47931bec23c9ece6799 | 8,846 | py | Python | Code/CarbonEquiv_Talmy.py | gshowalt/VirusPopModel | 8d41294fa06a44e8fa22ef390d6db14fba7818a1 | [
"CC0-1.0"
] | null | null | null | Code/CarbonEquiv_Talmy.py | gshowalt/VirusPopModel | 8d41294fa06a44e8fa22ef390d6db14fba7818a1 | [
"CC0-1.0"
] | null | null | null | Code/CarbonEquiv_Talmy.py | gshowalt/VirusPopModel | 8d41294fa06a44e8fa22ef390d6db14fba7818a1 | [
"CC0-1.0"
] | null | null | null |
# importing all modules
import numpy as np
import matplotlib as mpl
import matplotlib.pyplot as plt
import matplotlib.colors as colors
from matplotlib import cm
import matplotlib.tri as tri
from matplotlib.colors import LogNorm
import matplotlib.patches as mpatches
from matplotlib.ticker import LogFormatter
from collections import Counter
from functools import wraps
import csv
import sys
import itertools
from itertools import islice, cycle, chain
import scipy as sp
from scipy.interpolate import griddata
from scipy import interpolate
from scipy.integrate import odeint
from scipy.stats import pareto
from scipy.stats import loguniform
import seaborn as sns
import pandas as pd
import statistics as stats
import lhsmdu
from math import nan
from SALib.sample import saltelli, latin, ff
from SALib.analyze import sobol
import random
# ODE right-hand side for the nutrient / bacteria / virus / POM model.
# Adapted from the leak/lyse experiment, so leak and lyse remain parameters
# even though they are fixed to final values later in the script.
def f2(s,t, leak, lyse, temp):
    """Return derivatives and carbon-flux diagnostics for the sea-ice model.

    Parameters
    ----------
    s : sequence of 4 floats
        State vector [N, B, V, P] = nutrient, bacteria, viruses, POM.
        (Presumably ug/mL for N and P, cells/mL and virions/mL for B and V
        -- TODO confirm units against the rest of the project.)
    t : float
        Time in hours; unused directly but required by scipy.integrate.odeint.
    leak : float
        POM transfer coefficient from bacterial leakage (assigned to g).
    lyse : float
        POM transfer coefficient from viral lysis (assigned to n).
    temp : float
        Temperature in deg C; drives every empirically fitted parameter curve.

    Returns
    -------
    list of 8 floats
        [dNdt, dBdt, dVdt, dPdt, TotalVCarbon, VirusCarbon, LysateCarbon,
        LeakCarbon] -- the last four are carbon-equivalent flux diagnostics
        (Talmy et al. 2019 formulation), integrated alongside the state.
    """
    # first define the relative contact rate (RCR) and brine concentrating factor (BCF) by temp
    if temp < -1:
        RCR = 0.0716*temp**4 + 2.9311*temp**3 + 34.108*temp**2 + 45.826*temp + 3.5125 #Fit from Wells and Deming, 2006
        BCF = -0.0106 * temp **2 - 0.519 * temp + 0.2977
        sal = 32 * BCF
    else:
        # At/above -1 C there is no brine concentration: open-water values.
        RCR = 1
        sal = 32
    # these are our scaling factors for the temperature-dependent parameter distributions
    mux = 1 # for growth rate (unused below; mu uses its own fit)
    betx = 1 # for burst size
    phix = 1e-5 # for adsorption rate
    gamx = 1 # for lytic fraction (unused below; gamma is fixed to 1)
    # Temp-dependent parameter fit for burst size (virions per lysed cell)
    beta = betx*(0.0064 * temp**3 - 0.3047 * temp ** 2 + 0.7701 * temp + 93.605)
    # Burst size was also parameterized as a distribution with a standard
    # deviation (std) in other experiments, but here it is a fixed curve
    # for reproducibility.
    """ beta_std = 0.0095 * temp **3 - 0.5184 * temp**2 + 2.2456 * temp + 126.59
    if beta_std < 0:
        beta_std = 0.
    beta = np.random.normal(beta_mu, beta_std)"""
    # Temp-dependent growth rate (1/hr). Two alternative fits were tried
    # (polynomials below, kept for reference); the exponential fit is used.
    # mu = mux*(2e-5*temp**3 + 0.0008 * temp **2 + 0.0091 * temp + 0.0386)
    # mu = 3e-6*temp**4 + 0.0001*temp**3+0.0014*temp**2 + 0.0092 * temp +0.0333
    mu = 0.0441*np.exp(0.4991*temp)
    """mu_std = 0.1*2e-5*temp**3 + 0.0009 * temp **2 + 0.0144 * temp + 0.0818
    if mu_std<0:
        mu_std = 0.001
    mu = np.random.normal(mu_mu, mu_std)"""
    # Temp-dependent adsorption rate; a salinity-based fit (commented out)
    # was also tried, but the temperature fit was chosen for consistency.
    #phi = phix * -1e-11*sal**2 +4e-9*sal - 9e-8
    phi = phix * (6e-13 * temp **5 - 2e-11 * temp ** 4 + 1e-10 * temp ** 3 + 3e-9 * temp ** 2 - 3e-8 * temp + 5e-8)
    """phi_std = -2e-11*sal**2 + 4e-9*sal - 9e-8
    if phi_std < 0:
        phi_std = 0
    phi = np.random.normal(phi_mu, phi_std)"""
    # Clamp the fitted curves where they dip below physically meaningful values.
    if mu <= 0:
        mu = 0.000
    if beta < 0:
        beta = 1
    if phi < 0:
        phi = 1e-15
    # scale adsorption rate by RCR to incorporate brine-channel crowding in sea ice
    phi = phi * RCR
    # SET PARAMETERS
    alpha = 1.2e-7*3**((temp-23)/10)#4.2e-7 at +8, or 1.2e-7 at lower temps, at -5 --> mu = 0.25/day = 0.01/hr = 1e-8
    # alpha: nutrient transfer coefficient to bacteria (ug/cell * hr),
    # scaled by a Q10-style factor of 3 referenced to 23 C.
    Q = 0.022
    #half saturation constant (ug/mL)
    d = 1e-8
    #constant of bacterial death (1/hr)
    m = 1e-6
    #constant of viral decay (1/hr)
    g = leak
    #POM transfer coefficient from bacterial leakage (ug/cell*hr)
    n = lyse
    #POM transfer coefficient from viral lysis ug/[burst]cell
    #gamma is the lysogeny (lytic fraction) value; fixed to fully lytic here
    gamma = 1 #-1/temp #*mu
    # unpack state vector
    N = s[0]
    B = s[1]
    V = s[2]
    P = s[3]
    # systems of equations below.  NOTE(review): the floor-clamps on N, B, V
    # are interleaved with the derivative computations, so each clamp affects
    # only the derivatives computed AFTER it -- the ordering is significant.
    dNdt = - alpha * (N / (N + Q)) * B + g * (alpha * (N/(N+Q))*B) + (n * 1e-7 * (gamma) * phi * V * B)
    if N < 0:
        N = 0
    dBdt = (mu) * (N/(Q + N)) * B - gamma * phi * V * B - d*B
    if B < 1:
        B = 1
    dVdt = gamma*beta * B * phi*V - phi * V * B - m*V
    if V < 1:
        V = 1
    #dPdt = (g * (0.0083*1e-7))*B + (n * 1e-7 * phi * V * B*RCR) + 1e-10*m*V + 1.0e-7*d*B - (P/(P+Q))*alpha * B
    dPdt = g * alpha * (N/ (N+Q))*B + n * 1e-7 * (gamma)*phi*B*V
    # according to Jover, 2014 - virus has 0.02 to 0.05 fg carbon/virion => translate into ug Carbon = 5e-11
    VCarbonEQ = 5e-11
    BCarbonEQ = 1e-7 #ug carbon per bacterial cell, from Bionumbers
    # carbon equivalents for viruses and lysate as per Talmy et al 2019
    rv = 90 #virus radius (nm)
    Qv = (41 * (rv - 2.5)**3 + 130*(7.5*(rv)**2 - 18.74 * rv + 15.63)) * (10e6/(6.022 * 10**23)) # virus carbon eq
    phiEQ = (phi)/(Qv)
    Qh = 1e-7
    etav = beta * (Qv/Qh) # fraction of lysed carbon routed into new virions
    TotalVCarbon = (phiEQ * (gamma) * (V*VCarbonEQ) * (B*BCarbonEQ))
    VirusCarbon = etav * (phiEQ * (gamma) * (V*VCarbonEQ) * (B*BCarbonEQ))
    LysateCarbon = (1-etav)*(phiEQ * (gamma) * (V*VCarbonEQ) * (B*BCarbonEQ))
    LeakCarbon = g * (alpha * (N/(N+Q))*B)
    #print (mu, beta, phi, gamma)
    return [dNdt, dBdt, dVdt, dPdt, TotalVCarbon, VirusCarbon, LysateCarbon, LeakCarbon]
# define time, temperature scale
time = 5000
temp_list = [-12.5,-10, -8, -6, -4, -2]
t = np.linspace(1,time,1000)
# set up empty matricies
DOMX = []
DOMA = []
DOMB = []
DOMC = []
DOM1 = []
DOM10 = []
DOM100 = []
RCRlist = []
Mulist = []
endvals1 = []
endvals2 = []
endvals3 = []
endvals4 = []
Burstlist = []
Adsorplist = []
count = 0
plt.rcParams["font.family"] = "sans-serif"
fig1 = plt.figure(figsize=(20,15))
fig1.tight_layout()
plt.rcParams.update({'font.size': 15})
for xx in temp_list:
temp = xx
count +=1
mu = 0.0441*np.exp(0.4991*temp)
gamma = 1
#print ("gamma is:", gamma, "and mu is:", mu)
if temp < -1:
RCR = 0.0716*temp**4 + 2.9311*temp**3 + 34.108*temp**2 + 45.826*temp + 3.5125 #Fit from Wells and Deming, 2006
BCF = -0.0106 * temp **2 - 0.519 * temp + 0.2977
sal = 32 * BCF
else:
BCF = 1
sal = 32
s0=[0.12*BCF,1e4*BCF, 1e5*BCF,0,0,0,0,0]
s = odeint(f2,s0,t, args = (0.4,0.99, temp))
xend.append(sum(s[:,3]))
y1 = s[:,4]/(0.12)
y2 = s[:,5]/(0.12)
y3 = s[:,6]/(0.12)
y4 = s[:,7]/(0.12)
plt.subplot(3, 3, count)
colors1 = ['cadetblue', '#FF6F61'] #, 'darkblue']
plt.stackplot(t,y2,y3, colors = colors1,labels=['To Virus','To Lysate'])
plt.legend(loc='lower right')
plt.xlabel('Temperature: {} (˚C)'.format(temp))
plt.yscale('log')
plt.ylabel('% Initial Nutrient')
# take last value of each returned number for the temp-dependent plot
endvals1.append(y1[-1])
endvals2.append(y2[-1])
endvals3.append(y3[-1])
endvals4.append(y4[-1])
# make lists of calculated temp-dependent parameters if we want to plot against them alter
RCRlist.append(RCR)
Mulist.append(mu)
beta = 1*(0.0064 * temp**3 - 0.3047 * temp ** 2 + 0.7701 * temp + 93.605)
Burstlist.append(beta)
phi = RCR* 1 * (6e-13 * temp **5 - 2e-11 * temp ** 4 + 1e-10 * temp ** 3 + 3e-9 * temp ** 2 - 3e-8 * temp + 5e-8)
Adsorplist.append(phi)
plt.subplots_adjust(hspace = 1)
fig1.suptitle("Cumulative organic carbon recycled into Virions or Lysate ",fontsize=15)
# Plot as a funciton of temperature
plt.rcParams["font.family"] = "sans-serif"
plt.rcParams.update({'font.size': 20})
fig2 = plt.figure(figsize=(10,5))
fig2.tight_layout()
endvals1_b = [i/max(endvals1) for i in endvals1]
endvals2_b = [i/max(endvals2) for i in endvals2]
endvals3_b = [i/max(endvals3) for i in endvals3]
endvals4_b = [i/max(endvals4) for i in endvals4]
#ax1 = plt.stackplot(temp_list, endvals2_b, endvals3, colors = colors1) #, labels=['To Virus','To Lysate', 'Cell exudate'])
#ax1 = plt.plot(temp_list, Burstlist)
plt.plot(temp_list,endvals2_b, c = 'cadetblue', marker = 'o', markeredgecolor='white', markersize=15, label='to Virions')
plt.plot(temp_list, endvals3_b, c = '#FA7268', marker = 'o', markeredgecolor='white', markersize=15, label='to Lysate')
plt.xlabel('Temperature (˚C)')
plt.ylabel('Carbon Flow (Relative to Maximum)')
plt.legend(loc='lower right')
fig2.suptitle("Cumulative organic carbon recycled into \nVirions or Lysate as a function of temperature\n",fontsize=15)
# In[88]:
#fig1.savefig('CE_Grid_withRCR_runaway.jpeg', bbox_inches="tight", dpi=300,transparent=True)
#fig2.savefig('CE_Temp_noRCR_line.jpeg', bbox_inches="tight", dpi=300,transparent=True)
| 31.592857 | 123 | 0.618698 |
import numpy as np
import matplotlib as mpl
import matplotlib.pyplot as plt
import matplotlib.colors as colors
from matplotlib import cm
import matplotlib.tri as tri
from matplotlib.colors import LogNorm
import matplotlib.patches as mpatches
from matplotlib.ticker import LogFormatter
from collections import Counter
from functools import wraps
import csv
import sys
import itertools
from itertools import islice, cycle, chain
import scipy as sp
from scipy.interpolate import griddata
from scipy import interpolate
from scipy.integrate import odeint
from scipy.stats import pareto
from scipy.stats import loguniform
import seaborn as sns
import pandas as pd
import statistics as stats
import lhsmdu
from math import nan
from SALib.sample import saltelli, latin, ff
from SALib.analyze import sobol
import random
def f2(s,t, leak, lyse, temp):
if temp < -1:
RCR = 0.0716*temp**4 + 2.9311*temp**3 + 34.108*temp**2 + 45.826*temp + 3.5125
BCF = -0.0106 * temp **2 - 0.519 * temp + 0.2977
sal = 32 * BCF
else:
RCR = 1
sal = 32
mux = 1
betx = 1
phix = 1e-5
gamx = 1
beta = betx*(0.0064 * temp**3 - 0.3047 * temp ** 2 + 0.7701 * temp + 93.605)
mu = 0.0441*np.exp(0.4991*temp)
phi = phix * (6e-13 * temp **5 - 2e-11 * temp ** 4 + 1e-10 * temp ** 3 + 3e-9 * temp ** 2 - 3e-8 * temp + 5e-8)
if mu <= 0:
mu = 0.000
if beta < 0:
beta = 1
if phi < 0:
phi = 1e-15
phi = phi * RCR
alpha = 1.2e-7*3**((temp-23)/10)
#nutrient transfer coefficient to bacteria (ug/cell * hr)
Q = 0.022
#half saturation constant (ug/mL)
d = 1e-8
#constant of bacterial death (1/hr)
m = 1e-6
#constant of viral decay (1/hr)
g = leak
#POM transfer coefficient from bacteria (ug/cell*hr)
n = lyse
#POM transfer coefficient from viral lysis ug/[burst]cell
#gamma is a lysogeny value
gamma = 1 #-1/temp #*mu
# set up solution matrix
N = s[0]
B = s[1]
V = s[2]
P = s[3]
#systems of equations below
dNdt = - alpha * (N / (N + Q)) * B + g * (alpha * (N/(N+Q))*B) + (n * 1e-7 * (gamma) * phi * V * B)
if N < 0:
N = 0
dBdt = (mu) * (N/(Q + N)) * B - gamma * phi * V * B - d*B
if B < 1:
B = 1
dVdt = gamma*beta * B * phi*V - phi * V * B - m*V
if V < 1:
V = 1
#dPdt = (g * (0.0083*1e-7))*B + (n * 1e-7 * phi * V * B*RCR) + 1e-10*m*V + 1.0e-7*d*B - (P/(P+Q))*alpha * B
dPdt = g * alpha * (N/ (N+Q))*B + n * 1e-7 * (gamma)*phi*B*V
# according to Jover, 2014 - virus has 0.02 to 0.05 fg carbon/virion => translate into ug Carbon = 5e-11
VCarbonEQ = 5e-11
BCarbonEQ = 1e-7 #from Bionumbers
# building the carbon equivalent for viruses, lysate as per Talmy et al 2019
rv = 90 #virus radius (nm)
Qv = (41 * (rv - 2.5)**3 + 130*(7.5*(rv)**2 - 18.74 * rv + 15.63)) * (10e6/(6.022 * 10**23)) # virus carbon eq
phiEQ = (phi)/(Qv)
Qh = 1e-7
etav = beta * (Qv/Qh)
TotalVCarbon = (phiEQ * (gamma) * (V*VCarbonEQ) * (B*BCarbonEQ))
VirusCarbon = etav * (phiEQ * (gamma) * (V*VCarbonEQ) * (B*BCarbonEQ))
LysateCarbon = (1-etav)*(phiEQ * (gamma) * (V*VCarbonEQ) * (B*BCarbonEQ))
LeakCarbon = g * (alpha * (N/(N+Q))*B)
#print (mu, beta, phi, gamma)
return [dNdt, dBdt, dVdt, dPdt, TotalVCarbon, VirusCarbon, LysateCarbon, LeakCarbon]
# define time, temperature scale
time = 5000
temp_list = [-12.5,-10, -8, -6, -4, -2]
t = np.linspace(1,time,1000)
# set up empty matricies
DOMX = []
DOMA = []
DOMB = []
DOMC = []
DOM1 = []
DOM10 = []
DOM100 = []
RCRlist = []
Mulist = []
endvals1 = []
endvals2 = []
endvals3 = []
endvals4 = []
Burstlist = []
Adsorplist = []
count = 0
plt.rcParams["font.family"] = "sans-serif"
fig1 = plt.figure(figsize=(20,15))
fig1.tight_layout()
plt.rcParams.update({'font.size': 15})
for xx in temp_list:
temp = xx
count +=1
mu = 0.0441*np.exp(0.4991*temp)
gamma = 1
#print ("gamma is:", gamma, "and mu is:", mu)
if temp < -1:
RCR = 0.0716*temp**4 + 2.9311*temp**3 + 34.108*temp**2 + 45.826*temp + 3.5125 #Fit from Wells and Deming, 2006
BCF = -0.0106 * temp **2 - 0.519 * temp + 0.2977
sal = 32 * BCF
else:
BCF = 1
sal = 32
s0=[0.12*BCF,1e4*BCF, 1e5*BCF,0,0,0,0,0]
s = odeint(f2,s0,t, args = (0.4,0.99, temp))
xend.append(sum(s[:,3]))
y1 = s[:,4]/(0.12)
y2 = s[:,5]/(0.12)
y3 = s[:,6]/(0.12)
y4 = s[:,7]/(0.12)
plt.subplot(3, 3, count)
colors1 = ['cadetblue', '
plt.stackplot(t,y2,y3, colors = colors1,labels=['To Virus','To Lysate'])
plt.legend(loc='lower right')
plt.xlabel('Temperature: {} (˚C)'.format(temp))
plt.yscale('log')
plt.ylabel('% Initial Nutrient')
# take last value of each returned number for the temp-dependent plot
endvals1.append(y1[-1])
endvals2.append(y2[-1])
endvals3.append(y3[-1])
endvals4.append(y4[-1])
# make lists of calculated temp-dependent parameters if we want to plot against them alter
RCRlist.append(RCR)
Mulist.append(mu)
beta = 1*(0.0064 * temp**3 - 0.3047 * temp ** 2 + 0.7701 * temp + 93.605)
Burstlist.append(beta)
phi = RCR* 1 * (6e-13 * temp **5 - 2e-11 * temp ** 4 + 1e-10 * temp ** 3 + 3e-9 * temp ** 2 - 3e-8 * temp + 5e-8)
Adsorplist.append(phi)
plt.subplots_adjust(hspace = 1)
fig1.suptitle("Cumulative organic carbon recycled into Virions or Lysate ",fontsize=15)
# Plot as a funciton of temperature
plt.rcParams["font.family"] = "sans-serif"
plt.rcParams.update({'font.size': 20})
fig2 = plt.figure(figsize=(10,5))
fig2.tight_layout()
endvals1_b = [i/max(endvals1) for i in endvals1]
endvals2_b = [i/max(endvals2) for i in endvals2]
endvals3_b = [i/max(endvals3) for i in endvals3]
endvals4_b = [i/max(endvals4) for i in endvals4]
#ax1 = plt.stackplot(temp_list, endvals2_b, endvals3, colors = colors1) #, labels=['To Virus','To Lysate', 'Cell exudate'])
#ax1 = plt.plot(temp_list, Burstlist)
plt.plot(temp_list,endvals2_b, c = 'cadetblue', marker = 'o', markeredgecolor='white', markersize=15, label='to Virions')
plt.plot(temp_list, endvals3_b, c = '
plt.xlabel('Temperature (˚C)')
plt.ylabel('Carbon Flow (Relative to Maximum)')
plt.legend(loc='lower right')
fig2.suptitle("Cumulative organic carbon recycled into \nVirions or Lysate as a function of temperature\n",fontsize=15)
# In[88]:
#fig1.savefig('CE_Grid_withRCR_runaway.jpeg', bbox_inches="tight", dpi=300,transparent=True)
#fig2.savefig('CE_Temp_noRCR_line.jpeg', bbox_inches="tight", dpi=300,transparent=True)
| true | true |
f71ba11fdfcf3709595b11c90277b8596127a219 | 3,479 | py | Python | WonderPy/components/wwCommandHead.py | avrabe/WonderPy | 60d81340bed1085c32803b32209fbbd4c291310a | [
"MIT"
] | 1 | 2019-05-25T16:55:32.000Z | 2019-05-25T16:55:32.000Z | WonderPy/components/wwCommandHead.py | avrabe/WonderPy | 60d81340bed1085c32803b32209fbbd4c291310a | [
"MIT"
] | null | null | null | WonderPy/components/wwCommandHead.py | avrabe/WonderPy | 60d81340bed1085c32803b32209fbbd4c291310a | [
"MIT"
] | null | null | null | from WonderPy.core.wwConstants import WWRobotConstants
from WonderPy.util import wwMath
from .wwCommandBase import WWCommandBase, do_not_call_within_connect_or_sensors
_rc = WWRobotConstants.RobotComponent
_rcv = WWRobotConstants.RobotComponentValues
_rp = WWRobotConstants.RobotProperties
class WWCommandHead(WWCommandBase):
_TIME_ANGLE = 0.2
_TIME_VOLTAGE = 0.6
def __init__(self, robot):
super(WWCommandHead, self).__init__(robot)
def stage_pan_angle(self, pan_degrees):
self._robot.stage_cmds(self.compose_angle(_rc.WW_COMMAND_HEAD_POSITION_PAN,
wwMath.coords_api_to_json_pan(pan_degrees)))
def stage_tilt_angle(self, tilt_degrees):
self._robot.stage_cmds(self.compose_angle(_rc.WW_COMMAND_HEAD_POSITION_TILT,
wwMath.coords_api_to_json_tilt(tilt_degrees)))
def stage_pan_tilt_angle(self, pan_degrees, tilt_degrees):
self.stage_pan_angle(pan_degrees)
self.stage_tilt_angle(tilt_degrees)
def stage_pan_voltage(self, pan_voltage_percent):
self._robot.stage_cmds(self.compose_voltage(_rc.WW_COMMAND_HEAD_PAN_VOLTAGE,
wwMath.coords_api_to_json_pan(pan_voltage_percent)))
def stage_tilt_voltage(self, tilt_voltage_percent):
self._robot.stage_cmds(self.compose_voltage(_rc.WW_COMMAND_HEAD_TILT_VOLTAGE,
wwMath.coords_api_to_json_tilt(tilt_voltage_percent)))
def stage_pan_tilt_voltage(self, pan_voltage_percent, tilt_voltage_percent):
self.stage_pan_voltage(pan_voltage_percent)
self.stage_tilt_voltage(tilt_voltage_percent)
@do_not_call_within_connect_or_sensors
def do_pan_angle(self, pan_degrees, timeout=None):
self.stage_pan_angle(pan_degrees)
self._block_for_simple_timeout(self._TIME_ANGLE, timeout)
@do_not_call_within_connect_or_sensors
def do_tilt_angle(self, tilt_degrees, timeout=None):
self.stage_tilt_angle(tilt_degrees)
self._block_for_simple_timeout(self._TIME_ANGLE, timeout)
@do_not_call_within_connect_or_sensors
def do_pan_tilt_angle(self, pan_degrees, tilt_degrees, timeout=None):
self.stage_pan_tilt_angle(pan_degrees, tilt_degrees)
self._block_for_simple_timeout(0.2, timeout)
@do_not_call_within_connect_or_sensors
def do_pan_voltage(self, pan_voltage_percent, timeout=None):
self.stage_pan_voltage(pan_voltage_percent)
self._block_for_simple_timeout(self._TIME_VOLTAGE, timeout)
@do_not_call_within_connect_or_sensors
def do_tilt_voltage(self, tilt_voltage_percent, timeout=None):
self.stage_tilt_voltage(tilt_voltage_percent)
self._block_for_simple_timeout(self._TIME_VOLTAGE, timeout)
@do_not_call_within_connect_or_sensors
def do_pan_tilt_voltage(self, pan_voltage_percent, tilt_voltage_percent, timeout=None):
self.stage_pan_tilt_voltage(pan_voltage_percent, tilt_voltage_percent)
self._block_for_simple_timeout(self._TIME_VOLTAGE, timeout)
@staticmethod
def compose_angle(component_id, degrees):
args = {_rcv.WW_COMMAND_VALUE_ANGLE_DEGREE: degrees}
return {component_id: args}
@staticmethod
def compose_voltage(component_id, voltage_percent):
args = {_rcv.WW_COMMAND_VALUE_PERCENTAGE: voltage_percent}
return {component_id: args}
| 43.4875 | 106 | 0.742167 | from WonderPy.core.wwConstants import WWRobotConstants
from WonderPy.util import wwMath
from .wwCommandBase import WWCommandBase, do_not_call_within_connect_or_sensors
_rc = WWRobotConstants.RobotComponent
_rcv = WWRobotConstants.RobotComponentValues
_rp = WWRobotConstants.RobotProperties
class WWCommandHead(WWCommandBase):
_TIME_ANGLE = 0.2
_TIME_VOLTAGE = 0.6
def __init__(self, robot):
super(WWCommandHead, self).__init__(robot)
def stage_pan_angle(self, pan_degrees):
self._robot.stage_cmds(self.compose_angle(_rc.WW_COMMAND_HEAD_POSITION_PAN,
wwMath.coords_api_to_json_pan(pan_degrees)))
def stage_tilt_angle(self, tilt_degrees):
self._robot.stage_cmds(self.compose_angle(_rc.WW_COMMAND_HEAD_POSITION_TILT,
wwMath.coords_api_to_json_tilt(tilt_degrees)))
def stage_pan_tilt_angle(self, pan_degrees, tilt_degrees):
self.stage_pan_angle(pan_degrees)
self.stage_tilt_angle(tilt_degrees)
def stage_pan_voltage(self, pan_voltage_percent):
self._robot.stage_cmds(self.compose_voltage(_rc.WW_COMMAND_HEAD_PAN_VOLTAGE,
wwMath.coords_api_to_json_pan(pan_voltage_percent)))
def stage_tilt_voltage(self, tilt_voltage_percent):
self._robot.stage_cmds(self.compose_voltage(_rc.WW_COMMAND_HEAD_TILT_VOLTAGE,
wwMath.coords_api_to_json_tilt(tilt_voltage_percent)))
def stage_pan_tilt_voltage(self, pan_voltage_percent, tilt_voltage_percent):
self.stage_pan_voltage(pan_voltage_percent)
self.stage_tilt_voltage(tilt_voltage_percent)
@do_not_call_within_connect_or_sensors
def do_pan_angle(self, pan_degrees, timeout=None):
self.stage_pan_angle(pan_degrees)
self._block_for_simple_timeout(self._TIME_ANGLE, timeout)
@do_not_call_within_connect_or_sensors
def do_tilt_angle(self, tilt_degrees, timeout=None):
self.stage_tilt_angle(tilt_degrees)
self._block_for_simple_timeout(self._TIME_ANGLE, timeout)
@do_not_call_within_connect_or_sensors
def do_pan_tilt_angle(self, pan_degrees, tilt_degrees, timeout=None):
self.stage_pan_tilt_angle(pan_degrees, tilt_degrees)
self._block_for_simple_timeout(0.2, timeout)
@do_not_call_within_connect_or_sensors
def do_pan_voltage(self, pan_voltage_percent, timeout=None):
self.stage_pan_voltage(pan_voltage_percent)
self._block_for_simple_timeout(self._TIME_VOLTAGE, timeout)
@do_not_call_within_connect_or_sensors
def do_tilt_voltage(self, tilt_voltage_percent, timeout=None):
self.stage_tilt_voltage(tilt_voltage_percent)
self._block_for_simple_timeout(self._TIME_VOLTAGE, timeout)
@do_not_call_within_connect_or_sensors
def do_pan_tilt_voltage(self, pan_voltage_percent, tilt_voltage_percent, timeout=None):
self.stage_pan_tilt_voltage(pan_voltage_percent, tilt_voltage_percent)
self._block_for_simple_timeout(self._TIME_VOLTAGE, timeout)
@staticmethod
def compose_angle(component_id, degrees):
args = {_rcv.WW_COMMAND_VALUE_ANGLE_DEGREE: degrees}
return {component_id: args}
@staticmethod
def compose_voltage(component_id, voltage_percent):
args = {_rcv.WW_COMMAND_VALUE_PERCENTAGE: voltage_percent}
return {component_id: args}
| true | true |
f71ba187051fb0b138a6e1bd429718edeada1fc7 | 38,002 | py | Python | src/azure-cli/azure/cli/command_modules/monitor/grammar/MetricAlertConditionParser.py | psignoret/azure-cli | 1a4a043750315f9a7f2894b4287126089978b615 | [
"MIT"
] | 1 | 2019-12-12T19:55:26.000Z | 2019-12-12T19:55:26.000Z | src/azure-cli/azure/cli/command_modules/monitor/grammar/MetricAlertConditionParser.py | psignoret/azure-cli | 1a4a043750315f9a7f2894b4287126089978b615 | [
"MIT"
] | 2 | 2021-01-15T09:24:07.000Z | 2021-01-15T09:30:10.000Z | src/azure-cli/azure/cli/command_modules/monitor/grammar/MetricAlertConditionParser.py | psignoret/azure-cli | 1a4a043750315f9a7f2894b4287126089978b615 | [
"MIT"
] | 1 | 2019-11-25T19:33:05.000Z | 2019-11-25T19:33:05.000Z | # --------------------------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
# --------------------------------------------------------------------------------------------
# pylint: disable=all
# Generated from MetricAlertCondition.g4 by ANTLR 4.7.2
# encoding: utf-8
from __future__ import print_function
from antlr4 import *
from io import StringIO
import sys
def serializedATN():
with StringIO() as buf:
buf.write(u"\3\u608b\ua72a\u8133\ub9ed\u417c\u3be7\u7786\u5964\3")
buf.write(u"\26~\4\2\t\2\4\3\t\3\4\4\t\4\4\5\t\5\4\6\t\6\4\7\t\7")
buf.write(u"\4\b\t\b\4\t\t\t\4\n\t\n\4\13\t\13\4\f\t\f\4\r\t\r\4")
buf.write(u"\16\t\16\4\17\t\17\4\20\t\20\3\2\3\2\3\2\3\2\7\2%\n\2")
buf.write(u"\f\2\16\2(\13\2\3\2\3\2\3\2\3\2\3\2\3\2\5\2\60\n\2\3")
buf.write(u"\2\3\2\3\2\3\2\7\2\66\n\2\f\2\16\29\13\2\3\2\7\2<\n\2")
buf.write(u"\f\2\16\2?\13\2\3\3\3\3\3\3\3\4\6\4E\n\4\r\4\16\4F\3")
buf.write(u"\5\6\5J\n\5\r\5\16\5K\3\6\3\6\3\6\3\7\3\7\3\b\3\b\3\b")
buf.write(u"\3\t\3\t\3\t\3\t\3\t\7\t[\n\t\f\t\16\t^\13\t\3\n\3\n")
buf.write(u"\3\n\3\n\3\13\3\13\3\13\3\f\3\f\3\f\3\r\3\r\3\r\3\16")
buf.write(u"\3\16\3\16\3\17\3\17\3\17\3\17\7\17t\n\17\f\17\16\17")
buf.write(u"w\13\17\3\20\6\20z\n\20\r\20\16\20{\3\20\2\2\21\2\4\6")
buf.write(u"\b\n\f\16\20\22\24\26\30\32\34\36\2\b\4\2\3\4\26\26\5")
buf.write(u"\2\3\b\24\24\26\26\4\2\t\t\r\r\3\2\16\17\4\2\t\t\20\20")
buf.write(u"\b\2\3\3\7\7\n\13\22\22\24\24\26\26\2w\2 \3\2\2\2\4@")
buf.write(u"\3\2\2\2\6D\3\2\2\2\bI\3\2\2\2\nM\3\2\2\2\fP\3\2\2\2")
buf.write(u"\16R\3\2\2\2\20U\3\2\2\2\22_\3\2\2\2\24c\3\2\2\2\26f")
buf.write(u"\3\2\2\2\30i\3\2\2\2\32l\3\2\2\2\34o\3\2\2\2\36y\3\2")
buf.write(u"\2\2 &\5\4\3\2!\"\5\6\4\2\"#\7\3\2\2#%\3\2\2\2$!\3\2")
buf.write(u"\2\2%(\3\2\2\2&$\3\2\2\2&\'\3\2\2\2\'/\3\2\2\2(&\3\2")
buf.write(u"\2\2)*\7\23\2\2*+\5\b\5\2+,\7\23\2\2,-\7\24\2\2-\60\3")
buf.write(u"\2\2\2.\60\5\b\5\2/)\3\2\2\2/.\3\2\2\2\60\61\3\2\2\2")
buf.write(u"\61\62\5\n\6\2\62\67\5\f\7\2\63\64\7\24\2\2\64\66\5\20")
buf.write(u"\t\2\65\63\3\2\2\2\669\3\2\2\2\67\65\3\2\2\2\678\3\2")
buf.write(u"\2\28=\3\2\2\29\67\3\2\2\2:<\7\25\2\2;:\3\2\2\2<?\3\2")
buf.write(u"\2\2=;\3\2\2\2=>\3\2\2\2>\3\3\2\2\2?=\3\2\2\2@A\7\26")
buf.write(u"\2\2AB\7\24\2\2B\5\3\2\2\2CE\t\2\2\2DC\3\2\2\2EF\3\2")
buf.write(u"\2\2FD\3\2\2\2FG\3\2\2\2G\7\3\2\2\2HJ\t\3\2\2IH\3\2\2")
buf.write(u"\2JK\3\2\2\2KI\3\2\2\2KL\3\2\2\2L\t\3\2\2\2MN\7\21\2")
buf.write(u"\2NO\7\24\2\2O\13\3\2\2\2PQ\7\22\2\2Q\r\3\2\2\2RS\7\f")
buf.write(u"\2\2ST\7\24\2\2T\17\3\2\2\2UV\5\16\b\2V\\\5\22\n\2WX")
buf.write(u"\5\24\13\2XY\5\22\n\2Y[\3\2\2\2ZW\3\2\2\2[^\3\2\2\2\\")
buf.write(u"Z\3\2\2\2\\]\3\2\2\2]\21\3\2\2\2^\\\3\2\2\2_`\5\32\16")
buf.write(u"\2`a\5\26\f\2ab\5\34\17\2b\23\3\2\2\2cd\t\4\2\2de\7\24")
buf.write(u"\2\2e\25\3\2\2\2fg\t\5\2\2gh\7\24\2\2h\27\3\2\2\2ij\t")
buf.write(u"\6\2\2jk\7\24\2\2k\31\3\2\2\2lm\7\26\2\2mn\7\24\2\2n")
buf.write(u"\33\3\2\2\2ou\5\36\20\2pq\5\30\r\2qr\5\36\20\2rt\3\2")
buf.write(u"\2\2sp\3\2\2\2tw\3\2\2\2us\3\2\2\2uv\3\2\2\2v\35\3\2")
buf.write(u"\2\2wu\3\2\2\2xz\t\7\2\2yx\3\2\2\2z{\3\2\2\2{y\3\2\2")
buf.write(u"\2{|\3\2\2\2|\37\3\2\2\2\13&/\67=FK\\u{")
return buf.getvalue()
class MetricAlertConditionParser ( Parser ):
grammarFileName = "MetricAlertCondition.g4"
atn = ATNDeserializer().deserialize(serializedATN())
decisionsToDFA = [ DFA(ds, i) for i, ds in enumerate(atn.decisionToState) ]
sharedContextCache = PredictionContextCache()
literalNames = [ u"<INVALID>", u"'.'", u"'/'", u"'_'", u"'\\'", u"':'",
u"'%'", u"','", u"'-'", u"'*'" ]
symbolicNames = [ u"<INVALID>", u"<INVALID>", u"<INVALID>", u"<INVALID>",
u"<INVALID>", u"<INVALID>", u"<INVALID>", u"<INVALID>",
u"<INVALID>", u"<INVALID>", u"WHERE", u"AND", u"INCLUDES",
u"EXCLUDES", u"OR", u"OPERATOR", u"NUMBER", u"QUOTE",
u"WHITESPACE", u"NEWLINE", u"WORD" ]
RULE_expression = 0
RULE_aggregation = 1
RULE_namespace = 2
RULE_metric = 3
RULE_operator = 4
RULE_threshold = 5
RULE_where = 6
RULE_dimensions = 7
RULE_dimension = 8
RULE_dim_separator = 9
RULE_dim_operator = 10
RULE_dim_val_separator = 11
RULE_dim_name = 12
RULE_dim_values = 13
RULE_dim_value = 14
ruleNames = [ u"expression", u"aggregation", u"namespace", u"metric",
u"operator", u"threshold", u"where", u"dimensions", u"dimension",
u"dim_separator", u"dim_operator", u"dim_val_separator",
u"dim_name", u"dim_values", u"dim_value" ]
EOF = Token.EOF
T__0=1
T__1=2
T__2=3
T__3=4
T__4=5
T__5=6
T__6=7
T__7=8
T__8=9
WHERE=10
AND=11
INCLUDES=12
EXCLUDES=13
OR=14
OPERATOR=15
NUMBER=16
QUOTE=17
WHITESPACE=18
NEWLINE=19
WORD=20
def __init__(self, input, output=sys.stdout):
super(MetricAlertConditionParser, self).__init__(input, output=output)
self.checkVersion("4.7.2")
self._interp = ParserATNSimulator(self, self.atn, self.decisionsToDFA, self.sharedContextCache)
self._predicates = None
class ExpressionContext(ParserRuleContext):
def __init__(self, parser, parent=None, invokingState=-1):
super(MetricAlertConditionParser.ExpressionContext, self).__init__(parent, invokingState)
self.parser = parser
def aggregation(self):
return self.getTypedRuleContext(MetricAlertConditionParser.AggregationContext,0)
def operator(self):
return self.getTypedRuleContext(MetricAlertConditionParser.OperatorContext,0)
def threshold(self):
return self.getTypedRuleContext(MetricAlertConditionParser.ThresholdContext,0)
def QUOTE(self, i=None):
if i is None:
return self.getTokens(MetricAlertConditionParser.QUOTE)
else:
return self.getToken(MetricAlertConditionParser.QUOTE, i)
def metric(self):
return self.getTypedRuleContext(MetricAlertConditionParser.MetricContext,0)
def WHITESPACE(self, i=None):
if i is None:
return self.getTokens(MetricAlertConditionParser.WHITESPACE)
else:
return self.getToken(MetricAlertConditionParser.WHITESPACE, i)
def namespace(self, i=None):
if i is None:
return self.getTypedRuleContexts(MetricAlertConditionParser.NamespaceContext)
else:
return self.getTypedRuleContext(MetricAlertConditionParser.NamespaceContext,i)
def dimensions(self, i=None):
if i is None:
return self.getTypedRuleContexts(MetricAlertConditionParser.DimensionsContext)
else:
return self.getTypedRuleContext(MetricAlertConditionParser.DimensionsContext,i)
def NEWLINE(self, i=None):
if i is None:
return self.getTokens(MetricAlertConditionParser.NEWLINE)
else:
return self.getToken(MetricAlertConditionParser.NEWLINE, i)
def getRuleIndex(self):
return MetricAlertConditionParser.RULE_expression
def enterRule(self, listener):
if hasattr(listener, "enterExpression"):
listener.enterExpression(self)
def exitRule(self, listener):
if hasattr(listener, "exitExpression"):
listener.exitExpression(self)
def expression(self):
localctx = MetricAlertConditionParser.ExpressionContext(self, self._ctx, self.state)
self.enterRule(localctx, 0, self.RULE_expression)
self._la = 0 # Token type
try:
self.enterOuterAlt(localctx, 1)
self.state = 30
self.aggregation()
self.state = 36
self._errHandler.sync(self)
_alt = self._interp.adaptivePredict(self._input,0,self._ctx)
while _alt!=2 and _alt!=ATN.INVALID_ALT_NUMBER:
if _alt==1:
self.state = 31
self.namespace()
self.state = 32
self.match(MetricAlertConditionParser.T__0)
self.state = 38
self._errHandler.sync(self)
_alt = self._interp.adaptivePredict(self._input,0,self._ctx)
self.state = 45
self._errHandler.sync(self)
token = self._input.LA(1)
if token in [MetricAlertConditionParser.QUOTE]:
self.state = 39
self.match(MetricAlertConditionParser.QUOTE)
self.state = 40
self.metric()
self.state = 41
self.match(MetricAlertConditionParser.QUOTE)
self.state = 42
self.match(MetricAlertConditionParser.WHITESPACE)
pass
elif token in [MetricAlertConditionParser.T__0, MetricAlertConditionParser.T__1, MetricAlertConditionParser.T__2, MetricAlertConditionParser.T__3, MetricAlertConditionParser.T__4, MetricAlertConditionParser.T__5, MetricAlertConditionParser.WHITESPACE, MetricAlertConditionParser.WORD]:
self.state = 44
self.metric()
pass
else:
raise NoViableAltException(self)
self.state = 47
self.operator()
self.state = 48
self.threshold()
self.state = 53
self._errHandler.sync(self)
_la = self._input.LA(1)
while _la==MetricAlertConditionParser.WHITESPACE:
self.state = 49
self.match(MetricAlertConditionParser.WHITESPACE)
self.state = 50
self.dimensions()
self.state = 55
self._errHandler.sync(self)
_la = self._input.LA(1)
self.state = 59
self._errHandler.sync(self)
_la = self._input.LA(1)
while _la==MetricAlertConditionParser.NEWLINE:
self.state = 56
self.match(MetricAlertConditionParser.NEWLINE)
self.state = 61
self._errHandler.sync(self)
_la = self._input.LA(1)
except RecognitionException as re:
localctx.exception = re
self._errHandler.reportError(self, re)
self._errHandler.recover(self, re)
finally:
self.exitRule()
return localctx
class AggregationContext(ParserRuleContext):
def __init__(self, parser, parent=None, invokingState=-1):
super(MetricAlertConditionParser.AggregationContext, self).__init__(parent, invokingState)
self.parser = parser
def WORD(self):
return self.getToken(MetricAlertConditionParser.WORD, 0)
def WHITESPACE(self):
return self.getToken(MetricAlertConditionParser.WHITESPACE, 0)
def getRuleIndex(self):
return MetricAlertConditionParser.RULE_aggregation
def enterRule(self, listener):
if hasattr(listener, "enterAggregation"):
listener.enterAggregation(self)
def exitRule(self, listener):
if hasattr(listener, "exitAggregation"):
listener.exitAggregation(self)
def aggregation(self):
localctx = MetricAlertConditionParser.AggregationContext(self, self._ctx, self.state)
self.enterRule(localctx, 2, self.RULE_aggregation)
try:
self.enterOuterAlt(localctx, 1)
self.state = 62
self.match(MetricAlertConditionParser.WORD)
self.state = 63
self.match(MetricAlertConditionParser.WHITESPACE)
except RecognitionException as re:
localctx.exception = re
self._errHandler.reportError(self, re)
self._errHandler.recover(self, re)
finally:
self.exitRule()
return localctx
class NamespaceContext(ParserRuleContext):
def __init__(self, parser, parent=None, invokingState=-1):
super(MetricAlertConditionParser.NamespaceContext, self).__init__(parent, invokingState)
self.parser = parser
def WORD(self, i=None):
if i is None:
return self.getTokens(MetricAlertConditionParser.WORD)
else:
return self.getToken(MetricAlertConditionParser.WORD, i)
def getRuleIndex(self):
return MetricAlertConditionParser.RULE_namespace
def enterRule(self, listener):
if hasattr(listener, "enterNamespace"):
listener.enterNamespace(self)
def exitRule(self, listener):
if hasattr(listener, "exitNamespace"):
listener.exitNamespace(self)
def namespace(self):
localctx = MetricAlertConditionParser.NamespaceContext(self, self._ctx, self.state)
self.enterRule(localctx, 4, self.RULE_namespace)
self._la = 0 # Token type
try:
self.enterOuterAlt(localctx, 1)
self.state = 66
self._errHandler.sync(self)
_alt = 1
while _alt!=2 and _alt!=ATN.INVALID_ALT_NUMBER:
if _alt == 1:
self.state = 65
_la = self._input.LA(1)
if not((((_la) & ~0x3f) == 0 and ((1 << _la) & ((1 << MetricAlertConditionParser.T__0) | (1 << MetricAlertConditionParser.T__1) | (1 << MetricAlertConditionParser.WORD))) != 0)):
self._errHandler.recoverInline(self)
else:
self._errHandler.reportMatch(self)
self.consume()
else:
raise NoViableAltException(self)
self.state = 68
self._errHandler.sync(self)
_alt = self._interp.adaptivePredict(self._input,4,self._ctx)
except RecognitionException as re:
localctx.exception = re
self._errHandler.reportError(self, re)
self._errHandler.recover(self, re)
finally:
self.exitRule()
return localctx
class MetricContext(ParserRuleContext):
def __init__(self, parser, parent=None, invokingState=-1):
super(MetricAlertConditionParser.MetricContext, self).__init__(parent, invokingState)
self.parser = parser
def WORD(self, i=None):
if i is None:
return self.getTokens(MetricAlertConditionParser.WORD)
else:
return self.getToken(MetricAlertConditionParser.WORD, i)
def WHITESPACE(self, i=None):
if i is None:
return self.getTokens(MetricAlertConditionParser.WHITESPACE)
else:
return self.getToken(MetricAlertConditionParser.WHITESPACE, i)
def getRuleIndex(self):
return MetricAlertConditionParser.RULE_metric
def enterRule(self, listener):
if hasattr(listener, "enterMetric"):
listener.enterMetric(self)
def exitRule(self, listener):
if hasattr(listener, "exitMetric"):
listener.exitMetric(self)
def metric(self):
localctx = MetricAlertConditionParser.MetricContext(self, self._ctx, self.state)
self.enterRule(localctx, 6, self.RULE_metric)
self._la = 0 # Token type
try:
self.enterOuterAlt(localctx, 1)
self.state = 71
self._errHandler.sync(self)
_la = self._input.LA(1)
while True:
self.state = 70
_la = self._input.LA(1)
if not((((_la) & ~0x3f) == 0 and ((1 << _la) & ((1 << MetricAlertConditionParser.T__0) | (1 << MetricAlertConditionParser.T__1) | (1 << MetricAlertConditionParser.T__2) | (1 << MetricAlertConditionParser.T__3) | (1 << MetricAlertConditionParser.T__4) | (1 << MetricAlertConditionParser.T__5) | (1 << MetricAlertConditionParser.WHITESPACE) | (1 << MetricAlertConditionParser.WORD))) != 0)):
self._errHandler.recoverInline(self)
else:
self._errHandler.reportMatch(self)
self.consume()
self.state = 73
self._errHandler.sync(self)
_la = self._input.LA(1)
if not ((((_la) & ~0x3f) == 0 and ((1 << _la) & ((1 << MetricAlertConditionParser.T__0) | (1 << MetricAlertConditionParser.T__1) | (1 << MetricAlertConditionParser.T__2) | (1 << MetricAlertConditionParser.T__3) | (1 << MetricAlertConditionParser.T__4) | (1 << MetricAlertConditionParser.T__5) | (1 << MetricAlertConditionParser.WHITESPACE) | (1 << MetricAlertConditionParser.WORD))) != 0)):
break
except RecognitionException as re:
localctx.exception = re
self._errHandler.reportError(self, re)
self._errHandler.recover(self, re)
finally:
self.exitRule()
return localctx
class OperatorContext(ParserRuleContext):
    """Parse-tree context for the `operator` rule (generated by ANTLR — do not edit by hand)."""

    def __init__(self, parser, parent=None, invokingState=-1):
        super(MetricAlertConditionParser.OperatorContext, self).__init__(parent, invokingState)
        self.parser = parser

    def OPERATOR(self):
        return self.getToken(MetricAlertConditionParser.OPERATOR, 0)

    def WHITESPACE(self):
        return self.getToken(MetricAlertConditionParser.WHITESPACE, 0)

    def getRuleIndex(self):
        return MetricAlertConditionParser.RULE_operator

    def enterRule(self, listener):
        # Dispatch only if the listener implements the hook.
        if hasattr(listener, "enterOperator"):
            listener.enterOperator(self)

    def exitRule(self, listener):
        if hasattr(listener, "exitOperator"):
            listener.exitOperator(self)
def operator(self):
    """Parse rule `operator`: OPERATOR WHITESPACE (generated by ANTLR — do not edit by hand)."""
    localctx = MetricAlertConditionParser.OperatorContext(self, self._ctx, self.state)
    self.enterRule(localctx, 8, self.RULE_operator)
    try:
        self.enterOuterAlt(localctx, 1)
        self.state = 75
        self.match(MetricAlertConditionParser.OPERATOR)
        self.state = 76
        self.match(MetricAlertConditionParser.WHITESPACE)
    except RecognitionException as re:
        localctx.exception = re
        self._errHandler.reportError(self, re)
        self._errHandler.recover(self, re)
    finally:
        self.exitRule()
    return localctx
class ThresholdContext(ParserRuleContext):
    """Parse-tree context for the `threshold` rule (generated by ANTLR — do not edit by hand)."""

    def __init__(self, parser, parent=None, invokingState=-1):
        super(MetricAlertConditionParser.ThresholdContext, self).__init__(parent, invokingState)
        self.parser = parser

    def NUMBER(self):
        return self.getToken(MetricAlertConditionParser.NUMBER, 0)

    def getRuleIndex(self):
        return MetricAlertConditionParser.RULE_threshold

    def enterRule(self, listener):
        if hasattr(listener, "enterThreshold"):
            listener.enterThreshold(self)

    def exitRule(self, listener):
        if hasattr(listener, "exitThreshold"):
            listener.exitThreshold(self)
def threshold(self):
    """Parse rule `threshold`: a single NUMBER token (generated by ANTLR — do not edit by hand)."""
    localctx = MetricAlertConditionParser.ThresholdContext(self, self._ctx, self.state)
    self.enterRule(localctx, 10, self.RULE_threshold)
    try:
        self.enterOuterAlt(localctx, 1)
        self.state = 78
        self.match(MetricAlertConditionParser.NUMBER)
    except RecognitionException as re:
        localctx.exception = re
        self._errHandler.reportError(self, re)
        self._errHandler.recover(self, re)
    finally:
        self.exitRule()
    return localctx
class WhereContext(ParserRuleContext):
    """Parse-tree context for the `where` rule (generated by ANTLR — do not edit by hand)."""

    def __init__(self, parser, parent=None, invokingState=-1):
        super(MetricAlertConditionParser.WhereContext, self).__init__(parent, invokingState)
        self.parser = parser

    def WHERE(self):
        return self.getToken(MetricAlertConditionParser.WHERE, 0)

    def WHITESPACE(self):
        return self.getToken(MetricAlertConditionParser.WHITESPACE, 0)

    def getRuleIndex(self):
        return MetricAlertConditionParser.RULE_where

    def enterRule(self, listener):
        if hasattr(listener, "enterWhere"):
            listener.enterWhere(self)

    def exitRule(self, listener):
        if hasattr(listener, "exitWhere"):
            listener.exitWhere(self)
def where(self):
    """Parse rule `where`: WHERE WHITESPACE (generated by ANTLR — do not edit by hand)."""
    localctx = MetricAlertConditionParser.WhereContext(self, self._ctx, self.state)
    self.enterRule(localctx, 12, self.RULE_where)
    try:
        self.enterOuterAlt(localctx, 1)
        self.state = 80
        self.match(MetricAlertConditionParser.WHERE)
        self.state = 81
        self.match(MetricAlertConditionParser.WHITESPACE)
    except RecognitionException as re:
        localctx.exception = re
        self._errHandler.reportError(self, re)
        self._errHandler.recover(self, re)
    finally:
        self.exitRule()
    return localctx
class DimensionsContext(ParserRuleContext):
    """Parse-tree context for the `dimensions` rule (generated by ANTLR — do not edit by hand)."""

    def __init__(self, parser, parent=None, invokingState=-1):
        super(MetricAlertConditionParser.DimensionsContext, self).__init__(parent, invokingState)
        self.parser = parser

    def where(self):
        return self.getTypedRuleContext(MetricAlertConditionParser.WhereContext,0)

    def dimension(self, i=None):
        # i=None returns all child dimension contexts; an int returns the i-th one.
        if i is None:
            return self.getTypedRuleContexts(MetricAlertConditionParser.DimensionContext)
        else:
            return self.getTypedRuleContext(MetricAlertConditionParser.DimensionContext,i)

    def dim_separator(self, i=None):
        if i is None:
            return self.getTypedRuleContexts(MetricAlertConditionParser.Dim_separatorContext)
        else:
            return self.getTypedRuleContext(MetricAlertConditionParser.Dim_separatorContext,i)

    def getRuleIndex(self):
        return MetricAlertConditionParser.RULE_dimensions

    def enterRule(self, listener):
        if hasattr(listener, "enterDimensions"):
            listener.enterDimensions(self)

    def exitRule(self, listener):
        if hasattr(listener, "exitDimensions"):
            listener.exitDimensions(self)
def dimensions(self):
    """Parse rule `dimensions`: where dimension (dim_separator dimension)* (generated by ANTLR — do not edit by hand)."""
    localctx = MetricAlertConditionParser.DimensionsContext(self, self._ctx, self.state)
    self.enterRule(localctx, 14, self.RULE_dimensions)
    self._la = 0 # Token type
    try:
        self.enterOuterAlt(localctx, 1)
        self.state = 83
        self.where()
        self.state = 84
        self.dimension()
        self.state = 90
        self._errHandler.sync(self)
        _la = self._input.LA(1)
        # Additional dimensions are introduced by ',' (T__6) or AND.
        while _la==MetricAlertConditionParser.T__6 or _la==MetricAlertConditionParser.AND:
            self.state = 85
            self.dim_separator()
            self.state = 86
            self.dimension()
            self.state = 92
            self._errHandler.sync(self)
            _la = self._input.LA(1)
    except RecognitionException as re:
        localctx.exception = re
        self._errHandler.reportError(self, re)
        self._errHandler.recover(self, re)
    finally:
        self.exitRule()
    return localctx
class DimensionContext(ParserRuleContext):
    """Parse-tree context for the `dimension` rule (generated by ANTLR — do not edit by hand)."""

    def __init__(self, parser, parent=None, invokingState=-1):
        super(MetricAlertConditionParser.DimensionContext, self).__init__(parent, invokingState)
        self.parser = parser

    def dim_name(self):
        return self.getTypedRuleContext(MetricAlertConditionParser.Dim_nameContext,0)

    def dim_operator(self):
        return self.getTypedRuleContext(MetricAlertConditionParser.Dim_operatorContext,0)

    def dim_values(self):
        return self.getTypedRuleContext(MetricAlertConditionParser.Dim_valuesContext,0)

    def getRuleIndex(self):
        return MetricAlertConditionParser.RULE_dimension

    def enterRule(self, listener):
        if hasattr(listener, "enterDimension"):
            listener.enterDimension(self)

    def exitRule(self, listener):
        if hasattr(listener, "exitDimension"):
            listener.exitDimension(self)
def dimension(self):
    """Parse rule `dimension`: dim_name dim_operator dim_values (generated by ANTLR — do not edit by hand)."""
    localctx = MetricAlertConditionParser.DimensionContext(self, self._ctx, self.state)
    self.enterRule(localctx, 16, self.RULE_dimension)
    try:
        self.enterOuterAlt(localctx, 1)
        self.state = 93
        self.dim_name()
        self.state = 94
        self.dim_operator()
        self.state = 95
        self.dim_values()
    except RecognitionException as re:
        localctx.exception = re
        self._errHandler.reportError(self, re)
        self._errHandler.recover(self, re)
    finally:
        self.exitRule()
    return localctx
class Dim_separatorContext(ParserRuleContext):
    """Parse-tree context for the `dim_separator` rule (generated by ANTLR — do not edit by hand)."""

    def __init__(self, parser, parent=None, invokingState=-1):
        super(MetricAlertConditionParser.Dim_separatorContext, self).__init__(parent, invokingState)
        self.parser = parser

    def WHITESPACE(self):
        return self.getToken(MetricAlertConditionParser.WHITESPACE, 0)

    def AND(self):
        return self.getToken(MetricAlertConditionParser.AND, 0)

    def getRuleIndex(self):
        return MetricAlertConditionParser.RULE_dim_separator

    def enterRule(self, listener):
        if hasattr(listener, "enterDim_separator"):
            listener.enterDim_separator(self)

    def exitRule(self, listener):
        if hasattr(listener, "exitDim_separator"):
            listener.exitDim_separator(self)
def dim_separator(self):
    """Parse rule `dim_separator`: (',' | AND) WHITESPACE (generated by ANTLR — do not edit by hand)."""
    localctx = MetricAlertConditionParser.Dim_separatorContext(self, self._ctx, self.state)
    self.enterRule(localctx, 18, self.RULE_dim_separator)
    self._la = 0 # Token type
    try:
        self.enterOuterAlt(localctx, 1)
        self.state = 97
        _la = self._input.LA(1)
        # Separator must be ',' (T__6) or AND; anything else triggers inline recovery.
        if not(_la==MetricAlertConditionParser.T__6 or _la==MetricAlertConditionParser.AND):
            self._errHandler.recoverInline(self)
        else:
            self._errHandler.reportMatch(self)
            self.consume()
        self.state = 98
        self.match(MetricAlertConditionParser.WHITESPACE)
    except RecognitionException as re:
        localctx.exception = re
        self._errHandler.reportError(self, re)
        self._errHandler.recover(self, re)
    finally:
        self.exitRule()
    return localctx
class Dim_operatorContext(ParserRuleContext):
    """Parse-tree context for the `dim_operator` rule (generated by ANTLR — do not edit by hand)."""

    def __init__(self, parser, parent=None, invokingState=-1):
        super(MetricAlertConditionParser.Dim_operatorContext, self).__init__(parent, invokingState)
        self.parser = parser

    def WHITESPACE(self):
        return self.getToken(MetricAlertConditionParser.WHITESPACE, 0)

    def INCLUDES(self):
        return self.getToken(MetricAlertConditionParser.INCLUDES, 0)

    def EXCLUDES(self):
        return self.getToken(MetricAlertConditionParser.EXCLUDES, 0)

    def getRuleIndex(self):
        return MetricAlertConditionParser.RULE_dim_operator

    def enterRule(self, listener):
        if hasattr(listener, "enterDim_operator"):
            listener.enterDim_operator(self)

    def exitRule(self, listener):
        if hasattr(listener, "exitDim_operator"):
            listener.exitDim_operator(self)
def dim_operator(self):
    """Parse rule `dim_operator`: (INCLUDES | EXCLUDES) WHITESPACE (generated by ANTLR — do not edit by hand)."""
    localctx = MetricAlertConditionParser.Dim_operatorContext(self, self._ctx, self.state)
    self.enterRule(localctx, 20, self.RULE_dim_operator)
    self._la = 0 # Token type
    try:
        self.enterOuterAlt(localctx, 1)
        self.state = 100
        _la = self._input.LA(1)
        if not(_la==MetricAlertConditionParser.INCLUDES or _la==MetricAlertConditionParser.EXCLUDES):
            self._errHandler.recoverInline(self)
        else:
            self._errHandler.reportMatch(self)
            self.consume()
        self.state = 101
        self.match(MetricAlertConditionParser.WHITESPACE)
    except RecognitionException as re:
        localctx.exception = re
        self._errHandler.reportError(self, re)
        self._errHandler.recover(self, re)
    finally:
        self.exitRule()
    return localctx
class Dim_val_separatorContext(ParserRuleContext):
    """Parse-tree context for the `dim_val_separator` rule (generated by ANTLR — do not edit by hand)."""

    def __init__(self, parser, parent=None, invokingState=-1):
        super(MetricAlertConditionParser.Dim_val_separatorContext, self).__init__(parent, invokingState)
        self.parser = parser

    def WHITESPACE(self):
        return self.getToken(MetricAlertConditionParser.WHITESPACE, 0)

    def OR(self):
        return self.getToken(MetricAlertConditionParser.OR, 0)

    def getRuleIndex(self):
        return MetricAlertConditionParser.RULE_dim_val_separator

    def enterRule(self, listener):
        if hasattr(listener, "enterDim_val_separator"):
            listener.enterDim_val_separator(self)

    def exitRule(self, listener):
        if hasattr(listener, "exitDim_val_separator"):
            listener.exitDim_val_separator(self)
def dim_val_separator(self):
    """Parse rule `dim_val_separator`: (',' | OR) WHITESPACE (generated by ANTLR — do not edit by hand)."""
    localctx = MetricAlertConditionParser.Dim_val_separatorContext(self, self._ctx, self.state)
    self.enterRule(localctx, 22, self.RULE_dim_val_separator)
    self._la = 0 # Token type
    try:
        self.enterOuterAlt(localctx, 1)
        self.state = 103
        _la = self._input.LA(1)
        # Value separator must be ',' (T__6) or OR.
        if not(_la==MetricAlertConditionParser.T__6 or _la==MetricAlertConditionParser.OR):
            self._errHandler.recoverInline(self)
        else:
            self._errHandler.reportMatch(self)
            self.consume()
        self.state = 104
        self.match(MetricAlertConditionParser.WHITESPACE)
    except RecognitionException as re:
        localctx.exception = re
        self._errHandler.reportError(self, re)
        self._errHandler.recover(self, re)
    finally:
        self.exitRule()
    return localctx
class Dim_nameContext(ParserRuleContext):
    """Parse-tree context for the `dim_name` rule (generated by ANTLR — do not edit by hand)."""

    def __init__(self, parser, parent=None, invokingState=-1):
        super(MetricAlertConditionParser.Dim_nameContext, self).__init__(parent, invokingState)
        self.parser = parser

    def WORD(self):
        return self.getToken(MetricAlertConditionParser.WORD, 0)

    def WHITESPACE(self):
        return self.getToken(MetricAlertConditionParser.WHITESPACE, 0)

    def getRuleIndex(self):
        return MetricAlertConditionParser.RULE_dim_name

    def enterRule(self, listener):
        if hasattr(listener, "enterDim_name"):
            listener.enterDim_name(self)

    def exitRule(self, listener):
        if hasattr(listener, "exitDim_name"):
            listener.exitDim_name(self)
def dim_name(self):
    """Parse rule `dim_name`: WORD WHITESPACE (generated by ANTLR — do not edit by hand)."""
    localctx = MetricAlertConditionParser.Dim_nameContext(self, self._ctx, self.state)
    self.enterRule(localctx, 24, self.RULE_dim_name)
    try:
        self.enterOuterAlt(localctx, 1)
        self.state = 106
        self.match(MetricAlertConditionParser.WORD)
        self.state = 107
        self.match(MetricAlertConditionParser.WHITESPACE)
    except RecognitionException as re:
        localctx.exception = re
        self._errHandler.reportError(self, re)
        self._errHandler.recover(self, re)
    finally:
        self.exitRule()
    return localctx
class Dim_valuesContext(ParserRuleContext):
    """Parse-tree context for the `dim_values` rule (generated by ANTLR — do not edit by hand)."""

    def __init__(self, parser, parent=None, invokingState=-1):
        super(MetricAlertConditionParser.Dim_valuesContext, self).__init__(parent, invokingState)
        self.parser = parser

    def dim_value(self, i=None):
        # i=None returns all child dim_value contexts; an int returns the i-th one.
        if i is None:
            return self.getTypedRuleContexts(MetricAlertConditionParser.Dim_valueContext)
        else:
            return self.getTypedRuleContext(MetricAlertConditionParser.Dim_valueContext,i)

    def dim_val_separator(self, i=None):
        if i is None:
            return self.getTypedRuleContexts(MetricAlertConditionParser.Dim_val_separatorContext)
        else:
            return self.getTypedRuleContext(MetricAlertConditionParser.Dim_val_separatorContext,i)

    def getRuleIndex(self):
        return MetricAlertConditionParser.RULE_dim_values

    def enterRule(self, listener):
        if hasattr(listener, "enterDim_values"):
            listener.enterDim_values(self)

    def exitRule(self, listener):
        if hasattr(listener, "exitDim_values"):
            listener.exitDim_values(self)
def dim_values(self):
    """Parse rule `dim_values`: dim_value (dim_val_separator dim_value)* (generated by ANTLR — do not edit by hand)."""
    localctx = MetricAlertConditionParser.Dim_valuesContext(self, self._ctx, self.state)
    self.enterRule(localctx, 26, self.RULE_dim_values)
    try:
        self.enterOuterAlt(localctx, 1)
        self.state = 109
        self.dim_value()
        self.state = 115
        self._errHandler.sync(self)
        # Adaptive prediction (decision 7) decides whether another separator+value follows.
        _alt = self._interp.adaptivePredict(self._input,7,self._ctx)
        while _alt!=2 and _alt!=ATN.INVALID_ALT_NUMBER:
            if _alt==1:
                self.state = 110
                self.dim_val_separator()
                self.state = 111
                self.dim_value()
            self.state = 117
            self._errHandler.sync(self)
            _alt = self._interp.adaptivePredict(self._input,7,self._ctx)
    except RecognitionException as re:
        localctx.exception = re
        self._errHandler.reportError(self, re)
        self._errHandler.recover(self, re)
    finally:
        self.exitRule()
    return localctx
class Dim_valueContext(ParserRuleContext):
    """Parse-tree context for the `dim_value` rule (generated by ANTLR — do not edit by hand)."""

    def __init__(self, parser, parent=None, invokingState=-1):
        super(MetricAlertConditionParser.Dim_valueContext, self).__init__(parent, invokingState)
        self.parser = parser

    def NUMBER(self, i=None):
        # i=None returns all NUMBER tokens; an int returns the i-th one.
        if i is None:
            return self.getTokens(MetricAlertConditionParser.NUMBER)
        else:
            return self.getToken(MetricAlertConditionParser.NUMBER, i)

    def WORD(self, i=None):
        if i is None:
            return self.getTokens(MetricAlertConditionParser.WORD)
        else:
            return self.getToken(MetricAlertConditionParser.WORD, i)

    def WHITESPACE(self, i=None):
        if i is None:
            return self.getTokens(MetricAlertConditionParser.WHITESPACE)
        else:
            return self.getToken(MetricAlertConditionParser.WHITESPACE, i)

    def getRuleIndex(self):
        return MetricAlertConditionParser.RULE_dim_value

    def enterRule(self, listener):
        if hasattr(listener, "enterDim_value"):
            listener.enterDim_value(self)

    def exitRule(self, listener):
        if hasattr(listener, "exitDim_value"):
            listener.exitDim_value(self)
def dim_value(self):
    """Parse rule `dim_value`: one or more value tokens (generated by ANTLR — do not edit by hand)."""
    localctx = MetricAlertConditionParser.Dim_valueContext(self, self._ctx, self.state)
    self.enterRule(localctx, 28, self.RULE_dim_value)
    self._la = 0 # Token type
    try:
        self.enterOuterAlt(localctx, 1)
        self.state = 119
        self._errHandler.sync(self)
        _alt = 1
        while _alt!=2 and _alt!=ATN.INVALID_ALT_NUMBER:
            if _alt == 1:
                self.state = 118
                _la = self._input.LA(1)
                # Consume any token in the dimension-value set; otherwise recover inline.
                if not((((_la) & ~0x3f) == 0 and ((1 << _la) & ((1 << MetricAlertConditionParser.T__0) | (1 << MetricAlertConditionParser.T__4) | (1 << MetricAlertConditionParser.T__7) | (1 << MetricAlertConditionParser.T__8) | (1 << MetricAlertConditionParser.NUMBER) | (1 << MetricAlertConditionParser.WHITESPACE) | (1 << MetricAlertConditionParser.WORD))) != 0)):
                    self._errHandler.recoverInline(self)
                else:
                    self._errHandler.reportMatch(self)
                    self.consume()
            else:
                raise NoViableAltException(self)
            self.state = 121
            self._errHandler.sync(self)
            # Adaptive prediction (decision 8) decides whether the value continues.
            _alt = self._interp.adaptivePredict(self._input,8,self._ctx)
    except RecognitionException as re:
        localctx.exception = re
        self._errHandler.reportError(self, re)
        self._errHandler.recover(self, re)
    finally:
        self.exitRule()
    return localctx
| 36.47025 | 406 | 0.599416 |
from __future__ import print_function
from antlr4 import *
from io import StringIO
import sys
def serializedATN():
    """Return the serialized ATN (augmented transition network) for this parser.

    Machine-generated by ANTLR 4.7.2 from MetricAlertCondition.g4 — never edit
    the escape strings by hand; regenerate from the grammar instead.
    """
    with StringIO() as buf:
        buf.write(u"\3\u608b\ua72a\u8133\ub9ed\u417c\u3be7\u7786\u5964\3")
        buf.write(u"\26~\4\2\t\2\4\3\t\3\4\4\t\4\4\5\t\5\4\6\t\6\4\7\t\7")
        buf.write(u"\4\b\t\b\4\t\t\t\4\n\t\n\4\13\t\13\4\f\t\f\4\r\t\r\4")
        buf.write(u"\16\t\16\4\17\t\17\4\20\t\20\3\2\3\2\3\2\3\2\7\2%\n\2")
        buf.write(u"\f\2\16\2(\13\2\3\2\3\2\3\2\3\2\3\2\3\2\5\2\60\n\2\3")
        buf.write(u"\2\3\2\3\2\3\2\7\2\66\n\2\f\2\16\29\13\2\3\2\7\2<\n\2")
        buf.write(u"\f\2\16\2?\13\2\3\3\3\3\3\3\3\4\6\4E\n\4\r\4\16\4F\3")
        buf.write(u"\5\6\5J\n\5\r\5\16\5K\3\6\3\6\3\6\3\7\3\7\3\b\3\b\3\b")
        buf.write(u"\3\t\3\t\3\t\3\t\3\t\7\t[\n\t\f\t\16\t^\13\t\3\n\3\n")
        buf.write(u"\3\n\3\n\3\13\3\13\3\13\3\f\3\f\3\f\3\r\3\r\3\r\3\16")
        buf.write(u"\3\16\3\16\3\17\3\17\3\17\3\17\7\17t\n\17\f\17\16\17")
        buf.write(u"w\13\17\3\20\6\20z\n\20\r\20\16\20{\3\20\2\2\21\2\4\6")
        buf.write(u"\b\n\f\16\20\22\24\26\30\32\34\36\2\b\4\2\3\4\26\26\5")
        buf.write(u"\2\3\b\24\24\26\26\4\2\t\t\r\r\3\2\16\17\4\2\t\t\20\20")
        buf.write(u"\b\2\3\3\7\7\n\13\22\22\24\24\26\26\2w\2 \3\2\2\2\4@")
        buf.write(u"\3\2\2\2\6D\3\2\2\2\bI\3\2\2\2\nM\3\2\2\2\fP\3\2\2\2")
        buf.write(u"\16R\3\2\2\2\20U\3\2\2\2\22_\3\2\2\2\24c\3\2\2\2\26f")
        buf.write(u"\3\2\2\2\30i\3\2\2\2\32l\3\2\2\2\34o\3\2\2\2\36y\3\2")
        buf.write(u"\2\2 &\5\4\3\2!\"\5\6\4\2\"#\7\3\2\2#%\3\2\2\2$!\3\2")
        buf.write(u"\2\2%(\3\2\2\2&$\3\2\2\2&\'\3\2\2\2\'/\3\2\2\2(&\3\2")
        buf.write(u"\2\2)*\7\23\2\2*+\5\b\5\2+,\7\23\2\2,-\7\24\2\2-\60\3")
        buf.write(u"\2\2\2.\60\5\b\5\2/)\3\2\2\2/.\3\2\2\2\60\61\3\2\2\2")
        buf.write(u"\61\62\5\n\6\2\62\67\5\f\7\2\63\64\7\24\2\2\64\66\5\20")
        buf.write(u"\t\2\65\63\3\2\2\2\669\3\2\2\2\67\65\3\2\2\2\678\3\2")
        buf.write(u"\2\28=\3\2\2\29\67\3\2\2\2:<\7\25\2\2;:\3\2\2\2<?\3\2")
        buf.write(u"\2\2=;\3\2\2\2=>\3\2\2\2>\3\3\2\2\2?=\3\2\2\2@A\7\26")
        buf.write(u"\2\2AB\7\24\2\2B\5\3\2\2\2CE\t\2\2\2DC\3\2\2\2EF\3\2")
        buf.write(u"\2\2FD\3\2\2\2FG\3\2\2\2G\7\3\2\2\2HJ\t\3\2\2IH\3\2\2")
        buf.write(u"\2JK\3\2\2\2KI\3\2\2\2KL\3\2\2\2L\t\3\2\2\2MN\7\21\2")
        buf.write(u"\2NO\7\24\2\2O\13\3\2\2\2PQ\7\22\2\2Q\r\3\2\2\2RS\7\f")
        buf.write(u"\2\2ST\7\24\2\2T\17\3\2\2\2UV\5\16\b\2V\\\5\22\n\2WX")
        buf.write(u"\5\24\13\2XY\5\22\n\2Y[\3\2\2\2ZW\3\2\2\2[^\3\2\2\2\\")
        buf.write(u"Z\3\2\2\2\\]\3\2\2\2]\21\3\2\2\2^\\\3\2\2\2_`\5\32\16")
        buf.write(u"\2`a\5\26\f\2ab\5\34\17\2b\23\3\2\2\2cd\t\4\2\2de\7\24")
        buf.write(u"\2\2e\25\3\2\2\2fg\t\5\2\2gh\7\24\2\2h\27\3\2\2\2ij\t")
        buf.write(u"\6\2\2jk\7\24\2\2k\31\3\2\2\2lm\7\26\2\2mn\7\24\2\2n")
        buf.write(u"\33\3\2\2\2ou\5\36\20\2pq\5\30\r\2qr\5\36\20\2rt\3\2")
        buf.write(u"\2\2sp\3\2\2\2tw\3\2\2\2us\3\2\2\2uv\3\2\2\2v\35\3\2")
        buf.write(u"\2\2wu\3\2\2\2xz\t\7\2\2yx\3\2\2\2z{\3\2\2\2{y\3\2\2")
        buf.write(u"\2{|\3\2\2\2|\37\3\2\2\2\13&/\67=FK\\u{")
        return buf.getvalue()
class MetricAlertConditionParser ( Parser ):
# --- Grammar metadata and token/rule constants (generated by ANTLR — do not edit by hand) ---
grammarFileName = "MetricAlertCondition.g4"

# Deserialized ATN, decision DFAs, and prediction-context cache shared by all
# parser instances of this class.
atn = ATNDeserializer().deserialize(serializedATN())

decisionsToDFA = [ DFA(ds, i) for i, ds in enumerate(atn.decisionToState) ]

sharedContextCache = PredictionContextCache()

# Display strings for literal tokens, indexed by token type.
literalNames = [ u"<INVALID>", u"'.'", u"'/'", u"'_'", u"'\\'", u"':'",
                 u"'%'", u"','", u"'-'", u"'*'" ]

# Symbolic token names, indexed by token type.
symbolicNames = [ u"<INVALID>", u"<INVALID>", u"<INVALID>", u"<INVALID>",
                  u"<INVALID>", u"<INVALID>", u"<INVALID>", u"<INVALID>",
                  u"<INVALID>", u"<INVALID>", u"WHERE", u"AND", u"INCLUDES",
                  u"EXCLUDES", u"OR", u"OPERATOR", u"NUMBER", u"QUOTE",
                  u"WHITESPACE", u"NEWLINE", u"WORD" ]

# Rule indices used by enterRule()/exitRule().
RULE_expression = 0
RULE_aggregation = 1
RULE_namespace = 2
RULE_metric = 3
RULE_operator = 4
RULE_threshold = 5
RULE_where = 6
RULE_dimensions = 7
RULE_dimension = 8
RULE_dim_separator = 9
RULE_dim_operator = 10
RULE_dim_val_separator = 11
RULE_dim_name = 12
RULE_dim_values = 13
RULE_dim_value = 14

ruleNames = [ u"expression", u"aggregation", u"namespace", u"metric",
              u"operator", u"threshold", u"where", u"dimensions", u"dimension",
              u"dim_separator", u"dim_operator", u"dim_val_separator",
              u"dim_name", u"dim_values", u"dim_value" ]

# Token type constants (T__N are anonymous literal tokens from the grammar).
EOF = Token.EOF
T__0=1
T__1=2
T__2=3
T__3=4
T__4=5
T__5=6
T__6=7
T__7=8
T__8=9
WHERE=10
AND=11
INCLUDES=12
EXCLUDES=13
OR=14
OPERATOR=15
NUMBER=16
QUOTE=17
WHITESPACE=18
NEWLINE=19
WORD=20
def __init__(self, input, output=sys.stdout):
    """Create the parser over *input* (a TokenStream), writing diagnostics to *output*."""
    super(MetricAlertConditionParser, self).__init__(input, output=output)
    # Fails fast if the installed ANTLR runtime doesn't match the generator version.
    self.checkVersion("4.7.2")
    self._interp = ParserATNSimulator(self, self.atn, self.decisionsToDFA, self.sharedContextCache)
    self._predicates = None
class ExpressionContext(ParserRuleContext):
    """Parse-tree context for the top-level `expression` rule (generated by ANTLR — do not edit by hand)."""

    def __init__(self, parser, parent=None, invokingState=-1):
        super(MetricAlertConditionParser.ExpressionContext, self).__init__(parent, invokingState)
        self.parser = parser

    def aggregation(self):
        return self.getTypedRuleContext(MetricAlertConditionParser.AggregationContext,0)

    def operator(self):
        return self.getTypedRuleContext(MetricAlertConditionParser.OperatorContext,0)

    def threshold(self):
        return self.getTypedRuleContext(MetricAlertConditionParser.ThresholdContext,0)

    def QUOTE(self, i=None):
        # i=None returns all QUOTE tokens; an int returns the i-th one.
        if i is None:
            return self.getTokens(MetricAlertConditionParser.QUOTE)
        else:
            return self.getToken(MetricAlertConditionParser.QUOTE, i)

    def metric(self):
        return self.getTypedRuleContext(MetricAlertConditionParser.MetricContext,0)

    def WHITESPACE(self, i=None):
        if i is None:
            return self.getTokens(MetricAlertConditionParser.WHITESPACE)
        else:
            return self.getToken(MetricAlertConditionParser.WHITESPACE, i)

    def namespace(self, i=None):
        if i is None:
            return self.getTypedRuleContexts(MetricAlertConditionParser.NamespaceContext)
        else:
            return self.getTypedRuleContext(MetricAlertConditionParser.NamespaceContext,i)

    def dimensions(self, i=None):
        if i is None:
            return self.getTypedRuleContexts(MetricAlertConditionParser.DimensionsContext)
        else:
            return self.getTypedRuleContext(MetricAlertConditionParser.DimensionsContext,i)

    def NEWLINE(self, i=None):
        if i is None:
            return self.getTokens(MetricAlertConditionParser.NEWLINE)
        else:
            return self.getToken(MetricAlertConditionParser.NEWLINE, i)

    def getRuleIndex(self):
        return MetricAlertConditionParser.RULE_expression

    def enterRule(self, listener):
        if hasattr(listener, "enterExpression"):
            listener.enterExpression(self)

    def exitRule(self, listener):
        if hasattr(listener, "exitExpression"):
            listener.exitExpression(self)
def expression(self):
    """Parse the top-level `expression` rule (generated by ANTLR — do not edit by hand).

    Shape: aggregation (namespace '.')* (QUOTE metric QUOTE WHITESPACE | metric)
    operator threshold (WHITESPACE dimensions)* NEWLINE*.
    """
    localctx = MetricAlertConditionParser.ExpressionContext(self, self._ctx, self.state)
    self.enterRule(localctx, 0, self.RULE_expression)
    self._la = 0 # Token type
    try:
        self.enterOuterAlt(localctx, 1)
        self.state = 30
        self.aggregation()
        self.state = 36
        self._errHandler.sync(self)
        # Decision 0: zero or more dotted namespace segments.
        _alt = self._interp.adaptivePredict(self._input,0,self._ctx)
        while _alt!=2 and _alt!=ATN.INVALID_ALT_NUMBER:
            if _alt==1:
                self.state = 31
                self.namespace()
                self.state = 32
                self.match(MetricAlertConditionParser.T__0)
            self.state = 38
            self._errHandler.sync(self)
            _alt = self._interp.adaptivePredict(self._input,0,self._ctx)

        self.state = 45
        self._errHandler.sync(self)
        token = self._input.LA(1)
        # Metric name is either quoted (QUOTE metric QUOTE WHITESPACE) or bare.
        if token in [MetricAlertConditionParser.QUOTE]:
            self.state = 39
            self.match(MetricAlertConditionParser.QUOTE)
            self.state = 40
            self.metric()
            self.state = 41
            self.match(MetricAlertConditionParser.QUOTE)
            self.state = 42
            self.match(MetricAlertConditionParser.WHITESPACE)
            pass
        elif token in [MetricAlertConditionParser.T__0, MetricAlertConditionParser.T__1, MetricAlertConditionParser.T__2, MetricAlertConditionParser.T__3, MetricAlertConditionParser.T__4, MetricAlertConditionParser.T__5, MetricAlertConditionParser.WHITESPACE, MetricAlertConditionParser.WORD]:
            self.state = 44
            self.metric()
            pass
        else:
            raise NoViableAltException(self)
        self.state = 47
        self.operator()
        self.state = 48
        self.threshold()
        self.state = 53
        self._errHandler.sync(self)
        _la = self._input.LA(1)
        # Zero or more dimension filter clauses, each preceded by whitespace.
        while _la==MetricAlertConditionParser.WHITESPACE:
            self.state = 49
            self.match(MetricAlertConditionParser.WHITESPACE)
            self.state = 50
            self.dimensions()
            self.state = 55
            self._errHandler.sync(self)
            _la = self._input.LA(1)
        self.state = 59
        self._errHandler.sync(self)
        _la = self._input.LA(1)
        # Trailing newlines are tolerated.
        while _la==MetricAlertConditionParser.NEWLINE:
            self.state = 56
            self.match(MetricAlertConditionParser.NEWLINE)
            self.state = 61
            self._errHandler.sync(self)
            _la = self._input.LA(1)
    except RecognitionException as re:
        localctx.exception = re
        self._errHandler.reportError(self, re)
        self._errHandler.recover(self, re)
    finally:
        self.exitRule()
    return localctx
class AggregationContext(ParserRuleContext):
    """Parse-tree context for the `aggregation` rule (generated by ANTLR — do not edit by hand)."""

    def __init__(self, parser, parent=None, invokingState=-1):
        super(MetricAlertConditionParser.AggregationContext, self).__init__(parent, invokingState)
        self.parser = parser

    def WORD(self):
        return self.getToken(MetricAlertConditionParser.WORD, 0)

    def WHITESPACE(self):
        return self.getToken(MetricAlertConditionParser.WHITESPACE, 0)

    def getRuleIndex(self):
        return MetricAlertConditionParser.RULE_aggregation

    def enterRule(self, listener):
        if hasattr(listener, "enterAggregation"):
            listener.enterAggregation(self)

    def exitRule(self, listener):
        if hasattr(listener, "exitAggregation"):
            listener.exitAggregation(self)
def aggregation(self):
    """Parse rule `aggregation`: WORD WHITESPACE (generated by ANTLR — do not edit by hand)."""
    localctx = MetricAlertConditionParser.AggregationContext(self, self._ctx, self.state)
    self.enterRule(localctx, 2, self.RULE_aggregation)
    try:
        self.enterOuterAlt(localctx, 1)
        self.state = 62
        self.match(MetricAlertConditionParser.WORD)
        self.state = 63
        self.match(MetricAlertConditionParser.WHITESPACE)
    except RecognitionException as re:
        localctx.exception = re
        self._errHandler.reportError(self, re)
        self._errHandler.recover(self, re)
    finally:
        self.exitRule()
    return localctx
class NamespaceContext(ParserRuleContext):
    """Parse-tree context for the `namespace` rule (generated by ANTLR — do not edit by hand)."""

    def __init__(self, parser, parent=None, invokingState=-1):
        super(MetricAlertConditionParser.NamespaceContext, self).__init__(parent, invokingState)
        self.parser = parser

    def WORD(self, i=None):
        # i=None returns all WORD tokens; an int returns the i-th one.
        if i is None:
            return self.getTokens(MetricAlertConditionParser.WORD)
        else:
            return self.getToken(MetricAlertConditionParser.WORD, i)

    def getRuleIndex(self):
        return MetricAlertConditionParser.RULE_namespace

    def enterRule(self, listener):
        if hasattr(listener, "enterNamespace"):
            listener.enterNamespace(self)

    def exitRule(self, listener):
        if hasattr(listener, "exitNamespace"):
            listener.exitNamespace(self)
def namespace(self):
    """Parse rule `namespace`: one or more namespace-segment tokens (generated by ANTLR — do not edit by hand)."""
    localctx = MetricAlertConditionParser.NamespaceContext(self, self._ctx, self.state)
    self.enterRule(localctx, 4, self.RULE_namespace)
    self._la = 0 # Token type
    try:
        self.enterOuterAlt(localctx, 1)
        self.state = 66
        self._errHandler.sync(self)
        _alt = 1
        while _alt!=2 and _alt!=ATN.INVALID_ALT_NUMBER:
            if _alt == 1:
                self.state = 65
                _la = self._input.LA(1)
                # Consume any token in the namespace-segment set; otherwise recover inline.
                if not((((_la) & ~0x3f) == 0 and ((1 << _la) & ((1 << MetricAlertConditionParser.T__0) | (1 << MetricAlertConditionParser.T__1) | (1 << MetricAlertConditionParser.WORD))) != 0)):
                    self._errHandler.recoverInline(self)
                else:
                    self._errHandler.reportMatch(self)
                    self.consume()
            else:
                raise NoViableAltException(self)
            self.state = 68
            self._errHandler.sync(self)
            # Adaptive prediction (decision 4) decides whether the segment continues.
            _alt = self._interp.adaptivePredict(self._input,4,self._ctx)
    except RecognitionException as re:
        localctx.exception = re
        self._errHandler.reportError(self, re)
        self._errHandler.recover(self, re)
    finally:
        self.exitRule()
    return localctx
class MetricContext(ParserRuleContext):
    """Parse-tree context for the `metric` rule (generated by ANTLR — do not edit by hand)."""

    def __init__(self, parser, parent=None, invokingState=-1):
        super(MetricAlertConditionParser.MetricContext, self).__init__(parent, invokingState)
        self.parser = parser

    def WORD(self, i=None):
        # i=None returns all WORD tokens; an int returns the i-th one.
        if i is None:
            return self.getTokens(MetricAlertConditionParser.WORD)
        else:
            return self.getToken(MetricAlertConditionParser.WORD, i)

    def WHITESPACE(self, i=None):
        if i is None:
            return self.getTokens(MetricAlertConditionParser.WHITESPACE)
        else:
            return self.getToken(MetricAlertConditionParser.WHITESPACE, i)

    def getRuleIndex(self):
        return MetricAlertConditionParser.RULE_metric

    def enterRule(self, listener):
        if hasattr(listener, "enterMetric"):
            listener.enterMetric(self)

    def exitRule(self, listener):
        if hasattr(listener, "exitMetric"):
            listener.exitMetric(self)
def metric(self):
    """Parse rule `metric`: one or more metric-name tokens (generated by ANTLR — do not edit by hand)."""
    localctx = MetricAlertConditionParser.MetricContext(self, self._ctx, self.state)
    self.enterRule(localctx, 6, self.RULE_metric)
    self._la = 0 # Token type
    try:
        self.enterOuterAlt(localctx, 1)
        self.state = 71
        self._errHandler.sync(self)
        _la = self._input.LA(1)
        while True:
            self.state = 70
            _la = self._input.LA(1)
            # Consume any token in the metric-name set; otherwise recover inline.
            if not((((_la) & ~0x3f) == 0 and ((1 << _la) & ((1 << MetricAlertConditionParser.T__0) | (1 << MetricAlertConditionParser.T__1) | (1 << MetricAlertConditionParser.T__2) | (1 << MetricAlertConditionParser.T__3) | (1 << MetricAlertConditionParser.T__4) | (1 << MetricAlertConditionParser.T__5) | (1 << MetricAlertConditionParser.WHITESPACE) | (1 << MetricAlertConditionParser.WORD))) != 0)):
                self._errHandler.recoverInline(self)
            else:
                self._errHandler.reportMatch(self)
                self.consume()
            self.state = 73
            self._errHandler.sync(self)
            _la = self._input.LA(1)
            # Stop once the lookahead is no longer in the metric-name token set.
            if not ((((_la) & ~0x3f) == 0 and ((1 << _la) & ((1 << MetricAlertConditionParser.T__0) | (1 << MetricAlertConditionParser.T__1) | (1 << MetricAlertConditionParser.T__2) | (1 << MetricAlertConditionParser.T__3) | (1 << MetricAlertConditionParser.T__4) | (1 << MetricAlertConditionParser.T__5) | (1 << MetricAlertConditionParser.WHITESPACE) | (1 << MetricAlertConditionParser.WORD))) != 0)):
                break
    except RecognitionException as re:
        localctx.exception = re
        self._errHandler.reportError(self, re)
        self._errHandler.recover(self, re)
    finally:
        self.exitRule()
    return localctx
class OperatorContext(ParserRuleContext):
    """Parse-tree context for the `operator` rule (generated by ANTLR — do not edit by hand)."""

    def __init__(self, parser, parent=None, invokingState=-1):
        super(MetricAlertConditionParser.OperatorContext, self).__init__(parent, invokingState)
        self.parser = parser

    def OPERATOR(self):
        return self.getToken(MetricAlertConditionParser.OPERATOR, 0)

    def WHITESPACE(self):
        return self.getToken(MetricAlertConditionParser.WHITESPACE, 0)

    def getRuleIndex(self):
        return MetricAlertConditionParser.RULE_operator

    def enterRule(self, listener):
        if hasattr(listener, "enterOperator"):
            listener.enterOperator(self)

    def exitRule(self, listener):
        if hasattr(listener, "exitOperator"):
            listener.exitOperator(self)
def operator(self):
    """Parse rule `operator`: OPERATOR WHITESPACE (generated by ANTLR — do not edit by hand)."""
    localctx = MetricAlertConditionParser.OperatorContext(self, self._ctx, self.state)
    self.enterRule(localctx, 8, self.RULE_operator)
    try:
        self.enterOuterAlt(localctx, 1)
        self.state = 75
        self.match(MetricAlertConditionParser.OPERATOR)
        self.state = 76
        self.match(MetricAlertConditionParser.WHITESPACE)
    except RecognitionException as re:
        localctx.exception = re
        self._errHandler.reportError(self, re)
        self._errHandler.recover(self, re)
    finally:
        self.exitRule()
    return localctx
class ThresholdContext(ParserRuleContext):
    """Parse-tree context for the `threshold` rule (generated by ANTLR — do not edit by hand)."""

    def __init__(self, parser, parent=None, invokingState=-1):
        super(MetricAlertConditionParser.ThresholdContext, self).__init__(parent, invokingState)
        self.parser = parser

    def NUMBER(self):
        return self.getToken(MetricAlertConditionParser.NUMBER, 0)

    def getRuleIndex(self):
        return MetricAlertConditionParser.RULE_threshold

    def enterRule(self, listener):
        if hasattr(listener, "enterThreshold"):
            listener.enterThreshold(self)

    def exitRule(self, listener):
        if hasattr(listener, "exitThreshold"):
            listener.exitThreshold(self)
def threshold(self):
    """Parse rule `threshold`: a single NUMBER token (generated by ANTLR — do not edit by hand)."""
    localctx = MetricAlertConditionParser.ThresholdContext(self, self._ctx, self.state)
    self.enterRule(localctx, 10, self.RULE_threshold)
    try:
        self.enterOuterAlt(localctx, 1)
        self.state = 78
        self.match(MetricAlertConditionParser.NUMBER)
    except RecognitionException as re:
        localctx.exception = re
        self._errHandler.reportError(self, re)
        self._errHandler.recover(self, re)
    finally:
        self.exitRule()
    return localctx
class WhereContext(ParserRuleContext):
    """Parse-tree context for the `where` rule (generated by ANTLR — do not edit by hand)."""

    def __init__(self, parser, parent=None, invokingState=-1):
        super(MetricAlertConditionParser.WhereContext, self).__init__(parent, invokingState)
        self.parser = parser

    def WHERE(self):
        return self.getToken(MetricAlertConditionParser.WHERE, 0)

    def WHITESPACE(self):
        return self.getToken(MetricAlertConditionParser.WHITESPACE, 0)

    def getRuleIndex(self):
        return MetricAlertConditionParser.RULE_where

    def enterRule(self, listener):
        if hasattr(listener, "enterWhere"):
            listener.enterWhere(self)

    def exitRule(self, listener):
        if hasattr(listener, "exitWhere"):
            listener.exitWhere(self)
def where(self):
    """Parse rule `where`: WHERE WHITESPACE (generated by ANTLR — do not edit by hand)."""
    localctx = MetricAlertConditionParser.WhereContext(self, self._ctx, self.state)
    self.enterRule(localctx, 12, self.RULE_where)
    try:
        self.enterOuterAlt(localctx, 1)
        self.state = 80
        self.match(MetricAlertConditionParser.WHERE)
        self.state = 81
        self.match(MetricAlertConditionParser.WHITESPACE)
    except RecognitionException as re:
        localctx.exception = re
        self._errHandler.reportError(self, re)
        self._errHandler.recover(self, re)
    finally:
        self.exitRule()
    return localctx
class DimensionsContext(ParserRuleContext):
    """Parse-tree context for the `dimensions` rule (generated by ANTLR — do not edit by hand)."""

    def __init__(self, parser, parent=None, invokingState=-1):
        super(MetricAlertConditionParser.DimensionsContext, self).__init__(parent, invokingState)
        self.parser = parser

    def where(self):
        return self.getTypedRuleContext(MetricAlertConditionParser.WhereContext,0)

    def dimension(self, i=None):
        # i=None returns all child dimension contexts; an int returns the i-th one.
        if i is None:
            return self.getTypedRuleContexts(MetricAlertConditionParser.DimensionContext)
        else:
            return self.getTypedRuleContext(MetricAlertConditionParser.DimensionContext,i)

    def dim_separator(self, i=None):
        if i is None:
            return self.getTypedRuleContexts(MetricAlertConditionParser.Dim_separatorContext)
        else:
            return self.getTypedRuleContext(MetricAlertConditionParser.Dim_separatorContext,i)

    def getRuleIndex(self):
        return MetricAlertConditionParser.RULE_dimensions

    def enterRule(self, listener):
        if hasattr(listener, "enterDimensions"):
            listener.enterDimensions(self)

    def exitRule(self, listener):
        if hasattr(listener, "exitDimensions"):
            listener.exitDimensions(self)
    def dimensions(self):
        """Parse ``dimensions``: a ``where`` clause, one ``dimension``, then
        zero or more (separator, dimension) pairs.

        The loop continues while the lookahead token is the ','-style literal
        (T__6) or AND, i.e. while another separator/dimension pair follows.
        """
        localctx = MetricAlertConditionParser.DimensionsContext(self, self._ctx, self.state)
        self.enterRule(localctx, 14, self.RULE_dimensions)
        self._la = 0  # lookahead token type, refreshed inside the loop
        try:
            self.enterOuterAlt(localctx, 1)
            self.state = 83
            self.where()
            self.state = 84
            self.dimension()
            self.state = 90
            self._errHandler.sync(self)
            _la = self._input.LA(1)
            while _la==MetricAlertConditionParser.T__6 or _la==MetricAlertConditionParser.AND:
                self.state = 85
                self.dim_separator()
                self.state = 86
                self.dimension()
                self.state = 92
                self._errHandler.sync(self)
                _la = self._input.LA(1)
        except RecognitionException as re:
            # Standard ANTLR error protocol: attach, report, then try to recover.
            localctx.exception = re
            self._errHandler.reportError(self, re)
            self._errHandler.recover(self, re)
        finally:
            self.exitRule()
        return localctx
    class DimensionContext(ParserRuleContext):
        """ANTLR-generated parse-tree node for one ``dimension`` clause."""
        def __init__(self, parser, parent=None, invokingState=-1):
            super(MetricAlertConditionParser.DimensionContext, self).__init__(parent, invokingState)
            self.parser = parser
        def dim_name(self):
            """Child-context accessor: the dimension's name subrule."""
            return self.getTypedRuleContext(MetricAlertConditionParser.Dim_nameContext,0)
        def dim_operator(self):
            """Child-context accessor: the includes/excludes operator subrule."""
            return self.getTypedRuleContext(MetricAlertConditionParser.Dim_operatorContext,0)
        def dim_values(self):
            """Child-context accessor: the list-of-values subrule."""
            return self.getTypedRuleContext(MetricAlertConditionParser.Dim_valuesContext,0)
        def getRuleIndex(self):
            return MetricAlertConditionParser.RULE_dimension
        def enterRule(self, listener):
            if hasattr(listener, "enterDimension"):
                listener.enterDimension(self)
        def exitRule(self, listener):
            if hasattr(listener, "exitDimension"):
                listener.exitDimension(self)
    def dimension(self):
        """Parse one ``dimension``: name, operator, then the value list."""
        localctx = MetricAlertConditionParser.DimensionContext(self, self._ctx, self.state)
        self.enterRule(localctx, 16, self.RULE_dimension)
        try:
            self.enterOuterAlt(localctx, 1)
            self.state = 93  # ATN state numbers are fixed by the generated ATN
            self.dim_name()
            self.state = 94
            self.dim_operator()
            self.state = 95
            self.dim_values()
        except RecognitionException as re:
            # Standard ANTLR error protocol: attach, report, then try to recover.
            localctx.exception = re
            self._errHandler.reportError(self, re)
            self._errHandler.recover(self, re)
        finally:
            self.exitRule()
        return localctx
    class Dim_separatorContext(ParserRuleContext):
        """ANTLR-generated parse-tree node for the separator between dimensions."""
        def __init__(self, parser, parent=None, invokingState=-1):
            super(MetricAlertConditionParser.Dim_separatorContext, self).__init__(parent, invokingState)
            self.parser = parser
        def WHITESPACE(self):
            """Token accessor: the whitespace that follows the separator."""
            return self.getToken(MetricAlertConditionParser.WHITESPACE, 0)
        def AND(self):
            """Token accessor: the AND keyword, when that alternative was matched."""
            return self.getToken(MetricAlertConditionParser.AND, 0)
        def getRuleIndex(self):
            return MetricAlertConditionParser.RULE_dim_separator
        def enterRule(self, listener):
            if hasattr(listener, "enterDim_separator"):
                listener.enterDim_separator(self)
        def exitRule(self, listener):
            if hasattr(listener, "exitDim_separator"):
                listener.exitDim_separator(self)
    def dim_separator(self):
        """Parse ``dim_separator``: either the ','-style literal (T__6) or the
        AND keyword, followed by whitespace."""
        localctx = MetricAlertConditionParser.Dim_separatorContext(self, self._ctx, self.state)
        self.enterRule(localctx, 18, self.RULE_dim_separator)
        self._la = 0
        try:
            self.enterOuterAlt(localctx, 1)
            self.state = 97
            _la = self._input.LA(1)
            # Token-set match: accept T__6 or AND, otherwise attempt inline recovery.
            if not(_la==MetricAlertConditionParser.T__6 or _la==MetricAlertConditionParser.AND):
                self._errHandler.recoverInline(self)
            else:
                self._errHandler.reportMatch(self)
                self.consume()
            self.state = 98
            self.match(MetricAlertConditionParser.WHITESPACE)
        except RecognitionException as re:
            # Standard ANTLR error protocol: attach, report, then try to recover.
            localctx.exception = re
            self._errHandler.reportError(self, re)
            self._errHandler.recover(self, re)
        finally:
            self.exitRule()
        return localctx
    class Dim_operatorContext(ParserRuleContext):
        """ANTLR-generated parse-tree node for the dimension operator."""
        def __init__(self, parser, parent=None, invokingState=-1):
            super(MetricAlertConditionParser.Dim_operatorContext, self).__init__(parent, invokingState)
            self.parser = parser
        def WHITESPACE(self):
            """Token accessor: the whitespace that follows the operator."""
            return self.getToken(MetricAlertConditionParser.WHITESPACE, 0)
        def INCLUDES(self):
            """Token accessor: the INCLUDES keyword, when that alternative matched."""
            return self.getToken(MetricAlertConditionParser.INCLUDES, 0)
        def EXCLUDES(self):
            """Token accessor: the EXCLUDES keyword, when that alternative matched."""
            return self.getToken(MetricAlertConditionParser.EXCLUDES, 0)
        def getRuleIndex(self):
            return MetricAlertConditionParser.RULE_dim_operator
        def enterRule(self, listener):
            if hasattr(listener, "enterDim_operator"):
                listener.enterDim_operator(self)
        def exitRule(self, listener):
            if hasattr(listener, "exitDim_operator"):
                listener.exitDim_operator(self)
    def dim_operator(self):
        """Parse ``dim_operator``: INCLUDES or EXCLUDES, followed by whitespace."""
        localctx = MetricAlertConditionParser.Dim_operatorContext(self, self._ctx, self.state)
        self.enterRule(localctx, 20, self.RULE_dim_operator)
        self._la = 0
        try:
            self.enterOuterAlt(localctx, 1)
            self.state = 100
            _la = self._input.LA(1)
            # Token-set match: accept INCLUDES or EXCLUDES, else inline recovery.
            if not(_la==MetricAlertConditionParser.INCLUDES or _la==MetricAlertConditionParser.EXCLUDES):
                self._errHandler.recoverInline(self)
            else:
                self._errHandler.reportMatch(self)
                self.consume()
            self.state = 101
            self.match(MetricAlertConditionParser.WHITESPACE)
        except RecognitionException as re:
            # Standard ANTLR error protocol: attach, report, then try to recover.
            localctx.exception = re
            self._errHandler.reportError(self, re)
            self._errHandler.recover(self, re)
        finally:
            self.exitRule()
        return localctx
    class Dim_val_separatorContext(ParserRuleContext):
        """ANTLR-generated parse-tree node for the separator between dimension values."""
        def __init__(self, parser, parent=None, invokingState=-1):
            super(MetricAlertConditionParser.Dim_val_separatorContext, self).__init__(parent, invokingState)
            self.parser = parser
        def WHITESPACE(self):
            """Token accessor: the whitespace that follows the separator."""
            return self.getToken(MetricAlertConditionParser.WHITESPACE, 0)
        def OR(self):
            """Token accessor: the OR keyword, when that alternative was matched."""
            return self.getToken(MetricAlertConditionParser.OR, 0)
        def getRuleIndex(self):
            return MetricAlertConditionParser.RULE_dim_val_separator
        def enterRule(self, listener):
            if hasattr(listener, "enterDim_val_separator"):
                listener.enterDim_val_separator(self)
        def exitRule(self, listener):
            if hasattr(listener, "exitDim_val_separator"):
                listener.exitDim_val_separator(self)
    def dim_val_separator(self):
        """Parse ``dim_val_separator``: either the ','-style literal (T__6) or
        the OR keyword, followed by whitespace."""
        localctx = MetricAlertConditionParser.Dim_val_separatorContext(self, self._ctx, self.state)
        self.enterRule(localctx, 22, self.RULE_dim_val_separator)
        self._la = 0
        try:
            self.enterOuterAlt(localctx, 1)
            self.state = 103
            _la = self._input.LA(1)
            # Token-set match: accept T__6 or OR, otherwise attempt inline recovery.
            if not(_la==MetricAlertConditionParser.T__6 or _la==MetricAlertConditionParser.OR):
                self._errHandler.recoverInline(self)
            else:
                self._errHandler.reportMatch(self)
                self.consume()
            self.state = 104
            self.match(MetricAlertConditionParser.WHITESPACE)
        except RecognitionException as re:
            # Standard ANTLR error protocol: attach, report, then try to recover.
            localctx.exception = re
            self._errHandler.reportError(self, re)
            self._errHandler.recover(self, re)
        finally:
            self.exitRule()
        return localctx
    class Dim_nameContext(ParserRuleContext):
        """ANTLR-generated parse-tree node for a dimension name."""
        def __init__(self, parser, parent=None, invokingState=-1):
            super(MetricAlertConditionParser.Dim_nameContext, self).__init__(parent, invokingState)
            self.parser = parser
        def WORD(self):
            """Token accessor: the WORD token holding the dimension name."""
            return self.getToken(MetricAlertConditionParser.WORD, 0)
        def WHITESPACE(self):
            """Token accessor: the whitespace that follows the name."""
            return self.getToken(MetricAlertConditionParser.WHITESPACE, 0)
        def getRuleIndex(self):
            return MetricAlertConditionParser.RULE_dim_name
        def enterRule(self, listener):
            if hasattr(listener, "enterDim_name"):
                listener.enterDim_name(self)
        def exitRule(self, listener):
            if hasattr(listener, "exitDim_name"):
                listener.exitDim_name(self)
    def dim_name(self):
        """Parse ``dim_name``: a single WORD token followed by whitespace."""
        localctx = MetricAlertConditionParser.Dim_nameContext(self, self._ctx, self.state)
        self.enterRule(localctx, 24, self.RULE_dim_name)
        try:
            self.enterOuterAlt(localctx, 1)
            self.state = 106  # ATN state numbers are fixed by the generated ATN
            self.match(MetricAlertConditionParser.WORD)
            self.state = 107
            self.match(MetricAlertConditionParser.WHITESPACE)
        except RecognitionException as re:
            # Standard ANTLR error protocol: attach, report, then try to recover.
            localctx.exception = re
            self._errHandler.reportError(self, re)
            self._errHandler.recover(self, re)
        finally:
            self.exitRule()
        return localctx
    class Dim_valuesContext(ParserRuleContext):
        """ANTLR-generated parse-tree node for a list of dimension values."""
        def __init__(self, parser, parent=None, invokingState=-1):
            super(MetricAlertConditionParser.Dim_valuesContext, self).__init__(parent, invokingState)
            self.parser = parser
        def dim_value(self, i=None):
            # i is None -> list of all dim_value children; otherwise the i-th one.
            if i is None:
                return self.getTypedRuleContexts(MetricAlertConditionParser.Dim_valueContext)
            else:
                return self.getTypedRuleContext(MetricAlertConditionParser.Dim_valueContext,i)
        def dim_val_separator(self, i=None):
            # i is None -> list of all separator children; otherwise the i-th one.
            if i is None:
                return self.getTypedRuleContexts(MetricAlertConditionParser.Dim_val_separatorContext)
            else:
                return self.getTypedRuleContext(MetricAlertConditionParser.Dim_val_separatorContext,i)
        def getRuleIndex(self):
            return MetricAlertConditionParser.RULE_dim_values
        def enterRule(self, listener):
            if hasattr(listener, "enterDim_values"):
                listener.enterDim_values(self)
        def exitRule(self, listener):
            if hasattr(listener, "exitDim_values"):
                listener.exitDim_values(self)
    def dim_values(self):
        """Parse ``dim_values``: one ``dim_value`` followed by zero or more
        (separator, dim_value) pairs.

        The loop is driven by adaptive prediction (decision 7): _alt == 1
        means another separator/value pair follows; 2 means exit the loop.
        """
        localctx = MetricAlertConditionParser.Dim_valuesContext(self, self._ctx, self.state)
        self.enterRule(localctx, 26, self.RULE_dim_values)
        try:
            self.enterOuterAlt(localctx, 1)
            self.state = 109
            self.dim_value()
            self.state = 115
            self._errHandler.sync(self)
            _alt = self._interp.adaptivePredict(self._input,7,self._ctx)
            while _alt!=2 and _alt!=ATN.INVALID_ALT_NUMBER:
                if _alt==1:
                    self.state = 110
                    self.dim_val_separator()
                    self.state = 111
                    self.dim_value()
                self.state = 117
                self._errHandler.sync(self)
                _alt = self._interp.adaptivePredict(self._input,7,self._ctx)
        except RecognitionException as re:
            # Standard ANTLR error protocol: attach, report, then try to recover.
            localctx.exception = re
            self._errHandler.reportError(self, re)
            self._errHandler.recover(self, re)
        finally:
            self.exitRule()
        return localctx
    class Dim_valueContext(ParserRuleContext):
        """ANTLR-generated parse-tree node for a single dimension value.

        A value may span several NUMBER/WORD/WHITESPACE tokens, so the token
        accessors below return lists when called without an index.
        """
        def __init__(self, parser, parent=None, invokingState=-1):
            super(MetricAlertConditionParser.Dim_valueContext, self).__init__(parent, invokingState)
            self.parser = parser
        def NUMBER(self, i=None):
            # i is None -> list of all NUMBER tokens; otherwise the i-th one.
            if i is None:
                return self.getTokens(MetricAlertConditionParser.NUMBER)
            else:
                return self.getToken(MetricAlertConditionParser.NUMBER, i)
        def WORD(self, i=None):
            # i is None -> list of all WORD tokens; otherwise the i-th one.
            if i is None:
                return self.getTokens(MetricAlertConditionParser.WORD)
            else:
                return self.getToken(MetricAlertConditionParser.WORD, i)
        def WHITESPACE(self, i=None):
            # i is None -> list of all WHITESPACE tokens; otherwise the i-th one.
            if i is None:
                return self.getTokens(MetricAlertConditionParser.WHITESPACE)
            else:
                return self.getToken(MetricAlertConditionParser.WHITESPACE, i)
        def getRuleIndex(self):
            return MetricAlertConditionParser.RULE_dim_value
        def enterRule(self, listener):
            if hasattr(listener, "enterDim_value"):
                listener.enterDim_value(self)
        def exitRule(self, listener):
            if hasattr(listener, "exitDim_value"):
                listener.exitDim_value(self)
    def dim_value(self):
        """Parse ``dim_value``: one or more tokens drawn from the allowed set
        (selected literals plus NUMBER, WHITESPACE and WORD).

        The one-or-more loop is driven by adaptive prediction (decision 8);
        the bitmask test below is the generated encoding of the token set.
        """
        localctx = MetricAlertConditionParser.Dim_valueContext(self, self._ctx, self.state)
        self.enterRule(localctx, 28, self.RULE_dim_value)
        self._la = 0
        try:
            self.enterOuterAlt(localctx, 1)
            self.state = 119
            self._errHandler.sync(self)
            _alt = 1
            while _alt!=2 and _alt!=ATN.INVALID_ALT_NUMBER:
                if _alt == 1:
                    self.state = 118
                    _la = self._input.LA(1)
                    # Generated token-set membership test (bitmask over token types).
                    if not((((_la) & ~0x3f) == 0 and ((1 << _la) & ((1 << MetricAlertConditionParser.T__0) | (1 << MetricAlertConditionParser.T__4) | (1 << MetricAlertConditionParser.T__7) | (1 << MetricAlertConditionParser.T__8) | (1 << MetricAlertConditionParser.NUMBER) | (1 << MetricAlertConditionParser.WHITESPACE) | (1 << MetricAlertConditionParser.WORD))) != 0)):
                        self._errHandler.recoverInline(self)
                    else:
                        self._errHandler.reportMatch(self)
                        self.consume()
                else:
                    # One-or-more loop: a viable first iteration is mandatory.
                    raise NoViableAltException(self)
                self.state = 121
                self._errHandler.sync(self)
                _alt = self._interp.adaptivePredict(self._input,8,self._ctx)
        except RecognitionException as re:
            # Standard ANTLR error protocol: attach, report, then try to recover.
            localctx.exception = re
            self._errHandler.reportError(self, re)
            self._errHandler.recover(self, re)
        finally:
            self.exitRule()
        return localctx
| true | true |
f71ba2edf1eecc10a8e2f66fde3fb97bc55e8af1 | 6,386 | py | Python | examples/pwr_run/checkpointing/debug/ovhd_profile/job6.py | boringlee24/keras_old | 1e1176c45c4952ba1b9b9e58e9cc4df027ab111d | [
"MIT"
] | null | null | null | examples/pwr_run/checkpointing/debug/ovhd_profile/job6.py | boringlee24/keras_old | 1e1176c45c4952ba1b9b9e58e9cc4df027ab111d | [
"MIT"
] | null | null | null | examples/pwr_run/checkpointing/debug/ovhd_profile/job6.py | boringlee24/keras_old | 1e1176c45c4952ba1b9b9e58e9cc4df027ab111d | [
"MIT"
] | null | null | null | """
#Trains a ResNet on the CIFAR10 dataset.
"""
from __future__ import print_function
import keras
from keras.layers import Dense, Conv2D, BatchNormalization, Activation
from keras.layers import AveragePooling2D, Input, Flatten
from keras.optimizers import Adam
from keras.callbacks import ModelCheckpoint, LearningRateScheduler
from keras.callbacks import ReduceLROnPlateau, TensorBoard
from keras.preprocessing.image import ImageDataGenerator
from keras.regularizers import l2
from keras import backend as K
from keras.models import Model
from keras.datasets import cifar10
from keras.applications.vgg16 import VGG16
from keras.applications.vgg19 import VGG19
from keras import models, layers, optimizers
from datetime import datetime
import tensorflow as tf
import numpy as np
import os
import pdb
import sys
import argparse
import time
import signal
import glob
import json
import send_signal
load_start = time.time()
parser = argparse.ArgumentParser(description='Tensorflow Cifar10 Training')
parser.add_argument('--tc', metavar='TESTCASE', type=str, help='specific testcase name')
parser.add_argument('--resume', dest='resume', action='store_true', help='if True, resume training from a checkpoint')
parser.add_argument('--gpu_num', metavar='GPU_NUMBER', type=str, help='select which gpu to use')
parser.add_argument('--node', metavar='HOST_NODE', type=str, help='node of the host (scheduler)')
parser.set_defaults(resume=False)
args = parser.parse_args()
os.environ["CUDA_DEVICE_ORDER"]="PCI_BUS_ID"
os.environ["CUDA_VISIBLE_DEVICES"]=args.gpu_num
# Training parameters
batch_size = 256
args_lr = 0.005
args_model = 'vgg16'
epoch_begin_time = 0
job_name = sys.argv[0].split('.')[0]
save_files = '/scratch/li.baol/checkpoint_test/' + job_name + '*'
total_epochs = 9
starting_epoch = 0
if args.resume:
save_file = glob.glob(save_files)[0]
# epochs = int(save_file.split('/')[4].split('_')[1].split('.')[0])
starting_epoch = int(save_file.split('/')[4].split('.')[0].split('_')[-1])
data_augmentation = True
num_classes = 10
# Subtracting pixel mean improves accuracy
subtract_pixel_mean = True
n = 3
# Model name, depth and version
model_type = args.tc #'P100_resnet50_he_256_1'
# Load the CIFAR10 data.
(x_train, y_train), (x_test, y_test) = cifar10.load_data()
# Normalize data.
x_train = x_train.astype('float32') / 255
x_test = x_test.astype('float32') / 255
# If subtract pixel mean is enabled
if subtract_pixel_mean:
x_train_mean = np.mean(x_train, axis=0)
x_train -= x_train_mean
x_test -= x_train_mean
print('x_train shape:', x_train.shape)
print(x_train.shape[0], 'train samples')
print(x_test.shape[0], 'test samples')
print('y_train shape:', y_train.shape)
# Convert class vectors to binary class matrices.
y_train = keras.utils.to_categorical(y_train, num_classes)
y_test = keras.utils.to_categorical(y_test, num_classes)
if args.resume:
print('resume from checkpoint')
model = keras.models.load_model(save_file)
else:
print('train from start')
model = models.Sequential()
if '16' in args_model:
base_model = VGG16(weights=None, include_top=False, input_shape=(32, 32, 3), pooling=None)
elif '19' in args_model:
base_model = VGG19(weights=None, include_top=False, input_shape=(32, 32, 3), pooling=None)
#base_model.summary()
#pdb.set_trace()
model.add(base_model)
model.add(layers.Flatten())
model.add(layers.BatchNormalization())
model.add(layers.Dense(128, activation='relu'))#, kernel_initializer='he_uniform'))
#model.add(layers.Dropout(0.2))
model.add(layers.BatchNormalization())
model.add(layers.Dense(64, activation='relu'))#, kernel_initializer='he_uniform'))
#model.add(layers.Dropout(0.2))
model.add(layers.BatchNormalization())
model.add(layers.Dense(10, activation='softmax'))#, kernel_initializer='he_uniform'))
model.compile(loss='categorical_crossentropy',
optimizer=Adam(lr=args_lr),
metrics=['accuracy'])
#model.summary()
print(model_type)
#pdb.set_trace()
current_epoch = 0
################### connects interrupt signal to the process #####################
def terminateProcess(signum=None, frame=None):
    """Checkpoint the model, notify the scheduler, and exit the process.

    Registered as the SIGTERM handler via ``signal.signal`` — Python invokes
    signal handlers as ``handler(signum, frame)``, so both parameters must be
    accepted; they default to ``None`` because the script also calls this
    function directly with no arguments at the end of a training run.

    Side effects: removes any existing checkpoint files matching
    ``save_files``, saves the model keyed by ``current_epoch``, and sends a
    ``'<job> save <seconds>'`` message to the scheduler node before exiting.
    """
    save_start = time.time()
    # NOTE: a previous wasted-epoch-time computation here was dead code
    # (its result was never used) and has been removed.
    print('checkpointing the model triggered by kill -15 signal')
    # Delete whatever checkpoint already exists before writing the new one.
    for f in glob.glob(save_files):
        os.remove(f)
    model.save('/scratch/li.baol/checkpoint_test/' + job_name + '_' + str(current_epoch) + '.h5')
    print ('(SIGTERM) terminating the process')
    # Report how long the checkpoint save took to the scheduler node.
    save_time = int(time.time() - save_start)
    message = job_name + ' save ' + str(save_time)
    send_signal.send(args.node, 10002, message)
    sys.exit()
signal.signal(signal.SIGTERM, terminateProcess)
#################################################################################
logdir = '/scratch/li.baol/tsrbrd_log/job_runs/' + model_type + '/' + job_name
tensorboard_callback = TensorBoard(log_dir=logdir)#, update_freq='batch')
class PrintEpoch(keras.callbacks.Callback):
    """Keras callback that mirrors training progress into module globals.

    ``current_epoch`` and ``epoch_begin_time`` are read by the SIGTERM
    checkpoint handler to name the saved file and to measure how much of the
    interrupted epoch was wasted.
    """
    def on_epoch_begin(self, epoch, logs=None):
        # Publish the running epoch index for the checkpoint filename.
        global current_epoch
        #remaining_epochs = epochs - epoch
        current_epoch = epoch
        print('current epoch ' + str(current_epoch))
        # Timestamp the epoch start so interruption cost can be computed.
        global epoch_begin_time
        epoch_begin_time = time.time()
callbacks = [tensorboard_callback, my_callback]
load_time = int(time.time() - load_start)
if args.resume:
message = job_name + ' load ' + str(load_time)
send_signal.send(args.node, 10002, message)
# Score trained model.
scores = model.evaluate(x_test, y_test, verbose=1)
print('Test loss:', scores[0])
print('Test accuracy:', scores[1])
# send signal to indicate job has finished
message = job_name + ' finish'
send_signal.send(args.node, 10002, message)
sys.exit()
model.fit(x_train, y_train,
batch_size=batch_size,
epochs=1,
validation_data=(x_test, y_test),
shuffle=True,
callbacks=callbacks,
initial_epoch=starting_epoch,
verbose=1
)
if not args.resume:
terminateProcess()
| 31 | 118 | 0.699812 |
from __future__ import print_function
import keras
from keras.layers import Dense, Conv2D, BatchNormalization, Activation
from keras.layers import AveragePooling2D, Input, Flatten
from keras.optimizers import Adam
from keras.callbacks import ModelCheckpoint, LearningRateScheduler
from keras.callbacks import ReduceLROnPlateau, TensorBoard
from keras.preprocessing.image import ImageDataGenerator
from keras.regularizers import l2
from keras import backend as K
from keras.models import Model
from keras.datasets import cifar10
from keras.applications.vgg16 import VGG16
from keras.applications.vgg19 import VGG19
from keras import models, layers, optimizers
from datetime import datetime
import tensorflow as tf
import numpy as np
import os
import pdb
import sys
import argparse
import time
import signal
import glob
import json
import send_signal
load_start = time.time()
parser = argparse.ArgumentParser(description='Tensorflow Cifar10 Training')
parser.add_argument('--tc', metavar='TESTCASE', type=str, help='specific testcase name')
parser.add_argument('--resume', dest='resume', action='store_true', help='if True, resume training from a checkpoint')
parser.add_argument('--gpu_num', metavar='GPU_NUMBER', type=str, help='select which gpu to use')
parser.add_argument('--node', metavar='HOST_NODE', type=str, help='node of the host (scheduler)')
parser.set_defaults(resume=False)
args = parser.parse_args()
os.environ["CUDA_DEVICE_ORDER"]="PCI_BUS_ID"
os.environ["CUDA_VISIBLE_DEVICES"]=args.gpu_num
batch_size = 256
args_lr = 0.005
args_model = 'vgg16'
epoch_begin_time = 0
job_name = sys.argv[0].split('.')[0]
save_files = '/scratch/li.baol/checkpoint_test/' + job_name + '*'
total_epochs = 9
starting_epoch = 0
if args.resume:
save_file = glob.glob(save_files)[0]
starting_epoch = int(save_file.split('/')[4].split('.')[0].split('_')[-1])
data_augmentation = True
num_classes = 10
subtract_pixel_mean = True
n = 3
model_type = args.tc
(x_train, y_train), (x_test, y_test) = cifar10.load_data()
x_train = x_train.astype('float32') / 255
x_test = x_test.astype('float32') / 255
if subtract_pixel_mean:
x_train_mean = np.mean(x_train, axis=0)
x_train -= x_train_mean
x_test -= x_train_mean
print('x_train shape:', x_train.shape)
print(x_train.shape[0], 'train samples')
print(x_test.shape[0], 'test samples')
print('y_train shape:', y_train.shape)
y_train = keras.utils.to_categorical(y_train, num_classes)
y_test = keras.utils.to_categorical(y_test, num_classes)
if args.resume:
print('resume from checkpoint')
model = keras.models.load_model(save_file)
else:
print('train from start')
model = models.Sequential()
if '16' in args_model:
base_model = VGG16(weights=None, include_top=False, input_shape=(32, 32, 3), pooling=None)
elif '19' in args_model:
base_model = VGG19(weights=None, include_top=False, input_shape=(32, 32, 3), pooling=None)
model.add(base_model)
model.add(layers.Flatten())
model.add(layers.BatchNormalization())
model.add(layers.Dense(128, activation='relu'))
model.add(layers.BatchNormalization())
model.add(layers.Dense(64, activation='relu'))
model.add(layers.BatchNormalization())
model.add(layers.Dense(10, activation='softmax'))
model.compile(loss='categorical_crossentropy',
optimizer=Adam(lr=args_lr),
metrics=['accuracy'])
print(model_type)
current_epoch = 0
| true | true |
f71ba41051966044cb90f99889d007761687b585 | 1,003 | py | Python | bldr/cmd/init.py | bldr-cmd/bldr-cmd | 300750fbccc2987efd23f69b7b2d76d8563e2995 | [
"Apache-2.0"
] | null | null | null | bldr/cmd/init.py | bldr-cmd/bldr-cmd | 300750fbccc2987efd23f69b7b2d76d8563e2995 | [
"Apache-2.0"
] | null | null | null | bldr/cmd/init.py | bldr-cmd/bldr-cmd | 300750fbccc2987efd23f69b7b2d76d8563e2995 | [
"Apache-2.0"
] | null | null | null | """
`init` Command
"""
import os
import click
import bldr
import bldr.dep
import bldr.gen.render
from bldr.environment import Environment
from bldr.gen.render import CopyTemplatesRender
from bldr.cli import pass_environment, run_cmd
dotbldr_path = os.path.join(os.path.abspath(os.path.dirname(bldr.__file__)), "dotbldr")
@click.command("init", short_help="Initializes a project.")
@click.argument("path", required=False, type=click.Path(resolve_path=True))
@pass_environment
def cli(ctx : Environment, path):
"""Initializes a project."""
if path is None:
path = ctx.cwd
ctx.log(f"Initialized the project in {click.format_filename(path)}")
new_dir = os.path.join(os.path.curdir, ".bldr")
ctx.vlog(f" {click.format_filename(dotbldr_path)} -> {new_dir}")
copy_render = CopyTemplatesRender(ctx, True)
copy_render.walk(dotbldr_path, new_dir)
# NOTE: ctx cannot be used prior to this point!!
run_cmd(ctx, 'gen.up')
| 29.5 | 88 | 0.694915 | import os
import click
import bldr
import bldr.dep
import bldr.gen.render
from bldr.environment import Environment
from bldr.gen.render import CopyTemplatesRender
from bldr.cli import pass_environment, run_cmd
dotbldr_path = os.path.join(os.path.abspath(os.path.dirname(bldr.__file__)), "dotbldr")
@click.command("init", short_help="Initializes a project.")
@click.argument("path", required=False, type=click.Path(resolve_path=True))
@pass_environment
def cli(ctx : Environment, path):
if path is None:
path = ctx.cwd
ctx.log(f"Initialized the project in {click.format_filename(path)}")
new_dir = os.path.join(os.path.curdir, ".bldr")
ctx.vlog(f" {click.format_filename(dotbldr_path)} -> {new_dir}")
copy_render = CopyTemplatesRender(ctx, True)
copy_render.walk(dotbldr_path, new_dir)
run_cmd(ctx, 'gen.up')
| true | true |
f71ba50a7212de1e86d1518d29a3a1762b8ef020 | 1,709 | py | Python | ooobuild/lo/datatransfer/x_transferable_ex.py | Amourspirit/ooo_uno_tmpl | 64e0c86fd68f24794acc22d63d8d32ae05dd12b8 | [
"Apache-2.0"
] | null | null | null | ooobuild/lo/datatransfer/x_transferable_ex.py | Amourspirit/ooo_uno_tmpl | 64e0c86fd68f24794acc22d63d8d32ae05dd12b8 | [
"Apache-2.0"
] | null | null | null | ooobuild/lo/datatransfer/x_transferable_ex.py | Amourspirit/ooo_uno_tmpl | 64e0c86fd68f24794acc22d63d8d32ae05dd12b8 | [
"Apache-2.0"
] | null | null | null | # coding: utf-8
#
# Copyright 2022 :Barry-Thomas-Paul: Moss
#
# Licensed under the Apache License, Version 2.0 (the "License")
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http: // www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# Interface Class
# this is a auto generated file generated by Cheetah
# Libre Office Version: 7.3
# Namespace: com.sun.star.datatransfer
import typing
from abc import abstractmethod
from ..uno.x_interface import XInterface as XInterface_8f010a43
if typing.TYPE_CHECKING:
from .data_flavor import DataFlavor as DataFlavor_ffd30deb
class XTransferableEx(XInterface_8f010a43):
"""
Interface to be implemented by objects used to provide data for a transfer operation.
See Also:
`API XTransferableEx <https://api.libreoffice.org/docs/idl/ref/interfacecom_1_1sun_1_1star_1_1datatransfer_1_1XTransferableEx.html>`_
"""
__ooo_ns__: str = 'com.sun.star.datatransfer'
__ooo_full_ns__: str = 'com.sun.star.datatransfer.XTransferableEx'
__ooo_type_name__: str = 'interface'
__pyunointerface__: str = 'com.sun.star.datatransfer.XTransferableEx'
@abstractmethod
def queryTransferDataFlavors(self, requestedFlavors: 'typing.Tuple[DataFlavor_ffd30deb, ...]') -> 'typing.Tuple[DataFlavor_ffd30deb, ...]':
"""
"""
__all__ = ['XTransferableEx']
| 37.152174 | 143 | 0.752487 |
import typing
from abc import abstractmethod
from ..uno.x_interface import XInterface as XInterface_8f010a43
if typing.TYPE_CHECKING:
from .data_flavor import DataFlavor as DataFlavor_ffd30deb
class XTransferableEx(XInterface_8f010a43):
__ooo_ns__: str = 'com.sun.star.datatransfer'
__ooo_full_ns__: str = 'com.sun.star.datatransfer.XTransferableEx'
__ooo_type_name__: str = 'interface'
__pyunointerface__: str = 'com.sun.star.datatransfer.XTransferableEx'
@abstractmethod
def queryTransferDataFlavors(self, requestedFlavors: 'typing.Tuple[DataFlavor_ffd30deb, ...]') -> 'typing.Tuple[DataFlavor_ffd30deb, ...]':
__all__ = ['XTransferableEx']
| true | true |
f71ba695de947ffa31af9c54c82d879f96136684 | 20,681 | py | Python | sdk/cdn/azure-mgmt-cdn/tests/test_cli_mgmt_cdn.py | rsdoherty/azure-sdk-for-python | 6bba5326677468e6660845a703686327178bb7b1 | [
"MIT"
] | 2,728 | 2015-01-09T10:19:32.000Z | 2022-03-31T14:50:33.000Z | sdk/cdn/azure-mgmt-cdn/tests/test_cli_mgmt_cdn.py | rsdoherty/azure-sdk-for-python | 6bba5326677468e6660845a703686327178bb7b1 | [
"MIT"
] | 17,773 | 2015-01-05T15:57:17.000Z | 2022-03-31T23:50:25.000Z | sdk/cdn/azure-mgmt-cdn/tests/test_cli_mgmt_cdn.py | rsdoherty/azure-sdk-for-python | 6bba5326677468e6660845a703686327178bb7b1 | [
"MIT"
] | 1,916 | 2015-01-19T05:05:41.000Z | 2022-03-31T19:36:44.000Z | # coding: utf-8
#-------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for
# license information.
#--------------------------------------------------------------------------
# TEST SCENARIO COVERAGE
# ----------------------
# Methods Total : 41
# Methods Covered : 41
# Examples Total : 42
# Examples Tested : 42
# Coverage % : 100
# ----------------------
import os
import unittest
import azure.mgmt.cdn
from devtools_testutils import AzureMgmtTestCase, ResourceGroupPreparer
AZURE_LOCATION = 'eastus'
class MgmtCdnTest(AzureMgmtTestCase):
def setUp(self):
super(MgmtCdnTest, self).setUp()
self.mgmt_client = self.create_mgmt_client(
azure.mgmt.cdn.CdnManagementClient
)
@ResourceGroupPreparer(location=AZURE_LOCATION)
def test_cdn(self, resource_group):
SUBSCRIPTION_ID = None
if self.is_live:
SUBSCRIPTION_ID = os.environ.get("AZURE_SUBSCRIPTION_ID", None)
if not SUBSCRIPTION_ID:
SUBSCRIPTION_ID = self.settings.SUBSCRIPTION_ID
RESOURCE_GROUP = resource_group.name
PROFILE_NAME = "profilename"
CDN_WEB_APPLICATION_FIREWALL_POLICY_NAME = "policyname"
ENDPOINT_NAME = "endpoint9527x"
CUSTOM_DOMAIN_NAME = "someDomain"
ORIGIN_NAME = "origin1"
# Profiles_Create[put]
BODY = {
"location": "WestUs",
"sku": {
"name": "Standard_Verizon"
}
}
result = self.mgmt_client.profiles.begin_create(resource_group.name, PROFILE_NAME, BODY)
result = result.result()
"""
# Creates specific policy[put]
BODY = {
"location": "global",
"sku": {
"name": "Standard_Microsoft"
},
"policy_settings": {
"default_redirect_url": "http://www.bing.com",
"default_custom_block_response_status_code": "499",
"default_custom_block_response_body": "PGh0bWw+CjxoZWFkZXI+PHRpdGxlPkhlbGxvPC90aXRsZT48L2hlYWRlcj4KPGJvZHk+CkhlbGxvIHdvcmxkCjwvYm9keT4KPC9odG1sPg=="
},
"rate_limit_rules": {
"rules": [
{
"name": "RateLimitRule1",
"priority": "1",
"enabled_state": "Enabled",
"rate_limit_duration_in_minutes": "0",
"rate_limit_threshold": "1000",
"match_conditions": [
{
"match_variable": "RemoteAddr",
"operator": "IPMatch",
"negate_condition": False,
"transforms": [],
"match_value": [
"192.168.1.0/24",
"10.0.0.0/24"
]
}
],
"action": "Block"
}
]
},
"custom_rules": {
"rules": [
{
"name": "CustomRule1",
"priority": "2",
"enabled_state": "Enabled",
"match_conditions": [
{
"match_variable": "RemoteAddr",
"operator": "GeoMatch",
"negate_condition": False,
"transforms": [],
"match_value": [
"CH"
]
},
{
"match_variable": "RequestHeader",
"selector": "UserAgent",
"operator": "Contains",
"negate_condition": False,
"transforms": [],
"match_value": [
"windows"
]
},
{
"match_variable": "QueryString",
"selector": "search",
"operator": "Contains",
"negate_condition": False,
"transforms": [
"UrlDecode",
"Lowercase"
],
"match_value": [
"<?php",
"?>"
]
}
],
"action": "Block"
}
]
},
"managed_rules": {
"managed_rule_sets": [
{
"rule_set_type": "DefaultRuleSet",
"rule_set_version": "preview-1.0",
"rule_group_overrides": [
{
"rule_group_name": "Group1",
"rules": [
{
"rule_id": "GROUP1-0001",
"enabled_state": "Enabled",
"action": "Redirect"
},
{
"rule_id": "GROUP1-0002",
"enabled_state": "Disabled"
}
]
}
]
}
]
}
}
result = self.mgmt_client.policies.create_or_update(resource_group.name, CDN_WEB_APPLICATION_FIREWALL_POLICY_NAME, BODY)
result = result.result()
"""
# Endpoints_Create[put]
BODY = {
"origin_host_header": "www.bing.com",
"origin_path": "/image",
"content_types_to_compress": [
"text/html",
"application/octet-stream"
],
"is_compression_enabled": True,
"is_http_allowed": True,
"is_https_allowed": True,
"query_string_caching_behavior": "BypassCaching",
# "delivery_policy": {
# "description": "Test description for a policy.",
# "rules": [
# {
# "name": "rule1",
# "order": "1",
# "conditions": [
# {
# "name": "RemoteAddress",
# "parameters": {
# "operator": "IPMatch",
# "negate_condition": True,
# "match_values": [
# "192.168.1.0/24",
# "10.0.0.0/24"
# ],
# "@odata.type": "#Microsoft.Azure.Cdn.Models.DeliveryRuleRemoteAddressConditionParameters"
# }
# }
# ],
# "actions": [
# {
# "name": "CacheExpiration",
# "parameters": {
# "cache_behavior": "Override",
# "cache_duration": "10:10:09",
# "@odata.type": "#Microsoft.Azure.Cdn.Models.DeliveryRuleCacheExpirationActionParameters",
# "cache_type": "All"
# }
# },
# {
# "name": "ModifyResponseHeader",
# "parameters": {
# "header_action": "Overwrite",
# "header_name": "Access-Control-Allow-Origin",
# "value": "*",
# "@odata.type": "#Microsoft.Azure.Cdn.Models.DeliveryRuleHeaderActionParameters"
# }
# },
# {
# "name": "ModifyRequestHeader",
# "parameters": {
# "header_action": "Overwrite",
# "header_name": "Accept-Encoding",
# "value": "gzip",
# "@odata.type": "#Microsoft.Azure.Cdn.Models.DeliveryRuleHeaderActionParameters"
# }
# }
# ]
# }
# ]
# },
"origins": [
{
"name": "origin1",
"host_name": "host1.hello.com"
}
],
# "web_application_firewall_policy_link": {
# "id": "/subscriptions/" + SUBSCRIPTION_ID + "/resourceGroups/" + RESOURCE_GROUP + "/providers/Microsoft.Cdn/CdnWebApplicationFirewallPolicies/" + CDN_WEB_APPLICATION_FIREWALL_POLICY_NAME + ""
# },
"location": "WestUs",
"tags": {
"kay1": "value1"
}
}
result = self.mgmt_client.endpoints.begin_create(resource_group.name, PROFILE_NAME, ENDPOINT_NAME, BODY)
result = result.result()
"""
# CustomDomains_Create[put]
# BODY = {
# "host_name": "www.someDomain.net"
# }
HOST_NAME = "www.someDomain.net"
result = self.mgmt_client.custom_domains.create(resource_group.name, PROFILE_NAME, ENDPOINT_NAME, CUSTOM_DOMAIN_NAME, HOST_NAME)
result = result.result()
# CustomDomains_Get[get]
result = self.mgmt_client.custom_domains.get(resource_group.name, PROFILE_NAME, ENDPOINT_NAME, CUSTOM_DOMAIN_NAME)
"""
# Origins_Get[get]
result = self.mgmt_client.origins.get(resource_group.name, PROFILE_NAME, ENDPOINT_NAME, ORIGIN_NAME)
"""
# Get Policy[get]
result = self.mgmt_client.policies.get(resource_group.name, CDN_WEB_APPLICATION_FIREWALL_POLICY_NAME)
"""
# CustomDomains_ListByEndpoint[get]
result = self.mgmt_client.custom_domains.list_by_endpoint(resource_group.name, PROFILE_NAME, ENDPOINT_NAME)
# Origins_ListByEndpoint[get]
result = self.mgmt_client.origins.list_by_endpoint(resource_group.name, PROFILE_NAME, ENDPOINT_NAME)
# Endpoints_Get[get]
result = self.mgmt_client.endpoints.get(resource_group.name, PROFILE_NAME, ENDPOINT_NAME)
# Endpoints_ListByProfile[get]
result = self.mgmt_client.endpoints.list_by_profile(resource_group.name, PROFILE_NAME)
# List Policies in a Resource Group[get]
result = self.mgmt_client.policies.list(resource_group.name)
# Profiles_Get[get]
result = self.mgmt_client.profiles.get(resource_group.name, PROFILE_NAME)
# Profiles_ListByResourceGroup[get]
result = self.mgmt_client.profiles.list_by_resource_group(resource_group.name)
# List Policies in a Resource Group[get]
result = self.mgmt_client.policies.list(resource_group.name)
# Profiles_List[get]
result = self.mgmt_client.profiles.list()
# Operations_List[get]
result = self.mgmt_client.operations.list()
# EdgeNodes_List[get]
result = self.mgmt_client.edge_nodes.list()
"""
# CustomDomains_DisableCustomHttps[post]
result = self.mgmt_client.custom_domains.disable_custom_https(resource_group.name, PROFILE_NAME, ENDPOINT_NAME, CUSTOM_DOMAIN_NAME)
# CustomDomains_EnableCustomHttpsUsingYourOwnCertificate[post]
BODY = {
"certificate_source": "AzureKeyVault",
"protocol_type": "ServerNameIndication",
"certificate_source_parameters": {
"odata.type": "#Microsoft.Azure.Cdn.Models.KeyVaultCertificateSourceParameters",
"subscription_id": "subid",
"resource_group_name": "RG",
"vault_name": "kv",
"secret_name": "secret1",
"secret_version": "00000000-0000-0000-0000-000000000000",
"update_rule": "NoAction",
"delete_rule": "NoAction"
}
}
result = self.mgmt_client.custom_domains.enable_custom_https(resource_group.name, PROFILE_NAME, ENDPOINT_NAME, CUSTOM_DOMAIN_NAME, BODY)
# CustomDomains_EnableCustomHttpsUsingCDNManagedCertificate[post]
BODY = {
"certificate_source": "Cdn",
"protocol_type": "ServerNameIndication",
"certificate_source_parameters": {
"odata.type": "#Microsoft.Azure.Cdn.Models.CdnCertificateSourceParameters",
"certificate_type": "Shared"
}
}
result = self.mgmt_client.custom_domains.enable_custom_https(resource_group.name, PROFILE_NAME, ENDPOINT_NAME, CUSTOM_DOMAIN_NAME, BODY)
"""
# Origins_Update[patch]
BODY = {
"http_port": "42",
"https_port": "43"
}
result = self.mgmt_client.origins.begin_update(resource_group.name, PROFILE_NAME, ENDPOINT_NAME, ORIGIN_NAME, BODY)
result = result.result()
"""
# Creates specific policy[put]
BODY = {
"location": "WestUs",
"sku": {
"name": "Standard_Microsoft"
},
"policy_settings": {
"default_redirect_url": "http://www.bing.com",
"default_custom_block_response_status_code": "499",
"default_custom_block_response_body": "PGh0bWw+CjxoZWFkZXI+PHRpdGxlPkhlbGxvPC90aXRsZT48L2hlYWRlcj4KPGJvZHk+CkhlbGxvIHdvcmxkCjwvYm9keT4KPC9odG1sPg=="
},
"rate_limit_rules": {
"rules": [
{
"name": "RateLimitRule1",
"priority": "1",
"enabled_state": "Enabled",
"rate_limit_duration_in_minutes": "0",
"rate_limit_threshold": "1000",
"match_conditions": [
{
"match_variable": "RemoteAddr",
"operator": "IPMatch",
"negate_condition": False,
"transforms": [],
"match_value": [
"192.168.1.0/24",
"10.0.0.0/24"
]
}
],
"action": "Block"
}
]
},
"custom_rules": {
"rules": [
{
"name": "CustomRule1",
"priority": "2",
"enabled_state": "Enabled",
"match_conditions": [
{
"match_variable": "RemoteAddr",
"operator": "GeoMatch",
"negate_condition": False,
"transforms": [],
"match_value": [
"CH"
]
},
{
"match_variable": "RequestHeader",
"selector": "UserAgent",
"operator": "Contains",
"negate_condition": False,
"transforms": [],
"match_value": [
"windows"
]
},
{
"match_variable": "QueryString",
"selector": "search",
"operator": "Contains",
"negate_condition": False,
"transforms": [
"UrlDecode",
"Lowercase"
],
"match_value": [
"<?php",
"?>"
]
}
],
"action": "Block"
}
]
},
"managed_rules": {
"managed_rule_sets": [
{
"rule_set_type": "DefaultRuleSet",
"rule_set_version": "preview-1.0",
"rule_group_overrides": [
{
"rule_group_name": "Group1",
"rules": [
{
"rule_id": "GROUP1-0001",
"enabled_state": "Enabled",
"action": "Redirect"
},
{
"rule_id": "GROUP1-0002",
"enabled_state": "Disabled"
}
]
}
]
}
]
}
}
result = self.mgmt_client.policies.create_or_update(resource_group.name, CDN_WEB_APPLICATION_FIREWALL_POLICY_NAME, BODY)
result = result.result()
"""
# Endpoints_ValidateCustomDomain[post]
BODY = {
"host_name": "www.someDomain.com"
}
# HOST_NAME = "www.someDomain.com"
result = self.mgmt_client.endpoints.validate_custom_domain(resource_group.name, PROFILE_NAME, ENDPOINT_NAME, BODY)
# Endpoints_ListResourceUsage[post]
result = self.mgmt_client.endpoints.list_resource_usage(resource_group.name, PROFILE_NAME, ENDPOINT_NAME)
# Endpoints_PurgeContent[post]
BODY = {
"content_paths": [
"/folder1"
]
}
# CONTENT_PATHS = ["/folder1"]
result = self.mgmt_client.endpoints.begin_purge_content(resource_group.name, PROFILE_NAME, ENDPOINT_NAME, BODY)
result = result.result()
# Endpoints_Stop[post]
result = self.mgmt_client.endpoints.begin_stop(resource_group.name, PROFILE_NAME, ENDPOINT_NAME)
result = result.result()
# Endpoints_Start[post]
result = self.mgmt_client.endpoints.begin_start(resource_group.name, PROFILE_NAME, ENDPOINT_NAME)
result = result.result()
# Endpoints_LoadContent[post]
BODY = {
"content_paths": [
"/folder1"
]
}
# CONTENT_PATHS = ["/folder1"]
result = self.mgmt_client.endpoints.begin_load_content(resource_group.name, PROFILE_NAME, ENDPOINT_NAME, BODY)
result = result.result()
# Profiles_ListSupportedOptimizationTypes[post]
result = self.mgmt_client.profiles.list_supported_optimization_types(resource_group.name, PROFILE_NAME)
# Endpoints_Update[patch]
BODY = {
"tags": {
"additional_properties": "Tag1"
},
# "web_application_firewall_policy_link": {
# "id": "/subscriptions/" + SUBSCRIPTION_ID + "/resourceGroups/" + RESOURCE_GROUP + "/providers/Microsoft.Cdn/CdnWebApplicationFirewallPolicies/" + CDN_WEB_APPLICATION_FIREWALL_POLICY_NAME + ""
# }
}
result = self.mgmt_client.endpoints.begin_update(resource_group.name, PROFILE_NAME, ENDPOINT_NAME, BODY)
result = result.result()
# Profiles_ListResourceUsage[post]
result = self.mgmt_client.profiles.list_resource_usage(resource_group.name, PROFILE_NAME)
# Profiles_GenerateSsoUri[post]
result = self.mgmt_client.profiles.generate_sso_uri(resource_group.name, PROFILE_NAME)
# Profiles_Update[patch]
BODY = {
"tags": {
"additional_properties": "Tag1"
}
}
result = self.mgmt_client.profiles.begin_update(resource_group.name, PROFILE_NAME, BODY)
result = result.result()
# CheckNameAvailabilityWithSubscription[post]
BODY = {
"name": "sampleName",
"type": "Microsoft.Cdn/Profiles/Endpoints"
}
# CHECK_NAME = "sampleName"
result = self.mgmt_client.check_name_availability_with_subscription(BODY)
# ResourceUsage_List[post]
result = self.mgmt_client.resource_usage.list()
# ValidateProbe[post]
BODY = {
"probe_url": "https://www.bing.com/image"
}
# PROBEURL = "https://www.bing.com/image"
result = self.mgmt_client.validate_probe(BODY)
# CheckNameAvailability[post]
BODY = {
"name": "sampleName",
"type": "Microsoft.Cdn/Profiles/Endpoints"
}
# CHECKNAME = "sampleName"
result = self.mgmt_client.check_name_availability(BODY)
# CustomDomains_Delete[delete]
result = self.mgmt_client.custom_domains.begin_delete(resource_group.name, PROFILE_NAME, ENDPOINT_NAME, CUSTOM_DOMAIN_NAME)
result = result.result()
"""
# Delete protection policy[delete]
result = self.mgmt_client.policies.delete(resource_group.name, CDN_WEB_APPLICATION_FIREWALL_POLICY_NAME)
"""
# Endpoints_Delete[delete]
result = self.mgmt_client.endpoints.begin_delete(resource_group.name, PROFILE_NAME, ENDPOINT_NAME)
result = result.result()
# Profiles_Delete[delete]
result = self.mgmt_client.profiles.begin_delete(resource_group.name, PROFILE_NAME)
result = result.result()
#------------------------------------------------------------------------------
if __name__ == '__main__':
unittest.main()
| 36.218914 | 205 | 0.499782 |
import os
import unittest
import azure.mgmt.cdn
from devtools_testutils import AzureMgmtTestCase, ResourceGroupPreparer
AZURE_LOCATION = 'eastus'
class MgmtCdnTest(AzureMgmtTestCase):
def setUp(self):
super(MgmtCdnTest, self).setUp()
self.mgmt_client = self.create_mgmt_client(
azure.mgmt.cdn.CdnManagementClient
)
@ResourceGroupPreparer(location=AZURE_LOCATION)
def test_cdn(self, resource_group):
SUBSCRIPTION_ID = None
if self.is_live:
SUBSCRIPTION_ID = os.environ.get("AZURE_SUBSCRIPTION_ID", None)
if not SUBSCRIPTION_ID:
SUBSCRIPTION_ID = self.settings.SUBSCRIPTION_ID
RESOURCE_GROUP = resource_group.name
PROFILE_NAME = "profilename"
CDN_WEB_APPLICATION_FIREWALL_POLICY_NAME = "policyname"
ENDPOINT_NAME = "endpoint9527x"
CUSTOM_DOMAIN_NAME = "someDomain"
ORIGIN_NAME = "origin1"
BODY = {
"location": "WestUs",
"sku": {
"name": "Standard_Verizon"
}
}
result = self.mgmt_client.profiles.begin_create(resource_group.name, PROFILE_NAME, BODY)
result = result.result()
BODY = {
"origin_host_header": "www.bing.com",
"origin_path": "/image",
"content_types_to_compress": [
"text/html",
"application/octet-stream"
],
"is_compression_enabled": True,
"is_http_allowed": True,
"is_https_allowed": True,
"query_string_caching_behavior": "BypassCaching",
"origins": [
{
"name": "origin1",
"host_name": "host1.hello.com"
}
],
"location": "WestUs",
"tags": {
"kay1": "value1"
}
}
result = self.mgmt_client.endpoints.begin_create(resource_group.name, PROFILE_NAME, ENDPOINT_NAME, BODY)
result = result.result()
result = self.mgmt_client.origins.get(resource_group.name, PROFILE_NAME, ENDPOINT_NAME, ORIGIN_NAME)
result = self.mgmt_client.custom_domains.list_by_endpoint(resource_group.name, PROFILE_NAME, ENDPOINT_NAME)
result = self.mgmt_client.origins.list_by_endpoint(resource_group.name, PROFILE_NAME, ENDPOINT_NAME)
result = self.mgmt_client.endpoints.get(resource_group.name, PROFILE_NAME, ENDPOINT_NAME)
result = self.mgmt_client.endpoints.list_by_profile(resource_group.name, PROFILE_NAME)
result = self.mgmt_client.policies.list(resource_group.name)
result = self.mgmt_client.profiles.get(resource_group.name, PROFILE_NAME)
result = self.mgmt_client.profiles.list_by_resource_group(resource_group.name)
result = self.mgmt_client.policies.list(resource_group.name)
result = self.mgmt_client.profiles.list()
result = self.mgmt_client.operations.list()
result = self.mgmt_client.edge_nodes.list()
BODY = {
"http_port": "42",
"https_port": "43"
}
result = self.mgmt_client.origins.begin_update(resource_group.name, PROFILE_NAME, ENDPOINT_NAME, ORIGIN_NAME, BODY)
result = result.result()
BODY = {
"host_name": "www.someDomain.com"
}
result = self.mgmt_client.endpoints.validate_custom_domain(resource_group.name, PROFILE_NAME, ENDPOINT_NAME, BODY)
result = self.mgmt_client.endpoints.list_resource_usage(resource_group.name, PROFILE_NAME, ENDPOINT_NAME)
BODY = {
"content_paths": [
"/folder1"
]
}
result = self.mgmt_client.endpoints.begin_purge_content(resource_group.name, PROFILE_NAME, ENDPOINT_NAME, BODY)
result = result.result()
result = self.mgmt_client.endpoints.begin_stop(resource_group.name, PROFILE_NAME, ENDPOINT_NAME)
result = result.result()
result = self.mgmt_client.endpoints.begin_start(resource_group.name, PROFILE_NAME, ENDPOINT_NAME)
result = result.result()
BODY = {
"content_paths": [
"/folder1"
]
}
result = self.mgmt_client.endpoints.begin_load_content(resource_group.name, PROFILE_NAME, ENDPOINT_NAME, BODY)
result = result.result()
result = self.mgmt_client.profiles.list_supported_optimization_types(resource_group.name, PROFILE_NAME)
BODY = {
"tags": {
"additional_properties": "Tag1"
},
}
result = self.mgmt_client.endpoints.begin_update(resource_group.name, PROFILE_NAME, ENDPOINT_NAME, BODY)
result = result.result()
result = self.mgmt_client.profiles.list_resource_usage(resource_group.name, PROFILE_NAME)
result = self.mgmt_client.profiles.generate_sso_uri(resource_group.name, PROFILE_NAME)
BODY = {
"tags": {
"additional_properties": "Tag1"
}
}
result = self.mgmt_client.profiles.begin_update(resource_group.name, PROFILE_NAME, BODY)
result = result.result()
BODY = {
"name": "sampleName",
"type": "Microsoft.Cdn/Profiles/Endpoints"
}
result = self.mgmt_client.check_name_availability_with_subscription(BODY)
result = self.mgmt_client.resource_usage.list()
BODY = {
"probe_url": "https://www.bing.com/image"
}
result = self.mgmt_client.validate_probe(BODY)
BODY = {
"name": "sampleName",
"type": "Microsoft.Cdn/Profiles/Endpoints"
}
result = self.mgmt_client.check_name_availability(BODY)
result = self.mgmt_client.custom_domains.begin_delete(resource_group.name, PROFILE_NAME, ENDPOINT_NAME, CUSTOM_DOMAIN_NAME)
result = result.result()
result = self.mgmt_client.endpoints.begin_delete(resource_group.name, PROFILE_NAME, ENDPOINT_NAME)
result = result.result()
result = self.mgmt_client.profiles.begin_delete(resource_group.name, PROFILE_NAME)
result = result.result()
if __name__ == '__main__':
unittest.main()
| true | true |
f71ba71e6fe75b165544ce78ca9d1518a8b10f31 | 142 | py | Python | anoi/tests/test_anoitypes.py | jriehl/anoi | 1a7b1759824cd4615731e8d053a4bb04d4f3fea3 | [
"MIT"
] | null | null | null | anoi/tests/test_anoitypes.py | jriehl/anoi | 1a7b1759824cd4615731e8d053a4bb04d4f3fea3 | [
"MIT"
] | null | null | null | anoi/tests/test_anoitypes.py | jriehl/anoi | 1a7b1759824cd4615731e8d053a4bb04d4f3fea3 | [
"MIT"
] | null | null | null | import unittest
from .. import anoitypes
class TestANOITypes(unittest.TestCase):
pass
if __name__ == '__main__':
unittest.main()
| 11.833333 | 39 | 0.711268 | import unittest
from .. import anoitypes
class TestANOITypes(unittest.TestCase):
pass
if __name__ == '__main__':
unittest.main()
| true | true |
f71ba7b251e9bf8b98ca35f68d542663d3b132e2 | 2,286 | py | Python | src/main/resources/de/mpg/biochem/mars/fx/dashboard/xychart.py | imagejan/mars-fx | 8e493c3c9e02aba20747598827b503c3172f949b | [
"BSD-2-Clause"
] | 3 | 2020-03-10T18:02:09.000Z | 2021-09-21T16:52:58.000Z | src/main/resources/de/mpg/biochem/mars/fx/dashboard/xychart.py | imagejan/mars-fx | 8e493c3c9e02aba20747598827b503c3172f949b | [
"BSD-2-Clause"
] | 51 | 2020-03-30T11:42:44.000Z | 2022-03-16T07:09:16.000Z | src/main/resources/de/mpg/biochem/mars/fx/dashboard/xychart.py | imagejan/mars-fx | 8e493c3c9e02aba20747598827b503c3172f949b | [
"BSD-2-Clause"
] | 2 | 2021-04-20T11:51:38.000Z | 2021-09-21T16:39:12.000Z | #@OUTPUT String xlabel
#@OUTPUT String ylabel
#@OUTPUT String title
# OUTPUT Double xmin
# OUTPUT Double xmax
# OUTPUT Double ymin
# OUTPUT Double ymax
# xmin = -2.0
# xmax = 2.0
# ymin = -2.0
# ymax = 2.0
# Set global outputs
xlabel = "X"
ylabel = "Y"
title = "XY Chart"
import math
from java.util import Random
from java.lang import Double
r = Random()
# Series 1 Outputs
#@OUTPUT Double[] series1_xvalues
#@OUTPUT Double[] series1_yvalues
#@OUTPUT Double[] series1_error
#@OUTPUT String series1_fillColor
#@OUTPUT String series1_strokeColor
#@OUTPUT Integer series1_strokeWidth
series1_strokeColor = "rgb(" + str(r.nextInt(255)) + "," + str(r.nextInt(255)) + "," + str(r.nextInt(255)) + ")"
series1_fillColor = series1_strokeColor
series1_strokeWidth = 1
series1_xvalues = []
series1_yvalues = []
series1_error = []
currentY = 0
for i in range(29):
series1_xvalues.append(i)
series1_yvalues.append(currentY)
currentY += r.nextGaussian()
series1_error.append(abs(r.nextGaussian()))
r = Random()
# Series 2 Outputs
#@OUTPUT Double[] series2_xvalues
#@OUTPUT Double[] series2_yvalues
#@OUTPUT Double[] series2_error
#@OUTPUT String series2_fillColor
#@OUTPUT String series2_strokeColor
#@OUTPUT Integer series2_strokeWidth
series2_strokeColor = "rgb(" + str(r.nextInt(255)) + "," + str(r.nextInt(255)) + "," + str(r.nextInt(255)) + ")"
series2_fillColor = series1_strokeColor
series2_strokeWidth = 1
series2_xvalues = []
series2_yvalues = []
series2_error = []
currentY = 0
for i in range(29):
series2_xvalues.append(i)
series2_yvalues.append(currentY)
currentY += r.nextGaussian()
series2_error.append(abs(r.nextGaussian()))
# Series 3 Outputs
#@OUTPUT Double[] series3_xvalues
#@OUTPUT Double[] series3_yvalues
#@OUTPUT Double[] series3_error
#@OUTPUT String series3_fillColor
#@OUTPUT String series3_strokeColor
#@OUTPUT Integer series3_strokeWidth
series3_strokeColor = "rgb(" + str(r.nextInt(255)) + "," + str(r.nextInt(255)) + "," + str(r.nextInt(255)) + ")"
series3_fillColor = series1_strokeColor
series3_strokeWidth = 1
series3_xvalues = []
series3_yvalues = []
series3_error = []
currentY = 0
for i in range(29):
series3_xvalues.append(i)
series3_yvalues.append(currentY)
currentY += r.nextGaussian()
series3_error.append(abs(r.nextGaussian())) | 24.319149 | 112 | 0.737533 |
xlabel = "X"
ylabel = "Y"
title = "XY Chart"
import math
from java.util import Random
from java.lang import Double
r = Random()
series1_strokeColor = "rgb(" + str(r.nextInt(255)) + "," + str(r.nextInt(255)) + "," + str(r.nextInt(255)) + ")"
series1_fillColor = series1_strokeColor
series1_strokeWidth = 1
series1_xvalues = []
series1_yvalues = []
series1_error = []
currentY = 0
for i in range(29):
series1_xvalues.append(i)
series1_yvalues.append(currentY)
currentY += r.nextGaussian()
series1_error.append(abs(r.nextGaussian()))
r = Random()
series2_strokeColor = "rgb(" + str(r.nextInt(255)) + "," + str(r.nextInt(255)) + "," + str(r.nextInt(255)) + ")"
series2_fillColor = series1_strokeColor
series2_strokeWidth = 1
series2_xvalues = []
series2_yvalues = []
series2_error = []
currentY = 0
for i in range(29):
series2_xvalues.append(i)
series2_yvalues.append(currentY)
currentY += r.nextGaussian()
series2_error.append(abs(r.nextGaussian()))
series3_strokeColor = "rgb(" + str(r.nextInt(255)) + "," + str(r.nextInt(255)) + "," + str(r.nextInt(255)) + ")"
series3_fillColor = series1_strokeColor
series3_strokeWidth = 1
series3_xvalues = []
series3_yvalues = []
series3_error = []
currentY = 0
for i in range(29):
series3_xvalues.append(i)
series3_yvalues.append(currentY)
currentY += r.nextGaussian()
series3_error.append(abs(r.nextGaussian())) | true | true |
f71ba974fcdaec851af4d51a7fedeed1954c2992 | 418 | py | Python | homework/第 2 课/Aimee/1-Aimee.py | xrandx/-Dating-with-python-this-winter | d242faeda1598d50c3b371deeccfbbe3bbc8fb51 | [
"Apache-2.0"
] | 3 | 2021-01-03T10:10:25.000Z | 2021-01-11T06:13:40.000Z | homework/第 2 课/Aimee/1-Aimee.py | xrandx/-Dating-with-python-this-winter | d242faeda1598d50c3b371deeccfbbe3bbc8fb51 | [
"Apache-2.0"
] | null | null | null | homework/第 2 课/Aimee/1-Aimee.py | xrandx/-Dating-with-python-this-winter | d242faeda1598d50c3b371deeccfbbe3bbc8fb51 | [
"Apache-2.0"
] | 2 | 2021-01-08T10:12:17.000Z | 2021-01-19T02:03:32.000Z | array=[2,5,8,9,3,6]
a=[1,3,6]
matrix=[a,[2,5,7]]
array.append(1)
print(matrix)
print(array)
new_list=array+matrix
print(new_list)
new_list.pop(-3)
print(new_list)
new_list[0]="T"
print(new_list)
# new_list.clear()
# print(new_list)
print(new_list.index(5))
table=tuple(["Monday","Tuesday"])
print(table[1])
string="123456789"
print(string[1:-3])
f=open("test.txt", "w", encoding="utf-8")
f.write(string*10)
f.close()
| 17.416667 | 41 | 0.69378 | array=[2,5,8,9,3,6]
a=[1,3,6]
matrix=[a,[2,5,7]]
array.append(1)
print(matrix)
print(array)
new_list=array+matrix
print(new_list)
new_list.pop(-3)
print(new_list)
new_list[0]="T"
print(new_list)
print(new_list.index(5))
table=tuple(["Monday","Tuesday"])
print(table[1])
string="123456789"
print(string[1:-3])
f=open("test.txt", "w", encoding="utf-8")
f.write(string*10)
f.close()
| true | true |
f71ba98a374e556c7c9d0359e89bdb1f986ca8f0 | 43,300 | py | Python | bin/api_connector_splunk/solnlib/packages/simpleyaml/emitter.py | CyberGRX/api-connector-splunk | 7f1db1cecb7ae367c1882c3188dc9f8bcb6bc4c6 | [
"MIT"
] | 106 | 2018-03-09T13:03:05.000Z | 2022-03-10T11:01:48.000Z | bin/api_connector_splunk/solnlib/packages/simpleyaml/emitter.py | CyberGRX/api-connector-splunk | 7f1db1cecb7ae367c1882c3188dc9f8bcb6bc4c6 | [
"MIT"
] | 54 | 2016-08-11T14:22:30.000Z | 2020-08-07T22:14:55.000Z | bin/api_connector_splunk/solnlib/packages/simpleyaml/emitter.py | CyberGRX/api-connector-splunk | 7f1db1cecb7ae367c1882c3188dc9f8bcb6bc4c6 | [
"MIT"
] | 33 | 2018-04-23T20:18:11.000Z | 2022-03-27T16:41:03.000Z |
# Emitter expects events obeying the following grammar:
# stream ::= STREAM-START document* STREAM-END
# document ::= DOCUMENT-START node DOCUMENT-END
# node ::= SCALAR | sequence | mapping
# sequence ::= SEQUENCE-START node* SEQUENCE-END
# mapping ::= MAPPING-START (node node)* MAPPING-END
__all__ = ['Emitter', 'EmitterError']
from .error import YAMLError
from .events import *
class EmitterError(YAMLError):
    """Raised when the incoming event stream violates the grammar expected
    by the emitter (see the grammar comment at the top of this module)."""
    pass
class ScalarAnalysis(object):
    """Result record of analyzing a scalar value: which output styles
    (plain / single-quoted / double-quoted / block) may legally represent it.

    Instances are plain data holders consumed by the emitter when it picks
    a scalar style.
    """
    def __init__(self, scalar, empty, multiline,
            allow_flow_plain, allow_block_plain,
            allow_single_quoted, allow_double_quoted,
            allow_block):
        # The analyzed scalar text itself.
        self.scalar = scalar
        # True if the scalar is the empty string.
        self.empty = empty
        # True if the scalar contains line breaks.
        self.multiline = multiline
        # Style permissions: whether the scalar can be written plain in a
        # flow context, plain in a block context, single-quoted,
        # double-quoted, or in a block (literal/folded) style.
        self.allow_flow_plain = allow_flow_plain
        self.allow_block_plain = allow_block_plain
        self.allow_single_quoted = allow_single_quoted
        self.allow_double_quoted = allow_double_quoted
        self.allow_block = allow_block
class Emitter(object):
DEFAULT_TAG_PREFIXES = {
u'!' : u'!',
u'tag:yaml.org,2002:' : u'!!',
}
    def __init__(self, stream, canonical=None, indent=None, width=None,
            allow_unicode=None, line_break=None):
        """Initialize the emitter state machine.

        :param stream: output object providing ``write`` (and possibly
            ``flush``).
        :param canonical: if true, emit canonical YAML (explicit indicators
            and indents).
        :param indent: preferred indent width; honored only if 1 < indent < 10.
        :param width: preferred line width; honored only if greater than
            twice the effective indent.
        :param allow_unicode: presumably controls whether non-ASCII
            characters may be emitted unescaped (used by the scalar writer,
            not shown in this chunk).
        :param line_break: one of ``'\\r'``, ``'\\n'``, ``'\\r\\n'``;
            anything else falls back to ``'\\n'``.
        """
        # The stream should have the methods `write` and possibly `flush`.
        self.stream = stream
        # Encoding can be overriden by STREAM-START.
        self.encoding = None
        # Emitter is a state machine with a stack of states to handle nested
        # structures.
        self.states = []
        self.state = self.expect_stream_start
        # Current event and the event queue.
        self.events = []
        self.event = None
        # The current indentation level and the stack of previous indents.
        self.indents = []
        self.indent = None
        # Flow level.
        self.flow_level = 0
        # Contexts.
        self.root_context = False
        self.sequence_context = False
        self.mapping_context = False
        self.simple_key_context = False
        # Characteristics of the last emitted character:
        # - current position.
        # - is it a whitespace?
        # - is it an indention character
        # (indentation space, '-', '?', or ':')?
        self.line = 0
        self.column = 0
        self.whitespace = True
        self.indention = True
        # Whether the document requires an explicit document indicator
        self.open_ended = False
        # Formatting details.
        self.canonical = canonical
        self.allow_unicode = allow_unicode
        self.best_indent = 2
        if indent and 1 < indent < 10:
            self.best_indent = indent
        self.best_width = 80
        if width and width > self.best_indent*2:
            self.best_width = width
        self.best_line_break = u'\n'
        if line_break in [u'\r', u'\n', u'\r\n']:
            self.best_line_break = line_break
        # Tag prefixes.
        self.tag_prefixes = None
        # Prepared anchor and tag.
        self.prepared_anchor = None
        self.prepared_tag = None
        # Scalar analysis and style.
        self.analysis = None
        self.style = None
def dispose(self):
# Reset the state attributes (to clear self-references)
self.states = []
self.state = None
    def emit(self, event):
        """Queue *event* and process as many queued events as possible.

        Events are buffered until enough lookahead is available (see
        need_more_events); each processed event is dispatched to the
        current state handler.
        """
        self.events.append(event)
        while not self.need_more_events():
            self.event = self.events.pop(0)
            self.state()
            # Drop the reference once the state handler has consumed it.
            self.event = None
# In some cases, we wait for a few next events before emitting.
def need_more_events(self):
if not self.events:
return True
event = self.events[0]
if isinstance(event, DocumentStartEvent):
return self.need_events(1)
elif isinstance(event, SequenceStartEvent):
return self.need_events(2)
elif isinstance(event, MappingStartEvent):
return self.need_events(3)
else:
return False
def need_events(self, count):
level = 0
for event in self.events[1:]:
if isinstance(event, (DocumentStartEvent, CollectionStartEvent)):
level += 1
elif isinstance(event, (DocumentEndEvent, CollectionEndEvent)):
level -= 1
elif isinstance(event, StreamEndEvent):
level = -1
if level < 0:
return False
return (len(self.events) < count+1)
def increase_indent(self, flow=False, indentless=False):
self.indents.append(self.indent)
if self.indent is None:
if flow:
self.indent = self.best_indent
else:
self.indent = 0
elif not indentless:
self.indent += self.best_indent
# States.
# Stream handlers.
    def expect_stream_start(self):
        """Initial state: consume STREAM-START and move to the first
        document.  Any other event is a grammar violation."""
        if isinstance(self.event, StreamStartEvent):
            # Honor the event's encoding only if the stream itself does not
            # already declare one.
            if self.event.encoding and not getattr(self.stream, 'encoding', None):
                self.encoding = self.event.encoding
            self.write_stream_start()
            self.state = self.expect_first_document_start
        else:
            raise EmitterError("expected StreamStartEvent, but got %s"
                    % self.event)
    def expect_nothing(self):
        """Terminal state after STREAM-END: any further event is an error."""
        raise EmitterError("expected nothing, but got %s" % self.event)
# Document handlers.
    def expect_first_document_start(self):
        """Handle the very first document of the stream (its '---' marker
        may be left implicit)."""
        return self.expect_document_start(first=True)
def expect_document_start(self, first=False):
if isinstance(self.event, DocumentStartEvent):
if (self.event.version or self.event.tags) and self.open_ended:
self.write_indicator(u'...', True)
self.write_indent()
if self.event.version:
version_text = self.prepare_version(self.event.version)
self.write_version_directive(version_text)
self.tag_prefixes = self.DEFAULT_TAG_PREFIXES.copy()
if self.event.tags:
handles = self.event.tags.keys()
handles.sort()
for handle in handles:
prefix = self.event.tags[handle]
self.tag_prefixes[prefix] = handle
handle_text = self.prepare_tag_handle(handle)
prefix_text = self.prepare_tag_prefix(prefix)
self.write_tag_directive(handle_text, prefix_text)
implicit = (first and not self.event.explicit and not self.canonical
and not self.event.version and not self.event.tags
and not self.check_empty_document())
if not implicit:
self.write_indent()
self.write_indicator(u'---', True)
if self.canonical:
self.write_indent()
self.state = self.expect_document_root
elif isinstance(self.event, StreamEndEvent):
if self.open_ended:
self.write_indicator(u'...', True)
self.write_indent()
self.write_stream_end()
self.state = self.expect_nothing
else:
raise EmitterError("expected DocumentStartEvent, but got %s"
% self.event)
    def expect_document_end(self):
        """Handle DOCUMENT-END: optionally emit the explicit '...' marker,
        flush the stream, and return to the document-start state."""
        if isinstance(self.event, DocumentEndEvent):
            self.write_indent()
            if self.event.explicit:
                self.write_indicator(u'...', True)
                self.write_indent()
            self.flush_stream()
            self.state = self.expect_document_start
        else:
            raise EmitterError("expected DocumentEndEvent, but got %s"
                    % self.event)
    def expect_document_root(self):
        """Emit the document's root node; afterwards resume at
        expect_document_end."""
        self.states.append(self.expect_document_end)
        self.expect_node(root=True)
# Node handlers.
    def expect_node(self, root=False, sequence=False, mapping=False,
            simple_key=False):
        """Dispatch the current event to the appropriate node handler.

        The boolean flags record the syntactic context of the node (root of
        a document, sequence entry, mapping key/value, simple key); they are
        consulted by the scalar/collection emitters.  Collections are
        emitted in flow style when already inside a flow context, in
        canonical mode, when the event requests it, or when the collection
        is empty; otherwise block style is used.
        """
        self.root_context = root
        self.sequence_context = sequence
        self.mapping_context = mapping
        self.simple_key_context = simple_key
        if isinstance(self.event, AliasEvent):
            self.expect_alias()
        elif isinstance(self.event, (ScalarEvent, CollectionStartEvent)):
            self.process_anchor(u'&')
            self.process_tag()
            if isinstance(self.event, ScalarEvent):
                self.expect_scalar()
            elif isinstance(self.event, SequenceStartEvent):
                if self.flow_level or self.canonical or self.event.flow_style \
                        or self.check_empty_sequence():
                    self.expect_flow_sequence()
                else:
                    self.expect_block_sequence()
            elif isinstance(self.event, MappingStartEvent):
                if self.flow_level or self.canonical or self.event.flow_style \
                        or self.check_empty_mapping():
                    self.expect_flow_mapping()
                else:
                    self.expect_block_mapping()
        else:
            raise EmitterError("expected NodeEvent, but got %s" % self.event)
    def expect_alias(self):
        """Emit an alias node ('*anchor') and pop back to the saved state."""
        if self.event.anchor is None:
            raise EmitterError("anchor is not specified for alias")
        self.process_anchor(u'*')
        self.state = self.states.pop()
    def expect_scalar(self):
        """Emit a scalar node; indentation is raised temporarily so that a
        multi-line scalar wraps correctly, then restored."""
        self.increase_indent(flow=True)
        self.process_scalar()
        self.indent = self.indents.pop()
        self.state = self.states.pop()
# Flow sequence handlers.
    def expect_flow_sequence(self):
        """Open a flow sequence: write '[', enter a new flow/indent level."""
        self.write_indicator(u'[', True, whitespace=True)
        self.flow_level += 1
        self.increase_indent(flow=True)
        self.state = self.expect_first_flow_sequence_item
    def expect_first_flow_sequence_item(self):
        """Emit the first item of a flow sequence, or close it immediately
        if the sequence turns out to be empty (no leading comma either way)."""
        if isinstance(self.event, SequenceEndEvent):
            self.indent = self.indents.pop()
            self.flow_level -= 1
            self.write_indicator(u']', False)
            self.state = self.states.pop()
        else:
            # Break the line in canonical mode or when past the best width.
            if self.canonical or self.column > self.best_width:
                self.write_indent()
            self.states.append(self.expect_flow_sequence_item)
            self.expect_node(sequence=True)
    def expect_flow_sequence_item(self):
        """Emit a subsequent flow-sequence item (preceded by a comma), or
        close the sequence with ']' on SEQUENCE-END."""
        if isinstance(self.event, SequenceEndEvent):
            self.indent = self.indents.pop()
            self.flow_level -= 1
            # Canonical form keeps a trailing comma before the closing ']'.
            if self.canonical:
                self.write_indicator(u',', False)
                self.write_indent()
            self.write_indicator(u']', False)
            self.state = self.states.pop()
        else:
            self.write_indicator(u',', False)
            if self.canonical or self.column > self.best_width:
                self.write_indent()
            self.states.append(self.expect_flow_sequence_item)
            self.expect_node(sequence=True)
# Flow mapping handlers.
    def expect_flow_mapping(self):
        """Open a flow mapping: write '{', enter a new flow/indent level."""
        self.write_indicator(u'{', True, whitespace=True)
        self.flow_level += 1
        self.increase_indent(flow=True)
        self.state = self.expect_first_flow_mapping_key
    def expect_first_flow_mapping_key(self):
        """Emit the first key of a flow mapping, or close it immediately if
        the mapping is empty (no leading comma either way)."""
        if isinstance(self.event, MappingEndEvent):
            self.indent = self.indents.pop()
            self.flow_level -= 1
            self.write_indicator(u'}', False)
            self.state = self.states.pop()
        else:
            if self.canonical or self.column > self.best_width:
                self.write_indent()
            # A simple key can be written inline; otherwise use the
            # explicit '?' key indicator.
            if not self.canonical and self.check_simple_key():
                self.states.append(self.expect_flow_mapping_simple_value)
                self.expect_node(mapping=True, simple_key=True)
            else:
                self.write_indicator(u'?', True)
                self.states.append(self.expect_flow_mapping_value)
                self.expect_node(mapping=True)
    def expect_flow_mapping_key(self):
        """Emit a subsequent flow-mapping key (preceded by a comma), or
        close the mapping with '}' on MAPPING-END."""
        if isinstance(self.event, MappingEndEvent):
            self.indent = self.indents.pop()
            self.flow_level -= 1
            # Canonical form keeps a trailing comma before the closing '}'.
            if self.canonical:
                self.write_indicator(u',', False)
                self.write_indent()
            self.write_indicator(u'}', False)
            self.state = self.states.pop()
        else:
            self.write_indicator(u',', False)
            if self.canonical or self.column > self.best_width:
                self.write_indent()
            # Simple keys are written inline; complex keys need '?'.
            if not self.canonical and self.check_simple_key():
                self.states.append(self.expect_flow_mapping_simple_value)
                self.expect_node(mapping=True, simple_key=True)
            else:
                self.write_indicator(u'?', True)
                self.states.append(self.expect_flow_mapping_value)
                self.expect_node(mapping=True)
    def expect_flow_mapping_simple_value(self):
        """Emit the ':' after a simple key, then the value node."""
        self.write_indicator(u':', False)
        self.states.append(self.expect_flow_mapping_key)
        self.expect_node(mapping=True)
    def expect_flow_mapping_value(self):
        """Emit the ': ' after an explicit ('?') key, then the value node."""
        if self.canonical or self.column > self.best_width:
            self.write_indent()
        self.write_indicator(u':', True)
        self.states.append(self.expect_flow_mapping_key)
        self.expect_node(mapping=True)
# Block sequence handlers.
    def expect_block_sequence(self):
        """Open a block sequence.  A sequence that is a mapping value on the
        same line ('key: - ...') is emitted without extra indentation."""
        indentless = (self.mapping_context and not self.indention)
        self.increase_indent(flow=False, indentless=indentless)
        self.state = self.expect_first_block_sequence_item
    def expect_first_block_sequence_item(self):
        """Emit the first block-sequence item."""
        return self.expect_block_sequence_item(first=True)
    def expect_block_sequence_item(self, first=False):
        """Emit one '- item' entry of a block sequence, or pop the state on
        SEQUENCE-END.  An empty sequence never reaches this handler in
        block style (it is forced to flow style by expect_node), hence the
        end-check is skipped for the first item."""
        if not first and isinstance(self.event, SequenceEndEvent):
            self.indent = self.indents.pop()
            self.state = self.states.pop()
        else:
            self.write_indent()
            self.write_indicator(u'-', True, indention=True)
            self.states.append(self.expect_block_sequence_item)
            self.expect_node(sequence=True)
# Block mapping handlers.
    def expect_block_mapping(self):
        """Open a block mapping at one deeper indentation level."""
        self.increase_indent(flow=False)
        self.state = self.expect_first_block_mapping_key
def expect_first_block_mapping_key(self):
return self.expect_block_mapping_key(first=True)
def expect_block_mapping_key(self, first=False):
if not first and isinstance(self.event, MappingEndEvent):
self.indent = self.indents.pop()
self.state = self.states.pop()
else:
self.write_indent()
if self.check_simple_key():
self.states.append(self.expect_block_mapping_simple_value)
self.expect_node(mapping=True, simple_key=True)
else:
self.write_indicator(u'?', True, indention=True)
self.states.append(self.expect_block_mapping_value)
self.expect_node(mapping=True)
def expect_block_mapping_simple_value(self):
self.write_indicator(u':', False)
self.states.append(self.expect_block_mapping_key)
self.expect_node(mapping=True)
def expect_block_mapping_value(self):
self.write_indent()
self.write_indicator(u':', True, indention=True)
self.states.append(self.expect_block_mapping_key)
self.expect_node(mapping=True)
    # Checkers.
    def check_empty_sequence(self):
        """True if the current event opens a sequence that ends immediately."""
        return (isinstance(self.event, SequenceStartEvent) and self.events
                and isinstance(self.events[0], SequenceEndEvent))
    def check_empty_mapping(self):
        """True if the current event opens a mapping that ends immediately."""
        return (isinstance(self.event, MappingStartEvent) and self.events
                and isinstance(self.events[0], MappingEndEvent))
    def check_empty_document(self):
        """True if the next document is a single empty implicit plain scalar."""
        if not isinstance(self.event, DocumentStartEvent) or not self.events:
            return False
        event = self.events[0]
        return (isinstance(event, ScalarEvent) and event.anchor is None
                and event.tag is None and event.implicit and event.value == u'')
    def check_simple_key(self):
        """Decide whether the current node can be a simple key: under 128
        characters (anchor + tag + scalar) and not empty or multi-line.

        Caches the prepared anchor/tag/analysis for later process_* calls.
        """
        length = 0
        if isinstance(self.event, NodeEvent) and self.event.anchor is not None:
            if self.prepared_anchor is None:
                self.prepared_anchor = self.prepare_anchor(self.event.anchor)
            length += len(self.prepared_anchor)
        if isinstance(self.event, (ScalarEvent, CollectionStartEvent)) \
                and self.event.tag is not None:
            if self.prepared_tag is None:
                self.prepared_tag = self.prepare_tag(self.event.tag)
            length += len(self.prepared_tag)
        if isinstance(self.event, ScalarEvent):
            if self.analysis is None:
                self.analysis = self.analyze_scalar(self.event.value)
            length += len(self.analysis.scalar)
        return (length < 128 and (isinstance(self.event, AliasEvent)
            or (isinstance(self.event, ScalarEvent)
                    and not self.analysis.empty and not self.analysis.multiline)
            or self.check_empty_sequence() or self.check_empty_mapping()))
    # Anchor, Tag, and Scalar processors.
    def process_anchor(self, indicator):
        """Write the node's anchor ('&name') or alias ('*name'), if any.

        The prepared form may have been cached by check_simple_key().
        """
        if self.event.anchor is None:
            self.prepared_anchor = None
            return
        if self.prepared_anchor is None:
            self.prepared_anchor = self.prepare_anchor(self.event.anchor)
        if self.prepared_anchor:
            self.write_indicator(indicator+self.prepared_anchor, True)
        self.prepared_anchor = None
    def process_tag(self):
        """Write the node's tag unless it may be left implicit.

        For scalars, implicitness depends on the chosen style:
        implicit[0] applies to plain scalars, implicit[1] to non-plain ones.
        """
        tag = self.event.tag
        if isinstance(self.event, ScalarEvent):
            if self.style is None:
                self.style = self.choose_scalar_style()
            if ((not self.canonical or tag is None) and
                ((self.style == '' and self.event.implicit[0])
                    or (self.style != '' and self.event.implicit[1]))):
                self.prepared_tag = None
                return
            if self.event.implicit[0] and tag is None:
                # A plain-implicit scalar emitted non-plain gets a bare '!'.
                tag = u'!'
                self.prepared_tag = None
        else:
            if (not self.canonical or tag is None) and self.event.implicit:
                self.prepared_tag = None
                return
        if tag is None:
            raise EmitterError("tag is not specified")
        if self.prepared_tag is None:
            self.prepared_tag = self.prepare_tag(tag)
        if self.prepared_tag:
            self.write_indicator(self.prepared_tag, True)
        self.prepared_tag = None
    def choose_scalar_style(self):
        """Pick the output style for the current scalar event.

        Preference order: double quotes when requested or canonical, plain
        when legal, the requested block style outside flow context, single
        quotes, and double quotes as the universal fallback.
        """
        if self.analysis is None:
            self.analysis = self.analyze_scalar(self.event.value)
        if self.event.style == '"' or self.canonical:
            return '"'
        if not self.event.style and self.event.implicit[0]:
            if (not (self.simple_key_context and
                    (self.analysis.empty or self.analysis.multiline))
                and (self.flow_level and self.analysis.allow_flow_plain
                    or (not self.flow_level and self.analysis.allow_block_plain))):
                return ''
        if self.event.style and self.event.style in '|>':
            # Block styles are illegal in flow context and for simple keys.
            if (not self.flow_level and not self.simple_key_context
                    and self.analysis.allow_block):
                return self.event.style
        if not self.event.style or self.event.style == '\'':
            if (self.analysis.allow_single_quoted and
                    not (self.simple_key_context and self.analysis.multiline)):
                return '\''
        return '"'
    def process_scalar(self):
        """Write the scalar in the chosen style, then reset the per-node
        analysis/style caches."""
        if self.analysis is None:
            self.analysis = self.analyze_scalar(self.event.value)
        if self.style is None:
            self.style = self.choose_scalar_style()
        # Never split a scalar being used as a simple key across lines.
        split = (not self.simple_key_context)
        #if self.analysis.multiline and split \
        #        and (not self.style or self.style in '\'\"'):
        #    self.write_indent()
        if self.style == '"':
            self.write_double_quoted(self.analysis.scalar, split)
        elif self.style == '\'':
            self.write_single_quoted(self.analysis.scalar, split)
        elif self.style == '>':
            self.write_folded(self.analysis.scalar)
        elif self.style == '|':
            self.write_literal(self.analysis.scalar)
        else:
            self.write_plain(self.analysis.scalar, split)
        self.analysis = None
        self.style = None
# Analyzers.
def prepare_version(self, version):
major, minor = version
if major != 1:
raise EmitterError("unsupported YAML version: %d.%d" % (major, minor))
return u'%d.%d' % (major, minor)
def prepare_tag_handle(self, handle):
if not handle:
raise EmitterError("tag handle must not be empty")
if handle[0] != u'!' or handle[-1] != u'!':
raise EmitterError("tag handle must start and end with '!': %r"
% (handle.encode('utf-8')))
for ch in handle[1:-1]:
if not (u'0' <= ch <= u'9' or u'A' <= ch <= u'Z' or u'a' <= ch <= u'z' \
or ch in u'-_'):
raise EmitterError("invalid character %r in the tag handle: %r"
% (ch.encode('utf-8'), handle.encode('utf-8')))
return handle
def prepare_tag_prefix(self, prefix):
if not prefix:
raise EmitterError("tag prefix must not be empty")
chunks = []
start = end = 0
if prefix[0] == u'!':
end = 1
while end < len(prefix):
ch = prefix[end]
if u'0' <= ch <= u'9' or u'A' <= ch <= u'Z' or u'a' <= ch <= u'z' \
or ch in u'-;/?!:@&=+$,_.~*\'()[]':
end += 1
else:
if start < end:
chunks.append(prefix[start:end])
start = end = end+1
data = ch.encode('utf-8')
for ch in data:
chunks.append(u'%%%02X' % ord(ch))
if start < end:
chunks.append(prefix[start:end])
return u''.join(chunks)
def prepare_tag(self, tag):
if not tag:
raise EmitterError("tag must not be empty")
if tag == u'!':
return tag
handle = None
suffix = tag
prefixes = self.tag_prefixes.keys()
prefixes.sort()
for prefix in prefixes:
if tag.startswith(prefix) \
and (prefix == u'!' or len(prefix) < len(tag)):
handle = self.tag_prefixes[prefix]
suffix = tag[len(prefix):]
chunks = []
start = end = 0
while end < len(suffix):
ch = suffix[end]
if u'0' <= ch <= u'9' or u'A' <= ch <= u'Z' or u'a' <= ch <= u'z' \
or ch in u'-;/?:@&=+$,_.~*\'()[]' \
or (ch == u'!' and handle != u'!'):
end += 1
else:
if start < end:
chunks.append(suffix[start:end])
start = end = end+1
data = ch.encode('utf-8')
for ch in data:
chunks.append(u'%%%02X' % ord(ch))
if start < end:
chunks.append(suffix[start:end])
suffix_text = u''.join(chunks)
if handle:
return u'%s%s' % (handle, suffix_text)
else:
return u'!<%s>' % suffix_text
def prepare_anchor(self, anchor):
if not anchor:
raise EmitterError("anchor must not be empty")
for ch in anchor:
if not (u'0' <= ch <= u'9' or u'A' <= ch <= u'Z' or u'a' <= ch <= u'z' \
or ch in u'-_'):
raise EmitterError("invalid character %r in the anchor: %r"
% (ch.encode('utf-8'), anchor.encode('utf-8')))
return anchor
    def analyze_scalar(self, scalar):
        """Scan a scalar value once and return a ScalarAnalysis describing
        which output styles (flow/block plain, single/double quoted, block)
        may legally represent it.

        The scan tracks indicator characters, line breaks, special
        characters, and significant whitespace combinations, then derives
        the allow_* flags from them.
        """
        # Empty scalar is a special case.
        if not scalar:
            return ScalarAnalysis(scalar=scalar, empty=True, multiline=False,
                    allow_flow_plain=False, allow_block_plain=True,
                    allow_single_quoted=True, allow_double_quoted=True,
                    allow_block=False)
        # Indicators and special characters.
        block_indicators = False
        flow_indicators = False
        line_breaks = False
        special_characters = False
        # Important whitespace combinations.
        leading_space = False
        leading_break = False
        trailing_space = False
        trailing_break = False
        break_space = False
        space_break = False
        # Check document indicators.
        if scalar.startswith(u'---') or scalar.startswith(u'...'):
            block_indicators = True
            flow_indicators = True
        # First character or preceded by a whitespace.
        preceeded_by_whitespace = True
        # Last character or followed by a whitespace.
        followed_by_whitespace = (len(scalar) == 1 or
                scalar[1] in u'\0 \t\r\n\x85\u2028\u2029')
        # The previous character is a space.
        previous_space = False
        # The previous character is a break.
        previous_break = False
        index = 0
        while index < len(scalar):
            ch = scalar[index]
            # Check for indicators.
            if index == 0:
                # Leading indicators are special characters.
                if ch in u'#,[]{}&*!|>\'\"%@`':
                    flow_indicators = True
                    block_indicators = True
                if ch in u'?:':
                    flow_indicators = True
                    if followed_by_whitespace:
                        block_indicators = True
                if ch == u'-' and followed_by_whitespace:
                    flow_indicators = True
                    block_indicators = True
            else:
                # Some indicators cannot appear within a scalar as well.
                if ch in u',?[]{}':
                    flow_indicators = True
                if ch == u':':
                    flow_indicators = True
                    if followed_by_whitespace:
                        block_indicators = True
                if ch == u'#' and preceeded_by_whitespace:
                    flow_indicators = True
                    block_indicators = True
            # Check for line breaks, special, and unicode characters.
            if ch in u'\n\x85\u2028\u2029':
                line_breaks = True
            if not (ch == u'\n' or u'\x20' <= ch <= u'\x7E'):
                if (ch == u'\x85' or u'\xA0' <= ch <= u'\uD7FF'
                        or u'\uE000' <= ch <= u'\uFFFD') and ch != u'\uFEFF':
                    # NOTE(review): unicode_characters is assigned here but
                    # never read anywhere in this method — looks vestigial.
                    unicode_characters = True
                    if not self.allow_unicode:
                        special_characters = True
                else:
                    special_characters = True
            # Detect important whitespace combinations.
            if ch == u' ':
                if index == 0:
                    leading_space = True
                if index == len(scalar)-1:
                    trailing_space = True
                if previous_break:
                    break_space = True
                previous_space = True
                previous_break = False
            elif ch in u'\n\x85\u2028\u2029':
                if index == 0:
                    leading_break = True
                if index == len(scalar)-1:
                    trailing_break = True
                if previous_space:
                    space_break = True
                previous_space = False
                previous_break = True
            else:
                previous_space = False
                previous_break = False
            # Prepare for the next character.
            index += 1
            preceeded_by_whitespace = (ch in u'\0 \t\r\n\x85\u2028\u2029')
            followed_by_whitespace = (index+1 >= len(scalar) or
                    scalar[index+1] in u'\0 \t\r\n\x85\u2028\u2029')
        # Let's decide what styles are allowed.
        allow_flow_plain = True
        allow_block_plain = True
        allow_single_quoted = True
        allow_double_quoted = True
        allow_block = True
        # Leading and trailing whitespaces are bad for plain scalars.
        if (leading_space or leading_break
                or trailing_space or trailing_break):
            allow_flow_plain = allow_block_plain = False
        # We do not permit trailing spaces for block scalars.
        if trailing_space:
            allow_block = False
        # Spaces at the beginning of a new line are only acceptable for block
        # scalars.
        if break_space:
            allow_flow_plain = allow_block_plain = allow_single_quoted = False
        # Spaces followed by breaks, as well as special character are only
        # allowed for double quoted scalars.
        if space_break or special_characters:
            allow_flow_plain = allow_block_plain = \
            allow_single_quoted = allow_block = False
        # Although the plain scalar writer supports breaks, we never emit
        # multiline plain scalars.
        if line_breaks:
            allow_flow_plain = allow_block_plain = False
        # Flow indicators are forbidden for flow plain scalars.
        if flow_indicators:
            allow_flow_plain = False
        # Block indicators are forbidden for block plain scalars.
        if block_indicators:
            allow_block_plain = False
        return ScalarAnalysis(scalar=scalar,
                empty=False, multiline=line_breaks,
                allow_flow_plain=allow_flow_plain,
                allow_block_plain=allow_block_plain,
                allow_single_quoted=allow_single_quoted,
                allow_double_quoted=allow_double_quoted,
                allow_block=allow_block)
    # Writers.
    def flush_stream(self):
        """Flush the underlying stream if it supports flushing."""
        if hasattr(self.stream, 'flush'):
            self.stream.flush()
    def write_stream_start(self):
        """Write a BOM when emitting encoded UTF-16 output."""
        # Write BOM if needed.
        if self.encoding and self.encoding.startswith('utf-16'):
            self.stream.write(u'\uFEFF'.encode(self.encoding))
    def write_stream_end(self):
        self.flush_stream()
    def write_indicator(self, indicator, need_whitespace,
            whitespace=False, indention=False):
        """Write an indicator token, inserting a separating space when the
        previous output did not end in whitespace and one is required.

        `whitespace`/`indention` record the state the output is left in.
        """
        if self.whitespace or not need_whitespace:
            data = indicator
        else:
            data = u' '+indicator
        self.whitespace = whitespace
        self.indention = self.indention and indention
        self.column += len(data)
        self.open_ended = False
        if self.encoding:
            data = data.encode(self.encoding)
        self.stream.write(data)
    def write_indent(self):
        """Break the line if needed and pad with spaces to the current indent."""
        indent = self.indent or 0
        if not self.indention or self.column > indent \
                or (self.column == indent and not self.whitespace):
            self.write_line_break()
        if self.column < indent:
            self.whitespace = True
            data = u' '*(indent-self.column)
            self.column = indent
            if self.encoding:
                data = data.encode(self.encoding)
            self.stream.write(data)
    def write_line_break(self, data=None):
        """Write a line break (the configured one by default) and reset the
        column/whitespace bookkeeping."""
        if data is None:
            data = self.best_line_break
        self.whitespace = True
        self.indention = True
        self.line += 1
        self.column = 0
        if self.encoding:
            data = data.encode(self.encoding)
        self.stream.write(data)
    def write_version_directive(self, version_text):
        """Write a '%YAML <version>' directive line."""
        data = u'%%YAML %s' % version_text
        if self.encoding:
            data = data.encode(self.encoding)
        self.stream.write(data)
        self.write_line_break()
    def write_tag_directive(self, handle_text, prefix_text):
        """Write a '%TAG <handle> <prefix>' directive line."""
        data = u'%%TAG %s %s' % (handle_text, prefix_text)
        if self.encoding:
            data = data.encode(self.encoding)
        self.stream.write(data)
        self.write_line_break()
    # Scalar streams.
    def write_single_quoted(self, text, split=True):
        """Write a single-quoted scalar, doubling embedded quotes and
        optionally folding long lines at space runs when `split` is true."""
        self.write_indicator(u'\'', True)
        spaces = False
        breaks = False
        start = end = 0
        # One pass over the text, flushing runs of spaces, breaks, and
        # ordinary characters separately.
        while end <= len(text):
            ch = None
            if end < len(text):
                ch = text[end]
            if spaces:
                if ch is None or ch != u' ':
                    if start+1 == end and self.column > self.best_width and split \
                            and start != 0 and end != len(text):
                        # A single interior space may be folded to a break.
                        self.write_indent()
                    else:
                        data = text[start:end]
                        self.column += len(data)
                        if self.encoding:
                            data = data.encode(self.encoding)
                        self.stream.write(data)
                    start = end
            elif breaks:
                if ch is None or ch not in u'\n\x85\u2028\u2029':
                    if text[start] == u'\n':
                        self.write_line_break()
                    for br in text[start:end]:
                        if br == u'\n':
                            self.write_line_break()
                        else:
                            self.write_line_break(br)
                    self.write_indent()
                    start = end
            else:
                if ch is None or ch in u' \n\x85\u2028\u2029' or ch == u'\'':
                    if start < end:
                        data = text[start:end]
                        self.column += len(data)
                        if self.encoding:
                            data = data.encode(self.encoding)
                        self.stream.write(data)
                        start = end
            if ch == u'\'':
                # Escape a quote by doubling it.
                data = u'\'\''
                self.column += 2
                if self.encoding:
                    data = data.encode(self.encoding)
                self.stream.write(data)
                start = end + 1
            if ch is not None:
                spaces = (ch == u' ')
                breaks = (ch in u'\n\x85\u2028\u2029')
            end += 1
        self.write_indicator(u'\'', False)
ESCAPE_REPLACEMENTS = {
u'\0': u'0',
u'\x07': u'a',
u'\x08': u'b',
u'\x09': u't',
u'\x0A': u'n',
u'\x0B': u'v',
u'\x0C': u'f',
u'\x0D': u'r',
u'\x1B': u'e',
u'\"': u'\"',
u'\\': u'\\',
u'\x85': u'N',
u'\xA0': u'_',
u'\u2028': u'L',
u'\u2029': u'P',
}
    def write_double_quoted(self, text, split=True):
        """Write a double-quoted scalar, escaping characters that have no
        printable representation and folding long lines with a trailing
        backslash when `split` is true."""
        self.write_indicator(u'"', True)
        start = end = 0
        while end <= len(text):
            ch = None
            if end < len(text):
                ch = text[end]
            # Flush the pending run when hitting a character that must be
            # escaped (or the end of the text).
            if ch is None or ch in u'"\\\x85\u2028\u2029\uFEFF' \
                    or not (u'\x20' <= ch <= u'\x7E'
                        or (self.allow_unicode
                            and (u'\xA0' <= ch <= u'\uD7FF'
                                or u'\uE000' <= ch <= u'\uFFFD'))):
                if start < end:
                    data = text[start:end]
                    self.column += len(data)
                    if self.encoding:
                        data = data.encode(self.encoding)
                    self.stream.write(data)
                    start = end
                if ch is not None:
                    if ch in self.ESCAPE_REPLACEMENTS:
                        data = u'\\'+self.ESCAPE_REPLACEMENTS[ch]
                    elif ch <= u'\xFF':
                        data = u'\\x%02X' % ord(ch)
                    elif ch <= u'\uFFFF':
                        data = u'\\u%04X' % ord(ch)
                    else:
                        data = u'\\U%08X' % ord(ch)
                    self.column += len(data)
                    if self.encoding:
                        data = data.encode(self.encoding)
                    self.stream.write(data)
                    start = end+1
            # Fold an over-long line: emit a trailing '\' and continue on the
            # next line, escaping a leading space on the continuation.
            if 0 < end < len(text)-1 and (ch == u' ' or start >= end) \
                    and self.column+(end-start) > self.best_width and split:
                data = text[start:end]+u'\\'
                if start < end:
                    start = end
                self.column += len(data)
                if self.encoding:
                    data = data.encode(self.encoding)
                self.stream.write(data)
                self.write_indent()
                self.whitespace = False
                self.indention = False
                if text[start] == u' ':
                    data = u'\\'
                    self.column += len(data)
                    if self.encoding:
                        data = data.encode(self.encoding)
                    self.stream.write(data)
            end += 1
        self.write_indicator(u'"', False)
def determine_block_hints(self, text):
hints = u''
if text:
if text[0] in u' \n\x85\u2028\u2029':
hints += unicode(self.best_indent)
if text[-1] not in u'\n\x85\u2028\u2029':
hints += u'-'
elif len(text) == 1 or text[-2] in u'\n\x85\u2028\u2029':
hints += u'+'
return hints
    def write_folded(self, text):
        """Write a folded ('>') block scalar: line breaks between words are
        folded; breaks adjacent to more-indented or blank lines are kept."""
        hints = self.determine_block_hints(text)
        self.write_indicator(u'>'+hints, True)
        if hints[-1:] == u'+':
            # Keep-chomping may leave the document without a final break.
            self.open_ended = True
        self.write_line_break()
        leading_space = True
        spaces = False
        breaks = True
        start = end = 0
        while end <= len(text):
            ch = None
            if end < len(text):
                ch = text[end]
            if breaks:
                if ch is None or ch not in u'\n\x85\u2028\u2029':
                    # A folded break between non-space lines needs an extra
                    # break so it survives folding on input.
                    if not leading_space and ch is not None and ch != u' ' \
                            and text[start] == u'\n':
                        self.write_line_break()
                    leading_space = (ch == u' ')
                    for br in text[start:end]:
                        if br == u'\n':
                            self.write_line_break()
                        else:
                            self.write_line_break(br)
                    if ch is not None:
                        self.write_indent()
                    start = end
            elif spaces:
                if ch != u' ':
                    if start+1 == end and self.column > self.best_width:
                        self.write_indent()
                    else:
                        data = text[start:end]
                        self.column += len(data)
                        if self.encoding:
                            data = data.encode(self.encoding)
                        self.stream.write(data)
                    start = end
            else:
                if ch is None or ch in u' \n\x85\u2028\u2029':
                    data = text[start:end]
                    self.column += len(data)
                    if self.encoding:
                        data = data.encode(self.encoding)
                    self.stream.write(data)
                    if ch is None:
                        self.write_line_break()
                    start = end
            if ch is not None:
                breaks = (ch in u'\n\x85\u2028\u2029')
                spaces = (ch == u' ')
            end += 1
    def write_literal(self, text):
        """Write a literal ('|') block scalar: content is reproduced verbatim,
        with every line break preserved."""
        hints = self.determine_block_hints(text)
        self.write_indicator(u'|'+hints, True)
        if hints[-1:] == u'+':
            # Keep-chomping may leave the document without a final break.
            self.open_ended = True
        self.write_line_break()
        breaks = True
        start = end = 0
        while end <= len(text):
            ch = None
            if end < len(text):
                ch = text[end]
            if breaks:
                if ch is None or ch not in u'\n\x85\u2028\u2029':
                    for br in text[start:end]:
                        if br == u'\n':
                            self.write_line_break()
                        else:
                            self.write_line_break(br)
                    if ch is not None:
                        self.write_indent()
                    start = end
            else:
                if ch is None or ch in u'\n\x85\u2028\u2029':
                    data = text[start:end]
                    if self.encoding:
                        data = data.encode(self.encoding)
                    self.stream.write(data)
                    if ch is None:
                        self.write_line_break()
                    start = end
            if ch is not None:
                breaks = (ch in u'\n\x85\u2028\u2029')
            end += 1
    def write_plain(self, text, split=True):
        """Write a plain (unquoted) scalar, folding long lines at space runs
        when `split` is true."""
        if self.root_context:
            # A plain scalar at the document root may need '...' after it.
            self.open_ended = True
        if not text:
            return
        if not self.whitespace:
            # Separate from the previous token with a single space.
            data = u' '
            self.column += len(data)
            if self.encoding:
                data = data.encode(self.encoding)
            self.stream.write(data)
        self.whitespace = False
        self.indention = False
        spaces = False
        breaks = False
        start = end = 0
        while end <= len(text):
            ch = None
            if end < len(text):
                ch = text[end]
            if spaces:
                if ch != u' ':
                    if start+1 == end and self.column > self.best_width and split:
                        # Fold a single interior space into a line break.
                        self.write_indent()
                        self.whitespace = False
                        self.indention = False
                    else:
                        data = text[start:end]
                        self.column += len(data)
                        if self.encoding:
                            data = data.encode(self.encoding)
                        self.stream.write(data)
                    start = end
            elif breaks:
                if ch not in u'\n\x85\u2028\u2029':
                    if text[start] == u'\n':
                        self.write_line_break()
                    for br in text[start:end]:
                        if br == u'\n':
                            self.write_line_break()
                        else:
                            self.write_line_break(br)
                    self.write_indent()
                    self.whitespace = False
                    self.indention = False
                    start = end
            else:
                if ch is None or ch in u' \n\x85\u2028\u2029':
                    data = text[start:end]
                    self.column += len(data)
                    if self.encoding:
                        data = data.encode(self.encoding)
                    self.stream.write(data)
                    start = end
            if ch is not None:
                spaces = (ch == u' ')
                breaks = (ch in u'\n\x85\u2028\u2029')
            end += 1
| 37.949167 | 85 | 0.527737 |
__all__ = ['Emitter', 'EmitterError']
from .error import YAMLError
from .events import *
class EmitterError(YAMLError):
    """Raised for invalid or out-of-order events and malformed anchors/tags."""
    pass
class ScalarAnalysis(object):
    """Result of Emitter.analyze_scalar(): records which output styles
    (plain/quoted/block) may legally represent a scalar value."""
    def __init__(self, scalar, empty, multiline,
            allow_flow_plain, allow_block_plain,
            allow_single_quoted, allow_double_quoted,
            allow_block):
        # The analyzed text itself.
        self.scalar = scalar
        # True for the empty string; True if the text contains line breaks.
        self.empty = empty
        self.multiline = multiline
        # Per-style legality flags derived from the scan.
        self.allow_flow_plain = allow_flow_plain
        self.allow_block_plain = allow_block_plain
        self.allow_single_quoted = allow_single_quoted
        self.allow_double_quoted = allow_double_quoted
        self.allow_block = allow_block
class Emitter(object):
DEFAULT_TAG_PREFIXES = {
u'!' : u'!',
u'tag:yaml.org,2002:' : u'!!',
}
    def __init__(self, stream, canonical=None, indent=None, width=None,
            allow_unicode=None, line_break=None):
        """Create an emitter writing to `stream`.

        canonical forces canonical output; indent (2..9) and width tune
        formatting; allow_unicode permits non-ASCII characters unescaped;
        line_break selects u'\\r', u'\\n' or u'\\r\\n'.
        """
        # The stream should have the methods `write` and possibly `flush`.
        self.stream = stream
        # Encoding is set from STREAM-START when the stream is binary.
        self.encoding = None
        # Emitter is a state machine with a stack of states.
        self.states = []
        self.state = self.expect_stream_start
        # Queue of accepted events; the current event being processed.
        self.events = []
        self.event = None
        # The stack of previous indents and the current indentation level.
        self.indents = []
        self.indent = None
        # Flow-collection nesting level.
        self.flow_level = 0
        # Contexts in which a node may be emitted.
        self.root_context = False
        self.sequence_context = False
        self.mapping_context = False
        self.simple_key_context = False
        # Position and whitespace bookkeeping for the output.
        self.line = 0
        self.column = 0
        self.whitespace = True
        self.indention = True
        # Whether the document requires an explicit '...' terminator.
        self.open_ended = False
        # Formatting details, with validated defaults.
        self.canonical = canonical
        self.allow_unicode = allow_unicode
        self.best_indent = 2
        if indent and 1 < indent < 10:
            self.best_indent = indent
        self.best_width = 80
        if width and width > self.best_indent*2:
            self.best_width = width
        self.best_line_break = u'\n'
        if line_break in [u'\r', u'\n', u'\r\n']:
            self.best_line_break = line_break
        # Tag prefixes in effect (set at document start).
        self.tag_prefixes = None
        # Prepared anchor/tag and scalar analysis caches for the current node.
        self.prepared_anchor = None
        self.prepared_tag = None
        self.analysis = None
        self.style = None
    def dispose(self):
        """Drop state-machine references (helps break reference cycles)."""
        self.states = []
        self.state = None
    def emit(self, event):
        """Queue an event and process as many queued events as possible."""
        self.events.append(event)
        while not self.need_more_events():
            self.event = self.events.pop(0)
            self.state()
            self.event = None
    def need_more_events(self):
        """True while more events must arrive before the head of the queue
        can be processed (collection openers need lookahead for the
        empty-collection checks)."""
        if not self.events:
            return True
        event = self.events[0]
        if isinstance(event, DocumentStartEvent):
            return self.need_events(1)
        elif isinstance(event, SequenceStartEvent):
            return self.need_events(2)
        elif isinstance(event, MappingStartEvent):
            return self.need_events(3)
        else:
            return False
    def need_events(self, count):
        """True unless `count` events follow the head, or the head's
        collection/document is already complete within the queue."""
        level = 0
        for event in self.events[1:]:
            if isinstance(event, (DocumentStartEvent, CollectionStartEvent)):
                level += 1
            elif isinstance(event, (DocumentEndEvent, CollectionEndEvent)):
                level -= 1
            elif isinstance(event, StreamEndEvent):
                level = -1
            if level < 0:
                return False
        return (len(self.events) < count+1)
def increase_indent(self, flow=False, indentless=False):
self.indents.append(self.indent)
if self.indent is None:
if flow:
self.indent = self.best_indent
else:
self.indent = 0
elif not indentless:
self.indent += self.best_indent
    def expect_stream_start(self):
        """Initial state: consume StreamStartEvent and write the BOM if any."""
        if isinstance(self.event, StreamStartEvent):
            if self.event.encoding and not getattr(self.stream, 'encoding', None):
                # Encode ourselves only when the stream does not already.
                self.encoding = self.event.encoding
            self.write_stream_start()
            self.state = self.expect_first_document_start
        else:
            raise EmitterError("expected StreamStartEvent, but got %s"
                    % self.event)
    def expect_nothing(self):
        """Terminal state: any further event is an error."""
        raise EmitterError("expected nothing, but got %s" % self.event)
    def expect_first_document_start(self):
        return self.expect_document_start(first=True)
def expect_document_start(self, first=False):
if isinstance(self.event, DocumentStartEvent):
if (self.event.version or self.event.tags) and self.open_ended:
self.write_indicator(u'...', True)
self.write_indent()
if self.event.version:
version_text = self.prepare_version(self.event.version)
self.write_version_directive(version_text)
self.tag_prefixes = self.DEFAULT_TAG_PREFIXES.copy()
if self.event.tags:
handles = self.event.tags.keys()
handles.sort()
for handle in handles:
prefix = self.event.tags[handle]
self.tag_prefixes[prefix] = handle
handle_text = self.prepare_tag_handle(handle)
prefix_text = self.prepare_tag_prefix(prefix)
self.write_tag_directive(handle_text, prefix_text)
implicit = (first and not self.event.explicit and not self.canonical
and not self.event.version and not self.event.tags
and not self.check_empty_document())
if not implicit:
self.write_indent()
self.write_indicator(u'---', True)
if self.canonical:
self.write_indent()
self.state = self.expect_document_root
elif isinstance(self.event, StreamEndEvent):
if self.open_ended:
self.write_indicator(u'...', True)
self.write_indent()
self.write_stream_end()
self.state = self.expect_nothing
else:
raise EmitterError("expected DocumentStartEvent, but got %s"
% self.event)
    def expect_document_end(self):
        """Emit the document end: '...' when explicit, then flush the stream."""
        if isinstance(self.event, DocumentEndEvent):
            self.write_indent()
            if self.event.explicit:
                self.write_indicator(u'...', True)
                self.write_indent()
            self.flush_stream()
            self.state = self.expect_document_start
        else:
            raise EmitterError("expected DocumentEndEvent, but got %s"
                    % self.event)
    def expect_document_root(self):
        self.states.append(self.expect_document_end)
        self.expect_node(root=True)
    def expect_node(self, root=False, sequence=False, mapping=False,
            simple_key=False):
        """Dispatch on the current node event (alias, scalar, or collection),
        recording the surrounding context flags used by style decisions."""
        self.root_context = root
        self.sequence_context = sequence
        self.mapping_context = mapping
        self.simple_key_context = simple_key
        if isinstance(self.event, AliasEvent):
            self.expect_alias()
        elif isinstance(self.event, (ScalarEvent, CollectionStartEvent)):
            self.process_anchor(u'&')
            self.process_tag()
            if isinstance(self.event, ScalarEvent):
                self.expect_scalar()
            elif isinstance(self.event, SequenceStartEvent):
                # Flow style inside flow context, in canonical mode, when
                # requested, or for an empty collection; block otherwise.
                if self.flow_level or self.canonical or self.event.flow_style \
                        or self.check_empty_sequence():
                    self.expect_flow_sequence()
                else:
                    self.expect_block_sequence()
            elif isinstance(self.event, MappingStartEvent):
                if self.flow_level or self.canonical or self.event.flow_style \
                        or self.check_empty_mapping():
                    self.expect_flow_mapping()
                else:
                    self.expect_block_mapping()
        else:
            raise EmitterError("expected NodeEvent, but got %s" % self.event)
    def expect_alias(self):
        """Emit '*anchor' for an alias node."""
        if self.event.anchor is None:
            raise EmitterError("anchor is not specified for alias")
        self.process_anchor(u'*')
        self.state = self.states.pop()
    def expect_scalar(self):
        """Emit a scalar at a temporary one-level-deeper indent."""
        self.increase_indent(flow=True)
        self.process_scalar()
        self.indent = self.indents.pop()
        self.state = self.states.pop()
    # Flow sequence handlers.
    def expect_flow_sequence(self):
        self.write_indicator(u'[', True, whitespace=True)
        self.flow_level += 1
        self.increase_indent(flow=True)
        self.state = self.expect_first_flow_sequence_item
    def expect_first_flow_sequence_item(self):
        """Emit the first item, or close immediately for an empty sequence."""
        if isinstance(self.event, SequenceEndEvent):
            self.indent = self.indents.pop()
            self.flow_level -= 1
            self.write_indicator(u']', False)
            self.state = self.states.pop()
        else:
            if self.canonical or self.column > self.best_width:
                self.write_indent()
            self.states.append(self.expect_flow_sequence_item)
            self.expect_node(sequence=True)
    def expect_flow_sequence_item(self):
        """Emit ', item', or ']' (with a canonical trailing comma) on end."""
        if isinstance(self.event, SequenceEndEvent):
            self.indent = self.indents.pop()
            self.flow_level -= 1
            if self.canonical:
                self.write_indicator(u',', False)
                self.write_indent()
            self.write_indicator(u']', False)
            self.state = self.states.pop()
        else:
            self.write_indicator(u',', False)
            if self.canonical or self.column > self.best_width:
                self.write_indent()
            self.states.append(self.expect_flow_sequence_item)
            self.expect_node(sequence=True)
    # Flow mapping handlers.
    def expect_flow_mapping(self):
        self.write_indicator(u'{', True, whitespace=True)
        self.flow_level += 1
        self.increase_indent(flow=True)
        self.state = self.expect_first_flow_mapping_key
    def expect_first_flow_mapping_key(self):
        """Emit the first key, or close immediately for an empty mapping."""
        if isinstance(self.event, MappingEndEvent):
            self.indent = self.indents.pop()
            self.flow_level -= 1
            self.write_indicator(u'}', False)
            self.state = self.states.pop()
        else:
            if self.canonical or self.column > self.best_width:
                self.write_indent()
            if not self.canonical and self.check_simple_key():
                self.states.append(self.expect_flow_mapping_simple_value)
                self.expect_node(mapping=True, simple_key=True)
            else:
                # Complex keys are introduced with an explicit '?'.
                self.write_indicator(u'?', True)
                self.states.append(self.expect_flow_mapping_value)
                self.expect_node(mapping=True)
    def expect_flow_mapping_key(self):
        """Emit a non-first key of a flow mapping, or close it on MappingEndEvent."""
        if isinstance(self.event, MappingEndEvent):
            self.indent = self.indents.pop()
            self.flow_level -= 1
            if self.canonical:
                # Canonical output writes a trailing comma before '}'.
                self.write_indicator(u',', False)
                self.write_indent()
            self.write_indicator(u'}', False)
            self.state = self.states.pop()
        else:
            self.write_indicator(u',', False)
            if self.canonical or self.column > self.best_width:
                self.write_indent()
            if not self.canonical and self.check_simple_key():
                self.states.append(self.expect_flow_mapping_simple_value)
                self.expect_node(mapping=True, simple_key=True)
            else:
                # Complex keys are introduced with an explicit '?'.
                self.write_indicator(u'?', True)
                self.states.append(self.expect_flow_mapping_value)
                self.expect_node(mapping=True)
    def expect_flow_mapping_simple_value(self):
        """Emit ':' and the value after a simple (short, single-line) key."""
        self.write_indicator(u':', False)
        self.states.append(self.expect_flow_mapping_key)
        self.expect_node(mapping=True)
    def expect_flow_mapping_value(self):
        """Emit ':' and the value after an explicit '?' key."""
        if self.canonical or self.column > self.best_width:
            self.write_indent()
        self.write_indicator(u':', True)
        self.states.append(self.expect_flow_mapping_key)
        self.expect_node(mapping=True)
    # Block sequence handlers.
    def expect_block_sequence(self):
        """Enter a block sequence; inside a mapping value the sequence may
        stay at the parent indent (an 'indentless' sequence)."""
        indentless = (self.mapping_context and not self.indention)
        self.increase_indent(flow=False, indentless=indentless)
        self.state = self.expect_first_block_sequence_item
    def expect_first_block_sequence_item(self):
        return self.expect_block_sequence_item(first=True)
    def expect_block_sequence_item(self, first=False):
        """Emit one '- item' entry, or pop state on SequenceEndEvent."""
        if not first and isinstance(self.event, SequenceEndEvent):
            self.indent = self.indents.pop()
            self.state = self.states.pop()
        else:
            self.write_indent()
            self.write_indicator(u'-', True, indention=True)
            self.states.append(self.expect_block_sequence_item)
            self.expect_node(sequence=True)
    # Block mapping handlers.
    def expect_block_mapping(self):
        self.increase_indent(flow=False)
        self.state = self.expect_first_block_mapping_key
    def expect_first_block_mapping_key(self):
        return self.expect_block_mapping_key(first=True)
    def expect_block_mapping_key(self, first=False):
        """Emit a block mapping key (simple or explicit '?' form), or pop
        state on MappingEndEvent."""
        if not first and isinstance(self.event, MappingEndEvent):
            self.indent = self.indents.pop()
            self.state = self.states.pop()
        else:
            self.write_indent()
            if self.check_simple_key():
                self.states.append(self.expect_block_mapping_simple_value)
                self.expect_node(mapping=True, simple_key=True)
            else:
                # Long or multi-line keys need the explicit '?' indicator.
                self.write_indicator(u'?', True, indention=True)
                self.states.append(self.expect_block_mapping_value)
                self.expect_node(mapping=True)
    def expect_block_mapping_simple_value(self):
        """Emit ': value' on the same line as a simple key."""
        self.write_indicator(u':', False)
        self.states.append(self.expect_block_mapping_key)
        self.expect_node(mapping=True)
    def expect_block_mapping_value(self):
        """Emit ':' on its own line after an explicit '?' key."""
        self.write_indent()
        self.write_indicator(u':', True, indention=True)
        self.states.append(self.expect_block_mapping_key)
        self.expect_node(mapping=True)
    # Checkers.
    def check_empty_sequence(self):
        """True if the current event opens a sequence that ends immediately."""
        return (isinstance(self.event, SequenceStartEvent) and self.events
                and isinstance(self.events[0], SequenceEndEvent))
    def check_empty_mapping(self):
        """True if the current event opens a mapping that ends immediately."""
        return (isinstance(self.event, MappingStartEvent) and self.events
                and isinstance(self.events[0], MappingEndEvent))
    def check_empty_document(self):
        """True if the next document is a single empty implicit plain scalar."""
        if not isinstance(self.event, DocumentStartEvent) or not self.events:
            return False
        event = self.events[0]
        return (isinstance(event, ScalarEvent) and event.anchor is None
                and event.tag is None and event.implicit and event.value == u'')
    def check_simple_key(self):
        """Decide whether the current node can be a simple key: under 128
        characters (anchor + tag + scalar) and not empty or multi-line.

        Caches the prepared anchor/tag/analysis for later process_* calls.
        """
        length = 0
        if isinstance(self.event, NodeEvent) and self.event.anchor is not None:
            if self.prepared_anchor is None:
                self.prepared_anchor = self.prepare_anchor(self.event.anchor)
            length += len(self.prepared_anchor)
        if isinstance(self.event, (ScalarEvent, CollectionStartEvent)) \
                and self.event.tag is not None:
            if self.prepared_tag is None:
                self.prepared_tag = self.prepare_tag(self.event.tag)
            length += len(self.prepared_tag)
        if isinstance(self.event, ScalarEvent):
            if self.analysis is None:
                self.analysis = self.analyze_scalar(self.event.value)
            length += len(self.analysis.scalar)
        return (length < 128 and (isinstance(self.event, AliasEvent)
            or (isinstance(self.event, ScalarEvent)
                    and not self.analysis.empty and not self.analysis.multiline)
            or self.check_empty_sequence() or self.check_empty_mapping()))
def process_anchor(self, indicator):
if self.event.anchor is None:
self.prepared_anchor = None
return
if self.prepared_anchor is None:
self.prepared_anchor = self.prepare_anchor(self.event.anchor)
if self.prepared_anchor:
self.write_indicator(indicator+self.prepared_anchor, True)
self.prepared_anchor = None
    def process_tag(self):
        # Write the node's tag, unless the tag can be left implicit.
        # For scalars the decision depends on the chosen style: a plain
        # scalar ('') may omit the tag if it resolves implicitly in plain
        # context (implicit[0]); a quoted/block scalar may omit it if it
        # resolves implicitly in non-plain context (implicit[1]).
        tag = self.event.tag
        if isinstance(self.event, ScalarEvent):
            if self.style is None:
                self.style = self.choose_scalar_style()
            if ((not self.canonical or tag is None) and
                ((self.style == '' and self.event.implicit[0])
                    or (self.style != '' and self.event.implicit[1]))):
                self.prepared_tag = None
                return
            # A non-plain style for a plain-implicit scalar forces the
            # non-specific tag '!'.
            if self.event.implicit[0] and tag is None:
                tag = u'!'
                self.prepared_tag = None
        else:
            # Collections: implicit tags are simply skipped (unless
            # canonical output was requested).
            if (not self.canonical or tag is None) and self.event.implicit:
                self.prepared_tag = None
                return
        if tag is None:
            raise EmitterError("tag is not specified")
        if self.prepared_tag is None:
            self.prepared_tag = self.prepare_tag(tag)
        if self.prepared_tag:
            self.write_indicator(self.prepared_tag, True)
        self.prepared_tag = None
    def choose_scalar_style(self):
        # Pick the output style for the current scalar event, honoring the
        # requested style when the scalar analysis allows it and falling
        # back to double quotes (which can represent anything) otherwise.
        if self.analysis is None:
            self.analysis = self.analyze_scalar(self.event.value)
        # Canonical output always double-quotes scalars.
        if self.event.style == '"' or self.canonical:
            return '"'
        # Plain style ('') only if the value resolves implicitly and the
        # analysis permits plain in the current (flow/block) context; a
        # simple key additionally rules out empty or multiline values.
        if not self.event.style and self.event.implicit[0]:
            if (not (self.simple_key_context and
                    (self.analysis.empty or self.analysis.multiline))
                and (self.flow_level and self.analysis.allow_flow_plain
                    or (not self.flow_level and self.analysis.allow_block_plain))):
                return ''
        # Block styles ('|' or '>') are never valid in flow context or as
        # simple keys.
        if self.event.style and self.event.style in '|>':
            if (not self.flow_level and not self.simple_key_context
                    and self.analysis.allow_block):
                return self.event.style
        if not self.event.style or self.event.style == '\'':
            if (self.analysis.allow_single_quoted and
                    not (self.simple_key_context and self.analysis.multiline)):
                return '\''
        return '"'
    def process_scalar(self):
        # Emit the current scalar event using the style chosen by
        # choose_scalar_style(); consumes the cached analysis and style.
        if self.analysis is None:
            self.analysis = self.analyze_scalar(self.event.value)
        if self.style is None:
            self.style = self.choose_scalar_style()
        # Line folding/splitting is disabled inside a simple key.
        split = (not self.simple_key_context)
        #if self.analysis.multiline and split    \
        #        and (not self.style or self.style in '\'\"'):
        #    self.write_indent()
        if self.style == '"':
            self.write_double_quoted(self.analysis.scalar, split)
        elif self.style == '\'':
            self.write_single_quoted(self.analysis.scalar, split)
        elif self.style == '>':
            self.write_folded(self.analysis.scalar)
        elif self.style == '|':
            self.write_literal(self.analysis.scalar)
        else:
            self.write_plain(self.analysis.scalar, split)
        self.analysis = None
        self.style = None
# Analyzers.
def prepare_version(self, version):
major, minor = version
if major != 1:
raise EmitterError("unsupported YAML version: %d.%d" % (major, minor))
return u'%d.%d' % (major, minor)
def prepare_tag_handle(self, handle):
if not handle:
raise EmitterError("tag handle must not be empty")
if handle[0] != u'!' or handle[-1] != u'!':
raise EmitterError("tag handle must start and end with '!': %r"
% (handle.encode('utf-8')))
for ch in handle[1:-1]:
if not (u'0' <= ch <= u'9' or u'A' <= ch <= u'Z' or u'a' <= ch <= u'z' \
or ch in u'-_'):
raise EmitterError("invalid character %r in the tag handle: %r"
% (ch.encode('utf-8'), handle.encode('utf-8')))
return handle
    def prepare_tag_prefix(self, prefix):
        # Escape a %TAG directive prefix: characters outside the allowed URI
        # set are percent-encoded from their UTF-8 bytes; a leading '!' is
        # always kept verbatim.
        if not prefix:
            raise EmitterError("tag prefix must not be empty")
        chunks = []
        start = end = 0
        if prefix[0] == u'!':
            end = 1
        while end < len(prefix):
            ch = prefix[end]
            if u'0' <= ch <= u'9' or u'A' <= ch <= u'Z' or u'a' <= ch <= u'z'    \
                    or ch in u'-;/?!:@&=+$,_.~*\'()[]':
                end += 1
            else:
                # Flush the allowed run, then %XX-escape the UTF-8 bytes of
                # the disallowed character.
                # NOTE: iterating an encoded string and calling ord() on the
                # items is Python 2 behavior (this module targets Python 2).
                if start < end:
                    chunks.append(prefix[start:end])
                start = end = end+1
                data = ch.encode('utf-8')
                for ch in data:
                    chunks.append(u'%%%02X' % ord(ch))
        if start < end:
            chunks.append(prefix[start:end])
        return u''.join(chunks)
    def prepare_tag(self, tag):
        # Convert a full tag URI into emitted form: shorten it with the
        # longest matching registered prefix ('!handle!suffix'), percent-
        # escape disallowed characters, and fall back to the verbatim
        # '!<...>' form when no handle applies.
        if not tag:
            raise EmitterError("tag must not be empty")
        if tag == u'!':
            return tag
        handle = None
        suffix = tag
        # NOTE: list-returning dict.keys() plus in-place sort() is
        # Python 2-only; the last (longest sorted) matching prefix wins.
        prefixes = self.tag_prefixes.keys()
        prefixes.sort()
        for prefix in prefixes:
            if tag.startswith(prefix)   \
                    and (prefix == u'!' or len(prefix) < len(tag)):
                handle = self.tag_prefixes[prefix]
                suffix = tag[len(prefix):]
        chunks = []
        start = end = 0
        while end < len(suffix):
            ch = suffix[end]
            if u'0' <= ch <= u'9' or u'A' <= ch <= u'Z' or u'a' <= ch <= u'z'    \
                    or ch in u'-;/?:@&=+$,_.~*\'()[]'   \
                    or (ch == u'!' and handle != u'!'):
                end += 1
            else:
                # %XX-escape the UTF-8 bytes of a disallowed character
                # (Python 2 byte-string iteration, as in prepare_tag_prefix).
                if start < end:
                    chunks.append(suffix[start:end])
                start = end = end+1
                data = ch.encode('utf-8')
                for ch in data:
                    chunks.append(u'%%%02X' % ord(ch))
        if start < end:
            chunks.append(suffix[start:end])
        suffix_text = u''.join(chunks)
        if handle:
            return u'%s%s' % (handle, suffix_text)
        else:
            return u'!<%s>' % suffix_text
def prepare_anchor(self, anchor):
if not anchor:
raise EmitterError("anchor must not be empty")
for ch in anchor:
if not (u'0' <= ch <= u'9' or u'A' <= ch <= u'Z' or u'a' <= ch <= u'z' \
or ch in u'-_'):
raise EmitterError("invalid character %r in the anchor: %r"
% (ch.encode('utf-8'), anchor.encode('utf-8')))
return anchor
def analyze_scalar(self, scalar):
# Empty scalar is a special case.
if not scalar:
return ScalarAnalysis(scalar=scalar, empty=True, multiline=False,
allow_flow_plain=False, allow_block_plain=True,
allow_single_quoted=True, allow_double_quoted=True,
allow_block=False)
# Indicators and special characters.
block_indicators = False
flow_indicators = False
line_breaks = False
special_characters = False
# Important whitespace combinations.
leading_space = False
leading_break = False
trailing_space = False
trailing_break = False
break_space = False
space_break = False
# Check document indicators.
if scalar.startswith(u'---') or scalar.startswith(u'...'):
block_indicators = True
flow_indicators = True
# First character or preceded by a whitespace.
preceeded_by_whitespace = True
# Last character or followed by a whitespace.
followed_by_whitespace = (len(scalar) == 1 or
scalar[1] in u'\0 \t\r\n\x85\u2028\u2029')
# The previous character is a space.
previous_space = False
# The previous character is a break.
previous_break = False
index = 0
while index < len(scalar):
ch = scalar[index]
# Check for indicators.
if index == 0:
# Leading indicators are special characters.
if ch in u'#,[]{}&*!|>\'\"%@`':
flow_indicators = True
block_indicators = True
if ch in u'?:':
flow_indicators = True
if followed_by_whitespace:
block_indicators = True
if ch == u'-' and followed_by_whitespace:
flow_indicators = True
block_indicators = True
else:
# Some indicators cannot appear within a scalar as well.
if ch in u',?[]{}':
flow_indicators = True
if ch == u':':
flow_indicators = True
if followed_by_whitespace:
block_indicators = True
if ch == u'
flow_indicators = True
block_indicators = True
# Check for line breaks, special, and unicode characters.
if ch in u'\n\x85\u2028\u2029':
line_breaks = True
if not (ch == u'\n' or u'\x20' <= ch <= u'\x7E'):
if (ch == u'\x85' or u'\xA0' <= ch <= u'\uD7FF'
or u'\uE000' <= ch <= u'\uFFFD') and ch != u'\uFEFF':
unicode_characters = True
if not self.allow_unicode:
special_characters = True
else:
special_characters = True
# Detect important whitespace combinations.
if ch == u' ':
if index == 0:
leading_space = True
if index == len(scalar)-1:
trailing_space = True
if previous_break:
break_space = True
previous_space = True
previous_break = False
elif ch in u'\n\x85\u2028\u2029':
if index == 0:
leading_break = True
if index == len(scalar)-1:
trailing_break = True
if previous_space:
space_break = True
previous_space = False
previous_break = True
else:
previous_space = False
previous_break = False
# Prepare for the next character.
index += 1
preceeded_by_whitespace = (ch in u'\0 \t\r\n\x85\u2028\u2029')
followed_by_whitespace = (index+1 >= len(scalar) or
scalar[index+1] in u'\0 \t\r\n\x85\u2028\u2029')
# Let's decide what styles are allowed.
allow_flow_plain = True
allow_block_plain = True
allow_single_quoted = True
allow_double_quoted = True
allow_block = True
if (leading_space or leading_break
or trailing_space or trailing_break):
allow_flow_plain = allow_block_plain = False
if trailing_space:
allow_block = False
if break_space:
allow_flow_plain = allow_block_plain = allow_single_quoted = False
if space_break or special_characters:
allow_flow_plain = allow_block_plain = \
allow_single_quoted = allow_block = False
if line_breaks:
allow_flow_plain = allow_block_plain = False
if flow_indicators:
allow_flow_plain = False
if block_indicators:
allow_block_plain = False
return ScalarAnalysis(scalar=scalar,
empty=False, multiline=line_breaks,
allow_flow_plain=allow_flow_plain,
allow_block_plain=allow_block_plain,
allow_single_quoted=allow_single_quoted,
allow_double_quoted=allow_double_quoted,
allow_block=allow_block)
def flush_stream(self):
if hasattr(self.stream, 'flush'):
self.stream.flush()
    def write_stream_start(self):
        # UTF-16 output requires an explicit BOM at the start of the stream.
        if self.encoding and self.encoding.startswith('utf-16'):
            self.stream.write(u'\uFEFF'.encode(self.encoding))
    def write_stream_end(self):
        # Nothing to write at end of stream; just make sure output is
        # flushed.
        self.flush_stream()
    def write_indicator(self, indicator, need_whitespace,
            whitespace=False, indention=False):
        # Write a syntactic indicator (':', '-', '?', quotes, tags, ...),
        # inserting a separating space when `need_whitespace` is set and the
        # previous character was not whitespace.  `whitespace`/`indention`
        # record the resulting writer state for the following output.
        if self.whitespace or not need_whitespace:
            data = indicator
        else:
            data = u' '+indicator
        self.whitespace = whitespace
        self.indention = self.indention and indention
        self.column += len(data)
        self.open_ended = False
        if self.encoding:
            data = data.encode(self.encoding)
        self.stream.write(data)
    def write_indent(self):
        # Move to the current indentation level: break the line if we are
        # past (or exactly at, without trailing whitespace) the indent
        # column, then pad with spaces up to it.
        indent = self.indent or 0
        if not self.indention or self.column > indent   \
                or (self.column == indent and not self.whitespace):
            self.write_line_break()
        if self.column < indent:
            self.whitespace = True
            data = u' '*(indent-self.column)
            self.column = indent
            if self.encoding:
                data = data.encode(self.encoding)
            self.stream.write(data)
    def write_line_break(self, data=None):
        # Emit a line break (the configured best_line_break by default) and
        # reset the column/whitespace tracking for the new line.
        if data is None:
            data = self.best_line_break
        self.whitespace = True
        self.indention = True
        self.line += 1
        self.column = 0
        if self.encoding:
            data = data.encode(self.encoding)
        self.stream.write(data)
    def write_version_directive(self, version_text):
        # Emit a '%YAML <version>' directive on its own line.
        # ('%%' renders a literal '%' in the format string.)
        data = u'%%YAML %s' % version_text
        if self.encoding:
            data = data.encode(self.encoding)
        self.stream.write(data)
        self.write_line_break()
    def write_tag_directive(self, handle_text, prefix_text):
        # Emit a '%TAG <handle> <prefix>' directive on its own line.
        data = u'%%TAG %s %s' % (handle_text, prefix_text)
        if self.encoding:
            data = data.encode(self.encoding)
        self.stream.write(data)
        self.write_line_break()
    def write_single_quoted(self, text, split=True):
        # Write `text` as a single-quoted scalar: runs of ordinary
        # characters are copied through, a single space may be folded into a
        # line break when the line grows past best_width (if `split`), line
        # breaks are normalized, and a quote character is escaped by
        # doubling it ('').
        self.write_indicator(u'\'', True)
        spaces = False      # currently scanning a run of spaces
        breaks = False      # currently scanning a run of line breaks
        start = end = 0
        while end <= len(text):
            ch = None
            if end < len(text):
                ch = text[end]
            if spaces:
                if ch is None or ch != u' ':
                    # A lone interior space at an overlong line folds into
                    # an indent; otherwise the spaces are written verbatim.
                    if start+1 == end and self.column > self.best_width and split   \
                            and start != 0 and end != len(text):
                        self.write_indent()
                    else:
                        data = text[start:end]
                        self.column += len(data)
                        if self.encoding:
                            data = data.encode(self.encoding)
                        self.stream.write(data)
                    start = end
            elif breaks:
                if ch is None or ch not in u'\n\x85\u2028\u2029':
                    # An initial '\n' needs an extra break to survive
                    # folding; then each break is reproduced.
                    if text[start] == u'\n':
                        self.write_line_break()
                    for br in text[start:end]:
                        if br == u'\n':
                            self.write_line_break()
                        else:
                            self.write_line_break(br)
                    self.write_indent()
                    start = end
            else:
                if ch is None or ch in u' \n\x85\u2028\u2029' or ch == u'\'':
                    if start < end:
                        data = text[start:end]
                        self.column += len(data)
                        if self.encoding:
                            data = data.encode(self.encoding)
                        self.stream.write(data)
                        start = end
            # Escape a quote by doubling it.
            if ch == u'\'':
                data = u'\'\''
                self.column += 2
                if self.encoding:
                    data = data.encode(self.encoding)
                self.stream.write(data)
                start = end + 1
            if ch is not None:
                spaces = (ch == u' ')
                breaks = (ch in u'\n\x85\u2028\u2029')
            end += 1
        self.write_indicator(u'\'', False)
ESCAPE_REPLACEMENTS = {
u'\0': u'0',
u'\x07': u'a',
u'\x08': u'b',
u'\x09': u't',
u'\x0A': u'n',
u'\x0B': u'v',
u'\x0C': u'f',
u'\x0D': u'r',
u'\x1B': u'e',
u'\"': u'\"',
u'\\': u'\\',
u'\x85': u'N',
u'\xA0': u'_',
u'\u2028': u'L',
u'\u2029': u'P',
}
    def write_double_quoted(self, text, split=True):
        # Write `text` as a double-quoted scalar.  Printable characters are
        # copied through; everything else is escaped (named escape, \xXX,
        # \uXXXX or \UXXXXXXXX).  Overlong lines may be split with a
        # trailing '\' continuation when `split` is set.
        self.write_indicator(u'"', True)
        start = end = 0
        while end <= len(text):
            ch = None
            if end < len(text):
                ch = text[end]
            # End of text, or a character that must be escaped?
            if ch is None or ch in u'"\\\x85\u2028\u2029\uFEFF' \
                    or not (u'\x20' <= ch <= u'\x7E'
                        or (self.allow_unicode
                            and (u'\xA0' <= ch <= u'\uD7FF'
                                or u'\uE000' <= ch <= u'\uFFFD'))):
                # Flush the run of plain characters before the escape.
                if start < end:
                    data = text[start:end]
                    self.column += len(data)
                    if self.encoding:
                        data = data.encode(self.encoding)
                    self.stream.write(data)
                    start = end
                if ch is not None:
                    if ch in self.ESCAPE_REPLACEMENTS:
                        data = u'\\'+self.ESCAPE_REPLACEMENTS[ch]
                    elif ch <= u'\xFF':
                        data = u'\\x%02X' % ord(ch)
                    elif ch <= u'\uFFFF':
                        data = u'\\u%04X' % ord(ch)
                    else:
                        data = u'\\U%08X' % ord(ch)
                    self.column += len(data)
                    if self.encoding:
                        data = data.encode(self.encoding)
                    self.stream.write(data)
                    start = end+1
            # Split an overlong line with a backslash continuation; an
            # escaped '\' is appended if the next line starts with a space.
            if 0 < end < len(text)-1 and (ch == u' ' or start >= end)   \
                    and self.column+(end-start) > self.best_width and split:
                data = text[start:end]+u'\\'
                if start < end:
                    start = end
                self.column += len(data)
                if self.encoding:
                    data = data.encode(self.encoding)
                self.stream.write(data)
                self.write_indent()
                self.whitespace = False
                self.indention = False
                if text[start] == u' ':
                    data = u'\\'
                    self.column += len(data)
                    if self.encoding:
                        data = data.encode(self.encoding)
                    self.stream.write(data)
            end += 1
        self.write_indicator(u'"', False)
    def determine_block_hints(self, text):
        # Compute the indentation/chomping hints that follow a block scalar
        # indicator: an explicit indent digit when the content starts with
        # whitespace, '-' (strip) when it lacks a final break, '+' (keep)
        # when it ends with multiple breaks.
        # NOTE: 'unicode' is the Python 2 builtin — this module targets
        # Python 2.
        hints = u''
        if text:
            if text[0] in u' \n\x85\u2028\u2029':
                hints += unicode(self.best_indent)
            if text[-1] not in u'\n\x85\u2028\u2029':
                hints += u'-'
            elif len(text) == 1 or text[-2] in u'\n\x85\u2028\u2029':
                hints += u'+'
        return hints
    def write_folded(self, text):
        # Write `text` as a folded block scalar ('>'): line breaks inside
        # the content become paragraph folds, and long lines of plain text
        # may be broken at single spaces.
        hints = self.determine_block_hints(text)
        self.write_indicator(u'>'+hints, True)
        if hints[-1:] == u'+':
            # Keep-chomping means the document may end without a final
            # break indicator.
            self.open_ended = True
        self.write_line_break()
        leading_space = True
        spaces = False      # currently scanning a run of spaces
        breaks = True       # currently scanning a run of line breaks
        start = end = 0
        while end <= len(text):
            ch = None
            if end < len(text):
                ch = text[end]
            if breaks:
                if ch is None or ch not in u'\n\x85\u2028\u2029':
                    # An extra break protects a fold next to a line that
                    # starts with a space (folding would merge them).
                    if not leading_space and ch is not None and ch != u' '  \
                            and text[start] == u'\n':
                        self.write_line_break()
                    leading_space = (ch == u' ')
                    for br in text[start:end]:
                        if br == u'\n':
                            self.write_line_break()
                        else:
                            self.write_line_break(br)
                    if ch is not None:
                        self.write_indent()
                    start = end
            elif spaces:
                if ch != u' ':
                    # Fold a single interior space on an overlong line.
                    if start+1 == end and self.column > self.best_width:
                        self.write_indent()
                    else:
                        data = text[start:end]
                        self.column += len(data)
                        if self.encoding:
                            data = data.encode(self.encoding)
                        self.stream.write(data)
                    start = end
            else:
                if ch is None or ch in u' \n\x85\u2028\u2029':
                    data = text[start:end]
                    self.column += len(data)
                    if self.encoding:
                        data = data.encode(self.encoding)
                    self.stream.write(data)
                    if ch is None:
                        self.write_line_break()
                    start = end
            if ch is not None:
                breaks = (ch in u'\n\x85\u2028\u2029')
                spaces = (ch == u' ')
            end += 1
    def write_literal(self, text):
        # Write `text` as a literal block scalar ('|'): content is
        # reproduced verbatim under the block indent, with no folding.
        hints = self.determine_block_hints(text)
        self.write_indicator(u'|'+hints, True)
        if hints[-1:] == u'+':
            # Keep-chomping means the document may end without a final
            # break indicator.
            self.open_ended = True
        self.write_line_break()
        breaks = True       # currently scanning a run of line breaks
        start = end = 0
        while end <= len(text):
            ch = None
            if end < len(text):
                ch = text[end]
            if breaks:
                if ch is None or ch not in u'\n\x85\u2028\u2029':
                    for br in text[start:end]:
                        if br == u'\n':
                            self.write_line_break()
                        else:
                            self.write_line_break(br)
                    if ch is not None:
                        self.write_indent()
                    start = end
            else:
                if ch is None or ch in u'\n\x85\u2028\u2029':
                    data = text[start:end]
                    if self.encoding:
                        data = data.encode(self.encoding)
                    self.stream.write(data)
                    if ch is None:
                        self.write_line_break()
                    start = end
            if ch is not None:
                breaks = (ch in u'\n\x85\u2028\u2029')
            end += 1
    def write_plain(self, text, split=True):
        # Write `text` as a plain (unquoted) scalar: no indicators, with
        # optional folding of a single interior space when the line grows
        # past best_width (if `split`).
        if self.root_context:
            # A plain scalar at document root may leave the document
            # open-ended (no closing indicator).
            self.open_ended = True
        if not text:
            return
        # Separate from the previous token if needed.
        if not self.whitespace:
            data = u' '
            self.column += len(data)
            if self.encoding:
                data = data.encode(self.encoding)
            self.stream.write(data)
        self.whitespace = False
        self.indention = False
        spaces = False      # currently scanning a run of spaces
        breaks = False      # currently scanning a run of line breaks
        start = end = 0
        while end <= len(text):
            ch = None
            if end < len(text):
                ch = text[end]
            if spaces:
                if ch != u' ':
                    # Fold a single interior space on an overlong line.
                    if start+1 == end and self.column > self.best_width and split:
                        self.write_indent()
                        self.whitespace = False
                        self.indention = False
                    else:
                        data = text[start:end]
                        self.column += len(data)
                        if self.encoding:
                            data = data.encode(self.encoding)
                        self.stream.write(data)
                    start = end
            elif breaks:
                if ch not in u'\n\x85\u2028\u2029':
                    # An initial '\n' needs an extra break to survive
                    # folding; then each break is reproduced.
                    if text[start] == u'\n':
                        self.write_line_break()
                    for br in text[start:end]:
                        if br == u'\n':
                            self.write_line_break()
                        else:
                            self.write_line_break(br)
                    self.write_indent()
                    self.whitespace = False
                    self.indention = False
                    start = end
            else:
                if ch is None or ch in u' \n\x85\u2028\u2029':
                    data = text[start:end]
                    self.column += len(data)
                    if self.encoding:
                        data = data.encode(self.encoding)
                    self.stream.write(data)
                    start = end
            if ch is not None:
                spaces = (ch == u' ')
                breaks = (ch in u'\n\x85\u2028\u2029')
            end += 1
| true | true |
f71baa78a75aac20e49dcb9ac6b3ae5910b61a00 | 13,930 | py | Python | vendor/istio.io/api/python/istio_api/mixer/v1/config/client/api_spec_pb2.py | PinZhang/istio | dce455456d77ca5af34ba5848f9704577349c6bd | [
"Apache-2.0"
] | 40 | 2018-10-24T18:56:01.000Z | 2021-12-30T22:05:33.000Z | vendor/istio.io/api/python/istio_api/mixer/v1/config/client/api_spec_pb2.py | PinZhang/istio | dce455456d77ca5af34ba5848f9704577349c6bd | [
"Apache-2.0"
] | 17 | 2019-01-11T05:57:35.000Z | 2019-08-29T05:33:38.000Z | vendor/istio.io/api/python/istio_api/mixer/v1/config/client/api_spec_pb2.py | PinZhang/istio | dce455456d77ca5af34ba5848f9704577349c6bd | [
"Apache-2.0"
] | 14 | 2018-11-09T19:17:26.000Z | 2021-12-16T16:36:24.000Z | # Generated by the protocol buffer compiler. DO NOT EDIT!
# source: mixer/v1/config/client/api_spec.proto
import sys
_b=sys.version_info[0]<3 and (lambda x:x) or (lambda x:x.encode('latin1'))
from google.protobuf import descriptor as _descriptor
from google.protobuf import message as _message
from google.protobuf import reflection as _reflection
from google.protobuf import symbol_database as _symbol_database
from google.protobuf import descriptor_pb2
# @@protoc_insertion_point(imports)
_sym_db = _symbol_database.Default()
from gogoproto import gogo_pb2 as gogoproto_dot_gogo__pb2
from mixer.v1 import attributes_pb2 as mixer_dot_v1_dot_attributes__pb2
from mixer.v1.config.client import service_pb2 as mixer_dot_v1_dot_config_dot_client_dot_service__pb2
DESCRIPTOR = _descriptor.FileDescriptor(
name='mixer/v1/config/client/api_spec.proto',
package='istio.mixer.v1.config.client',
syntax='proto3',
serialized_pb=_b('\n%mixer/v1/config/client/api_spec.proto\x12\x1cistio.mixer.v1.config.client\x1a\x14gogoproto/gogo.proto\x1a\x19mixer/v1/attributes.proto\x1a$mixer/v1/config/client/service.proto\"\xb9\x01\n\x0bHTTPAPISpec\x12.\n\nattributes\x18\x01 \x01(\x0b\x32\x1a.istio.mixer.v1.Attributes\x12\x42\n\x08patterns\x18\x02 \x03(\x0b\x32\x30.istio.mixer.v1.config.client.HTTPAPISpecPattern\x12\x36\n\x08\x61pi_keys\x18\x03 \x03(\x0b\x32$.istio.mixer.v1.config.client.APIKey\"\x8d\x01\n\x12HTTPAPISpecPattern\x12.\n\nattributes\x18\x01 \x01(\x0b\x32\x1a.istio.mixer.v1.Attributes\x12\x13\n\x0bhttp_method\x18\x02 \x01(\t\x12\x16\n\x0curi_template\x18\x03 \x01(\tH\x00\x12\x0f\n\x05regex\x18\x04 \x01(\tH\x00\x42\t\n\x07pattern\"D\n\x06\x41PIKey\x12\x0f\n\x05query\x18\x01 \x01(\tH\x00\x12\x10\n\x06header\x18\x02 \x01(\tH\x00\x12\x10\n\x06\x63ookie\x18\x03 \x01(\tH\x00\x42\x05\n\x03key\"7\n\x14HTTPAPISpecReference\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x11\n\tnamespace\x18\x02 \x01(\t\"\x99\x01\n\x12HTTPAPISpecBinding\x12<\n\x08services\x18\x01 \x03(\x0b\x32*.istio.mixer.v1.config.client.IstioService\x12\x45\n\tapi_specs\x18\x02 \x03(\x0b\x32\x32.istio.mixer.v1.config.client.HTTPAPISpecReferenceB1Z#istio.io/api/mixer/v1/config/client\xc8\xe1\x1e\x00\xa8\xe2\x1e\x00\xf0\xe1\x1e\x00\x62\x06proto3')
,
dependencies=[gogoproto_dot_gogo__pb2.DESCRIPTOR,mixer_dot_v1_dot_attributes__pb2.DESCRIPTOR,mixer_dot_v1_dot_config_dot_client_dot_service__pb2.DESCRIPTOR,])
_HTTPAPISPEC = _descriptor.Descriptor(
name='HTTPAPISpec',
full_name='istio.mixer.v1.config.client.HTTPAPISpec',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='attributes', full_name='istio.mixer.v1.config.client.HTTPAPISpec.attributes', index=0,
number=1, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='patterns', full_name='istio.mixer.v1.config.client.HTTPAPISpec.patterns', index=1,
number=2, type=11, cpp_type=10, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='api_keys', full_name='istio.mixer.v1.config.client.HTTPAPISpec.api_keys', index=2,
number=3, type=11, cpp_type=10, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=159,
serialized_end=344,
)
_HTTPAPISPECPATTERN = _descriptor.Descriptor(
name='HTTPAPISpecPattern',
full_name='istio.mixer.v1.config.client.HTTPAPISpecPattern',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='attributes', full_name='istio.mixer.v1.config.client.HTTPAPISpecPattern.attributes', index=0,
number=1, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='http_method', full_name='istio.mixer.v1.config.client.HTTPAPISpecPattern.http_method', index=1,
number=2, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='uri_template', full_name='istio.mixer.v1.config.client.HTTPAPISpecPattern.uri_template', index=2,
number=3, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='regex', full_name='istio.mixer.v1.config.client.HTTPAPISpecPattern.regex', index=3,
number=4, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
_descriptor.OneofDescriptor(
name='pattern', full_name='istio.mixer.v1.config.client.HTTPAPISpecPattern.pattern',
index=0, containing_type=None, fields=[]),
],
serialized_start=347,
serialized_end=488,
)
_APIKEY = _descriptor.Descriptor(
name='APIKey',
full_name='istio.mixer.v1.config.client.APIKey',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='query', full_name='istio.mixer.v1.config.client.APIKey.query', index=0,
number=1, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='header', full_name='istio.mixer.v1.config.client.APIKey.header', index=1,
number=2, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='cookie', full_name='istio.mixer.v1.config.client.APIKey.cookie', index=2,
number=3, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
_descriptor.OneofDescriptor(
name='key', full_name='istio.mixer.v1.config.client.APIKey.key',
index=0, containing_type=None, fields=[]),
],
serialized_start=490,
serialized_end=558,
)
_HTTPAPISPECREFERENCE = _descriptor.Descriptor(
name='HTTPAPISpecReference',
full_name='istio.mixer.v1.config.client.HTTPAPISpecReference',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='name', full_name='istio.mixer.v1.config.client.HTTPAPISpecReference.name', index=0,
number=1, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='namespace', full_name='istio.mixer.v1.config.client.HTTPAPISpecReference.namespace', index=1,
number=2, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=560,
serialized_end=615,
)
_HTTPAPISPECBINDING = _descriptor.Descriptor(
name='HTTPAPISpecBinding',
full_name='istio.mixer.v1.config.client.HTTPAPISpecBinding',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='services', full_name='istio.mixer.v1.config.client.HTTPAPISpecBinding.services', index=0,
number=1, type=11, cpp_type=10, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='api_specs', full_name='istio.mixer.v1.config.client.HTTPAPISpecBinding.api_specs', index=1,
number=2, type=11, cpp_type=10, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=618,
serialized_end=771,
)
_HTTPAPISPEC.fields_by_name['attributes'].message_type = mixer_dot_v1_dot_attributes__pb2._ATTRIBUTES
_HTTPAPISPEC.fields_by_name['patterns'].message_type = _HTTPAPISPECPATTERN
_HTTPAPISPEC.fields_by_name['api_keys'].message_type = _APIKEY
_HTTPAPISPECPATTERN.fields_by_name['attributes'].message_type = mixer_dot_v1_dot_attributes__pb2._ATTRIBUTES
_HTTPAPISPECPATTERN.oneofs_by_name['pattern'].fields.append(
_HTTPAPISPECPATTERN.fields_by_name['uri_template'])
_HTTPAPISPECPATTERN.fields_by_name['uri_template'].containing_oneof = _HTTPAPISPECPATTERN.oneofs_by_name['pattern']
_HTTPAPISPECPATTERN.oneofs_by_name['pattern'].fields.append(
_HTTPAPISPECPATTERN.fields_by_name['regex'])
_HTTPAPISPECPATTERN.fields_by_name['regex'].containing_oneof = _HTTPAPISPECPATTERN.oneofs_by_name['pattern']
_APIKEY.oneofs_by_name['key'].fields.append(
_APIKEY.fields_by_name['query'])
_APIKEY.fields_by_name['query'].containing_oneof = _APIKEY.oneofs_by_name['key']
_APIKEY.oneofs_by_name['key'].fields.append(
_APIKEY.fields_by_name['header'])
_APIKEY.fields_by_name['header'].containing_oneof = _APIKEY.oneofs_by_name['key']
_APIKEY.oneofs_by_name['key'].fields.append(
_APIKEY.fields_by_name['cookie'])
_APIKEY.fields_by_name['cookie'].containing_oneof = _APIKEY.oneofs_by_name['key']
_HTTPAPISPECBINDING.fields_by_name['services'].message_type = mixer_dot_v1_dot_config_dot_client_dot_service__pb2._ISTIOSERVICE
_HTTPAPISPECBINDING.fields_by_name['api_specs'].message_type = _HTTPAPISPECREFERENCE
DESCRIPTOR.message_types_by_name['HTTPAPISpec'] = _HTTPAPISPEC
DESCRIPTOR.message_types_by_name['HTTPAPISpecPattern'] = _HTTPAPISPECPATTERN
DESCRIPTOR.message_types_by_name['APIKey'] = _APIKEY
DESCRIPTOR.message_types_by_name['HTTPAPISpecReference'] = _HTTPAPISPECREFERENCE
DESCRIPTOR.message_types_by_name['HTTPAPISpecBinding'] = _HTTPAPISPECBINDING
_sym_db.RegisterFileDescriptor(DESCRIPTOR)
HTTPAPISpec = _reflection.GeneratedProtocolMessageType('HTTPAPISpec', (_message.Message,), dict(
DESCRIPTOR = _HTTPAPISPEC,
__module__ = 'mixer.v1.config.client.api_spec_pb2'
# @@protoc_insertion_point(class_scope:istio.mixer.v1.config.client.HTTPAPISpec)
))
_sym_db.RegisterMessage(HTTPAPISpec)
HTTPAPISpecPattern = _reflection.GeneratedProtocolMessageType('HTTPAPISpecPattern', (_message.Message,), dict(
DESCRIPTOR = _HTTPAPISPECPATTERN,
__module__ = 'mixer.v1.config.client.api_spec_pb2'
# @@protoc_insertion_point(class_scope:istio.mixer.v1.config.client.HTTPAPISpecPattern)
))
_sym_db.RegisterMessage(HTTPAPISpecPattern)
APIKey = _reflection.GeneratedProtocolMessageType('APIKey', (_message.Message,), dict(
DESCRIPTOR = _APIKEY,
__module__ = 'mixer.v1.config.client.api_spec_pb2'
# @@protoc_insertion_point(class_scope:istio.mixer.v1.config.client.APIKey)
))
_sym_db.RegisterMessage(APIKey)
HTTPAPISpecReference = _reflection.GeneratedProtocolMessageType('HTTPAPISpecReference', (_message.Message,), dict(
DESCRIPTOR = _HTTPAPISPECREFERENCE,
__module__ = 'mixer.v1.config.client.api_spec_pb2'
# @@protoc_insertion_point(class_scope:istio.mixer.v1.config.client.HTTPAPISpecReference)
))
_sym_db.RegisterMessage(HTTPAPISpecReference)
HTTPAPISpecBinding = _reflection.GeneratedProtocolMessageType('HTTPAPISpecBinding', (_message.Message,), dict(
DESCRIPTOR = _HTTPAPISPECBINDING,
__module__ = 'mixer.v1.config.client.api_spec_pb2'
# @@protoc_insertion_point(class_scope:istio.mixer.v1.config.client.HTTPAPISpecBinding)
))
_sym_db.RegisterMessage(HTTPAPISpecBinding)
DESCRIPTOR.has_options = True
DESCRIPTOR._options = _descriptor._ParseOptions(descriptor_pb2.FileOptions(), _b('Z#istio.io/api/mixer/v1/config/client\310\341\036\000\250\342\036\000\360\341\036\000'))
# @@protoc_insertion_point(module_scope)
| 43.26087 | 1,306 | 0.760876 |
import sys
_b=sys.version_info[0]<3 and (lambda x:x) or (lambda x:x.encode('latin1'))
from google.protobuf import descriptor as _descriptor
from google.protobuf import message as _message
from google.protobuf import reflection as _reflection
from google.protobuf import symbol_database as _symbol_database
from google.protobuf import descriptor_pb2
_sym_db = _symbol_database.Default()
from gogoproto import gogo_pb2 as gogoproto_dot_gogo__pb2
from mixer.v1 import attributes_pb2 as mixer_dot_v1_dot_attributes__pb2
from mixer.v1.config.client import service_pb2 as mixer_dot_v1_dot_config_dot_client_dot_service__pb2
DESCRIPTOR = _descriptor.FileDescriptor(
name='mixer/v1/config/client/api_spec.proto',
package='istio.mixer.v1.config.client',
syntax='proto3',
serialized_pb=_b('\n%mixer/v1/config/client/api_spec.proto\x12\x1cistio.mixer.v1.config.client\x1a\x14gogoproto/gogo.proto\x1a\x19mixer/v1/attributes.proto\x1a$mixer/v1/config/client/service.proto\"\xb9\x01\n\x0bHTTPAPISpec\x12.\n\nattributes\x18\x01 \x01(\x0b\x32\x1a.istio.mixer.v1.Attributes\x12\x42\n\x08patterns\x18\x02 \x03(\x0b\x32\x30.istio.mixer.v1.config.client.HTTPAPISpecPattern\x12\x36\n\x08\x61pi_keys\x18\x03 \x03(\x0b\x32$.istio.mixer.v1.config.client.APIKey\"\x8d\x01\n\x12HTTPAPISpecPattern\x12.\n\nattributes\x18\x01 \x01(\x0b\x32\x1a.istio.mixer.v1.Attributes\x12\x13\n\x0bhttp_method\x18\x02 \x01(\t\x12\x16\n\x0curi_template\x18\x03 \x01(\tH\x00\x12\x0f\n\x05regex\x18\x04 \x01(\tH\x00\x42\t\n\x07pattern\"D\n\x06\x41PIKey\x12\x0f\n\x05query\x18\x01 \x01(\tH\x00\x12\x10\n\x06header\x18\x02 \x01(\tH\x00\x12\x10\n\x06\x63ookie\x18\x03 \x01(\tH\x00\x42\x05\n\x03key\"7\n\x14HTTPAPISpecReference\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x11\n\tnamespace\x18\x02 \x01(\t\"\x99\x01\n\x12HTTPAPISpecBinding\x12<\n\x08services\x18\x01 \x03(\x0b\x32*.istio.mixer.v1.config.client.IstioService\x12\x45\n\tapi_specs\x18\x02 \x03(\x0b\x32\x32.istio.mixer.v1.config.client.HTTPAPISpecReferenceB1Z#istio.io/api/mixer/v1/config/client\xc8\xe1\x1e\x00\xa8\xe2\x1e\x00\xf0\xe1\x1e\x00\x62\x06proto3')
,
dependencies=[gogoproto_dot_gogo__pb2.DESCRIPTOR,mixer_dot_v1_dot_attributes__pb2.DESCRIPTOR,mixer_dot_v1_dot_config_dot_client_dot_service__pb2.DESCRIPTOR,])
_HTTPAPISPEC = _descriptor.Descriptor(
name='HTTPAPISpec',
full_name='istio.mixer.v1.config.client.HTTPAPISpec',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='attributes', full_name='istio.mixer.v1.config.client.HTTPAPISpec.attributes', index=0,
number=1, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='patterns', full_name='istio.mixer.v1.config.client.HTTPAPISpec.patterns', index=1,
number=2, type=11, cpp_type=10, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='api_keys', full_name='istio.mixer.v1.config.client.HTTPAPISpec.api_keys', index=2,
number=3, type=11, cpp_type=10, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=159,
serialized_end=344,
)
_HTTPAPISPECPATTERN = _descriptor.Descriptor(
name='HTTPAPISpecPattern',
full_name='istio.mixer.v1.config.client.HTTPAPISpecPattern',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='attributes', full_name='istio.mixer.v1.config.client.HTTPAPISpecPattern.attributes', index=0,
number=1, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='http_method', full_name='istio.mixer.v1.config.client.HTTPAPISpecPattern.http_method', index=1,
number=2, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='uri_template', full_name='istio.mixer.v1.config.client.HTTPAPISpecPattern.uri_template', index=2,
number=3, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='regex', full_name='istio.mixer.v1.config.client.HTTPAPISpecPattern.regex', index=3,
number=4, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
_descriptor.OneofDescriptor(
name='pattern', full_name='istio.mixer.v1.config.client.HTTPAPISpecPattern.pattern',
index=0, containing_type=None, fields=[]),
],
serialized_start=347,
serialized_end=488,
)
_APIKEY = _descriptor.Descriptor(
name='APIKey',
full_name='istio.mixer.v1.config.client.APIKey',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='query', full_name='istio.mixer.v1.config.client.APIKey.query', index=0,
number=1, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='header', full_name='istio.mixer.v1.config.client.APIKey.header', index=1,
number=2, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='cookie', full_name='istio.mixer.v1.config.client.APIKey.cookie', index=2,
number=3, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
_descriptor.OneofDescriptor(
name='key', full_name='istio.mixer.v1.config.client.APIKey.key',
index=0, containing_type=None, fields=[]),
],
serialized_start=490,
serialized_end=558,
)
_HTTPAPISPECREFERENCE = _descriptor.Descriptor(
name='HTTPAPISpecReference',
full_name='istio.mixer.v1.config.client.HTTPAPISpecReference',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='name', full_name='istio.mixer.v1.config.client.HTTPAPISpecReference.name', index=0,
number=1, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='namespace', full_name='istio.mixer.v1.config.client.HTTPAPISpecReference.namespace', index=1,
number=2, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=560,
serialized_end=615,
)
_HTTPAPISPECBINDING = _descriptor.Descriptor(
name='HTTPAPISpecBinding',
full_name='istio.mixer.v1.config.client.HTTPAPISpecBinding',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='services', full_name='istio.mixer.v1.config.client.HTTPAPISpecBinding.services', index=0,
number=1, type=11, cpp_type=10, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='api_specs', full_name='istio.mixer.v1.config.client.HTTPAPISpecBinding.api_specs', index=1,
number=2, type=11, cpp_type=10, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=618,
serialized_end=771,
)
_HTTPAPISPEC.fields_by_name['attributes'].message_type = mixer_dot_v1_dot_attributes__pb2._ATTRIBUTES
_HTTPAPISPEC.fields_by_name['patterns'].message_type = _HTTPAPISPECPATTERN
_HTTPAPISPEC.fields_by_name['api_keys'].message_type = _APIKEY
_HTTPAPISPECPATTERN.fields_by_name['attributes'].message_type = mixer_dot_v1_dot_attributes__pb2._ATTRIBUTES
_HTTPAPISPECPATTERN.oneofs_by_name['pattern'].fields.append(
_HTTPAPISPECPATTERN.fields_by_name['uri_template'])
_HTTPAPISPECPATTERN.fields_by_name['uri_template'].containing_oneof = _HTTPAPISPECPATTERN.oneofs_by_name['pattern']
_HTTPAPISPECPATTERN.oneofs_by_name['pattern'].fields.append(
_HTTPAPISPECPATTERN.fields_by_name['regex'])
_HTTPAPISPECPATTERN.fields_by_name['regex'].containing_oneof = _HTTPAPISPECPATTERN.oneofs_by_name['pattern']
_APIKEY.oneofs_by_name['key'].fields.append(
_APIKEY.fields_by_name['query'])
_APIKEY.fields_by_name['query'].containing_oneof = _APIKEY.oneofs_by_name['key']
_APIKEY.oneofs_by_name['key'].fields.append(
_APIKEY.fields_by_name['header'])
_APIKEY.fields_by_name['header'].containing_oneof = _APIKEY.oneofs_by_name['key']
_APIKEY.oneofs_by_name['key'].fields.append(
_APIKEY.fields_by_name['cookie'])
_APIKEY.fields_by_name['cookie'].containing_oneof = _APIKEY.oneofs_by_name['key']
_HTTPAPISPECBINDING.fields_by_name['services'].message_type = mixer_dot_v1_dot_config_dot_client_dot_service__pb2._ISTIOSERVICE
_HTTPAPISPECBINDING.fields_by_name['api_specs'].message_type = _HTTPAPISPECREFERENCE
DESCRIPTOR.message_types_by_name['HTTPAPISpec'] = _HTTPAPISPEC
DESCRIPTOR.message_types_by_name['HTTPAPISpecPattern'] = _HTTPAPISPECPATTERN
DESCRIPTOR.message_types_by_name['APIKey'] = _APIKEY
DESCRIPTOR.message_types_by_name['HTTPAPISpecReference'] = _HTTPAPISPECREFERENCE
DESCRIPTOR.message_types_by_name['HTTPAPISpecBinding'] = _HTTPAPISPECBINDING
_sym_db.RegisterFileDescriptor(DESCRIPTOR)
HTTPAPISpec = _reflection.GeneratedProtocolMessageType('HTTPAPISpec', (_message.Message,), dict(
DESCRIPTOR = _HTTPAPISPEC,
__module__ = 'mixer.v1.config.client.api_spec_pb2'
# @@protoc_insertion_point(class_scope:istio.mixer.v1.config.client.HTTPAPISpec)
))
_sym_db.RegisterMessage(HTTPAPISpec)
HTTPAPISpecPattern = _reflection.GeneratedProtocolMessageType('HTTPAPISpecPattern', (_message.Message,), dict(
DESCRIPTOR = _HTTPAPISPECPATTERN,
__module__ = 'mixer.v1.config.client.api_spec_pb2'
# @@protoc_insertion_point(class_scope:istio.mixer.v1.config.client.HTTPAPISpecPattern)
))
_sym_db.RegisterMessage(HTTPAPISpecPattern)
APIKey = _reflection.GeneratedProtocolMessageType('APIKey', (_message.Message,), dict(
DESCRIPTOR = _APIKEY,
__module__ = 'mixer.v1.config.client.api_spec_pb2'
# @@protoc_insertion_point(class_scope:istio.mixer.v1.config.client.APIKey)
))
_sym_db.RegisterMessage(APIKey)
HTTPAPISpecReference = _reflection.GeneratedProtocolMessageType('HTTPAPISpecReference', (_message.Message,), dict(
DESCRIPTOR = _HTTPAPISPECREFERENCE,
__module__ = 'mixer.v1.config.client.api_spec_pb2'
# @@protoc_insertion_point(class_scope:istio.mixer.v1.config.client.HTTPAPISpecReference)
))
_sym_db.RegisterMessage(HTTPAPISpecReference)
HTTPAPISpecBinding = _reflection.GeneratedProtocolMessageType('HTTPAPISpecBinding', (_message.Message,), dict(
DESCRIPTOR = _HTTPAPISPECBINDING,
__module__ = 'mixer.v1.config.client.api_spec_pb2'
# @@protoc_insertion_point(class_scope:istio.mixer.v1.config.client.HTTPAPISpecBinding)
))
_sym_db.RegisterMessage(HTTPAPISpecBinding)
DESCRIPTOR.has_options = True
DESCRIPTOR._options = _descriptor._ParseOptions(descriptor_pb2.FileOptions(), _b('Z#istio.io/api/mixer/v1/config/client\310\341\036\000\250\342\036\000\360\341\036\000'))
# @@protoc_insertion_point(module_scope)
| true | true |
f71baa8f160f4d36be63fb40e4502ee2b84f9a05 | 4,141 | py | Python | pextant/solvers/SEXTANTsolver.py | norheim/pextant | f4235719279c0e6f178ae1e0f8b1ea3346533915 | [
"MIT"
] | null | null | null | pextant/solvers/SEXTANTsolver.py | norheim/pextant | f4235719279c0e6f178ae1e0f8b1ea3346533915 | [
"MIT"
] | 1 | 2019-12-03T03:52:41.000Z | 2019-12-04T14:50:36.000Z | pextant/solvers/SEXTANTsolver.py | norheim/pextant | f4235719279c0e6f178ae1e0f8b1ea3346533915 | [
"MIT"
] | 1 | 2019-12-03T02:37:57.000Z | 2019-12-03T02:37:57.000Z | from pextant.lib.geoshapely import GeoPolygon, LONG_LAT
import numpy as np
import csv
class SEXTANTSolver(object):
    """Base class for SEXTANT route solvers.

    Concrete solvers implement :meth:`solve`; :meth:`solvemultipoint`
    chains pairwise solves over a waypoint sequence into a
    ``sextantSearchList``.
    """
    def __init__(self, environmental_model, cost_function, viz):
        self.env_model = environmental_model
        self.cost_function = cost_function
        self.viz = viz
        self.searches = []
    def solve(self, start_point, end_point):
        """Solve a single start->end leg; overridden by concrete solvers."""
        pass
    def solvemultipoint(self, waypoints):
        """Solve every consecutive waypoint pair and aggregate the results.

        Returns a tuple of (search list, concatenated raw points,
        concatenated expanded items).
        """
        search_list = sextantSearchList(waypoints)
        for leg_idx in range(len(waypoints) - 1):
            search_list.append(self.solve(waypoints[leg_idx], waypoints[leg_idx + 1]))
        return search_list, search_list.raw(), search_list.itemssrchd()
class sextantSearchList(object):
    """Ordered collection of per-leg search results for a multi-waypoint route.

    Entries are ``sextantSearch`` instances; a bool entry marks a failed
    leg, which makes the aggregate accessors return None.
    """
    def __init__(self, points):
        self.startpoint = points[0]
        self.endpoint = points[-1]
        self.waypoints = points
        self.list = []
        self.rawpoints = []
    def addresult(self, raw, nodes, coordinates, expanded_items):
        """Wrap raw solver output in a sextantSearch and store it."""
        self.list.append(sextantSearch(raw, nodes, coordinates, expanded_items))
    def append(self, sextantsearch):
        """Store an already-built search result (or a bool failure marker)."""
        self.list.append(sextantsearch)
    def raw(self):
        """Concatenated raw path points of all legs, or None if any leg failed."""
        result = []
        for search in self.list:
            # Use isinstance for the failure check: the previous
            # ``search == False`` test missed True and was inconsistent
            # with coordinates()/itemssrchd().
            if isinstance(search, bool):
                return None
            result += search.raw
        return np.array(result)
    def coordinates(self):
        """All path coordinates merged into a GeoPolygon, or None on failure."""
        result = []
        for search in self.list:
            if isinstance(search, bool):
                return None
            result += search.coordinates.to(LONG_LAT).transpose().tolist()
        return GeoPolygon(LONG_LAT, *np.array(result).transpose())
    def itemssrchd(self):
        """All expanded (searched) items across legs, or None on failure."""
        result = []
        for search in self.list:
            if isinstance(search, bool):
                return None
            result += search.expanded_items
        return np.array(result)
    def tojson(self, save=False):
        """Per-leg JSON dicts (see sextantSearch.tojson); ``save`` is unused."""
        return [elt.tojson() for elt in self.list]
    def tocsv(self, filepath=None):
        """Per-leg CSV rows; optionally also write them with a header to filepath."""
        csvlist = [elt.tocsv() for elt in self.list]
        rows = [['isStation', 'x', 'y', 'z', 'distanceMeters', 'energyJoules', 'timeSeconds']]
        for row in csvlist:
            rows += row
        if filepath:
            # csv.writer requires a text-mode file opened with newline=''
            # on Python 3; the previous 'wb' mode raised TypeError.
            with open(filepath, 'w', newline='') as csvfile:
                writer = csv.writer(csvfile)
                writer.writerows(rows)
        return csvlist
class sextantSearch(object):
    """Result of a single solved route leg.

    Attributes:
        nodes: mesh search elements along the path; each carries a
            ``derived`` dict with 'time', 'pathlength' and 'energy'.
        raw: raw path points as produced by the solver.
        npraw: transposed numpy view of ``raw``.
        coordinates: GeoPolygon-like object of path coordinates.
        expanded_items: items expanded during the search.
    """
    def __init__(self, raw, nodes, coordinates, expanded_items):
        # Maps each derived metric to its [per-step list, total] JSON names.
        self.namemap = {
            'time': ['timeList', 'totalTime'],
            'pathlength': ['distanceList', 'totalDistance'],
            'energy': ['energyList', 'totalEnergy']
        }
        self.nodes = nodes
        self.raw = raw
        self.npraw = np.array(raw).transpose()
        self.coordinates = coordinates
        self.expanded_items = expanded_items
    def tojson(self):
        """Return a dict with LineString geometry plus per-step and total metrics."""
        coordinates = self.coordinates.to(LONG_LAT).transpose().tolist()
        out = {
            "geometry": {
                'type': 'LineString',
                'coordinates': coordinates
            }
        }
        results = {}
        for k, v in self.namemap.items():
            results.update({v[0]: [], v[1]: 0})
        for mesh_srch_elt in self.nodes:
            for k, v in mesh_srch_elt.derived.items():
                results[self.namemap[k][0]].append(v)
        for k, v in self.namemap.items():
            results[v[1]] = sum(results[v[0]])
        out["derivedInfo"] = results
        return out
    def tocsv(self, coordstype=LONG_LAT):
        """Return one CSV row per path step (the first node is skipped).

        Row layout matches the header written by sextantSearchList.tocsv:
        [isStation, x, y, z, distanceMeters, energyJoules, timeSeconds].
        """
        sequence = []
        coords = self.coordinates.to(coordstype).transpose().tolist()
        for i, mesh_srch_elt in enumerate(self.nodes):
            if i != 0:
                # True only for the first and last emitted rows (stations).
                row_entry = [i == 1 or i == len(coords) - 1]
                row_entry += coords[i] + [mesh_srch_elt.mesh_element.z]
                derived = mesh_srch_elt.derived
                # Order now matches the CSV header (distance, energy, time);
                # previously the energy and time columns were swapped.
                row_entry += [derived['pathlength'], derived['energy'], derived['time']]
                sequence += [row_entry]
        return sequence
| 34.508333 | 94 | 0.576914 | from pextant.lib.geoshapely import GeoPolygon, LONG_LAT
import numpy as np
import csv
class SEXTANTSolver(object):
    """Base class for SEXTANT route solvers; subclasses implement solve()."""
    def __init__(self, environmental_model, cost_function, viz):
        self.env_model = environmental_model
        self.cost_function = cost_function
        self.viz = viz
        self.searches = []
    def solve(self, start_point, end_point):
        """Solve a single start->end leg; overridden by concrete solvers."""
        pass
    def solvemultipoint(self, waypoints):
        """Solve each consecutive waypoint pair and collect the results.

        Returns a tuple of (sextantSearchList, concatenated raw points,
        concatenated expanded items).
        """
        search_list = sextantSearchList(waypoints)
        for i in range(len(waypoints) - 1):
            search_result = self.solve(waypoints[i], waypoints[i + 1])
            search_list.append(search_result)
        return search_list, search_list.raw(), search_list.itemssrchd()
class sextantSearchList(object):
    """Ordered collection of per-leg search results for a multi-waypoint route."""
    def __init__(self, points):
        self.startpoint = points[0]
        self.endpoint = points[-1]
        self.waypoints = points
        self.list = []
        self.rawpoints = []
    def addresult(self, raw, nodes, coordinates, expanded_items):
        """Wrap raw solver output in a sextantSearch and store it."""
        self.list.append(sextantSearch(raw, nodes, coordinates, expanded_items))
    def append(self, sextantsearch):
        """Store an already-built search result (or a bool failure marker)."""
        self.list.append(sextantsearch)
    def raw(self):
        """Concatenated raw path points of all legs, or None if any leg failed."""
        result = []
        for search in self.list:
            # NOTE(review): inconsistent with coordinates()/itemssrchd(),
            # which test type(search) == bool; this check misses a True entry.
            if search == False:
                return None
            result += search.raw
        return np.array(result)
    def coordinates(self):
        """All path coordinates merged into a GeoPolygon, or None on failure."""
        result = []
        for search in self.list:
            if type(search) == bool:
                return None
            result += search.coordinates.to(LONG_LAT).transpose().tolist()
        return GeoPolygon(LONG_LAT, *np.array(result).transpose())
    def itemssrchd(self):
        """All expanded (searched) items across legs, or None on failure."""
        result = []
        for search in self.list:
            if type(search) == bool:
                return None
            result += search.expanded_items
        return np.array(result)
    def tojson(self, save=False):
        """Per-leg JSON dicts (see sextantSearch.tojson); ``save`` is unused."""
        return [elt.tojson() for elt in self.list]
    def tocsv(self, filepath=None):
        """Per-leg CSV rows; optionally write them with a header to filepath."""
        csvlist = [elt.tocsv() for elt in self.list]
        rows = [['isStation', 'x', 'y', 'z', 'distanceMeters', 'energyJoules', 'timeSeconds']]
        for row in csvlist:
            rows += row
        if filepath:
            # NOTE(review): 'wb' is a Python 2 idiom; Python 3's csv writer
            # needs text mode, i.e. open(filepath, 'w', newline='').
            with open(filepath, 'wb') as csvfile:
                writer = csv.writer(csvfile)
                for row in rows:
                    writer.writerow(row)
        return csvlist
class sextantSearch(object):
    """Result of a single solved route leg (path points, coordinates, metrics)."""
    def __init__(self, raw, nodes, coordinates, expanded_items):
        # Maps each derived metric to its [per-step list, total] JSON field names.
        self.namemap = {
            'time': ['timeList','totalTime'],
            'pathlength': ['distanceList','totalDistance'],
            'energy': ['energyList','totalEnergy']
        }
        self.nodes = nodes
        self.raw = raw
        self.npraw = np.array(raw).transpose()
        self.coordinates = coordinates
        self.expanded_items = expanded_items
    def tojson(self):
        """Return a dict with LineString geometry plus per-step and total metrics."""
        out = {}
        coordinates = self.coordinates.to(LONG_LAT).transpose().tolist()
        out["geometry"] = {
            'type': 'LineString',
            'coordinates': coordinates
        }
        results = {}
        for k, v in self.namemap.items():
            results.update({v[0]:[],v[1]:0})
        for i, mesh_srch_elt in enumerate(self.nodes):
            derived = mesh_srch_elt.derived
            for k, v in derived.items():
                results[self.namemap[k][0]].append(v)
        for k, v in self.namemap.items():
            results[v[1]] = sum(results[v[0]])
        out["derivedInfo"] = results
        return out
    def tocsv(self, coordstype=LONG_LAT):
        """Return one CSV row per path step (the first node is skipped)."""
        sequence = []
        coords = self.coordinates.to(coordstype).transpose().tolist()
        for i, mesh_srch_elt in enumerate(self.nodes):
            if i != 0:
                # True only for the first and last emitted rows (stations).
                row_entry = [i==1 or i==len(coords)-1]
                row_entry += coords[i] + [mesh_srch_elt.mesh_element.z]
                derived = mesh_srch_elt.derived
                # NOTE(review): order here is pathlength, time, energy, but the
                # header written by sextantSearchList.tocsv is distanceMeters,
                # energyJoules, timeSeconds - time/energy columns look swapped.
                row_entry += [derived['pathlength'], derived['time'], derived['energy']]
                sequence += [row_entry]
        return sequence
| true | true |
f71baa9da47252c881d3e4b3537037ac3e200836 | 3,658 | py | Python | models/client.py | dssaenzml/federated_learning_nlp | b48fbeb3e78af5971885337203504c017ef1553b | [
"BSD-2-Clause"
] | 2 | 2021-05-17T05:30:50.000Z | 2021-05-18T16:20:10.000Z | models/client.py | dssaenzml/federated_learning_nlp | b48fbeb3e78af5971885337203504c017ef1553b | [
"BSD-2-Clause"
] | null | null | null | models/client.py | dssaenzml/federated_learning_nlp | b48fbeb3e78af5971885337203504c017ef1553b | [
"BSD-2-Clause"
] | 1 | 2021-07-10T21:07:01.000Z | 2021-07-10T21:07:01.000Z | import random
import warnings
class Client:
    """A federated-learning client holding local train/eval data and a model."""
    # Sentinel so an explicit ``None`` argument (meaning "no data", which the
    # num_*_samples properties understand) stays distinguishable from "omitted".
    _NO_DATA = object()
    def __init__(self, client_id, group=None, train_data=_NO_DATA, eval_data=_NO_DATA, model=None):
        self._model = model
        self.id = client_id
        self.group = group
        # Create a fresh dataset dict per instance instead of using mutable
        # default arguments, which would be shared across every Client.
        self.train_data = {'x': [], 'y': []} if train_data is Client._NO_DATA else train_data
        self.eval_data = {'x': [], 'y': []} if eval_data is Client._NO_DATA else eval_data
    def train(self, num_epochs=1, batch_size=10, minibatch=None):
        """Trains on self.model using the client's train_data.

        Args:
            num_epochs: Number of epochs to train. Unsupported if minibatch is
                provided (minibatch has only 1 epoch).
            batch_size: Size of training batches.
            minibatch: fraction of client's data to apply minibatch sgd,
                None to use FedAvg
        Return:
            comp: number of FLOPs executed in training process
            num_train_samples: number of samples used in training
            update: set of weights
        """
        if minibatch is None:
            data = self.train_data
            comp, update = self.model.train(data, num_epochs, batch_size)
        else:
            frac = min(1.0, minibatch)
            num_data = max(1, int(frac * len(self.train_data["x"])))
            xs, ys = zip(*random.sample(
                list(zip(self.train_data["x"], self.train_data["y"])), num_data))
            data = {'x': xs, 'y': ys}
            # Minibatch trains for only 1 epoch - multiple local epochs
            # don't make sense!
            comp, update = self.model.train(data, 1, num_data)
        num_train_samples = len(data['y'])
        return comp, num_train_samples, update
    def test(self, set_to_use='test'):
        """Tests self.model on this client's data.

        Args:
            set_to_use: Set to test on. Should be in ['train', 'test', 'val'];
                both 'test' and 'val' evaluate on eval_data.
        Return:
            dict of metrics returned by the model.
        """
        assert set_to_use in ['train', 'test', 'val']
        if set_to_use == 'train':
            data = self.train_data
        else:
            data = self.eval_data
        return self.model.test(data)
    @property
    def num_test_samples(self):
        """Number of test samples for this client (0 if eval_data is None)."""
        if self.eval_data is None:
            return 0
        return len(self.eval_data['y'])
    @property
    def num_train_samples(self):
        """Number of train samples for this client (0 if train_data is None)."""
        if self.train_data is None:
            return 0
        return len(self.train_data['y'])
    @property
    def num_samples(self):
        """Total number of train + test samples for this client."""
        train_size = 0
        if self.train_data is not None:
            train_size = len(self.train_data['y'])
        test_size = 0
        if self.eval_data is not None:
            test_size = len(self.eval_data['y'])
        return train_size + test_size
    @property
    def model(self):
        """Returns this client's reference to the model being trained."""
        return self._model
    @model.setter
    def model(self, model):
        warnings.warn('The current implementation shares the model among all clients.'
                      'Setting it on one client will effectively modify all clients.')
        self._model = model
| 34.509434 | 122 | 0.563423 | import random
import warnings
class Client:
    """A federated-learning client holding local train/eval data and a model."""
    # NOTE(review): mutable default arguments - the two dict literals below are
    # created once at def time and shared by every Client built without data.
    def __init__(self, client_id, group=None, train_data={'x' : [],'y' : []}, eval_data={'x' : [],'y' : []}, model=None):
        self._model = model
        self.id = client_id
        self.group = group
        self.train_data = train_data
        self.eval_data = eval_data
    def train(self, num_epochs=1, batch_size=10, minibatch=None):
        """Trains on self.model using the client's train_data.

        Args:
            num_epochs: Number of epochs to train. Unsupported if minibatch is
                provided (minibatch has only 1 epoch).
            batch_size: Size of training batches.
            minibatch: fraction of client's data to apply minibatch sgd,
                None to use FedAvg.
        Return:
            (comp, num_train_samples, update) as produced by the model.
        """
        if minibatch is None:
            data = self.train_data
            comp, update = self.model.train(data, num_epochs, batch_size)
        else:
            frac = min(1.0, minibatch)
            num_data = max(1, int(frac*len(self.train_data["x"])))
            xs, ys = zip(*random.sample(list(zip(self.train_data["x"], self.train_data["y"])), num_data))
            data = {'x': xs, 'y': ys}
            # Minibatch trains for only 1 epoch regardless of num_epochs.
            num_epochs = 1
            comp, update = self.model.train(data, num_epochs, num_data)
        num_train_samples = len(data['y'])
        return comp, num_train_samples, update
    def test(self, set_to_use='test'):
        """Tests self.model on this client's data.

        Args:
            set_to_use: one of 'train', 'test', 'val' ('test' and 'val'
                both evaluate on eval_data).
        Return:
            dict of metrics returned by the model.
        """
        assert set_to_use in ['train', 'test', 'val']
        if set_to_use == 'train':
            data = self.train_data
        elif set_to_use == 'test' or set_to_use == 'val':
            data = self.eval_data
        return self.model.test(data)
    @property
    def num_test_samples(self):
        """Number of test samples for this client (0 if eval_data is None)."""
        if self.eval_data is None:
            return 0
        return len(self.eval_data['y'])
    @property
    def num_train_samples(self):
        """Number of train samples for this client (0 if train_data is None)."""
        if self.train_data is None:
            return 0
        return len(self.train_data['y'])
    @property
    def num_samples(self):
        """Total number of train + test samples for this client."""
        train_size = 0
        if self.train_data is not None:
            train_size = len(self.train_data['y'])
        test_size = 0
        if self.eval_data is not None:
            test_size = len(self.eval_data['y'])
        return train_size + test_size
    @property
    def model(self):
        """Returns this client's reference to the model being trained."""
        return self._model
    @model.setter
    def model(self, model):
        warnings.warn('The current implementation shares the model among all clients.'
                      'Setting it on one client will effectively modify all clients.')
        self._model = model
| true | true |
f71bab718e68dcdd90ff7a41d77e7394ed89c45b | 1,236 | py | Python | tests/test_views.py | kingsdigitallab/django-geonames-place | 2484abaee6896bafe2f86e93bffca634073a6d3b | [
"MIT"
] | 3 | 2019-06-30T08:13:38.000Z | 2020-06-09T22:30:17.000Z | tests/test_views.py | kingsdigitallab/django-geonames-place | 2484abaee6896bafe2f86e93bffca634073a6d3b | [
"MIT"
] | null | null | null | tests/test_views.py | kingsdigitallab/django-geonames-place | 2484abaee6896bafe2f86e93bffca634073a6d3b | [
"MIT"
] | null | null | null | """
test_django-geonames-place
------------
Tests for `django-geonames-place` views module.
"""
import unittest
from django.conf import settings
from django.test import Client, TestCase
from django.urls import reverse
from geonames_place.models import Place
@unittest.skipUnless(
    settings.GEONAMES_KEY, 'No GEONAMES_KEY environment variable set')
class TestGeonamesPlaceViews(TestCase):
    """Integration tests for the geonames_place autocomplete view."""

    def setUp(self):
        self.geonames_id = 2635167
        self.geonames_address = 'United Kingdom'
        self.address = 'London'

    def test_autocomplete_view(self):
        """Autocomplete persists Geonames matches into the Place model."""
        # Nothing stored yet, so a short prefix yields no results.
        self.assertEqual(Place.objects.count(), 0)

        http = Client()
        url = reverse('place_autocomplete')
        empty = http.get(url, {'term': 'Lo'})
        self.assertEqual(empty.json()['results'], [])

        # Seed a single known place.
        seeded = Place(geonames_id=self.geonames_id)
        seeded.save()
        self.assertEqual(Place.objects.count(), 1)

        found = http.get(url, {'term': 'London'})
        self.assertNotEqual(len(found.json()['results']), 0)
        # Matching places fetched from Geonames are stored locally, so the
        # table should now hold more than the single seeded Place.
        self.assertTrue(Place.objects.count() > 1)
| 30.146341 | 70 | 0.661812 |
import unittest
from django.conf import settings
from django.test import Client, TestCase
from django.urls import reverse
from geonames_place.models import Place
@unittest.skipUnless(
    settings.GEONAMES_KEY, 'No GEONAMES_KEY environment variable set')
class TestGeonamesPlaceViews(TestCase):
    """Integration tests for the geonames_place autocomplete view."""
    def setUp(self):
        self.geonames_id = 2635167
        self.geonames_address = 'United Kingdom'
        self.address = 'London'
    def test_autocomplete_view(self):
        """Autocomplete persists Geonames matches into the Place model."""
        # Nothing stored yet, so a short prefix yields no results.
        self.assertEqual(Place.objects.count(), 0)
        url = reverse('place_autocomplete')
        c = Client()
        response = c.get(url, {'term': 'Lo'})
        self.assertEqual(response.json()['results'], [])
        # Seed a single known place, then search again.
        p = Place(geonames_id=self.geonames_id)
        p.save()
        self.assertEqual(Place.objects.count(), 1)
        response = c.get(url, {'term': 'London'})
        self.assertNotEqual(len(response.json()['results']), 0)
        # Matching places fetched from Geonames are stored locally, so the
        # table should now hold more than the single seeded Place.
        self.assertTrue(Place.objects.count() > 1)
| true | true |
f71bac9cf6ccf566ff63cc6598f04ef5c0efd656 | 1,978 | py | Python | tests/test_security_api_key_header_optional.py | jfunez/fastapi | 7372f6ba11abb515a7f11814dba52a1d1c0925f0 | [
"MIT"
] | 2 | 2020-04-09T07:11:28.000Z | 2020-12-12T14:04:35.000Z | tests/test_security_api_key_header_optional.py | jfunez/fastapi | 7372f6ba11abb515a7f11814dba52a1d1c0925f0 | [
"MIT"
] | 1 | 2021-03-27T18:37:32.000Z | 2021-05-25T15:08:24.000Z | tests/test_security_api_key_header_optional.py | jfunez/fastapi | 7372f6ba11abb515a7f11814dba52a1d1c0925f0 | [
"MIT"
] | 1 | 2021-02-03T00:43:04.000Z | 2021-02-03T00:43:04.000Z | from typing import Optional
from fastapi import Depends, FastAPI, Security
from fastapi.security import APIKeyHeader
from fastapi.testclient import TestClient
from pydantic import BaseModel
app = FastAPI()
# auto_error=False makes the "key" header optional: when it is missing the
# dependency receives None instead of FastAPI raising a 403 automatically.
api_key = APIKeyHeader(name="key", auto_error=False)
class User(BaseModel):
    # Response model carrying only the username resolved from the API key.
    username: str
def get_current_user(oauth_header: Optional[str] = Security(api_key)):
    """Resolve the current user from the optional API key header.

    Returns None when no header was sent, otherwise a User whose
    username is the raw header value.
    """
    return None if oauth_header is None else User(username=oauth_header)
@app.get("/users/me")
def read_current_user(current_user: Optional[User] = Depends(get_current_user)):
    """Return the authenticated user, or a hint when no API key was sent."""
    if current_user is not None:
        return current_user
    return {"msg": "Create an account first"}
client = TestClient(app)
# Expected OpenAPI document for the app above; note the APIKeyHeader
# security scheme and the security requirement on GET /users/me.
openapi_schema = {
    "openapi": "3.0.2",
    "info": {"title": "FastAPI", "version": "0.1.0"},
    "paths": {
        "/users/me": {
            "get": {
                "responses": {
                    "200": {
                        "description": "Successful Response",
                        "content": {"application/json": {"schema": {}}},
                    }
                },
                "summary": "Read Current User",
                "operationId": "read_current_user_users_me_get",
                "security": [{"APIKeyHeader": []}],
            }
        }
    },
    "components": {
        "securitySchemes": {
            "APIKeyHeader": {"type": "apiKey", "name": "key", "in": "header"}
        }
    },
}
def test_openapi_schema():
    """The generated schema must match the expected document exactly."""
    res = client.get("/openapi.json")
    assert res.status_code == 200
    assert res.json() == openapi_schema
def test_security_api_key():
    """A provided "key" header becomes the username of the returned user."""
    res = client.get("/users/me", headers={"key": "secret"})
    assert res.status_code == 200
    assert res.json() == {"username": "secret"}
def test_security_api_key_no_key():
    """A missing header is allowed (auto_error=False) and yields the hint."""
    res = client.get("/users/me")
    assert res.status_code == 200
    assert res.json() == {"msg": "Create an account first"}
| 26.373333 | 80 | 0.592518 | from typing import Optional
from fastapi import Depends, FastAPI, Security
from fastapi.security import APIKeyHeader
from fastapi.testclient import TestClient
from pydantic import BaseModel
app = FastAPI()
# auto_error=False makes the "key" header optional: when it is missing the
# dependency receives None instead of FastAPI raising a 403 automatically.
api_key = APIKeyHeader(name="key", auto_error=False)
class User(BaseModel):
    # Response model carrying only the username resolved from the API key.
    username: str
def get_current_user(oauth_header: Optional[str] = Security(api_key)):
    """Resolve the current user from the optional API key header; None if absent."""
    if oauth_header is None:
        return None
    user = User(username=oauth_header)
    return user
@app.get("/users/me")
def read_current_user(current_user: Optional[User] = Depends(get_current_user)):
    """Return the authenticated user, or a hint when no API key was sent."""
    if current_user is None:
        return {"msg": "Create an account first"}
    return current_user
client = TestClient(app)
# Expected OpenAPI document for the app above; note the APIKeyHeader
# security scheme and the security requirement on GET /users/me.
openapi_schema = {
    "openapi": "3.0.2",
    "info": {"title": "FastAPI", "version": "0.1.0"},
    "paths": {
        "/users/me": {
            "get": {
                "responses": {
                    "200": {
                        "description": "Successful Response",
                        "content": {"application/json": {"schema": {}}},
                    }
                },
                "summary": "Read Current User",
                "operationId": "read_current_user_users_me_get",
                "security": [{"APIKeyHeader": []}],
            }
        }
    },
    "components": {
        "securitySchemes": {
            "APIKeyHeader": {"type": "apiKey", "name": "key", "in": "header"}
        }
    },
}
def test_openapi_schema():
    # The generated schema must match the expected document exactly.
    response = client.get("/openapi.json")
    assert response.status_code == 200
    assert response.json() == openapi_schema
def test_security_api_key():
    # A provided "key" header becomes the username of the returned user.
    response = client.get("/users/me", headers={"key": "secret"})
    assert response.status_code == 200
    assert response.json() == {"username": "secret"}
def test_security_api_key_no_key():
    # A missing header is allowed (auto_error=False) and yields the hint.
    response = client.get("/users/me")
    assert response.status_code == 200
    assert response.json() == {"msg": "Create an account first"}
| true | true |
f71bacf6ae00989a3846b6e65f0871f7161f845d | 5,427 | py | Python | docqa/elmo/ablate_elmo_sub_filter.py | Willyoung2017/doc-qa | 7ee02218952b0b9db63bc82b3895f743cdbd8f22 | [
"Apache-2.0"
] | null | null | null | docqa/elmo/ablate_elmo_sub_filter.py | Willyoung2017/doc-qa | 7ee02218952b0b9db63bc82b3895f743cdbd8f22 | [
"Apache-2.0"
] | null | null | null | docqa/elmo/ablate_elmo_sub_filter.py | Willyoung2017/doc-qa | 7ee02218952b0b9db63bc82b3895f743cdbd8f22 | [
"Apache-2.0"
] | null | null | null | import argparse
from datetime import datetime
from tensorflow.contrib.keras.python.keras.initializers import TruncatedNormal
from docqa import trainer
from docqa.data_processing.qa_training_data import ContextLenKey
from docqa.dataset import ClusteredBatcher
from docqa.encoder import DocumentAndQuestionEncoder, SingleSpanAnswerEncoder, DocumentAndQuestionEncoderWithSubstring
from docqa.evaluator import LossEvaluator, SpanEvaluator
from docqa.elmo.elmo import ElmoLayer
from docqa.elmo.lm_qa_models import AttentionWithElmo, SquadContextConcatSkip
from docqa.model_dir import ModelDir
from docqa.nn.attention import BiAttention, StaticAttentionSelf
from docqa.nn.embedder import FixedWordEmbedder, CharWordEmbedder, LearnedCharEmbedder, LearnedSubstringEmbedder, \
FilteredFixedWordEmbedder
from docqa.nn.layers import FullyConnected, ChainBiMapper, NullBiMapper, MaxPool, Conv1d, SequenceMapperSeq, \
VariationalDropoutLayer, ResidualLayer, ConcatWithProduct, MapperSeq, DropoutLayer
from docqa.nn.recurrent_layers import CudnnGru
from docqa.nn.similarity_layers import TriLinear
from docqa.nn.span_prediction import BoundsPredictor
from docqa.squad.squad_data import SquadCorpus, DocumentQaTrainingData
def main():
    """Train the ELMo-augmented DocumentQA model on SQuAD.

    Command-line flags choose the GRU dimension, the L2 penalty on the ELMo
    mixing weights, where the language-model states are appended ("input",
    "output", "both" or "none"), and an optional substring vocabulary for
    the character embedder.  The trained model is written to
    ``<output_dir>-<MMDD-HHMMSS>``.
    """
    parser = argparse.ArgumentParser("Train our ELMo model on SQuAD")
    parser.add_argument("output_dir")
    parser.add_argument("--dim", type=int, default=90)
    parser.add_argument("--l2", type=float, default=0)
    parser.add_argument("--mode", choices=["input", "output", "both", "none"], default="both")
    parser.add_argument("--top_layer_only", action="store_true")
    # The *string* "None" (not the None singleton) is the sentinel for
    # "no substring embeddings" -- argparse always yields a str here.
    parser.add_argument("--use_substring", type=str, default="None")
    parser.add_argument("--sub_dim", type=int, default=50)
    args = parser.parse_args()
    print(args)

    out = args.output_dir + "-" + datetime.now().strftime("%m%d-%H%M%S")

    dim = args.dim
    recurrent_layer = CudnnGru(dim, w_init=TruncatedNormal(stddev=0.05))

    params = trainer.TrainParams(trainer.SerializableOptimizer("Adadelta", dict(learning_rate=1.0)),
                                 ema=0.999, max_checkpoints_to_keep=2, async_encoding=10,
                                 num_epochs=24, log_period=30, eval_period=1200, save_period=1200,
                                 best_weights=("dev", "b17/text-f1"),
                                 eval_samples=dict(dev=None, train=8000))

    lm_reduce = MapperSeq(
        ElmoLayer(args.l2, layer_norm=False, top_layer_only=args.top_layer_only),
        DropoutLayer(0.5),
    )

    CharEmbedderCls, EncoderCls = (LearnedCharEmbedder, DocumentAndQuestionEncoder) if args.use_substring == "None" \
        else (LearnedSubstringEmbedder, DocumentAndQuestionEncoderWithSubstring)
    charEmbedder = CharEmbedderCls(word_size_th=14, char_th=20, char_dim=args.sub_dim, init_scale=0.05, force_cpu=True)
    # BUG FIX: the original tested `args.use_substring != None`, which is
    # always True because argparse returns a string; that loaded a vocab
    # from the literal path "None" even when substring embeddings were
    # disabled.  Compare against the "None" sentinel string instead,
    # mirroring the class selection above.
    if args.use_substring != "None":
        charEmbedder._load_substring_vocab(args.use_substring)

    final_sub_dim = 100

    model = AttentionWithElmo(
        encoder=EncoderCls(SingleSpanAnswerEncoder()),
        lm_model=SquadContextConcatSkip(),
        append_before_atten=(args.mode == "both" or args.mode == "output"),
        append_embed=(args.mode == "both" or args.mode == "input"),
        max_batch_size=128,
        word_embed=FilteredFixedWordEmbedder(vec_name="glove.840B.300d", word_vec_init_scale=0, learn_unk=True, cpu=True),
        char_embed=CharWordEmbedder(
            charEmbedder,
            MaxPool(Conv1d(final_sub_dim, 5, 0.8)),
            shared_parameters=True
        ),
        embed_mapper=SequenceMapperSeq(
            VariationalDropoutLayer(0.8),
            recurrent_layer,
            VariationalDropoutLayer(0.8),
        ),
        lm_reduce=None,
        lm_reduce_shared=lm_reduce,
        per_sentence=False,
        memory_builder=NullBiMapper(),
        attention=BiAttention(TriLinear(bias=True), True),
        match_encoder=SequenceMapperSeq(FullyConnected(dim * 2, activation="relu"),
                                        ResidualLayer(SequenceMapperSeq(
                                            VariationalDropoutLayer(0.8),
                                            recurrent_layer,
                                            VariationalDropoutLayer(0.8),
                                            StaticAttentionSelf(TriLinear(bias=True), ConcatWithProduct()),
                                            FullyConnected(dim * 2, activation="relu"),
                                        )),
                                        VariationalDropoutLayer(0.8)),
        predictor=BoundsPredictor(ChainBiMapper(
            first_layer=recurrent_layer,
            second_layer=recurrent_layer
        ))
    )

    batcher = ClusteredBatcher(45, ContextLenKey(), False, False)
    data = DocumentQaTrainingData(SquadCorpus(), None, batcher, batcher)

    # Store this script's source (plus the parsed flags) as training notes.
    with open(__file__, "r") as f:
        notes = f.read()
    notes = str(sorted(args.__dict__.items(), key=lambda x: x[0])) + "\n" + notes

    trainer.start_training(data, model, params,
                           [LossEvaluator(), SpanEvaluator(bound=[17], text_eval="squad")],
                           ModelDir(out), notes)
if __name__ == "__main__":
main() | 49.336364 | 122 | 0.66206 | import argparse
from datetime import datetime
from tensorflow.contrib.keras.python.keras.initializers import TruncatedNormal
from docqa import trainer
from docqa.data_processing.qa_training_data import ContextLenKey
from docqa.dataset import ClusteredBatcher
from docqa.encoder import DocumentAndQuestionEncoder, SingleSpanAnswerEncoder, DocumentAndQuestionEncoderWithSubstring
from docqa.evaluator import LossEvaluator, SpanEvaluator
from docqa.elmo.elmo import ElmoLayer
from docqa.elmo.lm_qa_models import AttentionWithElmo, SquadContextConcatSkip
from docqa.model_dir import ModelDir
from docqa.nn.attention import BiAttention, StaticAttentionSelf
from docqa.nn.embedder import FixedWordEmbedder, CharWordEmbedder, LearnedCharEmbedder, LearnedSubstringEmbedder, \
FilteredFixedWordEmbedder
from docqa.nn.layers import FullyConnected, ChainBiMapper, NullBiMapper, MaxPool, Conv1d, SequenceMapperSeq, \
VariationalDropoutLayer, ResidualLayer, ConcatWithProduct, MapperSeq, DropoutLayer
from docqa.nn.recurrent_layers import CudnnGru
from docqa.nn.similarity_layers import TriLinear
from docqa.nn.span_prediction import BoundsPredictor
from docqa.squad.squad_data import SquadCorpus, DocumentQaTrainingData
def main():
    """Train the ELMo-augmented DocumentQA model on SQuAD.

    Command-line flags choose the GRU dimension, the L2 penalty on the ELMo
    mixing weights, where the language-model states are appended, and an
    optional substring vocabulary for the character embedder.  The trained
    model is written to ``<output_dir>-<MMDD-HHMMSS>``.
    """
    parser = argparse.ArgumentParser("Train our ELMo model on SQuAD")
    parser.add_argument("output_dir")
    parser.add_argument("--dim", type=int, default=90)
    parser.add_argument("--l2", type=float, default=0)
    parser.add_argument("--mode", choices=["input", "output", "both", "none"], default="both")
    parser.add_argument("--top_layer_only", action="store_true")
    # The *string* "None" is the sentinel for "no substring embeddings".
    parser.add_argument("--use_substring", type=str, default="None")
    parser.add_argument("--sub_dim", type=int, default=50)
    args = parser.parse_args()
    print(args)

    out = args.output_dir + "-" + datetime.now().strftime("%m%d-%H%M%S")

    dim = args.dim
    recurrent_layer = CudnnGru(dim, w_init=TruncatedNormal(stddev=0.05))

    params = trainer.TrainParams(trainer.SerializableOptimizer("Adadelta", dict(learning_rate=1.0)),
                                 ema=0.999, max_checkpoints_to_keep=2, async_encoding=10,
                                 num_epochs=24, log_period=30, eval_period=1200, save_period=1200,
                                 best_weights=("dev", "b17/text-f1"),
                                 eval_samples=dict(dev=None, train=8000))

    lm_reduce = MapperSeq(
        ElmoLayer(args.l2, layer_norm=False, top_layer_only=args.top_layer_only),
        DropoutLayer(0.5),
    )

    CharEmbedderCls, EncoderCls = (LearnedCharEmbedder, DocumentAndQuestionEncoder) if args.use_substring == "None" \
        else (LearnedSubstringEmbedder, DocumentAndQuestionEncoderWithSubstring)
    charEmbedder = CharEmbedderCls(word_size_th=14, char_th=20, char_dim=args.sub_dim, init_scale=0.05, force_cpu=True)
    # BUG FIX: `args.use_substring != None` was always True (argparse returns
    # a string), so a vocab was loaded from the literal path "None" even when
    # substring embeddings were disabled.  Compare against the "None"
    # sentinel string, matching the class selection above.
    if args.use_substring != "None":
        charEmbedder._load_substring_vocab(args.use_substring)

    final_sub_dim = 100

    model = AttentionWithElmo(
        encoder=EncoderCls(SingleSpanAnswerEncoder()),
        lm_model=SquadContextConcatSkip(),
        append_before_atten=(args.mode == "both" or args.mode == "output"),
        append_embed=(args.mode == "both" or args.mode == "input"),
        max_batch_size=128,
        word_embed=FilteredFixedWordEmbedder(vec_name="glove.840B.300d", word_vec_init_scale=0, learn_unk=True, cpu=True),
        char_embed=CharWordEmbedder(
            charEmbedder,
            MaxPool(Conv1d(final_sub_dim, 5, 0.8)),
            shared_parameters=True
        ),
        embed_mapper=SequenceMapperSeq(
            VariationalDropoutLayer(0.8),
            recurrent_layer,
            VariationalDropoutLayer(0.8),
        ),
        lm_reduce=None,
        lm_reduce_shared=lm_reduce,
        per_sentence=False,
        memory_builder=NullBiMapper(),
        attention=BiAttention(TriLinear(bias=True), True),
        match_encoder=SequenceMapperSeq(FullyConnected(dim * 2, activation="relu"),
                                        ResidualLayer(SequenceMapperSeq(
                                            VariationalDropoutLayer(0.8),
                                            recurrent_layer,
                                            VariationalDropoutLayer(0.8),
                                            StaticAttentionSelf(TriLinear(bias=True), ConcatWithProduct()),
                                            FullyConnected(dim * 2, activation="relu"),
                                        )),
                                        VariationalDropoutLayer(0.8)),
        predictor=BoundsPredictor(ChainBiMapper(
            first_layer=recurrent_layer,
            second_layer=recurrent_layer
        ))
    )

    batcher = ClusteredBatcher(45, ContextLenKey(), False, False)
    data = DocumentQaTrainingData(SquadCorpus(), None, batcher, batcher)

    # Store this script's source (plus the parsed flags) as training notes.
    with open(__file__, "r") as f:
        notes = f.read()
    notes = str(sorted(args.__dict__.items(), key=lambda x: x[0])) + "\n" + notes

    trainer.start_training(data, model, params,
                           [LossEvaluator(), SpanEvaluator(bound=[17], text_eval="squad")],
                           ModelDir(out), notes)
if __name__ == "__main__":
main() | true | true |
f71badae1870e808949da29b629a3e7d4dc4fa3d | 4,753 | py | Python | i18n/i18n.py | AshleyFires/krux | 80de3ee0df5e7147dd32c10967cac7c3e6aecd09 | [
"MIT"
] | null | null | null | i18n/i18n.py | AshleyFires/krux | 80de3ee0df5e7147dd32c10967cac7c3e6aecd09 | [
"MIT"
] | null | null | null | i18n/i18n.py | AshleyFires/krux | 80de3ee0df5e7147dd32c10967cac7c3e6aecd09 | [
"MIT"
] | null | null | null | # The MIT License (MIT)
# Copyright (c) 2021 Tom J. Sun
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
import sys
import json
from os import listdir, walk
from os.path import isfile, join
import re
import shutil
SRC_DIR = '../src'
TRANSLATION_FILES_DIR = 'translations'
def find_translation_slugs():
    """Collect every translatable slug found in the source tree.

    Walks SRC_DIR for .py files and extracts each string wrapped in the
    marker pattern ( 'string' ).  Returns a dict mapping slug -> True,
    used as an ordered set.
    """
    slug_pattern = re.compile(r'\( \'(.+?)\' \)')
    slugs = {}
    for dirpath, _, filenames in walk(SRC_DIR):
        for name in filenames:
            if not name.endswith('.py'):
                continue
            with open(join(dirpath, name), 'r') as source:
                for slug in slug_pattern.findall(source.read()):
                    slugs[slug] = True
    return slugs
def load_translations(translation_file):
    """Parse a JSON translation file, escaping real newlines as literal '\\n'.

    Both slugs (keys) and translations (values) are rewritten so they match
    the escaped form used by the in-source markers.
    """
    raw = json.load(translation_file)
    return {
        slug.replace('\n', '\\n'): text.replace('\n', '\\n')
        for slug, text in raw.items()
    }
def main():
    """Command line entry point.

    Sub-commands (chosen by ``sys.argv[1]``):
      validate                  -- check every translation file against the
                                   slugs found in the source tree
      new <locale>              -- write an empty translation file for locale
      translate <locale> <dir>  -- rewrite .py sources under <dir>, replacing
                                   each ( 'slug' ) marker with its translation
    """
    slugs = find_translation_slugs()
    if sys.argv[1] == 'validate':
        _validate_translation_files(slugs)
    elif sys.argv[1] == 'new':
        _create_empty_translation_file(slugs, sys.argv[2])
    elif sys.argv[1] == 'translate':
        _translate_sources(sys.argv[2], sys.argv[3])


def _validate_translation_files(slugs):
    """Print missing/unnecessary entries for every file in the translations dir."""
    translation_filenames = [
        f for f in listdir(TRANSLATION_FILES_DIR)
        if isfile(join(TRANSLATION_FILES_DIR, f))
    ]
    for translation_filename in translation_filenames:
        print('Validating %s...' % translation_filename)
        valid = True
        with open(join(TRANSLATION_FILES_DIR, translation_filename), 'r') as translation_file:
            translations = load_translations(translation_file)
        for slug in slugs:
            if slug not in translations or translations[slug] == '':
                print('Missing translation for "%s"' % slug)
                valid = False
        for translation_slug in translations:
            if translation_slug not in slugs:
                print('Unnecessary translation for "%s"' % translation_slug)
                valid = False
        if valid:
            print('OK')


def _create_empty_translation_file(slugs, locale):
    """Write translations/<locale>.json with every slug mapped to an empty value."""
    translations = {}
    for slug in slugs:
        # Slugs are held with literal backslash-n; real newlines go to disk.
        translations[slug.replace('\\n', '\n')] = ''
    with open(join(TRANSLATION_FILES_DIR, '%s.json' % locale), 'w') as translation_file:
        translation_file.write(json.dumps(translations, sort_keys=True, indent=4))


def _translate_sources(locale, output_dir):
    """Replace every ( 'slug' ) marker in .py files under output_dir, in place."""
    with open(join(TRANSLATION_FILES_DIR, '%s.json' % locale), 'r') as translation_file:
        translations = load_translations(translation_file)
    for (dirpath, _, filenames) in walk(output_dir):
        for filename in filenames:
            if not filename.endswith('.py'):
                continue
            with open(join(dirpath, filename), 'r') as src_file:
                contents = src_file.read()
            for slug, translation in translations.items():
                contents = contents.replace(
                    '( \'%s\' )' % slug,
                    '"""%s"""' % translation
                )
            # Write to a temp file first, then move it over the original.
            with open(join(dirpath, filename + '.tmp'), 'w') as tmp_src_file:
                tmp_src_file.write(contents)
            shutil.move(join(dirpath, filename + '.tmp'), join(dirpath, filename))
if __name__ == '__main__':
main()
| 43.605505 | 98 | 0.606775 |
import sys
import json
from os import listdir, walk
from os.path import isfile, join
import re
import shutil
SRC_DIR = '../src'
TRANSLATION_FILES_DIR = 'translations'
def find_translation_slugs():
    """Scan every .py file under SRC_DIR and collect the translatable slugs.

    A slug is any string wrapped in the marker pattern ( 'string' ).
    Returns a dict mapping slug -> True (used as an ordered set).
    """
    slugs = {}
    for (dirpath, _, filenames) in walk(SRC_DIR):
        for filename in filenames:
            if not filename.endswith('.py'):
                continue
            with open(join(dirpath, filename), 'r') as src_file:
                contents = src_file.read()
                for match in re.findall(r'\( \'(.+?)\' \)', contents):
                    slugs[match] = True
    return slugs
def load_translations(translation_file):
    """Load a JSON translation map, escaping real newlines as literal '\\n'."""
    translations = json.load(translation_file)
    # Rewrite keys and values in place so newlines match the escaped form
    # used by the in-source ( 'slug' ) markers.
    for slug, translation in list(translations.items()):
        del translations[slug]
        translations[slug.replace('\n', '\\n')] = translation.replace('\n', '\\n')
    return translations
def main():
    """Command line entry point.

    Sub-commands (chosen by ``sys.argv[1]``):
      validate                  -- check every translation file against the
                                   slugs found in the source tree
      new <locale>              -- write an empty translation file for locale
      translate <locale> <dir>  -- rewrite .py sources under <dir>, replacing
                                   each ( 'slug' ) marker with its translation
    """
    slugs = find_translation_slugs()
    if sys.argv[1] == 'validate':
        _validate_translation_files(slugs)
    elif sys.argv[1] == 'new':
        _create_empty_translation_file(slugs, sys.argv[2])
    elif sys.argv[1] == 'translate':
        _translate_sources(sys.argv[2], sys.argv[3])


def _validate_translation_files(slugs):
    """Print missing/unnecessary entries for every file in the translations dir."""
    translation_filenames = [
        f for f in listdir(TRANSLATION_FILES_DIR)
        if isfile(join(TRANSLATION_FILES_DIR, f))
    ]
    for translation_filename in translation_filenames:
        print('Validating %s...' % translation_filename)
        valid = True
        with open(join(TRANSLATION_FILES_DIR, translation_filename), 'r') as translation_file:
            translations = load_translations(translation_file)
        for slug in slugs:
            if slug not in translations or translations[slug] == '':
                print('Missing translation for "%s"' % slug)
                valid = False
        for translation_slug in translations:
            if translation_slug not in slugs:
                print('Unnecessary translation for "%s"' % translation_slug)
                valid = False
        if valid:
            print('OK')


def _create_empty_translation_file(slugs, locale):
    """Write translations/<locale>.json with every slug mapped to an empty value."""
    translations = {}
    for slug in slugs:
        # Slugs are held with literal backslash-n; real newlines go to disk.
        translations[slug.replace('\\n', '\n')] = ''
    with open(join(TRANSLATION_FILES_DIR, '%s.json' % locale), 'w') as translation_file:
        translation_file.write(json.dumps(translations, sort_keys=True, indent=4))


def _translate_sources(locale, output_dir):
    """Replace every ( 'slug' ) marker in .py files under output_dir, in place."""
    with open(join(TRANSLATION_FILES_DIR, '%s.json' % locale), 'r') as translation_file:
        translations = load_translations(translation_file)
    for (dirpath, _, filenames) in walk(output_dir):
        for filename in filenames:
            if not filename.endswith('.py'):
                continue
            with open(join(dirpath, filename), 'r') as src_file:
                contents = src_file.read()
            for slug, translation in translations.items():
                contents = contents.replace(
                    '( \'%s\' )' % slug,
                    '"""%s"""' % translation
                )
            # Write to a temp file first, then move it over the original.
            with open(join(dirpath, filename + '.tmp'), 'w') as tmp_src_file:
                tmp_src_file.write(contents)
            shutil.move(join(dirpath, filename + '.tmp'), join(dirpath, filename))
if __name__ == '__main__':
main()
| true | true |
f71badc6bf833dce168ea8abc5e6eedabacd9bc3 | 1,205 | py | Python | pari/article/urls.py | theju/pari | 318a4ffba08362e78253ded100a63f5b5c6eadf9 | [
"BSD-3-Clause"
] | null | null | null | pari/article/urls.py | theju/pari | 318a4ffba08362e78253ded100a63f5b5c6eadf9 | [
"BSD-3-Clause"
] | null | null | null | pari/article/urls.py | theju/pari | 318a4ffba08362e78253ded100a63f5b5c6eadf9 | [
"BSD-3-Clause"
] | null | null | null | from django.conf.urls import patterns, url
from .views import (LocationDetail, CategoriesList, CategoryDetail, ArticleDetail,
ArticleList, KeywordDetail, AuthorDetail, ArchiveDetail, ArticleCarouselImageDetail)
# URL routes for the article app.  More specific patterns are listed before
# broader ones because Django matches top-down and `.+` also matches '/'.
urlpatterns = patterns('pari.article.views',
    url(r'^categories/(?P<slug>.+)/$', CategoryDetail.as_view(), name='category-detail'),
    url(r'^categories/$', CategoriesList.as_view(), name='category-list'),
    url(r'^authors/(?P<slug>.+)/$', AuthorDetail.as_view(), name='author-detail'),
    # BUG FIX: this pattern used to come after the plain article-detail one,
    # but `(?P<slug>.+)` is greedy and matches '/', so a URL such as
    # /articles/foo/3/ was captured by article-detail with slug "foo/3" and
    # this route could never match.  The more specific pattern must go first.
    url(r'^articles/(?P<slug>.+)/(?P<order>\d+)/$', ArticleCarouselImageDetail.as_view(), name='article-image-detail'),
    url(r'^articles/(?P<slug>.+)/$', ArticleDetail.as_view(), name='article-detail'),
    url(r'^articles/$', ArticleList.as_view(), name='article-list'),
    # NOTE(review): 'topics' is routed to AuthorDetail rather than a dedicated
    # topic view -- presumably intentional aliasing, but worth confirming.
    url(r'^topics/(?P<slug>.+)/$', AuthorDetail.as_view(), name='topic-detail'),
    url(r'^locations/(?P<slug>.+)/$', LocationDetail.as_view(), name='location-detail'),
    url(r'^keywords/(?P<slug>.+)/$', KeywordDetail.as_view(template_name="article/keyword_detail.html"), name='keyword-detail'),
    url(r'^archive/(?P<year>\d{4})/(?P<month>\d+)/$', ArchiveDetail.as_view(), name='archive-detail'),
)
| 66.944444 | 128 | 0.674689 | from django.conf.urls import patterns, url
from .views import (LocationDetail, CategoriesList, CategoryDetail, ArticleDetail,
ArticleList, KeywordDetail, AuthorDetail, ArchiveDetail, ArticleCarouselImageDetail)
# URL routes for the article app; specific patterns precede broader ones
# because Django matches top-down and `.+` also matches '/'.
urlpatterns = patterns('pari.article.views',
    url(r'^categories/(?P<slug>.+)/$', CategoryDetail.as_view(), name='category-detail'),
    url(r'^categories/$', CategoriesList.as_view(), name='category-list'),
    url(r'^authors/(?P<slug>.+)/$', AuthorDetail.as_view(), name='author-detail'),
    # BUG FIX: moved ahead of article-detail -- the greedy `(?P<slug>.+)`
    # otherwise swallows "/articles/foo/3/" (slug "foo/3") and this route
    # can never match.
    url(r'^articles/(?P<slug>.+)/(?P<order>\d+)/$', ArticleCarouselImageDetail.as_view(), name='article-image-detail'),
    url(r'^articles/(?P<slug>.+)/$', ArticleDetail.as_view(), name='article-detail'),
    url(r'^articles/$', ArticleList.as_view(), name='article-list'),
    # NOTE(review): 'topics' reuses AuthorDetail -- confirm this is intentional.
    url(r'^topics/(?P<slug>.+)/$', AuthorDetail.as_view(), name='topic-detail'),
    url(r'^locations/(?P<slug>.+)/$', LocationDetail.as_view(), name='location-detail'),
    url(r'^keywords/(?P<slug>.+)/$', KeywordDetail.as_view(template_name="article/keyword_detail.html"), name='keyword-detail'),
    url(r'^archive/(?P<year>\d{4})/(?P<month>\d+)/$', ArchiveDetail.as_view(), name='archive-detail'),
)
| true | true |
f71bae99af72eaffd84e9a35dfa07be932c56df2 | 4,017 | py | Python | classes/Helpers.py | alexdevmotion/UnifiedEegLogger | 735b2734fc9cd5dd9e6b7148b4310a4267624e63 | [
"Apache-2.0"
] | 1 | 2018-06-07T03:47:31.000Z | 2018-06-07T03:47:31.000Z | classes/Helpers.py | alexdevmotion/unified-eeg-experiment-machine | 735b2734fc9cd5dd9e6b7148b4310a4267624e63 | [
"Apache-2.0"
] | null | null | null | classes/Helpers.py | alexdevmotion/unified-eeg-experiment-machine | 735b2734fc9cd5dd9e6b7148b4310a4267624e63 | [
"Apache-2.0"
] | null | null | null | from Tkinter import *
from PIL import ImageTk, Image
import tkMessageBox
import sys
import os
def getNoImagesInDirectory(dir):
    """Return the number of image files directly inside *dir*."""
    return len(getImagesInDirectory(dir))
def getImagesInDirectory(dir):
    """Return the file names in *dir* whose extension marks them as images.

    Matching is case-insensitive; order follows os.listdir.
    """
    image_suffixes = (".jpg", ".png", ".jpeg", ".gif")
    return [name for name in os.listdir(dir)
            if name.lower().endswith(image_suffixes)]
def representsInt(s):
    """Return True when int(s) succeeds, i.e. *s* parses as an integer."""
    try:
        int(s)
    except ValueError:
        return False
    return True
class FullScreenWindow:
    """Main Tk application window: F11 toggles fullscreen, Escape leaves it,
    and closing the window asks the user for confirmation first.
    """
    def __init__(self, closingCallback):
        # Callable invoked (if set) after the user confirms quitting.
        self.closingCallback = closingCallback
        self.tk = Tk()
        self.frame = Frame(self.tk)
        self.frame.pack()
        # Tracks whether the window is currently fullscreen.
        self.state = False
        self.tk.iconbitmap("misc/favicon.ico")
        self.tk.title("EEG Unified Logger a.k.a. The Experiment Machine")
        self.tk.minsize(width=600, height=400)
        self.tk.bind("<F11>", self.toggle_fullscreen)
        self.tk.bind("<Escape>", self.end_fullscreen)
        # Intercept the window-manager close button to ask for confirmation.
        self.tk.protocol("WM_DELETE_WINDOW", self.on_closing)
    def toggle_fullscreen(self, event=None):
        """Flip fullscreen on/off (bound to F11)."""
        self.state = not self.state
        self.tk.attributes("-fullscreen", self.state)
        # "break" stops further Tk processing of this key event.
        return "break"
    def end_fullscreen(self, event=None):
        """Leave fullscreen (bound to Escape)."""
        self.state = False
        self.tk.attributes("-fullscreen", False)
        return "break"
    def on_closing(self):
        """Confirm with the user, run the closing callback, then exit the process."""
        if tkMessageBox.askokcancel("Quit", "Are you sure you want to exit?"):
            self.tk.destroy()
            if self.closingCallback:
                self.closingCallback()
            sys.exit(0)
class ImageWindow:
    """Fullscreen slideshow window showing one image every *imageInterval* seconds."""
    def __init__(self, tk, dir, images, imageInterval, threadedTasks, crop):
        # Parent Tk root; the slideshow runs in a fullscreen Toplevel above it.
        self.tk = tk
        # Directory holding the images and the list of file names to display.
        self.dir = dir
        self.images = images
        # Seconds between successive images.
        self.imageInterval = imageInterval
        # Logger-like object: told the current file name, and stopped at the end.
        self.threadedTasks = threadedTasks
        # When True, images are centre-cropped to the screen's aspect ratio.
        self.crop = crop
        self.curImageIndex = 0
        self.window = Toplevel(self.tk)
        self.window.attributes("-fullscreen", True)
        self.window.focus_force()
        # Escape aborts the slideshow early.
        self.window.bind("<Escape>", self.experimentStoppedByUser)
        self.windowDestroyed = False
        self.imagePanel = Label(self.window, image=None)
        self.imagePanel.pack(side="bottom", fill="both", expand="yes")
    def experimentStoppedByUser(self, event=None):
        """Tear the window down and stop logging (Escape key, or end of images)."""
        self.window.destroy()
        self.windowDestroyed = True
        self.threadedTasks.stopLoggingToFile()
    def handleNextImage(self):
        """Display the next image and reschedule itself; stops after the last one."""
        if not self.windowDestroyed:
            try:
                curImage = str(self.images[self.curImageIndex])
                self.threadedTasks.setCurrentFileName(curImage)
                self.displayImage(self.dir + "/" + curImage)
                self.curImageIndex += 1
                # Tk's after() takes milliseconds.
                self.window.after(self.imageInterval * 1000, self.handleNextImage)
            except IndexError:
                # Ran past the last image: end the experiment.
                self.experimentStoppedByUser()
    def displayImage(self, path):
        """Load *path* (optionally cropped to the screen) into the image label."""
        img = Image.open(path)
        if self.crop:
            img = self.cropAndResize(img, self.window.winfo_screenwidth(), self.window.winfo_screenheight())
        photoimg = ImageTk.PhotoImage(img)
        self.imagePanel.configure(image=photoimg)
        # Keep a reference: Tkinter does not hold on to PhotoImage objects itself.
        self.imagePanel.image = photoimg
    def cropAndResize(self, image, ideal_width, ideal_height):
        """Centre-crop *image* to the target aspect ratio, then resize to fill it.

        Note: this is Python 2 code (Tkinter import above); `/ 2` on ints is
        integer division here.
        """
        width = image.size[0]
        height = image.size[1]
        aspect = width / float(height)
        ideal_aspect = ideal_width / float(ideal_height)
        if aspect > ideal_aspect:
            # Then crop the left and right edges:
            new_width = int(ideal_aspect * height)
            offset = (width - new_width) / 2
            resize = (offset, 0, width - offset, height)
        else:
            # ... crop the top and bottom:
            new_height = int(width / ideal_aspect)
            offset = (height - new_height) / 2
            resize = (0, offset, width, height - offset)
        return image.crop(resize).resize((ideal_width, ideal_height), Image.ANTIALIAS)
| 30.664122 | 108 | 0.61638 | from Tkinter import *
from PIL import ImageTk, Image
import tkMessageBox
import sys
import os
def getNoImagesInDirectory(dir):
    """Return the number of image files directly inside *dir*."""
    return len(getImagesInDirectory(dir))
def getImagesInDirectory(dir):
    """Return the file names in *dir* with an image extension (case-insensitive)."""
    files = os.listdir(dir)
    images = []
    for file in files:
        if file.lower().endswith((".jpg", ".png", ".jpeg", ".gif")):
            images.append(file)
    return images
def representsInt(s):
    """Return True when int(s) succeeds, i.e. *s* parses as an integer."""
    try:
        int(s)
        return True
    except ValueError:
        return False
class FullScreenWindow:
    """Main Tk window: F11 toggles fullscreen, Escape leaves it, closing asks
    for confirmation and then runs the closing callback before exiting.
    """
    def __init__(self, closingCallback):
        self.closingCallback = closingCallback
        self.tk = Tk()
        self.frame = Frame(self.tk)
        self.frame.pack()
        # Tracks whether the window is currently fullscreen.
        self.state = False
        self.tk.iconbitmap("misc/favicon.ico")
        self.tk.title("EEG Unified Logger a.k.a. The Experiment Machine")
        self.tk.minsize(width=600, height=400)
        self.tk.bind("<F11>", self.toggle_fullscreen)
        self.tk.bind("<Escape>", self.end_fullscreen)
        self.tk.protocol("WM_DELETE_WINDOW", self.on_closing)
    def toggle_fullscreen(self, event=None):
        """Flip fullscreen on/off (bound to F11)."""
        self.state = not self.state
        self.tk.attributes("-fullscreen", self.state)
        return "break"
    def end_fullscreen(self, event=None):
        """Leave fullscreen (bound to Escape)."""
        self.state = False
        self.tk.attributes("-fullscreen", False)
        return "break"
    def on_closing(self):
        """Confirm with the user, run the closing callback, then exit the process."""
        if tkMessageBox.askokcancel("Quit", "Are you sure you want to exit?"):
            self.tk.destroy()
            if self.closingCallback:
                self.closingCallback()
            sys.exit(0)
class ImageWindow:
    """Fullscreen slideshow window showing one image every *imageInterval* seconds."""
    def __init__(self, tk, dir, images, imageInterval, threadedTasks, crop):
        self.tk = tk
        self.dir = dir
        self.images = images
        self.imageInterval = imageInterval
        # Logger-like object: told the current file name, and stopped at the end.
        self.threadedTasks = threadedTasks
        # When True, images are centre-cropped to the screen's aspect ratio.
        self.crop = crop
        self.curImageIndex = 0
        self.window = Toplevel(self.tk)
        self.window.attributes("-fullscreen", True)
        self.window.focus_force()
        # Escape aborts the slideshow early.
        self.window.bind("<Escape>", self.experimentStoppedByUser)
        self.windowDestroyed = False
        self.imagePanel = Label(self.window, image=None)
        self.imagePanel.pack(side="bottom", fill="both", expand="yes")
    def experimentStoppedByUser(self, event=None):
        """Tear the window down and stop logging (Escape key, or end of images)."""
        self.window.destroy()
        self.windowDestroyed = True
        self.threadedTasks.stopLoggingToFile()
    def handleNextImage(self):
        """Display the next image and reschedule itself; stops after the last one."""
        if not self.windowDestroyed:
            try:
                curImage = str(self.images[self.curImageIndex])
                self.threadedTasks.setCurrentFileName(curImage)
                self.displayImage(self.dir + "/" + curImage)
                self.curImageIndex += 1
                # Tk's after() takes milliseconds.
                self.window.after(self.imageInterval * 1000, self.handleNextImage)
            except IndexError:
                self.experimentStoppedByUser()
    def displayImage(self, path):
        """Load *path* (optionally cropped to the screen) into the image label."""
        img = Image.open(path)
        if self.crop:
            img = self.cropAndResize(img, self.window.winfo_screenwidth(), self.window.winfo_screenheight())
        photoimg = ImageTk.PhotoImage(img)
        self.imagePanel.configure(image=photoimg)
        # Keep a reference: Tkinter does not hold on to PhotoImage objects itself.
        self.imagePanel.image = photoimg
    def cropAndResize(self, image, ideal_width, ideal_height):
        """Centre-crop *image* to the target aspect ratio, then resize to fill it.

        Python 2 code (Tkinter import above): `/ 2` on ints is integer division.
        """
        width = image.size[0]
        height = image.size[1]
        aspect = width / float(height)
        ideal_aspect = ideal_width / float(ideal_height)
        if aspect > ideal_aspect:
            # Crop the left and right edges.
            new_width = int(ideal_aspect * height)
            offset = (width - new_width) / 2
            resize = (offset, 0, width - offset, height)
        else:
            # Crop the top and bottom.
            new_height = int(width / ideal_aspect)
            offset = (height - new_height) / 2
            resize = (0, offset, width, height - offset)
        return image.crop(resize).resize((ideal_width, ideal_height), Image.ANTIALIAS)
| true | true |
f71baee95ce425491965abf613df86bac3425409 | 3,126 | py | Python | acm/uri/1455-icpc-finals.py | neizod/problems | 180aaf7d0ecfc3d0dd5f1d4345a7a4d83b1b884a | [
"MIT"
] | 1 | 2015-10-17T11:15:42.000Z | 2015-10-17T11:15:42.000Z | acm/uri/1455-icpc-finals.py | neizod/problems | 180aaf7d0ecfc3d0dd5f1d4345a7a4d83b1b884a | [
"MIT"
] | null | null | null | acm/uri/1455-icpc-finals.py | neizod/problems | 180aaf7d0ecfc3d0dd5f1d4345a7a4d83b1b884a | [
"MIT"
] | null | null | null | #!/usr/bin/env python3
import random
from itertools import count
from collections import namedtuple
# 2-D point; `from_spec` parses a whitespace-separated "x y" input line.
Point = namedtuple('Point', 'x y')
Point.from_spec = lambda st: Point(*[float(si) for si in st.split()])
# Circle as (centre, radius); the zero-radius default acts as "no circle yet".
Circle = namedtuple('Circle', 'c r')
Circle.__new__.__defaults__ = (Point(0, 0), 0)
# `p in circle` tests containment (boundary inclusive); the r > 0 guard makes
# the default zero-radius circle contain nothing.
Circle.__contains__ = lambda s, p: s.r > 0 and sq_dist(p, s.c) <= s.r**2
# Build a circle from 1-3 boundary points, dispatching on the argument count.
Circle.from_arc = lambda *ps: ( circle_from_1_points(*ps) if len(ps) == 1 else
                                circle_from_2_points(*ps) if len(ps) == 2 else
                                circle_decide_3_points(*ps) if len(ps) == 3 else
                                NotImplemented )
# Squared distance between two points (to the origin by default).
sq_dist = lambda p, q=Point(0, 0): (p.x-q.x)**2 + (p.y-q.y)**2
# Determinant helpers used by circle_from_3_points (circumcircle formula).
det = lambda u, v, w: u.x*v.y + v.x*w.y + w.x*u.y - u.x*w.y - v.x*u.y - w.x*v.y
detx = lambda u, v, w: ( sq_dist(u)*v.y + sq_dist(v)*w.y + sq_dist(w)*u.y
                         - sq_dist(u)*w.y - sq_dist(v)*u.y - sq_dist(w)*v.y )
dety = lambda u, v, w: ( u.x*sq_dist(v) + v.x*sq_dist(w) + w.x*sq_dist(u)
                         - u.x*sq_dist(w) - v.x*sq_dist(u) - w.x*sq_dist(v) )
dets = lambda u, v, w: ( u.x*v.y*sq_dist(w) + v.x*w.y*sq_dist(u) + w.x*u.y*sq_dist(v)
                         - u.x*w.y*sq_dist(v) - v.x*u.y*sq_dist(w) - w.x*v.y*sq_dist(u) )
def shuffled(points):
    """Return a new list with the items of *points* in random order."""
    result = list(points)
    random.shuffle(result)
    return result
# XXX please note that though the problem statement says 2 <= n <= 100,
# the actual test does contain case of n == 1 too. WTF!!!
def circle_from_1_points(p1):
    """Degenerate enclosing circle of a single point: centre p1, radius 0."""
    return Circle(c=p1, r=0)
def circle_from_2_points(p1, p2):
    """Circle with the segment p1-p2 as its diameter."""
    midpoint = Point((p1.x + p2.x)/2, (p1.y + p2.y)/2)
    radius = sq_dist(p1, p2)**.5/2
    return Circle(midpoint, radius)
def circle_from_3_points(p1, p2, p3):
    """Circumcircle of the triangle p1 p2 p3 (determinant formulas)."""
    area2 = det(p1, p2, p3)
    centre = Point(detx(p1, p2, p3)/area2/2, dety(p1, p2, p3)/area2/2)
    return Circle(centre, (dets(p1, p2, p3)/area2 + sq_dist(centre))**.5)
def circle_decide_3_points(p1, p2, p3):
    """Smallest circle containing all three points.

    Prefer a two-point diameter circle when the excluded point already falls
    inside it; otherwise fall back to the full circumcircle.
    """
    corners = {p1, p2, p3}
    for excluded in corners:
        candidate = circle_from_2_points(*corners - {excluded})
        if excluded in candidate:
            return candidate
    return circle_from_3_points(p1, p2, p3)
def mincircle2p(points, p1, p2):
    """Smallest circle through both p1 and p2 that also encloses `points`.

    Incremental scan: a point outside the current circle must lie on the
    boundary of the new one, so the circle is rebuilt from all three
    boundary points.
    """
    circle = Circle.from_arc(p1, p2)
    # The original used `enumerate` here but never read the index.
    for p in points:
        if p not in circle:
            circle = Circle.from_arc(p1, p2, p)
    return circle
def mincircle1p(points, p1):
    """Smallest circle through p1 enclosing `points` (randomized incremental)."""
    circle = Circle.from_arc(p1)
    order = shuffled(points)
    for seen, candidate in enumerate(order):
        if candidate not in circle:
            # `candidate` must lie on the boundary; re-solve with it fixed.
            circle = mincircle2p(set(order[:seen]), p1, candidate)
    return circle
def mincircle(points):
    """Smallest enclosing circle of `points` (Welzl-style randomized incremental)."""
    circle = Circle()
    order = shuffled(points)
    for seen, candidate in enumerate(order):
        if candidate not in circle:
            circle = mincircle1p(set(order[:seen]), candidate)
    return circle
def main():
    """Read point-set instances from stdin until a 0 count; print each
    instance's minimum enclosing circle as "x y r" with two decimals.
    """
    for t in count(1):
        n = int(input())
        if n == 0:
            break
        points = [Point.from_spec(input()) for _ in range(n)]
        ans = mincircle(points)
        print('Instancia {}'.format(t))
        print('{:.2f} {:.2f} {:.2f}'.format(ans.c.x, ans.c.y, ans.r))
        print()
if __name__ == '__main__':
    main()
| 28.944444 | 87 | 0.555662 |
import random
from itertools import count
from collections import namedtuple
# 2-D point; `from_spec` parses a whitespace-separated "x y" input line.
Point = namedtuple('Point', 'x y')
Point.from_spec = lambda st: Point(*[float(si) for si in st.split()])
# Circle as (centre, radius); the zero-radius default contains nothing.
Circle = namedtuple('Circle', 'c r')
Circle.__new__.__defaults__ = (Point(0, 0), 0)
Circle.__contains__ = lambda s, p: s.r > 0 and sq_dist(p, s.c) <= s.r**2
# Build a circle from 1-3 boundary points, dispatching on the argument count.
Circle.from_arc = lambda *ps: ( circle_from_1_points(*ps) if len(ps) == 1 else
                                circle_from_2_points(*ps) if len(ps) == 2 else
                                circle_decide_3_points(*ps) if len(ps) == 3 else
                                NotImplemented )
sq_dist = lambda p, q=Point(0, 0): (p.x-q.x)**2 + (p.y-q.y)**2
# Determinant helpers used by circle_from_3_points (circumcircle formula).
det = lambda u, v, w: u.x*v.y + v.x*w.y + w.x*u.y - u.x*w.y - v.x*u.y - w.x*v.y
detx = lambda u, v, w: ( sq_dist(u)*v.y + sq_dist(v)*w.y + sq_dist(w)*u.y
                         - sq_dist(u)*w.y - sq_dist(v)*u.y - sq_dist(w)*v.y )
dety = lambda u, v, w: ( u.x*sq_dist(v) + v.x*sq_dist(w) + w.x*sq_dist(u)
                         - u.x*sq_dist(w) - v.x*sq_dist(u) - w.x*sq_dist(v) )
dets = lambda u, v, w: ( u.x*v.y*sq_dist(w) + v.x*w.y*sq_dist(u) + w.x*u.y*sq_dist(v)
                         - u.x*w.y*sq_dist(v) - v.x*u.y*sq_dist(w) - w.x*v.y*sq_dist(u) )
def shuffled(points):
    """Return the points as a new list in random order."""
    ls = list(points)
    random.shuffle(ls)
    return ls
def circle_from_1_points(p1):
    """Degenerate enclosing circle of a single point: centre p1, radius 0."""
    return Circle(p1, 0)
def circle_from_2_points(p1, p2):
    """Circle with the segment p1-p2 as its diameter."""
    dx = (p1.x + p2.x)/2
    dy = (p1.y + p2.y)/2
    c = Point(dx, dy)
    r = sq_dist(p1, p2)**.5/2
    return Circle(c, r)
def circle_from_3_points(p1, p2, p3):
    """Circumcircle of the triangle p1 p2 p3 (determinant formulas)."""
    a = det(p1, p2, p3)
    b = dets(p1, p2, p3)
    sx = detx(p1, p2, p3)
    sy = dety(p1, p2, p3)
    c = Point(sx/a/2, sy/a/2)
    return Circle(c, (b/a + sq_dist(c))**.5)
def circle_decide_3_points(p1, p2, p3):
    """Smallest circle containing three points: try each two-point diameter
    circle first, else the full circumcircle."""
    ps = {p1, p2, p3}
    for p in ps:
        circle = circle_from_2_points(*ps-{p})
        if p in circle:
            return circle
    return circle_from_3_points(p1, p2, p3)
def mincircle2p(points, p1, p2):
    """Smallest circle through p1 and p2 that also encloses `points`."""
    circle = Circle.from_arc(p1, p2)
    # NOTE(review): the index `i` is never used here; plain iteration would do.
    for i, p in enumerate(points):
        if p not in circle:
            circle = Circle.from_arc(p1, p2, p)
    return circle
def mincircle1p(points, p1):
    """Smallest circle through p1 enclosing `points` (randomized incremental)."""
    circle = Circle.from_arc(p1)
    ps = shuffled(points)
    for i, p in enumerate(ps):
        if p not in circle:
            circle = mincircle2p(set(ps[:i]), p1, p)
    return circle
def mincircle(points):
    """Smallest enclosing circle of `points` (randomized incremental)."""
    circle = Circle()
    ps = shuffled(points)
    for i, p in enumerate(ps):
        if p not in circle:
            circle = mincircle1p(set(ps[:i]), p)
    return circle
def main():
    """Read point-set instances from stdin until a 0 count; print each
    instance's minimum enclosing circle as "x y r" with two decimals.
    """
    for t in count(1):
        n = int(input())
        if n == 0:
            break
        points = [Point.from_spec(input()) for _ in range(n)]
        ans = mincircle(points)
        print('Instancia {}'.format(t))
        print('{:.2f} {:.2f} {:.2f}'.format(ans.c.x, ans.c.y, ans.r))
        print()
if __name__ == '__main__':
    main()
| true | true |
f71baf805de10ecd0c891aeb9bfc3b748a4b7980 | 4,340 | py | Python | openstack_dashboard/dashboards/project/instances/workflows/resize_instance.py | CplusShen/aurora-horizon | 8df16b3b87097d5a19bae3752d4b341ac64bda75 | [
"Apache-2.0"
] | 1 | 2020-04-12T19:21:18.000Z | 2020-04-12T19:21:18.000Z | openstack_dashboard/dashboards/project/instances/workflows/resize_instance.py | CplusShen/aurora-horizon | 8df16b3b87097d5a19bae3752d4b341ac64bda75 | [
"Apache-2.0"
] | 12 | 2022-03-22T07:28:29.000Z | 2022-03-22T07:29:55.000Z | openstack_dashboard/dashboards/project/instances/workflows/resize_instance.py | CplusShen/aurora-horizon | 8df16b3b87097d5a19bae3752d4b341ac64bda75 | [
"Apache-2.0"
] | 2 | 2019-01-17T06:06:00.000Z | 2019-08-07T02:21:07.000Z | # Copyright 2013 CentRin Data, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import json
from django.utils.translation import ugettext_lazy as _
from django.views.decorators.debug import sensitive_variables # noqa
from horizon import exceptions
from horizon import forms
from horizon import workflows
from openstack_dashboard import api
from openstack_dashboard.dashboards.project.instances \
import utils as instance_utils
from openstack_dashboard.dashboards.project.instances.workflows \
import create_instance
class SetFlavorChoiceAction(workflows.Action):
    """Workflow action letting the user pick a new flavor for a resize.

    The instance's current flavor is displayed read-only and removed
    from the selectable choices.
    """

    old_flavor_id = forms.CharField(required=False, widget=forms.HiddenInput())
    old_flavor_name = forms.CharField(
        label=_("Old Flavor"),
        required=False,
        widget=forms.TextInput(attrs={'readonly': 'readonly'}),
    )
    flavor = forms.ThemableChoiceField(
        label=_("New Flavor"),
        help_text=_("Choose the flavor to launch."))

    class Meta(object):
        name = _("Flavor Choice")
        slug = 'flavor_choice'
        help_text_template = "project/instances/_flavors_and_quotas.html"

    def populate_flavor_choices(self, request, context):
        """Build the flavor dropdown, excluding the current flavor."""
        current_id = context.get('old_flavor_id')
        # Resizing to the flavor the instance already has is a no-op,
        # so it is not offered.
        candidates = [f for f in context.get('flavors').values()
                      if f.id != current_id]
        if not candidates:
            return [("", _("No flavors available"))]
        if len(candidates) > 1:
            choices = instance_utils.sort_flavor_list(request, candidates)
        else:
            only = candidates[0]
            choices = [(only.id, only.name)]
        choices.insert(0, ("", _("Select a New Flavor")))
        return choices

    def get_help_text(self, extra_context=None):
        """Render the quota/flavor help panel; failures degrade gracefully."""
        extra = dict(extra_context) if extra_context is not None else {}
        try:
            usages = api.nova.tenant_absolute_limits(self.request,
                                                     reserved=True)
            extra['usages'] = usages
            extra['usages_json'] = json.dumps(usages)
            extra['flavors'] = json.dumps(
                [f._info for f in instance_utils.flavor_list(self.request)])
            extra['resize_instance'] = True
        except Exception:
            exceptions.handle(self.request,
                              _("Unable to retrieve quota information."))
        return super(SetFlavorChoiceAction, self).get_help_text(extra)
class SetFlavorChoice(workflows.Step):
    """Workflow step binding SetFlavorChoiceAction into the resize flow."""
    action_class = SetFlavorChoiceAction
    # Context keys the workflow must provide before this step runs.
    depends_on = ("instance_id", "name")
    # Context keys this step contributes back to the workflow.
    contributes = ("old_flavor_id", "old_flavor_name", "flavors", "flavor")
class ResizeInstance(workflows.Workflow):
    """Workflow that resizes a server to a newly selected flavor."""

    slug = "resize_instance"
    name = _("Resize Instance")
    finalize_button_name = _("Resize")
    success_message = _('Request for resizing of instance "%s" '
                        'has been submitted.')
    failure_message = _('Unable to resize instance "%s".')
    success_url = "horizon:project:instances:index"
    default_steps = (SetFlavorChoice, create_instance.SetAdvanced)

    def format_status_message(self, message):
        """Interpolate the instance name into a status message template."""
        instance_name = self.context.get('name', 'unknown instance')
        return message % instance_name

    @sensitive_variables('context')
    def handle(self, request, context):
        """Submit the resize request; return True on success, else False."""
        try:
            api.nova.server_resize(request,
                                   context.get('instance_id', None),
                                   context.get('flavor', None),
                                   context.get('disk_config', None))
        except Exception:
            exceptions.handle(request)
            return False
        return True
| 38.75 | 79 | 0.65 |
import json
from django.utils.translation import ugettext_lazy as _
from django.views.decorators.debug import sensitive_variables
from horizon import exceptions
from horizon import forms
from horizon import workflows
from openstack_dashboard import api
from openstack_dashboard.dashboards.project.instances \
import utils as instance_utils
from openstack_dashboard.dashboards.project.instances.workflows \
import create_instance
class SetFlavorChoiceAction(workflows.Action):
old_flavor_id = forms.CharField(required=False, widget=forms.HiddenInput())
old_flavor_name = forms.CharField(
label=_("Old Flavor"),
widget=forms.TextInput(attrs={'readonly': 'readonly'}),
required=False,
)
flavor = forms.ThemableChoiceField(
label=_("New Flavor"),
help_text=_("Choose the flavor to launch."))
class Meta(object):
name = _("Flavor Choice")
slug = 'flavor_choice'
help_text_template = ("project/instances/"
"_flavors_and_quotas.html")
def populate_flavor_choices(self, request, context):
old_flavor_id = context.get('old_flavor_id')
flavors = context.get('flavors').values()
flavors = [flavor for flavor in flavors if flavor.id != old_flavor_id]
if flavors:
if len(flavors) > 1:
flavors = instance_utils.sort_flavor_list(request, flavors)
else:
flavor = flavors[0]
flavors = [(flavor.id, flavor.name)]
flavors.insert(0, ("", _("Select a New Flavor")))
else:
flavors.insert(0, ("", _("No flavors available")))
return flavors
def get_help_text(self, extra_context=None):
extra = {} if extra_context is None else dict(extra_context)
try:
extra['usages'] = api.nova.tenant_absolute_limits(self.request,
reserved=True)
extra['usages_json'] = json.dumps(extra['usages'])
flavors = json.dumps([f._info for f in
instance_utils.flavor_list(self.request)])
extra['flavors'] = flavors
extra['resize_instance'] = True
except Exception:
exceptions.handle(self.request,
_("Unable to retrieve quota information."))
return super(SetFlavorChoiceAction, self).get_help_text(extra)
class SetFlavorChoice(workflows.Step):
action_class = SetFlavorChoiceAction
depends_on = ("instance_id", "name")
contributes = ("old_flavor_id", "old_flavor_name", "flavors", "flavor")
class ResizeInstance(workflows.Workflow):
slug = "resize_instance"
name = _("Resize Instance")
finalize_button_name = _("Resize")
success_message = _('Request for resizing of instance "%s" '
'has been submitted.')
failure_message = _('Unable to resize instance "%s".')
success_url = "horizon:project:instances:index"
default_steps = (SetFlavorChoice, create_instance.SetAdvanced)
def format_status_message(self, message):
return message % self.context.get('name', 'unknown instance')
@sensitive_variables('context')
def handle(self, request, context):
instance_id = context.get('instance_id', None)
flavor = context.get('flavor', None)
disk_config = context.get('disk_config', None)
try:
api.nova.server_resize(request, instance_id, flavor, disk_config)
return True
except Exception:
exceptions.handle(request)
return False
| true | true |
f71baf91c9ac2a1be7c1a45f7c74c72209692386 | 207 | py | Python | tests/test_MBus_connect.py | droid4control/python-mbus | 8e26c1847c06e57bc0e878ef3d6610dc9ba913b4 | [
"BSD-3-Clause"
] | 23 | 2015-05-19T15:57:40.000Z | 2021-03-18T11:33:22.000Z | tests/test_MBus_connect.py | Sensenode/python-mbus | 9b598ada5b3da17bb513cf78e5b4a8f2a3f9a1f1 | [
"BSD-3-Clause"
] | 14 | 2015-09-20T20:26:22.000Z | 2020-05-13T16:39:15.000Z | tests/test_MBus_connect.py | neurobat/python-mbus | 8e26c1847c06e57bc0e878ef3d6610dc9ba913b4 | [
"BSD-3-Clause"
] | 22 | 2015-07-27T08:50:44.000Z | 2022-03-19T01:17:18.000Z | import sys
sys.path.append('../python-mbus')
import pytest
from mbus import MBus
@pytest.fixture
def mbus_tcp():
    """Fixture: an MBus handle configured for TCP against localhost.

    No port is given, so the library default applies; the connection is
    not opened here -- tests call connect() themselves.
    """
    return MBus.MBus(host="127.0.0.1")
def test_connect(mbus_tcp):
    """Smoke test: connect() must complete without raising.

    Requires an M-Bus TCP endpoint listening on 127.0.0.1 -- the test
    fails with the library's own exception otherwise.
    """
    mbus_tcp.connect()
| 14.785714 | 38 | 0.714976 | import sys
sys.path.append('../python-mbus')
import pytest
from mbus import MBus
@pytest.fixture
def mbus_tcp():
return MBus.MBus(host="127.0.0.1")
def test_connect(mbus_tcp):
mbus_tcp.connect()
| true | true |
f71bafa9a01675524b1c846aa6c881e2336cfcb0 | 609 | py | Python | utils.py | Duy-Vu/stock-network | 3e84cfc581cd07001e86c20101c91c2f8910deb2 | [
"MIT"
] | null | null | null | utils.py | Duy-Vu/stock-network | 3e84cfc581cd07001e86c20101c91c2f8910deb2 | [
"MIT"
] | null | null | null | utils.py | Duy-Vu/stock-network | 3e84cfc581cd07001e86c20101c91c2f8910deb2 | [
"MIT"
] | null | null | null | import numpy as np
def clean_data(df, out_df_dir=""):
    """Drop every column that contains missing values, mutating *df*.

    When *out_df_dir* is a non-empty path, the cleaned frame is also
    written there as CSV. The (mutated) input frame is returned.
    """
    incomplete = df.columns[df.isna().any()]
    df.drop(columns=incomplete, inplace=True)
    if out_df_dir:
        df.to_csv(out_df_dir)
    return df
# Calculate log change of daily price
def log_change(series):
    """Log-return between two consecutive prices: ln(p1 / p0)."""
    first, second = series[0], series[1]
    return np.log(second / first)
# Calculate correlation
def calculate_cor(df, start, end):
    """Pearson correlation matrix of day-over-day log price changes.

    Works on the df[start:end] slice: each rolling pair of rows is
    reduced to a log change via log_change, then the columns of the
    resulting change series are correlated.
    """
    changes = df[start:end].rolling(window=2, min_periods=2).apply(
        log_change, raw=True)
    return changes.corr(method="pearson")
# Calculate profit
def take_profit(price, start, end):
return price.iloc[end]/price.iloc[start] - 1 | 20.3 | 48 | 0.62069 | import numpy as np
def clean_data(df, out_df_dir=""):
df.dropna(axis=1, inplace=True)
if out_df_dir:
df.to_csv(out_df_dir)
return df
def log_change(series):
return np.log(series[1] / series[0])
def calculate_cor(df, start, end):
return df[start:end].rolling(
window=2,
min_periods=2
).apply(
log_change,
raw=True
).corr(method="pearson")
def take_profit(price, start, end):
return price.iloc[end]/price.iloc[start] - 1 | true | true |
f71bb0f290045da4932989588f794fc5c7a82389 | 12,982 | py | Python | dateparser/data/numeral_translation_data/nb.py | bazingarj/dateparser | 48c4563fb7f6ce685fbd6d27e9e83257521d2203 | [
"BSD-3-Clause"
] | 8 | 2019-11-15T21:00:15.000Z | 2021-12-21T22:09:42.000Z | dateparser/data/numeral_translation_data/nb.py | bazingarj/dateparser | 48c4563fb7f6ce685fbd6d27e9e83257521d2203 | [
"BSD-3-Clause"
] | 9 | 2020-06-05T21:28:57.000Z | 2022-02-12T12:30:39.000Z | dateparser/data/numeral_translation_data/nb.py | bazingarj/dateparser | 48c4563fb7f6ce685fbd6d27e9e83257521d2203 | [
"BSD-3-Clause"
] | 21 | 2019-03-11T04:25:23.000Z | 2022-02-03T08:54:33.000Z | # -*- coding: utf-8 -*-
info = {
"%%and-small": {
"(0, 99)": "og =%%spellout-cardinal-reale=;",
"(100, 'inf')": "=%%spellout-cardinal-reale=;"
},
"%%and-small-f": {
"(0, 99)": "og =%spellout-cardinal-feminine=;",
"(100, 'inf')": "=%spellout-cardinal-feminine=;"
},
"%%and-small-n": {
"(0, 99)": "og =%spellout-cardinal-neuter=;",
"(100, 'inf')": "=%spellout-cardinal-neuter=;"
},
"%%ord-fem-de": {
"0": "de;",
"(1, 'inf')": "' =%spellout-ordinal-feminine=;"
},
"%%ord-fem-nde": {
"0": "ende;",
"(1, 'inf')": "=%spellout-ordinal-feminine=;"
},
"%%ord-fem-te": {
"0": "te;",
"(1, 'inf')": "' =%spellout-ordinal-feminine=;"
},
"%%ord-fem-teer": {
"0": "te;",
"(1, 'inf')": "er =%spellout-ordinal-feminine=;"
},
"%%ord-masc-de": {
"0": "de;",
"(1, 'inf')": "' =%spellout-ordinal-masculine=;"
},
"%%ord-masc-nde": {
"0": "ende;",
"(1, 'inf')": "=%spellout-ordinal-masculine=;"
},
"%%ord-masc-te": {
"0": "te;",
"(1, 'inf')": "' =%spellout-ordinal-masculine=;"
},
"%%ord-masc-teer": {
"0": "te;",
"(1, 'inf')": "er =%spellout-ordinal-masculine=;"
},
"%%ord-neut-de": {
"0": "de;",
"(1, 'inf')": "' =%spellout-ordinal-neuter=;"
},
"%%ord-neut-nde": {
"0": "ende;",
"(1, 'inf')": "=%spellout-ordinal-neuter=;"
},
"%%ord-neut-te": {
"0": "te;",
"(1, 'inf')": "' =%spellout-ordinal-neuter=;"
},
"%%ord-neut-teer": {
"0": "te;",
"(1, 'inf')": "er =%spellout-ordinal-neuter=;"
},
"%%ord-plural-de": {
"0": "de;",
"(1, 'inf')": "' =%spellout-ordinal-plural=;"
},
"%%ord-plural-nde": {
"0": "ende;",
"(1, 'inf')": "=%spellout-ordinal-plural=;"
},
"%%ord-plural-te": {
"0": "te;",
"(1, 'inf')": "' =%spellout-ordinal-plural=;"
},
"%%ord-plural-teer": {
"0": "te;",
"(1, 'inf')": "er =%spellout-ordinal-plural=;"
},
"%%spellout-cardinal-reale": {
"0": "null;",
"1": "én;",
"2": "to;",
"3": "tre;",
"4": "fire;",
"5": "fem;",
"6": "seks;",
"7": "sju;",
"8": "åtte;",
"9": "ni;",
"10": "ti;",
"11": "elleve;",
"12": "tolv;",
"13": "tretten;",
"14": "fjorten;",
"15": "femten;",
"16": "seksten;",
"17": "sytten;",
"18": "atten;",
"19": "nitten;",
"(20, 29)": "tjue[>>];",
"(30, 39)": "tretti[>>];",
"(40, 49)": "førti[>>];",
"(50, 59)": "femti[>>];",
"(60, 69)": "seksti[>>];",
"(70, 79)": "sytti[>>];",
"(80, 89)": "åtti[>>];",
"(90, 99)": "nitti[>>];",
"(100, 199)": "hundre[ og >>];",
"(200, 999)": "<%spellout-cardinal-neuter< hundre[ og >>];",
"(1000, 1999)": "tusen[ >%%and-small>];",
"(2000, 999999)": "<%spellout-cardinal-neuter< tusen[ >%%and-small>];",
"(1000000, 1999999)": "én million[ >>];",
"(2000000, 999999999)": "<< millioner[ >>];",
"(1000000000, 1999999999)": "én milliard[ >>];",
"(2000000000, 999999999999)": "<< milliarder[ >>];",
"(1000000000000, 1999999999999)": "én billion[ >>];",
"(2000000000000, 999999999999999)": "<< billioner[ >>];",
"(1000000000000000, 1999999999999999)": "én billiard[ >>];",
"(2000000000000000, 999999999999999999)": "<< billiarder[ >>];",
"(1000000000000000000, 'inf')": "=#,##0=;"
},
"%spellout-cardinal-feminine": {
"0": "null;",
"1": "ei;",
"(2, 99)": "=%%spellout-cardinal-reale=;",
"(100, 199)": "hundre[ og >>];",
"(200, 999)": "<%spellout-cardinal-neuter< hundre[ og >>];",
"(1000, 1999)": "tusen[ >%%and-small-f>];",
"(2000, 999999)": "<%spellout-cardinal-neuter< tusen[ >%%and-small-f>];",
"(1000000, 1999999)": "én million[ >>];",
"(2000000, 999999999)": "<%%spellout-cardinal-reale< millioner[ >>];",
"(1000000000, 1999999999)": "én milliard[ >>];",
"(2000000000, 999999999999)": "<%%spellout-cardinal-reale< milliarder[ >>];",
"(1000000000000, 1999999999999)": "én billion[ >>];",
"(2000000000000, 999999999999999)": "<%%spellout-cardinal-reale< billioner[ >>];",
"(1000000000000000, 1999999999999999)": "én billiard[ >>];",
"(2000000000000000, 999999999999999999)": "<%%spellout-cardinal-reale< billiarder[ >>];",
"(1000000000000000000, 'inf')": "=#,##0=;"
},
"%spellout-cardinal-masculine": {
"(0, 'inf')": "=%%spellout-cardinal-reale=;"
},
"%spellout-cardinal-neuter": {
"0": "null;",
"1": "ett;",
"(2, 19)": "=%%spellout-cardinal-reale=;",
"(20, 29)": "tjue[>>];",
"(30, 39)": "tretti[>>];",
"(40, 49)": "førti[>>];",
"(50, 59)": "femti[>>];",
"(60, 69)": "seksti[>>];",
"(70, 79)": "sytti[>>];",
"(80, 89)": "åtti[>>];",
"(90, 99)": "nitti[>>];",
"(100, 199)": "hundre[ og >>];",
"(200, 999)": "<%spellout-cardinal-neuter< hundre[ og >>];",
"(1000, 1999)": "tusen[ >%%and-small-n>];",
"(2000, 999999)": "<%spellout-cardinal-neuter< tusen[ >%%and-small-n>];",
"(1000000, 1999999)": "én million[ >>];",
"(2000000, 999999999)": "<%%spellout-cardinal-reale< millioner[ >>];",
"(1000000000, 1999999999)": "én milliard[ >>];",
"(2000000000, 999999999999)": "<%%spellout-cardinal-reale< milliarder[ >>];",
"(1000000000000, 1999999999999)": "én billion[ >>];",
"(2000000000000, 999999999999999)": "<%%spellout-cardinal-reale< billioner[ >>];",
"(1000000000000000, 1999999999999999)": "én billiard[ >>];",
"(2000000000000000, 999999999999999999)": "<%%spellout-cardinal-reale< billiarder[ >>];",
"(1000000000000000000, 'inf')": "=#,##0=;"
},
"%spellout-numbering": {
"(0, 'inf')": "=%%spellout-cardinal-reale=;"
},
"%spellout-numbering-year": {
"(0, 9999)": "=%spellout-numbering=;",
"(10000, 'inf')": "=%spellout-numbering=;"
},
"%spellout-ordinal-feminine": {
"0": "nullte;",
"1": "første;",
"2": "andre;",
"3": "tredje;",
"4": "fjerde;",
"5": "femte;",
"6": "sjette;",
"7": "sjuende;",
"8": "åttende;",
"9": "niende;",
"10": "tiende;",
"11": "ellevte;",
"12": "tolvte;",
"(13, 19)": "=%spellout-cardinal-neuter=de;",
"(20, 29)": "tjue>%%ord-fem-nde>;",
"(30, 39)": "tretti>%%ord-fem-nde>;",
"(40, 49)": "førti>%%ord-fem-nde>;",
"(50, 59)": "femti>%%ord-fem-nde>;",
"(60, 69)": "seksti>%%ord-fem-nde>;",
"(70, 79)": "sytti>%%ord-fem-nde>;",
"(80, 89)": "åtti>%%ord-fem-nde>;",
"(90, 99)": "nitti>%%ord-fem-nde>;",
"(100, 999)": "<%spellout-numbering<hundre>%%ord-fem-de>;",
"(1000, 999999)": "<%spellout-numbering<tusen>%%ord-fem-de>;",
"(1000000, 1999999)": "én million>%%ord-fem-te>;",
"(2000000, 999999999)": "<%%spellout-cardinal-reale< million>%%ord-fem-teer>;",
"(1000000000, 1999999999)": "én milliard>%%ord-fem-te>;",
"(2000000000, 999999999999)": "<%%spellout-cardinal-reale< milliard>%%ord-fem-teer>;",
"(1000000000000, 1999999999999)": "én billion>%%ord-fem-te>;",
"(2000000000000, 999999999999999)": "<%%spellout-cardinal-reale< billion>%%ord-fem-teer>;",
"(1000000000000000, 1999999999999999)": "én billiard>%%ord-fem-te>;",
"(2000000000000000, 999999999999999999)": "<%%spellout-cardinal-reale< billiard>%%ord-fem-teer>;",
"(1000000000000000000, 'inf')": "=#,##0=.;"
},
"%spellout-ordinal-masculine": {
"0": "nullte;",
"1": "første;",
"2": "andre;",
"3": "tredje;",
"4": "fjerde;",
"5": "femte;",
"6": "sjette;",
"7": "sjuende;",
"8": "åttende;",
"9": "niende;",
"10": "tiende;",
"11": "ellevte;",
"12": "tolvte;",
"(13, 19)": "=%spellout-cardinal-neuter=de;",
"(20, 29)": "tjue>%%ord-masc-nde>;",
"(30, 39)": "tretti>%%ord-masc-nde>;",
"(40, 49)": "førti>%%ord-masc-nde>;",
"(50, 59)": "femti>%%ord-masc-nde>;",
"(60, 69)": "seksti>%%ord-masc-nde>;",
"(70, 79)": "sytti>%%ord-masc-nde>;",
"(80, 89)": "åtti>%%ord-masc-nde>;",
"(90, 99)": "nitti>%%ord-masc-nde>;",
"(100, 999)": "<%spellout-numbering<hundre>%%ord-masc-de>;",
"(1000, 999999)": "<%spellout-numbering<tusen>%%ord-masc-de>;",
"(1000000, 1999999)": "én million>%%ord-masc-te>;",
"(2000000, 999999999)": "<%%spellout-cardinal-reale< million>%%ord-masc-teer>;",
"(1000000000, 1999999999)": "én milliard>%%ord-masc-te>;",
"(2000000000, 999999999999)": "<%%spellout-cardinal-reale< milliard>%%ord-masc-teer>;",
"(1000000000000, 1999999999999)": "én billion>%%ord-masc-te>;",
"(2000000000000, 999999999999999)": "<%%spellout-cardinal-reale< billion>%%ord-masc-teer>;",
"(1000000000000000, 1999999999999999)": "én billiard>%%ord-masc-te>;",
"(2000000000000000, 999999999999999999)": "<%%spellout-cardinal-reale< billiard>%%ord-masc-teer>;",
"(1000000000000000000, 'inf')": "=#,##0=.;"
},
"%spellout-ordinal-neuter": {
"0": "nullte;",
"1": "første;",
"2": "andre;",
"3": "tredje;",
"4": "fjerde;",
"5": "femte;",
"6": "sjette;",
"7": "sjuende;",
"8": "åttende;",
"9": "niende;",
"10": "tiende;",
"11": "ellevte;",
"12": "tolvte;",
"(13, 19)": "=%spellout-cardinal-neuter=de;",
"(20, 29)": "tjue>%%ord-neut-nde>;",
"(30, 39)": "tretti>%%ord-neut-nde>;",
"(40, 49)": "førti>%%ord-neut-nde>;",
"(50, 59)": "femti>%%ord-neut-nde>;",
"(60, 69)": "seksti>%%ord-neut-nde>;",
"(70, 79)": "sytti>%%ord-neut-nde>;",
"(80, 89)": "åtti>%%ord-neut-nde>;",
"(90, 99)": "nitti>%%ord-neut-nde>;",
"(100, 999)": "<%spellout-numbering<hundre>%%ord-neut-de>;",
"(1000, 999999)": "<%spellout-numbering<tusen>%%ord-neut-de>;",
"(1000000, 1999999)": "én million>%%ord-neut-te>;",
"(2000000, 999999999)": "<%%spellout-cardinal-reale< million>%%ord-neut-teer>;",
"(1000000000, 1999999999)": "én milliard>%%ord-neut-te>;",
"(2000000000, 999999999999)": "<%%spellout-cardinal-reale< milliard>%%ord-neut-teer>;",
"(1000000000000, 1999999999999)": "én billion>%%ord-neut-te>;",
"(2000000000000, 999999999999999)": "<%%spellout-cardinal-reale< billion>%%ord-neut-teer>;",
"(1000000000000000, 1999999999999999)": "én billiard>%%ord-neut-te>;",
"(2000000000000000, 999999999999999999)": "<%%spellout-cardinal-reale< billiard>%%ord-neut-teer>;",
"(1000000000000000000, 'inf')": "=#,##0=.;"
},
"%spellout-ordinal-plural": {
"0": "nullte;",
"1": "første;",
"2": "andre;",
"3": "tredje;",
"4": "fjerde;",
"5": "femte;",
"6": "sjette;",
"7": "sjuende;",
"8": "åttende;",
"9": "niende;",
"10": "tiende;",
"11": "ellevte;",
"12": "tolvte;",
"(13, 19)": "=%spellout-cardinal-neuter=de;",
"(20, 29)": "tjue>%%ord-plural-nde>;",
"(30, 39)": "tretti>%%ord-plural-nde>;",
"(40, 49)": "førti>%%ord-plural-nde>;",
"(50, 59)": "femti>%%ord-plural-nde>;",
"(60, 69)": "seksti>%%ord-plural-nde>;",
"(70, 79)": "sytti>%%ord-plural-nde>;",
"(80, 89)": "åtti>%%ord-plural-nde>;",
"(90, 99)": "nitti>%%ord-plural-nde>;",
"(100, 999)": "<%spellout-numbering<hundre>%%ord-plural-de>;",
"(1000, 999999)": "<%spellout-numbering<tusen>%%ord-plural-de>;",
"(1000000, 1999999)": "én million>%%ord-plural-te>;",
"(2000000, 999999999)": "<%%spellout-cardinal-reale< million>%%ord-plural-teer>;",
"(1000000000, 1999999999)": "én milliard>%%ord-plural-te>;",
"(2000000000, 999999999999)": "<%%spellout-cardinal-reale< milliard>%%ord-plural-teer>;",
"(1000000000000, 1999999999999)": "én billion>%%ord-plural-te>;",
"(2000000000000, 999999999999999)": "<%%spellout-cardinal-reale< billion>%%ord-plural-teer>;",
"(1000000000000000, 1999999999999999)": "én billiard>%%ord-plural-te>;",
"(2000000000000000, 999999999999999999)": "<%%spellout-cardinal-reale< billiard>%%ord-plural-teer>;",
"(1000000000000000000, 'inf')": "=#,##0=.;"
}
} | 41.082278 | 109 | 0.470498 |
info = {
"%%and-small": {
"(0, 99)": "og =%%spellout-cardinal-reale=;",
"(100, 'inf')": "=%%spellout-cardinal-reale=;"
},
"%%and-small-f": {
"(0, 99)": "og =%spellout-cardinal-feminine=;",
"(100, 'inf')": "=%spellout-cardinal-feminine=;"
},
"%%and-small-n": {
"(0, 99)": "og =%spellout-cardinal-neuter=;",
"(100, 'inf')": "=%spellout-cardinal-neuter=;"
},
"%%ord-fem-de": {
"0": "de;",
"(1, 'inf')": "' =%spellout-ordinal-feminine=;"
},
"%%ord-fem-nde": {
"0": "ende;",
"(1, 'inf')": "=%spellout-ordinal-feminine=;"
},
"%%ord-fem-te": {
"0": "te;",
"(1, 'inf')": "' =%spellout-ordinal-feminine=;"
},
"%%ord-fem-teer": {
"0": "te;",
"(1, 'inf')": "er =%spellout-ordinal-feminine=;"
},
"%%ord-masc-de": {
"0": "de;",
"(1, 'inf')": "' =%spellout-ordinal-masculine=;"
},
"%%ord-masc-nde": {
"0": "ende;",
"(1, 'inf')": "=%spellout-ordinal-masculine=;"
},
"%%ord-masc-te": {
"0": "te;",
"(1, 'inf')": "' =%spellout-ordinal-masculine=;"
},
"%%ord-masc-teer": {
"0": "te;",
"(1, 'inf')": "er =%spellout-ordinal-masculine=;"
},
"%%ord-neut-de": {
"0": "de;",
"(1, 'inf')": "' =%spellout-ordinal-neuter=;"
},
"%%ord-neut-nde": {
"0": "ende;",
"(1, 'inf')": "=%spellout-ordinal-neuter=;"
},
"%%ord-neut-te": {
"0": "te;",
"(1, 'inf')": "' =%spellout-ordinal-neuter=;"
},
"%%ord-neut-teer": {
"0": "te;",
"(1, 'inf')": "er =%spellout-ordinal-neuter=;"
},
"%%ord-plural-de": {
"0": "de;",
"(1, 'inf')": "' =%spellout-ordinal-plural=;"
},
"%%ord-plural-nde": {
"0": "ende;",
"(1, 'inf')": "=%spellout-ordinal-plural=;"
},
"%%ord-plural-te": {
"0": "te;",
"(1, 'inf')": "' =%spellout-ordinal-plural=;"
},
"%%ord-plural-teer": {
"0": "te;",
"(1, 'inf')": "er =%spellout-ordinal-plural=;"
},
"%%spellout-cardinal-reale": {
"0": "null;",
"1": "én;",
"2": "to;",
"3": "tre;",
"4": "fire;",
"5": "fem;",
"6": "seks;",
"7": "sju;",
"8": "åtte;",
"9": "ni;",
"10": "ti;",
"11": "elleve;",
"12": "tolv;",
"13": "tretten;",
"14": "fjorten;",
"15": "femten;",
"16": "seksten;",
"17": "sytten;",
"18": "atten;",
"19": "nitten;",
"(20, 29)": "tjue[>>];",
"(30, 39)": "tretti[>>];",
"(40, 49)": "førti[>>];",
"(50, 59)": "femti[>>];",
"(60, 69)": "seksti[>>];",
"(70, 79)": "sytti[>>];",
"(80, 89)": "åtti[>>];",
"(90, 99)": "nitti[>>];",
"(100, 199)": "hundre[ og >>];",
"(200, 999)": "<%spellout-cardinal-neuter< hundre[ og >>];",
"(1000, 1999)": "tusen[ >%%and-small>];",
"(2000, 999999)": "<%spellout-cardinal-neuter< tusen[ >%%and-small>];",
"(1000000, 1999999)": "én million[ >>];",
"(2000000, 999999999)": "<< millioner[ >>];",
"(1000000000, 1999999999)": "én milliard[ >>];",
"(2000000000, 999999999999)": "<< milliarder[ >>];",
"(1000000000000, 1999999999999)": "én billion[ >>];",
"(2000000000000, 999999999999999)": "<< billioner[ >>];",
"(1000000000000000, 1999999999999999)": "én billiard[ >>];",
"(2000000000000000, 999999999999999999)": "<< billiarder[ >>];",
"(1000000000000000000, 'inf')": "=#,##0=;"
},
"%spellout-cardinal-feminine": {
"0": "null;",
"1": "ei;",
"(2, 99)": "=%%spellout-cardinal-reale=;",
"(100, 199)": "hundre[ og >>];",
"(200, 999)": "<%spellout-cardinal-neuter< hundre[ og >>];",
"(1000, 1999)": "tusen[ >%%and-small-f>];",
"(2000, 999999)": "<%spellout-cardinal-neuter< tusen[ >%%and-small-f>];",
"(1000000, 1999999)": "én million[ >>];",
"(2000000, 999999999)": "<%%spellout-cardinal-reale< millioner[ >>];",
"(1000000000, 1999999999)": "én milliard[ >>];",
"(2000000000, 999999999999)": "<%%spellout-cardinal-reale< milliarder[ >>];",
"(1000000000000, 1999999999999)": "én billion[ >>];",
"(2000000000000, 999999999999999)": "<%%spellout-cardinal-reale< billioner[ >>];",
"(1000000000000000, 1999999999999999)": "én billiard[ >>];",
"(2000000000000000, 999999999999999999)": "<%%spellout-cardinal-reale< billiarder[ >>];",
"(1000000000000000000, 'inf')": "=#,##0=;"
},
"%spellout-cardinal-masculine": {
"(0, 'inf')": "=%%spellout-cardinal-reale=;"
},
"%spellout-cardinal-neuter": {
"0": "null;",
"1": "ett;",
"(2, 19)": "=%%spellout-cardinal-reale=;",
"(20, 29)": "tjue[>>];",
"(30, 39)": "tretti[>>];",
"(40, 49)": "førti[>>];",
"(50, 59)": "femti[>>];",
"(60, 69)": "seksti[>>];",
"(70, 79)": "sytti[>>];",
"(80, 89)": "åtti[>>];",
"(90, 99)": "nitti[>>];",
"(100, 199)": "hundre[ og >>];",
"(200, 999)": "<%spellout-cardinal-neuter< hundre[ og >>];",
"(1000, 1999)": "tusen[ >%%and-small-n>];",
"(2000, 999999)": "<%spellout-cardinal-neuter< tusen[ >%%and-small-n>];",
"(1000000, 1999999)": "én million[ >>];",
"(2000000, 999999999)": "<%%spellout-cardinal-reale< millioner[ >>];",
"(1000000000, 1999999999)": "én milliard[ >>];",
"(2000000000, 999999999999)": "<%%spellout-cardinal-reale< milliarder[ >>];",
"(1000000000000, 1999999999999)": "én billion[ >>];",
"(2000000000000, 999999999999999)": "<%%spellout-cardinal-reale< billioner[ >>];",
"(1000000000000000, 1999999999999999)": "én billiard[ >>];",
"(2000000000000000, 999999999999999999)": "<%%spellout-cardinal-reale< billiarder[ >>];",
"(1000000000000000000, 'inf')": "=#,##0=;"
},
"%spellout-numbering": {
"(0, 'inf')": "=%%spellout-cardinal-reale=;"
},
"%spellout-numbering-year": {
"(0, 9999)": "=%spellout-numbering=;",
"(10000, 'inf')": "=%spellout-numbering=;"
},
"%spellout-ordinal-feminine": {
"0": "nullte;",
"1": "første;",
"2": "andre;",
"3": "tredje;",
"4": "fjerde;",
"5": "femte;",
"6": "sjette;",
"7": "sjuende;",
"8": "åttende;",
"9": "niende;",
"10": "tiende;",
"11": "ellevte;",
"12": "tolvte;",
"(13, 19)": "=%spellout-cardinal-neuter=de;",
"(20, 29)": "tjue>%%ord-fem-nde>;",
"(30, 39)": "tretti>%%ord-fem-nde>;",
"(40, 49)": "førti>%%ord-fem-nde>;",
"(50, 59)": "femti>%%ord-fem-nde>;",
"(60, 69)": "seksti>%%ord-fem-nde>;",
"(70, 79)": "sytti>%%ord-fem-nde>;",
"(80, 89)": "åtti>%%ord-fem-nde>;",
"(90, 99)": "nitti>%%ord-fem-nde>;",
"(100, 999)": "<%spellout-numbering<hundre>%%ord-fem-de>;",
"(1000, 999999)": "<%spellout-numbering<tusen>%%ord-fem-de>;",
"(1000000, 1999999)": "én million>%%ord-fem-te>;",
"(2000000, 999999999)": "<%%spellout-cardinal-reale< million>%%ord-fem-teer>;",
"(1000000000, 1999999999)": "én milliard>%%ord-fem-te>;",
"(2000000000, 999999999999)": "<%%spellout-cardinal-reale< milliard>%%ord-fem-teer>;",
"(1000000000000, 1999999999999)": "én billion>%%ord-fem-te>;",
"(2000000000000, 999999999999999)": "<%%spellout-cardinal-reale< billion>%%ord-fem-teer>;",
"(1000000000000000, 1999999999999999)": "én billiard>%%ord-fem-te>;",
"(2000000000000000, 999999999999999999)": "<%%spellout-cardinal-reale< billiard>%%ord-fem-teer>;",
"(1000000000000000000, 'inf')": "=#,##0=.;"
},
"%spellout-ordinal-masculine": {
"0": "nullte;",
"1": "første;",
"2": "andre;",
"3": "tredje;",
"4": "fjerde;",
"5": "femte;",
"6": "sjette;",
"7": "sjuende;",
"8": "åttende;",
"9": "niende;",
"10": "tiende;",
"11": "ellevte;",
"12": "tolvte;",
"(13, 19)": "=%spellout-cardinal-neuter=de;",
"(20, 29)": "tjue>%%ord-masc-nde>;",
"(30, 39)": "tretti>%%ord-masc-nde>;",
"(40, 49)": "førti>%%ord-masc-nde>;",
"(50, 59)": "femti>%%ord-masc-nde>;",
"(60, 69)": "seksti>%%ord-masc-nde>;",
"(70, 79)": "sytti>%%ord-masc-nde>;",
"(80, 89)": "åtti>%%ord-masc-nde>;",
"(90, 99)": "nitti>%%ord-masc-nde>;",
"(100, 999)": "<%spellout-numbering<hundre>%%ord-masc-de>;",
"(1000, 999999)": "<%spellout-numbering<tusen>%%ord-masc-de>;",
"(1000000, 1999999)": "én million>%%ord-masc-te>;",
"(2000000, 999999999)": "<%%spellout-cardinal-reale< million>%%ord-masc-teer>;",
"(1000000000, 1999999999)": "én milliard>%%ord-masc-te>;",
"(2000000000, 999999999999)": "<%%spellout-cardinal-reale< milliard>%%ord-masc-teer>;",
"(1000000000000, 1999999999999)": "én billion>%%ord-masc-te>;",
"(2000000000000, 999999999999999)": "<%%spellout-cardinal-reale< billion>%%ord-masc-teer>;",
"(1000000000000000, 1999999999999999)": "én billiard>%%ord-masc-te>;",
"(2000000000000000, 999999999999999999)": "<%%spellout-cardinal-reale< billiard>%%ord-masc-teer>;",
"(1000000000000000000, 'inf')": "=#,##0=.;"
},
"%spellout-ordinal-neuter": {
"0": "nullte;",
"1": "første;",
"2": "andre;",
"3": "tredje;",
"4": "fjerde;",
"5": "femte;",
"6": "sjette;",
"7": "sjuende;",
"8": "åttende;",
"9": "niende;",
"10": "tiende;",
"11": "ellevte;",
"12": "tolvte;",
"(13, 19)": "=%spellout-cardinal-neuter=de;",
"(20, 29)": "tjue>%%ord-neut-nde>;",
"(30, 39)": "tretti>%%ord-neut-nde>;",
"(40, 49)": "førti>%%ord-neut-nde>;",
"(50, 59)": "femti>%%ord-neut-nde>;",
"(60, 69)": "seksti>%%ord-neut-nde>;",
"(70, 79)": "sytti>%%ord-neut-nde>;",
"(80, 89)": "åtti>%%ord-neut-nde>;",
"(90, 99)": "nitti>%%ord-neut-nde>;",
"(100, 999)": "<%spellout-numbering<hundre>%%ord-neut-de>;",
"(1000, 999999)": "<%spellout-numbering<tusen>%%ord-neut-de>;",
"(1000000, 1999999)": "én million>%%ord-neut-te>;",
"(2000000, 999999999)": "<%%spellout-cardinal-reale< million>%%ord-neut-teer>;",
"(1000000000, 1999999999)": "én milliard>%%ord-neut-te>;",
"(2000000000, 999999999999)": "<%%spellout-cardinal-reale< milliard>%%ord-neut-teer>;",
"(1000000000000, 1999999999999)": "én billion>%%ord-neut-te>;",
"(2000000000000, 999999999999999)": "<%%spellout-cardinal-reale< billion>%%ord-neut-teer>;",
"(1000000000000000, 1999999999999999)": "én billiard>%%ord-neut-te>;",
"(2000000000000000, 999999999999999999)": "<%%spellout-cardinal-reale< billiard>%%ord-neut-teer>;",
"(1000000000000000000, 'inf')": "=#,##0=.;"
},
"%spellout-ordinal-plural": {
"0": "nullte;",
"1": "første;",
"2": "andre;",
"3": "tredje;",
"4": "fjerde;",
"5": "femte;",
"6": "sjette;",
"7": "sjuende;",
"8": "åttende;",
"9": "niende;",
"10": "tiende;",
"11": "ellevte;",
"12": "tolvte;",
"(13, 19)": "=%spellout-cardinal-neuter=de;",
"(20, 29)": "tjue>%%ord-plural-nde>;",
"(30, 39)": "tretti>%%ord-plural-nde>;",
"(40, 49)": "førti>%%ord-plural-nde>;",
"(50, 59)": "femti>%%ord-plural-nde>;",
"(60, 69)": "seksti>%%ord-plural-nde>;",
"(70, 79)": "sytti>%%ord-plural-nde>;",
"(80, 89)": "åtti>%%ord-plural-nde>;",
"(90, 99)": "nitti>%%ord-plural-nde>;",
"(100, 999)": "<%spellout-numbering<hundre>%%ord-plural-de>;",
"(1000, 999999)": "<%spellout-numbering<tusen>%%ord-plural-de>;",
"(1000000, 1999999)": "én million>%%ord-plural-te>;",
"(2000000, 999999999)": "<%%spellout-cardinal-reale< million>%%ord-plural-teer>;",
"(1000000000, 1999999999)": "én milliard>%%ord-plural-te>;",
"(2000000000, 999999999999)": "<%%spellout-cardinal-reale< milliard>%%ord-plural-teer>;",
"(1000000000000, 1999999999999)": "én billion>%%ord-plural-te>;",
"(2000000000000, 999999999999999)": "<%%spellout-cardinal-reale< billion>%%ord-plural-teer>;",
"(1000000000000000, 1999999999999999)": "én billiard>%%ord-plural-te>;",
"(2000000000000000, 999999999999999999)": "<%%spellout-cardinal-reale< billiard>%%ord-plural-teer>;",
"(1000000000000000000, 'inf')": "=#,##0=.;"
}
} | true | true |
f71bb2656be1b32d52b27e3457db0d1c0fb78c7c | 1,806 | py | Python | networks/example_ParticleTransformer.py | jet-universe/particle_transformer | 68a7fbcd7d39a64b753251064f120462400895a1 | [
"MIT"
] | 2 | 2022-03-30T12:07:17.000Z | 2022-03-30T13:22:18.000Z | networks/example_ParticleTransformer.py | jet-universe/particle_transformer | 68a7fbcd7d39a64b753251064f120462400895a1 | [
"MIT"
] | null | null | null | networks/example_ParticleTransformer.py | jet-universe/particle_transformer | 68a7fbcd7d39a64b753251064f120462400895a1 | [
"MIT"
] | null | null | null | import os
import torch
from weaver.utils.logger import _logger
from weaver.utils.import_tools import import_module
ParticleTransformer = import_module(
os.path.join(os.path.dirname(__file__), 'ParticleTransformer.py'), 'ParT').ParticleTransformer
class ParticleTransformerWrapper(torch.nn.Module):
    """Thin nn.Module wrapper adapting weaver's calling convention to ParT."""

    def __init__(self, **kwargs) -> None:
        super().__init__()
        # All configuration is forwarded verbatim to ParticleTransformer.
        self.mod = ParticleTransformer(**kwargs)

    @torch.jit.ignore
    def no_weight_decay(self):
        # Parameter names (here the class token) to exclude from weight decay.
        return {'mod.cls_token', }

    def forward(self, points, features, lorentz_vectors, mask):
        # `points` is accepted but unused -- presumably kept so the
        # signature matches weaver's other networks; TODO confirm.
        return self.mod(features, v=lorentz_vectors, mask=mask)
def get_model(data_config, **kwargs):
    """Build a ParticleTransformer classifier plus its export metadata.

    Keyword arguments override the defaults assembled below. Returns a
    (model, model_info) tuple; model_info carries the ONNX export
    configuration (input/output names, shapes, dynamic axes).
    """
    cfg = {
        'input_dim': len(data_config.input_dicts['pf_features']),
        'num_classes': len(data_config.label_value),
        # network configuration
        'pair_input_dim': 4,
        'embed_dims': [128, 512, 128],
        'pair_embed_dims': [64, 64, 64],
        'num_heads': 8,
        'num_layers': 8,
        'num_cls_layers': 2,
        'block_params': None,
        'cls_block_params': {'dropout': 0, 'attn_dropout': 0,
                             'activation_dropout': 0},
        'fc_params': [],
        'activation': 'gelu',
        # misc
        'trim': True,
        'for_inference': False,
    }
    cfg.update(**kwargs)
    _logger.info('Model config: %s' % str(cfg))

    model = ParticleTransformerWrapper(**cfg)
    model_info = {
        'input_names': list(data_config.input_names),
        'input_shapes': {k: ((1,) + s[1:])
                         for k, s in data_config.input_shapes.items()},
        'output_names': ['softmax'],
        'dynamic_axes': {
            # Batch axis plus a per-input particle-count axis are dynamic.
            **{k: {0: 'N', 2: 'n_' + k.split('_')[0]}
               for k in data_config.input_names},
            **{'softmax': {0: 'N'}},
        },
    }
    return model, model_info
def get_loss(data_config, **kwargs):
return torch.nn.CrossEntropyLoss()
| 30.1 | 127 | 0.640089 | import os
import torch
from weaver.utils.logger import _logger
from weaver.utils.import_tools import import_module
ParticleTransformer = import_module(
os.path.join(os.path.dirname(__file__), 'ParticleTransformer.py'), 'ParT').ParticleTransformer
class ParticleTransformerWrapper(torch.nn.Module):
def __init__(self, **kwargs) -> None:
super().__init__()
self.mod = ParticleTransformer(**kwargs)
@torch.jit.ignore
def no_weight_decay(self):
return {'mod.cls_token', }
def forward(self, points, features, lorentz_vectors, mask):
return self.mod(features, v=lorentz_vectors, mask=mask)
def get_model(data_config, **kwargs):
cfg = dict(
input_dim=len(data_config.input_dicts['pf_features']),
num_classes=len(data_config.label_value),
pair_input_dim=4,
embed_dims=[128, 512, 128],
pair_embed_dims=[64, 64, 64],
num_heads=8,
num_layers=8,
num_cls_layers=2,
block_params=None,
cls_block_params={'dropout': 0, 'attn_dropout': 0, 'activation_dropout': 0},
fc_params=[],
activation='gelu',
trim=True,
for_inference=False,
)
cfg.update(**kwargs)
_logger.info('Model config: %s' % str(cfg))
model = ParticleTransformerWrapper(**cfg)
model_info = {
'input_names': list(data_config.input_names),
'input_shapes': {k: ((1,) + s[1:]) for k, s in data_config.input_shapes.items()},
'output_names': ['softmax'],
'dynamic_axes': {**{k: {0: 'N', 2: 'n_' + k.split('_')[0]} for k in data_config.input_names}, **{'softmax': {0: 'N'}}},
}
return model, model_info
def get_loss(data_config, **kwargs):
return torch.nn.CrossEntropyLoss()
| true | true |
f71bb3b612e73dc0ead31535809f8a66a642d316 | 35 | py | Python | tests/translators/__init__.py | cancervariants/variant-normalization | e89a9f8366a659c82b2042aeb7effe339851bfb4 | [
"MIT"
] | 1 | 2022-01-19T18:17:49.000Z | 2022-01-19T18:17:49.000Z | tests/translators/__init__.py | cancervariants/variation-normalization | 9c8fbab1562591ae9445d82ddd15df29f1ea1f5a | [
"MIT"
] | 99 | 2021-06-07T12:50:34.000Z | 2022-03-23T13:38:29.000Z | tests/translators/__init__.py | cancervariants/variant-normalization | e89a9f8366a659c82b2042aeb7effe339851bfb4 | [
"MIT"
] | null | null | null | """The test translator package."""
| 17.5 | 34 | 0.685714 | true | true | |
f71bb42f23d2dcf4e3a6c7c499095bafb5976cc2 | 4,781 | py | Python | nicos_mlz/kws1/testscripts/counting.py | jkrueger1/nicos | 5f4ce66c312dedd78995f9d91e8a6e3c891b262b | [
"CC-BY-3.0",
"Apache-2.0",
"CC-BY-4.0"
] | 12 | 2019-11-06T15:40:36.000Z | 2022-01-01T16:23:00.000Z | nicos_mlz/kws1/testscripts/counting.py | jkrueger1/nicos | 5f4ce66c312dedd78995f9d91e8a6e3c891b262b | [
"CC-BY-3.0",
"Apache-2.0",
"CC-BY-4.0"
] | 91 | 2020-08-18T09:20:26.000Z | 2022-02-01T11:07:14.000Z | nicos_mlz/kws1/testscripts/counting.py | jkrueger1/nicos | 5f4ce66c312dedd78995f9d91e8a6e3c891b262b | [
"CC-BY-3.0",
"Apache-2.0",
"CC-BY-4.0"
] | 6 | 2020-01-11T10:52:30.000Z | 2022-02-25T12:35:23.000Z | # pylint: skip-file
ClearSamples()
SetSample(1, 'Alu1', aperture=(1.2, 5.4, 7.0, 7.0), position={u'sam_trans_x': 208.0, u'sam_trans_y': 202.5}, timefactor=1.0, thickness=1.0, detoffset=-315.0, comment=u'')
SetSample(2, 'Alu2', aperture=(1.2, 5.4, 7.0, 7.0), position={u'sam_trans_x': 235.0, u'sam_trans_y': 202.5}, timefactor=1.0, thickness=1.0, detoffset=-315.0, comment=u'')
SetSample(3, 'Alu3', aperture=(1.2, 5.4, 7.0, 7.0), position={u'sam_trans_x': 262.0, u'sam_trans_y': 202.5}, timefactor=1.0, thickness=1.0, detoffset=-315.0, comment=u'')
SetSample(4, 'Alu10', aperture=(1.2, 5.4, 7.0, 7.0), position={u'sam_trans_x': 289.0, u'sam_trans_y': 202.5}, timefactor=1.0, thickness=1.0, detoffset=-315.0, comment=u'')
SetSample(5, 'Alu11', aperture=(1.2, 5.4, 12.0, 12.0), position={u'sam_trans_x': 316.0, u'sam_trans_y': 202.5}, timefactor=1.0, thickness=1.0, detoffset=-315.0, comment=u'')
SetSample(6, 'Alu19', aperture=(1.2, 5.4, 15.0, 15.0), position={u'sam_trans_x': 343.0, u'sam_trans_y': 202.5}, timefactor=1.0, thickness=1.0, detoffset=-315.0, comment=u'')
SetupNormal()
notify('new test script started')
kwscount(sample='Alu1' , selector='7A' , detector='20m' , chopper='10.0%', collimation='20m (30x30)', polarizer='up' , time=720.0, T_julabo=25.0)
kwscount(sample='Alu2' , selector='7A' , detector='20m' , chopper='10.0%', collimation='20m (30x30)', polarizer='up' , time=720.0, T_julabo=25.0)
kwscount(sample='Alu3' , selector='7A' , detector='20m' , chopper='10.0%', collimation='20m (30x30)', polarizer='up' , time=720.0, T_julabo=25.0)
kwscount(sample='Alu10', selector='7A' , detector='20m' , chopper='10.0%', collimation='20m (30x30)', polarizer='up' , time=720.0, T_julabo=25.0)
kwscount(sample='Alu11', selector='7A' , detector='20m' , chopper='10.0%', collimation='20m (30x30)', polarizer='up' , time=720.0, T_julabo=25.0)
kwscount(sample='Alu19', selector='7A' , detector='20m' , chopper='10.0%', collimation='20m (30x30)', polarizer='up' , time=720.0, T_julabo=25.0)
notify('7A, 20m, 10.0%, finished')
kwscount(sample='Alu1' , selector='6A' , detector='20m' , chopper='off' , collimation='20m (30x30)', polarizer='down', time=720.0, T_julabo=20.0)
kwscount(sample='Alu2' , selector='6A' , detector='20m' , chopper='off' , collimation='20m (30x30)', polarizer='down', time=720.0, T_julabo=20.0)
kwscount(sample='Alu3' , selector='6A' , detector='20m' , chopper='off' , collimation='20m (30x30)', polarizer='down', time=720.0, T_julabo=20.0)
kwscount(sample='Alu10', selector='6A' , detector='20m' , chopper='off' , collimation='20m (30x30)', polarizer='down', time=720.0, T_julabo=20.0)
kwscount(sample='Alu11', selector='6A' , detector='20m' , chopper='off' , collimation='20m (30x30)', polarizer='down', time=720.0, T_julabo=20.0)
kwscount(sample='Alu19', selector='6A' , detector='20m' , chopper='off' , collimation='20m (30x30)', polarizer='down', time=720.0, T_julabo=20.0)
notify('6A, 20m, chopper off, finished')
kwscount(sample='Alu1' , selector='5A' , detector='8m' , chopper='2.5%', collimation='8m (30x30)' , polarizer='up' , time=720.0, T_julabo=30.0)
kwscount(sample='Alu2' , selector='5A' , detector='8m' , chopper='2.5%', collimation='8m (30x30)' , polarizer='up' , time=720.0, T_julabo=30.0)
kwscount(sample='Alu3' , selector='5A' , detector='8m' , chopper='2.5%', collimation='8m (30x30)' , polarizer='up' , time=720.0, T_julabo=30.0)
kwscount(sample='Alu10', selector='5A' , detector='8m' , chopper='2.5%', collimation='8m (30x30)' , polarizer='up' , time=720.0, T_julabo=30.0)
kwscount(sample='Alu11', selector='5A' , detector='8m' , chopper='2.5%', collimation='8m (30x30)' , polarizer='up' , time=720.0, T_julabo=30.0)
kwscount(sample='Alu19', selector='5A' , detector='8m' , chopper='2.5%', collimation='8m (30x30)' , polarizer='up' , time=720.0, T_julabo=30.0)
notify('5A, 8m, 2.5%, finished')
kwscount(sample='Alu1' , selector='10A', detector='8m' , chopper='off' , collimation='14m (30x30)', polarizer='out' , time=720.0, T_julabo=20.0)
kwscount(sample='Alu2' , selector='10A', detector='8m' , chopper='off' , collimation='14m (30x30)', polarizer='out' , time=720.0, T_julabo=20.0)
kwscount(sample='Alu3' , selector='10A', detector='8m' , chopper='off' , collimation='14m (30x30)', polarizer='out' , time=720.0, T_julabo=20.0)
kwscount(sample='Alu10', selector='10A', detector='8m' , chopper='off' , collimation='14m (30x30)', polarizer='out' , time=720.0, T_julabo=20.0)
kwscount(sample='Alu11', selector='10A', detector='8m' , chopper='off' , collimation='14m (30x30)', polarizer='out' , time=720.0, T_julabo=20.0)
kwscount(sample='Alu19', selector='10A', detector='8m' , chopper='off' , collimation='14m (30x30)', polarizer='out' , time=720.0, T_julabo=20.0)
notify('10A, 8m, chopper off, finished')
| 116.609756 | 173 | 0.673081 |
ClearSamples()
SetSample(1, 'Alu1', aperture=(1.2, 5.4, 7.0, 7.0), position={u'sam_trans_x': 208.0, u'sam_trans_y': 202.5}, timefactor=1.0, thickness=1.0, detoffset=-315.0, comment=u'')
SetSample(2, 'Alu2', aperture=(1.2, 5.4, 7.0, 7.0), position={u'sam_trans_x': 235.0, u'sam_trans_y': 202.5}, timefactor=1.0, thickness=1.0, detoffset=-315.0, comment=u'')
SetSample(3, 'Alu3', aperture=(1.2, 5.4, 7.0, 7.0), position={u'sam_trans_x': 262.0, u'sam_trans_y': 202.5}, timefactor=1.0, thickness=1.0, detoffset=-315.0, comment=u'')
SetSample(4, 'Alu10', aperture=(1.2, 5.4, 7.0, 7.0), position={u'sam_trans_x': 289.0, u'sam_trans_y': 202.5}, timefactor=1.0, thickness=1.0, detoffset=-315.0, comment=u'')
SetSample(5, 'Alu11', aperture=(1.2, 5.4, 12.0, 12.0), position={u'sam_trans_x': 316.0, u'sam_trans_y': 202.5}, timefactor=1.0, thickness=1.0, detoffset=-315.0, comment=u'')
SetSample(6, 'Alu19', aperture=(1.2, 5.4, 15.0, 15.0), position={u'sam_trans_x': 343.0, u'sam_trans_y': 202.5}, timefactor=1.0, thickness=1.0, detoffset=-315.0, comment=u'')
SetupNormal()
notify('new test script started')
kwscount(sample='Alu1' , selector='7A' , detector='20m' , chopper='10.0%', collimation='20m (30x30)', polarizer='up' , time=720.0, T_julabo=25.0)
kwscount(sample='Alu2' , selector='7A' , detector='20m' , chopper='10.0%', collimation='20m (30x30)', polarizer='up' , time=720.0, T_julabo=25.0)
kwscount(sample='Alu3' , selector='7A' , detector='20m' , chopper='10.0%', collimation='20m (30x30)', polarizer='up' , time=720.0, T_julabo=25.0)
kwscount(sample='Alu10', selector='7A' , detector='20m' , chopper='10.0%', collimation='20m (30x30)', polarizer='up' , time=720.0, T_julabo=25.0)
kwscount(sample='Alu11', selector='7A' , detector='20m' , chopper='10.0%', collimation='20m (30x30)', polarizer='up' , time=720.0, T_julabo=25.0)
kwscount(sample='Alu19', selector='7A' , detector='20m' , chopper='10.0%', collimation='20m (30x30)', polarizer='up' , time=720.0, T_julabo=25.0)
notify('7A, 20m, 10.0%, finished')
kwscount(sample='Alu1' , selector='6A' , detector='20m' , chopper='off' , collimation='20m (30x30)', polarizer='down', time=720.0, T_julabo=20.0)
kwscount(sample='Alu2' , selector='6A' , detector='20m' , chopper='off' , collimation='20m (30x30)', polarizer='down', time=720.0, T_julabo=20.0)
kwscount(sample='Alu3' , selector='6A' , detector='20m' , chopper='off' , collimation='20m (30x30)', polarizer='down', time=720.0, T_julabo=20.0)
kwscount(sample='Alu10', selector='6A' , detector='20m' , chopper='off' , collimation='20m (30x30)', polarizer='down', time=720.0, T_julabo=20.0)
kwscount(sample='Alu11', selector='6A' , detector='20m' , chopper='off' , collimation='20m (30x30)', polarizer='down', time=720.0, T_julabo=20.0)
kwscount(sample='Alu19', selector='6A' , detector='20m' , chopper='off' , collimation='20m (30x30)', polarizer='down', time=720.0, T_julabo=20.0)
notify('6A, 20m, chopper off, finished')
kwscount(sample='Alu1' , selector='5A' , detector='8m' , chopper='2.5%', collimation='8m (30x30)' , polarizer='up' , time=720.0, T_julabo=30.0)
kwscount(sample='Alu2' , selector='5A' , detector='8m' , chopper='2.5%', collimation='8m (30x30)' , polarizer='up' , time=720.0, T_julabo=30.0)
kwscount(sample='Alu3' , selector='5A' , detector='8m' , chopper='2.5%', collimation='8m (30x30)' , polarizer='up' , time=720.0, T_julabo=30.0)
kwscount(sample='Alu10', selector='5A' , detector='8m' , chopper='2.5%', collimation='8m (30x30)' , polarizer='up' , time=720.0, T_julabo=30.0)
kwscount(sample='Alu11', selector='5A' , detector='8m' , chopper='2.5%', collimation='8m (30x30)' , polarizer='up' , time=720.0, T_julabo=30.0)
kwscount(sample='Alu19', selector='5A' , detector='8m' , chopper='2.5%', collimation='8m (30x30)' , polarizer='up' , time=720.0, T_julabo=30.0)
notify('5A, 8m, 2.5%, finished')
kwscount(sample='Alu1' , selector='10A', detector='8m' , chopper='off' , collimation='14m (30x30)', polarizer='out' , time=720.0, T_julabo=20.0)
kwscount(sample='Alu2' , selector='10A', detector='8m' , chopper='off' , collimation='14m (30x30)', polarizer='out' , time=720.0, T_julabo=20.0)
kwscount(sample='Alu3' , selector='10A', detector='8m' , chopper='off' , collimation='14m (30x30)', polarizer='out' , time=720.0, T_julabo=20.0)
kwscount(sample='Alu10', selector='10A', detector='8m' , chopper='off' , collimation='14m (30x30)', polarizer='out' , time=720.0, T_julabo=20.0)
kwscount(sample='Alu11', selector='10A', detector='8m' , chopper='off' , collimation='14m (30x30)', polarizer='out' , time=720.0, T_julabo=20.0)
kwscount(sample='Alu19', selector='10A', detector='8m' , chopper='off' , collimation='14m (30x30)', polarizer='out' , time=720.0, T_julabo=20.0)
notify('10A, 8m, chopper off, finished')
| true | true |
f71bb4a59b7a4873bb3899fb045f2a55091bf311 | 13,199 | py | Python | sen/tui/ui.py | lachmanfrantisek/sen | a45e87bcdd60de1a246bd62dfdec32f60027bd37 | [
"MIT"
] | 956 | 2015-10-22T14:32:14.000Z | 2022-03-21T02:27:28.000Z | sen/tui/ui.py | lachmanfrantisek/sen | a45e87bcdd60de1a246bd62dfdec32f60027bd37 | [
"MIT"
] | 146 | 2015-09-29T10:04:14.000Z | 2022-02-22T08:28:08.000Z | sen/tui/ui.py | lachmanfrantisek/sen | a45e87bcdd60de1a246bd62dfdec32f60027bd37 | [
"MIT"
] | 77 | 2015-11-12T22:02:18.000Z | 2022-01-24T10:14:46.000Z | """
This is a framework for terminal interfaces built on top of urwid.Frame.
It must NOT contain any application specific code.
"""
import logging
import threading
from concurrent.futures.thread import ThreadPoolExecutor
import urwid
from sen.exceptions import NotifyError
from sen.tui.commands.base import (
FrontendPriority, BackendPriority,
SameThreadPriority, KeyNotMapped
)
from sen.tui.constants import CLEAR_NOTIF_BAR_MESSAGE_IN
from sen.tui.widgets.util import ThreadSafeFrame
from sen.util import log_traceback, OrderedSet
logger = logging.getLogger(__name__)
class ConcurrencyMixin:
def __init__(self):
# worker for long-running tasks - requests
self.worker = ThreadPoolExecutor(max_workers=4)
# worker for quick ui operations
self.ui_worker = ThreadPoolExecutor(max_workers=2)
@staticmethod
def _run(worker, f, *args, **kwargs):
# TODO: do another wrapper to wrap notify exceptions and show them
f = log_traceback(f)
worker.submit(f, *args, **kwargs)
def run_in_background(self, task, *args, **kwargs):
logger.info("running task %r(%s, %s) in background", task, args, kwargs)
self._run(self.worker, task, *args, **kwargs)
def run_quickly_in_background(self, task, *args, **kwargs):
logger.info("running a quick task %r(%s, %s) in background", task, args, kwargs)
self._run(self.ui_worker, task, *args, **kwargs)
class UI(ThreadSafeFrame, ConcurrencyMixin):
"""
handles all UI-specific code
"""
def __init__(self, *args, **kwargs):
super().__init__(*args, **kwargs)
# widget -> message or None
self.widget_message_dict = {}
# message -> widget
self.message_widget_dict = {}
self.status_bar = None
self.prompt_bar = None
# lock when managing notifications:
# * when accessing self.notification_*
# * when accessing widgets
# and most importantly, remember, locking is voodoo
self.notifications_lock = threading.RLock()
# populated when loop and UI are instantiated
self.loop = None
self.commander = None
self.buffers = []
self.buffer_movement_history = OrderedSet()
self.main_list_buffer = None # singleton
self.current_buffer = None
def refresh(self):
self.loop.refresh()
def quit(self):
"""
This could be called from another thread, so let's do this via alarm
"""
def q(*args):
raise urwid.ExitMainLoop()
self.worker.shutdown(wait=False)
self.ui_worker.shutdown(wait=False)
self.loop.set_alarm_in(0, q)
# FIXME: move these to separate mixin
def _set_main_widget(self, widget, redraw):
"""
add provided widget to widget list and display it
:param widget:
:return:
"""
self.set_body(widget)
self.reload_footer()
if redraw:
logger.debug("redraw main widget")
self.refresh()
def display_buffer(self, buffer, redraw=True):
"""
display provided buffer
:param buffer: Buffer
:return:
"""
logger.debug("display buffer %r", buffer)
self.buffer_movement_history.append(buffer)
self.current_buffer = buffer
self._set_main_widget(buffer.widget, redraw=redraw)
def add_and_display_buffer(self, buffer, redraw=True):
"""
add provided buffer to buffer list and display it
:param buffer:
:return:
"""
# FIXME: some buffers have arguments, do a proper comparison -- override __eq__
if buffer not in self.buffers:
logger.debug("adding new buffer {!r}".format(buffer))
self.buffers.append(buffer)
self.display_buffer(buffer, redraw=redraw)
def pick_and_display_buffer(self, i):
"""
pick i-th buffer from list and display it
:param i: int
:return: None
"""
if len(self.buffers) == 1:
# we don't need to display anything
# listing is already displayed
return
else:
try:
self.display_buffer(self.buffers[i])
except IndexError:
# i > len
self.display_buffer(self.buffers[0])
@property
def current_buffer_index(self):
return self.buffers.index(self.current_buffer)
def remove_current_buffer(self, close_if_no_buffer=False):
if len(self.buffers) == 1 and not close_if_no_buffer:
return
self.buffers.remove(self.current_buffer)
self.buffer_movement_history.remove(self.current_buffer)
self.current_buffer.destroy()
if len(self.buffers) > 0:
self.display_buffer(self.buffer_movement_history[-1], True)
return len(self.buffers)
def reload_footer(self, refresh=True, rebuild_statusbar=True):
logger.debug("reload footer")
footer = list(self.widget_message_dict.keys())
if self.prompt_bar:
footer.append(self.prompt_bar)
else:
if rebuild_statusbar or self.status_bar is None:
self.status_bar = self.build_statusbar()
footer.append(self.status_bar)
# logger.debug(footer)
self.set_footer(urwid.Pile(footer))
if refresh:
self.loop.refresh()
def build_statusbar(self):
"""construct and return statusbar widget"""
if self.prompt_bar:
logger.info("prompt is active, won't build status bar")
return
try:
left_widgets = self.current_buffer.build_status_bar() or []
except AttributeError:
left_widgets = []
text_list = []
# FIXME: this code should be placed in buffer
# TODO: display current active worker threads
for idx, buffer in enumerate(self.buffers):
# #1 [I] fedora #2 [L]
fmt = "#{idx} [{name}]"
markup = fmt.format(idx=idx, name=buffer.display_name)
text_list.append((
"status_box_focus" if buffer == self.current_buffer else "status_box",
markup,
))
text_list.append(" ")
text_list = text_list[:-1]
if text_list:
buffer_text = urwid.Text(text_list, align="right")
else:
buffer_text = urwid.Text("", align="right")
columns = urwid.Columns(left_widgets + [buffer_text])
return urwid.AttrMap(columns, "status")
def remove_notification_message(self, message):
logger.debug("requested remove of message %r from notif bar", message)
with self.notifications_lock:
try:
w = self.message_widget_dict[message]
except KeyError:
logger.warning("there is no notification %r displayed: %s",
message, self.message_widget_dict)
return
else:
logger.debug("remove widget %r from new pile", w)
del self.widget_message_dict[w]
del self.message_widget_dict[message]
self.reload_footer(rebuild_statusbar=False)
def remove_widget(self, widget, message=None):
logger.debug("remove widget %r from notif bar", widget)
with self.notifications_lock:
try:
del self.widget_message_dict[widget]
except KeyError:
logger.info("widget %s was already removed", widget)
return
if message:
del self.message_widget_dict[message]
self.reload_footer(rebuild_statusbar=False)
def notify_message(self, message, level="info", clear_if_dupl=True,
clear_in=CLEAR_NOTIF_BAR_MESSAGE_IN):
"""
:param message, str
:param level: str, {info, error}
:param clear_if_dupl: bool, if True, don't display the notification again
:param clear_in: seconds, remove the notificantion after some time
opens notification popup.
"""
with self.notifications_lock:
if clear_if_dupl and message in self.message_widget_dict.keys():
logger.debug("notification %r is already displayed", message)
return
logger.debug("display notification %r", message)
widget = urwid.AttrMap(urwid.Text(message), "notif_{}".format(level))
return self.notify_widget(widget, message=message, clear_in=clear_in)
def notify_widget(self, widget, message=None, clear_in=CLEAR_NOTIF_BAR_MESSAGE_IN):
"""
opens notification popup.
:param widget: instance of Widget, widget to display
:param message: str, message to remove from list of notifications
:param clear_in: int, time seconds when notification should be removed
"""
@log_traceback
def clear_notification(*args, **kwargs):
# the point here is the log_traceback
self.remove_widget(widget, message=message)
if not widget:
return
logger.debug("display notification widget %s", widget)
with self.notifications_lock:
self.widget_message_dict[widget] = message
if message:
self.message_widget_dict[message] = widget
self.reload_footer(rebuild_statusbar=False)
self.loop.set_alarm_in(clear_in, clear_notification)
return widget
def run_command(self, command_input, queue=None, **kwargs):
kwargs["buffer"] = self.current_buffer
command = self.commander.get_command(command_input, **kwargs)
if command is None:
return
if queue is None:
queue = command.priority
if isinstance(queue, FrontendPriority):
self.run_quickly_in_background(command.run)
elif isinstance(queue, BackendPriority):
self.run_in_background(command.run)
elif isinstance(queue, SameThreadPriority):
logger.info("running command %s", command)
try:
command.run()
except NotifyError as ex:
self.notify_message(str(ex), level="error")
logger.error(repr(ex))
else:
raise RuntimeError("command %s doesn't have any priority: %s %s" %
(command_input, command.priority, FrontendPriority))
def run_command_by_key(self, key, size, **kwargs):
command_input = self.commander.get_command_input_by_key(key)
self.run_command(command_input, size=size, **kwargs)
def keypress(self, size, key):
logger.debug("%s keypress %r", self.__class__.__name__, key)
# we should pass the key to header, body, footer first so it's consumed in e.g. statusbar
key = super().keypress(size, key)
if key is None:
logger.info("key was consumed by frame components")
return
logger.info("key was not consumed by frame components")
focused_docker_object = None
selected_widget = getattr(self.current_buffer, "widget", None)
if selected_widget:
focused_docker_object = getattr(self.current_buffer.widget, "focused_docker_object", None)
logger.debug("focused docker object is %s", focused_docker_object)
try:
self.run_command_by_key(
key,
docker_object=focused_docker_object,
size=size
)
except KeyNotMapped as ex:
super_class = ThreadSafeFrame
logger.debug("calling: %s.keypress(%s, %s)", super_class, size, key)
# TODO: up/down doesn't do anything if len(lines) < screen height, that's confusing
key = super_class.keypress(self, size, key)
if key:
self.notify_message(str(ex), level="error")
logger.debug("was key handled? %s", "yes" if key is None else "no")
return key
return
class ThreadSafeLoop(urwid.MainLoop):
def __init__(self, *args, **kwargs):
super().__init__(*args, **kwargs)
self.refresh_lock = threading.RLock()
def entering_idle(self):
with self.refresh_lock:
return super().entering_idle()
def refresh(self):
"""
explicitely refresh user interface; useful when changing widgets dynamically
"""
logger.debug("refresh user interface")
try:
with self.refresh_lock:
self.draw_screen()
except AssertionError:
logger.warning("application is not running")
pass
def get_app_in_loop(pallete):
screen = urwid.raw_display.Screen()
screen.set_terminal_properties(256)
screen.register_palette(pallete)
ui = UI(urwid.SolidFill())
decorated_ui = urwid.AttrMap(ui, "root")
loop = ThreadSafeLoop(decorated_ui, screen=screen, event_loop=urwid.AsyncioEventLoop(),
handle_mouse=False)
ui.loop = loop
return loop, ui
| 35.197333 | 102 | 0.615577 | import logging
import threading
from concurrent.futures.thread import ThreadPoolExecutor
import urwid
from sen.exceptions import NotifyError
from sen.tui.commands.base import (
FrontendPriority, BackendPriority,
SameThreadPriority, KeyNotMapped
)
from sen.tui.constants import CLEAR_NOTIF_BAR_MESSAGE_IN
from sen.tui.widgets.util import ThreadSafeFrame
from sen.util import log_traceback, OrderedSet
logger = logging.getLogger(__name__)
class ConcurrencyMixin:
def __init__(self):
self.worker = ThreadPoolExecutor(max_workers=4)
self.ui_worker = ThreadPoolExecutor(max_workers=2)
@staticmethod
def _run(worker, f, *args, **kwargs):
f = log_traceback(f)
worker.submit(f, *args, **kwargs)
def run_in_background(self, task, *args, **kwargs):
logger.info("running task %r(%s, %s) in background", task, args, kwargs)
self._run(self.worker, task, *args, **kwargs)
def run_quickly_in_background(self, task, *args, **kwargs):
logger.info("running a quick task %r(%s, %s) in background", task, args, kwargs)
self._run(self.ui_worker, task, *args, **kwargs)
class UI(ThreadSafeFrame, ConcurrencyMixin):
def __init__(self, *args, **kwargs):
super().__init__(*args, **kwargs)
self.widget_message_dict = {}
self.message_widget_dict = {}
self.status_bar = None
self.prompt_bar = None
self.notifications_lock = threading.RLock()
self.loop = None
self.commander = None
self.buffers = []
self.buffer_movement_history = OrderedSet()
self.main_list_buffer = None
self.current_buffer = None
def refresh(self):
self.loop.refresh()
def quit(self):
def q(*args):
raise urwid.ExitMainLoop()
self.worker.shutdown(wait=False)
self.ui_worker.shutdown(wait=False)
self.loop.set_alarm_in(0, q)
def _set_main_widget(self, widget, redraw):
self.set_body(widget)
self.reload_footer()
if redraw:
logger.debug("redraw main widget")
self.refresh()
def display_buffer(self, buffer, redraw=True):
logger.debug("display buffer %r", buffer)
self.buffer_movement_history.append(buffer)
self.current_buffer = buffer
self._set_main_widget(buffer.widget, redraw=redraw)
def add_and_display_buffer(self, buffer, redraw=True):
if buffer not in self.buffers:
logger.debug("adding new buffer {!r}".format(buffer))
self.buffers.append(buffer)
self.display_buffer(buffer, redraw=redraw)
def pick_and_display_buffer(self, i):
if len(self.buffers) == 1:
# listing is already displayed
return
else:
try:
self.display_buffer(self.buffers[i])
except IndexError:
# i > len
self.display_buffer(self.buffers[0])
@property
def current_buffer_index(self):
return self.buffers.index(self.current_buffer)
def remove_current_buffer(self, close_if_no_buffer=False):
if len(self.buffers) == 1 and not close_if_no_buffer:
return
self.buffers.remove(self.current_buffer)
self.buffer_movement_history.remove(self.current_buffer)
self.current_buffer.destroy()
if len(self.buffers) > 0:
self.display_buffer(self.buffer_movement_history[-1], True)
return len(self.buffers)
def reload_footer(self, refresh=True, rebuild_statusbar=True):
logger.debug("reload footer")
footer = list(self.widget_message_dict.keys())
if self.prompt_bar:
footer.append(self.prompt_bar)
else:
if rebuild_statusbar or self.status_bar is None:
self.status_bar = self.build_statusbar()
footer.append(self.status_bar)
# logger.debug(footer)
self.set_footer(urwid.Pile(footer))
if refresh:
self.loop.refresh()
def build_statusbar(self):
if self.prompt_bar:
logger.info("prompt is active, won't build status bar")
return
try:
left_widgets = self.current_buffer.build_status_bar() or []
except AttributeError:
left_widgets = []
text_list = []
for idx, buffer in enumerate(self.buffers):
[{name}]"
markup = fmt.format(idx=idx, name=buffer.display_name)
text_list.append((
"status_box_focus" if buffer == self.current_buffer else "status_box",
markup,
))
text_list.append(" ")
text_list = text_list[:-1]
if text_list:
buffer_text = urwid.Text(text_list, align="right")
else:
buffer_text = urwid.Text("", align="right")
columns = urwid.Columns(left_widgets + [buffer_text])
return urwid.AttrMap(columns, "status")
def remove_notification_message(self, message):
logger.debug("requested remove of message %r from notif bar", message)
with self.notifications_lock:
try:
w = self.message_widget_dict[message]
except KeyError:
logger.warning("there is no notification %r displayed: %s",
message, self.message_widget_dict)
return
else:
logger.debug("remove widget %r from new pile", w)
del self.widget_message_dict[w]
del self.message_widget_dict[message]
self.reload_footer(rebuild_statusbar=False)
def remove_widget(self, widget, message=None):
logger.debug("remove widget %r from notif bar", widget)
with self.notifications_lock:
try:
del self.widget_message_dict[widget]
except KeyError:
logger.info("widget %s was already removed", widget)
return
if message:
del self.message_widget_dict[message]
self.reload_footer(rebuild_statusbar=False)
def notify_message(self, message, level="info", clear_if_dupl=True,
clear_in=CLEAR_NOTIF_BAR_MESSAGE_IN):
with self.notifications_lock:
if clear_if_dupl and message in self.message_widget_dict.keys():
logger.debug("notification %r is already displayed", message)
return
logger.debug("display notification %r", message)
widget = urwid.AttrMap(urwid.Text(message), "notif_{}".format(level))
return self.notify_widget(widget, message=message, clear_in=clear_in)
def notify_widget(self, widget, message=None, clear_in=CLEAR_NOTIF_BAR_MESSAGE_IN):
@log_traceback
def clear_notification(*args, **kwargs):
self.remove_widget(widget, message=message)
if not widget:
return
logger.debug("display notification widget %s", widget)
with self.notifications_lock:
self.widget_message_dict[widget] = message
if message:
self.message_widget_dict[message] = widget
self.reload_footer(rebuild_statusbar=False)
self.loop.set_alarm_in(clear_in, clear_notification)
return widget
def run_command(self, command_input, queue=None, **kwargs):
kwargs["buffer"] = self.current_buffer
command = self.commander.get_command(command_input, **kwargs)
if command is None:
return
if queue is None:
queue = command.priority
if isinstance(queue, FrontendPriority):
self.run_quickly_in_background(command.run)
elif isinstance(queue, BackendPriority):
self.run_in_background(command.run)
elif isinstance(queue, SameThreadPriority):
logger.info("running command %s", command)
try:
command.run()
except NotifyError as ex:
self.notify_message(str(ex), level="error")
logger.error(repr(ex))
else:
raise RuntimeError("command %s doesn't have any priority: %s %s" %
(command_input, command.priority, FrontendPriority))
def run_command_by_key(self, key, size, **kwargs):
command_input = self.commander.get_command_input_by_key(key)
self.run_command(command_input, size=size, **kwargs)
def keypress(self, size, key):
logger.debug("%s keypress %r", self.__class__.__name__, key)
# we should pass the key to header, body, footer first so it's consumed in e.g. statusbar
key = super().keypress(size, key)
if key is None:
logger.info("key was consumed by frame components")
return
logger.info("key was not consumed by frame components")
focused_docker_object = None
selected_widget = getattr(self.current_buffer, "widget", None)
if selected_widget:
focused_docker_object = getattr(self.current_buffer.widget, "focused_docker_object", None)
logger.debug("focused docker object is %s", focused_docker_object)
try:
self.run_command_by_key(
key,
docker_object=focused_docker_object,
size=size
)
except KeyNotMapped as ex:
super_class = ThreadSafeFrame
logger.debug("calling: %s.keypress(%s, %s)", super_class, size, key)
key = super_class.keypress(self, size, key)
if key:
self.notify_message(str(ex), level="error")
logger.debug("was key handled? %s", "yes" if key is None else "no")
return key
return
class ThreadSafeLoop(urwid.MainLoop):
def __init__(self, *args, **kwargs):
super().__init__(*args, **kwargs)
self.refresh_lock = threading.RLock()
def entering_idle(self):
with self.refresh_lock:
return super().entering_idle()
def refresh(self):
logger.debug("refresh user interface")
try:
with self.refresh_lock:
self.draw_screen()
except AssertionError:
logger.warning("application is not running")
pass
def get_app_in_loop(pallete):
screen = urwid.raw_display.Screen()
screen.set_terminal_properties(256)
screen.register_palette(pallete)
ui = UI(urwid.SolidFill())
decorated_ui = urwid.AttrMap(ui, "root")
loop = ThreadSafeLoop(decorated_ui, screen=screen, event_loop=urwid.AsyncioEventLoop(),
handle_mouse=False)
ui.loop = loop
return loop, ui
| true | true |
f71bb74a3fd635e20003d3cfecbe5cb463ebe09a | 3,783 | py | Python | airbyte-integrations/connectors/source-marketo-singer/source_marketo_singer/source.py | abalustre/airbyte | e629613588620e8a4c48b155e0f414f41a005bd0 | [
"MIT"
] | 1 | 2021-11-04T07:55:40.000Z | 2021-11-04T07:55:40.000Z | airbyte-integrations/connectors/source-marketo-singer/source_marketo_singer/source.py | Outoftheblue-ai/airbyte | 98b18fd852ec70dafe02fb3ffe45b1ac30345cd0 | [
"MIT"
] | 1 | 2021-11-01T10:44:54.000Z | 2021-11-01T10:47:42.000Z | airbyte-integrations/connectors/source-marketo-singer/source_marketo_singer/source.py | Outoftheblue-ai/airbyte | 98b18fd852ec70dafe02fb3ffe45b1ac30345cd0 | [
"MIT"
] | null | null | null | #
# Copyright (c) 2021 Airbyte, Inc., all rights reserved.
#
import json
from typing import Dict
from airbyte_protocol import AirbyteConnectionStatus, Status, SyncMode
from base_python import AirbyteLogger
from base_singer import BaseSingerSource, SyncModeInfo
class SourceMarketoSinger(BaseSingerSource):
    """Airbyte source connector wrapping the ``tap-marketo`` Singer tap."""

    # Executable name of the underlying Singer tap.
    tap_cmd = "tap-marketo"
    # Human-readable name used in log and error messages.
    tap_name = "Marketo API"
    # Deliberately broad: any exception raised during discovery is reported
    # as a failed connection check by check_config().
    api_error = Exception

    def transform_config(self, raw_config):
        """Rename the Airbyte spec fields to the keys tap-marketo expects."""
        return {
            "endpoint": raw_config["endpoint_url"],
            "identity": raw_config["identity_url"],
            "client_id": raw_config["client_id"],
            "client_secret": raw_config["client_secret"],
            "start_date": raw_config["start_date"],
        }

    def try_connect(self, logger: AirbyteLogger, config_path: str):
        """Probe connectivity by running the tap's discover phase; raises on failure."""
        self.discover(logger, config_path)

    def check_config(self, logger: AirbyteLogger, config_path: str, config: json) -> AirbyteConnectionStatus:
        """Return SUCCEEDED if discovery works with these credentials, FAILED otherwise."""
        try:
            self.try_connect(logger, config_path)
        except self.api_error as err:
            logger.error(f"Exception while connecting to {self.tap_name}: {err}")
            # this should be in UI
            error_msg = f"Unable to connect to {self.tap_name} with the provided credentials."
            return AirbyteConnectionStatus(status=Status.FAILED, message=error_msg)
        return AirbyteConnectionStatus(status=Status.SUCCEEDED)

    def get_sync_mode_overrides(self) -> Dict[str, SyncModeInfo]:
        """Force incremental sync mode (source-defined cursor) for these streams."""
        incremental_streams = [
            "leads",
            "activities_visit_webpage",
            "activities_fill_out_form",
            "activities_click_link",
            "activities_send_email",
            "activities_email_delivered",
            "activities_email_bounced",
            "activities_unsubscribe_email",
            "activities_open_email",
            "activities_click_email",
            "activities_new_lead",
            "activities_change_data_value",
            "activities_change_score",
            "activities_add_to_list",
            "activities_remove_from_list",
            "activities_email_bounced_soft",
            "activities_merge_leads",
            "activities_add_to_opportunity",
            "activities_remove_from_opportunity",
            "activities_update_opportunity",
            "activities_delete_lead",
            "activities_send_alert",
            "activities_send_sales_email",
            "activities_open_sales_email",
            "activities_click_sales_email",
            "activities_receive_sales_email",
            "activities_request_campaign",
            "activities_sales_email_bounced",
            "activities_change_lead_partition",
            "activities_change_revenue_stage",
            "activities_change_revenue_stage_manually",
            "activities_change_status_in_progression",
            "activities_change_segment",
            "activities_call_webhook",
            "activities_sent_forward_to_friend_email",
            "activities_received_forward_to_friend_email",
            "activities_add_to_nurture",
            "activities_change_nurture_track",
            "activities_change_nurture_cadence",
            "activities_change_program_member_data",
            "activities_push_lead_to_marketo",
            "activities_share_content",
            "campaigns",
            "lists",
            "programs",
        ]
        return {s: SyncModeInfo([SyncMode.incremental], True, []) for s in incremental_streams}

    def read_cmd(self, logger: AirbyteLogger, config_path: str, catalog_path: str, state_path: str = None) -> str:
        """Build the shell command that runs the tap; includes --state only when given."""
        state_opt = f"--state {state_path}" if state_path else ""
        return f"{self.tap_cmd} --config {config_path} --properties {catalog_path} {state_opt}"
| 39.821053 | 114 | 0.651335 |
import json
from typing import Dict
from airbyte_protocol import AirbyteConnectionStatus, Status, SyncMode
from base_python import AirbyteLogger
from base_singer import BaseSingerSource, SyncModeInfo
class SourceMarketoSinger(BaseSingerSource):
tap_cmd = "tap-marketo"
tap_name = "Marketo API"
api_error = Exception
def transform_config(self, raw_config):
return {
"endpoint": raw_config["endpoint_url"],
"identity": raw_config["identity_url"],
"client_id": raw_config["client_id"],
"client_secret": raw_config["client_secret"],
"start_date": raw_config["start_date"],
}
def try_connect(self, logger: AirbyteLogger, config_path: str):
self.discover(logger, config_path)
def check_config(self, logger: AirbyteLogger, config_path: str, config: json) -> AirbyteConnectionStatus:
try:
self.try_connect(logger, config_path)
except self.api_error as err:
logger.error(f"Exception while connecting to {self.tap_name}: {err}")
error_msg = f"Unable to connect to {self.tap_name} with the provided credentials."
return AirbyteConnectionStatus(status=Status.FAILED, message=error_msg)
return AirbyteConnectionStatus(status=Status.SUCCEEDED)
def get_sync_mode_overrides(self) -> Dict[str, SyncModeInfo]:
incremental_streams = [
"leads",
"activities_visit_webpage",
"activities_fill_out_form",
"activities_click_link",
"activities_send_email",
"activities_email_delivered",
"activities_email_bounced",
"activities_unsubscribe_email",
"activities_open_email",
"activities_click_email",
"activities_new_lead",
"activities_change_data_value",
"activities_change_score",
"activities_add_to_list",
"activities_remove_from_list",
"activities_email_bounced_soft",
"activities_merge_leads",
"activities_add_to_opportunity",
"activities_remove_from_opportunity",
"activities_update_opportunity",
"activities_delete_lead",
"activities_send_alert",
"activities_send_sales_email",
"activities_open_sales_email",
"activities_click_sales_email",
"activities_receive_sales_email",
"activities_request_campaign",
"activities_sales_email_bounced",
"activities_change_lead_partition",
"activities_change_revenue_stage",
"activities_change_revenue_stage_manually",
"activities_change_status_in_progression",
"activities_change_segment",
"activities_call_webhook",
"activities_sent_forward_to_friend_email",
"activities_received_forward_to_friend_email",
"activities_add_to_nurture",
"activities_change_nurture_track",
"activities_change_nurture_cadence",
"activities_change_program_member_data",
"activities_push_lead_to_marketo",
"activities_share_content",
"campaigns",
"lists",
"programs",
]
return {s: SyncModeInfo([SyncMode.incremental], True, []) for s in incremental_streams}
def read_cmd(self, logger: AirbyteLogger, config_path: str, catalog_path: str, state_path: str = None) -> str:
state_opt = f"--state {state_path}" if state_path else ""
return f"{self.tap_cmd} --config {config_path} --properties {catalog_path} {state_opt}"
| true | true |
f71bba63aef3cc3729067901106dcb6217bcab5d | 24,141 | py | Python | benchmarks/pipe.py | jessijzhao/fairscale | d6a8fc6dadc5d5ab4e3ee3f42f8cd570d70d30ec | [
"Apache-2.0",
"BSD-3-Clause"
] | null | null | null | benchmarks/pipe.py | jessijzhao/fairscale | d6a8fc6dadc5d5ab4e3ee3f42f8cd570d70d30ec | [
"Apache-2.0",
"BSD-3-Clause"
] | null | null | null | benchmarks/pipe.py | jessijzhao/fairscale | d6a8fc6dadc5d5ab4e3ee3f42f8cd570d70d30ec | [
"Apache-2.0",
"BSD-3-Clause"
] | null | null | null | # Copyright (c) Facebook, Inc. and its affiliates. All rights reserved.
import argparse
import logging
import math
import os
import time
import warnings
from benchmark_dataset import BenchmarkLMDataset, collate_sentences_lm
import torch
from torch.distributed import rpc
import torch.multiprocessing as mp
import torch.nn as nn
from torch.nn.parallel import DistributedDataParallel as DDP
from torch.utils.data import DataLoader
import torchtext
from torchtext.data.utils import get_tokenizer
from fairscale.nn import Pipe
from fairscale.nn.model_parallel import initialize_model_parallel
from fairscale.nn.model_parallel.initialize import get_data_parallel_group, get_pipeline_parallel_group
from fairscale.nn.pipe import LazyModule, pipe
from fairscale.optim import GradScaler
from fairscale.optim.oss import OSS
from fairscale.utils.testing import dist_init, get_worker_map
try:
from fairscale.optim import Adam # type: ignore
can_benchmark = True
except ImportError:
from torch.optim import Adam # type: ignore
can_benchmark = False
def init_random_seed(seed: int):
    """Seed torch (CPU and CUDA) plus numpy for reproducible runs."""
    import numpy

    numpy.random.seed(seed)
    torch.manual_seed(seed)
    torch.cuda.manual_seed(seed)
# NOTE(review): PIPE_CHUNKS is not referenced elsewhere in this file; the
# --chunks CLI flag is what actually controls microbatching — confirm before removing.
PIPE_CHUNKS = 2
# Global forward-pass counter, incremented by TransformerDecoderLayer.forward.
iteration_count = 0
class EmbeddingLayer(nn.Embedding):
    """Token embedding whose output is scaled by sqrt(embedding_dim)."""

    def __init__(self, ntoken, ninp, initrange):
        super().__init__(ntoken, ninp)
        self.ninp = ninp
        # Uniform init in [-initrange, initrange].
        self.weight.data.uniform_(-initrange, initrange)

    def forward(self, src):
        scale = math.sqrt(self.ninp)
        return super().forward(src) * scale
class PositionalEncodingLayer(nn.Module):
    """Adds fixed sinusoidal position encodings to the input, then applies dropout."""

    def __init__(self, d_model, dropout=0.1, max_len=5000):
        super(PositionalEncodingLayer, self).__init__()
        self.dropout = nn.Dropout(p=dropout)

        positions = torch.arange(0, max_len, dtype=torch.float).unsqueeze(1)
        freqs = torch.exp(torch.arange(0, d_model, 2).float() * (-math.log(10000.0) / d_model))
        table = torch.zeros(max_len, d_model)
        table[:, 0::2] = torch.sin(positions * freqs)
        table[:, 1::2] = torch.cos(positions * freqs)
        # Shape (max_len, 1, d_model) so it broadcasts over the batch dimension.
        self.register_buffer("pe", table.unsqueeze(0).transpose(0, 1))

    def forward(self, x):
        return self.dropout(x + self.pe[: x.size(0), :])
class TransformerDecoderLayer(nn.TransformerEncoderLayer):
    """Though this class inherits from torch.nn.TransformerEncoderLayer,
    it functions as a decoder in this model"""

    def __init__(self, ninp, nhead, nhid, droupout):
        super().__init__(ninp, nhead, nhid, droupout)
        # Causal mask, built lazily on first forward() and cached until the
        # sequence length changes.
        self.src_mask = None

    def _generate_square_subsequent_mask(self, sz):
        # Additive attention mask: 0 where position i may attend to j (j <= i),
        # -inf elsewhere.
        mask = (torch.triu(torch.ones(sz, sz)) == 1).transpose(0, 1)
        mask = mask.float().masked_fill(mask == 0, float("-inf")).masked_fill(mask == 1, float(0.0))
        return mask

    def forward(self, src):
        global iteration_count
        iteration_count += 1  # module-level step counter, used only by the debug hook below
        # if iteration_count == 196:
        # dump_cuda_tensors()

        if self.src_mask is None or self.src_mask.size(0) != len(src):
            device = src.device
            mask = self._generate_square_subsequent_mask(len(src)).to(device)
            self.src_mask = mask

        return super().forward(src, self.src_mask)
class LinearLayer(nn.Linear):
    """Output projection with zeroed bias and uniform weight initialization."""

    def __init__(self, ninp, ntoken, initrange):
        super().__init__(ninp, ntoken)
        self.weight.data.uniform_(-initrange, initrange)
        self.bias.data.zero_()
class TransformerLMSequential(nn.Sequential):
    """GPT-2-style language model expressed as nn.Sequential so Pipe can partition it."""

    def __init__(self, ntokens, ninp, nhead, nhid, dropout, initrange, ndecoder):
        decoder_stack = [TransformerDecoderLayer(ninp, nhead, nhid, dropout) for _ in range(ndecoder)]
        super(TransformerLMSequential, self).__init__(
            EmbeddingLayer(ntokens, ninp, initrange),
            PositionalEncodingLayer(ninp, dropout),
            *decoder_stack,
            LinearLayer(ninp, ntokens, initrange),
        )
def get_data(device):
    """Load WikiText2 via (legacy) torchtext and return batched splits.

    Returns ``(ntokens, train_data, val_data, test_data)``; each split is laid
    out by batchify() as a (tokens_per_column, batch_size) tensor on ``device``.
    """
    # Legacy torchtext emits deprecation warnings; record them so the benchmark
    # output stays clean.  The capture list itself is intentionally unused.
    with warnings.catch_warnings(record=True) as fjldska:
        TEXT = torchtext.data.Field(
            tokenize=get_tokenizer("basic_english"), init_token="<sos>", eos_token="<eos>", lower=True
        )
        train_txt, val_txt, test_txt = torchtext.datasets.WikiText2.splits(TEXT)
        TEXT.build_vocab(train_txt)
        ntokens = len(TEXT.vocab.stoi)

        batch_size = 20
        eval_batch_size = 10
        train_data = batchify(train_txt, batch_size, TEXT, device)
        val_data = batchify(val_txt, eval_batch_size, TEXT, device)
        test_data = batchify(test_txt, eval_batch_size, TEXT, device)

        return ntokens, train_data, val_data, test_data
def batchify(data, bsz, TEXT, device):
    """Numericalize a torchtext dataset and lay it out as ``bsz`` columns.

    Trailing tokens that do not fill a whole column are dropped; the result has
    shape (tokens_per_column, bsz) and lives on ``device``.
    """
    tokens = TEXT.numericalize([data.examples[0].text])
    usable = (tokens.size(0) // bsz) * bsz
    columns = tokens.narrow(0, 0, usable).view(bsz, -1).t().contiguous()
    return columns.to(device)
def get_batch(source, i, bptt):
    """Return (inputs, flat targets) of length <= bptt starting at row ``i``.

    Targets are the inputs shifted forward by one position.
    """
    end = min(i + bptt, len(source) - 1)
    inputs = source[i:end]
    targets = source[i + 1 : end + 1].view(-1)
    return inputs, targets
def make_model(args, device, ntokens):
    """Build the transformer LM plus criterion, optimizer factory and grad scaler.

    With --lazy-construction the model is returned as a list of LazyModule
    wrappers (materialized later by Pipe); otherwise it is an eagerly built
    TransformerLMSequential on ``device``.  The optimizer is returned as a
    *factory* taking the (possibly partitioned) model, so OSS can bind to the
    data-parallel group after pipeline placement.
    """
    ninp = 2048  # embedding dimension
    nhid = 2048  # the dimension of the feedforward network model in nn.TransformerEncoder
    nhead = 32  # the number of heads in the multiheadattention models
    dropout = 0
    initrange = 0.1
    ndecoder = args.num_decoder_layers

    if args.lazy_construction:
        # The lambdas close over loop-invariant values only, so Python's
        # late-binding-closure pitfall does not apply here.
        layers = [
            LazyModule(lambda: EmbeddingLayer(ntokens, ninp, initrange)),
            LazyModule(lambda: PositionalEncodingLayer(ninp, dropout)),
        ]
        for _ in range(ndecoder):
            layers.append(LazyModule(lambda: TransformerDecoderLayer(ninp, nhead, nhid, dropout)))
        layers.append(LazyModule(lambda: LinearLayer(ninp, ntokens, initrange)))
        model = layers
    else:
        model = TransformerLMSequential(ntokens, ninp, nhead, nhid, dropout, initrange, ndecoder).to(device)

    criterion = nn.CrossEntropyLoss()
    lr = 0.01  # learning rate

    def make_adam(model):
        if args.ddp_zero:
            # ZeRO-style sharded optimizer over the data-parallel group.
            return OSS(params=model.parameters(), optim=Adam, group=get_data_parallel_group(), lr=lr)
        else:
            return Adam(model.parameters(), lr=lr)

    optimizer = make_adam
    scaler = GradScaler()

    return model, criterion, optimizer, scaler
def get_tensors_by_size_bucket():
    """Histogram of live CUDA tensors keyed by ``(*shape, element_size)``."""
    from collections import defaultdict
    import gc

    buckets = defaultdict(int)
    for candidate in gc.get_objects():
        if isinstance(candidate, torch.Tensor) and candidate.device.type == "cuda":
            buckets[(*candidate.size(),) + (candidate.element_size(),)] += 1
    return buckets
def dump_size_buckets(size_buckets, prefix=""):
    """Print each (shape, element_size) bucket with its count and element total."""
    from functools import reduce
    import operator

    grand_total = 0
    for bucket, count in size_buckets.items():
        elems = reduce(operator.mul, bucket) * count
        grand_total += elems
        print(prefix + f"{bucket} : {count}, {elems}")

    print(prefix + f"total = {grand_total}")
# Module-level state for check_size_buckets(): the histogram from the previous
# call, and a one-shot flag so the full dump is printed at most once.
last_size_buckets = None
once = True
def safe_rank():
    """Return the distributed rank, or 0 when no process group is initialized.

    Older torch versions raised AssertionError from get_rank() before
    init_process_group(); newer versions raise RuntimeError or ValueError, so
    all three are treated as "not initialized".
    """
    try:
        return torch.distributed.get_rank()
    except (AssertionError, RuntimeError, ValueError):
        return 0
def check_size_buckets():
    """Compare the current CUDA-tensor histogram against the previous call.

    Prints a diff whenever the histogram changes between calls (and a one-time
    full dump); used to hunt for leaked tensors across training iterations.
    """
    global last_size_buckets
    global once
    size_buckets = get_tensors_by_size_bucket()
    if last_size_buckets is not None:
        if size_buckets != last_size_buckets:
            # Bug fix: this f-string previously read `{safe-rank()}`, i.e. the
            # subtraction of two undefined names, which raised NameError the
            # moment a difference was found.
            print(f"difference is outstanding tensors: {safe_rank()}")
            dump_size_buckets(last_size_buckets, "old: ")
            dump_size_buckets(size_buckets, "new: ")
        if once:
            print(f"dumping buckets for: {safe_rank()}")
            dump_size_buckets(last_size_buckets, "old: ")
            dump_size_buckets(size_buckets, "new: ")
            once = False
    else:
        print(f"size buckets none on {safe_rank()}")
    last_size_buckets = size_buckets
def dump_cuda_tensors():
    """Print a histogram of all live CUDA tensors plus torch CUDA memory stats."""
    print(f"dumping cuda tensors...")
    from collections import defaultdict
    from functools import reduce
    import gc
    import operator

    # Bug fix: `size_buckets` was never initialized, so this function raised
    # NameError on every call (at the .items() loop even with no CUDA tensors).
    size_buckets = defaultdict(int)
    for obj in gc.get_objects():
        if not isinstance(obj, torch.Tensor):
            continue
        if obj.device.type == "cuda":
            size_buckets[(*obj.size(),) + (obj.element_size(),)] += 1

    print(f"outstanding cuda tensors:")
    total = 0
    for key, value in size_buckets.items():
        this = reduce(operator.mul, key) * value
        total += this
        print(f"{key} : {value}, {this}")
    print(f"total size = {total}")

    import pprint

    pprint.pprint(torch.cuda.memory_stats())
def train(lm_dataloader, model, criterion, optimizer, vocab_size, args):
    """One epoch of pipeline-parallel LM training over ``lm_dataloader``.

    Handles three launch modes: single-process Pipe, multi-process pipeline
    groups, and pipeline + DDP/OSS ("ddp_zero").  Only the last pipeline stage
    computes the loss; earlier stages call back_helper() to relay gradients.
    ``optimizer`` arrives as a factory (see make_model) so OSS/Adam can be
    constructed after the model has been partitioned onto its devices.
    """
    model.train()
    from functools import reduce
    import operator

    num_params = reduce(operator.add, (reduce(operator.mul, x.size()) for x in model.parameters()))
    if model.group:
        # Multi-process pipeline: aggregate the per-stage parameter counts.
        total = torch.Tensor([num_params])
        if torch.cuda.is_available():
            total = total.cuda()
        torch.distributed.all_reduce(total, group=model.group)
        logging.info(
            f"training model, #prams = {num_params}, group: {model.group.rank()}, grank:"
            f" {torch.distributed.get_rank()}, sizes {model.group.size()}"
        )
        torch.distributed.barrier()
        if model.group.rank() == 0:
            logging.info(f"total #prams = {total.item()}")
    else:
        logging.info(f"training model, #prams = {num_params}")
    vocab_size = 10000  # FIXME
    total_loss = 0.0
    start_time = time.time()
    word_counter = 0

    optimizer = optimizer(model)

    def get_first_device(model):
        # Device holding the first pipeline stage (where inputs are fed).
        if isinstance(model, DDP):
            model = model.module

        if not torch.cuda.is_available():
            return torch.device("cpu")
        if model.devices:
            return model.devices[0]
        else:
            return torch.cuda.current_device()

    def get_last_device(model):
        # Device holding the last pipeline stage (where the loss is computed).
        if isinstance(model, DDP):
            model = model.module

        if not torch.cuda.is_available():
            return torch.device("cpu")
        if model.devices:
            return model.devices[-1]
        else:
            return torch.cuda.current_device()

    pipe_group = model.group

    if args.ddp_zero:
        model = DDP(
            model,
            device_ids=[torch.cuda.current_device()],
            process_group=get_data_parallel_group(),
            find_unused_parameters=False,
        )

    if pipe_group and pipe_group.rank() != 0 and pipe_group.rank() != (pipe_group.size() - 1):
        # Interior pipeline stages never consume real data; feed them dummy
        # batches of the right length so the loop structure stays identical.
        thing = {"input": torch.zeros(args.batch_size)}

        class FakeDataset:
            def __getitem__(self, index):
                return thing

            def __len__(self):
                return len(lm_dataloader)

        lm_dataloader = FakeDataset()

    for i, batch in enumerate(lm_dataloader):
        bi = batch["input"]  # NOTE(review): unused local
        if args.max_batch and i > args.max_batch:
            break
        optimizer.zero_grad()
        try:
            if (pipe_group is None or pipe_group.rank() == 0) and not args.ddp_zero:
                tmp = batch["input"].to(get_first_device(model))
                output = model(tmp)
            else:
                output = model(batch["input"])
        except Exception as e:
            raise RuntimeError(f"training failed on {torch.distributed.get_rank()}") from e

        if pipe_group is None or pipe_group.rank() == pipe_group.size() - 1:
            # Last stage: compute the loss and start the backward pass.
            target = batch["target"].to(get_last_device(model))
            output = output.to(target.device)

            loss = criterion(output.view(-1, vocab_size), target.view(-1))
            if args.ddp_zero:
                # Average the loss across the data-parallel replicas.
                ddp_group = get_data_parallel_group()
                torch.distributed.all_reduce(loss, op=torch.distributed.ReduceOp.SUM, group=ddp_group)
                loss /= ddp_group.size()
            loss.backward()
            del target
        else:
            # Earlier stages only relay gradients coming from downstream.
            if args.ddp_zero:
                model.module.back_helper(output)
            else:
                model.back_helper(output)

            del output

        torch.nn.utils.clip_grad_value_(model.parameters(), 0.05)
        optimizer.step()

        if pipe_group is None or pipe_group.rank() == pipe_group.size() - 1:
            total_loss += loss.item()
            log_interval = 1
            word_counter += batch["ntokens"]
            if i % log_interval == 0 and i > 0:
                cur_loss = total_loss / log_interval
                elapsed = time.time() - start_time
                print(
                    "| batch {:5d} | wps {:5.2f} | loss {:5.2f} | ppl {:8.2f}".format(
                        i, word_counter / elapsed, cur_loss, math.exp(cur_loss)
                    )
                )
                word_counter = 0
                total_loss = 0
                start_time = time.time()

        # if i >= 10:
        # break
        # torch.cuda.empty_cache()
        # check_size_buckets()
def evaluate(eval_model, data_source, criterion, bptt, ntokens):
    """Return the mean per-token loss of ``eval_model`` over ``data_source``."""
    eval_model.eval()
    accumulated = 0.0
    with torch.no_grad():
        for start in range(0, data_source.size(0) - 1, bptt):
            inputs, targets = get_batch(data_source, start, bptt)
            logits = eval_model(inputs).to(targets.device)
            flat = logits.view(-1, ntokens)
            # Weight each window's loss by its length before averaging.
            accumulated += len(inputs) * criterion(flat, targets).item()
    return accumulated / (len(data_source) - 1)
def get_number_of_words(data):
    """Total token count of a (rows, cols) batch tensor."""
    shape = data.size()
    return shape[0] * shape[1]
def benchmark_language_model(train_data, val_data, test_data, model, criterion, optimizer, ntokens, args):
    """Train one epoch on WikiText2 and check throughput/memory against golden runs.

    Evaluation is stubbed out (val/test loss hard-coded to 1).  The regression
    asserts only fire for the 4-way-balanced fairscale-Adam configuration.
    """
    epoch = 1
    bptt = 35
    start_time = time.time()

    print("-" * 110)
    print("| start of epoch {:1d}".format(epoch))
    print("-" * 110)
    epoch_start_time = time.time()
    # NOTE(review): train() takes (dataloader, model, criterion, optimizer,
    # vocab_size, args) — this call passes an extra `bptt` argument and would
    # raise TypeError if this legacy path were exercised; confirm intent.
    train(train_data, model, criterion, optimizer, bptt, ntokens, args)
    val_loss = 1  # evaluate(model, val_data, criterion, bptt, ntokens)
    print("-" * 89)
    print(
        "| end of epoch {:1d} | time: {:5.2f}s | valid loss {:5.2f} ".format(
            epoch, (time.time() - epoch_start_time), val_loss
        )
    )
    print("-" * 110)

    elapsed_time = time.time() - start_time
    nwords = get_number_of_words(train_data) + get_number_of_words(val_data)
    wps = nwords / elapsed_time

    test_loss = 1  # evaluate(model, test_data, criterion, bptt, ntokens)
    print("=" * 89)
    print(
        "| end of training | test loss {:5.2f} \n| time: {:5.2f}s | words: {:3d} | wps: {:5.2f}".format(
            test_loss, elapsed_time, nwords, wps
        )
    )
    print("=" * 110)

    if can_benchmark and len(model.balance) == 4:
        # Assert that words per second is within 3 standard deviations of the average
        # of six golden runs
        assert wps > 36954.4 - (3 * 116.825)

        print("Peak allocated bytes on cuda:0: {:1d}".format(torch.cuda.memory_stats(0)["allocated_bytes.all.peak"]))
        print("Peak allocated bytes on cuda:1: {:1d}".format(torch.cuda.memory_stats(1)["allocated_bytes.all.peak"]))
        print("Peak allocated bytes on cuda:2: {:1d}".format(torch.cuda.memory_stats(2)["allocated_bytes.all.peak"]))
        print("Peak allocated bytes on cuda:3: {:1d}".format(torch.cuda.memory_stats(3)["allocated_bytes.all.peak"]))

        # Assert that memory usage on each GPU is within 10% of golden run
        # Right-hand-side is golden run bytes * 110%
        assert torch.cuda.memory_stats(0)["allocated_bytes.all.peak"] < 4061909504 * 1.1
        assert torch.cuda.memory_stats(1)["allocated_bytes.all.peak"] < 4050944 * 1.1
        assert torch.cuda.memory_stats(2)["allocated_bytes.all.peak"] < 10427392 * 1.1
        assert torch.cuda.memory_stats(3)["allocated_bytes.all.peak"] < 2031824896 * 1.1
        print("No regression detected")
def generate_balance_weighted(num_devices, num_layers, fraction=0.5):
    """Balance layers over devices, giving the last device a reduced share.

    The final entry gets ``int(num_layers / num_devices * fraction)`` layers;
    the remaining layers are split evenly over the first ``num_devices - 1``
    devices via generate_balance().  (Removed two dead locals — ``balance``
    and ``layers_assigned`` were assigned and immediately discarded.)
    """
    average_count = num_layers / num_devices
    last_layers = int(average_count * fraction)

    balance = generate_balance(num_devices - 1, num_layers - last_layers)
    balance.append(last_layers)
    return balance
def generate_balance(num_devices, num_layers):
    """Split ``num_layers`` across ``num_devices`` as evenly as possible.

    Each device receives the ceiling of the remaining average, so any
    remainder is front-loaded; the entries always sum to ``num_layers``.
    """
    balance = []
    remaining = num_layers
    for device in range(num_devices):
        share = math.ceil(remaining / (num_devices - device))
        balance.append(share)
        remaining -= share
    return balance
def make_model_and_data(args, device, new_data: bool = True):
    """Build the model bundle plus a data source.

    With ``new_data`` a synthetic BenchmarkLMDataset/DataLoader is used;
    otherwise the real WikiText2 splits.  Returns a dict keyed by "model",
    "criterion", "optimizer", "data" (plus "vocab_size" for synthetic data).
    """
    # NOTE(review): the `device` parameter is ignored — it is immediately
    # recomputed from CUDA availability.  The GradScaler returned by
    # make_model() is also discarded.
    device = torch.device("cuda") if torch.cuda.is_available() else torch.device("cpu")
    if new_data:
        vocab_size = 10000
        model, criterion, optimizer, scaler = make_model(args, device, vocab_size)
        lm_dataset = BenchmarkLMDataset()
        lm_dataloader = DataLoader(
            lm_dataset, batch_size=args.batch_size, shuffle=True, num_workers=0, collate_fn=collate_sentences_lm
        )
        return {
            "model": model,
            "criterion": criterion,
            "optimizer": optimizer,
            "data": lm_dataloader,
            "vocab_size": vocab_size,
        }
    else:
        data = get_data(device)
        ntokens, train_data, val_data, test_data = data
        model, criterion, optimizer, scaler = make_model(args, device, ntokens)
        return {
            "model": model,
            "criterion": criterion,
            "optimizer": optimizer,
            "data": data,
        }
def bench_single_process(args):
    """Run the pipeline benchmark in a single process across local GPUs (or CPU)."""
    num_devices = torch.cuda.device_count() if torch.cuda.is_available() else 1
    assert num_devices > 0
    init_random_seed(0)
    device = torch.device("cuda") if torch.cuda.is_available() else torch.device("cpu")

    new_data = True
    blob = make_model_and_data(args, None, new_data=new_data)
    model = blob["model"]

    # Spread the layers over at most four devices.
    balance = generate_balance(min(num_devices, 4), len(model))
    p = pipe.Pipe(
        model, balance, chunks=args.chunks, pipelined_backward=args.pipelined_backward, checkpoint=args.checkpoint
    )
    del model
    del blob["model"]

    if new_data:
        train(blob["data"], p, blob["criterion"], blob["optimizer"], blob["vocab_size"], args)
    else:
        ntokens, train_data, val_data, test_data = blob["data"]
        # Bug fix: `criterion` and `optimizer` were undefined names in this
        # branch; use the ones built by make_model_and_data.
        benchmark_language_model(
            train_data, val_data, test_data, p, blob["criterion"], blob["optimizer"], ntokens, args
        )
def run_mp_worker(args, available_workers):
    """Per-process benchmark body: build an AsyncSchedule Pipe and train on it."""
    new_data = True
    blob = make_model_and_data(args, None, new_data=new_data)
    model = blob["model"]

    # The last pipeline stage gets a reduced share (80% of an even split).
    balance = generate_balance_weighted(get_pipeline_parallel_group().size(), len(model), 0.8)
    p = pipe.Pipe(
        model,
        balance,
        style=Pipe.AsyncSchedule,
        chunks=args.chunks,
        worker_map=get_worker_map(),
        input_device=torch.device("cuda") if torch.cuda.is_available() else torch.device("cpu"),
        pipelined_backward=args.pipelined_backward,
        checkpoint=args.checkpoint,
        # loss_fn=blob["criterion"],
    )
    if torch.cuda.is_available():
        p = p.cuda()
    if args.all_at_once and p.pipeline:
        print(f"running all at once")
        p.pipeline.all_at_once = True

    if new_data:
        train(blob["data"], p, blob["criterion"], blob["optimizer"], blob["vocab_size"], args)
    else:
        ntokens, train_data, val_data, test_data = blob["data"]
        # Bug fix: `criterion` and `optimizer` were undefined names in this
        # branch; use the ones built by make_model_and_data.
        benchmark_language_model(
            train_data, val_data, test_data, p, blob["criterion"], blob["optimizer"], ntokens, args
        )
def run_worker(rank, world_size, args):
    """mp.spawn entry point: init distributed + RPC, run one worker, tear down."""
    if args.world_size != 0:
        world_size = args.world_size
    # Offset the rank so multiple hosts can partition a global rank space.
    dist_init(rank + args.rank_base, world_size, hostname=args.host)
    initialize_model_parallel(1, world_size)
    init_random_seed(0)
    run_mp_worker(args, world_size)

    rpc.shutdown()
    torch.distributed.destroy_process_group()
def bench_multi_process(args, all_at_once=False):
    """Spawn one worker process per device (or per --local-world-size) and benchmark."""
    world_size = args.local_world_size if args.local_world_size != 0 else min(torch.cuda.device_count(), 2)
    mp.spawn(run_worker, args=(world_size, args), nprocs=world_size, join=True)
# Local rank -> UCX network device used by bench_mpi().  Presumably matches the
# NIC/GPU topology of the benchmark cluster (two consecutive local ranks share
# one mlx5 HCA port) — TODO confirm against the target hosts.
best_device_map = {
    0: "mlx5_0:1",
    1: "mlx5_0:1",
    2: "mlx5_1:1",
    3: "mlx5_1:1",
    4: "mlx5_2:1",
    5: "mlx5_2:1",
    6: "mlx5_3:1",
    7: "mlx5_3:1",
}
def bench_mpi(args):
    """Benchmark entry point when launched under mpirun/Open MPI.

    Derives ranks from the OMPI_* environment, pins each local rank to a NIC
    and GPU, brings up a gloo process group plus the RPC layer, then runs the
    per-process worker.
    """
    guess_rank = int(os.environ["OMPI_COMM_WORLD_RANK"])
    world_size = int(os.environ["OMPI_COMM_WORLD_SIZE"])
    local_rank = int(os.environ["OMPI_COMM_WORLD_LOCAL_RANK"])
    os.environ["UCX_NET_DEVICES"] = best_device_map[local_rank]

    os.environ["MASTER_ADDR"] = args.host
    os.environ["MASTER_PORT"] = "10638"
    if args.socket_name:
        os.environ["GLOO_SOCKET_IFNAME"] = args.socket_name
        os.environ["TP_SOCKET_IFNAME"] = args.socket_name

    torch.distributed.init_process_group(backend="gloo", rank=guess_rank, world_size=world_size)

    os.environ["MASTER_ADDR"] = args.host
    os.environ["MASTER_PORT"] = "10639"  # separate port for the RPC layer
    init_method = f"tcp://{os.environ['MASTER_ADDR']}:{os.environ['MASTER_PORT']}"
    rank = torch.distributed.get_rank()
    world_size = torch.distributed.get_world_size()
    torch.cuda.set_device(local_rank % torch.cuda.device_count())

    rpc.init_rpc(
        f"Test{rank}",
        rank=rank,
        world_size=world_size,
        backend=rpc.BackendType.PROCESS_GROUP,
        rpc_backend_options=rpc.ProcessGroupRpcBackendOptions(rpc_timeout=20, init_method=init_method),
    )

    backends = {"model_parallel_backend": "nccl", "pipeline_backend": "mpi", "ddp_backend": "nccl"}

    if args.ddp_zero:
        # NOTE(review): pipeline length hard-coded to 4 for the ddp_zero case —
        # confirm intent before changing world sizes.
        initialize_model_parallel(1, 4, **backends)
    else:
        initialize_model_parallel(1, world_size, **backends)
    init_random_seed(0)

    run_mp_worker(args, world_size)

    rpc.shutdown()
    torch.distributed.destroy_process_group()
# Command-line interface for the pipeline benchmark.
parser = argparse.ArgumentParser(description="benchmark")
parser.add_argument("--local-world-size", "-l", type=int, default=0, help="local world size")
parser.add_argument("--world-size", "-w", type=int, default=0, help="world size")
parser.add_argument("--rank-base", "-r", type=int, help="rank base", default=0)
parser.add_argument("--host", "-o", type=str, default="localhost", help="hostname")
parser.add_argument("--no-mpi", action="store_true", default=False, help="disable mpi")
parser.add_argument("--chunks", type=int, default=1, help="number of microbatches per batch")
parser.add_argument("--batch-size", type=int, default=8, help="size of a batch")
parser.add_argument("--all-at-once", action="store_true", default=False, help="do backward pass on whole batch at once")
parser.add_argument("--max-batch", type=int, default=4, help="Max number of batches")
parser.add_argument("--socket-name", type=str, default=None, help="socket ifname for gloo/tp")
parser.add_argument("--num-decoder-layers", type=int, default=10, help="Number of decoder layers in the model")
parser.add_argument("--ddp-zero", action="store_true", default=False, help="enable ddp")
parser.add_argument(
    # Bug fix: help text was copy-pasted from --num-decoder-layers; the flag
    # actually switches make_model() to LazyModule-wrapped construction.
    "--lazy-construction", action="store_true", default=False, help="Build the model lazily via LazyModule wrappers"
)
parser.add_argument(
    "--checkpoint", default="never", choices=["always", "except_last", "never"], help="Checkpointing strategy for pipe"
)
parser.add_argument(
    "--pipelined-backward", dest="pipelined_backward", action="store_true", help="Pipelined backward pass"
)
parser.add_argument(
    "--no-pipelined-backward", dest="pipelined_backward", action="store_false", help="Pipelined backward pass"
)
parser.set_defaults(pipelined_backward=True)

if __name__ == "__main__":
    args = parser.parse_args()

    # bench_multi_process(args, all_at_once=True)
    if args.no_mpi or "OMPI_COMM_WORLD_RANK" not in os.environ:
        # Not launched under mpirun: run everything in this process.
        print(f"Running benchmark with args: {args}")
        bench_single_process(args)
    else:
        # Under mpirun: only rank 0 echoes the arguments.
        if os.environ["OMPI_COMM_WORLD_RANK"] == "0":
            print(f"Running benchmark with args: {args}")
        bench_mpi(args)
| 34.685345 | 120 | 0.645499 |
import argparse
import logging
import math
import os
import time
import warnings
from benchmark_dataset import BenchmarkLMDataset, collate_sentences_lm
import torch
from torch.distributed import rpc
import torch.multiprocessing as mp
import torch.nn as nn
from torch.nn.parallel import DistributedDataParallel as DDP
from torch.utils.data import DataLoader
import torchtext
from torchtext.data.utils import get_tokenizer
from fairscale.nn import Pipe
from fairscale.nn.model_parallel import initialize_model_parallel
from fairscale.nn.model_parallel.initialize import get_data_parallel_group, get_pipeline_parallel_group
from fairscale.nn.pipe import LazyModule, pipe
from fairscale.optim import GradScaler
from fairscale.optim.oss import OSS
from fairscale.utils.testing import dist_init, get_worker_map
try:
from fairscale.optim import Adam
can_benchmark = True
except ImportError:
from torch.optim import Adam
can_benchmark = False
def init_random_seed(seed: int):
import numpy
torch.manual_seed(seed)
torch.cuda.manual_seed(seed)
numpy.random.seed(seed)
PIPE_CHUNKS = 2
iteration_count = 0
class EmbeddingLayer(nn.Embedding):
def __init__(self, ntoken, ninp, initrange):
super().__init__(ntoken, ninp)
self.ninp = ninp
self.weight.data.uniform_(-initrange, initrange)
def forward(self, src):
return super().forward(src) * math.sqrt(self.ninp)
class PositionalEncodingLayer(nn.Module):
def __init__(self, d_model, dropout=0.1, max_len=5000):
super(PositionalEncodingLayer, self).__init__()
self.dropout = nn.Dropout(p=dropout)
pe = torch.zeros(max_len, d_model)
position = torch.arange(0, max_len, dtype=torch.float).unsqueeze(1)
div_term = torch.exp(torch.arange(0, d_model, 2).float() * (-math.log(10000.0) / d_model))
pe[:, 0::2] = torch.sin(position * div_term)
pe[:, 1::2] = torch.cos(position * div_term)
pe = pe.unsqueeze(0).transpose(0, 1)
self.register_buffer("pe", pe)
def forward(self, x):
x = x + self.pe[: x.size(0), :]
return self.dropout(x)
class TransformerDecoderLayer(nn.TransformerEncoderLayer):
def __init__(self, ninp, nhead, nhid, droupout):
super().__init__(ninp, nhead, nhid, droupout)
self.src_mask = None
def _generate_square_subsequent_mask(self, sz):
mask = (torch.triu(torch.ones(sz, sz)) == 1).transpose(0, 1)
mask = mask.float().masked_fill(mask == 0, float("-inf")).masked_fill(mask == 1, float(0.0))
return mask
def forward(self, src):
global iteration_count
iteration_count += 1
if self.src_mask is None or self.src_mask.size(0) != len(src):
device = src.device
mask = self._generate_square_subsequent_mask(len(src)).to(device)
self.src_mask = mask
return super().forward(src, self.src_mask)
class LinearLayer(nn.Linear):
def __init__(self, ninp, ntoken, initrange):
super().__init__(ninp, ntoken)
self.bias.data.zero_()
self.weight.data.uniform_(-initrange, initrange)
class TransformerLMSequential(nn.Sequential):
def __init__(self, ntokens, ninp, nhead, nhid, dropout, initrange, ndecoder):
layers = [
EmbeddingLayer(ntokens, ninp, initrange),
PositionalEncodingLayer(ninp, dropout),
]
for _ in range(ndecoder):
layers.append(TransformerDecoderLayer(ninp, nhead, nhid, dropout))
layers.append(LinearLayer(ninp, ntokens, initrange))
super(TransformerLMSequential, self).__init__(*layers)
def get_data(device):
with warnings.catch_warnings(record=True) as fjldska:
TEXT = torchtext.data.Field(
tokenize=get_tokenizer("basic_english"), init_token="<sos>", eos_token="<eos>", lower=True
)
train_txt, val_txt, test_txt = torchtext.datasets.WikiText2.splits(TEXT)
TEXT.build_vocab(train_txt)
ntokens = len(TEXT.vocab.stoi)
batch_size = 20
eval_batch_size = 10
train_data = batchify(train_txt, batch_size, TEXT, device)
val_data = batchify(val_txt, eval_batch_size, TEXT, device)
test_data = batchify(test_txt, eval_batch_size, TEXT, device)
return ntokens, train_data, val_data, test_data
def batchify(data, bsz, TEXT, device):
data = TEXT.numericalize([data.examples[0].text])
nbatch = data.size(0) // bsz
data = data.narrow(0, 0, nbatch * bsz)
data = data.view(bsz, -1).t().contiguous()
return data.to(device)
def get_batch(source, i, bptt):
seq_len = min(bptt, len(source) - 1 - i)
data = source[i : i + seq_len]
target = source[i + 1 : i + 1 + seq_len].view(-1)
return data, target
def make_model(args, device, ntokens):
ninp = 2048
nhid = 2048
nhead = 32
dropout = 0
initrange = 0.1
ndecoder = args.num_decoder_layers
if args.lazy_construction:
layers = [
LazyModule(lambda: EmbeddingLayer(ntokens, ninp, initrange)),
LazyModule(lambda: PositionalEncodingLayer(ninp, dropout)),
]
for _ in range(ndecoder):
layers.append(LazyModule(lambda: TransformerDecoderLayer(ninp, nhead, nhid, dropout)))
layers.append(LazyModule(lambda: LinearLayer(ninp, ntokens, initrange)))
model = layers
else:
model = TransformerLMSequential(ntokens, ninp, nhead, nhid, dropout, initrange, ndecoder).to(device)
criterion = nn.CrossEntropyLoss()
lr = 0.01
def make_adam(model):
if args.ddp_zero:
return OSS(params=model.parameters(), optim=Adam, group=get_data_parallel_group(), lr=lr)
else:
return Adam(model.parameters(), lr=lr)
optimizer = make_adam
scaler = GradScaler()
return model, criterion, optimizer, scaler
def get_tensors_by_size_bucket():
    """Count live CUDA tensors, grouped by (shape..., element_size).

    Scans every object tracked by the garbage collector and tallies each
    CUDA tensor under a key of its dimensions plus element size.

    :return: ``defaultdict(int)`` mapping (dim0, ..., dimN, element_size)
        tuples to tensor counts.
    """
    import gc
    from collections import defaultdict

    buckets = defaultdict(int)
    for candidate in gc.get_objects():
        if isinstance(candidate, torch.Tensor) and candidate.device.type == "cuda":
            buckets[tuple(candidate.size()) + (candidate.element_size(),)] += 1
    return buckets
def dump_size_buckets(size_buckets, prefix=""):
    """Pretty-print a size-bucket table built by ``get_tensors_by_size_bucket``.

    Each line shows ``<key> : <count>, <bytes>`` where *bytes* is the product
    of the key's entries times the count; a total line follows.

    :param size_buckets: mapping of (dims..., element_size) -> tensor count.
    :param prefix: string prepended to every printed line.
    """
    import operator
    from functools import reduce

    grand_total = 0
    for bucket, count in size_buckets.items():
        nbytes = reduce(operator.mul, bucket) * count
        grand_total += nbytes
        print(prefix + f"{bucket} : {count}, {nbytes}")
    print(prefix + f"total = {grand_total}")
last_size_buckets = None
once = True
def safe_rank():
    """Return the distributed rank, or 0 when no process group is initialized.

    Older torch releases raise AssertionError from ``get_rank()`` before
    ``init_process_group``; newer ones raise RuntimeError or ValueError, so
    all three are treated as "not initialized".
    """
    try:
        return torch.distributed.get_rank()
    except (AssertionError, RuntimeError, ValueError):
        return 0
def check_size_buckets():
    """Compare the current CUDA-tensor buckets against the previous snapshot.

    Uses module globals ``last_size_buckets`` (previous snapshot) and ``once``
    (one-shot dump flag). Prints any difference, then stores the new snapshot.
    """
    global last_size_buckets
    global once
    size_buckets = get_tensors_by_size_bucket()
    if last_size_buckets is not None:
        if size_buckets != last_size_buckets:
            # BUG FIX: the original interpolated f"{safe-rank()}", which is a
            # NameError (subtraction of undefined names), not a call to
            # safe_rank().  Message text otherwise preserved verbatim.
            print(f"difference is oustanding tensors: {safe_rank()}")
            dump_size_buckets(last_size_buckets, "old: ")
            dump_size_buckets(size_buckets, "new: ")
        if once:
            print(f"dumping buckets for: {safe_rank()}")
            dump_size_buckets(last_size_buckets, "old: ")
            dump_size_buckets(size_buckets, "new: ")
            once = False
    else:
        print(f"size buckets none on {safe_rank()}")
    last_size_buckets = size_buckets
def dump_cuda_tensors():
    """Print a summary of all live CUDA tensors and CUDA allocator stats.

    BUG FIX: the original never initialized ``size_buckets`` and therefore
    raised NameError the first time it was referenced; it also called
    ``torch.cuda.memory_stats()`` unconditionally, which fails on CPU-only
    builds.
    """
    print(f"dumping cuda tensors...")
    import gc
    import operator
    from collections import defaultdict
    from functools import reduce

    size_buckets = defaultdict(int)
    for obj in gc.get_objects():
        if not isinstance(obj, torch.Tensor):
            continue
        if obj.device.type == "cuda":
            size_buckets[(*obj.size(),) + (obj.element_size(),)] += 1

    print(f"outstanding cuda tensors:")
    total = 0
    for key, value in size_buckets.items():
        this = reduce(operator.mul, key) * value
        total += this
        print(f"{key} : {value}, {this}")
    print(f"total size = {total}")

    if torch.cuda.is_available():
        import pprint

        pprint.pprint(torch.cuda.memory_stats())
def train(lm_dataloader, model, criterion, optimizer, vocab_size, args):
model.train()
from functools import reduce
import operator
num_params = reduce(operator.add, (reduce(operator.mul, x.size()) for x in model.parameters()))
if model.group:
total = torch.Tensor([num_params])
if torch.cuda.is_available():
total = total.cuda()
torch.distributed.all_reduce(total, group=model.group)
logging.info(
f"training model, #prams = {num_params}, group: {model.group.rank()}, grank:"
f" {torch.distributed.get_rank()}, sizes {model.group.size()}"
)
torch.distributed.barrier()
if model.group.rank() == 0:
logging.info(f"total #prams = {total.item()}")
else:
logging.info(f"training model, #prams = {num_params}")
vocab_size = 10000
total_loss = 0.0
start_time = time.time()
word_counter = 0
optimizer = optimizer(model)
def get_first_device(model):
if isinstance(model, DDP):
model = model.module
if not torch.cuda.is_available():
return torch.device("cpu")
if model.devices:
return model.devices[0]
else:
return torch.cuda.current_device()
def get_last_device(model):
if isinstance(model, DDP):
model = model.module
if not torch.cuda.is_available():
return torch.device("cpu")
if model.devices:
return model.devices[-1]
else:
return torch.cuda.current_device()
pipe_group = model.group
if args.ddp_zero:
model = DDP(
model,
device_ids=[torch.cuda.current_device()],
process_group=get_data_parallel_group(),
find_unused_parameters=False,
)
if pipe_group and pipe_group.rank() != 0 and pipe_group.rank() != (pipe_group.size() - 1):
thing = {"input": torch.zeros(args.batch_size)}
class FakeDataset:
def __getitem__(self, index):
return thing
def __len__(self):
return len(lm_dataloader)
lm_dataloader = FakeDataset()
for i, batch in enumerate(lm_dataloader):
bi = batch["input"]
if args.max_batch and i > args.max_batch:
break
optimizer.zero_grad()
try:
if (pipe_group is None or pipe_group.rank() == 0) and not args.ddp_zero:
tmp = batch["input"].to(get_first_device(model))
output = model(tmp)
else:
output = model(batch["input"])
except Exception as e:
raise RuntimeError(f"training failed on {torch.distributed.get_rank()}") from e
if pipe_group is None or pipe_group.rank() == pipe_group.size() - 1:
target = batch["target"].to(get_last_device(model))
output = output.to(target.device)
loss = criterion(output.view(-1, vocab_size), target.view(-1))
if args.ddp_zero:
ddp_group = get_data_parallel_group()
torch.distributed.all_reduce(loss, op=torch.distributed.ReduceOp.SUM, group=ddp_group)
loss /= ddp_group.size()
loss.backward()
del target
else:
if args.ddp_zero:
model.module.back_helper(output)
else:
model.back_helper(output)
del output
torch.nn.utils.clip_grad_value_(model.parameters(), 0.05)
optimizer.step()
if pipe_group is None or pipe_group.rank() == pipe_group.size() - 1:
total_loss += loss.item()
log_interval = 1
word_counter += batch["ntokens"]
if i % log_interval == 0 and i > 0:
cur_loss = total_loss / log_interval
elapsed = time.time() - start_time
print(
"| batch {:5d} | wps {:5.2f} | loss {:5.2f} | ppl {:8.2f}".format(
i, word_counter / elapsed, cur_loss, math.exp(cur_loss)
)
)
word_counter = 0
total_loss = 0
start_time = time.time()
def evaluate(eval_model, data_source, criterion, bptt, ntokens):
    """Compute the average per-token loss of *eval_model* over *data_source*.

    :param eval_model: model mapping a token-id window to per-token logits.
    :param data_source: (seq_len, batch) tensor of token ids.
    :param criterion: loss taking (N, ntokens) logits and flat targets.
    :param bptt: window length fed to ``get_batch`` per step.
    :param ntokens: vocabulary size (width of the flattened logits).
    :return: sum of window-length-weighted losses divided by
        ``len(data_source) - 1``.
    """
    eval_model.eval()  # disable dropout etc. for evaluation
    total_loss = 0.0
    with torch.no_grad():
        for i in range(0, data_source.size(0) - 1, bptt):
            data, targets = get_batch(data_source, i, bptt)
            output = eval_model(data)
            output = output.to(targets.device)
            output_flat = output.view(-1, ntokens)
            # weight each window's mean loss by its length
            total_loss += len(data) * criterion(output_flat, targets).item()
    return total_loss / (len(data_source) - 1)
def get_number_of_words(data):
    """Total token count of a (seq_len, batch) tensor: seq_len * batch."""
    rows, cols = data.size()[0], data.size()[1]
    return rows * cols
def benchmark_language_model(train_data, val_data, test_data, model, criterion, optimizer, ntokens, args):
epoch = 1
bptt = 35
start_time = time.time()
print("-" * 110)
print("| start of epoch {:1d}".format(epoch))
print("-" * 110)
epoch_start_time = time.time()
train(train_data, model, criterion, optimizer, bptt, ntokens, args)
val_loss = 1
print("-" * 89)
print(
"| end of epoch {:1d} | time: {:5.2f}s | valid loss {:5.2f} ".format(
epoch, (time.time() - epoch_start_time), val_loss
)
)
print("-" * 110)
elapsed_time = time.time() - start_time
nwords = get_number_of_words(train_data) + get_number_of_words(val_data)
wps = nwords / elapsed_time
test_loss = 1
print("=" * 89)
print(
"| end of training | test loss {:5.2f} \n| time: {:5.2f}s | words: {:3d} | wps: {:5.2f}".format(
test_loss, elapsed_time, nwords, wps
)
)
print("=" * 110)
if can_benchmark and len(model.balance) == 4:
assert wps > 36954.4 - (3 * 116.825)
print("Peak allocated bytes on cuda:0: {:1d}".format(torch.cuda.memory_stats(0)["allocated_bytes.all.peak"]))
print("Peak allocated bytes on cuda:1: {:1d}".format(torch.cuda.memory_stats(1)["allocated_bytes.all.peak"]))
print("Peak allocated bytes on cuda:2: {:1d}".format(torch.cuda.memory_stats(2)["allocated_bytes.all.peak"]))
print("Peak allocated bytes on cuda:3: {:1d}".format(torch.cuda.memory_stats(3)["allocated_bytes.all.peak"]))
assert torch.cuda.memory_stats(0)["allocated_bytes.all.peak"] < 4061909504 * 1.1
assert torch.cuda.memory_stats(1)["allocated_bytes.all.peak"] < 4050944 * 1.1
assert torch.cuda.memory_stats(2)["allocated_bytes.all.peak"] < 10427392 * 1.1
assert torch.cuda.memory_stats(3)["allocated_bytes.all.peak"] < 2031824896 * 1.1
print("No regression detected")
def generate_balance_weighted(num_devices, num_layers, fraction=0.5):
    """Partition *num_layers* across *num_devices*, shrinking the last share.

    The last device receives ``int(num_layers / num_devices * fraction)``
    layers; the remainder is spread evenly over the other devices via
    ``generate_balance``.

    :return: list of per-device layer counts, length ``num_devices``.
    """
    # NOTE: the original also set `balance = []` (immediately overwritten)
    # and `layers_assigned = 0` (never read) -- both removed as dead code.
    average_count = num_layers / num_devices
    last_layers = int(average_count * fraction)
    balance = generate_balance(num_devices - 1, num_layers - last_layers)
    balance.append(last_layers)
    return balance
def generate_balance(num_devices, num_layers):
    """Spread *num_layers* as evenly as possible over *num_devices*.

    Each device takes the ceiling of the average of the layers still
    unassigned, so earlier devices absorb any remainder.

    :return: list of per-device layer counts summing to ``num_layers``.
    """
    balance = []
    remaining = num_layers
    for devices_left in range(num_devices, 0, -1):
        share = remaining / devices_left
        share = int(share) if share.is_integer() else math.ceil(share)
        balance.append(share)
        remaining -= share
    return balance
def make_model_and_data(args, device, new_data: bool = True):
    """Build the model/criterion/optimizer bundle plus its data source.

    :param args: parsed CLI namespace (uses ``batch_size``).
    :param device: ignored -- NOTE(review): the parameter is immediately
        shadowed by the cuda/cpu autodetection below; confirm whether callers
        expect their device to be honored.
    :param new_data: if True, use the synthetic BenchmarkLMDataset; otherwise
        load WikiText2 via ``get_data``.
    :return: dict with keys model/criterion/optimizer/data (plus vocab_size
        in the synthetic case).  The ``scaler`` from make_model is discarded.
    """
    device = torch.device("cuda") if torch.cuda.is_available() else torch.device("cpu")
    if new_data:
        vocab_size = 10000
        model, criterion, optimizer, scaler = make_model(args, device, vocab_size)
        lm_dataset = BenchmarkLMDataset()
        lm_dataloader = DataLoader(
            lm_dataset, batch_size=args.batch_size, shuffle=True, num_workers=0, collate_fn=collate_sentences_lm
        )
        return {
            "model": model,
            "criterion": criterion,
            "optimizer": optimizer,
            "data": lm_dataloader,
            "vocab_size": vocab_size,
        }
    else:
        data = get_data(device)
        ntokens, train_data, val_data, test_data = data
        model, criterion, optimizer, scaler = make_model(args, device, ntokens)
        return {
            "model": model,
            "criterion": criterion,
            "optimizer": optimizer,
            "data": data,
        }
def bench_single_process(args):
num_devices = torch.cuda.device_count() if torch.cuda.is_available() else 1
assert num_devices > 0
init_random_seed(0)
device = torch.device("cuda") if torch.cuda.is_available() else torch.device("cpu")
new_data = True
blob = make_model_and_data(args, None, new_data=new_data)
model = blob["model"]
balance = generate_balance(min(num_devices, 4), len(model))
p = pipe.Pipe(
model, balance, chunks=args.chunks, pipelined_backward=args.pipelined_backward, checkpoint=args.checkpoint
)
del model
del blob["model"]
if new_data:
train(blob["data"], p, blob["criterion"], blob["optimizer"], blob["vocab_size"], args)
else:
ntokens, train_data, val_data, test_data = blob["data"]
benchmark_language_model(train_data, val_data, test_data, p, criterion, optimizer, ntokens, args)
def run_mp_worker(args, available_workers):
new_data = True
blob = make_model_and_data(args, None, new_data=new_data)
model = blob["model"]
balance = generate_balance_weighted(get_pipeline_parallel_group().size(), len(model), 0.8)
p = pipe.Pipe(
model,
balance,
style=Pipe.AsyncSchedule,
chunks=args.chunks,
worker_map=get_worker_map(),
input_device=torch.device("cuda") if torch.cuda.is_available() else torch.device("cpu"),
pipelined_backward=args.pipelined_backward,
checkpoint=args.checkpoint,
)
if torch.cuda.is_available():
p = p.cuda()
if args.all_at_once and p.pipeline:
print(f"running all at once")
p.pipeline.all_at_once = True
if new_data:
train(blob["data"], p, blob["criterion"], blob["optimizer"], blob["vocab_size"], args)
else:
ntokens, train_data, val_data, test_data = blob["data"]
benchmark_language_model(train_data, val_data, test_data, p, criterion, optimizer, ntokens, args)
def run_worker(rank, world_size, args):
if args.world_size != 0:
world_size = args.world_size
dist_init(rank + args.rank_base, world_size, hostname=args.host)
initialize_model_parallel(1, world_size)
init_random_seed(0)
run_mp_worker(args, world_size)
rpc.shutdown()
torch.distributed.destroy_process_group()
def bench_multi_process(args, all_at_once=False):
if args.local_world_size != 0:
world_size = args.local_world_size
else:
world_size = min(torch.cuda.device_count(), 2)
mp.spawn(run_worker, args=(world_size, args), nprocs=world_size, join=True)
best_device_map = {
0: "mlx5_0:1",
1: "mlx5_0:1",
2: "mlx5_1:1",
3: "mlx5_1:1",
4: "mlx5_2:1",
5: "mlx5_2:1",
6: "mlx5_3:1",
7: "mlx5_3:1",
}
def bench_mpi(args):
guess_rank = int(os.environ["OMPI_COMM_WORLD_RANK"])
world_size = int(os.environ["OMPI_COMM_WORLD_SIZE"])
local_rank = int(os.environ["OMPI_COMM_WORLD_LOCAL_RANK"])
os.environ["UCX_NET_DEVICES"] = best_device_map[local_rank]
os.environ["MASTER_ADDR"] = args.host
os.environ["MASTER_PORT"] = "10638"
if args.socket_name:
os.environ["GLOO_SOCKET_IFNAME"] = args.socket_name
os.environ["TP_SOCKET_IFNAME"] = args.socket_name
torch.distributed.init_process_group(backend="gloo", rank=guess_rank, world_size=world_size)
os.environ["MASTER_ADDR"] = args.host
os.environ["MASTER_PORT"] = "10639"
init_method = f"tcp://{os.environ['MASTER_ADDR']}:{os.environ['MASTER_PORT']}"
rank = torch.distributed.get_rank()
world_size = torch.distributed.get_world_size()
torch.cuda.set_device(local_rank % torch.cuda.device_count())
rpc.init_rpc(
f"Test{rank}",
rank=rank,
world_size=world_size,
backend=rpc.BackendType.PROCESS_GROUP,
rpc_backend_options=rpc.ProcessGroupRpcBackendOptions(rpc_timeout=20, init_method=init_method),
)
backends = {"model_parallel_backend": "nccl", "pipeline_backend": "mpi", "ddp_backend": "nccl"}
if args.ddp_zero:
initialize_model_parallel(1, 4, **backends)
else:
initialize_model_parallel(1, world_size, **backends)
init_random_seed(0)
run_mp_worker(args, world_size)
rpc.shutdown()
torch.distributed.destroy_process_group()
parser = argparse.ArgumentParser(description="benchmark")
parser.add_argument("--local-world-size", "-l", type=int, default=0, help="local world size")
parser.add_argument("--world-size", "-w", type=int, default=0, help="world size")
parser.add_argument("--rank-base", "-r", type=int, help="rank base", default=0)
parser.add_argument("--host", "-o", type=str, default="localhost", help="hostname")
parser.add_argument("--no-mpi", action="store_true", default=False, help="disable mpi")
parser.add_argument("--chunks", type=int, default=1, help="number of microbatches per batch")
parser.add_argument("--batch-size", type=int, default=8, help="size of a batch")
parser.add_argument("--all-at-once", action="store_true", default=False, help="do backward pass on whole batch at once")
parser.add_argument("--max-batch", type=int, default=4, help="Max number of batches")
parser.add_argument("--socket-name", type=str, default=None, help="socket ifname for gloo/tp")
parser.add_argument("--num-decoder-layers", type=int, default=10, help="Number of decoder layers in the model")
parser.add_argument("--ddp-zero", action="store_true", default=False, help="enable ddp")
parser.add_argument(
"--lazy-construction", action="store_true", default=False, help="Number of decoder layers in the model"
)
parser.add_argument(
"--checkpoint", default="never", choices=["always", "except_last", "never"], help="Checkpointing strategy for pipe"
)
parser.add_argument(
"--pipelined-backward", dest="pipelined_backward", action="store_true", help="Pipelined backward pass"
)
parser.add_argument(
"--no-pipelined-backward", dest="pipelined_backward", action="store_false", help="Pipelined backward pass"
)
parser.set_defaults(pipelined_backward=True)
if __name__ == "__main__":
args = parser.parse_args()
if args.no_mpi or "OMPI_COMM_WORLD_RANK" not in os.environ:
print(f"Running benchmark with args: {args}")
bench_single_process(args)
else:
if os.environ["OMPI_COMM_WORLD_RANK"] == "0":
print(f"Running benchmark with args: {args}")
bench_mpi(args)
| true | true |
f71bbb4c268959f437ff99244e9ab109af82e1a0 | 1,774 | py | Python | jina/drivers/querylang/slice.py | kaushikb11/jina | 2856809cc5acb16b5a3e76e5c14af0c7b2346d09 | [
"Apache-2.0"
] | null | null | null | jina/drivers/querylang/slice.py | kaushikb11/jina | 2856809cc5acb16b5a3e76e5c14af0c7b2346d09 | [
"Apache-2.0"
] | null | null | null | jina/drivers/querylang/slice.py | kaushikb11/jina | 2856809cc5acb16b5a3e76e5c14af0c7b2346d09 | [
"Apache-2.0"
] | null | null | null | __copyright__ = "Copyright (c) 2020 Jina AI Limited. All rights reserved."
__license__ = "Apache-2.0"
from typing import Iterable
from .. import QuerySetReader, BaseRecursiveDriver
if False:
from ...proto import jina_pb2
class SliceQL(QuerySetReader, BaseRecursiveDriver):
    """Restrict the size of the ``docs`` to ``k`` (given by the request)

    Example::

        - !ReduceAllDriver
        with:
            granularity_range: [0, 0]
            adjacency_range: [0, 1]
        - !SortQL
        with:
            reverse: true
            field: 'score.value'
            granularity_range: [0, 0]
            adjacency_range: [0, 1]
        - !SliceQL
        with:
            start: 0
            end: 50
            granularity_range: [0, 0]
            adjacency_range: [0, 1]

    `SliceQL` will ensure that only the first 50 documents are returned from this `Pod`
    """

    def __init__(self, start: int, end: int = None, *args, **kwargs):
        """
        :param start: Zero-based index at which to start extraction.
        :param end: Zero-based index before which to end extraction, or None
            for "no upper bound". slice extracts up to but not including end.
            For example, take(1,4) extracts the second element through the
            fourth element (elements indexed 1, 2, and 3).
        """
        super().__init__(*args, **kwargs)
        self._start = int(start)
        # BUG FIX: the original did `self._end = int(end)` unconditionally,
        # raising TypeError for the documented default of end=None.
        self._end = int(end) if end is not None else None
        self.is_apply = False

    def _apply_all(self, docs: Iterable['jina_pb2.Document'], *args, **kwargs):
        """Trim *docs* in place to the slice [start:end]."""
        if self.start <= 0 and (self.end is None or self.end >= len(docs)):
            pass  # slice is a no-op; leave the container untouched
        else:
            # BUG FIX: the original always executed `del docs[int(self.end):]`
            # here, which raised TypeError when end is None but start > 0.
            if self.end is not None:
                del docs[int(self.end):]
            del docs[:int(self.start)]
| 32.254545 | 93 | 0.559752 | __copyright__ = "Copyright (c) 2020 Jina AI Limited. All rights reserved."
__license__ = "Apache-2.0"
from typing import Iterable
from .. import QuerySetReader, BaseRecursiveDriver
if False:
from ...proto import jina_pb2
class SliceQL(QuerySetReader, BaseRecursiveDriver):
def __init__(self, start: int, end: int = None, *args, **kwargs):
super().__init__(*args, **kwargs)
self._start = int(start)
self._end = int(end)
self.is_apply = False
def _apply_all(self, docs: Iterable['jina_pb2.Document'], *args, **kwargs):
if self.start <= 0 and (self.end is None or self.end >= len(docs)):
pass
else:
del docs[int(self.end):]
del docs[:int(self.start)]
| true | true |
f71bbc336e9b9e428d917d26e1d743874331530d | 9,514 | py | Python | build/PureCloudPlatformClientV2/models/outbound_route_base_entity_listing.py | cjohnson-ctl/platform-client-sdk-python | 38ce53bb8012b66e8a43cc8bd6ff00cf6cc99100 | [
"MIT"
] | 1 | 2021-10-08T20:46:45.000Z | 2021-10-08T20:46:45.000Z | libs/PureCloudPlatformClientV2/models/outbound_route_base_entity_listing.py | rocketbot-cl/genesysCloud | dd9d9b5ebb90a82bab98c0d88b9585c22c91f333 | [
"MIT"
] | null | null | null | libs/PureCloudPlatformClientV2/models/outbound_route_base_entity_listing.py | rocketbot-cl/genesysCloud | dd9d9b5ebb90a82bab98c0d88b9585c22c91f333 | [
"MIT"
] | null | null | null | # coding: utf-8
"""
Copyright 2016 SmartBear Software
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
Ref: https://github.com/swagger-api/swagger-codegen
"""
from pprint import pformat
from six import iteritems
import re
import json
from ..utils import sanitize_for_serialization
class OutboundRouteBaseEntityListing(object):
    """
    Swagger model for a paged listing of OutboundRouteBase entities.

    NOTE: This class is auto generated by the swagger code generator program.
    Do not edit the class manually.
    """

    def __init__(self):
        """
        OutboundRouteBaseEntityListing - a model defined in Swagger

        ``swagger_types`` maps attribute name -> attribute type;
        ``attribute_map`` maps attribute name -> JSON key in the definition.
        All value attributes start as None.
        """
        self.swagger_types = {
            'entities': 'list[OutboundRouteBase]',
            'page_size': 'int',
            'page_number': 'int',
            'total': 'int',
            'first_uri': 'str',
            'self_uri': 'str',
            'next_uri': 'str',
            'last_uri': 'str',
            'previous_uri': 'str',
            'page_count': 'int'
        }

        self.attribute_map = {
            'entities': 'entities',
            'page_size': 'pageSize',
            'page_number': 'pageNumber',
            'total': 'total',
            'first_uri': 'firstUri',
            'self_uri': 'selfUri',
            'next_uri': 'nextUri',
            'last_uri': 'lastUri',
            'previous_uri': 'previousUri',
            'page_count': 'pageCount'
        }

        # backing storage (_<name>) for the properties below
        for name in self.swagger_types:
            setattr(self, '_' + name, None)

    def _make_property(name):  # pylint: disable=no-self-argument
        """Class-body helper: build a plain get/set property backed by _<name>."""
        attr = '_' + name

        def _get(self):
            return getattr(self, attr)

        def _set(self, value):
            setattr(self, attr, value)

        return property(
            _get, _set,
            doc="The %s of this OutboundRouteBaseEntityListing." % name)

    entities = _make_property('entities')
    page_size = _make_property('page_size')
    page_number = _make_property('page_number')
    total = _make_property('total')
    first_uri = _make_property('first_uri')
    self_uri = _make_property('self_uri')
    next_uri = _make_property('next_uri')
    last_uri = _make_property('last_uri')
    previous_uri = _make_property('previous_uri')
    page_count = _make_property('page_count')
    del _make_property  # helper is not part of the public class surface

    def to_dict(self):
        """
        Returns the model properties as a dict, recursing into nested models.
        """
        result = {}
        # `.items()` replaces six.iteritems; equivalent on py2 and py3.
        for attr, _ in self.swagger_types.items():
            value = getattr(self, attr)
            if isinstance(value, list):
                result[attr] = [item.to_dict() if hasattr(item, "to_dict") else item
                                for item in value]
            elif hasattr(value, "to_dict"):
                result[attr] = value.to_dict()
            elif isinstance(value, dict):
                result[attr] = {k: (v.to_dict() if hasattr(v, "to_dict") else v)
                                for k, v in value.items()}
            else:
                result[attr] = value
        return result

    def to_json(self):
        """
        Returns the model as raw JSON
        """
        return json.dumps(sanitize_for_serialization(self.to_dict()))

    def to_str(self):
        """
        Returns the string representation of the model
        """
        return pformat(self.to_dict())

    def __repr__(self):
        """
        For `print` and `pprint`
        """
        return self.to_str()

    def __eq__(self, other):
        """
        Returns true if both objects are equal.

        BUG FIX: the original compared ``self.__dict__ == other.__dict__``
        unconditionally, raising AttributeError for objects without a
        ``__dict__`` (e.g. ints) and treating unrelated types as equal.
        """
        if not isinstance(other, OutboundRouteBaseEntityListing):
            return False
        return self.__dict__ == other.__dict__

    def __ne__(self, other):
        """
        Returns true if both objects are not equal
        """
        return not self == other
| 26.065753 | 85 | 0.590814 |
from pprint import pformat
from six import iteritems
import re
import json
from ..utils import sanitize_for_serialization
class OutboundRouteBaseEntityListing(object):
def __init__(self):
self.swagger_types = {
'entities': 'list[OutboundRouteBase]',
'page_size': 'int',
'page_number': 'int',
'total': 'int',
'first_uri': 'str',
'self_uri': 'str',
'next_uri': 'str',
'last_uri': 'str',
'previous_uri': 'str',
'page_count': 'int'
}
self.attribute_map = {
'entities': 'entities',
'page_size': 'pageSize',
'page_number': 'pageNumber',
'total': 'total',
'first_uri': 'firstUri',
'self_uri': 'selfUri',
'next_uri': 'nextUri',
'last_uri': 'lastUri',
'previous_uri': 'previousUri',
'page_count': 'pageCount'
}
self._entities = None
self._page_size = None
self._page_number = None
self._total = None
self._first_uri = None
self._self_uri = None
self._next_uri = None
self._last_uri = None
self._previous_uri = None
self._page_count = None
@property
def entities(self):
return self._entities
@entities.setter
def entities(self, entities):
self._entities = entities
@property
def page_size(self):
return self._page_size
@page_size.setter
def page_size(self, page_size):
self._page_size = page_size
@property
def page_number(self):
return self._page_number
@page_number.setter
def page_number(self, page_number):
self._page_number = page_number
@property
def total(self):
return self._total
@total.setter
def total(self, total):
self._total = total
@property
def first_uri(self):
return self._first_uri
@first_uri.setter
def first_uri(self, first_uri):
self._first_uri = first_uri
@property
def self_uri(self):
return self._self_uri
@self_uri.setter
def self_uri(self, self_uri):
self._self_uri = self_uri
@property
def next_uri(self):
return self._next_uri
@next_uri.setter
def next_uri(self, next_uri):
self._next_uri = next_uri
@property
def last_uri(self):
return self._last_uri
@last_uri.setter
def last_uri(self, last_uri):
self._last_uri = last_uri
@property
def previous_uri(self):
return self._previous_uri
@previous_uri.setter
def previous_uri(self, previous_uri):
self._previous_uri = previous_uri
@property
def page_count(self):
return self._page_count
@page_count.setter
def page_count(self, page_count):
self._page_count = page_count
def to_dict(self):
result = {}
for attr, _ in iteritems(self.swagger_types):
value = getattr(self, attr)
if isinstance(value, list):
result[attr] = list(map(
lambda x: x.to_dict() if hasattr(x, "to_dict") else x,
value
))
elif hasattr(value, "to_dict"):
result[attr] = value.to_dict()
elif isinstance(value, dict):
result[attr] = dict(map(
lambda item: (item[0], item[1].to_dict())
if hasattr(item[1], "to_dict") else item,
value.items()
))
else:
result[attr] = value
return result
def to_json(self):
return json.dumps(sanitize_for_serialization(self.to_dict()))
def to_str(self):
return pformat(self.to_dict())
def __repr__(self):
return self.to_str()
def __eq__(self, other):
return self.__dict__ == other.__dict__
def __ne__(self, other):
return not self == other
| true | true |
f71bbce66ac31c7a95044b6cdf45b22688c6321e | 10,895 | py | Python | salt/transport/road/raet/behaving.py | thinkbox/salt | 35c7c52a89eb98d7a1b6b2c59596d4fe19ab6db3 | [
"Apache-2.0"
] | 1 | 2019-06-27T13:03:07.000Z | 2019-06-27T13:03:07.000Z | salt/transport/road/raet/behaving.py | thinkbox/salt | 35c7c52a89eb98d7a1b6b2c59596d4fe19ab6db3 | [
"Apache-2.0"
] | null | null | null | salt/transport/road/raet/behaving.py | thinkbox/salt | 35c7c52a89eb98d7a1b6b2c59596d4fe19ab6db3 | [
"Apache-2.0"
] | null | null | null | # -*- coding: utf-8 -*-
'''
behaving.py raet ioflo behaviors
See raeting.py for data format and packet field details.
Layout in DataStore
raet.udp.stack.stack
value StackUdp
raet.udp.stack.txmsgs
value deque()
raet.udp.stack.rxmsgs
value deque()
raet.udp.stack.local
name host port sigkey prikey
raet.udp.stack.status
joined allowed idle
raet.udp.stack.destination
value deid
'''
# pylint: skip-file
# pylint: disable=W0611
# Import Python libs
from collections import deque
try:
import simplejson as json
except ImportError:
import json
# Import ioflo libs
from ioflo.base.odicting import odict
from ioflo.base.globaling import *
from ioflo.base import aiding
from ioflo.base import storing
from ioflo.base import deeding
from ioflo.base.consoling import getConsole
console = getConsole()
from . import raeting, packeting, estating, yarding, stacking
class StackUdpRaet(deeding.Deed): # pylint: disable=W0232
'''
StackUdpRaet initialize and run raet udp stack
'''
Ioinits = odict(
inode="raet.udp.stack.",
stack='stack',
txmsgs=odict(ipath='txmsgs', ival=deque()),
rxmsgs=odict(ipath='rxmsgs', ival=deque()),
local=odict(ipath='local', ival=odict( name='master',
eid=0,
host='0.0.0.0',
port=raeting.RAET_PORT,
sigkey=None,
prikey=None)),)
def postinitio(self):
'''
Setup stack instance
'''
sigkey = self.local.data.sigkey
prikey = self.local.data.prikey
ha = (self.local.data.host, self.local.data.port)
name = self.local.data.name
eid = self.local.data.eid
estate = estating.LocalEstate( eid=eid,
name=name,
ha=ha,
sigkey=sigkey,
prikey=prikey,)
txMsgs = self.txmsgs.value
rxMsgs = self.rxmsgs.value
self.stack.value = stacking.StackUdp(estate=estate,
store=self.store,
name=name,
txMsgs=txMsgs,
rxMsgs=rxMsgs, )
def action(self, **kwa):
'''
Service all the deques for the stack
'''
self.stack.value.serviceAll()
class CloserStackUdpRaet(deeding.Deed):  # pylint: disable=W0232
    '''
    CloserStackUdpRaet closes stack server socket connection
    '''
    Ioinits = odict(
        inode=".raet.udp.stack.",
        stack='stack', )

    def action(self, **kwa):
        '''
        Close the UDP server socket of the shared stack, if one exists.
        '''
        # only act when the share actually holds a StackUdp instance
        if self.stack.value and isinstance(self.stack.value, stacking.StackUdp):
            self.stack.value.serverUdp.close()
class JoinerStackUdpRaet(deeding.Deed):  # pylint: disable=W0232
    '''
    Initiates join transaction with master
    '''
    Ioinits = odict(
        inode=".raet.udp.stack.",
        stack='stack',)

    def action(self, **kwa):
        '''
        Kick off a join transaction on the shared UDP stack, if present.
        '''
        stack = self.stack.value
        if stack and isinstance(stack, stacking.StackUdp):
            stack.join()
class JoinedStackUdpRaet(deeding.Deed):  # pylint: disable=W0232
    '''
    Updates status with .joined of zeroth remote estate (master)
    '''
    Ioinits = odict(
        inode=".raet.udp.stack.",
        stack='stack',
        status=odict(ipath='status', ival=odict(joined=False,
                                                allowed=False,
                                                idle=False, )))

    def action(self, **kwa):
        '''
        Update the .status share with the .joined flag of the first remote
        estate, defaulting to False when no stack or no estates exist.
        '''
        stack = self.stack.value
        joined = False
        if stack and isinstance(stack, stacking.StackUdp):
            if stack.estates:
                # BUG FIX (py3): dict.values() returns a view that cannot be
                # indexed; the original `values()[0]` only works on Python 2.
                # next(iter(...)) is equivalent on both.
                joined = next(iter(stack.estates.values())).joined
        self.status.update(joined=joined)
class AllowerStackUdpRaet(deeding.Deed): # pylint: disable=W0232
'''
Initiates allow (CurveCP handshake) transaction with master
'''
Ioinits = odict(
inode=".raet.udp.stack.",
stack='stack', )
def action(self, **kwa):
'''
Receive any udp packets on server socket and put in rxes
Send any packets in txes
'''
stack = self.stack.value
if stack and isinstance(stack, stacking.StackUdp):
stack.allow()
return None
class AllowedStackUdpRaet(deeding.Deed):  # pylint: disable=W0232
    '''
    Updates status with .allowed of zeroth remote estate (master)
    '''
    Ioinits = odict(
        inode=".raet.udp.stack.",
        stack='stack',
        status=odict(ipath='status', ival=odict(joined=False,
                                                allowed=False,
                                                idle=False, )))

    def action(self, **kwa):
        '''
        Update the .status share with the .allowed flag of the first remote
        estate, defaulting to False when no stack or no estates exist.
        '''
        stack = self.stack.value
        allowed = False
        if stack and isinstance(stack, stacking.StackUdp):
            if stack.estates:
                # BUG FIX (py3): dict.values() returns a view that cannot be
                # indexed; the original `values()[0]` only works on Python 2.
                # next(iter(...)) is equivalent on both.
                allowed = next(iter(stack.estates.values())).allowed
        self.status.update(allowed=allowed)
class IdledStackUdpRaet(deeding.Deed): # pylint: disable=W0232
'''
Updates idle status to true if there are no oustanding transactions in stack
'''
Ioinits = odict(
inode=".raet.udp.stack.",
stack='stack',
status=odict(ipath='status', ival=odict(joined=False,
allowed=False,
idle=False, )))
def action(self, **kwa):
'''
Update .status share
'''
stack = self.stack.value
idled = False
if stack and isinstance(stack, stacking.StackUdp):
if not stack.transactions:
idled = True
self.status.update(idled=idled)
class MessengerStackUdpRaet(deeding.Deed): # pylint: disable=W0232
'''
Puts message on txMsgs deque sent to deid
Message is composed fields that are parameters to action method
and is sent to remote estate deid
'''
Ioinits = odict(
inode=".raet.udp.stack.",
stack="stack",
destination="destination",)
def action(self, **kwa):
'''
Queue up message
'''
if kwa:
msg = odict(kwa)
stack = self.stack.value
if stack and isinstance(stack, stacking.StackUdp):
deid = self.destination.value
stack.txMsg(msg=msg, deid=deid)
class PrinterStackUdpRaet(deeding.Deed): # pylint: disable=W0232
'''
Prints out messages on rxMsgs queue
'''
Ioinits = odict(
inode=".raet.udp.stack.",
rxmsgs=odict(ipath='rxmsgs', ival=deque()),)
def action(self, **kwa):
'''
Queue up message
'''
rxMsgs = self.rxmsgs.value
while rxMsgs:
msg = rxMsgs.popleft()
console.terse("\nReceived....\n{0}\n".format(msg))
class StackUxdRaet(deeding.Deed): # pylint: disable=W0232
'''
StackUxdRaet initialize and run raet uxd stack
'''
Ioinits = odict(
inode="raet.uxd.stack.",
stack='stack',
txmsgs=odict(ipath='txmsgs', ival=deque()),
rxmsgs=odict(ipath='rxmsgs', ival=deque()),
local=odict(ipath='local', ival=odict(name='minion',
yardname="",
lane="maple")),)
def postinitio(self):
'''
Setup stack instance
'''
name = self.local.data.name
yardname = self.local.data.yardname
lane = self.local.data.lane
txMsgs = self.txmsgs.value
rxMsgs = self.rxmsgs.value
self.stack.value = stacking.StackUxd(
store=self.store,
name=name,
yardname=yardname,
lanename=lane,
txMsgs=txMsgs,
rxMsgs=rxMsgs, )
def action(self, **kwa):
'''
Service all the deques for the stack
'''
self.stack.value.serviceAll()
class CloserStackUxdRaet(deeding.Deed): # pylint: disable=W0232
'''
CloserStackUxdRaet closes stack server socket connection
'''
Ioinits = odict(
inode=".raet.uxd.stack.",
stack='stack',)
def action(self, **kwa):
'''
Close uxd socket
'''
if self.stack.value and isinstance(self.stack.value, stacking.StackUxd):
self.stack.value.serverUxd.close()
class AddYardStackUxdRaet(deeding.Deed): # pylint: disable=W0232
'''
AddYardStackUxdRaet closes stack server socket connection
'''
Ioinits = odict(
inode=".raet.uxd.stack.",
stack='stack',
yard='yard',
local=odict(ipath='local', ival=odict(name=None, lane="maple")),)
def action(self, lane="lane", name=None, **kwa):
'''
Adds new yard to stack on lane with yid
'''
stack = self.stack.value
if stack and isinstance(stack, stacking.StackUxd):
yard = yarding.Yard(stack=stack, prefix=lane, name=name)
stack.addRemoteYard(yard)
self.yard.value = yard
class TransmitStackUxdRaet(deeding.Deed): # pylint: disable=W0232
'''
Puts message on txMsgs deque sent to deid
Message is composed fields that are parameters to action method
and is sent to remote estate deid
'''
Ioinits = odict(
inode=".raet.uxd.stack.",
stack="stack",
dest="dest",)
def action(self, **kwa):
'''
Queue up message
'''
if kwa:
msg = odict(kwa)
stack = self.stack.value
if stack and isinstance(stack, stacking.StackUxd):
name = self.dest.value #destination yard name
stack.transmit(msg=msg, name=name)
class PrinterStackUxdRaet(deeding.Deed): # pylint: disable=W0232
'''
Prints out messages on rxMsgs queue
'''
Ioinits = odict(
inode=".raet.uxd.stack.",
stack="stack",
rxmsgs=odict(ipath='rxmsgs', ival=deque()),)
def action(self, **kwa):
'''
Queue up message
'''
rxMsgs = self.rxmsgs.value
stack = self.stack.value
while rxMsgs:
msg = rxMsgs.popleft()
console.terse("\n{0} Received....\n{1}\n".format(stack.name, msg))
| 29.76776 | 80 | 0.536485 |
from collections import deque
try:
import simplejson as json
except ImportError:
import json
from ioflo.base.odicting import odict
from ioflo.base.globaling import *
from ioflo.base import aiding
from ioflo.base import storing
from ioflo.base import deeding
from ioflo.base.consoling import getConsole
console = getConsole()
from . import raeting, packeting, estating, yarding, stacking
class StackUdpRaet(deeding.Deed):
Ioinits = odict(
inode="raet.udp.stack.",
stack='stack',
txmsgs=odict(ipath='txmsgs', ival=deque()),
rxmsgs=odict(ipath='rxmsgs', ival=deque()),
local=odict(ipath='local', ival=odict( name='master',
eid=0,
host='0.0.0.0',
port=raeting.RAET_PORT,
sigkey=None,
prikey=None)),)
def postinitio(self):
sigkey = self.local.data.sigkey
prikey = self.local.data.prikey
ha = (self.local.data.host, self.local.data.port)
name = self.local.data.name
eid = self.local.data.eid
estate = estating.LocalEstate( eid=eid,
name=name,
ha=ha,
sigkey=sigkey,
prikey=prikey,)
txMsgs = self.txmsgs.value
rxMsgs = self.rxmsgs.value
self.stack.value = stacking.StackUdp(estate=estate,
store=self.store,
name=name,
txMsgs=txMsgs,
rxMsgs=rxMsgs, )
def action(self, **kwa):
self.stack.value.serviceAll()
class CloserStackUdpRaet(deeding.Deed):
Ioinits = odict(
inode=".raet.udp.stack.",
stack='stack', )
def action(self, **kwa):
if self.stack.value and isinstance(self.stack.value, stacking.StackUdp):
self.stack.value.serverUdp.close()
class JoinerStackUdpRaet(deeding.Deed):
Ioinits = odict(
inode=".raet.udp.stack.",
stack='stack',)
def action(self, **kwa):
stack = self.stack.value
if stack and isinstance(stack, stacking.StackUdp):
stack.join()
class JoinedStackUdpRaet(deeding.Deed):
Ioinits = odict(
inode=".raet.udp.stack.",
stack='stack',
status=odict(ipath='status', ival=odict(joined=False,
allowed=False,
idle=False, )))
def action(self, **kwa):
stack = self.stack.value
joined = False
if stack and isinstance(stack, stacking.StackUdp):
if stack.estates:
joined = stack.estates.values()[0].joined
self.status.update(joined=joined)
class AllowerStackUdpRaet(deeding.Deed):
Ioinits = odict(
inode=".raet.udp.stack.",
stack='stack', )
def action(self, **kwa):
stack = self.stack.value
if stack and isinstance(stack, stacking.StackUdp):
stack.allow()
return None
class AllowedStackUdpRaet(deeding.Deed):
Ioinits = odict(
inode=".raet.udp.stack.",
stack='stack',
status=odict(ipath='status', ival=odict(joined=False,
allowed=False,
idle=False, )))
def action(self, **kwa):
stack = self.stack.value
allowed = False
if stack and isinstance(stack, stacking.StackUdp):
if stack.estates:
allowed = stack.estates.values()[0].allowed
self.status.update(allowed=allowed)
class IdledStackUdpRaet(deeding.Deed):
Ioinits = odict(
inode=".raet.udp.stack.",
stack='stack',
status=odict(ipath='status', ival=odict(joined=False,
allowed=False,
idle=False, )))
def action(self, **kwa):
stack = self.stack.value
idled = False
if stack and isinstance(stack, stacking.StackUdp):
if not stack.transactions:
idled = True
self.status.update(idled=idled)
class MessengerStackUdpRaet(deeding.Deed):
Ioinits = odict(
inode=".raet.udp.stack.",
stack="stack",
destination="destination",)
def action(self, **kwa):
if kwa:
msg = odict(kwa)
stack = self.stack.value
if stack and isinstance(stack, stacking.StackUdp):
deid = self.destination.value
stack.txMsg(msg=msg, deid=deid)
class PrinterStackUdpRaet(deeding.Deed):
Ioinits = odict(
inode=".raet.udp.stack.",
rxmsgs=odict(ipath='rxmsgs', ival=deque()),)
def action(self, **kwa):
rxMsgs = self.rxmsgs.value
while rxMsgs:
msg = rxMsgs.popleft()
console.terse("\nReceived....\n{0}\n".format(msg))
class StackUxdRaet(deeding.Deed):
Ioinits = odict(
inode="raet.uxd.stack.",
stack='stack',
txmsgs=odict(ipath='txmsgs', ival=deque()),
rxmsgs=odict(ipath='rxmsgs', ival=deque()),
local=odict(ipath='local', ival=odict(name='minion',
yardname="",
lane="maple")),)
def postinitio(self):
name = self.local.data.name
yardname = self.local.data.yardname
lane = self.local.data.lane
txMsgs = self.txmsgs.value
rxMsgs = self.rxmsgs.value
self.stack.value = stacking.StackUxd(
store=self.store,
name=name,
yardname=yardname,
lanename=lane,
txMsgs=txMsgs,
rxMsgs=rxMsgs, )
def action(self, **kwa):
self.stack.value.serviceAll()
class CloserStackUxdRaet(deeding.Deed):
Ioinits = odict(
inode=".raet.uxd.stack.",
stack='stack',)
def action(self, **kwa):
if self.stack.value and isinstance(self.stack.value, stacking.StackUxd):
self.stack.value.serverUxd.close()
class AddYardStackUxdRaet(deeding.Deed):
Ioinits = odict(
inode=".raet.uxd.stack.",
stack='stack',
yard='yard',
local=odict(ipath='local', ival=odict(name=None, lane="maple")),)
def action(self, lane="lane", name=None, **kwa):
stack = self.stack.value
if stack and isinstance(stack, stacking.StackUxd):
yard = yarding.Yard(stack=stack, prefix=lane, name=name)
stack.addRemoteYard(yard)
self.yard.value = yard
class TransmitStackUxdRaet(deeding.Deed):
Ioinits = odict(
inode=".raet.uxd.stack.",
stack="stack",
dest="dest",)
def action(self, **kwa):
if kwa:
msg = odict(kwa)
stack = self.stack.value
if stack and isinstance(stack, stacking.StackUxd):
name = self.dest.value
stack.transmit(msg=msg, name=name)
class PrinterStackUxdRaet(deeding.Deed):
Ioinits = odict(
inode=".raet.uxd.stack.",
stack="stack",
rxmsgs=odict(ipath='rxmsgs', ival=deque()),)
def action(self, **kwa):
rxMsgs = self.rxmsgs.value
stack = self.stack.value
while rxMsgs:
msg = rxMsgs.popleft()
console.terse("\n{0} Received....\n{1}\n".format(stack.name, msg))
| true | true |
f71bbd57b2470bbde34e554ff23f62fc28873ebf | 2,314 | py | Python | loss_functions/loss_oracle.py | konstmish/opt_methods | ae73d9bd89ae5c463e70328d73cbd190175df98c | [
"MIT"
] | 13 | 2020-07-19T12:02:43.000Z | 2022-03-02T14:34:03.000Z | loss_functions/loss_oracle.py | konstmish/opt_methods | ae73d9bd89ae5c463e70328d73cbd190175df98c | [
"MIT"
] | 1 | 2020-12-25T02:05:00.000Z | 2021-01-01T11:24:51.000Z | loss_functions/loss_oracle.py | konstmish/opt_methods | ae73d9bd89ae5c463e70328d73cbd190175df98c | [
"MIT"
] | 2 | 2020-07-17T08:45:48.000Z | 2021-12-10T03:24:57.000Z | import copy
import numpy as np
import warnings
from .regularizer import Regularizer
class Oracle():
"""
Base class for all objectives. Can provide objective values,
gradients and its Hessians as functions that take parameters as input.
Takes as input the values of l1 and l2 regularization.
"""
def __init__(self, l1=0, l2=0, l2_in_prox=False, regularizer=None, seed=42):
if l1 < 0.0:
raise ValueError("Invalid value for l1 regularization: {}".format(l1))
if l2 < 0.0:
raise ValueError("Invalid value for l2 regularization: {}".format(l2))
if l2 == 0. and l2_in_prox:
warnings.warn("The value of l2 is set to 0, so l2_in_prox is changed to False.")
l2_in_prox = False
self.l1 = l1
self.l2 = 0 if l2_in_prox else l2
self.l2_in_prox = l2_in_prox
self.x_opt = None
self.f_opt = np.inf
self.regularizer = regularizer
self.seed = seed
if (l1 > 0 or l2_in_prox) and regularizer is None:
l2_prox = l2 if l2_in_prox else 0
self.regularizer = Regularizer(l1=l1, l2=l2_prox)
self.rng = np.random.default_rng(seed)
self._smoothness = None
self._max_smoothness = None
self._ave_smoothness = None
self._importance_probs = None
self._individ_smoothness = None
def value(self, x):
value = self._value(x)
if self.regularizer is not None:
value += self.regularizer(x)
if value < self.f_opt:
self.x_opt = copy.deepcopy(x)
self.f_opt = value
return value
def gradient(self, x):
pass
def hessian(self, x):
pass
def hess_vec_prod(self, x, v, grad_dif=False, eps=None):
pass
@property
def smoothness(self):
pass
@property
def max_smoothness(self):
pass
@property
def average_smoothness(self):
pass
def batch_smoothness(self, batch_size):
pass
@staticmethod
def norm(x):
pass
@staticmethod
def inner_prod(x, y):
pass
@staticmethod
def outer_prod(x, y):
pass
@staticmethod
def is_equal(x, y):
pass
| 26.295455 | 92 | 0.583838 | import copy
import numpy as np
import warnings
from .regularizer import Regularizer
class Oracle():
def __init__(self, l1=0, l2=0, l2_in_prox=False, regularizer=None, seed=42):
if l1 < 0.0:
raise ValueError("Invalid value for l1 regularization: {}".format(l1))
if l2 < 0.0:
raise ValueError("Invalid value for l2 regularization: {}".format(l2))
if l2 == 0. and l2_in_prox:
warnings.warn("The value of l2 is set to 0, so l2_in_prox is changed to False.")
l2_in_prox = False
self.l1 = l1
self.l2 = 0 if l2_in_prox else l2
self.l2_in_prox = l2_in_prox
self.x_opt = None
self.f_opt = np.inf
self.regularizer = regularizer
self.seed = seed
if (l1 > 0 or l2_in_prox) and regularizer is None:
l2_prox = l2 if l2_in_prox else 0
self.regularizer = Regularizer(l1=l1, l2=l2_prox)
self.rng = np.random.default_rng(seed)
self._smoothness = None
self._max_smoothness = None
self._ave_smoothness = None
self._importance_probs = None
self._individ_smoothness = None
def value(self, x):
value = self._value(x)
if self.regularizer is not None:
value += self.regularizer(x)
if value < self.f_opt:
self.x_opt = copy.deepcopy(x)
self.f_opt = value
return value
def gradient(self, x):
pass
def hessian(self, x):
pass
def hess_vec_prod(self, x, v, grad_dif=False, eps=None):
pass
@property
def smoothness(self):
pass
@property
def max_smoothness(self):
pass
@property
def average_smoothness(self):
pass
def batch_smoothness(self, batch_size):
pass
@staticmethod
def norm(x):
pass
@staticmethod
def inner_prod(x, y):
pass
@staticmethod
def outer_prod(x, y):
pass
@staticmethod
def is_equal(x, y):
pass
| true | true |
f71bbe3d8e47628a35517152c6c8c4ac6e8bf38e | 1,192 | py | Python | setup.py | avictor0826/bigsuds | a4cfd622ab38a3b8fd65cccdcccc117691a7c091 | [
"MIT"
] | 33 | 2015-08-23T21:32:34.000Z | 2019-07-03T16:14:26.000Z | setup.py | avictor0826/bigsuds | a4cfd622ab38a3b8fd65cccdcccc117691a7c091 | [
"MIT"
] | 9 | 2015-09-28T18:22:16.000Z | 2022-03-26T00:22:51.000Z | setup.py | avictor0826/bigsuds | a4cfd622ab38a3b8fd65cccdcccc117691a7c091 | [
"MIT"
] | 22 | 2015-09-25T22:47:28.000Z | 2022-03-28T12:54:59.000Z | from setuptools import setup
import re
def extract_version(filename):
contents = open(filename).read()
match = re.search('^__version__\s+=\s+[\'"](.*)[\'"]\s*$', contents, re.MULTILINE)
if match is not None:
return match.group(1)
setup(
name="bigsuds",
version=extract_version('bigsuds.py'),
description='Library for F5 Networks iControl API',
license='https://devcentral.f5.com/resources/devcentral-eula',
classifiers=[
'Development Status :: 5 - Production/Stable',
'Intended Audience :: Developers',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 2.4',
'Programming Language :: Python :: 2.5',
'Programming Language :: Python :: 2.6',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3.5',
'Programming Language :: Python :: 3.6',
],
keywords='f5 icontrol',
author='F5 Networks, Inc.',
author_email='devcentral@f5.com',
url='http://devcentral.f5.com',
install_requires=['suds-jurko>=0.6'],
py_modules=['bigsuds'],
test_suite='nose.collector',
tests_require=['nose', 'mock', 'mox'],
)
| 33.111111 | 86 | 0.618289 | from setuptools import setup
import re
def extract_version(filename):
contents = open(filename).read()
match = re.search('^__version__\s+=\s+[\'"](.*)[\'"]\s*$', contents, re.MULTILINE)
if match is not None:
return match.group(1)
setup(
name="bigsuds",
version=extract_version('bigsuds.py'),
description='Library for F5 Networks iControl API',
license='https://devcentral.f5.com/resources/devcentral-eula',
classifiers=[
'Development Status :: 5 - Production/Stable',
'Intended Audience :: Developers',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 2.4',
'Programming Language :: Python :: 2.5',
'Programming Language :: Python :: 2.6',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3.5',
'Programming Language :: Python :: 3.6',
],
keywords='f5 icontrol',
author='F5 Networks, Inc.',
author_email='devcentral@f5.com',
url='http://devcentral.f5.com',
install_requires=['suds-jurko>=0.6'],
py_modules=['bigsuds'],
test_suite='nose.collector',
tests_require=['nose', 'mock', 'mox'],
)
| true | true |
f71bbea834d6298b1b749394c01ad48a8ce0536c | 76,454 | py | Python | tests/lax_numpy_test.py | zhongwen/jax | 76d2a87915863d3a32732837cc7bf61b7b2f9e5b | [
"ECL-2.0",
"Apache-2.0"
] | null | null | null | tests/lax_numpy_test.py | zhongwen/jax | 76d2a87915863d3a32732837cc7bf61b7b2f9e5b | [
"ECL-2.0",
"Apache-2.0"
] | null | null | null | tests/lax_numpy_test.py | zhongwen/jax | 76d2a87915863d3a32732837cc7bf61b7b2f9e5b | [
"ECL-2.0",
"Apache-2.0"
] | null | null | null | # Copyright 2018 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import collections
import functools
from functools import partial
import itertools
import operator
import unittest
from unittest import SkipTest
from absl.testing import absltest
from absl.testing import parameterized
import six
import numpy as onp
import jax.ops
from jax import api
from jax import lax
from jax import numpy as lnp
from jax import test_util as jtu
from jax.lib import xla_bridge
from jax.config import config
config.parse_flags_with_absl()
FLAGS = config.FLAGS
nonempty_nonscalar_array_shapes = [(4,), (3, 4), (3, 1), (1, 4), (2, 1, 4), (2, 3, 4)]
nonempty_array_shapes = [()] + nonempty_nonscalar_array_shapes
empty_array_shapes = [(0,), (0, 4), (3, 0),]
scalar_shapes = [jtu.NUMPY_SCALAR_SHAPE, jtu.PYTHON_SCALAR_SHAPE]
array_shapes = nonempty_array_shapes + empty_array_shapes
nonzerodim_shapes = nonempty_nonscalar_array_shapes + empty_array_shapes
nonempty_shapes = scalar_shapes + nonempty_array_shapes
all_shapes = scalar_shapes + array_shapes
float_dtypes = [onp.float32, onp.float64]
complex_dtypes = [onp.complex64, onp.complex128]
int_dtypes = [onp.int32, onp.int64]
unsigned_dtypes = [onp.uint32, onp.uint64]
bool_dtypes = [onp.bool_]
default_dtypes = float_dtypes + int_dtypes
inexact_dtypes = float_dtypes + complex_dtypes
number_dtypes = float_dtypes + complex_dtypes + int_dtypes
all_dtypes = number_dtypes + bool_dtypes
OpRecord = collections.namedtuple(
"OpRecord",
["name", "nargs", "dtypes", "shapes", "rng", "diff_modes", "test_name",
"check_dtypes"])
def op_record(name, nargs, dtypes, shapes, rng, diff_modes, test_name=None,
check_dtypes=True):
test_name = test_name or name
return OpRecord(name, nargs, dtypes, shapes, rng, diff_modes, test_name,
check_dtypes)
JAX_ONE_TO_ONE_OP_RECORDS = [
op_record("abs", 1, number_dtypes, all_shapes, jtu.rand_default(), ["rev"]),
op_record("add", 2, number_dtypes, all_shapes, jtu.rand_default(), ["rev"]),
op_record("ceil", 1, float_dtypes, all_shapes, jtu.rand_default(), []),
op_record("conj", 1, number_dtypes, all_shapes, jtu.rand_default(), ["rev"]),
op_record("equal", 2, all_dtypes, all_shapes, jtu.rand_some_equal(), []),
op_record("exp", 1, number_dtypes, all_shapes, jtu.rand_default(), ["rev"]),
op_record("fabs", 1, float_dtypes, all_shapes, jtu.rand_default(), ["rev"]),
op_record("float_power", 2, inexact_dtypes, all_shapes, jtu.rand_default(), ["rev"]),
op_record("floor", 1, float_dtypes, all_shapes, jtu.rand_default(), []),
op_record("greater", 2, number_dtypes, all_shapes, jtu.rand_some_equal(), []),
op_record("greater_equal", 2, number_dtypes, all_shapes, jtu.rand_some_equal(), []),
op_record("less", 2, number_dtypes, all_shapes, jtu.rand_some_equal(), []),
op_record("less_equal", 2, number_dtypes, all_shapes, jtu.rand_some_equal(), []),
op_record("log", 1, number_dtypes, all_shapes, jtu.rand_positive(), ["rev"]),
op_record("logical_and", 2, all_dtypes, all_shapes, jtu.rand_bool(), []),
op_record("logical_not", 1, all_dtypes, all_shapes, jtu.rand_bool(), []),
op_record("logical_or", 2, all_dtypes, all_shapes, jtu.rand_bool(), []),
op_record("logical_xor", 2, all_dtypes, all_shapes, jtu.rand_bool(), []),
op_record("maximum", 2, number_dtypes, all_shapes, jtu.rand_some_inf(), []),
op_record("minimum", 2, number_dtypes, all_shapes, jtu.rand_some_inf(), []),
op_record("multiply", 2, number_dtypes, all_shapes, jtu.rand_default(), ["rev"]),
op_record("negative", 1, number_dtypes, all_shapes, jtu.rand_default(), ["rev"]),
op_record("not_equal", 2, number_dtypes, all_shapes, jtu.rand_some_equal(), ["rev"]),
op_record("array_equal", 2, number_dtypes, all_shapes, jtu.rand_some_equal(), ["rev"]),
op_record("reciprocal", 1, inexact_dtypes, all_shapes, jtu.rand_default(), []),
op_record("subtract", 2, number_dtypes, all_shapes, jtu.rand_default(), ["rev"]),
op_record("sin", 1, number_dtypes, all_shapes, jtu.rand_default(), ["rev"]),
op_record("cos", 1, number_dtypes, all_shapes, jtu.rand_default(), ["rev"]),
op_record("tan", 1, number_dtypes, all_shapes, jtu.rand_uniform(-1.5, 1.5),
["rev"]),
op_record("sinh", 1, number_dtypes, all_shapes, jtu.rand_default(), ["rev"]),
op_record("cosh", 1, number_dtypes, all_shapes, jtu.rand_default(), ["rev"]),
op_record("tanh", 1, number_dtypes, all_shapes, jtu.rand_default(), ["rev"]),
op_record("arcsin", 1, float_dtypes, all_shapes, jtu.rand_small(), ["rev"]),
op_record("arccos", 1, float_dtypes, all_shapes, jtu.rand_small(), ["rev"]),
op_record("arctan", 1, float_dtypes, all_shapes, jtu.rand_small(), ["rev"]),
op_record("arctan2", 2, float_dtypes, all_shapes, jtu.rand_small(), ["rev"]),
op_record("arcsinh", 1, number_dtypes, all_shapes, jtu.rand_positive(), ["rev"]),
op_record("arccosh", 1, number_dtypes, all_shapes, jtu.rand_positive(), ["rev"]),
op_record("arctanh", 1, number_dtypes, all_shapes, jtu.rand_small(), ["rev"]),
]
JAX_COMPOUND_OP_RECORDS = [
# angle has inconsistent 32/64-bit return types across numpy versions.
op_record("angle", 1, number_dtypes, all_shapes, jtu.rand_default(), [],
check_dtypes=False),
op_record("atleast_1d", 1, default_dtypes, all_shapes, jtu.rand_default(), []),
op_record("atleast_2d", 1, default_dtypes, all_shapes, jtu.rand_default(), []),
op_record("atleast_3d", 1, default_dtypes, all_shapes, jtu.rand_default(), []),
op_record("cbrt", 1, default_dtypes, all_shapes, jtu.rand_default(), ["rev"]),
op_record("conjugate", 1, number_dtypes, all_shapes, jtu.rand_default(), ["rev"]),
op_record("deg2rad", 1, float_dtypes, all_shapes, jtu.rand_default(), []),
op_record("divide", 2, number_dtypes, all_shapes, jtu.rand_nonzero(), ["rev"]),
op_record("exp2", 1, number_dtypes, all_shapes, jtu.rand_default(), ["rev"]),
op_record("expm1", 1, number_dtypes, all_shapes, jtu.rand_positive(), [],
test_name="expm1_large"),
op_record("expm1", 1, number_dtypes, all_shapes, jtu.rand_small_positive(), []),
op_record("fix", 1, float_dtypes, all_shapes, jtu.rand_default(), []),
op_record("floor_divide", 2, number_dtypes, all_shapes, jtu.rand_nonzero(), ["rev"]),
op_record("heaviside", 2, default_dtypes, all_shapes, jtu.rand_default(), []),
op_record("hypot", 2, default_dtypes, all_shapes, jtu.rand_default(), []),
op_record("kron", 2, number_dtypes, nonempty_shapes, jtu.rand_default(), []),
op_record("outer", 2, number_dtypes, all_shapes, jtu.rand_default(), []),
op_record("imag", 1, number_dtypes, all_shapes, jtu.rand_some_inf(), []),
op_record("iscomplex", 1, number_dtypes, all_shapes, jtu.rand_some_inf(), []),
op_record("isfinite", 1, inexact_dtypes, all_shapes, jtu.rand_some_inf_and_nan(), []),
op_record("isinf", 1, inexact_dtypes, all_shapes, jtu.rand_some_inf_and_nan(), []),
op_record("isnan", 1, inexact_dtypes, all_shapes, jtu.rand_some_inf_and_nan(), []),
op_record("isneginf", 1, float_dtypes, all_shapes, jtu.rand_some_inf_and_nan(), []),
op_record("isposinf", 1, float_dtypes, all_shapes, jtu.rand_some_inf_and_nan(), []),
op_record("isreal", 1, number_dtypes, all_shapes, jtu.rand_some_inf(), []),
op_record("isrealobj", 1, number_dtypes, all_shapes, jtu.rand_some_inf(), []),
op_record("log2", 1, number_dtypes, all_shapes, jtu.rand_positive(), ["rev"]),
op_record("log10", 1, number_dtypes, all_shapes, jtu.rand_positive(), ["rev"]),
op_record("log1p", 1, number_dtypes, all_shapes, jtu.rand_positive(), [],
test_name="log1p_large"),
op_record("log1p", 1, number_dtypes, all_shapes, jtu.rand_small_positive(), []),
op_record("logaddexp", 2, float_dtypes, all_shapes, jtu.rand_default(), ["rev"]),
op_record("logaddexp2", 2, float_dtypes, all_shapes, jtu.rand_default(), ["rev"]),
op_record("polyval", 2, number_dtypes, nonempty_nonscalar_array_shapes, jtu.rand_default(), []),
op_record("positive", 1, number_dtypes, all_shapes, jtu.rand_default(), ["rev"]),
op_record("power", 2, number_dtypes, all_shapes, jtu.rand_positive(), ["rev"]),
op_record("rad2deg", 1, float_dtypes, all_shapes, jtu.rand_default(), []),
op_record("ravel", 1, all_dtypes, all_shapes, jtu.rand_default(), ["rev"]),
op_record("real", 1, number_dtypes, all_shapes, jtu.rand_some_inf(), []),
op_record("remainder", 2, default_dtypes, all_shapes, jtu.rand_nonzero(), []),
op_record("mod", 2, default_dtypes, all_shapes, jtu.rand_nonzero(), []),
op_record("sinc", 1, number_dtypes, all_shapes, jtu.rand_default(), ["rev"]),
op_record("square", 1, number_dtypes, all_shapes, jtu.rand_default(), ["rev"]),
op_record("sqrt", 1, number_dtypes, all_shapes, jtu.rand_positive(), ["rev"]),
op_record("transpose", 1, all_dtypes, all_shapes, jtu.rand_default(), ["rev"]),
op_record("true_divide", 2, all_dtypes, all_shapes, jtu.rand_nonzero(), ["rev"]),
op_record("where", 3, (onp.float32, onp.int64), all_shapes, jtu.rand_some_zero(), []),
op_record("diff", 1, number_dtypes, nonzerodim_shapes, jtu.rand_default(), ["rev"]),
]
JAX_BITWISE_OP_RECORDS = [
op_record("bitwise_and", 2, int_dtypes + unsigned_dtypes, all_shapes,
jtu.rand_bool(), []),
op_record("bitwise_not", 1, int_dtypes + unsigned_dtypes, all_shapes,
jtu.rand_bool(), []),
op_record("bitwise_or", 2, int_dtypes + unsigned_dtypes, all_shapes,
jtu.rand_bool(), []),
op_record("bitwise_xor", 2, int_dtypes + unsigned_dtypes, all_shapes,
jtu.rand_bool(), []),
]
JAX_REDUCER_RECORDS = [
op_record("mean", 1, number_dtypes, nonempty_shapes, jtu.rand_default(), []),
op_record("prod", 1, number_dtypes, all_shapes, jtu.rand_small_positive(), []),
op_record("sum", 1, number_dtypes, all_shapes, jtu.rand_default(), []),
op_record("var", 1, number_dtypes, nonempty_shapes, jtu.rand_default(), []),
op_record("std", 1, inexact_dtypes, nonempty_shapes, jtu.rand_default(), []),
]
JAX_REDUCER_NO_DTYPE_RECORDS = [
op_record("all", 1, all_dtypes, all_shapes, jtu.rand_some_zero(), []),
op_record("any", 1, all_dtypes, all_shapes, jtu.rand_some_zero(), []),
op_record("max", 1, all_dtypes, nonempty_shapes, jtu.rand_default(), []),
op_record("min", 1, all_dtypes, nonempty_shapes, jtu.rand_default(), []),
]
JAX_ARGMINMAX_RECORDS = [
op_record("argmin", 1, all_dtypes, nonempty_shapes, jtu.rand_some_equal(), []),
op_record("argmax", 1, all_dtypes, nonempty_shapes, jtu.rand_some_equal(), []),
]
JAX_OPERATOR_OVERLOADS = [
op_record("__add__", 2, number_dtypes, all_shapes, jtu.rand_default(), []),
op_record("__sub__", 2, number_dtypes, all_shapes, jtu.rand_default(), []),
op_record("__mul__", 2, number_dtypes, all_shapes, jtu.rand_default(), []),
op_record("__eq__", 2, number_dtypes, all_shapes, jtu.rand_default(), []),
op_record("__ne__", 2, number_dtypes, all_shapes, jtu.rand_default(), []),
op_record("__lt__", 2, default_dtypes, all_shapes, jtu.rand_default(), []),
op_record("__gt__", 2, default_dtypes, all_shapes, jtu.rand_default(), []),
op_record("__ge__", 2, default_dtypes, all_shapes, jtu.rand_default(), []),
op_record("__neg__", 1, number_dtypes, all_shapes, jtu.rand_default(), []),
op_record("__pow__", 2, inexact_dtypes, all_shapes, jtu.rand_positive(), []),
op_record("__mod__", 2, default_dtypes, all_shapes, jtu.rand_nonzero(), []),
op_record("__floordiv__", 2, default_dtypes, all_shapes, jtu.rand_nonzero(), []),
op_record("__truediv__", 2, number_dtypes, all_shapes, jtu.rand_nonzero(), []),
op_record("__abs__", 1, number_dtypes, all_shapes, jtu.rand_default(), []),
# TODO(mattjj): __invert__ fails on bool dtypes because ~True == -2
op_record("__invert__", 1, int_dtypes, all_shapes, jtu.rand_default(), []),
# TODO(mattjj): investigate these failures
# op_record("__or__", 2, number_dtypes, all_shapes, jtu.rand_bool(), []),
# op_record("__and__", 2, number_dtypes, all_shapes, jtu.rand_default(), []),
# op_record("__xor__", 2, number_dtypes, all_shapes, jtu.rand_bool(), []),
# op_record("__divmod__", 2, number_dtypes, all_shapes, jtu.rand_nonzero(), []),
# TODO(mattjj): lshift, rshift
]
JAX_RIGHT_OPERATOR_OVERLOADS = [
op_record("__radd__", 2, number_dtypes, all_shapes, jtu.rand_default(), []),
op_record("__rsub__", 2, number_dtypes, all_shapes, jtu.rand_default(), []),
op_record("__rmul__", 2, number_dtypes, all_shapes, jtu.rand_default(), []),
op_record("__rpow__", 2, inexact_dtypes, all_shapes, jtu.rand_positive(), []),
op_record("__rmod__", 2, default_dtypes, all_shapes, jtu.rand_nonzero(), []),
op_record("__rfloordiv__", 2, default_dtypes, all_shapes, jtu.rand_nonzero(), []),
op_record("__rtruediv__", 2, number_dtypes, all_shapes, jtu.rand_nonzero(), []),
# op_record("__ror__", 2, number_dtypes, all_shapes, jtu.rand_bool(), []),
# op_record("__rand__", 2, number_dtypes, all_shapes, jtu.rand_default(), []),
# op_record("__rxor__", 2, number_dtypes, all_shapes, jtu.rand_bool(), []),
# op_record("__rdivmod__", 2, number_dtypes, all_shapes, jtu.rand_nonzero(), []),
]
numpy_version = tuple(map(int, onp.version.version.split('.')))
if numpy_version >= (1, 15):
JAX_COMPOUND_OP_RECORDS += [
op_record("isclose", 2, all_dtypes, all_shapes, jtu.rand_small_positive(), []),
op_record("gcd", 2, int_dtypes, all_shapes, jtu.rand_default(), []),
op_record("lcm", 2, int_dtypes, all_shapes, jtu.rand_default(), []),
]
JAX_REDUCER_NO_DTYPE_RECORDS += [
op_record("ptp", 1, number_dtypes, nonempty_shapes, jtu.rand_default(), []),
]
if six.PY2:
JAX_OPERATOR_OVERLOADS += [
op_record("__div__", 2, number_dtypes, all_shapes, jtu.rand_nonzero(), []),
]
JAX_RIGHT_OPERATOR_OVERLOADS += [
op_record("__rdiv__", 2, number_dtypes, all_shapes, jtu.rand_nonzero(), []),
]
CombosWithReplacement = itertools.combinations_with_replacement
def _dtypes_are_compatible_for_bitwise_ops(args):
if len(args) <= 1:
return True
is_signed = lambda dtype: onp.issubdtype(dtype, onp.signedinteger)
width = lambda dtype: onp.iinfo(dtype).bits
x, y = args
if width(x) > width(y):
x, y = y, x
# The following condition seems a little ad hoc, but seems to capture what
# numpy actually implements.
return (
is_signed(x) == is_signed(y)
or (width(x) == 32 and width(y) == 32)
or (width(x) == 32 and width(y) == 64 and is_signed(y)))
def _shapes_are_broadcast_compatible(shapes):
accumulator = onp.zeros([])
for shape in shapes:
try:
accumulator = accumulator + onp.zeros(shape)
except ValueError:
return False
return True
class LaxBackedNumpyTests(jtu.JaxTestCase):
"""Tests for LAX-backed Numpy implementation."""
def _GetArgsMaker(self, rng, shapes, dtypes):
return lambda: [rng(shape, dtype) for shape, dtype in zip(shapes, dtypes)]
  @parameterized.named_parameters(itertools.chain.from_iterable(
      jtu.cases_from_list(
        {"testcase_name": jtu.format_test_name_suffix(rec.test_name, shapes,
                                                      dtypes),
         "rng": rec.rng, "shapes": shapes, "dtypes": dtypes,
         "onp_op": getattr(onp, rec.name), "lnp_op": getattr(lnp, rec.name),
         "check_dtypes": rec.check_dtypes}
        for shapes in filter(
          _shapes_are_broadcast_compatible,
          CombosWithReplacement(rec.shapes, rec.nargs))
        for dtypes in CombosWithReplacement(rec.dtypes, rec.nargs))
      for rec in itertools.chain(JAX_ONE_TO_ONE_OP_RECORDS,
                                 JAX_COMPOUND_OP_RECORDS)))
  def testOp(self, onp_op, lnp_op, rng, shapes, dtypes, check_dtypes):
    """Checks each recorded lnp op against its onp counterpart."""
    args_maker = self._GetArgsMaker(rng, shapes, dtypes)
    # Python scalar args have no fixed dtype, so skip dtype checking for them.
    py_scalar_arg = jtu.PYTHON_SCALAR_SHAPE in shapes
    self._CheckAgainstNumpy(onp_op, lnp_op, args_maker,
                            check_dtypes=check_dtypes and not py_scalar_arg)
    self._CompileAndCheck(lnp_op, args_maker, check_dtypes=check_dtypes)
  @parameterized.named_parameters(itertools.chain.from_iterable(
      jtu.cases_from_list(
        {"testcase_name": jtu.format_test_name_suffix(rec.test_name, shapes,
                                                      dtypes),
         "rng": rec.rng, "shapes": shapes, "dtypes": dtypes, "name": rec.name}
        for shapes in filter(
          _shapes_are_broadcast_compatible,
          CombosWithReplacement(rec.shapes, rec.nargs))
        for dtypes in CombosWithReplacement(rec.dtypes, rec.nargs))
      for rec in JAX_OPERATOR_OVERLOADS))
  def testOperatorOverload(self, name, rng, shapes, dtypes):
    """Checks forward operator overloads (e.g. __add__) compile and run."""
    args_maker = self._GetArgsMaker(rng, shapes, dtypes)
    # Map the dunder name (e.g. "__add__") to the `operator` module function.
    fun = lambda *xs: getattr(operator, name.strip('_'))(*xs)
    self._CompileAndCheck(fun, args_maker,
                          check_dtypes=jtu.PYTHON_SCALAR_SHAPE not in shapes)
  @parameterized.named_parameters(itertools.chain.from_iterable(
      jtu.cases_from_list(
        {"testcase_name": jtu.format_test_name_suffix(rec.test_name, shapes,
                                                      dtypes),
         "rng": rec.rng, "shapes": shapes, "dtypes": dtypes, "name": rec.name}
        for shapes in filter(
          _shapes_are_broadcast_compatible,
          CombosWithReplacement(rec.shapes, rec.nargs))
        for dtypes in CombosWithReplacement(rec.dtypes, rec.nargs))
      for rec in JAX_RIGHT_OPERATOR_OVERLOADS))
  def testRightOperatorOverload(self, name, rng, shapes, dtypes):
    """Checks reflected operator overloads (e.g. __radd__) compile and run."""
    if shapes[1] is jtu.PYTHON_SCALAR_SHAPE:
      raise SkipTest()  # TODO(mattjj): clean up
    args_maker = self._GetArgsMaker(rng, shapes, dtypes)
    # Invoke the reflected dunder on the second operand with the first.
    fun = lambda fst, snd: getattr(snd, name)(fst)
    self._CompileAndCheck(fun, args_maker,
                          check_dtypes=jtu.PYTHON_SCALAR_SHAPE not in shapes)
  @parameterized.named_parameters(itertools.chain.from_iterable(
      jtu.cases_from_list(
        {"testcase_name": jtu.format_test_name_suffix(
            rec.test_name, shapes, dtypes),
         "rng": rec.rng, "shapes": shapes, "dtypes": dtypes,
         "onp_op": getattr(onp, rec.name), "lnp_op": getattr(lnp, rec.name)}
        for shapes in filter(
          _shapes_are_broadcast_compatible,
          CombosWithReplacement(rec.shapes, rec.nargs))
        for dtypes in filter(
          _dtypes_are_compatible_for_bitwise_ops,
          CombosWithReplacement(rec.dtypes, rec.nargs)))
      for rec in JAX_BITWISE_OP_RECORDS))
  def testBitwiseOp(self, onp_op, lnp_op, rng, shapes, dtypes):
    """Checks bitwise ops against onp for compatible dtype combinations."""
    if not FLAGS.jax_enable_x64 and any(
        onp.iinfo(dtype).bits == 64 for dtype in dtypes):
      self.skipTest("x64 types are disabled by jax_enable_x64")
    args_maker = self._GetArgsMaker(rng, shapes, dtypes)
    self._CheckAgainstNumpy(onp_op, lnp_op, args_maker,
                            check_dtypes=jtu.PYTHON_SCALAR_SHAPE not in shapes)
    self._CompileAndCheck(lnp_op, args_maker, check_dtypes=True)
  @parameterized.named_parameters(jtu.cases_from_list(
      {"testcase_name": "{}_inshape={}_axis={}_dtype={}_keepdims={}".format(
          rec.test_name.capitalize(),
          jtu.format_shape_dtype_string(shape, dtype), axis,
          "None" if out_dtype is None else onp.dtype(out_dtype).name, keepdims),
       "rng": rec.rng, "shape": shape, "dtype": dtype, "out_dtype": out_dtype,
       "onp_op": getattr(onp, rec.name), "lnp_op": getattr(lnp, rec.name),
       "axis": axis, "keepdims": keepdims}
      for rec in JAX_REDUCER_RECORDS
      for shape in rec.shapes for dtype in rec.dtypes
      for out_dtype in [None] + rec.dtypes
      for axis in set(range(-len(shape), len(shape))) | set([None])
      for keepdims in [False, True]))
  def testReducer(self, onp_op, lnp_op, rng, shape, dtype, out_dtype, axis, keepdims):
    """Checks dtype-accepting reducers (sum, prod, ...) against onp."""
    onp_fun = lambda x: onp_op(x, axis, dtype=out_dtype, keepdims=keepdims)
    lnp_fun = lambda x: lnp_op(x, axis, dtype=out_dtype, keepdims=keepdims)
    args_maker = lambda: [rng(shape, dtype)]
    self._CheckAgainstNumpy(onp_fun, lnp_fun, args_maker, check_dtypes=True)
    self._CompileAndCheck(lnp_fun, args_maker, check_dtypes=True)
  @parameterized.named_parameters(jtu.cases_from_list(
      {"testcase_name": "{}_inshape={}_axis={}_keepdims={}".format(
          rec.test_name.capitalize(),
          jtu.format_shape_dtype_string(shape, dtype), axis, keepdims),
       "rng": rec.rng, "shape": shape, "dtype": dtype,
       "onp_op": getattr(onp, rec.name), "lnp_op": getattr(lnp, rec.name),
       "axis": axis, "keepdims": keepdims}
      for rec in JAX_REDUCER_NO_DTYPE_RECORDS
      for shape in rec.shapes for dtype in rec.dtypes
      for axis in set(range(-len(shape), len(shape))) | set([None])
      for keepdims in [False, True]))
  def testReducerNoDtype(self, onp_op, lnp_op, rng, shape, dtype, axis, keepdims):
    """Checks reducers without a dtype parameter (e.g. ptp) against onp."""
    onp_fun = lambda x: onp_op(x, axis, keepdims=keepdims)
    lnp_fun = lambda x: lnp_op(x, axis, keepdims=keepdims)
    args_maker = lambda: [rng(shape, dtype)]
    self._CheckAgainstNumpy(onp_fun, lnp_fun, args_maker, check_dtypes=True)
    self._CompileAndCheck(lnp_fun, args_maker, check_dtypes=True)
@parameterized.named_parameters(jtu.cases_from_list(
{"testcase_name": "_shape={}_axis={}".format(
jtu.format_shape_dtype_string(shape, dtype), axis),
"shape": shape, "dtype": dtype, "axis": axis}
for shape in all_shapes for dtype in all_dtypes
for axis in set(range(-len(shape), len(shape))) | set([None])))
def testCountNonzero(self, shape, dtype, axis):
rng = jtu.rand_some_zero()
onp_fun = lambda x: onp.count_nonzero(x, axis)
lnp_fun = lambda x: lnp.count_nonzero(x, axis)
args_maker = lambda: [rng(shape, dtype)]
self._CheckAgainstNumpy(onp_fun, lnp_fun, args_maker, check_dtypes=True)
self._CompileAndCheck(lnp_fun, args_maker, check_dtypes=True)
@parameterized.named_parameters(jtu.cases_from_list(
{"testcase_name": "{}_inshape={}_axis={}".format(
rec.test_name.capitalize(),
jtu.format_shape_dtype_string(shape, dtype), axis),
"rng": rec.rng, "shape": shape, "dtype": dtype,
"onp_op": getattr(onp, rec.name), "lnp_op": getattr(lnp, rec.name),
"axis": axis}
for rec in JAX_ARGMINMAX_RECORDS
for shape in rec.shapes for dtype in rec.dtypes
for axis in range(-len(shape), len(shape))))
def testArgMinMax(self, onp_op, lnp_op, rng, shape, dtype, axis):
if (dtype == onp.complex128 and FLAGS.jax_test_dut and
FLAGS.jax_test_dut.startswith("gpu")):
raise unittest.SkipTest("complex128 reductions not supported on GPU")
def onp_fun(array_to_reduce):
return onp_op(array_to_reduce, axis)
def lnp_fun(array_to_reduce):
return lnp_op(array_to_reduce, axis)
args_maker = lambda: [rng(shape, dtype)]
self._CheckAgainstNumpy(onp_fun, lnp_fun, args_maker, check_dtypes=True)
self._CompileAndCheck(lnp_fun, args_maker, check_dtypes=True)
  @parameterized.named_parameters(jtu.cases_from_list(
      {"testcase_name": "_{}_{}_{}".format(
          jtu.format_shape_dtype_string(lhs_shape, lhs_dtype),
          jtu.format_shape_dtype_string(rhs_shape, rhs_dtype),
          axes),
       "lhs_shape": lhs_shape, "lhs_dtype": lhs_dtype,
       "rhs_shape": rhs_shape, "rhs_dtype": rhs_dtype,
       "axes": axes, "rng": rng}
      for rng in [jtu.rand_default()]
      for lhs_shape, rhs_shape, axes in [
          [(2,), (2,), (-1, -1, -1, None)], # scalar output
          [(2, 4), (2, 4), (-1, -1, -1, 0)], # 2D vectors
          [(3, 4), (3, 4), (-1, -1, -1, 0)], # 3D vectors
          [(3, 4), (3, 6, 5, 4), (-1, -1, -1, 0)], # broadcasting
          [(4, 3), (3, 6, 5, 4), (1, 0, -1, None)], # different axes
          [(6, 1, 3), (5, 3), (-1, -1, -1, None)], # more broadcasting
          [(6, 1, 2), (5, 3), (-1, -1, -1, None)], # mixed 2D and 3D vectors
          [(10, 5, 2, 8), (1, 5, 1, 3), (-2, -1, -3, None)], # axes/broadcasting
          [(4, 5, 2), (4, 5, 2), (-1, -1, 0, None)], # axisc should do nothing
          [(4, 5, 2), (4, 5, 2), (-1, -1, -1, None)] # same as before
      ]
      for lhs_dtype, rhs_dtype in CombosWithReplacement(number_dtypes, 2)))
  def testCross(self, lhs_shape, lhs_dtype, rhs_shape, rhs_dtype, axes, rng):
    """lnp.cross matches onp.cross for all axisa/axisb/axisc/axis combos."""
    args_maker = lambda: [rng(lhs_shape, lhs_dtype), rng(rhs_shape, rhs_dtype)]
    axisa, axisb, axisc, axis = axes
    lnp_fun = lambda a, b: lnp.cross(a, b, axisa, axisb, axisc, axis)
    onp_fun = lambda a, b: onp.cross(a, b, axisa, axisb, axisc, axis)
    self._CheckAgainstNumpy(onp_fun, lnp_fun, args_maker, check_dtypes=True)
    self._CompileAndCheck(lnp_fun, args_maker, check_dtypes=True)
  @parameterized.named_parameters(jtu.cases_from_list(
      {"testcase_name": "_{}_{}_{}".format(
          name,
          jtu.format_shape_dtype_string(lhs_shape, lhs_dtype),
          jtu.format_shape_dtype_string(rhs_shape, rhs_dtype)),
       "lhs_shape": lhs_shape, "lhs_dtype": lhs_dtype,
       "rhs_shape": rhs_shape, "rhs_dtype": rhs_dtype,
       "rng": rng}
      for rng in [jtu.rand_default()]
      for name, lhs_shape, rhs_shape in [
          ("matrix-scalar", (3, 3), ()),
          ("scalar-matrix", (), (3, 3)),
          ("matrix-vector", (4, 5), (5,)),
          ("vector-matrix", (6,), (6, 4)),
          ("matrix-matrix", (3, 4), (4, 5)),
          ("tensor-vector", (4, 3, 2), (2,)),
          ("vector-tensor", (2,), (3, 2, 4)),
          ("tensor-matrix", (4, 3, 2), (2, 5)),
          ("matrix-tensor", (5, 2), (3, 2, 4)),
          ("tensor-tensor", (2, 3, 4), (5, 4, 1))]
      for lhs_dtype, rhs_dtype in CombosWithReplacement(number_dtypes, 2)))
  def testDot(self, lhs_shape, lhs_dtype, rhs_shape, rhs_dtype, rng):
    """lnp.dot matches onp.dot across scalar/vector/matrix/tensor operands."""
    args_maker = lambda: [rng(lhs_shape, lhs_dtype), rng(rhs_shape, rhs_dtype)]
    self._CheckAgainstNumpy(onp.dot, lnp.dot, args_maker, check_dtypes=True)
    self._CompileAndCheck(lnp.dot, args_maker, check_dtypes=True)
  @parameterized.named_parameters(jtu.cases_from_list(
      {"testcase_name": "_{}_{}_{}".format(
          name,
          jtu.format_shape_dtype_string(lhs_shape, lhs_dtype),
          jtu.format_shape_dtype_string(rhs_shape, rhs_dtype)),
       "lhs_shape": lhs_shape, "lhs_dtype": lhs_dtype,
       "rhs_shape": rhs_shape, "rhs_dtype": rhs_dtype,
       "rng": rng}
      for rng in [jtu.rand_default()]
      for name, lhs_shape, rhs_shape in [
          ("vector-vector", (3,), (3,)),
          ("matrix-vector", (3, 3), (3,)),
          ("vector-matrix", (3,), (3, 3)),
          ("matrix-matrix", (3, 3), (3, 3)),
          ("vector-tensor", (3,), (5, 3, 2)),
          ("tensor-vector", (5, 3, 2), (2,)),
          ("matrix-tensor", (5, 2), (3, 2, 4)),
          ("tensor-matrix", (5, 2, 3), (3, 2)),
          ("tensor-tensor", (5, 3, 4), (5, 4, 1)),
          ("tensor-tensor-broadcast", (3, 1, 3, 4), (5, 4, 1))]
      for lhs_dtype, rhs_dtype in CombosWithReplacement(number_dtypes, 2)))
  def testMatmul(self, lhs_shape, lhs_dtype, rhs_shape, rhs_dtype, rng):
    """lnp.matmul matches onp.matmul, including batched/broadcast cases."""
    args_maker = lambda: [rng(lhs_shape, lhs_dtype), rng(rhs_shape, rhs_dtype)]
    self._CheckAgainstNumpy(onp.matmul, lnp.matmul, args_maker,
                            check_dtypes=True)
    self._CompileAndCheck(lnp.matmul, args_maker, check_dtypes=True)
  @parameterized.named_parameters(jtu.cases_from_list(
      {"testcase_name": "_{}_{}_{}".format(
          jtu.format_shape_dtype_string(lhs_shape, lhs_dtype),
          jtu.format_shape_dtype_string(rhs_shape, rhs_dtype),
          axes),
       "lhs_shape": lhs_shape, "lhs_dtype": lhs_dtype,
       "rhs_shape": rhs_shape, "rhs_dtype": rhs_dtype,
       "axes": axes, "rng": rng}
      for rng in [jtu.rand_default()]
      for lhs_shape, rhs_shape, axes in [
          [(2, 3, 4), (5, 6, 7), 0],  # from issue #740
          [(2, 3, 4), (3, 4, 5, 6), 2],
          [(2, 3, 4), (5, 4, 3, 6), [1, 2]],
          [(2, 3, 4), (5, 4, 3, 6), [[1, 2], [2, 1]]],
          [(1, 2, 3, 4), (4, 5, 3, 6), [[2, 3], [2, 0]]],
      ]
      for lhs_dtype, rhs_dtype in CombosWithReplacement(number_dtypes, 2)))
  def testTensordot(self, lhs_shape, lhs_dtype, rhs_shape, rhs_dtype, axes, rng):
    """lnp.tensordot matches onp.tensordot for int, list, and pair axes."""
    args_maker = lambda: [rng(lhs_shape, lhs_dtype), rng(rhs_shape, rhs_dtype)]
    lnp_fun = lambda a, b: lnp.tensordot(a, b, axes)
    onp_fun = lambda a, b: onp.tensordot(a, b, axes)
    self._CheckAgainstNumpy(onp_fun, lnp_fun, args_maker, check_dtypes=True)
    self._CompileAndCheck(lnp_fun, args_maker, check_dtypes=True)
  @parameterized.named_parameters(jtu.cases_from_list(
      {"testcase_name": "_{}_{}".format(
          jtu.format_shape_dtype_string(lhs_shape, lhs_dtype),
          jtu.format_shape_dtype_string(rhs_shape, rhs_dtype)),
       "lhs_shape": lhs_shape, "lhs_dtype": lhs_dtype,
       "rhs_shape": rhs_shape, "rhs_dtype": rhs_dtype,
       "rng": jtu.rand_default()}
      # TODO(phawkins): support integer dtypes too.
      for lhs_dtype, rhs_dtype in CombosWithReplacement(inexact_dtypes, 2)
      # Only shape pairs whose trailing dimensions match (or are scalar) are
      # valid inputs to inner.
      for lhs_shape, rhs_shape in [
          (l, r) for l, r in CombosWithReplacement(all_shapes, 2)
          if len(jtu._dims_of_shape(l)) == 0
          or len(jtu._dims_of_shape(r)) == 0
          or l[-1] == r[-1]]))
  def testInner(self, lhs_shape, lhs_dtype, rhs_shape, rhs_dtype, rng):
    """lnp.inner matches onp.inner on inexact dtypes."""
    args_maker = lambda: [rng(lhs_shape, lhs_dtype), rng(rhs_shape, rhs_dtype)]
    onp_fun = lambda lhs, rhs: onp.inner(lhs, rhs)
    lnp_fun = lambda lhs, rhs: lnp.inner(lhs, rhs)
    self._CheckAgainstNumpy(onp_fun, lnp_fun, args_maker, check_dtypes=True)
    self._CompileAndCheck(lnp_fun, args_maker, check_dtypes=True)
  @parameterized.named_parameters(jtu.cases_from_list(
      {"testcase_name": "_{}_amin={}_amax={}".format(
          jtu.format_shape_dtype_string(shape, dtype), a_min, a_max),
       "shape": shape, "dtype": dtype, "a_min": a_min, "a_max": a_max,
       "rng": jtu.rand_default()}
      for shape in all_shapes for dtype in number_dtypes
      for a_min, a_max in [(-1, None), (None, 1), (-1, 1)]))
  def testClipStaticBounds(self, shape, dtype, a_min, a_max, rng):
    """lnp.clip matches onp.clip with static lower/upper/both bounds."""
    onp_fun = lambda x: onp.clip(x, a_min=a_min, a_max=a_max)
    lnp_fun = lambda x: lnp.clip(x, a_min=a_min, a_max=a_max)
    args_maker = lambda: [rng(shape, dtype)]
    self._CheckAgainstNumpy(onp_fun, lnp_fun, args_maker, check_dtypes=True)
    self._CompileAndCheck(lnp_fun, args_maker, check_dtypes=True)
  @parameterized.named_parameters(jtu.cases_from_list(
      {"testcase_name": "_{}_decimals={}".format(
          jtu.format_shape_dtype_string(shape, dtype), decimals),
       "shape": shape, "dtype": dtype, "decimals": decimals,
       "rng": jtu.rand_default()}
      for shape in all_shapes for dtype in number_dtypes
      for decimals in [0, 1, -2]))
  def testRoundStaticDecimals(self, shape, dtype, decimals, rng):
    """lnp.round matches onp.round for positive, zero, negative decimals."""
    if onp.issubdtype(dtype, onp.integer) and decimals < 0:
      self.skipTest("Integer rounding with decimals < 0 not implemented")
    onp_fun = lambda x: onp.round(x, decimals=decimals)
    lnp_fun = lambda x: lnp.round(x, decimals=decimals)
    args_maker = lambda: [rng(shape, dtype)]
    self._CheckAgainstNumpy(onp_fun, lnp_fun, args_maker, check_dtypes=True)
    self._CompileAndCheck(lnp_fun, args_maker, check_dtypes=True)
  @parameterized.named_parameters(jtu.cases_from_list(
      {"testcase_name": "_shape={}_mode={}_rpadwidth={}_rconstantvalues={}".format(
          jtu.format_shape_dtype_string(shape, dtype), mode, pad_width_rank,
          constant_values_rank),
       "shape": shape, "dtype": dtype, "mode": mode,
       "pad_width_rank": pad_width_rank,
       "constant_values_rank": constant_values_rank, "rng": jtu.rand_default(),
       "irng": jtu.rand_int(3)}
      for mode, constant_values_rank, shapes in [
          ('constant', 0, all_shapes),
          ('constant', 1, all_shapes),
          ('constant', 2, all_shapes),
          ('symmetric', None, nonempty_shapes),
          ('reflect', None, nonempty_shapes),
          ('wrap', None, nonempty_shapes),
      ]
      for shape in shapes for dtype in all_dtypes
      for pad_width_rank in range(3)))
  def testPad(self, shape, dtype, mode, pad_width_rank, constant_values_rank,
              rng, irng):
    """lnp.pad matches onp.pad for each mode and pad-width/constant rank."""
    # pad_width shape is [] (scalar), [2], or [len(shape), 2] depending on
    # pad_width_rank, via list slicing of [len(shape), 2].
    pad_width = irng([len(shape), 2][2 - pad_width_rank:], onp.int32)
    def onp_fun(x, kwargs):
      # An empty pad_width (possible for rank-0 inputs) means no padding.
      if pad_width.size == 0:
        return x
      return onp.pad(x, pad_width, mode=mode, **kwargs)
    def lnp_fun(x, kwargs):
      return lnp.pad(x, pad_width, mode=mode, **kwargs)
    def args_maker():
      kwargs = {}
      if constant_values_rank:
        kwargs["constant_values"] = rng(
          [len(shape), 2][2 - constant_values_rank:], dtype)
      return rng(shape, dtype), kwargs
    self._CheckAgainstNumpy(onp_fun, lnp_fun, args_maker, check_dtypes=True)
    self._CompileAndCheck(lnp_fun, args_maker, check_dtypes=True)
  @parameterized.named_parameters(jtu.cases_from_list(
      {"testcase_name": "_shape=[{}]_reps={}".format(
          jtu.format_shape_dtype_string(shape, dtype), reps),
       "shape": shape, "dtype": dtype, "reps": reps,
       "rng": jtu.rand_default()}
      for reps in [(), (2,), (3, 4), (2, 3, 4)]
      for dtype in default_dtypes
      for shape in all_shapes
      ))
  def testTile(self, shape, dtype, reps, rng):
    """lnp.tile matches onp.tile for various repetition tuples."""
    onp_fun = lambda arg: onp.tile(arg, reps)
    lnp_fun = lambda arg: lnp.tile(arg, reps)
    args_maker = lambda: [rng(shape, dtype)]
    self._CheckAgainstNumpy(onp_fun, lnp_fun, args_maker, check_dtypes=True)
    self._CompileAndCheck(lnp_fun, args_maker, check_dtypes=True)
  @parameterized.named_parameters(jtu.cases_from_list(
      {"testcase_name": "_axis={}_baseshape=[{}]_dtypes=[{}]".format(
          axis, ",".join(str(d) for d in base_shape),
          ",".join(onp.dtype(dtype).name for dtype in dtypes)),
       "axis": axis, "base_shape": base_shape, "dtypes": dtypes,
       "rng": jtu.rand_default()}
      for num_arrs in [3]
      for dtypes in CombosWithReplacement(default_dtypes, num_arrs)
      for base_shape in [(4,), (3, 4), (2, 3, 4)]
      for axis in range(-len(base_shape)+1, len(base_shape))))
  def testConcatenate(self, axis, base_shape, dtypes, rng):
    """lnp.concatenate matches onp.concatenate with mixed sizes and dtypes."""
    wrapped_axis = axis % len(base_shape)
    # Vary the concatenated dimension (3, 1, 4, ...) per operand.
    shapes = [base_shape[:wrapped_axis] + (size,) + base_shape[wrapped_axis+1:]
              for size, _ in zip(itertools.cycle([3, 1, 4]), dtypes)]
    onp_fun = lambda *args: onp.concatenate(args, axis=axis)
    lnp_fun = lambda *args: lnp.concatenate(args, axis=axis)
    def args_maker():
      return [rng(shape, dtype) for shape, dtype in zip(shapes, dtypes)]
    self._CheckAgainstNumpy(onp_fun, lnp_fun, args_maker, check_dtypes=True)
    self._CompileAndCheck(lnp_fun, args_maker, check_dtypes=True)
  @parameterized.named_parameters(jtu.cases_from_list(
      {"testcase_name": "_axis={}_baseshape=[{}]_dtypes=[{}]".format(
          axis, ",".join(str(d) for d in base_shape),
          ",".join(onp.dtype(dtype).name for dtype in dtypes)),
       "axis": axis, "base_shape": base_shape, "dtypes": dtypes,
       "rng": jtu.rand_default()}
      for dtypes in CombosWithReplacement(default_dtypes, 2)
      for base_shape in [(4,), (3, 4), (2, 3, 4)]
      for axis in range(-len(base_shape)+1, len(base_shape))))
  def testAppend(self, axis, base_shape, dtypes, rng):
    """lnp.append matches onp.append with mixed sizes and dtypes."""
    wrapped_axis = axis % len(base_shape)
    # Vary the appended dimension (3, 1, 4, ...) per operand.
    shapes = [base_shape[:wrapped_axis] + (size,) + base_shape[wrapped_axis+1:]
              for size, _ in zip(itertools.cycle([3, 1, 4]), dtypes)]
    onp_fun = lambda arr, values: onp.append(arr, values, axis=axis)
    lnp_fun = lambda arr, values: lnp.append(arr, values, axis=axis)
    def args_maker():
      return [rng(shape, dtype) for shape, dtype in zip(shapes, dtypes)]
    self._CheckAgainstNumpy(onp_fun, lnp_fun, args_maker, check_dtypes=True)
    self._CompileAndCheck(lnp_fun, args_maker, check_dtypes=True)
  @parameterized.named_parameters(jtu.cases_from_list(
      {"testcase_name": "_shape=[{}]_axis={}_repeats={}".format(
          jtu.format_shape_dtype_string(shape, dtype), axis, repeats),
       "axis": axis, "shape": shape, "dtype": dtype, "repeats": repeats,
       "rng": jtu.rand_default()}
      for repeats in [0, 1, 2]
      for dtype in default_dtypes
      for shape in all_shapes
      for axis in [None] + list(range(-len(shape), len(shape)))))
  def testRepeat(self, axis, shape, dtype, repeats, rng):
    """lnp.repeat matches onp.repeat, including repeats=0."""
    onp_fun = lambda arg: onp.repeat(arg, repeats=repeats, axis=axis)
    lnp_fun = lambda arg: lnp.repeat(arg, repeats=repeats, axis=axis)
    args_maker = lambda: [rng(shape, dtype)]
    self._CheckAgainstNumpy(onp_fun, lnp_fun, args_maker, check_dtypes=True)
    self._CompileAndCheck(lnp_fun, args_maker, check_dtypes=True)
  @parameterized.named_parameters(jtu.cases_from_list(
      {"testcase_name": "op={}_shape=[{}]_axis={}_out_dtype={}".format(
          op, jtu.format_shape_dtype_string(shape, dtype), axis, out_dtype),
       "axis": axis, "shape": shape, "dtype": dtype, "out_dtype": out_dtype,
       "rng": jtu.rand_default(), "lnp_op": getattr(lnp, op),
       "onp_op": getattr(onp, op)}
      for op in ["cumsum", "cumprod"]
      # TODO(phawkins): replace both type lists with default_dtypes after a
      # Jaxlib update includes
      # https://github.com/google/jax/commit/86f5d189cf563b027c3cd00eea38072c003905c8
      for dtype in [onp.float32, onp.int32]
      for out_dtype in [onp.float32, onp.int32]
      for shape in all_shapes
      for axis in [None] + list(range(-len(shape), len(shape)))))
  def testCumSumProd(self, axis, shape, dtype, out_dtype, onp_op, lnp_op, rng):
    """lnp.cumsum/cumprod match onp, including explicit output dtypes."""
    onp_fun = lambda arg: onp_op(arg, axis=axis, dtype=out_dtype)
    lnp_fun = lambda arg: lnp_op(arg, axis=axis, dtype=out_dtype)
    args_maker = lambda: [rng(shape, dtype)]
    self._CheckAgainstNumpy(onp_fun, lnp_fun, args_maker, check_dtypes=True)
    self._CompileAndCheck(lnp_fun, args_maker, check_dtypes=True)
  @parameterized.named_parameters(jtu.cases_from_list(
      {"testcase_name": "_dtype={}_m={}_n={}_k={}".format(
          onp.dtype(dtype).name, m, n, k),
       "m": m, "n": n, "k": k, "dtype": dtype, "rng": jtu.rand_default()}
      for dtype in default_dtypes
      for n in [0, 4]
      for m in [None, 0, 1, 3, 4]
      for k in list(range(-4, 4))))
  def testTri(self, m, n, k, dtype, rng):
    """lnp.tri matches onp.tri for all (n, M, k, dtype) combinations."""
    onp_fun = lambda: onp.tri(n, M=m, k=k, dtype=dtype)
    lnp_fun = lambda: lnp.tri(n, M=m, k=k, dtype=dtype)
    # tri takes no array inputs; the functions are nullary.
    args_maker = lambda: []
    self._CheckAgainstNumpy(onp_fun, lnp_fun, args_maker, check_dtypes=True)
    self._CompileAndCheck(lnp_fun, args_maker, check_dtypes=True)
  @parameterized.named_parameters(jtu.cases_from_list(
      {"testcase_name": "_op={}_shape={}_k={}".format(
          op, jtu.format_shape_dtype_string(shape, dtype), k),
       "dtype": dtype, "shape": shape, "op": op, "k": k,
       "rng": jtu.rand_default()}
      for dtype in default_dtypes
      for shape in [shape for shape in all_shapes if len(shape) >= 2]
      for op in ["tril", "triu"]
      for k in list(range(-3, 3))))
  def testTriLU(self, dtype, shape, op, k, rng):
    """lnp.tril/triu match onp for rank >= 2 inputs and diagonal offsets."""
    onp_fun = lambda arg: getattr(onp, op)(arg, k=k)
    lnp_fun = lambda arg: getattr(lnp, op)(arg, k=k)
    args_maker = lambda: [rng(shape, dtype)]
    self._CheckAgainstNumpy(onp_fun, lnp_fun, args_maker, check_dtypes=True)
    self._CompileAndCheck(lnp_fun, args_maker, check_dtypes=True)
@parameterized.named_parameters(jtu.cases_from_list(
{"testcase_name": "_shape={}_k={}".format(
jtu.format_shape_dtype_string(shape, dtype), k),
"dtype": dtype, "shape": shape, "k": k, "rng": jtu.rand_default()}
for dtype in default_dtypes
for shape in [shape for shape in all_shapes if len(shape) in (1, 2)]
for k in list(range(-4, 4))))
def testDiag(self, shape, dtype, k, rng):
onp_fun = lambda arg: onp.diag(arg, k)
lnp_fun = lambda arg: lnp.diag(arg, k)
args_maker = lambda: [rng(shape, dtype)]
self._CheckAgainstNumpy(onp_fun, lnp_fun, args_maker, check_dtypes=True)
self._CompileAndCheck(lnp_fun, args_maker, check_dtypes=True)
  @parameterized.named_parameters(jtu.cases_from_list(
      {"testcase_name": "_shape={}_offset={}_axis1={}_axis2={}".format(
          jtu.format_shape_dtype_string(shape, dtype), offset, axis1, axis2),
       "dtype": dtype, "shape": shape, "offset": offset, "axis1": axis1,
       "axis2": axis2, "rng": jtu.rand_default()}
      for dtype in default_dtypes
      for shape in [shape for shape in all_shapes if len(shape) >= 2]
      for axis1 in range(-len(shape), len(shape))
      # axis2 must name a different dimension than axis1.
      for axis2 in [a for a in range(-len(shape), len(shape))
                    if a % len(shape) != axis1 % len(shape)]
      for offset in list(range(-4, 4))))
  def testDiagonal(self, shape, dtype, offset, axis1, axis2, rng):
    """lnp.diagonal matches onp.diagonal over axes pairs and offsets."""
    onp_fun = lambda arg: onp.diagonal(arg, offset, axis1, axis2)
    lnp_fun = lambda arg: lnp.diagonal(arg, offset, axis1, axis2)
    args_maker = lambda: [rng(shape, dtype)]
    self._CheckAgainstNumpy(onp_fun, lnp_fun, args_maker, check_dtypes=True)
    self._CompileAndCheck(lnp_fun, args_maker, check_dtypes=True)
@parameterized.named_parameters(jtu.cases_from_list(
{"testcase_name": "_shape={}_n={}".format(onp.dtype(dtype).name, n),
"dtype": dtype, "n": n}
for dtype in default_dtypes
for n in list(range(4))))
def testIdentity(self, n, dtype):
onp_fun = lambda: onp.identity(n, dtype)
lnp_fun = lambda: lnp.identity(n, dtype)
args_maker = lambda: []
self._CheckAgainstNumpy(onp_fun, lnp_fun, args_maker, check_dtypes=True)
self._CompileAndCheck(lnp_fun, args_maker, check_dtypes=True)
  @parameterized.named_parameters(jtu.cases_from_list(
      {"testcase_name": "_shape={}_dtype_{}_offset={}_axis1={}_axis2={}".format(
          jtu.format_shape_dtype_string(shape, dtype),
          out_dtype, offset, axis1, axis2),
       "dtype": dtype, "out_dtype": out_dtype, "shape": shape, "offset": offset,
       "axis1": axis1, "axis2": axis2, "rng": jtu.rand_default()}
      for dtype in default_dtypes
      for out_dtype in [None] + number_dtypes
      for shape in [shape for shape in all_shapes if len(shape) >= 2]
      for axis1 in range(-len(shape), len(shape))
      for axis2 in range(-len(shape), len(shape))
      # axis2 must name a different dimension than axis1.
      if (axis1 % len(shape)) != (axis2 % len(shape))
      for offset in list(range(-4, 4))))
  def testTrace(self, shape, dtype, out_dtype, offset, axis1, axis2, rng):
    """lnp.trace matches onp.trace over axes pairs, offsets, out dtypes."""
    onp_fun = lambda arg: onp.trace(arg, offset, axis1, axis2, out_dtype)
    lnp_fun = lambda arg: lnp.trace(arg, offset, axis1, axis2, out_dtype)
    args_maker = lambda: [rng(shape, dtype)]
    self._CheckAgainstNumpy(onp_fun, lnp_fun, args_maker, check_dtypes=True)
    self._CompileAndCheck(lnp_fun, args_maker, check_dtypes=True)
@parameterized.named_parameters(jtu.cases_from_list(
{"testcase_name": "_{}_axis={}".format(
jtu.format_test_name_suffix("", [shape] * len(dtypes), dtypes), axis),
"shape": shape, "axis": axis, "dtypes": dtypes, "rng": rng}
for dtypes in [
[onp.float32],
[onp.float32, onp.float32],
[onp.float32, onp.int32, onp.float32],
[onp.float32, onp.int64, onp.float32],
[onp.float32, onp.int32, onp.float64],
]
for shape in [(), (2,), (3, 4), (1, 100)]
for axis in range(-len(shape), len(shape) + 1)
for rng in [jtu.rand_default()]))
def testStack(self, shape, axis, dtypes, rng):
args_maker = lambda: [[rng(shape, dtype) for dtype in dtypes]]
onp_fun = partial(onp.stack, axis=axis)
lnp_fun = partial(lnp.stack, axis=axis)
self._CheckAgainstNumpy(lnp_fun, onp_fun, args_maker, check_dtypes=True)
@parameterized.named_parameters(jtu.cases_from_list(
{"testcase_name": "_op={}_{}".format(
op, jtu.format_test_name_suffix("", [shape] * len(dtypes), dtypes)),
"shape": shape, "op": op, "dtypes": dtypes, "rng": rng}
for op in ["hstack", "vstack", "dstack"]
for dtypes in [
[onp.float32],
[onp.float32, onp.float32],
[onp.float32, onp.int32, onp.float32],
[onp.float32, onp.int64, onp.float32],
[onp.float32, onp.int32, onp.float64],
]
for shape in [(), (2,), (3, 4), (1, 100), (2, 3, 4)]
for rng in [jtu.rand_default()]))
def testHVDStack(self, shape, op, dtypes, rng):
args_maker = lambda: [[rng(shape, dtype) for dtype in dtypes]]
onp_fun = getattr(onp, op)
lnp_fun = getattr(lnp, op)
self._CheckAgainstNumpy(lnp_fun, onp_fun, args_maker, check_dtypes=True)
  @parameterized.named_parameters(jtu.cases_from_list(
      {"testcase_name": "_inshape={}_outdtype={}".format(
          jtu.format_shape_dtype_string(shape, fill_value_dtype),
          onp.dtype(out_dtype).name if out_dtype else "None"),
       "shape": shape, "fill_value_dtype": fill_value_dtype,
       "out_dtype": out_dtype, "rng": jtu.rand_default()}
      for shape in array_shapes
      for fill_value_dtype in default_dtypes
      for out_dtype in [None] + default_dtypes))
  def testFull(self, shape, fill_value_dtype, out_dtype, rng):
    """lnp.full matches onp.full across fill-value and output dtypes."""
    onp_fun = lambda fill_value: onp.full(shape, fill_value, dtype=out_dtype)
    lnp_fun = lambda fill_value: lnp.full(shape, fill_value, dtype=out_dtype)
    # The only runtime argument is the scalar fill value.
    args_maker = lambda: [rng((), fill_value_dtype)]
    self._CheckAgainstNumpy(onp_fun, lnp_fun, args_maker, check_dtypes=True)
    self._CompileAndCheck(lnp_fun, args_maker, check_dtypes=True)
  @parameterized.named_parameters(jtu.cases_from_list(
      {"testcase_name": "_inshape={}_filldtype={}_outdtype={}".format(
          jtu.format_shape_dtype_string(shape, in_dtype),
          onp.dtype(fill_value_dtype).name,
          onp.dtype(out_dtype).name),
       "shape": shape, "in_dtype": in_dtype,
       "fill_value_dtype": fill_value_dtype, "out_dtype": out_dtype,
       "rng": jtu.rand_default()}
      for shape in array_shapes
      for in_dtype in default_dtypes
      for fill_value_dtype in default_dtypes
      for out_dtype in default_dtypes))
  def testFullLike(self, shape, in_dtype, fill_value_dtype, out_dtype, rng):
    """lnp.full_like matches onp.full_like across dtype combinations."""
    onp_fun = lambda x, fill_value: onp.full_like(x, fill_value, dtype=out_dtype)
    lnp_fun = lambda x, fill_value: lnp.full_like(x, fill_value, dtype=out_dtype)
    args_maker = lambda: [rng(shape, in_dtype), rng((), fill_value_dtype)]
    self._CheckAgainstNumpy(onp_fun, lnp_fun, args_maker, check_dtypes=True)
    self._CompileAndCheck(lnp_fun, args_maker, check_dtypes=True)
  @parameterized.named_parameters(jtu.cases_from_list(
      {"testcase_name": "_{}_axis={}_{}sections".format(
          jtu.format_shape_dtype_string(shape, dtype), axis, num_sections),
       "shape": shape, "num_sections": num_sections, "axis": axis,
       "dtype": dtype, "rng": jtu.rand_default()}
      for shape, axis, num_sections in [
          ((3,), 0, 3), ((12,), 0, 3), ((12, 4), 0, 4), ((12, 4), 1, 2),
          ((2, 3, 4), -1, 2), ((2, 3, 4), -2, 3)]
      for dtype in default_dtypes))
  def testSplitStaticInt(self, shape, num_sections, axis, dtype, rng):
    """lnp.split matches onp.split for static integer section counts."""
    onp_fun = lambda x: onp.split(x, num_sections, axis=axis)
    lnp_fun = lambda x: lnp.split(x, num_sections, axis=axis)
    args_maker = lambda: [rng(shape, dtype)]
    self._CheckAgainstNumpy(onp_fun, lnp_fun, args_maker, check_dtypes=True)
    self._CompileAndCheck(lnp_fun, args_maker, check_dtypes=True)
@parameterized.named_parameters(jtu.cases_from_list(
{"testcase_name": "_{}_axis={}_{}sections".format(
jtu.format_shape_dtype_string(shape, dtype), axis, num_sections),
"shape": shape, "num_sections": num_sections, "axis": axis,
"dtype": dtype, "rng": jtu.rand_default()}
for shape, axis, num_sections in [
((12, 4), 0, 4), ((12, 4), 1, 2),
((2, 3, 4), 2, 2), ((4, 3, 4), 0, 2)]
for dtype in default_dtypes))
def testHVDSplit(self, shape, num_sections, axis, dtype, rng):
def fn(module, axis):
if axis == 0:
return module.vsplit
elif axis == 1:
return module.hsplit
else:
assert axis == 2
return module.dsplit
onp_fun = lambda x: fn(onp, axis)(x, num_sections)
lnp_fun = lambda x: fn(lnp, axis)(x, num_sections)
args_maker = lambda: [rng(shape, dtype)]
self._CheckAgainstNumpy(onp_fun, lnp_fun, args_maker, check_dtypes=True)
self._CompileAndCheck(lnp_fun, args_maker, check_dtypes=True)
  @parameterized.named_parameters(jtu.cases_from_list(
      {"testcase_name": "_inshape={}_outshape={}_order={}".format(
          jtu.format_shape_dtype_string(arg_shape, dtype),
          jtu.format_shape_dtype_string(out_shape, dtype),
          order),
       "arg_shape": arg_shape, "out_shape": out_shape, "dtype": dtype,
       "order": order, "rng": jtu.rand_default()}
      for dtype in default_dtypes
      for order in ["C", "F"]
      for arg_shape, out_shape in [
          (jtu.NUMPY_SCALAR_SHAPE, (1, 1, 1)),
          ((), (1, 1, 1)),
          ((7, 0), (0, 42, 101)),
          ((3, 4), 12),
          ((3, 4), (12,)),
          ((3, 4), -1),
          ((2, 1, 4), (-1,)),
          ((2, 2, 4), (2, 8))
      ]))
  def testReshape(self, arg_shape, out_shape, dtype, order, rng):
    """lnp.reshape matches onp.reshape, including -1 dims and F order."""
    onp_fun = lambda x: onp.reshape(x, out_shape, order=order)
    lnp_fun = lambda x: lnp.reshape(x, out_shape, order=order)
    args_maker = lambda: [rng(arg_shape, dtype)]
    self._CheckAgainstNumpy(onp_fun, lnp_fun, args_maker, check_dtypes=True)
    self._CompileAndCheck(lnp_fun, args_maker, check_dtypes=True)
  @parameterized.named_parameters(jtu.cases_from_list(
      {"testcase_name": "_inshape={}_outshape={}".format(
          jtu.format_shape_dtype_string(arg_shape, dtype),
          jtu.format_shape_dtype_string(out_shape, dtype)),
       "arg_shape": arg_shape, "out_shape": out_shape, "dtype": dtype,
       "rng": jtu.rand_default()}
      for dtype in default_dtypes
      for arg_shape, out_shape in [
          ((7, 0), (0, 42, 101)),
          ((2, 1, 4), (-1,)),
          ((2, 2, 4), (2, 8))
      ]))
  def testReshapeMethod(self, arg_shape, out_shape, dtype, rng):
    """The .reshape(*dims) method form matches onp.reshape."""
    onp_fun = lambda x: onp.reshape(x, out_shape)
    # Exercise the varargs method spelling, x.reshape(a, b, ...).
    lnp_fun = lambda x: x.reshape(*out_shape)
    args_maker = lambda: [rng(arg_shape, dtype)]
    self._CheckAgainstNumpy(onp_fun, lnp_fun, args_maker, check_dtypes=True)
    self._CompileAndCheck(lnp_fun, args_maker, check_dtypes=True)
  @parameterized.named_parameters(jtu.cases_from_list(
      {"testcase_name": "_inshape={}_expanddim={}".format(
          jtu.format_shape_dtype_string(arg_shape, dtype), dim),
       "arg_shape": arg_shape, "dtype": dtype, "dim": dim,
       "rng": jtu.rand_default()}
      for arg_shape in [(), (3,), (3, 4)]
      for dtype in default_dtypes
      for dim in range(-len(arg_shape)+1, len(arg_shape))))
  def testExpandDimsStaticDim(self, arg_shape, dtype, dim, rng):
    """lnp.expand_dims matches onp.expand_dims for static axis values."""
    onp_fun = lambda x: onp.expand_dims(x, dim)
    lnp_fun = lambda x: lnp.expand_dims(x, dim)
    args_maker = lambda: [rng(arg_shape, dtype)]
    self._CheckAgainstNumpy(onp_fun, lnp_fun, args_maker, check_dtypes=True)
    self._CompileAndCheck(lnp_fun, args_maker, check_dtypes=True)
@parameterized.named_parameters(jtu.cases_from_list(
{"testcase_name": "_inshape={}_axes=({},{})".format(
jtu.format_shape_dtype_string(arg_shape, dtype), ax1, ax2),
"arg_shape": arg_shape, "dtype": dtype, "ax1": ax1, "ax2": ax2,
"rng": jtu.rand_default()}
for arg_shape, ax1, ax2 in [
((3, 4), 0, 1), ((3, 4), 1, 0), ((3, 4, 5), 1, 2),
((3, 4, 5), -1, -2), ((3, 4, 5), 0, 1)]
for dtype in default_dtypes))
def testSwapAxesStaticAxes(self, arg_shape, dtype, ax1, ax2, rng):
onp_fun = lambda x: onp.swapaxes(x, ax1, ax2)
lnp_fun = lambda x: lnp.swapaxes(x, ax1, ax2)
args_maker = lambda: [rng(arg_shape, dtype)]
self._CheckAgainstNumpy(onp_fun, lnp_fun, args_maker, check_dtypes=True)
self._CompileAndCheck(lnp_fun, args_maker, check_dtypes=True)
@parameterized.named_parameters(jtu.cases_from_list(
{"testcase_name": "_inshape={}_axis={}".format(
jtu.format_shape_dtype_string(arg_shape, dtype), ax),
"arg_shape": arg_shape, "dtype": dtype, "ax": ax,
"rng": jtu.rand_default()}
for arg_shape, ax in [
((3, 1), None),
((3, 1), 1),
((1, 3, 1), (0, 2)),
((1, 4, 1), (0,))]
for dtype in default_dtypes))
def testSqueeze(self, arg_shape, dtype, ax, rng):
onp_fun = lambda x: onp.squeeze(x, ax)
lnp_fun = lambda x: lnp.squeeze(x, ax)
args_maker = lambda: [rng(arg_shape, dtype)]
self._CheckAgainstNumpy(onp_fun, lnp_fun, args_maker, check_dtypes=True)
self._CompileAndCheck(lnp_fun, args_maker, check_dtypes=True)
@parameterized.named_parameters(jtu.cases_from_list(
{"testcase_name": "_shape={}_axis={}_weights={}_returned={}".format(
jtu.format_shape_dtype_string(shape, dtype),
axis,
(None if weights_shape == None else jtu.format_shape_dtype_string(weights_shape, dtype)),
returned),
"rng": jtu.rand_default(), "shape": shape, "dtype": dtype, "axis": axis,
"weights_shape": weights_shape, "returned": returned}
for shape in nonempty_shapes
for dtype in number_dtypes
for axis in set(range(-len(shape), len(shape))) | set([None])
# `weights_shape` is either `None`, same as the averaged axis, or same as
# that of the input
for weights_shape in ([None, shape] if axis is None else [None, (shape[axis],), shape])
for returned in [False, True]))
def testAverage(self, shape, dtype, axis, weights_shape, returned, rng):
onp_fun = lambda x, weights: onp.average(x, axis, weights, returned)
lnp_fun = lambda x, weights: lnp.average(x, axis, weights, returned)
args_maker = lambda: [rng(shape, dtype),
None if weights_shape is None else rng(weights_shape, dtype)]
try:
self._CheckAgainstNumpy(onp_fun, lnp_fun, args_maker, check_dtypes=True)
except ZeroDivisionError:
self.skipTest("don't support checking for ZeroDivisionError")
self._CompileAndCheck(lnp_fun, args_maker, check_dtypes=True)
@parameterized.named_parameters(jtu.cases_from_list(
{"testcase_name": "_arg{}".format(i), "arg": arg}
for i, arg in enumerate([
3., [1, 2, 3], [1., 2., 3.],
[[1, 2], [3, 4], [5, 6]], [[1, 2.], [3, 4], [5, 6]],
[[3, onp.array(2), 1], onp.arange(3.)],
])))
def testArray(self, arg):
args_maker = lambda: [arg]
self._CheckAgainstNumpy(onp.array, lnp.array, args_maker, check_dtypes=True)
self._CompileAndCheck(lnp.array, args_maker, check_dtypes=True)
def testIssue121(self):
assert not onp.isscalar(lnp.array(3))
def testArrayMethod(self):
class arraylike(object):
dtype = onp.float32
def __array__(self, dtype=None):
return 3.
a = arraylike()
ans = lnp.array(a)
assert ans == 3.
def testAllClose(self):
rng = onp.random.RandomState(0)
x = rng.randn(2, 2)
y = rng.randn(2)
def same(list1, list2):
allclose = functools.partial(lnp.allclose, atol=1e-3, rtol=1e-3)
elements_close = list(map(allclose, list1, list2))
return lnp.all(lnp.array(elements_close))
csame = api.jit(same)
a1 = same((x, y), (x, y))
a2 = csame((x, y), (x, y))
a3 = csame((x, y), (x, 2 * y))
self.assertTrue(a1)
self.assertTrue(a2)
self.assertFalse(a3)
@jtu.skip_on_devices("tpu") # TODO(mattjj): investigate this failure
def testOnesBroadcastingConstantHandler(self):
# TODO(mattjj): update this test for jax3
self.skipTest("test needs jax3 update")
def fun(x):
ones = lnp.ones((3, 4))
assert isinstance(ones, onp.ndarray) and ones.strides == (0, 0)
# To check that the constant handler generates a Broadcast for stride-zero
# arrays, we monkey-patch the client instance.
# TODO(mattjj): once we have better HLO dumping and inspecting facilities,
# we can check the HLO more directly.
c = x._node.c
Broadcast = c.Broadcast # pylint: disable=invalid-name
was_called = []
c.Broadcast = lambda *args: was_called.append(True) or Broadcast(*args)
out = x + ones # the ndarray constant handler should call Broadcast here
assert was_called, "Broadcast was not called."
return out
fun = api.jit(fun)
out_val = fun(lnp.ones(4))
self.assertAllClose(out_val, onp.full((3, 4), 2.), check_dtypes=False)
def testZeroStridesConstantHandler(self):
raw_const = onp.random.RandomState(0).randn(1, 2, 1, 1, 5, 1)
const = onp.broadcast_to(raw_const, (3, 2, 3, 4, 5, 6))
def fun(x):
return x * const
fun = api.jit(fun)
out_val = fun(3.)
self.assertAllClose(out_val, 3. * const, check_dtypes=False)
def testIsInstanceNdarrayDuringTracing(self):
arr = onp.ones(3)
@api.jit
def f(x):
self.assertIsInstance(x, lnp.ndarray)
return lnp.sum(x)
f(arr)
def testNonArrayErrorMessage(self):
x = [1., 2.]
y = onp.array([3., 4.])
def g(x, y):
return lnp.add(x, y)
def f(x, y):
return lnp.dot(x, y)
self.assertRaises(TypeError, lambda: g(x, y))
self.assertRaises(TypeError, lambda: f(x, y))
self.assertRaises(TypeError, lambda: api.jit(g)(x, y))
self.assertRaises(TypeError, lambda: api.jit(f)(x, y))
def testAbstractionErrorMessage(self):
@api.jit
def f(x, n):
for _ in range(n):
x = x * x
return x
self.assertRaises(TypeError, lambda: f(3., 3))
@api.jit
def g(x):
if x > 0.:
return x * 2
else:
return x + 2
self.assertRaises(TypeError, lambda: g(3.))
def testTracingPrimitiveWithNoTranslationErrorMessage(self):
# TODO(mattjj): update this for jax3
self.skipTest("test needs jax3 update")
foo = lnp._not_implemented(lambda x: x)
# No error if there's no tracing.
foo(onp.arange(3))
cfoo = api.jit(foo)
self.assertRaises(NotImplementedError, lambda: cfoo(onp.arange(3)))
@parameterized.named_parameters(jtu.cases_from_list(
{"testcase_name": "_{}_axis={}".format(
jtu.format_shape_dtype_string(shape, dtype), axis),
"rng": rng, "shape": shape, "dtype": dtype, "axis": axis}
for shape in [(3,), (2, 3)]
for dtype in default_dtypes
for axis in range(-len(shape), len(shape)) # Test negative axes
for rng in [jtu.rand_default()]))
def testFlip(self, shape, dtype, axis, rng):
args_maker = self._GetArgsMaker(rng, [shape], [dtype])
lnp_op = lambda x: lnp.flip(x, axis)
onp_op = lambda x: onp.flip(x, axis)
self._CheckAgainstNumpy(onp_op, lnp_op, args_maker, check_dtypes=True)
self._CompileAndCheck(lnp_op, args_maker, check_dtypes=True)
@parameterized.named_parameters(jtu.cases_from_list(
{"testcase_name": "_{}".format(
jtu.format_shape_dtype_string(shape, dtype)),
"rng": rng, "shape": shape, "dtype": dtype}
for shape in [(3,), (2, 3), (3, 2, 4)]
for dtype in default_dtypes
for rng in [jtu.rand_default()]))
def testFlipud(self, shape, dtype, rng):
args_maker = self._GetArgsMaker(rng, [shape], [dtype])
lnp_op = lambda x: lnp.flipud(x)
onp_op = lambda x: onp.flipud(x)
self._CheckAgainstNumpy(onp_op, lnp_op, args_maker, check_dtypes=True)
self._CompileAndCheck(lnp_op, args_maker, check_dtypes=True)
@parameterized.named_parameters(jtu.cases_from_list(
{"testcase_name": "_{}".format(
jtu.format_shape_dtype_string(shape, dtype)),
"rng": rng, "shape": shape, "dtype": dtype}
for shape in [(3, 2), (2, 3), (3, 2, 4)]
for dtype in default_dtypes
for rng in [jtu.rand_default()]))
def testFliplr(self, shape, dtype, rng):
args_maker = self._GetArgsMaker(rng, [shape], [dtype])
lnp_op = lambda x: lnp.fliplr(x)
onp_op = lambda x: onp.fliplr(x)
self._CheckAgainstNumpy(onp_op, lnp_op, args_maker, check_dtypes=True)
self._CompileAndCheck(lnp_op, args_maker, check_dtypes=True)
@parameterized.named_parameters(jtu.cases_from_list(
{"testcase_name": "_{}_k={}_axes={}".format(
jtu.format_shape_dtype_string(shape, dtype), k, axes),
"rng": rng, "shape": shape, "dtype": dtype, "k": k, "axes": axes}
for shape, axes in [
[(2, 3), (0, 1)],
[(2, 3), (1, 0)],
[(4, 3, 2), (0, 2)],
[(4, 3, 2), (2, 1)],
]
for k in range(-3, 4)
for dtype in default_dtypes
for rng in [jtu.rand_default()]))
def testRot90(self, shape, dtype, k, axes, rng):
args_maker = self._GetArgsMaker(rng, [shape], [dtype])
lnp_op = lambda x: lnp.rot90(x, k, axes)
onp_op = lambda x: onp.rot90(x, k, axes)
self._CheckAgainstNumpy(onp_op, lnp_op, args_maker, check_dtypes=True)
self._CompileAndCheck(lnp_op, args_maker, check_dtypes=True)
# TODO(mattjj): test infix operator overrides
def testRavel(self):
rng = onp.random.RandomState(0)
args_maker = lambda: [rng.randn(3, 4).astype("float32")]
self._CompileAndCheck(lambda x: x.ravel(), args_maker, check_dtypes=True)
def testAstype(self):
rng = onp.random.RandomState(0)
args_maker = lambda: [rng.randn(3, 4).astype("float32")]
op = lambda x: x.astype(lnp.int32)
self._CheckAgainstNumpy(op, op, args_maker, check_dtypes=True)
self._CompileAndCheck(op, args_maker, check_dtypes=True)
# TODO(mattjj): test other ndarray-like method overrides
def testOnpMean(self):
# from https://github.com/google/jax/issues/125
x = lax.add(lnp.eye(3), 0.)
ans = onp.mean(x)
self.assertAllClose(ans, onp.array(1./3), check_dtypes=False)
def testArangeOnFloats(self):
# from https://github.com/google/jax/issues/145
expected = onp.arange(0.0, 1.0, 0.1)
ans = lnp.arange(0.0, 1.0, 0.1)
self.assertAllClose(expected, ans, check_dtypes=True)
def testSortManually(self):
# manual tests for sort are nice because we don't have to worry about ties.
# lax.sort is tested combinatorially.
ans = lnp.sort(onp.array([16, 15, 23, 42, 8, 4]))
expected = onp.array([4, 8, 15, 16, 23, 42])
self.assertAllClose(expected, ans, check_dtypes=True)
a = onp.array([[1, 4], [3, 1]])
ans = lnp.sort(a, axis=None)
expected = onp.array([1, 1, 3, 4])
self.assertAllClose(expected, ans, check_dtypes=True)
a = onp.array([[1, 4], [3, 1]])
ans = lnp.sort(a) # last axis
expected = onp.array([[1, 4], [1, 3]])
self.assertAllClose(expected, ans, check_dtypes=True)
a = onp.array([[1, 4], [3, 1]])
ans = lnp.sort(a, axis=0)
expected = onp.array([[1, 1], [3, 4]])
self.assertAllClose(expected, ans, check_dtypes=True)
def testArgsortManually(self):
x = onp.array([16, 15, 23, 42, 8, 4])
ans = lnp.argsort(x)
expected = onp.argsort(x)
self.assertAllClose(expected, ans, check_dtypes=False)
x = onp.array([[16, 15, 23], [42, 8, 4]])
ans = lnp.argsort(x, axis=0)
expected = onp.argsort(x, axis=0)
self.assertAllClose(expected, ans, check_dtypes=False)
x = onp.array([[16, 15, 23], [42, 8, 4]])
ans = lnp.argsort(x, axis=1)
expected = onp.argsort(x, axis=1)
self.assertAllClose(expected, ans, check_dtypes=False)
x = onp.array([[16, 15, 23], [42, 8, 4]])
ans = lnp.argsort(x, axis=None)
expected = onp.argsort(x, axis=None)
self.assertAllClose(expected, ans, check_dtypes=False)
x = onp.array([[16, 15, 23], [42, 8, 4]])
ans = lnp.argsort(x)
expected = onp.argsort(x)
self.assertAllClose(expected, ans, check_dtypes=False)
@parameterized.named_parameters(jtu.cases_from_list(
{"testcase_name": "_{}_shifts={}_axis={}".format(
jtu.format_shape_dtype_string(shape, dtype),
shifts, axis),
"rng": rng, "shape": shape, "dtype": dtype, "shifts": shifts,
"axis": axis}
for dtype in all_dtypes
for shape in [(3, 4), (3, 4, 5), (7, 4, 0)]
for shifts, axis in [
(3, None),
(1, 1),
((3,), (0,)),
((-2,), (-2,)),
((1, 2), (0, -1))
]
for rng in [jtu.rand_default()]))
def testRoll(self, shape, dtype, shifts, axis, rng):
args_maker = lambda: [rng(shape, dtype)]
lnp_op = lambda x: lnp.roll(x, shifts, axis=axis)
onp_op = lambda x: onp.roll(x, shifts, axis=axis)
self._CheckAgainstNumpy(lnp_op, onp_op, args_maker, check_dtypes=True)
self._CompileAndCheck(lnp_op, args_maker, check_dtypes=True)
@parameterized.named_parameters(jtu.cases_from_list(
{"testcase_name": "_{}_index={}_axis={}_mode={}".format(
jtu.format_shape_dtype_string(shape, dtype),
jtu.format_shape_dtype_string(index_shape, index_dtype),
axis, mode),
"rng": rng, "rng_indices": rng_indices, "shape": shape,
"index_shape": index_shape, "dtype": dtype, "index_dtype": index_dtype,
"axis": axis, "mode": mode}
for shape in [(3,), (3, 4), (3, 4, 5)]
for index_shape in scalar_shapes + [(3,), (2, 1, 3)]
for axis in itertools.chain(range(-len(shape), len(shape)), [None])
for dtype in all_dtypes
for index_dtype in int_dtypes
for mode in ['wrap', 'clip']
for rng in [jtu.rand_default()]
for rng_indices in [jtu.rand_int(-5, 5)]))
def testTake(self, shape, dtype, index_shape, index_dtype, axis, mode, rng,
rng_indices):
def args_maker():
x = rng(shape, dtype)
i = rng_indices(index_shape, index_dtype)
return x, i
lnp_op = lambda x, i: lnp.take(x, i, axis=axis, mode=mode)
onp_op = lambda x, i: onp.take(x, i, axis=axis, mode=mode)
self._CheckAgainstNumpy(lnp_op, onp_op, args_maker, check_dtypes=True)
self._CompileAndCheck(lnp_op, args_maker, check_dtypes=True)
@parameterized.named_parameters(jtu.cases_from_list(
{"testcase_name": "_{}_axis={}".format(
jtu.format_shape_dtype_string(shape, dtype), axis),
"rng": rng, "shape": shape, "dtype": dtype, "axis": axis}
for shape in [(3,), (3, 4), (3, 4, 5)]
for axis in itertools.chain(range(len(shape)), [-1], [None])
for dtype in default_dtypes
for rng in [jtu.rand_default()]))
def testTakeAlongAxis(self, shape, dtype, axis, rng):
def args_maker():
x = rng(shape, dtype)
i = onp.argsort(x, axis=axis)
return x, i
lnp_op = lambda x, i: lnp.take_along_axis(x, i, axis=axis)
if hasattr(onp, "take_along_axis"):
onp_op = lambda x, i: onp.take_along_axis(x, i, axis=axis)
self._CheckAgainstNumpy(lnp_op, onp_op, args_maker, check_dtypes=True)
self._CompileAndCheck(lnp_op, args_maker, check_dtypes=True)
@parameterized.named_parameters(jtu.cases_from_list(
{"testcase_name": "_shape={}_n={}_increasing={}".format(
jtu.format_shape_dtype_string([shape], dtype),
n, increasing),
"dtype": dtype, "shape": shape, "n": n, "increasing": increasing,
"rng": jtu.rand_default()}
for dtype in inexact_dtypes
for shape in [0, 5]
for n in [2, 4]
for increasing in [False, True]))
def testVander(self, shape, dtype, n, increasing, rng):
onp_fun = lambda arg: onp.vander(arg, N=n, increasing=increasing)
lnp_fun = lambda arg: lnp.vander(arg, N=n, increasing=increasing)
args_maker = lambda: [rng([shape], dtype)]
# np.vander seems to return float64 for all floating types. We could obey
# those semantics, but they seem like a bug.
self._CheckAgainstNumpy(onp_fun, lnp_fun, args_maker, check_dtypes=False)
self._CompileAndCheck(lnp_fun, args_maker, check_dtypes=False)
@parameterized.named_parameters(jtu.cases_from_list(
{"testcase_name": jtu.format_test_name_suffix("nan_to_num", [shape],
[dtype]),
"rng": jtu.rand_some_inf_and_nan(), "shape": shape, "dtype": dtype}
for shape in all_shapes
for dtype in inexact_dtypes))
def testNanToNum(self, rng, shape, dtype):
dtype = onp.dtype(xla_bridge.canonicalize_dtype(dtype)).type
args_maker = lambda: [rng(shape, dtype)]
self._CheckAgainstNumpy(onp.nan_to_num, lnp.nan_to_num, args_maker,
check_dtypes=True)
self._CompileAndCheck(lnp.nan_to_num, args_maker, check_dtypes=True)
@parameterized.named_parameters(jtu.cases_from_list(
{"testcase_name": jtu.format_test_name_suffix("ix_", shapes, dtypes),
"rng": jtu.rand_default(), "shapes": shapes, "dtypes": dtypes}
for shapes, dtypes in (
((), ()),
(((7,),), (onp.float32,)),
(((3,), (4,)), (onp.float32, onp.int32)),
(((3,), (0,), (4,)), (onp.int32, onp.float32, onp.int32)),
)))
def testIx_(self, rng, shapes, dtypes):
args_maker = lambda: [rng(shape, dtype)
for shape, dtype in zip(shapes, dtypes)]
self._CheckAgainstNumpy(onp.ix_, lnp.ix_, args_maker,
check_dtypes=True)
self._CompileAndCheck(lnp.ix_, args_maker, check_dtypes=True)
def testIssue330(self):
x = lnp.full((1, 1), lnp.array([1])[0]) # doesn't crash
self.assertEqual(x[0, 0], 1)
  def testScalarDtypePromotion(self):
    """Documents the known jax/numpy divergence for Python-scalar promotion.

    Permanently skipped; the assertions after the raise are intentionally
    unreachable and kept only for reference.
    """
    # disabled this test after https://github.com/google/jax/issues/732
    msg = ("jax.numpy differs from numpy in promotion rules for Python scalars."
           " See https://github.com/google/jax/issues/732.")
    raise SkipTest(msg)
    orig_numpy_result = (1 + onp.eye(1, dtype=onp.float32)).dtype
    jax_numpy_result = (1 + lnp.eye(1, dtype=lnp.float32)).dtype
    self.assertEqual(orig_numpy_result, jax_numpy_result)
def testSymmetrizeDtypePromotion(self):
x = onp.eye(3, dtype=onp.float32)
orig_numpy_result = ((x + x.T) / 2).dtype
x = lnp.eye(3, dtype=lnp.float32)
jax_numpy_result = ((x + x.T) / 2).dtype
self.assertEqual(orig_numpy_result, jax_numpy_result)
def testIssue347(self):
# https://github.com/google/jax/issues/347
def test_fail(x):
x = lnp.sqrt(lnp.sum(x ** 2, axis=1))
ones = lnp.ones_like(x)
x = lnp.where(x > 0.5, x, ones)
return lnp.sum(x)
x = lnp.array([[1, 2], [3, 4], [0, 0]], dtype=lnp.float64)
result = api.grad(test_fail)(x)
assert not onp.any(onp.isnan(result))
def testIssue453(self):
# https://github.com/google/jax/issues/453
a = onp.arange(6) + 1
ans = lnp.reshape(a, (3, 2), order='F')
expected = onp.reshape(a, (3, 2), order='F')
self.assertAllClose(ans, expected, check_dtypes=True)
@parameterized.named_parameters(jtu.cases_from_list(
{"testcase_name": "_op={}_dtype={}".format(
op, {bool: "bool", int: "int", float: "float"}[dtype]),
"dtype": dtype, "op": op}
for dtype in [int, float, bool]
for op in ["atleast_1d", "atleast_2d", "atleast_3d"]))
def testAtLeastNdLiterals(self, dtype, op):
# Fixes: https://github.com/google/jax/issues/634
onp_fun = lambda arg: getattr(onp, op)(arg)
lnp_fun = lambda arg: getattr(lnp, op)(arg)
args_maker = lambda: [dtype(2)]
self._CheckAgainstNumpy(onp_fun, lnp_fun, args_maker, check_dtypes=True)
self._CompileAndCheck(lnp_fun, args_maker, check_dtypes=True)
def testLongLong(self):
self.assertAllClose(onp.int64(7), api.jit(lambda x: x)(onp.longlong(7)),
check_dtypes=True)
def testArange(self):
# test cases inspired by dask tests at
# https://github.com/dask/dask/blob/master/dask/array/tests/test_creation.py#L92
self.assertAllClose(lnp.arange(77),
onp.arange(77), check_dtypes=True)
self.assertAllClose(lnp.arange(2, 13),
onp.arange(2, 13), check_dtypes=True)
self.assertAllClose(lnp.arange(4, 21, 9),
onp.arange(4, 21, 9), check_dtypes=True)
self.assertAllClose(lnp.arange(53, 5, -3),
onp.arange(53, 5, -3), check_dtypes=True)
# TODO(mattjj): make these tests work when jax_enable_x64=True
# self.assertAllClose(lnp.arange(77, dtype=float),
# onp.arange(77, dtype=float), check_dtypes=True)
# self.assertAllClose(lnp.arange(2, 13, dtype=int),
# onp.arange(2, 13, dtype=int), check_dtypes=True)
self.assertAllClose(lnp.arange(0, 1, -0.5),
onp.arange(0, 1, -0.5), check_dtypes=True)
self.assertRaises(TypeError, lambda: lnp.arange())
# test that lnp.arange(N) doesn't instantiate an ndarray
self.assertFalse(type(lnp.arange(77)) == type(onp.arange(77)))
self.assertTrue(type(lnp.arange(77)) == type(lax.iota(onp.int32, 77)))
def testIssue830(self):
a = lnp.arange(4, dtype=lnp.complex64)
self.assertEqual(a.dtype, lnp.complex64)
def testIssue728(self):
assert lnp.allclose(lnp.eye(5000), onp.eye(5000))
self.assertEqual(0, onp.sum(lnp.eye(1050) - onp.eye(1050)))
def testIssue746(self):
lnp.arange(12).reshape(3, 4) # doesn't crash
def testIssue764(self):
x = lnp.linspace(190, 200, 4)
f = api.grad(lambda x: lnp.sum(lnp.tanh(x)))
# Expected values computed with autograd in float64 precision.
expected = onp.array([3.71669453e-165, 4.72999108e-168, 6.01954653e-171,
7.66067839e-174], onp.float64)
self.assertAllClose(f(x), expected, check_dtypes=False)
def testIssue776(self):
"""Tests that the scatter-add transpose rule instantiates symbolic zeros."""
def f(u):
y = jax.ops.index_add(onp.ones(10,), [2, 4, 5], u)
# The transpose rule for lax.tie_in returns a symbolic zero for its first
# argument.
return lax.tie_in(y, 7.)
self.assertAllClose(onp.zeros(3,), api.grad(f)(onp.ones(3,)),
check_dtypes=True)
@parameterized.named_parameters(
jtu.cases_from_list(
{"testcase_name": jtu.format_test_name_suffix(op, [()], [dtype]),
"dtype": dtype, "op": op}
for dtype in float_dtypes
for op in ("sqrt", "arccos", "arcsin", "arctan", "sin", "cos", "tan",
"sinh", "cosh", "tanh", "arccosh", "arcsinh", "arctanh", "exp",
"log", "expm1", "log1p")))
def testMathSpecialFloatValues(self, op, dtype):
onp_op = getattr(onp, op)
lnp_op = getattr(lnp, op)
dtype = onp.dtype(xla_bridge.canonicalize_dtype(dtype)).type
for x in (onp.nan, -onp.inf, -100., -2. -1., 0., 1., 2., 100., onp.inf,
onp.finfo(dtype).max, onp.sqrt(onp.finfo(dtype).max),
onp.sqrt(onp.finfo(dtype).max) * 2.):
if onp.isnan(x) and op in ("cosh", "expm1", "exp"):
# TODO(b/133842876, b/133842870): these return wrong outputs on CPU for
# NaN inputs.
continue
if (op in ("sin", "cos", "tan", "arctan") and FLAGS.jax_test_dut and
FLAGS.jax_test_dut.startswith("tpu")):
continue # TODO(b/132196789, b/134175194): fix and reenable.
x = dtype(x)
expected = onp_op(x)
actual = lnp_op(x)
self.assertAllClose(expected, actual, check_dtypes=True)
def testIssue883(self):
# from https://github.com/google/jax/issues/883
@partial(api.jit, static_argnums=(1,))
def f(x, v):
return x
x = lnp.ones((10, 10))
v = lnp.array([1, 2, 3])
first_call = f(x, v)
second_call = f(x, v) # doesn't crash
def testReductionOfOutOfBoundsAxis(self): # Issue 888
x = lnp.ones((3, 4))
self.assertRaises(ValueError, lambda: lnp.sum(x, axis=2))
# Standard absltest entry point so this test file can be run directly.
if __name__ == "__main__":
  absltest.main()
| 46.589884 | 100 | 0.651895 |
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import collections
import functools
from functools import partial
import itertools
import operator
import unittest
from unittest import SkipTest
from absl.testing import absltest
from absl.testing import parameterized
import six
import numpy as onp
import jax.ops
from jax import api
from jax import lax
from jax import numpy as lnp
from jax import test_util as jtu
from jax.lib import xla_bridge
from jax.config import config
config.parse_flags_with_absl()
FLAGS = config.FLAGS
# Shape vocabularies shared by the parameterized tests below.  The jtu
# sentinels stand in for "pass a bare numpy scalar / Python scalar" cases.
nonempty_nonscalar_array_shapes = [(4,), (3, 4), (3, 1), (1, 4), (2, 1, 4), (2, 3, 4)]
nonempty_array_shapes = [()] + nonempty_nonscalar_array_shapes
empty_array_shapes = [(0,), (0, 4), (3, 0),]
scalar_shapes = [jtu.NUMPY_SCALAR_SHAPE, jtu.PYTHON_SCALAR_SHAPE]
array_shapes = nonempty_array_shapes + empty_array_shapes
nonzerodim_shapes = nonempty_nonscalar_array_shapes + empty_array_shapes
nonempty_shapes = scalar_shapes + nonempty_array_shapes
all_shapes = scalar_shapes + array_shapes
# Dtype vocabularies grouped by numeric kind; the aggregates mirror numpy's
# kind hierarchy (inexact, number, ...).
float_dtypes = [onp.float32, onp.float64]
complex_dtypes = [onp.complex64, onp.complex128]
int_dtypes = [onp.int32, onp.int64]
unsigned_dtypes = [onp.uint32, onp.uint64]
bool_dtypes = [onp.bool_]
default_dtypes = float_dtypes + int_dtypes
inexact_dtypes = float_dtypes + complex_dtypes
number_dtypes = float_dtypes + complex_dtypes + int_dtypes
all_dtypes = number_dtypes + bool_dtypes
# Record describing one numpy-vs-jax.numpy op test configuration.
OpRecord = collections.namedtuple(
  "OpRecord",
  ["name", "nargs", "dtypes", "shapes", "rng", "diff_modes", "test_name",
   "check_dtypes"])

def op_record(name, nargs, dtypes, shapes, rng, diff_modes, test_name=None,
              check_dtypes=True):
  """Convenience constructor for OpRecord; test_name defaults to `name`."""
  return OpRecord(name=name, nargs=nargs, dtypes=dtypes, shapes=shapes,
                  rng=rng, diff_modes=diff_modes,
                  test_name=test_name or name, check_dtypes=check_dtypes)
JAX_ONE_TO_ONE_OP_RECORDS = [
op_record("abs", 1, number_dtypes, all_shapes, jtu.rand_default(), ["rev"]),
op_record("add", 2, number_dtypes, all_shapes, jtu.rand_default(), ["rev"]),
op_record("ceil", 1, float_dtypes, all_shapes, jtu.rand_default(), []),
op_record("conj", 1, number_dtypes, all_shapes, jtu.rand_default(), ["rev"]),
op_record("equal", 2, all_dtypes, all_shapes, jtu.rand_some_equal(), []),
op_record("exp", 1, number_dtypes, all_shapes, jtu.rand_default(), ["rev"]),
op_record("fabs", 1, float_dtypes, all_shapes, jtu.rand_default(), ["rev"]),
op_record("float_power", 2, inexact_dtypes, all_shapes, jtu.rand_default(), ["rev"]),
op_record("floor", 1, float_dtypes, all_shapes, jtu.rand_default(), []),
op_record("greater", 2, number_dtypes, all_shapes, jtu.rand_some_equal(), []),
op_record("greater_equal", 2, number_dtypes, all_shapes, jtu.rand_some_equal(), []),
op_record("less", 2, number_dtypes, all_shapes, jtu.rand_some_equal(), []),
op_record("less_equal", 2, number_dtypes, all_shapes, jtu.rand_some_equal(), []),
op_record("log", 1, number_dtypes, all_shapes, jtu.rand_positive(), ["rev"]),
op_record("logical_and", 2, all_dtypes, all_shapes, jtu.rand_bool(), []),
op_record("logical_not", 1, all_dtypes, all_shapes, jtu.rand_bool(), []),
op_record("logical_or", 2, all_dtypes, all_shapes, jtu.rand_bool(), []),
op_record("logical_xor", 2, all_dtypes, all_shapes, jtu.rand_bool(), []),
op_record("maximum", 2, number_dtypes, all_shapes, jtu.rand_some_inf(), []),
op_record("minimum", 2, number_dtypes, all_shapes, jtu.rand_some_inf(), []),
op_record("multiply", 2, number_dtypes, all_shapes, jtu.rand_default(), ["rev"]),
op_record("negative", 1, number_dtypes, all_shapes, jtu.rand_default(), ["rev"]),
op_record("not_equal", 2, number_dtypes, all_shapes, jtu.rand_some_equal(), ["rev"]),
op_record("array_equal", 2, number_dtypes, all_shapes, jtu.rand_some_equal(), ["rev"]),
op_record("reciprocal", 1, inexact_dtypes, all_shapes, jtu.rand_default(), []),
op_record("subtract", 2, number_dtypes, all_shapes, jtu.rand_default(), ["rev"]),
op_record("sin", 1, number_dtypes, all_shapes, jtu.rand_default(), ["rev"]),
op_record("cos", 1, number_dtypes, all_shapes, jtu.rand_default(), ["rev"]),
op_record("tan", 1, number_dtypes, all_shapes, jtu.rand_uniform(-1.5, 1.5),
["rev"]),
op_record("sinh", 1, number_dtypes, all_shapes, jtu.rand_default(), ["rev"]),
op_record("cosh", 1, number_dtypes, all_shapes, jtu.rand_default(), ["rev"]),
op_record("tanh", 1, number_dtypes, all_shapes, jtu.rand_default(), ["rev"]),
op_record("arcsin", 1, float_dtypes, all_shapes, jtu.rand_small(), ["rev"]),
op_record("arccos", 1, float_dtypes, all_shapes, jtu.rand_small(), ["rev"]),
op_record("arctan", 1, float_dtypes, all_shapes, jtu.rand_small(), ["rev"]),
op_record("arctan2", 2, float_dtypes, all_shapes, jtu.rand_small(), ["rev"]),
op_record("arcsinh", 1, number_dtypes, all_shapes, jtu.rand_positive(), ["rev"]),
op_record("arccosh", 1, number_dtypes, all_shapes, jtu.rand_positive(), ["rev"]),
op_record("arctanh", 1, number_dtypes, all_shapes, jtu.rand_small(), ["rev"]),
]
JAX_COMPOUND_OP_RECORDS = [
op_record("angle", 1, number_dtypes, all_shapes, jtu.rand_default(), [],
check_dtypes=False),
op_record("atleast_1d", 1, default_dtypes, all_shapes, jtu.rand_default(), []),
op_record("atleast_2d", 1, default_dtypes, all_shapes, jtu.rand_default(), []),
op_record("atleast_3d", 1, default_dtypes, all_shapes, jtu.rand_default(), []),
op_record("cbrt", 1, default_dtypes, all_shapes, jtu.rand_default(), ["rev"]),
op_record("conjugate", 1, number_dtypes, all_shapes, jtu.rand_default(), ["rev"]),
op_record("deg2rad", 1, float_dtypes, all_shapes, jtu.rand_default(), []),
op_record("divide", 2, number_dtypes, all_shapes, jtu.rand_nonzero(), ["rev"]),
op_record("exp2", 1, number_dtypes, all_shapes, jtu.rand_default(), ["rev"]),
op_record("expm1", 1, number_dtypes, all_shapes, jtu.rand_positive(), [],
test_name="expm1_large"),
op_record("expm1", 1, number_dtypes, all_shapes, jtu.rand_small_positive(), []),
op_record("fix", 1, float_dtypes, all_shapes, jtu.rand_default(), []),
op_record("floor_divide", 2, number_dtypes, all_shapes, jtu.rand_nonzero(), ["rev"]),
op_record("heaviside", 2, default_dtypes, all_shapes, jtu.rand_default(), []),
op_record("hypot", 2, default_dtypes, all_shapes, jtu.rand_default(), []),
op_record("kron", 2, number_dtypes, nonempty_shapes, jtu.rand_default(), []),
op_record("outer", 2, number_dtypes, all_shapes, jtu.rand_default(), []),
op_record("imag", 1, number_dtypes, all_shapes, jtu.rand_some_inf(), []),
op_record("iscomplex", 1, number_dtypes, all_shapes, jtu.rand_some_inf(), []),
op_record("isfinite", 1, inexact_dtypes, all_shapes, jtu.rand_some_inf_and_nan(), []),
op_record("isinf", 1, inexact_dtypes, all_shapes, jtu.rand_some_inf_and_nan(), []),
op_record("isnan", 1, inexact_dtypes, all_shapes, jtu.rand_some_inf_and_nan(), []),
op_record("isneginf", 1, float_dtypes, all_shapes, jtu.rand_some_inf_and_nan(), []),
op_record("isposinf", 1, float_dtypes, all_shapes, jtu.rand_some_inf_and_nan(), []),
op_record("isreal", 1, number_dtypes, all_shapes, jtu.rand_some_inf(), []),
op_record("isrealobj", 1, number_dtypes, all_shapes, jtu.rand_some_inf(), []),
op_record("log2", 1, number_dtypes, all_shapes, jtu.rand_positive(), ["rev"]),
op_record("log10", 1, number_dtypes, all_shapes, jtu.rand_positive(), ["rev"]),
op_record("log1p", 1, number_dtypes, all_shapes, jtu.rand_positive(), [],
test_name="log1p_large"),
op_record("log1p", 1, number_dtypes, all_shapes, jtu.rand_small_positive(), []),
op_record("logaddexp", 2, float_dtypes, all_shapes, jtu.rand_default(), ["rev"]),
op_record("logaddexp2", 2, float_dtypes, all_shapes, jtu.rand_default(), ["rev"]),
op_record("polyval", 2, number_dtypes, nonempty_nonscalar_array_shapes, jtu.rand_default(), []),
op_record("positive", 1, number_dtypes, all_shapes, jtu.rand_default(), ["rev"]),
op_record("power", 2, number_dtypes, all_shapes, jtu.rand_positive(), ["rev"]),
op_record("rad2deg", 1, float_dtypes, all_shapes, jtu.rand_default(), []),
op_record("ravel", 1, all_dtypes, all_shapes, jtu.rand_default(), ["rev"]),
op_record("real", 1, number_dtypes, all_shapes, jtu.rand_some_inf(), []),
op_record("remainder", 2, default_dtypes, all_shapes, jtu.rand_nonzero(), []),
op_record("mod", 2, default_dtypes, all_shapes, jtu.rand_nonzero(), []),
op_record("sinc", 1, number_dtypes, all_shapes, jtu.rand_default(), ["rev"]),
op_record("square", 1, number_dtypes, all_shapes, jtu.rand_default(), ["rev"]),
op_record("sqrt", 1, number_dtypes, all_shapes, jtu.rand_positive(), ["rev"]),
op_record("transpose", 1, all_dtypes, all_shapes, jtu.rand_default(), ["rev"]),
op_record("true_divide", 2, all_dtypes, all_shapes, jtu.rand_nonzero(), ["rev"]),
op_record("where", 3, (onp.float32, onp.int64), all_shapes, jtu.rand_some_zero(), []),
op_record("diff", 1, number_dtypes, nonzerodim_shapes, jtu.rand_default(), ["rev"]),
]
JAX_BITWISE_OP_RECORDS = [
op_record("bitwise_and", 2, int_dtypes + unsigned_dtypes, all_shapes,
jtu.rand_bool(), []),
op_record("bitwise_not", 1, int_dtypes + unsigned_dtypes, all_shapes,
jtu.rand_bool(), []),
op_record("bitwise_or", 2, int_dtypes + unsigned_dtypes, all_shapes,
jtu.rand_bool(), []),
op_record("bitwise_xor", 2, int_dtypes + unsigned_dtypes, all_shapes,
jtu.rand_bool(), []),
]
JAX_REDUCER_RECORDS = [
op_record("mean", 1, number_dtypes, nonempty_shapes, jtu.rand_default(), []),
op_record("prod", 1, number_dtypes, all_shapes, jtu.rand_small_positive(), []),
op_record("sum", 1, number_dtypes, all_shapes, jtu.rand_default(), []),
op_record("var", 1, number_dtypes, nonempty_shapes, jtu.rand_default(), []),
op_record("std", 1, inexact_dtypes, nonempty_shapes, jtu.rand_default(), []),
]
JAX_REDUCER_NO_DTYPE_RECORDS = [
op_record("all", 1, all_dtypes, all_shapes, jtu.rand_some_zero(), []),
op_record("any", 1, all_dtypes, all_shapes, jtu.rand_some_zero(), []),
op_record("max", 1, all_dtypes, nonempty_shapes, jtu.rand_default(), []),
op_record("min", 1, all_dtypes, nonempty_shapes, jtu.rand_default(), []),
]
JAX_ARGMINMAX_RECORDS = [
op_record("argmin", 1, all_dtypes, nonempty_shapes, jtu.rand_some_equal(), []),
op_record("argmax", 1, all_dtypes, nonempty_shapes, jtu.rand_some_equal(), []),
]
JAX_OPERATOR_OVERLOADS = [
op_record("__add__", 2, number_dtypes, all_shapes, jtu.rand_default(), []),
op_record("__sub__", 2, number_dtypes, all_shapes, jtu.rand_default(), []),
op_record("__mul__", 2, number_dtypes, all_shapes, jtu.rand_default(), []),
op_record("__eq__", 2, number_dtypes, all_shapes, jtu.rand_default(), []),
op_record("__ne__", 2, number_dtypes, all_shapes, jtu.rand_default(), []),
op_record("__lt__", 2, default_dtypes, all_shapes, jtu.rand_default(), []),
op_record("__gt__", 2, default_dtypes, all_shapes, jtu.rand_default(), []),
op_record("__ge__", 2, default_dtypes, all_shapes, jtu.rand_default(), []),
op_record("__neg__", 1, number_dtypes, all_shapes, jtu.rand_default(), []),
op_record("__pow__", 2, inexact_dtypes, all_shapes, jtu.rand_positive(), []),
op_record("__mod__", 2, default_dtypes, all_shapes, jtu.rand_nonzero(), []),
op_record("__floordiv__", 2, default_dtypes, all_shapes, jtu.rand_nonzero(), []),
op_record("__truediv__", 2, number_dtypes, all_shapes, jtu.rand_nonzero(), []),
op_record("__abs__", 1, number_dtypes, all_shapes, jtu.rand_default(), []),
op_record("__invert__", 1, int_dtypes, all_shapes, jtu.rand_default(), []),
]
JAX_RIGHT_OPERATOR_OVERLOADS = [
op_record("__radd__", 2, number_dtypes, all_shapes, jtu.rand_default(), []),
op_record("__rsub__", 2, number_dtypes, all_shapes, jtu.rand_default(), []),
op_record("__rmul__", 2, number_dtypes, all_shapes, jtu.rand_default(), []),
op_record("__rpow__", 2, inexact_dtypes, all_shapes, jtu.rand_positive(), []),
op_record("__rmod__", 2, default_dtypes, all_shapes, jtu.rand_nonzero(), []),
op_record("__rfloordiv__", 2, default_dtypes, all_shapes, jtu.rand_nonzero(), []),
op_record("__rtruediv__", 2, number_dtypes, all_shapes, jtu.rand_nonzero(), []),
]
numpy_version = tuple(map(int, onp.version.version.split('.')))
if numpy_version >= (1, 15):
JAX_COMPOUND_OP_RECORDS += [
op_record("isclose", 2, all_dtypes, all_shapes, jtu.rand_small_positive(), []),
op_record("gcd", 2, int_dtypes, all_shapes, jtu.rand_default(), []),
op_record("lcm", 2, int_dtypes, all_shapes, jtu.rand_default(), []),
]
JAX_REDUCER_NO_DTYPE_RECORDS += [
op_record("ptp", 1, number_dtypes, nonempty_shapes, jtu.rand_default(), []),
]
if six.PY2:
JAX_OPERATOR_OVERLOADS += [
op_record("__div__", 2, number_dtypes, all_shapes, jtu.rand_nonzero(), []),
]
JAX_RIGHT_OPERATOR_OVERLOADS += [
op_record("__rdiv__", 2, number_dtypes, all_shapes, jtu.rand_nonzero(), []),
]
# Shorthand: unordered selections (with repetition) of shapes/dtypes for an
# op's arity.
CombosWithReplacement = itertools.combinations_with_replacement
def _dtypes_are_compatible_for_bitwise_ops(args):
  """Return True iff the integer dtypes in `args` may be combined bitwise.

  Zero or one dtype is trivially compatible. A pair is compatible when both
  share signedness, both are 32-bit, or the pair is a 32-bit value widened
  into a signed 64-bit one.
  """
  if len(args) <= 1:
    return True
  def signed(dtype):
    return onp.issubdtype(dtype, onp.signedinteger)
  def bits(dtype):
    return onp.iinfo(dtype).bits
  # Order the pair narrow-to-wide; sorted() is stable, so equal widths keep
  # their original order (matching the original conditional swap).
  narrow, wide = sorted(args, key=bits)
  if signed(narrow) == signed(wide):
    return True
  if bits(narrow) == 32 and bits(wide) == 32:
    return True
  return bits(narrow) == 32 and bits(wide) == 64 and signed(wide)
def _shapes_are_broadcast_compatible(shapes):
  """Return True iff all of `shapes` broadcast together under NumPy rules."""
  probe = onp.zeros([])
  try:
    # Folding zero arrays together raises ValueError on the first
    # incompatible shape; success means the whole set broadcasts.
    for shape in shapes:
      probe = probe + onp.zeros(shape)
  except ValueError:
    return False
  return True
class LaxBackedNumpyTests(jtu.JaxTestCase):
def _GetArgsMaker(self, rng, shapes, dtypes):
return lambda: [rng(shape, dtype) for shape, dtype in zip(shapes, dtypes)]
  # Core elementwise coverage: every one-to-one and compound lnp op is checked
  # against its onp namesake over all broadcast-compatible shape/dtype combos.
  @parameterized.named_parameters(itertools.chain.from_iterable(
      jtu.cases_from_list(
        {"testcase_name": jtu.format_test_name_suffix(rec.test_name, shapes,
                                                      dtypes),
         "rng": rec.rng, "shapes": shapes, "dtypes": dtypes,
         "onp_op": getattr(onp, rec.name), "lnp_op": getattr(lnp, rec.name),
         "check_dtypes": rec.check_dtypes}
        for shapes in filter(
          _shapes_are_broadcast_compatible,
          CombosWithReplacement(rec.shapes, rec.nargs))
        for dtypes in CombosWithReplacement(rec.dtypes, rec.nargs))
      for rec in itertools.chain(JAX_ONE_TO_ONE_OP_RECORDS,
                                 JAX_COMPOUND_OP_RECORDS)))
  def testOp(self, onp_op, lnp_op, rng, shapes, dtypes, check_dtypes):
    args_maker = self._GetArgsMaker(rng, shapes, dtypes)
    # Exact-dtype agreement with NumPy is not enforced when an argument is a
    # bare Python scalar rather than an array.
    py_scalar_arg = jtu.PYTHON_SCALAR_SHAPE in shapes
    self._CheckAgainstNumpy(onp_op, lnp_op, args_maker,
                            check_dtypes=check_dtypes and not py_scalar_arg)
    self._CompileAndCheck(lnp_op, args_maker, check_dtypes=check_dtypes)
  # Forward dunder operators (__add__, __lt__, ...) exercised through the
  # `operator` module rather than by calling the method directly.
  @parameterized.named_parameters(itertools.chain.from_iterable(
      jtu.cases_from_list(
        {"testcase_name": jtu.format_test_name_suffix(rec.test_name, shapes,
                                                      dtypes),
         "rng": rec.rng, "shapes": shapes, "dtypes": dtypes, "name": rec.name}
        for shapes in filter(
          _shapes_are_broadcast_compatible,
          CombosWithReplacement(rec.shapes, rec.nargs))
        for dtypes in CombosWithReplacement(rec.dtypes, rec.nargs))
      for rec in JAX_OPERATOR_OVERLOADS))
  def testOperatorOverload(self, name, rng, shapes, dtypes):
    args_maker = self._GetArgsMaker(rng, shapes, dtypes)
    # strip('_') maps e.g. "__add__" -> "add", i.e. the operator-module name.
    fun = lambda *xs: getattr(operator, name.strip('_'))(*xs)
    self._CompileAndCheck(fun, args_maker,
                          check_dtypes=jtu.PYTHON_SCALAR_SHAPE not in shapes)
  # Reflected dunder operators (__radd__, ...): the method is looked up on the
  # second operand and handed the first.
  @parameterized.named_parameters(itertools.chain.from_iterable(
      jtu.cases_from_list(
        {"testcase_name": jtu.format_test_name_suffix(rec.test_name, shapes,
                                                      dtypes),
         "rng": rec.rng, "shapes": shapes, "dtypes": dtypes, "name": rec.name}
        for shapes in filter(
          _shapes_are_broadcast_compatible,
          CombosWithReplacement(rec.shapes, rec.nargs))
        for dtypes in CombosWithReplacement(rec.dtypes, rec.nargs))
      for rec in JAX_RIGHT_OPERATOR_OVERLOADS))
  def testRightOperatorOverload(self, name, rng, shapes, dtypes):
    # A bare Python scalar has no __r*__ method to dispatch to; skip.
    if shapes[1] is jtu.PYTHON_SCALAR_SHAPE:
      raise SkipTest()
    args_maker = self._GetArgsMaker(rng, shapes, dtypes)
    fun = lambda fst, snd: getattr(snd, name)(fst)
    self._CompileAndCheck(fun, args_maker,
                          check_dtypes=jtu.PYTHON_SCALAR_SHAPE not in shapes)
  # Bitwise ops, restricted to dtype pairs whose promotion is well defined
  # (see _dtypes_are_compatible_for_bitwise_ops).
  @parameterized.named_parameters(itertools.chain.from_iterable(
      jtu.cases_from_list(
        {"testcase_name": jtu.format_test_name_suffix(
            rec.test_name, shapes, dtypes),
         "rng": rec.rng, "shapes": shapes, "dtypes": dtypes,
         "onp_op": getattr(onp, rec.name), "lnp_op": getattr(lnp, rec.name)}
        for shapes in filter(
          _shapes_are_broadcast_compatible,
          CombosWithReplacement(rec.shapes, rec.nargs))
        for dtypes in filter(
          _dtypes_are_compatible_for_bitwise_ops,
          CombosWithReplacement(rec.dtypes, rec.nargs)))
      for rec in JAX_BITWISE_OP_RECORDS))
  def testBitwiseOp(self, onp_op, lnp_op, rng, shapes, dtypes):
    # 64-bit integer inputs are meaningless when x64 support is disabled.
    if not FLAGS.jax_enable_x64 and any(
        onp.iinfo(dtype).bits == 64 for dtype in dtypes):
      self.skipTest("x64 types are disabled by jax_enable_x64")
    args_maker = self._GetArgsMaker(rng, shapes, dtypes)
    self._CheckAgainstNumpy(onp_op, lnp_op, args_maker,
                            check_dtypes=jtu.PYTHON_SCALAR_SHAPE not in shapes)
    self._CompileAndCheck(lnp_op, args_maker, check_dtypes=True)
  # Reducers that accept an output dtype: sweep axis, out_dtype and keepdims,
  # comparing against the NumPy reference op.
  @parameterized.named_parameters(jtu.cases_from_list(
      {"testcase_name": "{}_inshape={}_axis={}_dtype={}_keepdims={}".format(
          rec.test_name.capitalize(),
          jtu.format_shape_dtype_string(shape, dtype), axis,
          "None" if out_dtype is None else onp.dtype(out_dtype).name, keepdims),
       "rng": rec.rng, "shape": shape, "dtype": dtype, "out_dtype": out_dtype,
       "onp_op": getattr(onp, rec.name), "lnp_op": getattr(lnp, rec.name),
       "axis": axis, "keepdims": keepdims}
      for rec in JAX_REDUCER_RECORDS
      for shape in rec.shapes for dtype in rec.dtypes
      for out_dtype in [None] + rec.dtypes
      for axis in set(range(-len(shape), len(shape))) | set([None])
      for keepdims in [False, True]))
  def testReducer(self, onp_op, lnp_op, rng, shape, dtype, out_dtype, axis, keepdims):
    onp_fun = lambda x: onp_op(x, axis, dtype=out_dtype, keepdims=keepdims)
    lnp_fun = lambda x: lnp_op(x, axis, dtype=out_dtype, keepdims=keepdims)
    args_maker = lambda: [rng(shape, dtype)]
    self._CheckAgainstNumpy(onp_fun, lnp_fun, args_maker, check_dtypes=True)
    self._CompileAndCheck(lnp_fun, args_maker, check_dtypes=True)
  # Reducers without a dtype parameter (e.g. those in
  # JAX_REDUCER_NO_DTYPE_RECORDS): sweep axis and keepdims only.
  @parameterized.named_parameters(jtu.cases_from_list(
      {"testcase_name": "{}_inshape={}_axis={}_keepdims={}".format(
          rec.test_name.capitalize(),
          jtu.format_shape_dtype_string(shape, dtype), axis, keepdims),
       "rng": rec.rng, "shape": shape, "dtype": dtype,
       "onp_op": getattr(onp, rec.name), "lnp_op": getattr(lnp, rec.name),
       "axis": axis, "keepdims": keepdims}
      for rec in JAX_REDUCER_NO_DTYPE_RECORDS
      for shape in rec.shapes for dtype in rec.dtypes
      for axis in set(range(-len(shape), len(shape))) | set([None])
      for keepdims in [False, True]))
  def testReducerNoDtype(self, onp_op, lnp_op, rng, shape, dtype, axis, keepdims):
    onp_fun = lambda x: onp_op(x, axis, keepdims=keepdims)
    lnp_fun = lambda x: lnp_op(x, axis, keepdims=keepdims)
    args_maker = lambda: [rng(shape, dtype)]
    self._CheckAgainstNumpy(onp_fun, lnp_fun, args_maker, check_dtypes=True)
    self._CompileAndCheck(lnp_fun, args_maker, check_dtypes=True)
  # count_nonzero across all shapes/dtypes/axes; inputs deliberately include
  # zeros (rand_some_zero) so the count is non-trivial.
  @parameterized.named_parameters(jtu.cases_from_list(
      {"testcase_name": "_shape={}_axis={}".format(
          jtu.format_shape_dtype_string(shape, dtype), axis),
       "shape": shape, "dtype": dtype, "axis": axis}
      for shape in all_shapes for dtype in all_dtypes
      for axis in set(range(-len(shape), len(shape))) | set([None])))
  def testCountNonzero(self, shape, dtype, axis):
    rng = jtu.rand_some_zero()
    onp_fun = lambda x: onp.count_nonzero(x, axis)
    lnp_fun = lambda x: lnp.count_nonzero(x, axis)
    args_maker = lambda: [rng(shape, dtype)]
    self._CheckAgainstNumpy(onp_fun, lnp_fun, args_maker, check_dtypes=True)
    self._CompileAndCheck(lnp_fun, args_maker, check_dtypes=True)
  # argmin/argmax-style records over every axis of every record shape.
  @parameterized.named_parameters(jtu.cases_from_list(
      {"testcase_name": "{}_inshape={}_axis={}".format(
          rec.test_name.capitalize(),
          jtu.format_shape_dtype_string(shape, dtype), axis),
       "rng": rec.rng, "shape": shape, "dtype": dtype,
       "onp_op": getattr(onp, rec.name), "lnp_op": getattr(lnp, rec.name),
       "axis": axis}
      for rec in JAX_ARGMINMAX_RECORDS
      for shape in rec.shapes for dtype in rec.dtypes
      for axis in range(-len(shape), len(shape))))
  def testArgMinMax(self, onp_op, lnp_op, rng, shape, dtype, axis):
    # Known gap on the GPU backend, detected via the jax_test_dut flag.
    if (dtype == onp.complex128 and FLAGS.jax_test_dut and
        FLAGS.jax_test_dut.startswith("gpu")):
      raise unittest.SkipTest("complex128 reductions not supported on GPU")

    def onp_fun(array_to_reduce):
      return onp_op(array_to_reduce, axis)

    def lnp_fun(array_to_reduce):
      return lnp_op(array_to_reduce, axis)

    args_maker = lambda: [rng(shape, dtype)]
    self._CheckAgainstNumpy(onp_fun, lnp_fun, args_maker, check_dtypes=True)
    self._CompileAndCheck(lnp_fun, args_maker, check_dtypes=True)
  # lnp.cross vs onp.cross; `axes` bundles (axisa, axisb, axisc, axis) and the
  # shape list covers 2- and 3-element cross products plus broadcasting.
  @parameterized.named_parameters(jtu.cases_from_list(
      {"testcase_name": "_{}_{}_{}".format(
          jtu.format_shape_dtype_string(lhs_shape, lhs_dtype),
          jtu.format_shape_dtype_string(rhs_shape, rhs_dtype),
          axes),
       "lhs_shape": lhs_shape, "lhs_dtype": lhs_dtype,
       "rhs_shape": rhs_shape, "rhs_dtype": rhs_dtype,
       "axes": axes, "rng": rng}
      for rng in [jtu.rand_default()]
      for lhs_shape, rhs_shape, axes in [
          [(2,), (2,), (-1, -1, -1, None)],
          [(2, 4), (2, 4), (-1, -1, -1, 0)],
          [(3, 4), (3, 4), (-1, -1, -1, 0)],
          [(3, 4), (3, 6, 5, 4), (-1, -1, -1, 0)],
          [(4, 3), (3, 6, 5, 4), (1, 0, -1, None)],
          [(6, 1, 3), (5, 3), (-1, -1, -1, None)],
          [(6, 1, 2), (5, 3), (-1, -1, -1, None)],
          [(10, 5, 2, 8), (1, 5, 1, 3), (-2, -1, -3, None)],
          [(4, 5, 2), (4, 5, 2), (-1, -1, 0, None)],
          [(4, 5, 2), (4, 5, 2), (-1, -1, -1, None)]
      ]
      for lhs_dtype, rhs_dtype in CombosWithReplacement(number_dtypes, 2)))
  def testCross(self, lhs_shape, lhs_dtype, rhs_shape, rhs_dtype, axes, rng):
    args_maker = lambda: [rng(lhs_shape, lhs_dtype), rng(rhs_shape, rhs_dtype)]
    axisa, axisb, axisc, axis = axes
    lnp_fun = lambda a, b: lnp.cross(a, b, axisa, axisb, axisc, axis)
    onp_fun = lambda a, b: onp.cross(a, b, axisa, axisb, axisc, axis)
    self._CheckAgainstNumpy(onp_fun, lnp_fun, args_maker, check_dtypes=True)
    self._CompileAndCheck(lnp_fun, args_maker, check_dtypes=True)
  # lnp.dot vs onp.dot for every rank pairing dot supports (scalar through
  # tensor-tensor contraction over the last/second-to-last axes).
  @parameterized.named_parameters(jtu.cases_from_list(
      {"testcase_name": "_{}_{}_{}".format(
          name,
          jtu.format_shape_dtype_string(lhs_shape, lhs_dtype),
          jtu.format_shape_dtype_string(rhs_shape, rhs_dtype)),
       "lhs_shape": lhs_shape, "lhs_dtype": lhs_dtype,
       "rhs_shape": rhs_shape, "rhs_dtype": rhs_dtype,
       "rng": rng}
      for rng in [jtu.rand_default()]
      for name, lhs_shape, rhs_shape in [
          ("matrix-scalar", (3, 3), ()),
          ("scalar-matrix", (), (3, 3)),
          ("matrix-vector", (4, 5), (5,)),
          ("vector-matrix", (6,), (6, 4)),
          ("matrix-matrix", (3, 4), (4, 5)),
          ("tensor-vector", (4, 3, 2), (2,)),
          ("vector-tensor", (2,), (3, 2, 4)),
          ("tensor-matrix", (4, 3, 2), (2, 5)),
          ("matrix-tensor", (5, 2), (3, 2, 4)),
          ("tensor-tensor", (2, 3, 4), (5, 4, 1))]
      for lhs_dtype, rhs_dtype in CombosWithReplacement(number_dtypes, 2)))
  def testDot(self, lhs_shape, lhs_dtype, rhs_shape, rhs_dtype, rng):
    args_maker = lambda: [rng(lhs_shape, lhs_dtype), rng(rhs_shape, rhs_dtype)]
    self._CheckAgainstNumpy(onp.dot, lnp.dot, args_maker, check_dtypes=True)
    self._CompileAndCheck(lnp.dot, args_maker, check_dtypes=True)
  # lnp.matmul vs onp.matmul, including batched ("stacked") operands and a
  # broadcasting batch dimension.
  @parameterized.named_parameters(jtu.cases_from_list(
      {"testcase_name": "_{}_{}_{}".format(
          name,
          jtu.format_shape_dtype_string(lhs_shape, lhs_dtype),
          jtu.format_shape_dtype_string(rhs_shape, rhs_dtype)),
       "lhs_shape": lhs_shape, "lhs_dtype": lhs_dtype,
       "rhs_shape": rhs_shape, "rhs_dtype": rhs_dtype,
       "rng": rng}
      for rng in [jtu.rand_default()]
      for name, lhs_shape, rhs_shape in [
          ("vector-vector", (3,), (3,)),
          ("matrix-vector", (3, 3), (3,)),
          ("vector-matrix", (3,), (3, 3)),
          ("matrix-matrix", (3, 3), (3, 3)),
          ("vector-tensor", (3,), (5, 3, 2)),
          ("tensor-vector", (5, 3, 2), (2,)),
          ("matrix-tensor", (5, 2), (3, 2, 4)),
          ("tensor-matrix", (5, 2, 3), (3, 2)),
          ("tensor-tensor", (5, 3, 4), (5, 4, 1)),
          ("tensor-tensor-broadcast", (3, 1, 3, 4), (5, 4, 1))]
      for lhs_dtype, rhs_dtype in CombosWithReplacement(number_dtypes, 2)))
  def testMatmul(self, lhs_shape, lhs_dtype, rhs_shape, rhs_dtype, rng):
    args_maker = lambda: [rng(lhs_shape, lhs_dtype), rng(rhs_shape, rhs_dtype)]
    self._CheckAgainstNumpy(onp.matmul, lnp.matmul, args_maker,
                            check_dtypes=True)
    self._CompileAndCheck(lnp.matmul, args_maker, check_dtypes=True)
  # lnp.tensordot vs onp.tensordot; `axes` exercises the int, list, and
  # pair-of-lists forms of the contraction spec.
  @parameterized.named_parameters(jtu.cases_from_list(
      {"testcase_name": "_{}_{}_{}".format(
          jtu.format_shape_dtype_string(lhs_shape, lhs_dtype),
          jtu.format_shape_dtype_string(rhs_shape, rhs_dtype),
          axes),
       "lhs_shape": lhs_shape, "lhs_dtype": lhs_dtype,
       "rhs_shape": rhs_shape, "rhs_dtype": rhs_dtype,
       "axes": axes, "rng": rng}
      for rng in [jtu.rand_default()]
      for lhs_shape, rhs_shape, axes in [
          [(2, 3, 4), (5, 6, 7), 0], [(2, 3, 4), (3, 4, 5, 6), 2],
          [(2, 3, 4), (5, 4, 3, 6), [1, 2]],
          [(2, 3, 4), (5, 4, 3, 6), [[1, 2], [2, 1]]],
          [(1, 2, 3, 4), (4, 5, 3, 6), [[2, 3], [2, 0]]],
      ]
      for lhs_dtype, rhs_dtype in CombosWithReplacement(number_dtypes, 2)))
  def testTensordot(self, lhs_shape, lhs_dtype, rhs_shape, rhs_dtype, axes, rng):
    args_maker = lambda: [rng(lhs_shape, lhs_dtype), rng(rhs_shape, rhs_dtype)]
    lnp_fun = lambda a, b: lnp.tensordot(a, b, axes)
    onp_fun = lambda a, b: onp.tensordot(a, b, axes)
    self._CheckAgainstNumpy(onp_fun, lnp_fun, args_maker, check_dtypes=True)
    self._CompileAndCheck(lnp_fun, args_maker, check_dtypes=True)
  # lnp.inner vs onp.inner; shape pairs are filtered so either operand is a
  # scalar or the trailing dimensions agree (inner's contraction requirement).
  @parameterized.named_parameters(jtu.cases_from_list(
      {"testcase_name": "_{}_{}".format(
          jtu.format_shape_dtype_string(lhs_shape, lhs_dtype),
          jtu.format_shape_dtype_string(rhs_shape, rhs_dtype)),
       "lhs_shape": lhs_shape, "lhs_dtype": lhs_dtype,
       "rhs_shape": rhs_shape, "rhs_dtype": rhs_dtype,
       "rng": jtu.rand_default()}
      for lhs_dtype, rhs_dtype in CombosWithReplacement(inexact_dtypes, 2)
      for lhs_shape, rhs_shape in [
          (l, r) for l, r in CombosWithReplacement(all_shapes, 2)
          if len(jtu._dims_of_shape(l)) == 0
          or len(jtu._dims_of_shape(r)) == 0
          or l[-1] == r[-1]]))
  def testInner(self, lhs_shape, lhs_dtype, rhs_shape, rhs_dtype, rng):
    args_maker = lambda: [rng(lhs_shape, lhs_dtype), rng(rhs_shape, rhs_dtype)]
    onp_fun = lambda lhs, rhs: onp.inner(lhs, rhs)
    lnp_fun = lambda lhs, rhs: lnp.inner(lhs, rhs)
    self._CheckAgainstNumpy(onp_fun, lnp_fun, args_maker, check_dtypes=True)
    self._CompileAndCheck(lnp_fun, args_maker, check_dtypes=True)
  # lnp.clip with compile-time-constant bounds, including one-sided clipping
  # (a_min or a_max set to None).
  @parameterized.named_parameters(jtu.cases_from_list(
      {"testcase_name": "_{}_amin={}_amax={}".format(
          jtu.format_shape_dtype_string(shape, dtype), a_min, a_max),
       "shape": shape, "dtype": dtype, "a_min": a_min, "a_max": a_max,
       "rng": jtu.rand_default()}
      for shape in all_shapes for dtype in number_dtypes
      for a_min, a_max in [(-1, None), (None, 1), (-1, 1)]))
  def testClipStaticBounds(self, shape, dtype, a_min, a_max, rng):
    onp_fun = lambda x: onp.clip(x, a_min=a_min, a_max=a_max)
    lnp_fun = lambda x: lnp.clip(x, a_min=a_min, a_max=a_max)
    args_maker = lambda: [rng(shape, dtype)]
    self._CheckAgainstNumpy(onp_fun, lnp_fun, args_maker, check_dtypes=True)
    self._CompileAndCheck(lnp_fun, args_maker, check_dtypes=True)
  # lnp.round with static `decimals`, including negative decimals (rounding to
  # tens/hundreds); the integer+negative-decimals combo is skipped as
  # unimplemented.
  @parameterized.named_parameters(jtu.cases_from_list(
      {"testcase_name": "_{}_decimals={}".format(
          jtu.format_shape_dtype_string(shape, dtype), decimals),
       "shape": shape, "dtype": dtype, "decimals": decimals,
       "rng": jtu.rand_default()}
      for shape in all_shapes for dtype in number_dtypes
      for decimals in [0, 1, -2]))
  def testRoundStaticDecimals(self, shape, dtype, decimals, rng):
    if onp.issubdtype(dtype, onp.integer) and decimals < 0:
      self.skipTest("Integer rounding with decimals < 0 not implemented")
    onp_fun = lambda x: onp.round(x, decimals=decimals)
    lnp_fun = lambda x: lnp.round(x, decimals=decimals)
    args_maker = lambda: [rng(shape, dtype)]
    self._CheckAgainstNumpy(onp_fun, lnp_fun, args_maker, check_dtypes=True)
    self._CompileAndCheck(lnp_fun, args_maker, check_dtypes=True)
  # lnp.pad across several modes. pad_width (and constant_values, for mode
  # 'constant') is generated at rank 0/1/2 via the slice trick
  # [len(shape), 2][2 - rank:], i.e. scalar, per-side, or per-axis-per-side.
  @parameterized.named_parameters(jtu.cases_from_list(
      {"testcase_name": "_shape={}_mode={}_rpadwidth={}_rconstantvalues={}".format(
          jtu.format_shape_dtype_string(shape, dtype), mode, pad_width_rank,
          constant_values_rank),
       "shape": shape, "dtype": dtype, "mode": mode,
       "pad_width_rank": pad_width_rank,
       "constant_values_rank": constant_values_rank, "rng": jtu.rand_default(),
       "irng": jtu.rand_int(3)}
      for mode, constant_values_rank, shapes in [
          ('constant', 0, all_shapes),
          ('constant', 1, all_shapes),
          ('constant', 2, all_shapes),
          ('symmetric', None, nonempty_shapes),
          ('reflect', None, nonempty_shapes),
          ('wrap', None, nonempty_shapes),
      ]
      for shape in shapes for dtype in all_dtypes
      for pad_width_rank in range(3)))
  def testPad(self, shape, dtype, mode, pad_width_rank, constant_values_rank,
              rng, irng):
    pad_width = irng([len(shape), 2][2 - pad_width_rank:], onp.int32)
    def onp_fun(x, kwargs):
      # onp.pad chokes on an empty pad_width for 0-d input; padding with an
      # empty spec is the identity, so short-circuit it.
      if pad_width.size == 0:
        return x
      return onp.pad(x, pad_width, mode=mode, **kwargs)
    def lnp_fun(x, kwargs):
      return lnp.pad(x, pad_width, mode=mode, **kwargs)

    def args_maker():
      kwargs = {}
      if constant_values_rank:
        kwargs["constant_values"] = rng(
          [len(shape), 2][2 - constant_values_rank:], dtype)
      return rng(shape, dtype), kwargs

    self._CheckAgainstNumpy(onp_fun, lnp_fun, args_maker, check_dtypes=True)
    self._CompileAndCheck(lnp_fun, args_maker, check_dtypes=True)
  # lnp.tile vs onp.tile, with reps tuples both shorter and longer than the
  # input rank (including the empty reps tuple).
  @parameterized.named_parameters(jtu.cases_from_list(
      {"testcase_name": "_shape=[{}]_reps={}".format(
          jtu.format_shape_dtype_string(shape, dtype), reps),
       "shape": shape, "dtype": dtype, "reps": reps,
       "rng": jtu.rand_default()}
      for reps in [(), (2,), (3, 4), (2, 3, 4)]
      for dtype in default_dtypes
      for shape in all_shapes
      ))
  def testTile(self, shape, dtype, reps, rng):
    onp_fun = lambda arg: onp.tile(arg, reps)
    lnp_fun = lambda arg: lnp.tile(arg, reps)
    args_maker = lambda: [rng(shape, dtype)]
    self._CheckAgainstNumpy(onp_fun, lnp_fun, args_maker, check_dtypes=True)
    self._CompileAndCheck(lnp_fun, args_maker, check_dtypes=True)
  # lnp.concatenate over three arrays whose sizes along the concat axis cycle
  # through 3, 1, 4 while all other dims match the base shape.
  @parameterized.named_parameters(jtu.cases_from_list(
      {"testcase_name": "_axis={}_baseshape=[{}]_dtypes=[{}]".format(
          axis, ",".join(str(d) for d in base_shape),
          ",".join(onp.dtype(dtype).name for dtype in dtypes)),
       "axis": axis, "base_shape": base_shape, "dtypes": dtypes,
       "rng": jtu.rand_default()}
      for num_arrs in [3]
      for dtypes in CombosWithReplacement(default_dtypes, num_arrs)
      for base_shape in [(4,), (3, 4), (2, 3, 4)]
      for axis in range(-len(base_shape)+1, len(base_shape))))
  def testConcatenate(self, axis, base_shape, dtypes, rng):
    wrapped_axis = axis % len(base_shape)
    shapes = [base_shape[:wrapped_axis] + (size,) + base_shape[wrapped_axis+1:]
              for size, _ in zip(itertools.cycle([3, 1, 4]), dtypes)]
    onp_fun = lambda *args: onp.concatenate(args, axis=axis)
    lnp_fun = lambda *args: lnp.concatenate(args, axis=axis)

    def args_maker():
      return [rng(shape, dtype) for shape, dtype in zip(shapes, dtypes)]

    self._CheckAgainstNumpy(onp_fun, lnp_fun, args_maker, check_dtypes=True)
    self._CompileAndCheck(lnp_fun, args_maker, check_dtypes=True)
  # lnp.append vs onp.append with an explicit axis; the two operands differ
  # only in their size along that axis (cycled 3, 1, 4 as in testConcatenate).
  @parameterized.named_parameters(jtu.cases_from_list(
      {"testcase_name": "_axis={}_baseshape=[{}]_dtypes=[{}]".format(
          axis, ",".join(str(d) for d in base_shape),
          ",".join(onp.dtype(dtype).name for dtype in dtypes)),
       "axis": axis, "base_shape": base_shape, "dtypes": dtypes,
       "rng": jtu.rand_default()}
      for dtypes in CombosWithReplacement(default_dtypes, 2)
      for base_shape in [(4,), (3, 4), (2, 3, 4)]
      for axis in range(-len(base_shape)+1, len(base_shape))))
  def testAppend(self, axis, base_shape, dtypes, rng):
    wrapped_axis = axis % len(base_shape)
    shapes = [base_shape[:wrapped_axis] + (size,) + base_shape[wrapped_axis+1:]
              for size, _ in zip(itertools.cycle([3, 1, 4]), dtypes)]
    onp_fun = lambda arr, values: onp.append(arr, values, axis=axis)
    lnp_fun = lambda arr, values: lnp.append(arr, values, axis=axis)

    def args_maker():
      return [rng(shape, dtype) for shape, dtype in zip(shapes, dtypes)]

    self._CheckAgainstNumpy(onp_fun, lnp_fun, args_maker, check_dtypes=True)
    self._CompileAndCheck(lnp_fun, args_maker, check_dtypes=True)
  # lnp.repeat with scalar repeat counts (including 0) over every axis and
  # the flattened (axis=None) case.
  @parameterized.named_parameters(jtu.cases_from_list(
      {"testcase_name": "_shape=[{}]_axis={}_repeats={}".format(
          jtu.format_shape_dtype_string(shape, dtype), axis, repeats),
       "axis": axis, "shape": shape, "dtype": dtype, "repeats": repeats,
       "rng": jtu.rand_default()}
      for repeats in [0, 1, 2]
      for dtype in default_dtypes
      for shape in all_shapes
      for axis in [None] + list(range(-len(shape), len(shape)))))
  def testRepeat(self, axis, shape, dtype, repeats, rng):
    onp_fun = lambda arg: onp.repeat(arg, repeats=repeats, axis=axis)
    lnp_fun = lambda arg: lnp.repeat(arg, repeats=repeats, axis=axis)
    args_maker = lambda: [rng(shape, dtype)]
    self._CheckAgainstNumpy(onp_fun, lnp_fun, args_maker, check_dtypes=True)
    self._CompileAndCheck(lnp_fun, args_maker, check_dtypes=True)
  # cumsum/cumprod with explicit output dtype, over every axis plus the
  # flattened (axis=None) form.
  @parameterized.named_parameters(jtu.cases_from_list(
      {"testcase_name": "op={}_shape=[{}]_axis={}_out_dtype={}".format(
          op, jtu.format_shape_dtype_string(shape, dtype), axis, out_dtype),
       "axis": axis, "shape": shape, "dtype": dtype, "out_dtype": out_dtype,
       "rng": jtu.rand_default(), "lnp_op": getattr(lnp, op),
       "onp_op": getattr(onp, op)}
      for op in ["cumsum", "cumprod"]
      for dtype in [onp.float32, onp.int32]
      for out_dtype in [onp.float32, onp.int32]
      for shape in all_shapes
      for axis in [None] + list(range(-len(shape), len(shape)))))
  def testCumSumProd(self, axis, shape, dtype, out_dtype, onp_op, lnp_op, rng):
    onp_fun = lambda arg: onp_op(arg, axis=axis, dtype=out_dtype)
    lnp_fun = lambda arg: lnp_op(arg, axis=axis, dtype=out_dtype)
    args_maker = lambda: [rng(shape, dtype)]
    self._CheckAgainstNumpy(onp_fun, lnp_fun, args_maker, check_dtypes=True)
    self._CompileAndCheck(lnp_fun, args_maker, check_dtypes=True)
  # lnp.tri vs onp.tri; takes no runtime args (args_maker is empty), including
  # M=None (square default) and zero-sized outputs.
  @parameterized.named_parameters(jtu.cases_from_list(
      {"testcase_name": "_dtype={}_m={}_n={}_k={}".format(
          onp.dtype(dtype).name, m, n, k),
       "m": m, "n": n, "k": k, "dtype": dtype, "rng": jtu.rand_default()}
      for dtype in default_dtypes
      for n in [0, 4]
      for m in [None, 0, 1, 3, 4]
      for k in list(range(-4, 4))))
  def testTri(self, m, n, k, dtype, rng):
    onp_fun = lambda: onp.tri(n, M=m, k=k, dtype=dtype)
    lnp_fun = lambda: lnp.tri(n, M=m, k=k, dtype=dtype)
    args_maker = lambda: []
    self._CheckAgainstNumpy(onp_fun, lnp_fun, args_maker, check_dtypes=True)
    self._CompileAndCheck(lnp_fun, args_maker, check_dtypes=True)
  # tril/triu with diagonal offsets, restricted to inputs of rank >= 2.
  @parameterized.named_parameters(jtu.cases_from_list(
      {"testcase_name": "_op={}_shape={}_k={}".format(
          op, jtu.format_shape_dtype_string(shape, dtype), k),
       "dtype": dtype, "shape": shape, "op": op, "k": k,
       "rng": jtu.rand_default()}
      for dtype in default_dtypes
      for shape in [shape for shape in all_shapes if len(shape) >= 2]
      for op in ["tril", "triu"]
      for k in list(range(-3, 3))))
  def testTriLU(self, dtype, shape, op, k, rng):
    onp_fun = lambda arg: getattr(onp, op)(arg, k=k)
    lnp_fun = lambda arg: getattr(lnp, op)(arg, k=k)
    args_maker = lambda: [rng(shape, dtype)]
    self._CheckAgainstNumpy(onp_fun, lnp_fun, args_maker, check_dtypes=True)
    self._CompileAndCheck(lnp_fun, args_maker, check_dtypes=True)
  # lnp.diag in both directions: vector input -> diagonal matrix, matrix
  # input -> extracted diagonal (ranks 1 and 2 only), with offset k.
  @parameterized.named_parameters(jtu.cases_from_list(
      {"testcase_name": "_shape={}_k={}".format(
          jtu.format_shape_dtype_string(shape, dtype), k),
       "dtype": dtype, "shape": shape, "k": k, "rng": jtu.rand_default()}
      for dtype in default_dtypes
      for shape in [shape for shape in all_shapes if len(shape) in (1, 2)]
      for k in list(range(-4, 4))))
  def testDiag(self, shape, dtype, k, rng):
    onp_fun = lambda arg: onp.diag(arg, k)
    lnp_fun = lambda arg: lnp.diag(arg, k)
    args_maker = lambda: [rng(shape, dtype)]
    self._CheckAgainstNumpy(onp_fun, lnp_fun, args_maker, check_dtypes=True)
    self._CompileAndCheck(lnp_fun, args_maker, check_dtypes=True)
  # lnp.diagonal over all distinct (axis1, axis2) pairs (equal axes modulo
  # rank are excluded) and a range of offsets.
  @parameterized.named_parameters(jtu.cases_from_list(
      {"testcase_name": "_shape={}_offset={}_axis1={}_axis2={}".format(
          jtu.format_shape_dtype_string(shape, dtype), offset, axis1, axis2),
       "dtype": dtype, "shape": shape, "offset": offset, "axis1": axis1,
       "axis2": axis2, "rng": jtu.rand_default()}
      for dtype in default_dtypes
      for shape in [shape for shape in all_shapes if len(shape) >= 2]
      for axis1 in range(-len(shape), len(shape))
      for axis2 in [a for a in range(-len(shape), len(shape))
                    if a % len(shape) != axis1 % len(shape)]
      for offset in list(range(-4, 4))))
  def testDiagonal(self, shape, dtype, offset, axis1, axis2, rng):
    onp_fun = lambda arg: onp.diagonal(arg, offset, axis1, axis2)
    lnp_fun = lambda arg: lnp.diagonal(arg, offset, axis1, axis2)
    args_maker = lambda: [rng(shape, dtype)]
    self._CheckAgainstNumpy(onp_fun, lnp_fun, args_maker, check_dtypes=True)
    self._CompileAndCheck(lnp_fun, args_maker, check_dtypes=True)
  # lnp.identity vs onp.identity for small n, including n=0; no runtime args.
  @parameterized.named_parameters(jtu.cases_from_list(
      {"testcase_name": "_shape={}_n={}".format(onp.dtype(dtype).name, n),
       "dtype": dtype, "n": n}
      for dtype in default_dtypes
      for n in list(range(4))))
  def testIdentity(self, n, dtype):
    onp_fun = lambda: onp.identity(n, dtype)
    lnp_fun = lambda: lnp.identity(n, dtype)
    args_maker = lambda: []
    self._CheckAgainstNumpy(onp_fun, lnp_fun, args_maker, check_dtypes=True)
    self._CompileAndCheck(lnp_fun, args_maker, check_dtypes=True)
  # lnp.trace with explicit output dtype, over all distinct (axis1, axis2)
  # pairs and a range of diagonal offsets.
  @parameterized.named_parameters(jtu.cases_from_list(
      {"testcase_name": "_shape={}_dtype_{}_offset={}_axis1={}_axis2={}".format(
          jtu.format_shape_dtype_string(shape, dtype),
          out_dtype, offset, axis1, axis2),
       "dtype": dtype, "out_dtype": out_dtype, "shape": shape, "offset": offset,
       "axis1": axis1, "axis2": axis2, "rng": jtu.rand_default()}
      for dtype in default_dtypes
      for out_dtype in [None] + number_dtypes
      for shape in [shape for shape in all_shapes if len(shape) >= 2]
      for axis1 in range(-len(shape), len(shape))
      for axis2 in range(-len(shape), len(shape))
      if (axis1 % len(shape)) != (axis2 % len(shape))
      for offset in list(range(-4, 4))))
  def testTrace(self, shape, dtype, out_dtype, offset, axis1, axis2, rng):
    onp_fun = lambda arg: onp.trace(arg, offset, axis1, axis2, out_dtype)
    lnp_fun = lambda arg: lnp.trace(arg, offset, axis1, axis2, out_dtype)
    args_maker = lambda: [rng(shape, dtype)]
    self._CheckAgainstNumpy(onp_fun, lnp_fun, args_maker, check_dtypes=True)
    self._CompileAndCheck(lnp_fun, args_maker, check_dtypes=True)
@parameterized.named_parameters(jtu.cases_from_list(
{"testcase_name": "_{}_axis={}".format(
jtu.format_test_name_suffix("", [shape] * len(dtypes), dtypes), axis),
"shape": shape, "axis": axis, "dtypes": dtypes, "rng": rng}
for dtypes in [
[onp.float32],
[onp.float32, onp.float32],
[onp.float32, onp.int32, onp.float32],
[onp.float32, onp.int64, onp.float32],
[onp.float32, onp.int32, onp.float64],
]
for shape in [(), (2,), (3, 4), (1, 100)]
for axis in range(-len(shape), len(shape) + 1)
for rng in [jtu.rand_default()]))
def testStack(self, shape, axis, dtypes, rng):
args_maker = lambda: [[rng(shape, dtype) for dtype in dtypes]]
onp_fun = partial(onp.stack, axis=axis)
lnp_fun = partial(lnp.stack, axis=axis)
self._CheckAgainstNumpy(lnp_fun, onp_fun, args_maker, check_dtypes=True)
@parameterized.named_parameters(jtu.cases_from_list(
{"testcase_name": "_op={}_{}".format(
op, jtu.format_test_name_suffix("", [shape] * len(dtypes), dtypes)),
"shape": shape, "op": op, "dtypes": dtypes, "rng": rng}
for op in ["hstack", "vstack", "dstack"]
for dtypes in [
[onp.float32],
[onp.float32, onp.float32],
[onp.float32, onp.int32, onp.float32],
[onp.float32, onp.int64, onp.float32],
[onp.float32, onp.int32, onp.float64],
]
for shape in [(), (2,), (3, 4), (1, 100), (2, 3, 4)]
for rng in [jtu.rand_default()]))
def testHVDStack(self, shape, op, dtypes, rng):
args_maker = lambda: [[rng(shape, dtype) for dtype in dtypes]]
onp_fun = getattr(onp, op)
lnp_fun = getattr(lnp, op)
self._CheckAgainstNumpy(lnp_fun, onp_fun, args_maker, check_dtypes=True)
  # lnp.full: the fill value is the (runtime) argument; shape and out_dtype
  # are static, with out_dtype=None meaning "infer from the fill value".
  @parameterized.named_parameters(jtu.cases_from_list(
      {"testcase_name": "_inshape={}_outdtype={}".format(
          jtu.format_shape_dtype_string(shape, fill_value_dtype),
          onp.dtype(out_dtype).name if out_dtype else "None"),
       "shape": shape, "fill_value_dtype": fill_value_dtype,
       "out_dtype": out_dtype, "rng": jtu.rand_default()}
      for shape in array_shapes
      for fill_value_dtype in default_dtypes
      for out_dtype in [None] + default_dtypes))
  def testFull(self, shape, fill_value_dtype, out_dtype, rng):
    onp_fun = lambda fill_value: onp.full(shape, fill_value, dtype=out_dtype)
    lnp_fun = lambda fill_value: lnp.full(shape, fill_value, dtype=out_dtype)
    args_maker = lambda: [rng((), fill_value_dtype)]
    self._CheckAgainstNumpy(onp_fun, lnp_fun, args_maker, check_dtypes=True)
    self._CompileAndCheck(lnp_fun, args_maker, check_dtypes=True)
  # lnp.full_like: template array and scalar fill value are runtime args;
  # the explicit out_dtype overrides the template's dtype.
  @parameterized.named_parameters(jtu.cases_from_list(
      {"testcase_name": "_inshape={}_filldtype={}_outdtype={}".format(
          jtu.format_shape_dtype_string(shape, in_dtype),
          onp.dtype(fill_value_dtype).name,
          onp.dtype(out_dtype).name),
       "shape": shape, "in_dtype": in_dtype,
       "fill_value_dtype": fill_value_dtype, "out_dtype": out_dtype,
       "rng": jtu.rand_default()}
      for shape in array_shapes
      for in_dtype in default_dtypes
      for fill_value_dtype in default_dtypes
      for out_dtype in default_dtypes))
  def testFullLike(self, shape, in_dtype, fill_value_dtype, out_dtype, rng):
    onp_fun = lambda x, fill_value: onp.full_like(x, fill_value, dtype=out_dtype)
    lnp_fun = lambda x, fill_value: lnp.full_like(x, fill_value, dtype=out_dtype)
    args_maker = lambda: [rng(shape, in_dtype), rng((), fill_value_dtype)]
    self._CheckAgainstNumpy(onp_fun, lnp_fun, args_maker, check_dtypes=True)
    self._CompileAndCheck(lnp_fun, args_maker, check_dtypes=True)
  # lnp.split with a static integer section count; each (shape, axis, count)
  # triple divides evenly, as onp.split requires.
  @parameterized.named_parameters(jtu.cases_from_list(
      {"testcase_name": "_{}_axis={}_{}sections".format(
          jtu.format_shape_dtype_string(shape, dtype), axis, num_sections),
       "shape": shape, "num_sections": num_sections, "axis": axis,
       "dtype": dtype, "rng": jtu.rand_default()}
      for shape, axis, num_sections in [
          ((3,), 0, 3), ((12,), 0, 3), ((12, 4), 0, 4), ((12, 4), 1, 2),
          ((2, 3, 4), -1, 2), ((2, 3, 4), -2, 3)]
      for dtype in default_dtypes))
  def testSplitStaticInt(self, shape, num_sections, axis, dtype, rng):
    onp_fun = lambda x: onp.split(x, num_sections, axis=axis)
    lnp_fun = lambda x: lnp.split(x, num_sections, axis=axis)
    args_maker = lambda: [rng(shape, dtype)]
    self._CheckAgainstNumpy(onp_fun, lnp_fun, args_maker, check_dtypes=True)
    self._CompileAndCheck(lnp_fun, args_maker, check_dtypes=True)
@parameterized.named_parameters(jtu.cases_from_list(
{"testcase_name": "_{}_axis={}_{}sections".format(
jtu.format_shape_dtype_string(shape, dtype), axis, num_sections),
"shape": shape, "num_sections": num_sections, "axis": axis,
"dtype": dtype, "rng": jtu.rand_default()}
for shape, axis, num_sections in [
((12, 4), 0, 4), ((12, 4), 1, 2),
((2, 3, 4), 2, 2), ((4, 3, 4), 0, 2)]
for dtype in default_dtypes))
def testHVDSplit(self, shape, num_sections, axis, dtype, rng):
def fn(module, axis):
if axis == 0:
return module.vsplit
elif axis == 1:
return module.hsplit
else:
assert axis == 2
return module.dsplit
onp_fun = lambda x: fn(onp, axis)(x, num_sections)
lnp_fun = lambda x: fn(lnp, axis)(x, num_sections)
args_maker = lambda: [rng(shape, dtype)]
self._CheckAgainstNumpy(onp_fun, lnp_fun, args_maker, check_dtypes=True)
self._CompileAndCheck(lnp_fun, args_maker, check_dtypes=True)
  # lnp.reshape with C and Fortran order; targets include the inferred -1
  # dimension, a bare int, zero-sized arrays, and NumPy-scalar input.
  @parameterized.named_parameters(jtu.cases_from_list(
      {"testcase_name": "_inshape={}_outshape={}_order={}".format(
          jtu.format_shape_dtype_string(arg_shape, dtype),
          jtu.format_shape_dtype_string(out_shape, dtype),
          order),
       "arg_shape": arg_shape, "out_shape": out_shape, "dtype": dtype,
       "order": order, "rng": jtu.rand_default()}
      for dtype in default_dtypes
      for order in ["C", "F"]
      for arg_shape, out_shape in [
          (jtu.NUMPY_SCALAR_SHAPE, (1, 1, 1)),
          ((), (1, 1, 1)),
          ((7, 0), (0, 42, 101)),
          ((3, 4), 12),
          ((3, 4), (12,)),
          ((3, 4), -1),
          ((2, 1, 4), (-1,)),
          ((2, 2, 4), (2, 8))
      ]))
  def testReshape(self, arg_shape, out_shape, dtype, order, rng):
    onp_fun = lambda x: onp.reshape(x, out_shape, order=order)
    lnp_fun = lambda x: lnp.reshape(x, out_shape, order=order)
    args_maker = lambda: [rng(arg_shape, dtype)]
    self._CheckAgainstNumpy(onp_fun, lnp_fun, args_maker, check_dtypes=True)
    self._CompileAndCheck(lnp_fun, args_maker, check_dtypes=True)
  # The .reshape(*dims) method form (varargs rather than a tuple), compared
  # against the onp.reshape function form.
  @parameterized.named_parameters(jtu.cases_from_list(
      {"testcase_name": "_inshape={}_outshape={}".format(
          jtu.format_shape_dtype_string(arg_shape, dtype),
          jtu.format_shape_dtype_string(out_shape, dtype)),
       "arg_shape": arg_shape, "out_shape": out_shape, "dtype": dtype,
       "rng": jtu.rand_default()}
      for dtype in default_dtypes
      for arg_shape, out_shape in [
          ((7, 0), (0, 42, 101)),
          ((2, 1, 4), (-1,)),
          ((2, 2, 4), (2, 8))
      ]))
  def testReshapeMethod(self, arg_shape, out_shape, dtype, rng):
    onp_fun = lambda x: onp.reshape(x, out_shape)
    lnp_fun = lambda x: x.reshape(*out_shape)
    args_maker = lambda: [rng(arg_shape, dtype)]
    self._CheckAgainstNumpy(onp_fun, lnp_fun, args_maker, check_dtypes=True)
    self._CompileAndCheck(lnp_fun, args_maker, check_dtypes=True)
  # lnp.expand_dims with a static dimension index, including negative indices.
  @parameterized.named_parameters(jtu.cases_from_list(
      {"testcase_name": "_inshape={}_expanddim={}".format(
          jtu.format_shape_dtype_string(arg_shape, dtype), dim),
       "arg_shape": arg_shape, "dtype": dtype, "dim": dim,
       "rng": jtu.rand_default()}
      for arg_shape in [(), (3,), (3, 4)]
      for dtype in default_dtypes
      for dim in range(-len(arg_shape)+1, len(arg_shape))))
  def testExpandDimsStaticDim(self, arg_shape, dtype, dim, rng):
    onp_fun = lambda x: onp.expand_dims(x, dim)
    lnp_fun = lambda x: lnp.expand_dims(x, dim)
    args_maker = lambda: [rng(arg_shape, dtype)]
    self._CheckAgainstNumpy(onp_fun, lnp_fun, args_maker, check_dtypes=True)
    self._CompileAndCheck(lnp_fun, args_maker, check_dtypes=True)
@parameterized.named_parameters(jtu.cases_from_list(
{"testcase_name": "_inshape={}_axes=({},{})".format(
jtu.format_shape_dtype_string(arg_shape, dtype), ax1, ax2),
"arg_shape": arg_shape, "dtype": dtype, "ax1": ax1, "ax2": ax2,
"rng": jtu.rand_default()}
for arg_shape, ax1, ax2 in [
((3, 4), 0, 1), ((3, 4), 1, 0), ((3, 4, 5), 1, 2),
((3, 4, 5), -1, -2), ((3, 4, 5), 0, 1)]
for dtype in default_dtypes))
  def testSwapAxesStaticAxes(self, arg_shape, dtype, ax1, ax2, rng):
    """Checks lnp.swapaxes with static axes (incl. negative) against onp.swapaxes."""
    onp_fun = lambda x: onp.swapaxes(x, ax1, ax2)
    lnp_fun = lambda x: lnp.swapaxes(x, ax1, ax2)
    args_maker = lambda: [rng(arg_shape, dtype)]
    self._CheckAgainstNumpy(onp_fun, lnp_fun, args_maker, check_dtypes=True)
    self._CompileAndCheck(lnp_fun, args_maker, check_dtypes=True)
@parameterized.named_parameters(jtu.cases_from_list(
{"testcase_name": "_inshape={}_axis={}".format(
jtu.format_shape_dtype_string(arg_shape, dtype), ax),
"arg_shape": arg_shape, "dtype": dtype, "ax": ax,
"rng": jtu.rand_default()}
for arg_shape, ax in [
((3, 1), None),
((3, 1), 1),
((1, 3, 1), (0, 2)),
((1, 4, 1), (0,))]
for dtype in default_dtypes))
  def testSqueeze(self, arg_shape, dtype, ax, rng):
    """Checks lnp.squeeze (axis None, int, or tuple) against onp.squeeze."""
    onp_fun = lambda x: onp.squeeze(x, ax)
    lnp_fun = lambda x: lnp.squeeze(x, ax)
    args_maker = lambda: [rng(arg_shape, dtype)]
    self._CheckAgainstNumpy(onp_fun, lnp_fun, args_maker, check_dtypes=True)
    self._CompileAndCheck(lnp_fun, args_maker, check_dtypes=True)
@parameterized.named_parameters(jtu.cases_from_list(
{"testcase_name": "_shape={}_axis={}_weights={}_returned={}".format(
jtu.format_shape_dtype_string(shape, dtype),
axis,
(None if weights_shape == None else jtu.format_shape_dtype_string(weights_shape, dtype)),
returned),
"rng": jtu.rand_default(), "shape": shape, "dtype": dtype, "axis": axis,
"weights_shape": weights_shape, "returned": returned}
for shape in nonempty_shapes
for dtype in number_dtypes
for axis in set(range(-len(shape), len(shape))) | set([None])
for weights_shape in ([None, shape] if axis is None else [None, (shape[axis],), shape])
for returned in [False, True]))
  def testAverage(self, shape, dtype, axis, weights_shape, returned, rng):
    """Checks lnp.average (with/without weights, with/without `returned`) vs onp."""
    onp_fun = lambda x, weights: onp.average(x, axis, weights, returned)
    lnp_fun = lambda x, weights: lnp.average(x, axis, weights, returned)
    args_maker = lambda: [rng(shape, dtype),
                          None if weights_shape is None else rng(weights_shape, dtype)]
    # Random weights can sum to ~0, in which case onp.average raises
    # ZeroDivisionError; that case is not meaningful to compare, so skip it.
    try:
      self._CheckAgainstNumpy(onp_fun, lnp_fun, args_maker, check_dtypes=True)
    except ZeroDivisionError:
      self.skipTest("don't support checking for ZeroDivisionError")
    self._CompileAndCheck(lnp_fun, args_maker, check_dtypes=True)
@parameterized.named_parameters(jtu.cases_from_list(
{"testcase_name": "_arg{}".format(i), "arg": arg}
for i, arg in enumerate([
3., [1, 2, 3], [1., 2., 3.],
[[1, 2], [3, 4], [5, 6]], [[1, 2.], [3, 4], [5, 6]],
[[3, onp.array(2), 1], onp.arange(3.)],
])))
  def testArray(self, arg):
    """Checks lnp.array on scalars and (possibly ragged/mixed) nested lists."""
    args_maker = lambda: [arg]
    self._CheckAgainstNumpy(onp.array, lnp.array, args_maker, check_dtypes=True)
    self._CompileAndCheck(lnp.array, args_maker, check_dtypes=True)
  def testIssue121(self):
    """Regression test (issue #121): a rank-0 lnp array must not be an onp scalar."""
    assert not onp.isscalar(lnp.array(3))
  def testArrayMethod(self):
    """lnp.array must defer to an object's __array__ protocol."""
    class arraylike(object):
      # Minimal duck-typed array: only dtype + __array__ are provided.
      dtype = onp.float32
      def __array__(self, dtype=None):
        return 3.
    a = arraylike()
    ans = lnp.array(a)
    assert ans == 3.
  def testAllClose(self):
    """lnp.allclose composed over pytrees works both eagerly and under jit."""
    rng = onp.random.RandomState(0)
    x = rng.randn(2, 2)
    y = rng.randn(2)
    def same(list1, list2):
      # Element-wise allclose over paired leaves, reduced with lnp.all.
      allclose = functools.partial(lnp.allclose, atol=1e-3, rtol=1e-3)
      elements_close = list(map(allclose, list1, list2))
      return lnp.all(lnp.array(elements_close))
    csame = api.jit(same)
    a1 = same((x, y), (x, y))        # eager: identical inputs -> True
    a2 = csame((x, y), (x, y))       # jitted: identical inputs -> True
    a3 = csame((x, y), (x, 2 * y))   # jitted: perturbed input -> False
    self.assertTrue(a1)
    self.assertTrue(a2)
    self.assertFalse(a3)
@jtu.skip_on_devices("tpu") # TODO(mattjj): investigate this failure
  def testOnesBroadcastingConstantHandler(self):
    """Stride-zero ndarray constants should lower to a Broadcast op (jax2-era)."""
    # TODO(mattjj): update this test for jax3
    self.skipTest("test needs jax3 update")
    # NOTE: everything below is unreachable until the skip above is removed.
    def fun(x):
      ones = lnp.ones((3, 4))
      assert isinstance(ones, onp.ndarray) and ones.strides == (0, 0)
      # To check that the constant handler generates a Broadcast for stride-zero
      # arrays, we monkey-patch the client instance.
      # TODO(mattjj): once we have better HLO dumping and inspecting facilities,
      # we can check the HLO more directly.
      c = x._node.c
      Broadcast = c.Broadcast  # pylint: disable=invalid-name
      was_called = []
      c.Broadcast = lambda *args: was_called.append(True) or Broadcast(*args)
      out = x + ones  # the ndarray constant handler should call Broadcast here
      assert was_called, "Broadcast was not called."
      return out
    fun = api.jit(fun)
    out_val = fun(lnp.ones(4))
    self.assertAllClose(out_val, onp.full((3, 4), 2.), check_dtypes=False)
  def testZeroStridesConstantHandler(self):
    """A broadcast_to (stride-0) numpy constant closed over by jit is handled right."""
    raw_const = onp.random.RandomState(0).randn(1, 2, 1, 1, 5, 1)
    const = onp.broadcast_to(raw_const, (3, 2, 3, 4, 5, 6))  # view with zero strides
    def fun(x):
      return x * const
    fun = api.jit(fun)
    out_val = fun(3.)
    self.assertAllClose(out_val, 3. * const, check_dtypes=False)
  def testIsInstanceNdarrayDuringTracing(self):
    """Traced values inside jit must still satisfy isinstance(x, lnp.ndarray)."""
    arr = onp.ones(3)
    @api.jit
    def f(x):
      self.assertIsInstance(x, lnp.ndarray)
      return lnp.sum(x)
    f(arr)
  def testNonArrayErrorMessage(self):
    """Passing a raw Python list to lnp ops raises TypeError, eagerly and under jit."""
    x = [1., 2.]
    y = onp.array([3., 4.])
    def g(x, y):
      return lnp.add(x, y)
    def f(x, y):
      return lnp.dot(x, y)
    self.assertRaises(TypeError, lambda: g(x, y))
    self.assertRaises(TypeError, lambda: f(x, y))
    self.assertRaises(TypeError, lambda: api.jit(g)(x, y))
    self.assertRaises(TypeError, lambda: api.jit(f)(x, y))
  def testAbstractionErrorMessage(self):
    """Python control flow on abstract (traced) values raises TypeError under jit."""
    @api.jit
    def f(x, n):
      # `n` is traced, so range(n) needs a concrete value -> TypeError.
      for _ in range(n):
        x = x * x
      return x
    self.assertRaises(TypeError, lambda: f(3., 3))
    @api.jit
    def g(x):
      # `x > 0.` is traced, so Python `if` needs a concrete bool -> TypeError.
      if x > 0.:
        return x * 2
      else:
        return x + 2
    self.assertRaises(TypeError, lambda: g(3.))
  def testTracingPrimitiveWithNoTranslationErrorMessage(self):
    """A _not_implemented primitive works eagerly but fails under jit (jax2-era)."""
    # TODO(mattjj): update this for jax3
    self.skipTest("test needs jax3 update")
    # NOTE: everything below is unreachable until the skip above is removed.
    foo = lnp._not_implemented(lambda x: x)
    # No error if there's no tracing.
    foo(onp.arange(3))
    cfoo = api.jit(foo)
    self.assertRaises(NotImplementedError, lambda: cfoo(onp.arange(3)))
@parameterized.named_parameters(jtu.cases_from_list(
{"testcase_name": "_{}_axis={}".format(
jtu.format_shape_dtype_string(shape, dtype), axis),
"rng": rng, "shape": shape, "dtype": dtype, "axis": axis}
for shape in [(3,), (2, 3)]
for dtype in default_dtypes
for axis in range(-len(shape), len(shape))
for rng in [jtu.rand_default()]))
  def testFlip(self, shape, dtype, axis, rng):
    """Checks lnp.flip along a single axis against onp.flip."""
    args_maker = self._GetArgsMaker(rng, [shape], [dtype])
    lnp_op = lambda x: lnp.flip(x, axis)
    onp_op = lambda x: onp.flip(x, axis)
    self._CheckAgainstNumpy(onp_op, lnp_op, args_maker, check_dtypes=True)
    self._CompileAndCheck(lnp_op, args_maker, check_dtypes=True)
@parameterized.named_parameters(jtu.cases_from_list(
{"testcase_name": "_{}".format(
jtu.format_shape_dtype_string(shape, dtype)),
"rng": rng, "shape": shape, "dtype": dtype}
for shape in [(3,), (2, 3), (3, 2, 4)]
for dtype in default_dtypes
for rng in [jtu.rand_default()]))
  def testFlipud(self, shape, dtype, rng):
    """Checks lnp.flipud (reverse along axis 0) against onp.flipud."""
    args_maker = self._GetArgsMaker(rng, [shape], [dtype])
    lnp_op = lambda x: lnp.flipud(x)
    onp_op = lambda x: onp.flipud(x)
    self._CheckAgainstNumpy(onp_op, lnp_op, args_maker, check_dtypes=True)
    self._CompileAndCheck(lnp_op, args_maker, check_dtypes=True)
@parameterized.named_parameters(jtu.cases_from_list(
{"testcase_name": "_{}".format(
jtu.format_shape_dtype_string(shape, dtype)),
"rng": rng, "shape": shape, "dtype": dtype}
for shape in [(3, 2), (2, 3), (3, 2, 4)]
for dtype in default_dtypes
for rng in [jtu.rand_default()]))
  def testFliplr(self, shape, dtype, rng):
    """Checks lnp.fliplr (reverse along axis 1) against onp.fliplr."""
    args_maker = self._GetArgsMaker(rng, [shape], [dtype])
    lnp_op = lambda x: lnp.fliplr(x)
    onp_op = lambda x: onp.fliplr(x)
    self._CheckAgainstNumpy(onp_op, lnp_op, args_maker, check_dtypes=True)
    self._CompileAndCheck(lnp_op, args_maker, check_dtypes=True)
@parameterized.named_parameters(jtu.cases_from_list(
{"testcase_name": "_{}_k={}_axes={}".format(
jtu.format_shape_dtype_string(shape, dtype), k, axes),
"rng": rng, "shape": shape, "dtype": dtype, "k": k, "axes": axes}
for shape, axes in [
[(2, 3), (0, 1)],
[(2, 3), (1, 0)],
[(4, 3, 2), (0, 2)],
[(4, 3, 2), (2, 1)],
]
for k in range(-3, 4)
for dtype in default_dtypes
for rng in [jtu.rand_default()]))
  def testRot90(self, shape, dtype, k, axes, rng):
    """Checks lnp.rot90 for k in [-3, 3] and several axis pairs against onp.rot90."""
    args_maker = self._GetArgsMaker(rng, [shape], [dtype])
    lnp_op = lambda x: lnp.rot90(x, k, axes)
    onp_op = lambda x: onp.rot90(x, k, axes)
    self._CheckAgainstNumpy(onp_op, lnp_op, args_maker, check_dtypes=True)
    self._CompileAndCheck(lnp_op, args_maker, check_dtypes=True)
  def testRavel(self):
    """The ndarray.ravel() method compiles and flattens correctly."""
    rng = onp.random.RandomState(0)
    args_maker = lambda: [rng.randn(3, 4).astype("float32")]
    self._CompileAndCheck(lambda x: x.ravel(), args_maker, check_dtypes=True)
  def testAstype(self):
    """The ndarray.astype(...) method matches numpy (float32 -> int32 cast)."""
    rng = onp.random.RandomState(0)
    args_maker = lambda: [rng.randn(3, 4).astype("float32")]
    op = lambda x: x.astype(lnp.int32)
    self._CheckAgainstNumpy(op, op, args_maker, check_dtypes=True)
    self._CompileAndCheck(op, args_maker, check_dtypes=True)
  def testOnpMean(self):
    """onp reductions should accept jax arrays (presumably via __array__) — mean(eye(3)) == 1/3."""
    x = lax.add(lnp.eye(3), 0.)
    ans = onp.mean(x)
    self.assertAllClose(ans, onp.array(1./3), check_dtypes=False)
  def testArangeOnFloats(self):
    """lnp.arange with a float step matches onp.arange."""
    expected = onp.arange(0.0, 1.0, 0.1)
    ans = lnp.arange(0.0, 1.0, 0.1)
    self.assertAllClose(expected, ans, check_dtypes=True)
  def testSortManually(self):
    """Spot-checks lnp.sort for axis=None (flattened), default (last) axis, and axis=0."""
    # lax.sort is tested combinatorially.
    ans = lnp.sort(onp.array([16, 15, 23, 42, 8, 4]))
    expected = onp.array([4, 8, 15, 16, 23, 42])
    self.assertAllClose(expected, ans, check_dtypes=True)
    a = onp.array([[1, 4], [3, 1]])
    ans = lnp.sort(a, axis=None)  # flattened sort
    expected = onp.array([1, 1, 3, 4])
    self.assertAllClose(expected, ans, check_dtypes=True)
    a = onp.array([[1, 4], [3, 1]])
    ans = lnp.sort(a)  # last axis
    expected = onp.array([[1, 4], [1, 3]])
    self.assertAllClose(expected, ans, check_dtypes=True)
    a = onp.array([[1, 4], [3, 1]])
    ans = lnp.sort(a, axis=0)
    expected = onp.array([[1, 1], [3, 4]])
    self.assertAllClose(expected, ans, check_dtypes=True)
  def testArgsortManually(self):
    """Spot-checks lnp.argsort against onp.argsort for 1D/2D inputs and each axis option."""
    x = onp.array([16, 15, 23, 42, 8, 4])
    ans = lnp.argsort(x)
    expected = onp.argsort(x)
    self.assertAllClose(expected, ans, check_dtypes=False)
    x = onp.array([[16, 15, 23], [42, 8, 4]])
    ans = lnp.argsort(x, axis=0)
    expected = onp.argsort(x, axis=0)
    self.assertAllClose(expected, ans, check_dtypes=False)
    x = onp.array([[16, 15, 23], [42, 8, 4]])
    ans = lnp.argsort(x, axis=1)
    expected = onp.argsort(x, axis=1)
    self.assertAllClose(expected, ans, check_dtypes=False)
    x = onp.array([[16, 15, 23], [42, 8, 4]])
    ans = lnp.argsort(x, axis=None)  # flattened argsort
    expected = onp.argsort(x, axis=None)
    self.assertAllClose(expected, ans, check_dtypes=False)
    x = onp.array([[16, 15, 23], [42, 8, 4]])
    ans = lnp.argsort(x)  # default axis (last)
    expected = onp.argsort(x)
    self.assertAllClose(expected, ans, check_dtypes=False)
@parameterized.named_parameters(jtu.cases_from_list(
{"testcase_name": "_{}_shifts={}_axis={}".format(
jtu.format_shape_dtype_string(shape, dtype),
shifts, axis),
"rng": rng, "shape": shape, "dtype": dtype, "shifts": shifts,
"axis": axis}
for dtype in all_dtypes
for shape in [(3, 4), (3, 4, 5), (7, 4, 0)]
for shifts, axis in [
(3, None),
(1, 1),
((3,), (0,)),
((-2,), (-2,)),
((1, 2), (0, -1))
]
for rng in [jtu.rand_default()]))
  def testRoll(self, shape, dtype, shifts, axis, rng):
    """Checks lnp.roll for scalar/tuple shifts and axes against onp.roll."""
    args_maker = lambda: [rng(shape, dtype)]
    lnp_op = lambda x: lnp.roll(x, shifts, axis=axis)
    onp_op = lambda x: onp.roll(x, shifts, axis=axis)
    # NOTE(review): the arguments look swapped relative to the other tests,
    # which pass the onp reference first — confirm against _CheckAgainstNumpy's
    # signature before changing, since check_dtypes may make the order matter.
    self._CheckAgainstNumpy(lnp_op, onp_op, args_maker, check_dtypes=True)
    self._CompileAndCheck(lnp_op, args_maker, check_dtypes=True)
@parameterized.named_parameters(jtu.cases_from_list(
{"testcase_name": "_{}_index={}_axis={}_mode={}".format(
jtu.format_shape_dtype_string(shape, dtype),
jtu.format_shape_dtype_string(index_shape, index_dtype),
axis, mode),
"rng": rng, "rng_indices": rng_indices, "shape": shape,
"index_shape": index_shape, "dtype": dtype, "index_dtype": index_dtype,
"axis": axis, "mode": mode}
for shape in [(3,), (3, 4), (3, 4, 5)]
for index_shape in scalar_shapes + [(3,), (2, 1, 3)]
for axis in itertools.chain(range(-len(shape), len(shape)), [None])
for dtype in all_dtypes
for index_dtype in int_dtypes
for mode in ['wrap', 'clip']
for rng in [jtu.rand_default()]
for rng_indices in [jtu.rand_int(-5, 5)]))
  def testTake(self, shape, dtype, index_shape, index_dtype, axis, mode, rng,
               rng_indices):
    """Checks lnp.take with 'wrap'/'clip' modes and out-of-range indices vs onp.take."""
    def args_maker():
      x = rng(shape, dtype)
      i = rng_indices(index_shape, index_dtype)  # indices in [-5, 5), may be OOB
      return x, i
    lnp_op = lambda x, i: lnp.take(x, i, axis=axis, mode=mode)
    onp_op = lambda x, i: onp.take(x, i, axis=axis, mode=mode)
    # NOTE(review): args appear swapped vs. the usual (onp first) order here
    # too — see the note on testRoll; verify before reordering.
    self._CheckAgainstNumpy(lnp_op, onp_op, args_maker, check_dtypes=True)
    self._CompileAndCheck(lnp_op, args_maker, check_dtypes=True)
@parameterized.named_parameters(jtu.cases_from_list(
{"testcase_name": "_{}_axis={}".format(
jtu.format_shape_dtype_string(shape, dtype), axis),
"rng": rng, "shape": shape, "dtype": dtype, "axis": axis}
for shape in [(3,), (3, 4), (3, 4, 5)]
for axis in itertools.chain(range(len(shape)), [-1], [None])
for dtype in default_dtypes
for rng in [jtu.rand_default()]))
  def testTakeAlongAxis(self, shape, dtype, axis, rng):
    """Checks lnp.take_along_axis with argsort-generated indices against onp."""
    def args_maker():
      x = rng(shape, dtype)
      i = onp.argsort(x, axis=axis)  # valid index array along the tested axis
      return x, i
    lnp_op = lambda x, i: lnp.take_along_axis(x, i, axis=axis)
    # onp.take_along_axis only exists in newer numpy versions, so the numpy
    # comparison is conditional; the compile check always runs.
    if hasattr(onp, "take_along_axis"):
      onp_op = lambda x, i: onp.take_along_axis(x, i, axis=axis)
      self._CheckAgainstNumpy(lnp_op, onp_op, args_maker, check_dtypes=True)
    self._CompileAndCheck(lnp_op, args_maker, check_dtypes=True)
@parameterized.named_parameters(jtu.cases_from_list(
{"testcase_name": "_shape={}_n={}_increasing={}".format(
jtu.format_shape_dtype_string([shape], dtype),
n, increasing),
"dtype": dtype, "shape": shape, "n": n, "increasing": increasing,
"rng": jtu.rand_default()}
for dtype in inexact_dtypes
for shape in [0, 5]
for n in [2, 4]
for increasing in [False, True]))
  def testVander(self, shape, dtype, n, increasing, rng):
    """Checks lnp.vander (Vandermonde matrix) against onp.vander."""
    onp_fun = lambda arg: onp.vander(arg, N=n, increasing=increasing)
    lnp_fun = lambda arg: lnp.vander(arg, N=n, increasing=increasing)
    args_maker = lambda: [rng([shape], dtype)]
    # np.vander seems to return float64 for all floating types. We could obey
    # those semantics, but they seem like a bug.
    self._CheckAgainstNumpy(onp_fun, lnp_fun, args_maker, check_dtypes=False)
    self._CompileAndCheck(lnp_fun, args_maker, check_dtypes=False)
@parameterized.named_parameters(jtu.cases_from_list(
{"testcase_name": jtu.format_test_name_suffix("nan_to_num", [shape],
[dtype]),
"rng": jtu.rand_some_inf_and_nan(), "shape": shape, "dtype": dtype}
for shape in all_shapes
for dtype in inexact_dtypes))
  def testNanToNum(self, rng, shape, dtype):
    """Checks lnp.nan_to_num on inputs containing nan/inf against onp.nan_to_num."""
    dtype = onp.dtype(xla_bridge.canonicalize_dtype(dtype)).type
    args_maker = lambda: [rng(shape, dtype)]
    self._CheckAgainstNumpy(onp.nan_to_num, lnp.nan_to_num, args_maker,
                            check_dtypes=True)
    self._CompileAndCheck(lnp.nan_to_num, args_maker, check_dtypes=True)
@parameterized.named_parameters(jtu.cases_from_list(
{"testcase_name": jtu.format_test_name_suffix("ix_", shapes, dtypes),
"rng": jtu.rand_default(), "shapes": shapes, "dtypes": dtypes}
for shapes, dtypes in (
((), ()),
(((7,),), (onp.float32,)),
(((3,), (4,)), (onp.float32, onp.int32)),
(((3,), (0,), (4,)), (onp.int32, onp.float32, onp.int32)),
)))
  def testIx_(self, rng, shapes, dtypes):
    """Checks lnp.ix_ (open-mesh index construction) against onp.ix_."""
    args_maker = lambda: [rng(shape, dtype)
                          for shape, dtype in zip(shapes, dtypes)]
    self._CheckAgainstNumpy(onp.ix_, lnp.ix_, args_maker,
                            check_dtypes=True)
    self._CompileAndCheck(lnp.ix_, args_maker, check_dtypes=True)
  def testIssue330(self):
    """Regression test (issue #330): lnp.full with a traced/indexed fill value."""
    x = lnp.full((1, 1), lnp.array([1])[0])  # doesn't crash
    self.assertEqual(x[0, 0], 1)
  def testScalarDtypePromotion(self):
    """Python-scalar dtype promotion deliberately differs from numpy; skipped."""
    msg = ("jax.numpy differs from numpy in promotion rules for Python scalars."
           " See https://github.com/google/jax/issues/732.")
    raise SkipTest(msg)
    # NOTE: unreachable — kept to document what the check would be.
    orig_numpy_result = (1 + onp.eye(1, dtype=onp.float32)).dtype
    jax_numpy_result = (1 + lnp.eye(1, dtype=lnp.float32)).dtype
    self.assertEqual(orig_numpy_result, jax_numpy_result)
  def testSymmetrizeDtypePromotion(self):
    """(x + x.T) / 2 must keep the same dtype in lnp as in onp (no up-promotion)."""
    x = onp.eye(3, dtype=onp.float32)
    orig_numpy_result = ((x + x.T) / 2).dtype
    x = lnp.eye(3, dtype=lnp.float32)
    jax_numpy_result = ((x + x.T) / 2).dtype
    self.assertEqual(orig_numpy_result, jax_numpy_result)
  def testIssue347(self):
    """Regression test (issue #347): grad through sqrt/where must not produce NaNs."""
    def test_fail(x):
      x = lnp.sqrt(lnp.sum(x ** 2, axis=1))  # row norms; zero row -> sqrt(0)
      ones = lnp.ones_like(x)
      x = lnp.where(x > 0.5, x, ones)
      return lnp.sum(x)
    x = lnp.array([[1, 2], [3, 4], [0, 0]], dtype=lnp.float64)
    result = api.grad(test_fail)(x)
    assert not onp.any(onp.isnan(result))
  def testIssue453(self):
    """Regression test (issue #453): Fortran-order reshape matches numpy."""
    a = onp.arange(6) + 1
    ans = lnp.reshape(a, (3, 2), order='F')
    expected = onp.reshape(a, (3, 2), order='F')
    self.assertAllClose(ans, expected, check_dtypes=True)
@parameterized.named_parameters(jtu.cases_from_list(
{"testcase_name": "_op={}_dtype={}".format(
op, {bool: "bool", int: "int", float: "float"}[dtype]),
"dtype": dtype, "op": op}
for dtype in [int, float, bool]
for op in ["atleast_1d", "atleast_2d", "atleast_3d"]))
  def testAtLeastNdLiterals(self, dtype, op):
    """atleast_1d/2d/3d accept bare Python scalars (bool/int/float)."""
    onp_fun = lambda arg: getattr(onp, op)(arg)
    lnp_fun = lambda arg: getattr(lnp, op)(arg)
    args_maker = lambda: [dtype(2)]
    self._CheckAgainstNumpy(onp_fun, lnp_fun, args_maker, check_dtypes=True)
    self._CompileAndCheck(lnp_fun, args_maker, check_dtypes=True)
  def testLongLong(self):
    """onp.longlong inputs round-trip through jit as int64."""
    self.assertAllClose(onp.int64(7), api.jit(lambda x: x)(onp.longlong(7)),
                        check_dtypes=True)
  def testArange(self):
    """lnp.arange matches onp.arange values but returns a lazy iota-backed type."""
    self.assertAllClose(lnp.arange(77),
                        onp.arange(77), check_dtypes=True)
    self.assertAllClose(lnp.arange(2, 13),
                        onp.arange(2, 13), check_dtypes=True)
    self.assertAllClose(lnp.arange(4, 21, 9),
                        onp.arange(4, 21, 9), check_dtypes=True)
    self.assertAllClose(lnp.arange(53, 5, -3),
                        onp.arange(53, 5, -3), check_dtypes=True)
    self.assertAllClose(lnp.arange(0, 1, -0.5),
                        onp.arange(0, 1, -0.5), check_dtypes=True)
    self.assertRaises(TypeError, lambda: lnp.arange())
    # The result type is lax.iota's, not a plain onp.ndarray.
    self.assertFalse(type(lnp.arange(77)) == type(onp.arange(77)))
    self.assertTrue(type(lnp.arange(77)) == type(lax.iota(onp.int32, 77)))
  def testIssue830(self):
    """Regression test (issue #830): arange must honor a complex dtype."""
    a = lnp.arange(4, dtype=lnp.complex64)
    self.assertEqual(a.dtype, lnp.complex64)
  def testIssue728(self):
    """Regression test (issue #728): large lnp.eye matrices match onp.eye exactly."""
    assert lnp.allclose(lnp.eye(5000), onp.eye(5000))
    self.assertEqual(0, onp.sum(lnp.eye(1050) - onp.eye(1050)))
  def testIssue746(self):
    """Regression test (issue #746): reshape of a lazy arange must not crash."""
    lnp.arange(12).reshape(3, 4)  # doesn't crash
  def testIssue764(self):
    """Regression test (issue #764): grad of sum(tanh) at large inputs (tiny grads)."""
    x = lnp.linspace(190, 200, 4)
    f = api.grad(lambda x: lnp.sum(lnp.tanh(x)))
    # Expected sech^2 values underflow toward denormals; compare loosely.
    expected = onp.array([3.71669453e-165, 4.72999108e-168, 6.01954653e-171,
                          7.66067839e-174], onp.float64)
    self.assertAllClose(f(x), expected, check_dtypes=False)
  def testIssue776(self):
    """Regression test (issue #776): grad through index_add + tie_in is zero."""
    def f(u):
      y = jax.ops.index_add(onp.ones(10,), [2, 4, 5], u)
      return lax.tie_in(y, 7.)  # constant output -> zero gradient w.r.t. u
    self.assertAllClose(onp.zeros(3,), api.grad(f)(onp.ones(3,)),
                        check_dtypes=True)
@parameterized.named_parameters(
jtu.cases_from_list(
{"testcase_name": jtu.format_test_name_suffix(op, [()], [dtype]),
"dtype": dtype, "op": op}
for dtype in float_dtypes
for op in ("sqrt", "arccos", "arcsin", "arctan", "sin", "cos", "tan",
"sinh", "cosh", "tanh", "arccosh", "arcsinh", "arctanh", "exp",
"log", "expm1", "log1p")))
def testMathSpecialFloatValues(self, op, dtype):
onp_op = getattr(onp, op)
lnp_op = getattr(lnp, op)
dtype = onp.dtype(xla_bridge.canonicalize_dtype(dtype)).type
for x in (onp.nan, -onp.inf, -100., -2. -1., 0., 1., 2., 100., onp.inf,
onp.finfo(dtype).max, onp.sqrt(onp.finfo(dtype).max),
onp.sqrt(onp.finfo(dtype).max) * 2.):
if onp.isnan(x) and op in ("cosh", "expm1", "exp"):
continue
if (op in ("sin", "cos", "tan", "arctan") and FLAGS.jax_test_dut and
FLAGS.jax_test_dut.startswith("tpu")):
continue
x = dtype(x)
expected = onp_op(x)
actual = lnp_op(x)
self.assertAllClose(expected, actual, check_dtypes=True)
  def testIssue883(self):
    """Regression test (issue #883): jit with an unhashable static arg (array).

    The return values are deliberately unused; the test passes if both calls
    (initial compile + cache lookup) complete without raising.
    """
    @partial(api.jit, static_argnums=(1,))
    def f(x, v):
      return x
    x = lnp.ones((10, 10))
    v = lnp.array([1, 2, 3])
    first_call = f(x, v)
    second_call = f(x, v)  # second call exercises the compilation cache
  def testReductionOfOutOfBoundsAxis(self):  # Issue 888
    """Reducing over an out-of-range axis must raise ValueError, not crash."""
    x = lnp.ones((3, 4))
    self.assertRaises(ValueError, lambda: lnp.sum(x, axis=2))
# Run under the absl test runner when executed directly.
if __name__ == "__main__":
  absltest.main()
| true | true |
f71bbf7a4a188527959ac45431aaf4fe7372b6f2 | 702 | py | Python | google/ads/googleads/v6/services/services/keyword_plan_ad_group_service/__init__.py | wxxlouisa/google-ads-python | f24137966f6bfcb765a9b1fae79f2d23041825fe | [
"Apache-2.0"
] | null | null | null | google/ads/googleads/v6/services/services/keyword_plan_ad_group_service/__init__.py | wxxlouisa/google-ads-python | f24137966f6bfcb765a9b1fae79f2d23041825fe | [
"Apache-2.0"
] | null | null | null | google/ads/googleads/v6/services/services/keyword_plan_ad_group_service/__init__.py | wxxlouisa/google-ads-python | f24137966f6bfcb765a9b1fae79f2d23041825fe | [
"Apache-2.0"
] | null | null | null | # -*- coding: utf-8 -*-
# Copyright 2020 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# Package facade: re-export the generated service client as the public API.
from .client import KeywordPlanAdGroupServiceClient

__all__ = ("KeywordPlanAdGroupServiceClient",)
| 33.428571 | 74 | 0.759259 |
from .client import KeywordPlanAdGroupServiceClient
__all__ = ("KeywordPlanAdGroupServiceClient",)
| true | true |
f71bbf8502afa8422043b8e08ba0756892e46a96 | 3,593 | py | Python | tutorial/settings.py | luotuo/spider-for-apple-store | b4b2bfb6227df8fce7f8e3ab635703e93bb89ed1 | [
"MIT"
] | 20 | 2017-12-17T08:07:12.000Z | 2020-12-11T02:31:59.000Z | tutorial/settings.py | luotuo/spider-for-apple-store | b4b2bfb6227df8fce7f8e3ab635703e93bb89ed1 | [
"MIT"
] | null | null | null | tutorial/settings.py | luotuo/spider-for-apple-store | b4b2bfb6227df8fce7f8e3ab635703e93bb89ed1 | [
"MIT"
] | 6 | 2017-12-17T08:07:14.000Z | 2020-12-11T02:32:00.000Z | # -*- coding: utf-8 -*-
# Scrapy settings for tutorial project
#
# For simplicity, this file contains only settings considered important or
# commonly used. You can find more settings consulting the documentation:
#
# http://doc.scrapy.org/en/latest/topics/settings.html
# http://scrapy.readthedocs.org/en/latest/topics/downloader-middleware.html
# http://scrapy.readthedocs.org/en/latest/topics/spider-middleware.html
# Project identity and where Scrapy discovers/creates spiders.
BOT_NAME = 'tutorial'
SPIDER_MODULES = ['tutorial.spiders']
NEWSPIDER_MODULE = 'tutorial.spiders'
# Crawl responsibly by identifying yourself (and your website) on the user-agent
#USER_AGENT = 'tutorial (+http://www.yourdomain.com)'
# Spoof a desktop Chrome 55 UA instead of the default Scrapy identifier.
USER_AGENT = 'Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/55.0.2883.87 Safari/537.36'
# Obey robots.txt rules
# NOTE: robots.txt is deliberately ignored by this crawler.
ROBOTSTXT_OBEY = False
# Configure maximum concurrent requests performed by Scrapy (default: 16)
#CONCURRENT_REQUESTS = 32
# Configure a delay for requests for the same website (default: 0)
# See http://scrapy.readthedocs.org/en/latest/topics/settings.html#download-delay
# See also autothrottle settings and docs
# Wait 3 seconds between requests to the same site (politeness throttle).
DOWNLOAD_DELAY = 3
# The download delay setting will honor only one of:
#CONCURRENT_REQUESTS_PER_DOMAIN = 16
#CONCURRENT_REQUESTS_PER_IP = 16
# Disable cookies (enabled by default)
#COOKIES_ENABLED = False
# Disable Telnet Console (enabled by default)
#TELNETCONSOLE_ENABLED = False
# Override the default request headers:
#DEFAULT_REQUEST_HEADERS = {
# 'Accept': 'text/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8',
# 'Accept-Language': 'en',
#}
# Enable or disable spider middlewares
# See http://scrapy.readthedocs.org/en/latest/topics/spider-middleware.html
#SPIDER_MIDDLEWARES = {
# 'tutorial.middlewares.TutorialSpiderMiddleware': 543,
#}
# Enable or disable downloader middlewares
# See http://scrapy.readthedocs.org/en/latest/topics/downloader-middleware.html
#DOWNLOADER_MIDDLEWARES = {
# 'tutorial.middlewares.MyCustomDownloaderMiddleware': 543,
#}
# Enable or disable extensions
# See http://scrapy.readthedocs.org/en/latest/topics/extensions.html
#EXTENSIONS = {
# 'scrapy.extensions.telnet.TelnetConsole': None,
#}
# Configure item pipelines
# See http://scrapy.readthedocs.org/en/latest/topics/item-pipeline.html
#ITEM_PIPELINES = {
# 'tutorial.pipelines.TutorialPipeline': 300,
#}
# Enable and configure the AutoThrottle extension (disabled by default)
# See http://doc.scrapy.org/en/latest/topics/autothrottle.html
#AUTOTHROTTLE_ENABLED = True
# The initial download delay
#AUTOTHROTTLE_START_DELAY = 5
# The maximum download delay to be set in case of high latencies
#AUTOTHROTTLE_MAX_DELAY = 60
# The average number of requests Scrapy should be sending in parallel to
# each remote server
#AUTOTHROTTLE_TARGET_CONCURRENCY = 1.0
# Enable showing throttling stats for every response received:
#AUTOTHROTTLE_DEBUG = False
# Enable and configure HTTP caching (disabled by default)
# See http://scrapy.readthedocs.org/en/latest/topics/downloader-middleware.html#httpcache-middleware-settings
#HTTPCACHE_ENABLED = True
#HTTPCACHE_EXPIRATION_SECS = 0
#HTTPCACHE_DIR = 'httpcache'
#HTTPCACHE_IGNORE_HTTP_CODES = []
#HTTPCACHE_STORAGE = 'scrapy.extensions.httpcache.FilesystemCacheStorage'
# start MySQL database configure setting
# MySQL connection settings consumed by the pipeline below.
# NOTE(review): credentials are hard-coded; prefer environment variables or a
# local (untracked) settings override in real deployments.
MYSQL_HOST = 'localhost'
MYSQL_DBNAME = 'spider'
MYSQL_USER = 'root'
MYSQL_PASSWD = 'root'
# end of MySQL database configure setting
ITEM_PIPELINES = {
   # JSON export pipeline kept for reference, disabled in favor of MySQL:
   # 'tutorial.pipelines.JsonWithEncodingTutorialPipeline': 300,
    'tutorial.pipelines.MySQLStoreTutorialPipeline': 300,
}
| 34.548077 | 123 | 0.775953 |
BOT_NAME = 'tutorial'
SPIDER_MODULES = ['tutorial.spiders']
NEWSPIDER_MODULE = 'tutorial.spiders'
USER_AGENT = 'Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/55.0.2883.87 Safari/537.36'
ROBOTSTXT_OBEY = False
Y = 3
t'
MYSQL_DBNAME = 'spider'
MYSQL_USER = 'root'
MYSQL_PASSWD = 'root'
ITEM_PIPELINES = {
'tutorial.pipelines.MySQLStoreTutorialPipeline': 300,
}
| true | true |
f71bbfffc3a193e2f3f52cdb443e6740cf88b7e3 | 6,052 | py | Python | Ensemble Learning/AdaBoost.py | DiogoRibeiro7/Machine-Learning | d2c789851f8b4eaf74cdd0c18af072f60cd45cb3 | [
"Apache-2.0"
] | null | null | null | Ensemble Learning/AdaBoost.py | DiogoRibeiro7/Machine-Learning | d2c789851f8b4eaf74cdd0c18af072f60cd45cb3 | [
"Apache-2.0"
] | null | null | null | Ensemble Learning/AdaBoost.py | DiogoRibeiro7/Machine-Learning | d2c789851f8b4eaf74cdd0c18af072f60cd45cb3 | [
"Apache-2.0"
] | null | null | null | """
@Filename: AdaptiveBoost.py
@Author: Diogo Ribeiro
@Create Date: 2019-05-03
@Update Date: 2019-05-03
@Description: Implement of Adaptive Boosting
"""
import numpy as np
import preProcess
import pickle
import random
import SVM
import math
class Adaboost:
    """Adaptive Boosting (AdaBoost) ensemble over an SVM weak learner.

    Each boosting round trains a weak classifier on weighted samples,
    computes its weighted error, derives the classifier weight
    alpha = 0.5 * ln((1 - err) / err), and re-weights samples so that
    misclassified ones get more attention in the next round.

    Fixes vs. the original:
      * predict(): the default ``prob="False"`` was a truthy *string*, so the
        method always returned raw scores; and the local ``prediction`` it
        would otherwise return was an all-zeros array that was never filled.
      * save()/load(): pickle requires binary file modes ('wb'/'rb'); the
        text modes used before crash on Python 3.
      * accuarcy(): now returns a scalar float instead of a 1-element array.
    """

    def __init__(self, norm_type="Normalization", iterations=5, base_classifier="SVM"):
        # norm_type: "Standardization" or anything else -> Normalization.
        # iterations: number of boosting rounds (weak learners).
        # base_classifier: kept for interface compatibility; an SVM is always used.
        self.iterations = iterations
        self.norm_type = norm_type
        self.base_classifier = SVM.SVMClassifier()
        self.prediction = None     # cached sign predictions from last predict()
        self.probability = None    # cached raw aggregated scores
        self.classifier_set = None # list of (alpha, weak_classifier) pairs

    def baseClassifier(self, train_data, train_label, w):
        """Train one weak classifier under sample weights ``w``.

        Returns:
            clf: the trained weak classifier.
            weighted_error: sum of weights over misclassified samples (ndarray).
            base_predictions: the classifier's sign predictions on train_data.
        """
        sample_num = len(train_data)
        error_index = np.ones([sample_num, 1])  # 1 = misclassified, 0 = correct
        clf = self.base_classifier
        clf.train(train_data, train_label)
        base_predictions = np.sign(clf.predict(train_data))
        for i in range(sample_num):
            if base_predictions[i] == train_label[i]:
                error_index[i] = 0
        weighted_error = np.dot(w.T, error_index)
        return clf, weighted_error, base_predictions

    def updateAlpha(self, error):
        """Return alpha = 0.5 * ln((1 - error) / error).

        The denominator is clamped below by 1e-5 (10e-6) so a perfect weak
        learner (error == 0) does not divide by zero.
        """
        temp = (1.0 - error) / max(error, 10e-6)
        new_alpha = 1 / 2 * math.log(temp, math.e)
        return new_alpha

    def train(self, train_data, train_label):
        """Fit the boosted ensemble; returns the list of (alpha, clf) pairs."""
        if self.norm_type == "Standardization":
            train_data = preProcess.Standardization(train_data)
        else:
            train_data = preProcess.Normalization(train_data)

        train_label = np.expand_dims(train_label, axis=1)
        sample_num = len(train_data)

        weak_classifier = []

        # Initialize uniform sample weights.
        w = np.ones([sample_num, 1])
        w = w / sample_num

        # Aggregated (alpha-weighted) prediction across rounds.
        agg_predicts = np.zeros([sample_num, 1])

        for i in range(self.iterations):
            base_clf, error, base_prediction = self.baseClassifier(train_data, train_label, w)
            alpha = self.updateAlpha(error)
            weak_classifier.append((alpha, base_clf))

            # Re-weight samples (exponential update, cf. Hastie et al. Eq. 8.4).
            expon = np.multiply(-1 * alpha * train_label, base_prediction)
            w = np.multiply(w, np.exp(expon))
            w = w / w.sum()

            # Stop early once the ensemble classifies the training set perfectly.
            agg_predicts += alpha * base_prediction
            error_rate = np.multiply(np.sign(agg_predicts) != train_label, np.ones([sample_num, 1]))
            error_rate = error_rate.sum() / sample_num
            if error_rate == 0:
                break

        self.classifier_set = weak_classifier
        return weak_classifier

    def predict(self, test_data, prob=False):
        """Predict labels for ``test_data``.

        Args:
            test_data: feature matrix (ndarray).
            prob: if truthy, return the raw aggregated scores instead of the
                sign predictions. (The old default was the truthy string
                "False"; it is now a real boolean.)
        """
        if self.norm_type == "Standardization":
            test_data = preProcess.Standardization(test_data)
        else:
            test_data = preProcess.Normalization(test_data)

        test_num = test_data.shape[0]
        probability = np.zeros([test_num, 1])
        for alpha, clf in self.classifier_set:
            probability += alpha * clf.predict(test_data)

        self.prediction = np.sign(probability)
        self.probability = probability
        if prob:
            return probability
        return self.prediction

    def accuarcy(self, test_label):
        """Return the accuracy (float in [0, 1]) of the last predict() call."""
        test_label = np.expand_dims(test_label, axis=1)
        prediction = self.prediction
        accuarcy = np.sum(prediction == test_label) / len(test_label)
        return accuarcy

    def save(self, filename):
        """Pickle the trained classifier set to ``filename`` (binary mode)."""
        with open(filename, 'wb') as f:
            pickle.dump(self.classifier_set, f)

    def load(self, filename):
        """Load a pickled classifier set from ``filename``; returns self."""
        with open(filename, 'rb') as f:
            self.classifier_set = pickle.load(f)
        return self
| 34.982659 | 100 | 0.607403 |
import numpy as np
import preProcess
import pickle
import random
import SVM
import math
class Adaboost:
    """Adaptive Boosting (AdaBoost) ensemble over an SVM weak learner.

    Fixes vs. the original: predict()'s default ``prob="False"`` was a truthy
    string (so it always returned raw scores, and the would-be return value
    was an unfilled zeros array); save()/load() opened pickle files in text
    mode, which fails on Python 3; accuarcy() returned a 1-element array.
    """

    def __init__(self, norm_type="Normalization", iterations=5, base_classifier="SVM"):
        # base_classifier is kept for interface compatibility; an SVM is used.
        self.iterations = iterations
        self.norm_type = norm_type
        self.base_classifier = SVM.SVMClassifier()
        self.prediction = None
        self.probability = None
        self.classifier_set = None

    def baseClassifier(self, train_data, train_label, w):
        """Train one weak learner; return (clf, weighted_error, predictions)."""
        sample_num = len(train_data)
        error_index = np.ones([sample_num, 1])  # 1 = misclassified
        clf = self.base_classifier
        clf.train(train_data, train_label)
        base_predictions = np.sign(clf.predict(train_data))
        for i in range(sample_num):
            if base_predictions[i] == train_label[i]:
                error_index[i] = 0
        weighted_error = np.dot(w.T, error_index)
        return clf, weighted_error, base_predictions

    def updateAlpha(self, error):
        """Return alpha = 0.5 * ln((1 - error) / error), clamping error >= 1e-5."""
        temp = (1.0 - error) / max(error, 10e-6)
        new_alpha = 1 / 2 * math.log(temp, math.e)
        return new_alpha

    def train(self, train_data, train_label):
        """Fit the boosted ensemble; returns the list of (alpha, clf) pairs."""
        if self.norm_type == "Standardization":
            train_data = preProcess.Standardization(train_data)
        else:
            train_data = preProcess.Normalization(train_data)
        train_label = np.expand_dims(train_label, axis=1)
        sample_num = len(train_data)
        weak_classifier = []
        # Uniform initial sample weights.
        w = np.ones([sample_num, 1])
        w = w / sample_num
        # Alpha-weighted running prediction, used for early stopping.
        agg_predicts = np.zeros([sample_num, 1])
        for i in range(self.iterations):
            base_clf, error, base_prediction = self.baseClassifier(train_data, train_label, w)
            alpha = self.updateAlpha(error)
            weak_classifier.append((alpha, base_clf))
            # Exponential sample re-weighting.
            expon = np.multiply(-1 * alpha * train_label, base_prediction)
            w = np.multiply(w, np.exp(expon))
            w = w / w.sum()
            agg_predicts += alpha * base_prediction
            error_rate = np.multiply(np.sign(agg_predicts) != train_label, np.ones([sample_num, 1]))
            error_rate = error_rate.sum() / sample_num
            if error_rate == 0:
                break  # training set classified perfectly
        self.classifier_set = weak_classifier
        return weak_classifier

    def predict(self, test_data, prob=False):
        """Predict labels (or raw scores when ``prob`` is truthy) for test_data."""
        if self.norm_type == "Standardization":
            test_data = preProcess.Standardization(test_data)
        else:
            test_data = preProcess.Normalization(test_data)
        test_num = test_data.shape[0]
        probability = np.zeros([test_num, 1])
        for alpha, clf in self.classifier_set:
            probability += alpha * clf.predict(test_data)
        self.prediction = np.sign(probability)
        self.probability = probability
        if prob:
            return probability
        return self.prediction

    def accuarcy(self, test_label):
        """Return the accuracy (float in [0, 1]) of the last predict() call."""
        test_label = np.expand_dims(test_label, axis=1)
        prediction = self.prediction
        accuarcy = np.sum(prediction == test_label) / len(test_label)
        return accuarcy

    def save(self, filename):
        """Pickle the trained classifier set to ``filename`` (binary mode)."""
        with open(filename, 'wb') as f:
            pickle.dump(self.classifier_set, f)

    def load(self, filename):
        """Load a pickled classifier set from ``filename``; returns self."""
        with open(filename, 'rb') as f:
            self.classifier_set = pickle.load(f)
        return self
| true | true |
f71bc02fee45fb7edf6e131216efbc2de8692361 | 1,173 | py | Python | examples/example2.py | hangvane/lpsolve_wrapper | cff5ffa827c10420bdc6b3ceb5d9a3852c91281d | [
"Apache-2.0"
] | 4 | 2021-01-27T14:47:30.000Z | 2021-05-06T03:16:11.000Z | examples/example2.py | hangvane/lpsolve_wrapper | cff5ffa827c10420bdc6b3ceb5d9a3852c91281d | [
"Apache-2.0"
] | null | null | null | examples/example2.py | hangvane/lpsolve_wrapper | cff5ffa827c10420bdc6b3ceb5d9a3852c91281d | [
"Apache-2.0"
] | 1 | 2022-03-25T13:22:45.000Z | 2022-03-25T13:22:45.000Z | # http://web.mit.edu/lpsolve/doc/Python.htm
# P = (110)(1.30)x + (30)(2.00)y + (125)(1.56) = 143x + 60y + 195z
# 120x + 210y + 150.75z <= 15000
# 110x + 30y + 125z <= 4000
# x + y + z <= 75
# x >= 0, y >= 0, z >= 0
import lpsolve_wrapper as lw
model = lw.Model(
notations={
'x': lw.notation(
lower_bound=0,
),
'y': lw.notation(
lower_bound=0,
),
'z': lw.notation(
lower_bound=0,
)
})
model.add_constr(
coefs=[
lw.coef('x', 120),
lw.coef('y', 210),
lw.coef('z', 150.75),
],
right_value=15000,
constr_type=lw.LEQ
)
model.add_constr(
coefs=[
lw.coef('x', 110),
lw.coef('y', 30),
lw.coef('z', 125),
],
right_value=4000,
constr_type=lw.LEQ
)
model.add_constr(
coefs=[
lw.coef('x', 1),
lw.coef('y', 1),
lw.coef('z', 1),
],
right_value=75,
constr_type=lw.LEQ
)
objective, notation_list = model.lp_solve(
obj_func={
'x': 143,
'y': 60,
'z': 195,
},
minimize=False
)
print('objective:', objective)
print('notations:', notation_list)
| 19.881356 | 66 | 0.491901 |
import lpsolve_wrapper as lw
model = lw.Model(
notations={
'x': lw.notation(
lower_bound=0,
),
'y': lw.notation(
lower_bound=0,
),
'z': lw.notation(
lower_bound=0,
)
})
model.add_constr(
coefs=[
lw.coef('x', 120),
lw.coef('y', 210),
lw.coef('z', 150.75),
],
right_value=15000,
constr_type=lw.LEQ
)
model.add_constr(
coefs=[
lw.coef('x', 110),
lw.coef('y', 30),
lw.coef('z', 125),
],
right_value=4000,
constr_type=lw.LEQ
)
model.add_constr(
coefs=[
lw.coef('x', 1),
lw.coef('y', 1),
lw.coef('z', 1),
],
right_value=75,
constr_type=lw.LEQ
)
objective, notation_list = model.lp_solve(
obj_func={
'x': 143,
'y': 60,
'z': 195,
},
minimize=False
)
print('objective:', objective)
print('notations:', notation_list)
| true | true |
f71bc084b54b7d6c91980ac0cca1fd8f504aca87 | 1,277 | py | Python | ganzige/urls.py | kekehurry/ganzige.site | 4fc2fce6c03b302e115feccae6e02bd9e1e8231d | [
"MIT"
] | null | null | null | ganzige/urls.py | kekehurry/ganzige.site | 4fc2fce6c03b302e115feccae6e02bd9e1e8231d | [
"MIT"
] | null | null | null | ganzige/urls.py | kekehurry/ganzige.site | 4fc2fce6c03b302e115feccae6e02bd9e1e8231d | [
"MIT"
] | null | null | null | """ganzige URL Configuration
The `urlpatterns` list routes URLs to views. For more information please see:
https://docs.djangoproject.com/en/1.8/topics/http/urls/
Examples:
Function views
1. Add an import: from my_app import views
2. Add a URL to urlpatterns: url(r'^$', views.home, name='home')
Class-based views
1. Add an import: from other_app.views import Home
2. Add a URL to urlpatterns: url(r'^$', Home.as_view(), name='home')
Including another URLconf
1. Add an import: from blog import urls as blog_urls
2. Add a URL to urlpatterns: url(r'^blog/', include(blog_urls))
"""
from django.conf.urls import include, url
from django.contrib import admin
from django.conf.urls.static import static
from django.conf import settings
urlpatterns = [
url(r'^$', include('home.urls', namespace='home')),
url(r'webhook/', include('webhook.urls', namespace='webhook')),
url(r'^admin/', include(admin.site.urls)),
url(r'ckeditor/', include('ckeditor_uploader.urls')),
url(r'^blog/', include('blog.urls', namespace='blog')),
url(r'^photo/', include('photo.urls', namespace='photo')),
url(r'^data/', include('data.urls', namespace='data')),
]
urlpatterns += static(settings.MEDIA_URL, document_root=settings.MEDIA_ROOT)
| 38.69697 | 77 | 0.697729 | from django.conf.urls import include, url
from django.contrib import admin
from django.conf.urls.static import static
from django.conf import settings
urlpatterns = [
url(r'^$', include('home.urls', namespace='home')),
url(r'webhook/', include('webhook.urls', namespace='webhook')),
url(r'^admin/', include(admin.site.urls)),
url(r'ckeditor/', include('ckeditor_uploader.urls')),
url(r'^blog/', include('blog.urls', namespace='blog')),
url(r'^photo/', include('photo.urls', namespace='photo')),
url(r'^data/', include('data.urls', namespace='data')),
]
urlpatterns += static(settings.MEDIA_URL, document_root=settings.MEDIA_ROOT)
| true | true |
f71bc16b49327031ea7d379964913ab43a1e9c34 | 3,176 | py | Python | microchain/chain.py | ciknight/microchain | d740571fd84f18bc1b635b2fccb9f349180709fa | [
"MIT"
] | null | null | null | microchain/chain.py | ciknight/microchain | d740571fd84f18bc1b635b2fccb9f349180709fa | [
"MIT"
] | 40 | 2018-07-30T21:14:20.000Z | 2021-06-25T15:15:20.000Z | microchain/chain.py | ciknight/microchain | d740571fd84f18bc1b635b2fccb9f349180709fa | [
"MIT"
] | null | null | null | # -*- coding: utf-8 -*-
import logging
from typing import List
from microchain.block import Block
__all__ = ["Chain"]
class Chain:
_interval = 10 # second
def __init__(self, blocks: List[Block] = None) -> None:
self.blocks = blocks or [Chain.genesis()]
def __len__(self):
return self.length
def __repr__(self):
return f"Chain({repr(self.blocks)})"
@property
def interval(self):
return self._interval
@property
def length(self) -> int:
return len(self.blocks)
@property
def latest_block(self) -> Block:
return self.blocks[-1]
def add_block(self, block: Block) -> bool:
if block.valid is True:
self.blocks.append(block)
return True
return False
@staticmethod
def genesis() -> Block:
args = (0, "0", "Genesis Block")
nonce = 0
# (difficulty 1): 0x00ffff * 2**(8*(0x1d - 3))
target = "0x00000000FFFF0000000000000000000000000000000000000000000000000000"
while True:
block = Block(*args, nonce=nonce, target=target)
if block.valid is True:
break
else:
nonce += 1
return block
@property
def difficulty(self) -> float:
""" Difficulty is Calculate the hash of times.
Url: https://en.bitcoin.it/wiki/Difficulty#How_often_does_the_network_difficulty_change.3F
"""
difficulty_1_target = (
"0x00000000FFFF0000000000000000000000000000000000000000000000000000"
)
return float(int(difficulty_1_target, 16) / int(self.latest_block.target, 16))
@property
def current_target(self) -> str:
""" Retarget """
lb = self.latest_block
# Every 10 blocks change network difficulty, bitcoin is 2016 blocks.
block_count = 10
target_timespan = block_count * self.interval
if self.length % block_count != 0:
return lb.target
else:
ratio_limit = 4
actual_timespan = lb.timestamp - self.blocks[-block_count].timestamp
adjusted_timespan = min(
max(actual_timespan, target_timespan / ratio_limit),
target_timespan * ratio_limit,
)
assert 1 / ratio_limit <= adjusted_timespan / target_timespan <= ratio_limit
logging.info(
f"Retargeting at {self.length}, difficulty change: {target_timespan/adjusted_timespan:.2%}"
)
new_target = int(lb.target, 16) * adjusted_timespan / target_timespan
return f"{int(new_target):x}".rjust(64, "0")
def generate_next(self, data: str) -> Block:
lb = self.latest_block
args = (lb.index + 1, lb.hash, data)
nonce = 0
while True:
new_block = Block(*args, nonce=nonce, target=self.current_target)
if new_block.valid is True:
break
else:
nonce += 1
return new_block
def mine(self, data: str) -> bool:
next_block = self.generate_next(data)
return self.add_block(next_block)
| 30.247619 | 107 | 0.588791 |
import logging
from typing import List
from microchain.block import Block
__all__ = ["Chain"]
class Chain:
_interval = 10
def __init__(self, blocks: List[Block] = None) -> None:
self.blocks = blocks or [Chain.genesis()]
def __len__(self):
return self.length
def __repr__(self):
return f"Chain({repr(self.blocks)})"
@property
def interval(self):
return self._interval
@property
def length(self) -> int:
return len(self.blocks)
@property
def latest_block(self) -> Block:
return self.blocks[-1]
def add_block(self, block: Block) -> bool:
if block.valid is True:
self.blocks.append(block)
return True
return False
@staticmethod
def genesis() -> Block:
args = (0, "0", "Genesis Block")
nonce = 0
target = "0x00000000FFFF0000000000000000000000000000000000000000000000000000"
while True:
block = Block(*args, nonce=nonce, target=target)
if block.valid is True:
break
else:
nonce += 1
return block
@property
def difficulty(self) -> float:
difficulty_1_target = (
"0x00000000FFFF0000000000000000000000000000000000000000000000000000"
)
return float(int(difficulty_1_target, 16) / int(self.latest_block.target, 16))
@property
def current_target(self) -> str:
lb = self.latest_block
block_count = 10
target_timespan = block_count * self.interval
if self.length % block_count != 0:
return lb.target
else:
ratio_limit = 4
actual_timespan = lb.timestamp - self.blocks[-block_count].timestamp
adjusted_timespan = min(
max(actual_timespan, target_timespan / ratio_limit),
target_timespan * ratio_limit,
)
assert 1 / ratio_limit <= adjusted_timespan / target_timespan <= ratio_limit
logging.info(
f"Retargeting at {self.length}, difficulty change: {target_timespan/adjusted_timespan:.2%}"
)
new_target = int(lb.target, 16) * adjusted_timespan / target_timespan
return f"{int(new_target):x}".rjust(64, "0")
def generate_next(self, data: str) -> Block:
lb = self.latest_block
args = (lb.index + 1, lb.hash, data)
nonce = 0
while True:
new_block = Block(*args, nonce=nonce, target=self.current_target)
if new_block.valid is True:
break
else:
nonce += 1
return new_block
def mine(self, data: str) -> bool:
next_block = self.generate_next(data)
return self.add_block(next_block)
| true | true |
f71bc1a8abff19dc9122898c70187b1df427ad48 | 5,311 | py | Python | duty/my_signals/templates/template.py | ximetov/IrCa-Duty | 666c2d26c9cd7d314798cfb222ad91dfeee4a5b6 | [
"MIT"
] | 6 | 2020-05-18T21:53:27.000Z | 2020-07-06T12:48:00.000Z | duty/my_signals/templates/template.py | ximetov/IrCa-Duty | 666c2d26c9cd7d314798cfb222ad91dfeee4a5b6 | [
"MIT"
] | null | null | null | duty/my_signals/templates/template.py | ximetov/IrCa-Duty | 666c2d26c9cd7d314798cfb222ad91dfeee4a5b6 | [
"MIT"
] | 6 | 2020-05-13T16:16:15.000Z | 2020-06-23T12:05:09.000Z | import re
from typing import Tuple
from duty.utils import att_parse, format_response
from duty.objects import MySignalEvent, dp
def delete_template(name: str, templates: list) -> Tuple[list, bool]:
for template in templates:
if template['name'].lower() == name:
templates.remove(template)
return templates, True
return templates, False
def get_template_list(event: MySignalEvent, templates: list):
if len(event.args) > 1:
if event.args[-1].isdigit() or (event.args[-1].startswith('-') and event.args[-1][1:].isdigit()):
page = int(event.args.pop(-1))
if page > 0:
page -= 1
else:
page = 0
category = ' '.join(event.args).lower()
template_list = None
if not category:
cats = {}
for t in templates:
cats[t['cat']] = cats.get(t['cat'], 0) + 1
message = "📚 Категории {name_genitive}:"
for cat in cats:
message += f"\n-- {cat} ({cats[cat]})"
else:
if category == 'все':
message = '📃 Список всех {name_genitive}:'
category = None
else:
message = f'📖 {{name_accusative_cap}} категории "{category}":'
message += list_by_page(templates, page, category)
if '\n' not in message:
if templates == []:
message = '{no_templates}'
else:
message = '⚠️ {name_accusative_cap} по указанному запросу не найдены'
return message
def list_by_page(templates, page, category) -> str:
if len(templates) > 40:
if page >= 0:
message = f'(страница #{page+1})'
else:
message = f'(страница #{abs(page)} с конца)'
else:
message = ''
shift = page*40
sliced_list = templates[shift:shift+40] if shift >= 0 else templates[shift-1:shift+39]
if page < 0:
try:
sliced_list.append(templates[shift+39])
except IndexError:
pass
offset = (shift+1) if shift >= 0 else (len(templates)+shift)
for i, t in enumerate(sliced_list, offset):
if category:
if t['cat'] != category:
continue
message += f'\n-- {t["name"]}'
else:
message += f'\n{i}. {t["name"]} | {t["cat"]}'
if '\n' not in message:
return ''
return '\n' + message
@dp.longpoll_event_register('+шаб')
@dp.my_signal_event_register('+шаб')
def template_create(event: MySignalEvent) -> str:
name = re.findall(r"([^|]+)\|?([^|]*)", ' '.join(event.args))
if not name:
event.msg_op(2, "❗ Не указано название")
return "ok"
category = name[0][1].lower().strip() or 'без категории'
name = name[0][0].lower().strip()
if category == 'все':
event.msg_op(2, '❗ Невозможно создать шаблон с категорией "все"')
return "ok"
if not (event.payload or event.attachments or event.reply_message):
event.msg_op(2, "❗ Нет данных")
return "ok"
if event.reply_message:
data = event.reply_message['text']
event.attachments = att_parse(event.reply_message['attachments'])
if event.attachments:
if event.attachments[0].startswith('audio_message'):
event.msg_op(2, '⚠️ Для сохранения ГС используй команду "+гс"')
return "ok"
else:
data = event.payload
event.db.templates, exist = delete_template(name, event.db.templates)
event.db.templates.append({
"name": name,
"payload": data,
"cat": category,
"attachments": event.attachments
})
event.msg_op(2, f'✅ Шаблон "{name}" ' +
("перезаписан" if exist else "сохранен"), delete=2)
return "ok"
@dp.longpoll_event_register('шабы')
@dp.my_signal_event_register('шабы')
def template_list(event: MySignalEvent) -> str:
message = get_template_list(event, event.db.templates)
event.msg_op(2, format_response(message,
name_genitive='шаблонов',
name_accusative='шаблоны',
name_accusative_cap='Шаблоны',
no_templates='👀 Нет ни одного шаблона... Для создания используй команду "+шаб"'
))
return "ok"
def get_name(event: MySignalEvent) -> Tuple[MySignalEvent, str]:
return event, ' '.join(event.args).lower()
@dp.longpoll_event_register('-шаб')
@dp.my_signal_event_register('-шаб')
@dp.wrap_handler(get_name)
def template_delete(event: MySignalEvent, name: str) -> str:
event.db.templates, exist = delete_template(name, event.db.templates)
if exist:
msg = f'✅ Шаблон "{name}" удален'
else:
msg = f'⚠️ Шаблон "{name}" не найден'
event.msg_op(2, msg, delete=1)
return "ok"
@dp.longpoll_event_register('шаб')
@dp.my_signal_event_register('шаб')
@dp.wrap_handler(get_name)
def template_show(event: MySignalEvent, name: str) -> str:
template = None
for temp in event.db.templates:
if temp['name'] == name:
template = temp
break
if template:
atts = template['attachments']
atts.extend(event.attachments)
event.msg_op(2, temp['payload'] + '\n' + event.payload,
keep_forward_messages=1, attachment=','.join(atts))
else:
event.msg_op(2, f'❗ Шаблон "{name}" не найден')
return "ok"
| 32.384146 | 105 | 0.591226 | import re
from typing import Tuple
from duty.utils import att_parse, format_response
from duty.objects import MySignalEvent, dp
def delete_template(name: str, templates: list) -> Tuple[list, bool]:
for template in templates:
if template['name'].lower() == name:
templates.remove(template)
return templates, True
return templates, False
def get_template_list(event: MySignalEvent, templates: list):
if len(event.args) > 1:
if event.args[-1].isdigit() or (event.args[-1].startswith('-') and event.args[-1][1:].isdigit()):
page = int(event.args.pop(-1))
if page > 0:
page -= 1
else:
page = 0
category = ' '.join(event.args).lower()
template_list = None
if not category:
cats = {}
for t in templates:
cats[t['cat']] = cats.get(t['cat'], 0) + 1
message = "📚 Категории {name_genitive}:"
for cat in cats:
message += f"\n-- {cat} ({cats[cat]})"
else:
if category == 'все':
message = '📃 Список всех {name_genitive}:'
category = None
else:
message = f'📖 {{name_accusative_cap}} категории "{category}":'
message += list_by_page(templates, page, category)
if '\n' not in message:
if templates == []:
message = '{no_templates}'
else:
message = '⚠️ {name_accusative_cap} по указанному запросу не найдены'
return message
def list_by_page(templates, page, category) -> str:
if len(templates) > 40:
if page >= 0:
message = f'(страница #{page+1})'
else:
message = f'(страница #{abs(page)} с конца)'
else:
message = ''
shift = page*40
sliced_list = templates[shift:shift+40] if shift >= 0 else templates[shift-1:shift+39]
if page < 0:
try:
sliced_list.append(templates[shift+39])
except IndexError:
pass
offset = (shift+1) if shift >= 0 else (len(templates)+shift)
for i, t in enumerate(sliced_list, offset):
if category:
if t['cat'] != category:
continue
message += f'\n-- {t["name"]}'
else:
message += f'\n{i}. {t["name"]} | {t["cat"]}'
if '\n' not in message:
return ''
return '\n' + message
@dp.longpoll_event_register('+шаб')
@dp.my_signal_event_register('+шаб')
def template_create(event: MySignalEvent) -> str:
name = re.findall(r"([^|]+)\|?([^|]*)", ' '.join(event.args))
if not name:
event.msg_op(2, "❗ Не указано название")
return "ok"
category = name[0][1].lower().strip() or 'без категории'
name = name[0][0].lower().strip()
if category == 'все':
event.msg_op(2, '❗ Невозможно создать шаблон с категорией "все"')
return "ok"
if not (event.payload or event.attachments or event.reply_message):
event.msg_op(2, "❗ Нет данных")
return "ok"
if event.reply_message:
data = event.reply_message['text']
event.attachments = att_parse(event.reply_message['attachments'])
if event.attachments:
if event.attachments[0].startswith('audio_message'):
event.msg_op(2, '⚠️ Для сохранения ГС используй команду "+гс"')
return "ok"
else:
data = event.payload
event.db.templates, exist = delete_template(name, event.db.templates)
event.db.templates.append({
"name": name,
"payload": data,
"cat": category,
"attachments": event.attachments
})
event.msg_op(2, f'✅ Шаблон "{name}" ' +
("перезаписан" if exist else "сохранен"), delete=2)
return "ok"
@dp.longpoll_event_register('шабы')
@dp.my_signal_event_register('шабы')
def template_list(event: MySignalEvent) -> str:
message = get_template_list(event, event.db.templates)
event.msg_op(2, format_response(message,
name_genitive='шаблонов',
name_accusative='шаблоны',
name_accusative_cap='Шаблоны',
no_templates='👀 Нет ни одного шаблона... Для создания используй команду "+шаб"'
))
return "ok"
def get_name(event: MySignalEvent) -> Tuple[MySignalEvent, str]:
return event, ' '.join(event.args).lower()
@dp.longpoll_event_register('-шаб')
@dp.my_signal_event_register('-шаб')
@dp.wrap_handler(get_name)
def template_delete(event: MySignalEvent, name: str) -> str:
event.db.templates, exist = delete_template(name, event.db.templates)
if exist:
msg = f'✅ Шаблон "{name}" удален'
else:
msg = f'⚠️ Шаблон "{name}" не найден'
event.msg_op(2, msg, delete=1)
return "ok"
@dp.longpoll_event_register('шаб')
@dp.my_signal_event_register('шаб')
@dp.wrap_handler(get_name)
def template_show(event: MySignalEvent, name: str) -> str:
template = None
for temp in event.db.templates:
if temp['name'] == name:
template = temp
break
if template:
atts = template['attachments']
atts.extend(event.attachments)
event.msg_op(2, temp['payload'] + '\n' + event.payload,
keep_forward_messages=1, attachment=','.join(atts))
else:
event.msg_op(2, f'❗ Шаблон "{name}" не найден')
return "ok"
| true | true |
f71bc2338b924e040d6847a96eaeabd0904e440e | 405 | py | Python | env/Lib/site-packages/plotly/validators/scatter3d/marker/_sizeref.py | andresgreen-byte/Laboratorio-1--Inversion-de-Capital | 8a4707301d19c3826c31026c4077930bcd6a8182 | [
"MIT"
] | 11,750 | 2015-10-12T07:03:39.000Z | 2022-03-31T20:43:15.000Z | env/Lib/site-packages/plotly/validators/scatter3d/marker/_sizeref.py | andresgreen-byte/Laboratorio-1--Inversion-de-Capital | 8a4707301d19c3826c31026c4077930bcd6a8182 | [
"MIT"
] | 2,951 | 2015-10-12T00:41:25.000Z | 2022-03-31T22:19:26.000Z | env/Lib/site-packages/plotly/validators/scatter3d/marker/_sizeref.py | andresgreen-byte/Laboratorio-1--Inversion-de-Capital | 8a4707301d19c3826c31026c4077930bcd6a8182 | [
"MIT"
] | 2,623 | 2015-10-15T14:40:27.000Z | 2022-03-28T16:05:50.000Z | import _plotly_utils.basevalidators
class SizerefValidator(_plotly_utils.basevalidators.NumberValidator):
def __init__(self, plotly_name="sizeref", parent_name="scatter3d.marker", **kwargs):
super(SizerefValidator, self).__init__(
plotly_name=plotly_name,
parent_name=parent_name,
edit_type=kwargs.pop("edit_type", "calc"),
**kwargs
)
| 33.75 | 88 | 0.679012 | import _plotly_utils.basevalidators
class SizerefValidator(_plotly_utils.basevalidators.NumberValidator):
def __init__(self, plotly_name="sizeref", parent_name="scatter3d.marker", **kwargs):
super(SizerefValidator, self).__init__(
plotly_name=plotly_name,
parent_name=parent_name,
edit_type=kwargs.pop("edit_type", "calc"),
**kwargs
)
| true | true |
f71bc27ffeefbaa07e53e1ac517b08578d7e83f3 | 35,940 | py | Python | tools/run_tests/xds_k8s_test_driver/framework/infrastructure/traffic_director.py | lungati/grpc | cd9730d2d28626c57431253208f23507d466c825 | [
"BSD-3-Clause"
] | 5 | 2019-11-12T04:30:55.000Z | 2021-08-11T23:04:12.000Z | tools/run_tests/xds_k8s_test_driver/framework/infrastructure/traffic_director.py | bwncp/grpc | 779701ab76c552affa9f5c7815c2b598c996ea54 | [
"Apache-2.0"
] | 10 | 2015-03-03T06:51:51.000Z | 2022-03-23T14:10:56.000Z | tools/run_tests/xds_k8s_test_driver/framework/infrastructure/traffic_director.py | bwncp/grpc | 779701ab76c552affa9f5c7815c2b598c996ea54 | [
"Apache-2.0"
] | 1 | 2015-08-22T15:20:59.000Z | 2015-08-22T15:20:59.000Z | # Copyright 2020 gRPC authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import functools
import logging
import random
from typing import Any, Dict, List, Optional, Set
from framework import xds_flags
from framework.infrastructure import gcp
logger = logging.getLogger(__name__)
# Type aliases
# Compute
# Short aliases for the GCP Compute v1 wrapper and its nested helper types.
_ComputeV1 = gcp.compute.ComputeV1
GcpResource = _ComputeV1.GcpResource
HealthCheckProtocol = _ComputeV1.HealthCheckProtocol
ZonalGcpResource = _ComputeV1.ZonalGcpResource
BackendServiceProtocol = _ComputeV1.BackendServiceProtocol
# Protocols used as defaults when the caller does not specify one.
_BackendGRPC = BackendServiceProtocol.GRPC
_HealthCheckGRPC = HealthCheckProtocol.GRPC
# Network Security
_NetworkSecurityV1Beta1 = gcp.network_security.NetworkSecurityV1Beta1
ServerTlsPolicy = gcp.network_security.ServerTlsPolicy
ClientTlsPolicy = gcp.network_security.ClientTlsPolicy
AuthorizationPolicy = gcp.network_security.AuthorizationPolicy
# Network Services
_NetworkServicesV1Alpha1 = gcp.network_services.NetworkServicesV1Alpha1
_NetworkServicesV1Beta1 = gcp.network_services.NetworkServicesV1Beta1
EndpointPolicy = gcp.network_services.EndpointPolicy
# Testing metadata consts
# Metadata key used by tests exercising header-based (affinity) routing.
TEST_AFFINITY_METADATA_KEY = 'xds_md'
class TrafficDirectorManager:
    """Creates and cleans up GCP resources for a Traffic Director setup.

    Manages the health check, backend service(s), URL map, target proxy,
    and forwarding rule used by the xDS test driver. Resource names are
    derived from ``resource_prefix`` and ``resource_suffix`` via
    ``make_resource_name()``.
    """
    compute: _ComputeV1
    resource_prefix: str
    resource_suffix: str
    # Base names of managed resources; the full names are produced by
    # make_resource_name() by adding the resource prefix and suffix.
    BACKEND_SERVICE_NAME = "backend-service"
    ALTERNATIVE_BACKEND_SERVICE_NAME = "backend-service-alt"
    AFFINITY_BACKEND_SERVICE_NAME = "backend-service-affinity"
    HEALTH_CHECK_NAME = "health-check"
    URL_MAP_NAME = "url-map"
    URL_MAP_PATH_MATCHER_NAME = "path-matcher"
    TARGET_PROXY_NAME = "target-proxy"
    FORWARDING_RULE_NAME = "forwarding-rule"
    FIREWALL_RULE_NAME = "allow-health-checks"
    def __init__(
        self,
        gcp_api_manager: gcp.api.GcpApiManager,
        project: str,
        *,
        resource_prefix: str,
        resource_suffix: str,
        network: str = 'default',
        compute_api_version: str = 'v1',
    ):
        """Initialize the API client, naming settings, and empty resource state.

        Args:
            gcp_api_manager: Factory for GCP API clients.
            project: GCP project id the managed resources live in.
            resource_prefix: Prefix for the names of all managed resources.
            resource_suffix: Suffix for the names of all managed resources;
                may be an empty string.
            network: Name of the GCP network used by managed resources.
            compute_api_version: Compute API version; 'v1' by default.
        """
        # API
        self.compute = _ComputeV1(gcp_api_manager,
                                  project,
                                  version=compute_api_version)
        # Settings
        self.project: str = project
        self.network: str = network
        self.resource_prefix: str = resource_prefix
        self.resource_suffix: str = resource_suffix
        # Managed resources. All start as None (or empty sets) and are
        # populated by the create_*/load_* methods.
        self.health_check: Optional[GcpResource] = None
        self.backend_service: Optional[GcpResource] = None
        # TODO(sergiitk): remove this flag once backend service resource loaded
        self.backend_service_protocol: Optional[BackendServiceProtocol] = None
        self.url_map: Optional[GcpResource] = None
        self.firewall_rule: Optional[GcpResource] = None
        self.target_proxy: Optional[GcpResource] = None
        # TODO(sergiitk): remove this flag once target proxy resource loaded
        self.target_proxy_is_http: bool = False
        self.forwarding_rule: Optional[GcpResource] = None
        self.backends: Set[ZonalGcpResource] = set()
        self.alternative_backend_service: Optional[GcpResource] = None
        # TODO(sergiitk): remove this flag once backend service resource loaded
        self.alternative_backend_service_protocol: Optional[
            BackendServiceProtocol] = None
        self.alternative_backends: Set[ZonalGcpResource] = set()
        self.affinity_backend_service: Optional[GcpResource] = None
        # TODO(sergiitk): remove this flag once backend service resource loaded
        self.affinity_backend_service_protocol: Optional[
            BackendServiceProtocol] = None
        self.affinity_backends: Set[ZonalGcpResource] = set()
@property
def network_url(self):
return f'global/networks/{self.network}'
    def setup_for_grpc(
            self,
            service_host,
            service_port,
            *,
            backend_protocol: Optional[BackendServiceProtocol] = _BackendGRPC,
            health_check_port: Optional[int] = None):
        """Create the full set of resources for a gRPC service.

        Convenience wrapper: creates the backend-side resources (health
        check, backend service), then the routing-side resources (URL map,
        target proxy, forwarding rule).

        Args:
            service_host: Host matched by the URL map.
            service_port: Port of the service; also used by the forwarding
                rule.
            backend_protocol: Backend service protocol; gRPC by default.
            health_check_port: Optional port for the health check; None uses
                the compute API default.
        """
        self.setup_backend_for_grpc(protocol=backend_protocol,
                                    health_check_port=health_check_port)
        self.setup_routing_rule_map_for_grpc(service_host, service_port)
    def setup_backend_for_grpc(
            self,
            *,
            protocol: Optional[BackendServiceProtocol] = _BackendGRPC,
            health_check_port: Optional[int] = None):
        """Create backend-side resources: health check and backend service.

        Args:
            protocol: Backend service protocol; gRPC by default.
            health_check_port: Optional port for the health check; None uses
                the compute API default.
        """
        self.create_health_check(port=health_check_port)
        self.create_backend_service(protocol)
    def setup_routing_rule_map_for_grpc(self, service_host, service_port):
        """Create routing-side resources: URL map, target proxy, forwarding rule.

        Args:
            service_host: Host matched by the URL map.
            service_port: Port matched by the URL map and bound by the
                forwarding rule.
        """
        self.create_url_map(service_host, service_port)
        self.create_target_proxy()
        self.create_forwarding_rule(service_port)
    def cleanup(self, *, force=False):
        """Delete all managed resources.

        Args:
            force: If True, delete resources by their computed names even if
                this manager instance did not create (or no longer tracks)
                them.
        """
        # Cleanup in the reverse order of creation: dependents first, so GCP
        # does not reject deletion of a resource that is still referenced.
        self.delete_forwarding_rule(force=force)
        self.delete_target_http_proxy(force=force)
        self.delete_target_grpc_proxy(force=force)
        self.delete_url_map(force=force)
        self.delete_backend_service(force=force)
        self.delete_alternative_backend_service(force=force)
        self.delete_affinity_backend_service(force=force)
        self.delete_health_check(force=force)
@functools.lru_cache(None)
def make_resource_name(self, name: str) -> str:
"""Make dash-separated resource name with resource prefix and suffix."""
parts = [self.resource_prefix, name]
# Avoid trailing dash when the suffix is empty.
if self.resource_suffix:
parts.append(self.resource_suffix)
return '-'.join(parts)
def create_health_check(
self,
*,
protocol: Optional[HealthCheckProtocol] = _HealthCheckGRPC,
port: Optional[int] = None):
if self.health_check:
raise ValueError(f'Health check {self.health_check.name} '
'already created, delete it first')
if protocol is None:
protocol = _HealthCheckGRPC
name = self.make_resource_name(self.HEALTH_CHECK_NAME)
logger.info('Creating %s Health Check "%s"', protocol.name, name)
resource = self.compute.create_health_check(name, protocol, port=port)
self.health_check = resource
def delete_health_check(self, force=False):
if force:
name = self.make_resource_name(self.HEALTH_CHECK_NAME)
elif self.health_check:
name = self.health_check.name
else:
return
logger.info('Deleting Health Check "%s"', name)
self.compute.delete_health_check(name)
self.health_check = None
def create_backend_service(
self,
protocol: Optional[BackendServiceProtocol] = _BackendGRPC,
subset_size: Optional[int] = None,
affinity_header: Optional[str] = None):
if protocol is None:
protocol = _BackendGRPC
name = self.make_resource_name(self.BACKEND_SERVICE_NAME)
logger.info('Creating %s Backend Service "%s"', protocol.name, name)
resource = self.compute.create_backend_service_traffic_director(
name,
health_check=self.health_check,
protocol=protocol,
subset_size=subset_size,
affinity_header=affinity_header)
self.backend_service = resource
self.backend_service_protocol = protocol
def load_backend_service(self):
name = self.make_resource_name(self.BACKEND_SERVICE_NAME)
resource = self.compute.get_backend_service_traffic_director(name)
self.backend_service = resource
def delete_backend_service(self, force=False):
if force:
name = self.make_resource_name(self.BACKEND_SERVICE_NAME)
elif self.backend_service:
name = self.backend_service.name
else:
return
logger.info('Deleting Backend Service "%s"', name)
self.compute.delete_backend_service(name)
self.backend_service = None
def backend_service_add_neg_backends(self,
name,
zones,
max_rate_per_endpoint: Optional[
int] = None):
logger.info('Waiting for Network Endpoint Groups to load endpoints.')
for zone in zones:
backend = self.compute.wait_for_network_endpoint_group(name, zone)
logger.info('Loaded NEG "%s" in zone %s', backend.name,
backend.zone)
self.backends.add(backend)
self.backend_service_patch_backends(max_rate_per_endpoint)
def backend_service_remove_neg_backends(self, name, zones):
logger.info('Waiting for Network Endpoint Groups to load endpoints.')
for zone in zones:
backend = self.compute.wait_for_network_endpoint_group(name, zone)
logger.info('Loaded NEG "%s" in zone %s', backend.name,
backend.zone)
self.backends.remove(backend)
self.backend_service_patch_backends()
def backend_service_patch_backends(
self, max_rate_per_endpoint: Optional[int] = None):
logging.info('Adding backends to Backend Service %s: %r',
self.backend_service.name, self.backends)
self.compute.backend_service_patch_backends(self.backend_service,
self.backends,
max_rate_per_endpoint)
def backend_service_remove_all_backends(self):
logging.info('Removing backends from Backend Service %s',
self.backend_service.name)
self.compute.backend_service_remove_all_backends(self.backend_service)
    def wait_for_backends_healthy_status(self):
        """Block until every tracked backend reports healthy status."""
        logger.debug(
            "Waiting for Backend Service %s to report all backends healthy %r",
            self.backend_service, self.backends)
        self.compute.wait_for_backends_healthy_status(self.backend_service,
                                                      self.backends)
def create_alternative_backend_service(
self, protocol: Optional[BackendServiceProtocol] = _BackendGRPC):
if protocol is None:
protocol = _BackendGRPC
name = self.make_resource_name(self.ALTERNATIVE_BACKEND_SERVICE_NAME)
logger.info('Creating %s Alternative Backend Service "%s"',
protocol.name, name)
resource = self.compute.create_backend_service_traffic_director(
name, health_check=self.health_check, protocol=protocol)
self.alternative_backend_service = resource
self.alternative_backend_service_protocol = protocol
def load_alternative_backend_service(self):
name = self.make_resource_name(self.ALTERNATIVE_BACKEND_SERVICE_NAME)
resource = self.compute.get_backend_service_traffic_director(name)
self.alternative_backend_service = resource
def delete_alternative_backend_service(self, force=False):
if force:
name = self.make_resource_name(
self.ALTERNATIVE_BACKEND_SERVICE_NAME)
elif self.alternative_backend_service:
name = self.alternative_backend_service.name
else:
return
logger.info('Deleting Alternative Backend Service "%s"', name)
self.compute.delete_backend_service(name)
self.alternative_backend_service = None
def alternative_backend_service_add_neg_backends(self, name, zones):
logger.info('Waiting for Network Endpoint Groups to load endpoints.')
for zone in zones:
backend = self.compute.wait_for_network_endpoint_group(name, zone)
logger.info('Loaded NEG "%s" in zone %s', backend.name,
backend.zone)
self.alternative_backends.add(backend)
self.alternative_backend_service_patch_backends()
def alternative_backend_service_patch_backends(self):
logging.info('Adding backends to Backend Service %s: %r',
self.alternative_backend_service.name,
self.alternative_backends)
self.compute.backend_service_patch_backends(
self.alternative_backend_service, self.alternative_backends)
def alternative_backend_service_remove_all_backends(self):
logging.info('Removing backends from Backend Service %s',
self.alternative_backend_service.name)
self.compute.backend_service_remove_all_backends(
self.alternative_backend_service)
def wait_for_alternative_backends_healthy_status(self):
logger.debug(
"Waiting for Backend Service %s to report all backends healthy %r",
self.alternative_backend_service, self.alternative_backends)
self.compute.wait_for_backends_healthy_status(
self.alternative_backend_service, self.alternative_backends)
def create_affinity_backend_service(
self, protocol: Optional[BackendServiceProtocol] = _BackendGRPC):
if protocol is None:
protocol = _BackendGRPC
name = self.make_resource_name(self.AFFINITY_BACKEND_SERVICE_NAME)
logger.info('Creating %s Affinity Backend Service "%s"', protocol.name,
name)
resource = self.compute.create_backend_service_traffic_director(
name,
health_check=self.health_check,
protocol=protocol,
affinity_header=TEST_AFFINITY_METADATA_KEY)
self.affinity_backend_service = resource
self.affinity_backend_service_protocol = protocol
def load_affinity_backend_service(self):
name = self.make_resource_name(self.AFFINITY_BACKEND_SERVICE_NAME)
resource = self.compute.get_backend_service_traffic_director(name)
self.affinity_backend_service = resource
def delete_affinity_backend_service(self, force=False):
if force:
name = self.make_resource_name(self.AFFINITY_BACKEND_SERVICE_NAME)
elif self.affinity_backend_service:
name = self.affinity_backend_service.name
else:
return
logger.info('Deleting Affinity Backend Service "%s"', name)
self.compute.delete_backend_service(name)
self.affinity_backend_service = None
def affinity_backend_service_add_neg_backends(self, name, zones):
logger.info('Waiting for Network Endpoint Groups to load endpoints.')
for zone in zones:
backend = self.compute.wait_for_network_endpoint_group(name, zone)
logger.info('Loaded NEG "%s" in zone %s', backend.name,
backend.zone)
self.affinity_backends.add(backend)
self.affinity_backend_service_patch_backends()
def affinity_backend_service_patch_backends(self):
logging.info('Adding backends to Backend Service %s: %r',
self.affinity_backend_service.name, self.affinity_backends)
self.compute.backend_service_patch_backends(
self.affinity_backend_service, self.affinity_backends)
def affinity_backend_service_remove_all_backends(self):
logging.info('Removing backends from Backend Service %s',
self.affinity_backend_service.name)
self.compute.backend_service_remove_all_backends(
self.affinity_backend_service)
def wait_for_affinity_backends_healthy_status(self):
logger.debug(
"Waiting for Backend Service %s to report all backends healthy %r",
self.affinity_backend_service, self.affinity_backends)
self.compute.wait_for_backends_healthy_status(
self.affinity_backend_service, self.affinity_backends)
def _generate_url_map_body(
self,
name: str,
matcher_name: str,
src_hosts,
dst_default_backend_service: GcpResource,
dst_host_rule_match_backend_service: Optional[GcpResource] = None,
) -> Dict[str, Any]:
if dst_host_rule_match_backend_service is None:
dst_host_rule_match_backend_service = dst_default_backend_service
return {
'name':
name,
'defaultService':
dst_default_backend_service.url,
'hostRules': [{
'hosts': src_hosts,
'pathMatcher': matcher_name,
}],
'pathMatchers': [{
'name': matcher_name,
'defaultService': dst_host_rule_match_backend_service.url,
}],
}
def create_url_map(
self,
src_host: str,
src_port: int,
) -> GcpResource:
src_address = f'{src_host}:{src_port}'
name = self.make_resource_name(self.URL_MAP_NAME)
matcher_name = self.make_resource_name(self.URL_MAP_PATH_MATCHER_NAME)
logger.info('Creating URL map "%s": %s -> %s', name, src_address,
self.backend_service.name)
resource = self.compute.create_url_map_with_content(
self._generate_url_map_body(name, matcher_name, [src_address],
self.backend_service))
self.url_map = resource
return resource
def patch_url_map(self, src_host: str, src_port: int,
backend_service: GcpResource):
src_address = f'{src_host}:{src_port}'
name = self.make_resource_name(self.URL_MAP_NAME)
matcher_name = self.make_resource_name(self.URL_MAP_PATH_MATCHER_NAME)
logger.info('Patching URL map "%s": %s -> %s', name, src_address,
backend_service.name)
self.compute.patch_url_map(
self.url_map,
self._generate_url_map_body(name, matcher_name, [src_address],
backend_service))
def create_url_map_with_content(self, url_map_body: Any) -> GcpResource:
logger.info('Creating URL map: %s', url_map_body)
resource = self.compute.create_url_map_with_content(url_map_body)
self.url_map = resource
return resource
def delete_url_map(self, force=False):
if force:
name = self.make_resource_name(self.URL_MAP_NAME)
elif self.url_map:
name = self.url_map.name
else:
return
logger.info('Deleting URL Map "%s"', name)
self.compute.delete_url_map(name)
self.url_map = None
def create_target_proxy(self):
name = self.make_resource_name(self.TARGET_PROXY_NAME)
if self.backend_service_protocol is BackendServiceProtocol.GRPC:
target_proxy_type = 'GRPC'
create_proxy_fn = self.compute.create_target_grpc_proxy
self.target_proxy_is_http = False
elif self.backend_service_protocol is BackendServiceProtocol.HTTP2:
target_proxy_type = 'HTTP'
create_proxy_fn = self.compute.create_target_http_proxy
self.target_proxy_is_http = True
else:
raise TypeError('Unexpected backend service protocol')
logger.info('Creating target %s proxy "%s" to URL map %s', name,
target_proxy_type, self.url_map.name)
self.target_proxy = create_proxy_fn(name, self.url_map)
def delete_target_grpc_proxy(self, force=False):
if force:
name = self.make_resource_name(self.TARGET_PROXY_NAME)
elif self.target_proxy:
name = self.target_proxy.name
else:
return
logger.info('Deleting Target GRPC proxy "%s"', name)
self.compute.delete_target_grpc_proxy(name)
self.target_proxy = None
self.target_proxy_is_http = False
def delete_target_http_proxy(self, force=False):
if force:
name = self.make_resource_name(self.TARGET_PROXY_NAME)
elif self.target_proxy and self.target_proxy_is_http:
name = self.target_proxy.name
else:
return
logger.info('Deleting HTTP Target proxy "%s"', name)
self.compute.delete_target_http_proxy(name)
self.target_proxy = None
self.target_proxy_is_http = False
def find_unused_forwarding_rule_port(
self,
*,
lo: int = 1024, # To avoid confusion, skip well-known ports.
hi: int = 65535,
attempts: int = 25) -> int:
for attempts in range(attempts):
src_port = random.randint(lo, hi)
if not (self.compute.exists_forwarding_rule(src_port)):
return src_port
# TODO(sergiitk): custom exception
raise RuntimeError("Couldn't find unused forwarding rule port")
def create_forwarding_rule(self, src_port: int):
name = self.make_resource_name(self.FORWARDING_RULE_NAME)
src_port = int(src_port)
logging.info(
'Creating forwarding rule "%s" in network "%s": 0.0.0.0:%s -> %s',
name, self.network, src_port, self.target_proxy.url)
resource = self.compute.create_forwarding_rule(name, src_port,
self.target_proxy,
self.network_url)
self.forwarding_rule = resource
return resource
def delete_forwarding_rule(self, force=False):
if force:
name = self.make_resource_name(self.FORWARDING_RULE_NAME)
elif self.forwarding_rule:
name = self.forwarding_rule.name
else:
return
logger.info('Deleting Forwarding rule "%s"', name)
self.compute.delete_forwarding_rule(name)
self.forwarding_rule = None
def create_firewall_rule(self, allowed_ports: List[str]):
name = self.make_resource_name(self.FIREWALL_RULE_NAME)
logging.info(
'Creating firewall rule "%s" in network "%s" with allowed ports %s',
name, self.network, allowed_ports)
resource = self.compute.create_firewall_rule(
name, self.network_url, xds_flags.FIREWALL_SOURCE_RANGE.value,
allowed_ports)
self.firewall_rule = resource
def delete_firewall_rule(self, force=False):
"""The firewall rule won't be automatically removed."""
if force:
name = self.make_resource_name(self.FIREWALL_RULE_NAME)
elif self.firewall_rule:
name = self.firewall_rule.name
else:
return
logger.info('Deleting Firewall Rule "%s"', name)
self.compute.delete_firewall_rule(name)
self.firewall_rule = None
class TrafficDirectorAppNetManager(TrafficDirectorManager):
    """Traffic Director manager variant using networkservices v1alpha1
    Router/GrpcRoute resources for routing instead of the classic
    URL map + target proxy + forwarding rule chain."""
    GRPC_ROUTE_NAME = "grpc-route"
    ROUTER_NAME = "router"
    # Network Services API client (v1alpha1).
    netsvc: _NetworkServicesV1Alpha1
    def __init__(self,
                 gcp_api_manager: gcp.api.GcpApiManager,
                 project: str,
                 *,
                 resource_prefix: str,
                 config_scope: str,
                 resource_suffix: Optional[str] = None,
                 network: str = 'default',
                 compute_api_version: str = 'v1'):
        super().__init__(gcp_api_manager,
                         project,
                         resource_prefix=resource_prefix,
                         resource_suffix=resource_suffix,
                         network=network,
                         compute_api_version=compute_api_version)
        # Config scope used when creating the Router resource.
        self.config_scope = config_scope
        # API
        self.netsvc = _NetworkServicesV1Alpha1(gcp_api_manager, project)
        # Managed resources
        self.grpc_route: Optional[_NetworkServicesV1Alpha1.GrpcRoute] = None
        self.router: Optional[_NetworkServicesV1Alpha1.Router] = None
    def create_router(self) -> GcpResource:
        """Create a proxyless-gRPC Router, cache the loaded resource on
        self.router, and return the creation result."""
        name = self.make_resource_name(self.ROUTER_NAME)
        logger.info("Creating Router %s", name)
        body = {
            "type": "PROXYLESS_GRPC",
            "scope": self.config_scope,
        }
        resource = self.netsvc.create_router(name, body)
        # Re-read the resource so self.router carries the server-side view.
        self.router = self.netsvc.get_router(name)
        logger.debug("Loaded Router: %s", self.router)
        return resource
    def delete_router(self, force=False):
        """Delete the Router; no-op when it was never created. With force,
        delete by deterministic name even without a cached resource."""
        if force:
            name = self.make_resource_name(self.ROUTER_NAME)
        elif self.router:
            name = self.router.name
        else:
            return
        logger.info('Deleting Router %s', name)
        self.netsvc.delete_router(name)
        self.router = None
    def create_grpc_route(self, src_host: str, src_port: int) -> GcpResource:
        """Create a GrpcRoute sending `src_host:src_port` traffic to the
        main backend service via the previously created Router."""
        host = f'{src_host}:{src_port}'
        service_name = self.netsvc.resource_full_name(self.backend_service.name,
                                                      "backendServices")
        body = {
            "routers": [self.router.url],
            # NOTE(review): "hostnames" is plural but a single string is
            # passed here, not a list — confirm the v1alpha1 API accepts a
            # scalar for this field.
            "hostnames":
                host,
            "rules": [{
                "action": {
                    "destinations": [{
                        "serviceName": service_name
                    }]
                }
            }],
        }
        name = self.make_resource_name(self.GRPC_ROUTE_NAME)
        logger.info("Creating GrpcRoute %s", name)
        resource = self.netsvc.create_grpc_route(name, body)
        # Re-read the resource so self.grpc_route carries the server-side view.
        self.grpc_route = self.netsvc.get_grpc_route(name)
        logger.debug("Loaded GrpcRoute: %s", self.grpc_route)
        return resource
    def create_grpc_route_with_content(self, body: Any) -> GcpResource:
        """Create a GrpcRoute from a caller-provided request body."""
        name = self.make_resource_name(self.GRPC_ROUTE_NAME)
        logger.info("Creating GrpcRoute %s", name)
        resource = self.netsvc.create_grpc_route(name, body)
        self.grpc_route = self.netsvc.get_grpc_route(name)
        logger.debug("Loaded GrpcRoute: %s", self.grpc_route)
        return resource
    def delete_grpc_route(self, force=False):
        """Delete the GrpcRoute; no-op when it was never created. With
        force, delete by deterministic name even without a cached resource."""
        if force:
            name = self.make_resource_name(self.GRPC_ROUTE_NAME)
        elif self.grpc_route:
            name = self.grpc_route.name
        else:
            return
        logger.info('Deleting GrpcRoute %s', name)
        self.netsvc.delete_grpc_route(name)
        self.grpc_route = None
    def cleanup(self, *, force=False):
        """Delete AppNet resources (route before router), then the base
        class's Traffic Director resources."""
        self.delete_grpc_route(force=force)
        self.delete_router(force=force)
        super().cleanup(force=force)
class TrafficDirectorSecureManager(TrafficDirectorManager):
    """Traffic Director manager that additionally provisions PSM security
    resources: server/client TLS policies, authorization policies, and
    endpoint policies (networksecurity v1beta1 / networkservices v1beta1)."""
    SERVER_TLS_POLICY_NAME = "server-tls-policy"
    CLIENT_TLS_POLICY_NAME = "client-tls-policy"
    AUTHZ_POLICY_NAME = "authz-policy"
    ENDPOINT_POLICY = "endpoint-policy"
    CERTIFICATE_PROVIDER_INSTANCE = "google_cloud_private_spiffe"
    # API clients for the security-related services.
    netsec: _NetworkSecurityV1Beta1
    netsvc: _NetworkServicesV1Beta1
    def __init__(
        self,
        gcp_api_manager: gcp.api.GcpApiManager,
        project: str,
        *,
        resource_prefix: str,
        resource_suffix: Optional[str] = None,
        network: str = 'default',
        compute_api_version: str = 'v1',
    ):
        super().__init__(gcp_api_manager,
                         project,
                         resource_prefix=resource_prefix,
                         resource_suffix=resource_suffix,
                         network=network,
                         compute_api_version=compute_api_version)
        # API
        self.netsec = _NetworkSecurityV1Beta1(gcp_api_manager, project)
        self.netsvc = _NetworkServicesV1Beta1(gcp_api_manager, project)
        # Managed resources
        self.server_tls_policy: Optional[ServerTlsPolicy] = None
        self.client_tls_policy: Optional[ClientTlsPolicy] = None
        self.authz_policy: Optional[AuthorizationPolicy] = None
        self.endpoint_policy: Optional[EndpointPolicy] = None
    def setup_server_security(self,
                              *,
                              server_namespace,
                              server_name,
                              server_port,
                              tls=True,
                              mtls=True):
        """Create the server TLS policy and the endpoint policy binding it
        to the test server's port."""
        self.create_server_tls_policy(tls=tls, mtls=mtls)
        self.create_endpoint_policy(server_namespace=server_namespace,
                                    server_name=server_name,
                                    server_port=server_port)
    def setup_client_security(self,
                              *,
                              server_namespace,
                              server_name,
                              tls=True,
                              mtls=True):
        """Create the client TLS policy and attach it to the main backend
        service with the expected server SPIFFE identity."""
        self.create_client_tls_policy(tls=tls, mtls=mtls)
        self.backend_service_apply_client_mtls_policy(server_namespace,
                                                      server_name)
    def cleanup(self, *, force=False):
        """Delete security resources after the base Traffic Director ones."""
        # Cleanup in the reverse order of creation
        super().cleanup(force=force)
        self.delete_endpoint_policy(force=force)
        self.delete_server_tls_policy(force=force)
        self.delete_client_tls_policy(force=force)
        self.delete_authz_policy(force=force)
    def create_server_tls_policy(self, *, tls, mtls):
        """Create a server TLS policy; skipped when neither tls nor mtls.

        Args:
            tls: include a server certificate from the certificate provider.
            mtls: include an mTLS policy validating client certificates.
        """
        name = self.make_resource_name(self.SERVER_TLS_POLICY_NAME)
        logger.info('Creating Server TLS Policy %s', name)
        if not tls and not mtls:
            logger.warning(
                'Server TLS Policy %s neither TLS, nor mTLS '
                'policy. Skipping creation', name)
            return
        certificate_provider = self._get_certificate_provider()
        policy = {}
        if tls:
            policy["serverCertificate"] = certificate_provider
        if mtls:
            policy["mtlsPolicy"] = {
                "clientValidationCa": [certificate_provider],
            }
        self.netsec.create_server_tls_policy(name, policy)
        # Re-read the resource so the cached copy is the server-side view.
        self.server_tls_policy = self.netsec.get_server_tls_policy(name)
        logger.debug('Server TLS Policy loaded: %r', self.server_tls_policy)
    def delete_server_tls_policy(self, force=False):
        """Delete the server TLS policy; no-op when it was never created."""
        if force:
            name = self.make_resource_name(self.SERVER_TLS_POLICY_NAME)
        elif self.server_tls_policy:
            name = self.server_tls_policy.name
        else:
            return
        logger.info('Deleting Server TLS Policy %s', name)
        self.netsec.delete_server_tls_policy(name)
        self.server_tls_policy = None
    def create_authz_policy(self, *, action: str, rules: list):
        """Create an authorization policy with the given action and rules."""
        name = self.make_resource_name(self.AUTHZ_POLICY_NAME)
        logger.info('Creating Authz Policy %s', name)
        policy = {
            "action": action,
            "rules": rules,
        }
        self.netsec.create_authz_policy(name, policy)
        self.authz_policy = self.netsec.get_authz_policy(name)
        logger.debug('Authz Policy loaded: %r', self.authz_policy)
    def delete_authz_policy(self, force=False):
        """Delete the authorization policy; no-op when it was never created."""
        if force:
            name = self.make_resource_name(self.AUTHZ_POLICY_NAME)
        elif self.authz_policy:
            name = self.authz_policy.name
        else:
            return
        logger.info('Deleting Authz Policy %s', name)
        self.netsec.delete_authz_policy(name)
        self.authz_policy = None
    def create_endpoint_policy(self, *, server_namespace: str, server_name: str,
                               server_port: int) -> None:
        """Create a GRPC_SERVER endpoint policy matching the test server.

        Matches endpoints labeled app=<namespace>-<name> on `server_port`,
        and attaches the server TLS policy and authz policy when present.
        """
        name = self.make_resource_name(self.ENDPOINT_POLICY)
        logger.info('Creating Endpoint Policy %s', name)
        endpoint_matcher_labels = [{
            "labelName": "app",
            "labelValue": f"{server_namespace}-{server_name}"
        }]
        port_selector = {"ports": [str(server_port)]}
        label_matcher_all = {
            "metadataLabelMatchCriteria": "MATCH_ALL",
            "metadataLabels": endpoint_matcher_labels,
        }
        config = {
            "type": "GRPC_SERVER",
            "trafficPortSelector": port_selector,
            "endpointMatcher": {
                "metadataLabelMatcher": label_matcher_all,
            },
        }
        if self.server_tls_policy:
            config["serverTlsPolicy"] = self.server_tls_policy.name
        else:
            logger.warning(
                'Creating Endpoint Policy %s with '
                'no Server TLS policy attached', name)
        if self.authz_policy:
            config["authorizationPolicy"] = self.authz_policy.name
        self.netsvc.create_endpoint_policy(name, config)
        self.endpoint_policy = self.netsvc.get_endpoint_policy(name)
        logger.debug('Loaded Endpoint Policy: %r', self.endpoint_policy)
    def delete_endpoint_policy(self, force: bool = False) -> None:
        """Delete the endpoint policy; no-op when it was never created."""
        if force:
            name = self.make_resource_name(self.ENDPOINT_POLICY)
        elif self.endpoint_policy:
            name = self.endpoint_policy.name
        else:
            return
        logger.info('Deleting Endpoint Policy %s', name)
        self.netsvc.delete_endpoint_policy(name)
        self.endpoint_policy = None
    def create_client_tls_policy(self, *, tls, mtls):
        """Create a client TLS policy; skipped when neither tls nor mtls.

        Args:
            tls: validate the server against the certificate provider CA.
            mtls: present a client certificate from the provider.
        """
        name = self.make_resource_name(self.CLIENT_TLS_POLICY_NAME)
        logger.info('Creating Client TLS Policy %s', name)
        if not tls and not mtls:
            logger.warning(
                'Client TLS Policy %s neither TLS, nor mTLS '
                'policy. Skipping creation', name)
            return
        certificate_provider = self._get_certificate_provider()
        policy = {}
        if tls:
            policy["serverValidationCa"] = [certificate_provider]
        if mtls:
            policy["clientCertificate"] = certificate_provider
        self.netsec.create_client_tls_policy(name, policy)
        self.client_tls_policy = self.netsec.get_client_tls_policy(name)
        logger.debug('Client TLS Policy loaded: %r', self.client_tls_policy)
    def delete_client_tls_policy(self, force=False):
        """Delete the client TLS policy; no-op when it was never created."""
        if force:
            name = self.make_resource_name(self.CLIENT_TLS_POLICY_NAME)
        elif self.client_tls_policy:
            name = self.client_tls_policy.name
        else:
            return
        logger.info('Deleting Client TLS Policy %s', name)
        self.netsec.delete_client_tls_policy(name)
        self.client_tls_policy = None
    def backend_service_apply_client_mtls_policy(
        self,
        server_namespace,
        server_name,
    ):
        """Patch the main backend service's security settings with the
        client TLS policy and the server's SPIFFE subject alt name.

        Skipped (with a warning) when no client TLS policy was created.
        """
        if not self.client_tls_policy:
            logger.warning(
                'Client TLS policy not created, '
                'skipping attaching to Backend Service %s',
                self.backend_service.name)
            return
        server_spiffe = (f'spiffe://{self.project}.svc.id.goog/'
                         f'ns/{server_namespace}/sa/{server_name}')
        # NOTE(review): uses the root logger (logging.info) unlike the rest
        # of this class, which logs via the module logger.
        logging.info(
            'Adding Client TLS Policy to Backend Service %s: %s, '
            'server %s', self.backend_service.name, self.client_tls_policy.url,
            server_spiffe)
        self.compute.patch_backend_service(
            self.backend_service, {
                'securitySettings': {
                    'clientTlsPolicy': self.client_tls_policy.url,
                    'subjectAltNames': [server_spiffe]
                }
            })
    @classmethod
    def _get_certificate_provider(cls):
        """Return the certificate provider stanza referencing the
        CERTIFICATE_PROVIDER_INSTANCE plugin."""
        return {
            "certificateProviderInstance": {
                "pluginInstance": cls.CERTIFICATE_PROVIDER_INSTANCE,
            },
        }
| 40.748299 | 80 | 0.636171 |
import functools
import logging
import random
from typing import Any, Dict, List, Optional, Set
from framework import xds_flags
from framework.infrastructure import gcp
logger = logging.getLogger(__name__)
_ComputeV1 = gcp.compute.ComputeV1
GcpResource = _ComputeV1.GcpResource
HealthCheckProtocol = _ComputeV1.HealthCheckProtocol
ZonalGcpResource = _ComputeV1.ZonalGcpResource
BackendServiceProtocol = _ComputeV1.BackendServiceProtocol
_BackendGRPC = BackendServiceProtocol.GRPC
_HealthCheckGRPC = HealthCheckProtocol.GRPC
_NetworkSecurityV1Beta1 = gcp.network_security.NetworkSecurityV1Beta1
ServerTlsPolicy = gcp.network_security.ServerTlsPolicy
ClientTlsPolicy = gcp.network_security.ClientTlsPolicy
AuthorizationPolicy = gcp.network_security.AuthorizationPolicy
_NetworkServicesV1Alpha1 = gcp.network_services.NetworkServicesV1Alpha1
_NetworkServicesV1Beta1 = gcp.network_services.NetworkServicesV1Beta1
EndpointPolicy = gcp.network_services.EndpointPolicy
TEST_AFFINITY_METADATA_KEY = 'xds_md'
class TrafficDirectorManager:
compute: _ComputeV1
resource_prefix: str
resource_suffix: str
BACKEND_SERVICE_NAME = "backend-service"
ALTERNATIVE_BACKEND_SERVICE_NAME = "backend-service-alt"
AFFINITY_BACKEND_SERVICE_NAME = "backend-service-affinity"
HEALTH_CHECK_NAME = "health-check"
URL_MAP_NAME = "url-map"
URL_MAP_PATH_MATCHER_NAME = "path-matcher"
TARGET_PROXY_NAME = "target-proxy"
FORWARDING_RULE_NAME = "forwarding-rule"
FIREWALL_RULE_NAME = "allow-health-checks"
def __init__(
self,
gcp_api_manager: gcp.api.GcpApiManager,
project: str,
*,
resource_prefix: str,
resource_suffix: str,
network: str = 'default',
compute_api_version: str = 'v1',
):
self.compute = _ComputeV1(gcp_api_manager,
project,
version=compute_api_version)
self.project: str = project
self.network: str = network
self.resource_prefix: str = resource_prefix
self.resource_suffix: str = resource_suffix
self.health_check: Optional[GcpResource] = None
self.backend_service: Optional[GcpResource] = None
self.backend_service_protocol: Optional[BackendServiceProtocol] = None
self.url_map: Optional[GcpResource] = None
self.firewall_rule: Optional[GcpResource] = None
self.target_proxy: Optional[GcpResource] = None
self.target_proxy_is_http: bool = False
self.forwarding_rule: Optional[GcpResource] = None
self.backends: Set[ZonalGcpResource] = set()
self.alternative_backend_service: Optional[GcpResource] = None
self.alternative_backend_service_protocol: Optional[
BackendServiceProtocol] = None
self.alternative_backends: Set[ZonalGcpResource] = set()
self.affinity_backend_service: Optional[GcpResource] = None
self.affinity_backend_service_protocol: Optional[
BackendServiceProtocol] = None
self.affinity_backends: Set[ZonalGcpResource] = set()
@property
def network_url(self):
return f'global/networks/{self.network}'
def setup_for_grpc(
self,
service_host,
service_port,
*,
backend_protocol: Optional[BackendServiceProtocol] = _BackendGRPC,
health_check_port: Optional[int] = None):
self.setup_backend_for_grpc(protocol=backend_protocol,
health_check_port=health_check_port)
self.setup_routing_rule_map_for_grpc(service_host, service_port)
def setup_backend_for_grpc(
self,
*,
protocol: Optional[BackendServiceProtocol] = _BackendGRPC,
health_check_port: Optional[int] = None):
self.create_health_check(port=health_check_port)
self.create_backend_service(protocol)
def setup_routing_rule_map_for_grpc(self, service_host, service_port):
self.create_url_map(service_host, service_port)
self.create_target_proxy()
self.create_forwarding_rule(service_port)
def cleanup(self, *, force=False):
self.delete_forwarding_rule(force=force)
self.delete_target_http_proxy(force=force)
self.delete_target_grpc_proxy(force=force)
self.delete_url_map(force=force)
self.delete_backend_service(force=force)
self.delete_alternative_backend_service(force=force)
self.delete_affinity_backend_service(force=force)
self.delete_health_check(force=force)
@functools.lru_cache(None)
def make_resource_name(self, name: str) -> str:
parts = [self.resource_prefix, name]
if self.resource_suffix:
parts.append(self.resource_suffix)
return '-'.join(parts)
def create_health_check(
self,
*,
protocol: Optional[HealthCheckProtocol] = _HealthCheckGRPC,
port: Optional[int] = None):
if self.health_check:
raise ValueError(f'Health check {self.health_check.name} '
'already created, delete it first')
if protocol is None:
protocol = _HealthCheckGRPC
name = self.make_resource_name(self.HEALTH_CHECK_NAME)
logger.info('Creating %s Health Check "%s"', protocol.name, name)
resource = self.compute.create_health_check(name, protocol, port=port)
self.health_check = resource
def delete_health_check(self, force=False):
if force:
name = self.make_resource_name(self.HEALTH_CHECK_NAME)
elif self.health_check:
name = self.health_check.name
else:
return
logger.info('Deleting Health Check "%s"', name)
self.compute.delete_health_check(name)
self.health_check = None
def create_backend_service(
self,
protocol: Optional[BackendServiceProtocol] = _BackendGRPC,
subset_size: Optional[int] = None,
affinity_header: Optional[str] = None):
if protocol is None:
protocol = _BackendGRPC
name = self.make_resource_name(self.BACKEND_SERVICE_NAME)
logger.info('Creating %s Backend Service "%s"', protocol.name, name)
resource = self.compute.create_backend_service_traffic_director(
name,
health_check=self.health_check,
protocol=protocol,
subset_size=subset_size,
affinity_header=affinity_header)
self.backend_service = resource
self.backend_service_protocol = protocol
def load_backend_service(self):
name = self.make_resource_name(self.BACKEND_SERVICE_NAME)
resource = self.compute.get_backend_service_traffic_director(name)
self.backend_service = resource
def delete_backend_service(self, force=False):
if force:
name = self.make_resource_name(self.BACKEND_SERVICE_NAME)
elif self.backend_service:
name = self.backend_service.name
else:
return
logger.info('Deleting Backend Service "%s"', name)
self.compute.delete_backend_service(name)
self.backend_service = None
def backend_service_add_neg_backends(self,
name,
zones,
max_rate_per_endpoint: Optional[
int] = None):
logger.info('Waiting for Network Endpoint Groups to load endpoints.')
for zone in zones:
backend = self.compute.wait_for_network_endpoint_group(name, zone)
logger.info('Loaded NEG "%s" in zone %s', backend.name,
backend.zone)
self.backends.add(backend)
self.backend_service_patch_backends(max_rate_per_endpoint)
def backend_service_remove_neg_backends(self, name, zones):
logger.info('Waiting for Network Endpoint Groups to load endpoints.')
for zone in zones:
backend = self.compute.wait_for_network_endpoint_group(name, zone)
logger.info('Loaded NEG "%s" in zone %s', backend.name,
backend.zone)
self.backends.remove(backend)
self.backend_service_patch_backends()
def backend_service_patch_backends(
self, max_rate_per_endpoint: Optional[int] = None):
logging.info('Adding backends to Backend Service %s: %r',
self.backend_service.name, self.backends)
self.compute.backend_service_patch_backends(self.backend_service,
self.backends,
max_rate_per_endpoint)
def backend_service_remove_all_backends(self):
logging.info('Removing backends from Backend Service %s',
self.backend_service.name)
self.compute.backend_service_remove_all_backends(self.backend_service)
def wait_for_backends_healthy_status(self):
logger.debug(
"Waiting for Backend Service %s to report all backends healthy %r",
self.backend_service, self.backends)
self.compute.wait_for_backends_healthy_status(self.backend_service,
self.backends)
def create_alternative_backend_service(
self, protocol: Optional[BackendServiceProtocol] = _BackendGRPC):
if protocol is None:
protocol = _BackendGRPC
name = self.make_resource_name(self.ALTERNATIVE_BACKEND_SERVICE_NAME)
logger.info('Creating %s Alternative Backend Service "%s"',
protocol.name, name)
resource = self.compute.create_backend_service_traffic_director(
name, health_check=self.health_check, protocol=protocol)
self.alternative_backend_service = resource
self.alternative_backend_service_protocol = protocol
def load_alternative_backend_service(self):
name = self.make_resource_name(self.ALTERNATIVE_BACKEND_SERVICE_NAME)
resource = self.compute.get_backend_service_traffic_director(name)
self.alternative_backend_service = resource
def delete_alternative_backend_service(self, force=False):
if force:
name = self.make_resource_name(
self.ALTERNATIVE_BACKEND_SERVICE_NAME)
elif self.alternative_backend_service:
name = self.alternative_backend_service.name
else:
return
logger.info('Deleting Alternative Backend Service "%s"', name)
self.compute.delete_backend_service(name)
self.alternative_backend_service = None
def alternative_backend_service_add_neg_backends(self, name, zones):
logger.info('Waiting for Network Endpoint Groups to load endpoints.')
for zone in zones:
backend = self.compute.wait_for_network_endpoint_group(name, zone)
logger.info('Loaded NEG "%s" in zone %s', backend.name,
backend.zone)
self.alternative_backends.add(backend)
self.alternative_backend_service_patch_backends()
def alternative_backend_service_patch_backends(self):
logging.info('Adding backends to Backend Service %s: %r',
self.alternative_backend_service.name,
self.alternative_backends)
self.compute.backend_service_patch_backends(
self.alternative_backend_service, self.alternative_backends)
def alternative_backend_service_remove_all_backends(self):
logging.info('Removing backends from Backend Service %s',
self.alternative_backend_service.name)
self.compute.backend_service_remove_all_backends(
self.alternative_backend_service)
def wait_for_alternative_backends_healthy_status(self):
logger.debug(
"Waiting for Backend Service %s to report all backends healthy %r",
self.alternative_backend_service, self.alternative_backends)
self.compute.wait_for_backends_healthy_status(
self.alternative_backend_service, self.alternative_backends)
def create_affinity_backend_service(
self, protocol: Optional[BackendServiceProtocol] = _BackendGRPC):
if protocol is None:
protocol = _BackendGRPC
name = self.make_resource_name(self.AFFINITY_BACKEND_SERVICE_NAME)
logger.info('Creating %s Affinity Backend Service "%s"', protocol.name,
name)
resource = self.compute.create_backend_service_traffic_director(
name,
health_check=self.health_check,
protocol=protocol,
affinity_header=TEST_AFFINITY_METADATA_KEY)
self.affinity_backend_service = resource
self.affinity_backend_service_protocol = protocol
def load_affinity_backend_service(self):
name = self.make_resource_name(self.AFFINITY_BACKEND_SERVICE_NAME)
resource = self.compute.get_backend_service_traffic_director(name)
self.affinity_backend_service = resource
def delete_affinity_backend_service(self, force=False):
if force:
name = self.make_resource_name(self.AFFINITY_BACKEND_SERVICE_NAME)
elif self.affinity_backend_service:
name = self.affinity_backend_service.name
else:
return
logger.info('Deleting Affinity Backend Service "%s"', name)
self.compute.delete_backend_service(name)
self.affinity_backend_service = None
def affinity_backend_service_add_neg_backends(self, name, zones):
logger.info('Waiting for Network Endpoint Groups to load endpoints.')
for zone in zones:
backend = self.compute.wait_for_network_endpoint_group(name, zone)
logger.info('Loaded NEG "%s" in zone %s', backend.name,
backend.zone)
self.affinity_backends.add(backend)
self.affinity_backend_service_patch_backends()
def affinity_backend_service_patch_backends(self):
logging.info('Adding backends to Backend Service %s: %r',
self.affinity_backend_service.name, self.affinity_backends)
self.compute.backend_service_patch_backends(
self.affinity_backend_service, self.affinity_backends)
def affinity_backend_service_remove_all_backends(self):
logging.info('Removing backends from Backend Service %s',
self.affinity_backend_service.name)
self.compute.backend_service_remove_all_backends(
self.affinity_backend_service)
def wait_for_affinity_backends_healthy_status(self):
logger.debug(
"Waiting for Backend Service %s to report all backends healthy %r",
self.affinity_backend_service, self.affinity_backends)
self.compute.wait_for_backends_healthy_status(
self.affinity_backend_service, self.affinity_backends)
def _generate_url_map_body(
self,
name: str,
matcher_name: str,
src_hosts,
dst_default_backend_service: GcpResource,
dst_host_rule_match_backend_service: Optional[GcpResource] = None,
) -> Dict[str, Any]:
if dst_host_rule_match_backend_service is None:
dst_host_rule_match_backend_service = dst_default_backend_service
return {
'name':
name,
'defaultService':
dst_default_backend_service.url,
'hostRules': [{
'hosts': src_hosts,
'pathMatcher': matcher_name,
}],
'pathMatchers': [{
'name': matcher_name,
'defaultService': dst_host_rule_match_backend_service.url,
}],
}
    def create_url_map(
        self,
        src_host: str,
        src_port: int,
    ) -> GcpResource:
        """Create a URL map routing `src_host:src_port` to the default
        backend service; track it on `self.url_map` and return it."""
        src_address = f'{src_host}:{src_port}'
        name = self.make_resource_name(self.URL_MAP_NAME)
        matcher_name = self.make_resource_name(self.URL_MAP_PATH_MATCHER_NAME)
        logger.info('Creating URL map "%s": %s -> %s', name, src_address,
                    self.backend_service.name)
        resource = self.compute.create_url_map_with_content(
            self._generate_url_map_body(name, matcher_name, [src_address],
                                        self.backend_service))
        self.url_map = resource
        return resource
    def patch_url_map(self, src_host: str, src_port: int,
                      backend_service: GcpResource):
        """Repoint the existing URL map so `src_host:src_port` routes to
        `backend_service`."""
        src_address = f'{src_host}:{src_port}'
        name = self.make_resource_name(self.URL_MAP_NAME)
        matcher_name = self.make_resource_name(self.URL_MAP_PATH_MATCHER_NAME)
        logger.info('Patching URL map "%s": %s -> %s', name, src_address,
                    backend_service.name)
        self.compute.patch_url_map(
            self.url_map,
            self._generate_url_map_body(name, matcher_name, [src_address],
                                        backend_service))
    def create_url_map_with_content(self, url_map_body: Any) -> GcpResource:
        """Create a URL map from a caller-provided REST body; track it on
        `self.url_map` and return the created resource."""
        logger.info('Creating URL map: %s', url_map_body)
        resource = self.compute.create_url_map_with_content(url_map_body)
        self.url_map = resource
        return resource
    def delete_url_map(self, force=False):
        """Delete the URL map.

        With force=True the name is derived from the naming scheme (useful
        for cleaning up leaked resources); otherwise the tracked resource is
        deleted, and the call is a no-op when none is tracked.
        """
        if force:
            name = self.make_resource_name(self.URL_MAP_NAME)
        elif self.url_map:
            name = self.url_map.name
        else:
            return
        logger.info('Deleting URL Map "%s"', name)
        self.compute.delete_url_map(name)
        self.url_map = None
def create_target_proxy(self):
name = self.make_resource_name(self.TARGET_PROXY_NAME)
if self.backend_service_protocol is BackendServiceProtocol.GRPC:
target_proxy_type = 'GRPC'
create_proxy_fn = self.compute.create_target_grpc_proxy
self.target_proxy_is_http = False
elif self.backend_service_protocol is BackendServiceProtocol.HTTP2:
target_proxy_type = 'HTTP'
create_proxy_fn = self.compute.create_target_http_proxy
self.target_proxy_is_http = True
else:
raise TypeError('Unexpected backend service protocol')
logger.info('Creating target %s proxy "%s" to URL map %s', name,
target_proxy_type, self.url_map.name)
self.target_proxy = create_proxy_fn(name, self.url_map)
    def delete_target_grpc_proxy(self, force=False):
        """Delete the target gRPC proxy and reset proxy tracking state.

        With force=True the name is derived from the naming scheme; otherwise
        the tracked resource is deleted (no-op when none is tracked).
        """
        if force:
            name = self.make_resource_name(self.TARGET_PROXY_NAME)
        elif self.target_proxy:
            name = self.target_proxy.name
        else:
            return
        logger.info('Deleting Target GRPC proxy "%s"', name)
        self.compute.delete_target_grpc_proxy(name)
        self.target_proxy = None
        self.target_proxy_is_http = False
    def delete_target_http_proxy(self, force=False):
        """Delete the target HTTP proxy and reset proxy tracking state.

        With force=True the name is derived from the naming scheme; otherwise
        the tracked resource is deleted only when it is an HTTP proxy
        (no-op for gRPC proxies or when nothing is tracked).
        """
        if force:
            name = self.make_resource_name(self.TARGET_PROXY_NAME)
        elif self.target_proxy and self.target_proxy_is_http:
            name = self.target_proxy.name
        else:
            return
        logger.info('Deleting HTTP Target proxy "%s"', name)
        self.compute.delete_target_http_proxy(name)
        self.target_proxy = None
        self.target_proxy_is_http = False
def find_unused_forwarding_rule_port(
self,
*,
lo: int = 1024,
hi: int = 65535,
attempts: int = 25) -> int:
for attempts in range(attempts):
src_port = random.randint(lo, hi)
if not (self.compute.exists_forwarding_rule(src_port)):
return src_port
raise RuntimeError("Couldn't find unused forwarding rule port")
def create_forwarding_rule(self, src_port: int):
name = self.make_resource_name(self.FORWARDING_RULE_NAME)
src_port = int(src_port)
logging.info(
'Creating forwarding rule "%s" in network "%s": 0.0.0.0:%s -> %s',
name, self.network, src_port, self.target_proxy.url)
resource = self.compute.create_forwarding_rule(name, src_port,
self.target_proxy,
self.network_url)
self.forwarding_rule = resource
return resource
    def delete_forwarding_rule(self, force=False):
        """Delete the forwarding rule.

        With force=True the name is derived from the naming scheme; otherwise
        the tracked resource is deleted (no-op when none is tracked).
        """
        if force:
            name = self.make_resource_name(self.FORWARDING_RULE_NAME)
        elif self.forwarding_rule:
            name = self.forwarding_rule.name
        else:
            return
        logger.info('Deleting Forwarding rule "%s"', name)
        self.compute.delete_forwarding_rule(name)
        self.forwarding_rule = None
def create_firewall_rule(self, allowed_ports: List[str]):
name = self.make_resource_name(self.FIREWALL_RULE_NAME)
logging.info(
'Creating firewall rule "%s" in network "%s" with allowed ports %s',
name, self.network, allowed_ports)
resource = self.compute.create_firewall_rule(
name, self.network_url, xds_flags.FIREWALL_SOURCE_RANGE.value,
allowed_ports)
self.firewall_rule = resource
    def delete_firewall_rule(self, force=False):
        """Delete the firewall rule.

        With force=True the name is derived from the naming scheme; otherwise
        the tracked resource is deleted (no-op when none is tracked).
        """
        if force:
            name = self.make_resource_name(self.FIREWALL_RULE_NAME)
        elif self.firewall_rule:
            name = self.firewall_rule.name
        else:
            return
        logger.info('Deleting Firewall Rule "%s"', name)
        self.compute.delete_firewall_rule(name)
        self.firewall_rule = None
class TrafficDirectorAppNetManager(TrafficDirectorManager):
    """Traffic Director manager variant built on the Network Services
    v1alpha1 (AppNet) Router/GrpcRoute resources instead of URL maps and
    target proxies."""

    GRPC_ROUTE_NAME = "grpc-route"
    ROUTER_NAME = "router"

    # Network Services API client, set in __init__.
    netsvc: _NetworkServicesV1Alpha1

    def __init__(self,
                 gcp_api_manager: gcp.api.GcpApiManager,
                 project: str,
                 *,
                 resource_prefix: str,
                 config_scope: str,
                 resource_suffix: Optional[str] = None,
                 network: str = 'default',
                 compute_api_version: str = 'v1'):
        super().__init__(gcp_api_manager,
                         project,
                         resource_prefix=resource_prefix,
                         resource_suffix=resource_suffix,
                         network=network,
                         compute_api_version=compute_api_version)
        self.config_scope = config_scope
        # API
        self.netsvc = _NetworkServicesV1Alpha1(gcp_api_manager, project)
        # Managed resources
        self.grpc_route: Optional[_NetworkServicesV1Alpha1.GrpcRoute] = None
        self.router: Optional[_NetworkServicesV1Alpha1.Router] = None

    def create_router(self) -> GcpResource:
        """Create a proxyless-gRPC Router scoped to `self.config_scope`;
        reload and track it on `self.router`, returning the create result."""
        name = self.make_resource_name(self.ROUTER_NAME)
        logger.info("Creating Router %s", name)
        body = {
            "type": "PROXYLESS_GRPC",
            "scope": self.config_scope,
        }
        resource = self.netsvc.create_router(name, body)
        self.router = self.netsvc.get_router(name)
        logger.debug("Loaded Router: %s", self.router)
        return resource

    def delete_router(self, force=False):
        """Delete the Router; with force=True the name is derived from the
        naming scheme, otherwise no-op when none is tracked."""
        if force:
            name = self.make_resource_name(self.ROUTER_NAME)
        elif self.router:
            name = self.router.name
        else:
            return
        logger.info('Deleting Router %s', name)
        self.netsvc.delete_router(name)
        self.router = None

    def create_grpc_route(self, src_host: str, src_port: int) -> GcpResource:
        """Create a GrpcRoute sending `src_host:src_port` traffic on the
        tracked router to the backend service; reload and track it on
        `self.grpc_route`."""
        host = f'{src_host}:{src_port}'
        service_name = self.netsvc.resource_full_name(self.backend_service.name,
                                                      "backendServices")
        body = {
            "routers": [self.router.url],
            # NOTE(review): "hostnames" is set to a single string here while
            # other list-valued fields use lists -- confirm the API accepts a
            # scalar value.
            "hostnames":
                host,
            "rules": [{
                "action": {
                    "destinations": [{
                        "serviceName": service_name
                    }]
                }
            }],
        }
        name = self.make_resource_name(self.GRPC_ROUTE_NAME)
        logger.info("Creating GrpcRoute %s", name)
        resource = self.netsvc.create_grpc_route(name, body)
        self.grpc_route = self.netsvc.get_grpc_route(name)
        logger.debug("Loaded GrpcRoute: %s", self.grpc_route)
        return resource

    def create_grpc_route_with_content(self, body: Any) -> GcpResource:
        """Create a GrpcRoute from a caller-provided REST body; reload and
        track it on `self.grpc_route`."""
        name = self.make_resource_name(self.GRPC_ROUTE_NAME)
        logger.info("Creating GrpcRoute %s", name)
        resource = self.netsvc.create_grpc_route(name, body)
        self.grpc_route = self.netsvc.get_grpc_route(name)
        logger.debug("Loaded GrpcRoute: %s", self.grpc_route)
        return resource

    def delete_grpc_route(self, force=False):
        """Delete the GrpcRoute; with force=True the name is derived from
        the naming scheme, otherwise no-op when none is tracked."""
        if force:
            name = self.make_resource_name(self.GRPC_ROUTE_NAME)
        elif self.grpc_route:
            name = self.grpc_route.name
        else:
            return
        logger.info('Deleting GrpcRoute %s', name)
        self.netsvc.delete_grpc_route(name)
        self.grpc_route = None

    def cleanup(self, *, force=False):
        """Delete AppNet resources (route, then router) before running the
        base-class cleanup."""
        self.delete_grpc_route(force=force)
        self.delete_router(force=force)
        super().cleanup(force=force)
class TrafficDirectorSecureManager(TrafficDirectorManager):
    """Traffic Director manager with PSM security (TLS/mTLS) support.

    Adds Network Security resources (server/client TLS policies,
    authorization policies) and Network Services endpoint policies on top of
    the base Traffic Director setup.
    """

    SERVER_TLS_POLICY_NAME = "server-tls-policy"
    CLIENT_TLS_POLICY_NAME = "client-tls-policy"
    AUTHZ_POLICY_NAME = "authz-policy"
    ENDPOINT_POLICY = "endpoint-policy"
    CERTIFICATE_PROVIDER_INSTANCE = "google_cloud_private_spiffe"

    # API clients, set in __init__.
    netsec: _NetworkSecurityV1Beta1
    netsvc: _NetworkServicesV1Beta1

    def __init__(
        self,
        gcp_api_manager: gcp.api.GcpApiManager,
        project: str,
        *,
        resource_prefix: str,
        resource_suffix: Optional[str] = None,
        network: str = 'default',
        compute_api_version: str = 'v1',
    ):
        super().__init__(gcp_api_manager,
                         project,
                         resource_prefix=resource_prefix,
                         resource_suffix=resource_suffix,
                         network=network,
                         compute_api_version=compute_api_version)
        # API
        self.netsec = _NetworkSecurityV1Beta1(gcp_api_manager, project)
        self.netsvc = _NetworkServicesV1Beta1(gcp_api_manager, project)
        # Managed resources
        self.server_tls_policy: Optional[ServerTlsPolicy] = None
        self.client_tls_policy: Optional[ClientTlsPolicy] = None
        self.authz_policy: Optional[AuthorizationPolicy] = None
        self.endpoint_policy: Optional[EndpointPolicy] = None

    def setup_server_security(self,
                              *,
                              server_namespace,
                              server_name,
                              server_port,
                              tls=True,
                              mtls=True):
        """Create server-side security: a Server TLS policy and an Endpoint
        Policy attaching it to the test server's port/labels."""
        self.create_server_tls_policy(tls=tls, mtls=mtls)
        self.create_endpoint_policy(server_namespace=server_namespace,
                                    server_name=server_name,
                                    server_port=server_port)

    def setup_client_security(self,
                              *,
                              server_namespace,
                              server_name,
                              tls=True,
                              mtls=True):
        """Create client-side security: a Client TLS policy, then attach it
        to the backend service with the server's SPIFFE identity."""
        self.create_client_tls_policy(tls=tls, mtls=mtls)
        self.backend_service_apply_client_mtls_policy(server_namespace,
                                                      server_name)

    def cleanup(self, *, force=False):
        """Delete security resources after the base-class resources."""
        # Cleanup in the reverse order of creation
        super().cleanup(force=force)
        self.delete_endpoint_policy(force=force)
        self.delete_server_tls_policy(force=force)
        self.delete_client_tls_policy(force=force)
        self.delete_authz_policy(force=force)

    def create_server_tls_policy(self, *, tls, mtls):
        """Create a Server TLS policy; skipped entirely when both `tls` and
        `mtls` are False. Tracks the result on `self.server_tls_policy`."""
        name = self.make_resource_name(self.SERVER_TLS_POLICY_NAME)
        logger.info('Creating Server TLS Policy %s', name)
        if not tls and not mtls:
            logger.warning(
                'Server TLS Policy %s neither TLS, nor mTLS '
                'policy. Skipping creation', name)
            return
        certificate_provider = self._get_certificate_provider()
        policy = {}
        if tls:
            policy["serverCertificate"] = certificate_provider
        if mtls:
            policy["mtlsPolicy"] = {
                "clientValidationCa": [certificate_provider],
            }
        self.netsec.create_server_tls_policy(name, policy)
        self.server_tls_policy = self.netsec.get_server_tls_policy(name)
        logger.debug('Server TLS Policy loaded: %r', self.server_tls_policy)

    def delete_server_tls_policy(self, force=False):
        """Delete the Server TLS policy; with force=True the name is derived
        from the naming scheme, otherwise no-op when none is tracked."""
        if force:
            name = self.make_resource_name(self.SERVER_TLS_POLICY_NAME)
        elif self.server_tls_policy:
            name = self.server_tls_policy.name
        else:
            return
        logger.info('Deleting Server TLS Policy %s', name)
        self.netsec.delete_server_tls_policy(name)
        self.server_tls_policy = None

    def create_authz_policy(self, *, action: str, rules: list):
        """Create an Authorization policy with the given action ("ALLOW"/
        "DENY" per API) and rules; track it on `self.authz_policy`."""
        name = self.make_resource_name(self.AUTHZ_POLICY_NAME)
        logger.info('Creating Authz Policy %s', name)
        policy = {
            "action": action,
            "rules": rules,
        }
        self.netsec.create_authz_policy(name, policy)
        self.authz_policy = self.netsec.get_authz_policy(name)
        logger.debug('Authz Policy loaded: %r', self.authz_policy)

    def delete_authz_policy(self, force=False):
        """Delete the Authorization policy; with force=True the name is
        derived from the naming scheme, otherwise no-op when none tracked."""
        if force:
            name = self.make_resource_name(self.AUTHZ_POLICY_NAME)
        elif self.authz_policy:
            name = self.authz_policy.name
        else:
            return
        logger.info('Deleting Authz Policy %s', name)
        self.netsec.delete_authz_policy(name)
        self.authz_policy = None

    def create_endpoint_policy(self, *, server_namespace: str, server_name: str,
                               server_port: int) -> None:
        """Create a GRPC_SERVER Endpoint Policy matching the server's
        `app=<namespace>-<name>` label and port, attaching the tracked
        server TLS policy and authz policy when present."""
        name = self.make_resource_name(self.ENDPOINT_POLICY)
        logger.info('Creating Endpoint Policy %s', name)
        endpoint_matcher_labels = [{
            "labelName": "app",
            "labelValue": f"{server_namespace}-{server_name}"
        }]
        port_selector = {"ports": [str(server_port)]}
        label_matcher_all = {
            "metadataLabelMatchCriteria": "MATCH_ALL",
            "metadataLabels": endpoint_matcher_labels,
        }
        config = {
            "type": "GRPC_SERVER",
            "trafficPortSelector": port_selector,
            "endpointMatcher": {
                "metadataLabelMatcher": label_matcher_all,
            },
        }
        if self.server_tls_policy:
            config["serverTlsPolicy"] = self.server_tls_policy.name
        else:
            logger.warning(
                'Creating Endpoint Policy %s with '
                'no Server TLS policy attached', name)
        if self.authz_policy:
            config["authorizationPolicy"] = self.authz_policy.name
        self.netsvc.create_endpoint_policy(name, config)
        self.endpoint_policy = self.netsvc.get_endpoint_policy(name)
        logger.debug('Loaded Endpoint Policy: %r', self.endpoint_policy)

    def delete_endpoint_policy(self, force: bool = False) -> None:
        """Delete the Endpoint Policy; with force=True the name is derived
        from the naming scheme, otherwise no-op when none is tracked."""
        if force:
            name = self.make_resource_name(self.ENDPOINT_POLICY)
        elif self.endpoint_policy:
            name = self.endpoint_policy.name
        else:
            return
        logger.info('Deleting Endpoint Policy %s', name)
        self.netsvc.delete_endpoint_policy(name)
        self.endpoint_policy = None

    def create_client_tls_policy(self, *, tls, mtls):
        """Create a Client TLS policy; skipped entirely when both `tls` and
        `mtls` are False. Tracks the result on `self.client_tls_policy`."""
        name = self.make_resource_name(self.CLIENT_TLS_POLICY_NAME)
        logger.info('Creating Client TLS Policy %s', name)
        if not tls and not mtls:
            logger.warning(
                'Client TLS Policy %s neither TLS, nor mTLS '
                'policy. Skipping creation', name)
            return
        certificate_provider = self._get_certificate_provider()
        policy = {}
        if tls:
            policy["serverValidationCa"] = [certificate_provider]
        if mtls:
            policy["clientCertificate"] = certificate_provider
        self.netsec.create_client_tls_policy(name, policy)
        self.client_tls_policy = self.netsec.get_client_tls_policy(name)
        logger.debug('Client TLS Policy loaded: %r', self.client_tls_policy)

    def delete_client_tls_policy(self, force=False):
        """Delete the Client TLS policy; with force=True the name is derived
        from the naming scheme, otherwise no-op when none is tracked."""
        if force:
            name = self.make_resource_name(self.CLIENT_TLS_POLICY_NAME)
        elif self.client_tls_policy:
            name = self.client_tls_policy.name
        else:
            return
        logger.info('Deleting Client TLS Policy %s', name)
        self.netsec.delete_client_tls_policy(name)
        self.client_tls_policy = None

    def backend_service_apply_client_mtls_policy(
        self,
        server_namespace,
        server_name,
    ):
        """Patch the backend service with the tracked Client TLS policy and
        the server's SPIFFE subject-alt-name; no-op (with a warning) when no
        client TLS policy was created."""
        if not self.client_tls_policy:
            logger.warning(
                'Client TLS policy not created, '
                'skipping attaching to Backend Service %s',
                self.backend_service.name)
            return
        server_spiffe = (f'spiffe://{self.project}.svc.id.goog/'
                         f'ns/{server_namespace}/sa/{server_name}')
        # NOTE(review): this uses the root `logging` module while the rest of
        # the class uses the module `logger` -- consider unifying.
        logging.info(
            'Adding Client TLS Policy to Backend Service %s: %s, '
            'server %s', self.backend_service.name, self.client_tls_policy.url,
            server_spiffe)
        self.compute.patch_backend_service(
            self.backend_service, {
                'securitySettings': {
                    'clientTlsPolicy': self.client_tls_policy.url,
                    'subjectAltNames': [server_spiffe]
                }
            })

    @classmethod
    def _get_certificate_provider(cls):
        """Return the certificateProviderInstance stanza referencing the
        class-level provider plugin instance."""
        return {
            "certificateProviderInstance": {
                "pluginInstance": cls.CERTIFICATE_PROVIDER_INSTANCE,
            },
        }
| true | true |
f71bc2a2616d37575b4b7913e3c9a01e51b63aa1 | 3,636 | py | Python | client/chat/main.py | rrsilaya/spaceteam | eca853d82f14d1d5f5f892977dfb35d20da40d0b | [
"MIT"
] | null | null | null | client/chat/main.py | rrsilaya/spaceteam | eca853d82f14d1d5f5f892977dfb35d20da40d0b | [
"MIT"
] | null | null | null | client/chat/main.py | rrsilaya/spaceteam | eca853d82f14d1d5f5f892977dfb35d20da40d0b | [
"MIT"
] | null | null | null | import sys
from threading import Thread
from connection import TcpConnection
from proto.tcp_packet_pb2 import TcpPacket
class Chat():
    """TCP chat client for a game lobby.

    Wraps a TcpConnection and the TcpPacket protobuf messages to create/join
    lobbies, exchange chat messages, and stream incoming packets on a
    background thread.
    """

    def __init__(self):
        self.connection = TcpConnection()
        self.packet = TcpPacket()

    def createLobby(self, maxPlayers, *args):
        """Ask the server to create a lobby for up to `maxPlayers` players.

        Returns the server-assigned lobby id.
        """
        payload = self.packet.CreateLobbyPacket()
        payload.type = self.packet.CREATE_LOBBY
        payload.max_players = maxPlayers
        # NOTE(review): reads args[2], i.e. the third extra positional --
        # confirm against call sites that this index is intended.
        if len(args) > 2:
            payload.lobby_id = args[2]
        lobby = self.connection.send(payload)
        payload.ParseFromString(lobby)
        return payload.lobby_id

    def connect(self, id, *args):
        """Join lobby `id` as the player named by args[0] (default 'anon').

        Returns the lobby id on success; prints the server error and exits
        the process when the lobby does not exist or is full.
        """
        payload = self.packet.ConnectPacket()
        payload.type = self.packet.CONNECT
        payload.lobby_id = id
        payload.player.name = args[0] if args else 'anon'
        self.user = payload.player
        self.lobby = payload.lobby_id
        lobby = self.connection.send(payload)
        self.packet.ParseFromString(lobby)
        if self.packet.type == self.packet.CONNECT:
            payload.ParseFromString(lobby)
            return payload.lobby_id
        elif self.packet.type == self.packet.ERR_LDNE:
            payload = self.packet.ErrLdnePacket()
            payload.ParseFromString(lobby)
            print(payload.err_message)
            sys.exit(1)
        elif self.packet.type == self.packet.ERR_LFULL:
            payload = self.packet.ErrLfullPacket()
            payload.ParseFromString(lobby)
            print(payload.err_message)
            sys.exit(1)

    def listen(self, receiveCallback):
        """Start a background thread that feeds incoming packets through
        `receiveCallback` (called with display text and a `color` kwarg)."""
        self.receiveCallback = receiveCallback
        self.stream = Thread(target=self.connection.receive, args=[self._parsePacket])
        self.stream.start()

    def sendChat(self, message):
        """Build (but do not send) a CHAT packet for `message`."""
        payload = self.packet.ChatPacket()
        payload.type = self.packet.CHAT
        payload.message = message
        payload.player.name = self.user.name
        payload.lobby_id = self.lobby
        return payload

    def getPlayerList(self):
        """Build (but do not send) a PLAYER_LIST request packet."""
        payload = self.packet.PlayerListPacket()
        payload.type = self.packet.PLAYER_LIST
        return payload

    def disconnect(self):
        """Notify the server this player is leaving and close the socket."""
        payload = self.packet.DisconnectPacket()
        payload.type = self.packet.DISCONNECT
        payload.player.name = self.user.name
        payload.player.id = self.user.id
        self.connection.asyncsend(payload)
        self.connection.close()

    # Improvement: marked as a staticmethod (it takes no self/cls); the
    # previous plain function only worked because it was always called as
    # `Chat._parse(...)`. The parameter also no longer shadows builtin `type`.
    @staticmethod
    def _parse(msg_type, packet):
        """Deserialize `packet` bytes into a fresh message of `msg_type`."""
        data = msg_type()
        data.ParseFromString(packet)
        return data

    def _parsePacket(self, data):
        """Dispatch one raw incoming packet to the receive callback,
        formatting it per packet type (join/leave/chat/player list)."""
        self.packet.ParseFromString(data)
        if self.packet.type == self.packet.DISCONNECT:
            data = Chat._parse(self.packet.DisconnectPacket, data)
            self.receiveCallback('\n<', color='RED')
            self.receiveCallback(data.player.name)
            self.receiveCallback('> has left the chat room>\n\n', color='RED')
        elif self.packet.type == self.packet.CONNECT:
            data = Chat._parse(self.packet.ConnectPacket, data)
            self.receiveCallback('\n<', color='GREEN')
            self.receiveCallback(data.player.name)
            self.receiveCallback('> has joined the chat>\n\n', color='GREEN')
        elif self.packet.type == self.packet.CHAT:
            data = Chat._parse(self.packet.ChatPacket, data)
            self.receiveCallback(data.player.name + ': ', color='YELLOW')
            self.receiveCallback(data.message + '\n')
        elif self.packet.type == self.packet.PLAYER_LIST:
            data = Chat._parse(self.packet.PlayerListPacket, data)
            self.receiveCallback('\n[PLAYER LIST]\n', color='GREEN')
            for player in data.player_list:
                self.receiveCallback('> {}@{}\n'.format(player.name, player.id))
            self.receiveCallback('\n')

    def _encode(self, stdin):
        """Map one line of user input to an outgoing packet ('^players'
        requests the player list; anything else is chat)."""
        if stdin == '^players':
            data = self.getPlayerList()
        else:
            data = self.sendChat(stdin)
        return data
| 27.969231 | 82 | 0.686469 | import sys
from threading import Thread
from connection import TcpConnection
from proto.tcp_packet_pb2 import TcpPacket
class Chat():
    """TCP chat client for a game lobby (duplicate copy).

    NOTE(review): byte-identical, comment-stripped duplicate of the `Chat`
    class defined earlier in this file (dataset column residue); consider
    removing one copy.
    """
    def __init__(self):
        self.connection = TcpConnection()
        self.packet = TcpPacket()
    def createLobby(self, maxPlayers, *args):
        payload = self.packet.CreateLobbyPacket()
        payload.type = self.packet.CREATE_LOBBY
        payload.max_players = maxPlayers
        if len(args) > 2:
            payload.lobby_id = args[2]
        lobby = self.connection.send(payload)
        payload.ParseFromString(lobby)
        return payload.lobby_id
    def connect(self, id, *args):
        payload = self.packet.ConnectPacket()
        payload.type = self.packet.CONNECT
        payload.lobby_id = id
        payload.player.name = args[0] if args else 'anon'
        self.user = payload.player
        self.lobby = payload.lobby_id
        lobby = self.connection.send(payload)
        self.packet.ParseFromString(lobby)
        if self.packet.type == self.packet.CONNECT:
            payload.ParseFromString(lobby)
            return payload.lobby_id
        elif self.packet.type == self.packet.ERR_LDNE:
            payload = self.packet.ErrLdnePacket()
            payload.ParseFromString(lobby)
            print(payload.err_message)
            sys.exit(1)
        elif self.packet.type == self.packet.ERR_LFULL:
            payload = self.packet.ErrLfullPacket()
            payload.ParseFromString(lobby)
            print(payload.err_message)
            sys.exit(1)
    def listen(self, receiveCallback):
        self.receiveCallback = receiveCallback
        self.stream = Thread(target=self.connection.receive, args=[self._parsePacket])
        self.stream.start()
    def sendChat(self, message):
        payload = self.packet.ChatPacket()
        payload.type = self.packet.CHAT
        payload.message = message
        payload.player.name = self.user.name
        payload.lobby_id = self.lobby
        return payload
    def getPlayerList(self):
        payload = self.packet.PlayerListPacket()
        payload.type = self.packet.PLAYER_LIST
        return payload
    def disconnect(self):
        payload = self.packet.DisconnectPacket()
        payload.type = self.packet.DISCONNECT
        payload.player.name = self.user.name
        payload.player.id = self.user.id
        self.connection.asyncsend(payload)
        self.connection.close()
    def _parse(type, packet):
        data = type()
        data.ParseFromString(packet)
        return data
    def _parsePacket(self, data):
        self.packet.ParseFromString(data)
        if self.packet.type == self.packet.DISCONNECT:
            data = Chat._parse(self.packet.DisconnectPacket, data)
            self.receiveCallback('\n<', color='RED')
            self.receiveCallback(data.player.name)
            self.receiveCallback('> has left the chat room>\n\n', color='RED')
        elif self.packet.type == self.packet.CONNECT:
            data = Chat._parse(self.packet.ConnectPacket, data)
            self.receiveCallback('\n<', color='GREEN')
            self.receiveCallback(data.player.name)
            self.receiveCallback('> has joined the chat>\n\n', color='GREEN')
        elif self.packet.type == self.packet.CHAT:
            data = Chat._parse(self.packet.ChatPacket, data)
            self.receiveCallback(data.player.name + ': ', color='YELLOW')
            self.receiveCallback(data.message + '\n')
        elif self.packet.type == self.packet.PLAYER_LIST:
            data = Chat._parse(self.packet.PlayerListPacket, data)
            self.receiveCallback('\n[PLAYER LIST]\n', color='GREEN')
            for player in data.player_list:
                self.receiveCallback('> {}@{}\n'.format(player.name, player.id))
            self.receiveCallback('\n')
    def _encode(self, stdin):
        if stdin == '^players':
            data = self.getPlayerList()
        else:
            data = self.sendChat(stdin)
        return data
f71bc4b4bf4d80efae10e1ebc5854fc9aae76ce2 | 605 | py | Python | OrderManagement/migrations/0006_auto_20201114_0349.py | glen-s-abraham/OnlineDelivery | 138356e17e52d0a4d4a1778f9c440006ce28ae5c | [
"MIT"
] | null | null | null | OrderManagement/migrations/0006_auto_20201114_0349.py | glen-s-abraham/OnlineDelivery | 138356e17e52d0a4d4a1778f9c440006ce28ae5c | [
"MIT"
] | null | null | null | OrderManagement/migrations/0006_auto_20201114_0349.py | glen-s-abraham/OnlineDelivery | 138356e17e52d0a4d4a1778f9c440006ce28ae5c | [
"MIT"
] | null | null | null | # Generated by Django 3.1.2 on 2020-11-14 03:49
from django.conf import settings
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
    """Alter Order.user into a CASCADE-deleting FK to the configured user
    model with related_name='user'."""

    dependencies = [
        migrations.swappable_dependency(settings.AUTH_USER_MODEL),
        ('OrderManagement', '0005_auto_20201107_0415'),
    ]

    operations = [
        migrations.AlterField(
            model_name='order',
            name='user',
            field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='user', to=settings.AUTH_USER_MODEL),
        ),
    ]
| 27.5 | 131 | 0.682645 |
from django.conf import settings
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
    """Django migration (duplicate copy).

    NOTE(review): duplicate of the migration class defined earlier in this
    file (dataset column residue); consider removing one copy.
    """
    dependencies = [
        migrations.swappable_dependency(settings.AUTH_USER_MODEL),
        ('OrderManagement', '0005_auto_20201107_0415'),
    ]
    operations = [
        migrations.AlterField(
            model_name='order',
            name='user',
            field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='user', to=settings.AUTH_USER_MODEL),
        ),
    ]
| true | true |
f71bc4c5740b27cfc91942e00a13eeb0e9219ee7 | 8,569 | py | Python | SNAPPacket.py | MydonSolutions/SNAPpyPackets | 65565ad7edf98371bcfb2b6780a20680afb10347 | [
"Apache-2.0"
] | null | null | null | SNAPPacket.py | MydonSolutions/SNAPpyPackets | 65565ad7edf98371bcfb2b6780a20680afb10347 | [
"Apache-2.0"
] | null | null | null | SNAPPacket.py | MydonSolutions/SNAPpyPackets | 65565ad7edf98371bcfb2b6780a20680afb10347 | [
"Apache-2.0"
] | null | null | null | mask4bits = ((1 << 4) -1)
import numpy as np
mask8bits = ((1 << 8) -1)
mask16bits = ((1 << 16) -1)
mask64bits = ((1 << 64) -1)
class SNAPPacket(object):
"""
ATA SNAP Firmware Manual, Release 2.0.0
---------------------------------------
Section 2.3.2 "Output Data Formats: Voltage Packets", pg 5
https://github.com/realtimeradio/ata_snap/blob/nov-observing/docs/manual.pdf
struct voltage_packet {
uint8_t version;
uint8_t type;
uint16_t n_chans;
uint16_t chan;
uint16_t feng_id
uint64_t timestamp;
complex4 data[n_chans, 16, 2] // 4-bit real + 4-bit imaginary
};
• version; Firmware version: Bit [7] is always 1 for Voltage packets. The remaining bits contain a
compile-time defined firmware version, represented in the form bit[6].bits[5:3].bits[2:0]. This document
refers to firmware version 2.0.0.
• type; Packet type: Bit [0] is 1 if the axes of data payload are in order [slowest to fastest] channel x time x
polarization. This is currently the only supported mode. Bit [1] is 0 if the data payload comprises 4+4 bit
complex integers. This is currently the only supported mode.
• n_chans; Number of Channels: Indicates the number of frequency channels present in the payload of
this data packet.
• chan; Channel number: The index of the first channel present in this packet. For example, a channel
number c implies the packet contains channels c to c + n_chans - 1.
• feng_id; Antenna ID: A runtime configurable ID which uniquely associates a packet with a particular
SNAP board.
• timestamp; Sample number: The index of the first time sample present in this packet. For example, a
sample number 𝑠 implies the packet contains samples 𝑠 to 𝑠 + 15. Sample number can be referred to GPS
time through knowledge of the system sampling
"""
def __init__(self,
fwVersion: int = None,
packetType: bool = None,
channels: int = None,
channelNum: int = None,
fEngineId: int = None,
packetNumber: int = None,
samples: [int] = None,
packetBytes: bytearray = None,
byteorder: str = 'big'
):
self.bytearr = bytearray(8192+16)
self.payloadbytes = -1
if packetBytes is not None:
self.setHeader(
int.from_bytes(packetBytes[0:1], byteorder=byteorder),
int.from_bytes(packetBytes[1:2], byteorder=byteorder),
int.from_bytes(packetBytes[2:4], byteorder=byteorder),
int.from_bytes(packetBytes[4:6], byteorder=byteorder),
int.from_bytes(packetBytes[6:8], byteorder=byteorder),
int.from_bytes(packetBytes[8:16], byteorder=byteorder)
)
self.setSampleBytes(packetBytes[16:])
else:
if not self.setHeader(fwVersion, packetType, channels, channelNum, fEngineId, packetNumber):
exit()
if not self.setSamples(samples):
exit()
def setHeader(self,
fwVersion: int = None,
packetType: bool = None,
channels: int = None,
channelNum: int = None,
fEngineId: int = None,
packetNumber: int = None,
update: bool = False
):
notAllArgs = False
if fwVersion is not None:
self.fwVersion = fwVersion & mask8bits
self.bytearr[0] = self.fwVersion
else:
notAllArgs = True
if packetType is not None:
self.packetType = (3 if packetType else 0) & mask8bits
self.bytearr[1] = self.packetType
else:
notAllArgs = True
if channels is not None:
self.channels = channels & mask16bits
self.bytearr[2] = (self.channels >> 8) & mask8bits
self.bytearr[3] = self.channels & mask8bits
else:
notAllArgs = True
if channelNum is not None:
self.channelNum = channelNum & mask16bits
self.bytearr[4] = (self.channelNum >> 8) & mask8bits
self.bytearr[5] = self.channelNum & mask8bits
else:
notAllArgs = True
if fEngineId is not None:
self.fEngineId = fEngineId & mask16bits
self.bytearr[6] = (self.fEngineId >> 8) & mask8bits
self.bytearr[7] = self.fEngineId & mask8bits
else:
notAllArgs = True
if packetNumber is not None:
self.packetNumber = packetNumber & mask64bits
self.bytearr[ 8] = (self.packetNumber >> 56) & mask8bits
self.bytearr[ 9] = (self.packetNumber >> 48) & mask8bits
self.bytearr[10] = (self.packetNumber >> 40) & mask8bits
self.bytearr[11] = (self.packetNumber >> 32) & mask8bits
self.bytearr[12] = (self.packetNumber >> 24) & mask8bits
self.bytearr[13] = (self.packetNumber >> 16) & mask8bits
self.bytearr[14] = (self.packetNumber >> 8) & mask8bits
self.bytearr[15] = self.packetNumber & mask8bits
else:
notAllArgs = True
self.payloadbytes = self.channels * 2 * 16
if notAllArgs and not update:
print("Please provide all of the header's arguments.");
self.payloadbytes = -1
return False
return True
def setSamples(self, samples):
if len(samples)/2 != self.payloadbytes:
print("Header inferred payload byte size {} differs from samples length {}\n".format(
self.payloadbytes, len(samples)/2
))
return False
for sampleI in range(self.payloadbytes):
self.bytearr[16+sampleI] = ((samples[2*sampleI] & mask4bits) << 4) + (samples[2*sampleI+1] & mask4bits)
return True
def setSampleBytes(self, samples):
if len(samples) != self.payloadbytes:
print("Header inferred payload byte size {} differs from samples length {}\n".format(
self.payloadbytes, len(samples)
))
return False
self.bytearr[16:self.payloadbytes] = samples
return True
def packet(self):
return self.bytearr[:16+self.payloadbytes]
def print(self, headerOnly=False):
if headerOnly:
print(self.headerStr())
else:
print(self.str())
def twosCompliment(self, value, bits):
return value if value < (1<<(bits-1)) else (value % (1<<(bits-1))) - (1<<(bits-1))
def str(self):
return """{}
\rSamples (0x): {}""".format(self.headerStr(),
[complex(self.twosCompliment(i>>4, 4) , self.twosCompliment(i & mask4bits, 4))
for i in self.bytearr[16:self.payloadbytes]])
def headerStr(self):
return """Firmware Version: {}
\rPacket type: {}
\rNumber of Channels: {}
\rChannel number: {}
\rAntenna ID: {}
\rPacket number: {}
\rPayload bytes: {}""".format(self.fwVersion,
self.packetType,
self.channels,
self.channelNum,
self.fEngineId,
self.packetNumber,
self.payloadbytes)
def update(self,
fwVersion: int = None,
packetType: bool = None,
channels: int = None,
channelNum: int = None,
fEngineId: int = None,
packetNumber: int = None,
samples: [int] = None
):
self.setHeader(fwVersion, packetType, channels, channelNum, fEngineId, packetNumber, update=True)
if samples is not None:
self.setSamples(samples)
# Self-test: build a packet, serialize it, parse it back, and print both.
if __name__ == '__main__':
    testPacket = SNAPPacket(
        0,
        True,
        2,
        2,
        0,
        3735928559,
        # Bug fix: a 2-channel payload is 2*2*16 = 64 bytes, and each byte
        # packs TWO 4-bit values, so 128 sample values are required; the
        # previous 64-value list failed setSamples' length check and exited.
        [i % 16 for i in range(16 * 2 * 2 * 2)]
    )
    testPacket.print()
    testPacketBytes = testPacket.packet()

    dupPacket = SNAPPacket(packetBytes=testPacketBytes)
    dupPacket.print()
    dupPacketBytes = dupPacket.packet()

    print(testPacketBytes)
    print(dupPacketBytes)
| 38.95 | 119 | 0.555257 | mask4bits = ((1 << 4) -1)
import numpy as np
mask8bits = ((1 << 8) -1)
mask16bits = ((1 << 16) -1)
mask64bits = ((1 << 64) -1)
class SNAPPacket(object):
def __init__(self,
fwVersion: int = None,
packetType: bool = None,
channels: int = None,
channelNum: int = None,
fEngineId: int = None,
packetNumber: int = None,
samples: [int] = None,
packetBytes: bytearray = None,
byteorder: str = 'big'
):
self.bytearr = bytearray(8192+16)
self.payloadbytes = -1
if packetBytes is not None:
self.setHeader(
int.from_bytes(packetBytes[0:1], byteorder=byteorder),
int.from_bytes(packetBytes[1:2], byteorder=byteorder),
int.from_bytes(packetBytes[2:4], byteorder=byteorder),
int.from_bytes(packetBytes[4:6], byteorder=byteorder),
int.from_bytes(packetBytes[6:8], byteorder=byteorder),
int.from_bytes(packetBytes[8:16], byteorder=byteorder)
)
self.setSampleBytes(packetBytes[16:])
else:
if not self.setHeader(fwVersion, packetType, channels, channelNum, fEngineId, packetNumber):
exit()
if not self.setSamples(samples):
exit()
def setHeader(self,
fwVersion: int = None,
packetType: bool = None,
channels: int = None,
channelNum: int = None,
fEngineId: int = None,
packetNumber: int = None,
update: bool = False
):
notAllArgs = False
if fwVersion is not None:
self.fwVersion = fwVersion & mask8bits
self.bytearr[0] = self.fwVersion
else:
notAllArgs = True
if packetType is not None:
self.packetType = (3 if packetType else 0) & mask8bits
self.bytearr[1] = self.packetType
else:
notAllArgs = True
if channels is not None:
self.channels = channels & mask16bits
self.bytearr[2] = (self.channels >> 8) & mask8bits
self.bytearr[3] = self.channels & mask8bits
else:
notAllArgs = True
if channelNum is not None:
self.channelNum = channelNum & mask16bits
self.bytearr[4] = (self.channelNum >> 8) & mask8bits
self.bytearr[5] = self.channelNum & mask8bits
else:
notAllArgs = True
if fEngineId is not None:
self.fEngineId = fEngineId & mask16bits
self.bytearr[6] = (self.fEngineId >> 8) & mask8bits
self.bytearr[7] = self.fEngineId & mask8bits
else:
notAllArgs = True
if packetNumber is not None:
self.packetNumber = packetNumber & mask64bits
self.bytearr[ 8] = (self.packetNumber >> 56) & mask8bits
self.bytearr[ 9] = (self.packetNumber >> 48) & mask8bits
self.bytearr[10] = (self.packetNumber >> 40) & mask8bits
self.bytearr[11] = (self.packetNumber >> 32) & mask8bits
self.bytearr[12] = (self.packetNumber >> 24) & mask8bits
self.bytearr[13] = (self.packetNumber >> 16) & mask8bits
self.bytearr[14] = (self.packetNumber >> 8) & mask8bits
self.bytearr[15] = self.packetNumber & mask8bits
else:
notAllArgs = True
self.payloadbytes = self.channels * 2 * 16
if notAllArgs and not update:
print("Please provide all of the header's arguments.");
self.payloadbytes = -1
return False
return True
def setSamples(self, samples):
if len(samples)/2 != self.payloadbytes:
print("Header inferred payload byte size {} differs from samples length {}\n".format(
self.payloadbytes, len(samples)/2
))
return False
for sampleI in range(self.payloadbytes):
self.bytearr[16+sampleI] = ((samples[2*sampleI] & mask4bits) << 4) + (samples[2*sampleI+1] & mask4bits)
return True
def setSampleBytes(self, samples):
if len(samples) != self.payloadbytes:
print("Header inferred payload byte size {} differs from samples length {}\n".format(
self.payloadbytes, len(samples)
))
return False
self.bytearr[16:self.payloadbytes] = samples
return True
def packet(self):
return self.bytearr[:16+self.payloadbytes]
def print(self, headerOnly=False):
if headerOnly:
print(self.headerStr())
else:
print(self.str())
def twosCompliment(self, value, bits):
return value if value < (1<<(bits-1)) else (value % (1<<(bits-1))) - (1<<(bits-1))
def str(self):
return """{}
\rSamples (0x): {}""".format(self.headerStr(),
[complex(self.twosCompliment(i>>4, 4) , self.twosCompliment(i & mask4bits, 4))
for i in self.bytearr[16:self.payloadbytes]])
  def headerStr(self):
    """Render the header fields as a human-readable multi-line string."""
    # NOTE: each continuation line starts with a literal carriage return
    # (\r) after the newline; preserved from the original output format.
    return """Firmware Version: {}
    \rPacket type: {}
    \rNumber of Channels: {}
    \rChannel number: {}
    \rAntenna ID: {}
    \rPacket number: {}
    \rPayload bytes: {}""".format(self.fwVersion,
                  self.packetType,
                  self.channels,
                  self.channelNum,
                  self.fEngineId,
                  self.packetNumber,
                  self.payloadbytes)
def update(self,
fwVersion: int = None,
packetType: bool = None,
channels: int = None,
channelNum: int = None,
fEngineId: int = None,
packetNumber: int = None,
samples: [int] = None
):
self.setHeader(fwVersion, packetType, channels, channelNum, fEngineId, packetNumber, update=True)
if samples is not None:
self.setSamples(samples)
if __name__ == '__main__':
  # Round-trip smoke test: build a packet from explicit fields, serialize
  # it, then rebuild an equivalent packet from the raw bytes and compare.
  # NOTE(review): 16*2*2 = 64 sample values is half of what setSamples
  # expects (2 samples per payload byte, payloadbytes = 2*2*16 = 64, so
  # 128 values) -- confirm whether this is meant to hit the error path.
  testPacket = SNAPPacket(
      0,
      True,
      2,
      2,
      0,
      3735928559,
      [i % 16 for i in range(16*2*2)]
  )
  testPacket.print()
  testPacketBytes = testPacket.packet()
  # Reconstruct a packet purely from the serialized byte stream.
  dupPacket = SNAPPacket(packetBytes=testPacketBytes)
  dupPacket.print()
  dupPacketBytes = dupPacket.packet()
  print(testPacketBytes)
  print(dupPacketBytes)
| true | true |
f71bc5652835fe64720685b82cd43fc0850879a0 | 1,794 | py | Python | tools/ncbi_blast_plus/check_no_duplicates.py | globusgenomics/galaxy | 7caf74d9700057587b3e3434c64e82c5b16540f1 | [
"CC-BY-3.0"
] | 1 | 2021-02-05T13:19:58.000Z | 2021-02-05T13:19:58.000Z | tools/ncbi_blast_plus/check_no_duplicates.py | globusgenomics/galaxy | 7caf74d9700057587b3e3434c64e82c5b16540f1 | [
"CC-BY-3.0"
] | null | null | null | tools/ncbi_blast_plus/check_no_duplicates.py | globusgenomics/galaxy | 7caf74d9700057587b3e3434c64e82c5b16540f1 | [
"CC-BY-3.0"
] | null | null | null | #!/usr/bin/env python
"""Check for duplicate sequence identifiers in FASTA files.
This is run as a pre-check before makeblastdb, in order to avoid
a regression bug in BLAST+ 2.2.28 which fails to catch this. See:
http://blastedbio.blogspot.co.uk/2012/10/my-ids-not-good-enough-for-ncbi-blast.html
This script takes one or more FASTA filenames as input, and
will return a non-zero error if any duplicate identifiers
are found.
"""
import gzip
import os
import sys

if "-v" in sys.argv or "--version" in sys.argv:
    print("v0.0.23")
    sys.exit(0)

identifiers = set()  # every sequence id seen so far, across all files
files = 0
for filename in sys.argv[1:]:
    if not os.path.isfile(filename):
        sys.stderr.write("Missing FASTA file %r\n" % filename)
        sys.exit(2)
    files += 1
    with open(filename, "rb") as binary_handle:
        magic = binary_handle.read(2)
        if not magic:
            # Empty file, special case
            continue
        elif magic == b'\x1f\x8b':
            # Gzipped
            handle = gzip.open(filename, "rt")
        elif magic[0:1] == b">":
            # Not gzipped, should be plain FASTA
            handle = open(filename, "r")
        else:
            # BUGFIX: this branch previously fell through with 'handle'
            # unbound, raising NameError in the loop below for any file
            # that is neither gzipped nor plain FASTA.
            sys.stderr.write("File %r is not FASTA or gzipped FASTA\n" % filename)
            sys.exit(2)
        for line in handle:
            if line.startswith(">"):
                # The split will also take care of the new line character,
                # e.g. ">test\n" and ">test description here\n" both give "test"
                seq_id = line[1:].split(None, 1)[0]
                if seq_id in identifiers:
                    handle.close()
                    sys.exit("Repeated identifiers, e.g. %r" % seq_id)
                identifiers.add(seq_id)
        handle.close()
if not files:
    sys.stderr.write("No FASTA files given to check for duplicates\n")
    sys.exit(3)
elif files == 1:
    print("%i sequences" % len(identifiers))
else:
    print("%i sequences in %i FASTA files" % (len(identifiers), files))
| 30.931034 | 83 | 0.627648 |
import gzip
import os
import sys
if "-v" in sys.argv or "--version" in sys.argv:
print("v0.0.23")
sys.exit(0)
identifiers = set()
files = 0
for filename in sys.argv[1:]:
if not os.path.isfile(filename):
sys.stderr.write("Missing FASTA file %r\n" % filename)
sys.exit(2)
files += 1
with open(filename, "rb") as binary_handle:
magic = binary_handle.read(2)
if not magic:
continue
elif magic == b'\x1f\x8b':
handle = gzip.open(filename, "rt")
elif magic[0:1] == b">":
handle = open(filename, "r")
for line in handle:
if line.startswith(">"):
seq_id = line[1:].split(None, 1)[0]
if seq_id in identifiers:
handle.close()
sys.exit("Repeated identifiers, e.g. %r" % seq_id)
identifiers.add(seq_id)
handle.close()
if not files:
sys.stderr.write("No FASTA files given to check for duplicates\n")
sys.exit(3)
elif files == 1:
print("%i sequences" % len(identifiers))
else:
print("%i sequences in %i FASTA files" % (len(identifiers), files))
| true | true |
f71bc619e7d4702a0d959637f7bce8d52e79debf | 3,636 | py | Python | LaserCommandConstants.py | jaredly/meerk40t | 446427e29104cb89fd2ee17ad824fc801d44afe0 | [
"MIT"
] | null | null | null | LaserCommandConstants.py | jaredly/meerk40t | 446427e29104cb89fd2ee17ad824fc801d44afe0 | [
"MIT"
] | null | null | null | LaserCommandConstants.py | jaredly/meerk40t | 446427e29104cb89fd2ee17ad824fc801d44afe0 | [
"MIT"
] | null | null | null | """
Laser Commands are a middle language of commands for spooling and interpreting.
NOTE: Never use the integer value, only the command name. The integer values are
permitted to change.
COMMAND_PLOT: takes a plot object to generate simple plot commands.
COMMAND_RASTER: takes a raster plot object which generates simple raster commands.
Simple plot values are x, y, on. Where x and y are the position in absolute values and on is whether the laser fires
for that particular move command. The plot is expected to use svgelements code, passed to zinglplotter code.
The raster is expected to used RasterBuilder which should be able to plot any raster in any fashion.
A COMMAND_RESUME would have to be issued in realtime since in a paused state the commands are not processed.
"""
COMMAND_LASER_OFF = 1  # Turns laser off
COMMAND_LASER_ON = 2  # Turns laser on
COMMAND_LASER_DISABLE = 5  # Disables the laser
COMMAND_LASER_ENABLE = 6  # Enables the laser
COMMAND_MOVE = 10  # Performs a line move (laser off)
COMMAND_CUT = 11  # Performs a line cut (laser on)
COMMAND_WAIT = 20  # Pauses the given time in seconds (floats accepted)
COMMAND_WAIT_FINISH = 21  # Waits until the buffer is finished
COMMAND_MODE_RAPID = 50  # Rapid-positioning mode
COMMAND_MODE_PROGRAM = 51  # Programmed-motion mode
COMMAND_MODE_FINISHED = 52  # Finished mode
COMMAND_PLOT = 100  # Takes a plot object
COMMAND_RASTER = 101  # Takes a raster plot object
COMMAND_SET_SPEED = 200  # sets the speed for the device
COMMAND_SET_POWER = 201  # sets the power, out of 1000; unknown power method
COMMAND_SET_PPI = 203  # sets the PPI power, out of 1000
# NOTE(review): COMMAND_SET_PWM shares value 203 with COMMAND_SET_PPI --
# confirm this aliasing is intentional (the module docstring says only the
# names, never the integer values, are stable).
COMMAND_SET_PWM = 203  # sets the PWM power, out of 1000
COMMAND_SET_STEP = 205  # sets the raster step for the device
COMMAND_SET_DIRECTION = 209  # sets the directions for the device
COMMAND_SET_OVERSCAN = 206  # sets the overscan amount
COMMAND_SET_D_RATIO = 207  # sets the diagonal_ratio for the device
COMMAND_SET_ACCELERATION = 208  # sets the acceleration for the device, 1-4
COMMAND_SET_INCREMENTAL = 210  # sets the commands to be relative to current position
COMMAND_SET_ABSOLUTE = 211  # sets the commands to be absolute positions
COMMAND_SET_POSITION = 220  # Without moving, sets the current position to the given coord
COMMAND_HOME = 300  # Homes the device
COMMAND_LOCK = 301  # Locks the rail
COMMAND_UNLOCK = 302  # Unlocks the rail
COMMAND_BEEP = 320  # Beep
COMMAND_FUNCTION = 350  # Execute the function given by this command; blocking
COMMAND_SIGNAL = 360  # Sends the signal, given: "signal_name", operands
# Realtime commands bypass the spooled queue (a paused state does not
# process spooled commands, so e.g. RESUME must be issued in realtime).
REALTIME_RESET = 1000  # Resets the state, purges buffers
REALTIME_PAUSE = 1010  # Issue a pause command
REALTIME_RESUME = 1020  # Issue a resume command
REALTIME_STATUS = 1030  # Issue a status command
REALTIME_SAFETY_DOOR = 1040  # Issues a forced safety_door state
REALTIME_JOG_CANCEL = 1050  # Issues a jog cancel; cancels any jogging being processed
REALTIME_SPEED_PERCENT = 1060  # Set the speed to this percent value of total
REALTIME_RAPID_PERCENT = 1070  # Sets the rapid speed to this percent value of total
REALTIME_POWER_PERCENT = 1080  # Sets the power to this percent value of total
REALTIME_SPEED = 1061  # Sets the speed to this absolute value
REALTIME_RAPID = 1071  # Sets the rapid speed to this absolute value
REALTIME_POWER = 1081  # Sets the power to this absolute value
REALTIME_OVERSCAN = 1091  # Sets the overscan amount to this value
REALTIME_LASER_DISABLE = 1100  # Disables the laser
REALTIME_LASER_ENABLE = 1101  # Enables the laser
REALTIME_FLOOD_COOLANT = 1210  # Toggle flood coolant
REALTIME_MIST_COOLANT = 1220  # Toggle mist coolant
| 52.695652 | 117 | 0.764851 |
COMMAND_LASER_OFF = 1
COMMAND_LASER_ON = 2
COMMAND_LASER_DISABLE = 5
COMMAND_LASER_ENABLE = 6
COMMAND_MOVE = 10
COMMAND_CUT = 11
COMMAND_WAIT = 20
COMMAND_WAIT_FINISH = 21
COMMAND_MODE_RAPID = 50
COMMAND_MODE_PROGRAM = 51
COMMAND_MODE_FINISHED = 52
COMMAND_PLOT = 100
COMMAND_RASTER = 101
COMMAND_SET_SPEED = 200
COMMAND_SET_POWER = 201
COMMAND_SET_PPI = 203
COMMAND_SET_PWM = 203
COMMAND_SET_STEP = 205
COMMAND_SET_DIRECTION = 209
COMMAND_SET_OVERSCAN = 206
COMMAND_SET_D_RATIO = 207
COMMAND_SET_ACCELERATION = 208
COMMAND_SET_INCREMENTAL = 210
COMMAND_SET_ABSOLUTE = 211
COMMAND_SET_POSITION = 220
COMMAND_HOME = 300
COMMAND_LOCK = 301
COMMAND_UNLOCK = 302
COMMAND_BEEP = 320
COMMAND_FUNCTION = 350
COMMAND_SIGNAL = 360
REALTIME_RESET = 1000
REALTIME_PAUSE = 1010
REALTIME_RESUME = 1020
REALTIME_STATUS = 1030
REALTIME_SAFETY_DOOR = 1040
REALTIME_JOG_CANCEL = 1050
REALTIME_SPEED_PERCENT = 1060
REALTIME_RAPID_PERCENT = 1070
REALTIME_POWER_PERCENT = 1080
REALTIME_SPEED = 1061
REALTIME_RAPID = 1071
REALTIME_POWER = 1081
REALTIME_OVERSCAN = 1091
REALTIME_LASER_DISABLE = 1100
REALTIME_LASER_ENABLE = 1101
REALTIME_FLOOD_COOLANT = 1210
REALTIME_MIST_COOLANT = 1220
| true | true |
f71bc67e5ca5a22411122ca46bcf374816eb0293 | 2,950 | py | Python | alipay/aop/api/domain/AlipayFundAuthOperationCancelModel.py | articuly/alipay-sdk-python-all | 0259cd28eca0f219b97dac7f41c2458441d5e7a6 | [
"Apache-2.0"
] | null | null | null | alipay/aop/api/domain/AlipayFundAuthOperationCancelModel.py | articuly/alipay-sdk-python-all | 0259cd28eca0f219b97dac7f41c2458441d5e7a6 | [
"Apache-2.0"
] | null | null | null | alipay/aop/api/domain/AlipayFundAuthOperationCancelModel.py | articuly/alipay-sdk-python-all | 0259cd28eca0f219b97dac7f41c2458441d5e7a6 | [
"Apache-2.0"
] | null | null | null | #!/usr/bin/env python
# -*- coding: utf-8 -*-
import simplejson as json
from alipay.aop.api.constant.ParamConstants import *
class AlipayFundAuthOperationCancelModel(object):
    """Request model for the alipay.fund.auth.operation.cancel API."""

    # Attribute names shared by __init__ and the dict (de)serializers below.
    _FIELD_NAMES = ('auth_no', 'operation_id', 'out_order_no',
                    'out_request_no', 'remark')

    def __init__(self):
        # All fields start unset; only truthy fields are serialized.
        for field_name in self._FIELD_NAMES:
            setattr(self, '_' + field_name, None)

    @property
    def auth_no(self):
        return self._auth_no

    @auth_no.setter
    def auth_no(self, value):
        self._auth_no = value

    @property
    def operation_id(self):
        return self._operation_id

    @operation_id.setter
    def operation_id(self, value):
        self._operation_id = value

    @property
    def out_order_no(self):
        return self._out_order_no

    @out_order_no.setter
    def out_order_no(self, value):
        self._out_order_no = value

    @property
    def out_request_no(self):
        return self._out_request_no

    @out_request_no.setter
    def out_request_no(self, value):
        self._out_request_no = value

    @property
    def remark(self):
        return self._remark

    @remark.setter
    def remark(self, value):
        self._remark = value

    def to_alipay_dict(self):
        """Serialize the populated (truthy) fields into a plain dict."""
        params = dict()
        for field_name in self._FIELD_NAMES:
            value = getattr(self, field_name)
            if not value:
                continue
            # Nested model objects serialize themselves when they can.
            if hasattr(value, 'to_alipay_dict'):
                params[field_name] = value.to_alipay_dict()
            else:
                params[field_name] = value
        return params

    @staticmethod
    def from_alipay_dict(d):
        """Build a model from a response dict; returns None for empty input."""
        if not d:
            return None
        instance = AlipayFundAuthOperationCancelModel()
        for field_name in AlipayFundAuthOperationCancelModel._FIELD_NAMES:
            if field_name in d:
                setattr(instance, field_name, d[field_name])
        return instance
| 29.207921 | 79 | 0.594576 |
import simplejson as json
from alipay.aop.api.constant.ParamConstants import *
class AlipayFundAuthOperationCancelModel(object):
def __init__(self):
self._auth_no = None
self._operation_id = None
self._out_order_no = None
self._out_request_no = None
self._remark = None
@property
def auth_no(self):
return self._auth_no
@auth_no.setter
def auth_no(self, value):
self._auth_no = value
@property
def operation_id(self):
return self._operation_id
@operation_id.setter
def operation_id(self, value):
self._operation_id = value
@property
def out_order_no(self):
return self._out_order_no
@out_order_no.setter
def out_order_no(self, value):
self._out_order_no = value
@property
def out_request_no(self):
return self._out_request_no
@out_request_no.setter
def out_request_no(self, value):
self._out_request_no = value
@property
def remark(self):
return self._remark
@remark.setter
def remark(self, value):
self._remark = value
def to_alipay_dict(self):
params = dict()
if self.auth_no:
if hasattr(self.auth_no, 'to_alipay_dict'):
params['auth_no'] = self.auth_no.to_alipay_dict()
else:
params['auth_no'] = self.auth_no
if self.operation_id:
if hasattr(self.operation_id, 'to_alipay_dict'):
params['operation_id'] = self.operation_id.to_alipay_dict()
else:
params['operation_id'] = self.operation_id
if self.out_order_no:
if hasattr(self.out_order_no, 'to_alipay_dict'):
params['out_order_no'] = self.out_order_no.to_alipay_dict()
else:
params['out_order_no'] = self.out_order_no
if self.out_request_no:
if hasattr(self.out_request_no, 'to_alipay_dict'):
params['out_request_no'] = self.out_request_no.to_alipay_dict()
else:
params['out_request_no'] = self.out_request_no
if self.remark:
if hasattr(self.remark, 'to_alipay_dict'):
params['remark'] = self.remark.to_alipay_dict()
else:
params['remark'] = self.remark
return params
@staticmethod
def from_alipay_dict(d):
if not d:
return None
o = AlipayFundAuthOperationCancelModel()
if 'auth_no' in d:
o.auth_no = d['auth_no']
if 'operation_id' in d:
o.operation_id = d['operation_id']
if 'out_order_no' in d:
o.out_order_no = d['out_order_no']
if 'out_request_no' in d:
o.out_request_no = d['out_request_no']
if 'remark' in d:
o.remark = d['remark']
return o
| true | true |
f71bc6ac3785acf1ddb69695bfccc9bb6edc5daf | 155 | py | Python | pangram/pangram.py | pierrebeaucamp/Exercism-Python | 910b764c6726e9f131fb3a394c70d9b5bb167be9 | [
"Unlicense"
] | null | null | null | pangram/pangram.py | pierrebeaucamp/Exercism-Python | 910b764c6726e9f131fb3a394c70d9b5bb167be9 | [
"Unlicense"
] | null | null | null | pangram/pangram.py | pierrebeaucamp/Exercism-Python | 910b764c6726e9f131fb3a394c70d9b5bb167be9 | [
"Unlicense"
] | null | null | null | import string
import regex
def is_pangram(i):
    """Return True if *i* contains every ASCII letter a-z at least once.

    Case-insensitive; non-letter characters are ignored.  Replaces the
    previous third-party ``regex`` filter + sort with a stdlib subset
    test, which is both simpler and O(n) instead of O(n log n).
    """
    return set(string.ascii_lowercase) <= set(i.lower())
| 22.142857 | 62 | 0.645161 | import string
import regex
def is_pangram(i):
l = sorted(list(set(regex.sub(r'[^a-z]', "", i.lower()))))
return l == list(string.ascii_lowercase)
| true | true |
f71bc6f28b8786902b54ebafec07bd4a22b0c3b6 | 763 | py | Python | my_blog/my_blog/apps/blogs/migrations/0002_articlecomment.py | TNTfeizai/my_Blog | 71c3720a27fab49a0d25d21029bdeb5f68e4965b | [
"MIT"
] | null | null | null | my_blog/my_blog/apps/blogs/migrations/0002_articlecomment.py | TNTfeizai/my_Blog | 71c3720a27fab49a0d25d21029bdeb5f68e4965b | [
"MIT"
] | 7 | 2020-06-06T00:53:43.000Z | 2022-03-12T00:19:03.000Z | my_blog/my_blog/apps/blogs/migrations/0002_articlecomment.py | TNTfeizai/my_Blog | 71c3720a27fab49a0d25d21029bdeb5f68e4965b | [
"MIT"
] | null | null | null | # -*- coding: utf-8 -*-
# Generated by Django 1.11.11 on 2020-03-13 02:46
from __future__ import unicode_literals
from django.db import migrations, models
import django.utils.timezone
class Migration(migrations.Migration):
dependencies = [
('blogs', '0001_initial'),
]
operations = [
migrations.CreateModel(
name='ArticleComment',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('body', models.TextField()),
('username', models.CharField(max_length=50)),
('createtime', models.DateTimeField(default=django.utils.timezone.now, verbose_name='创建时间')),
],
),
]
| 29.346154 | 114 | 0.605505 |
from __future__ import unicode_literals
from django.db import migrations, models
import django.utils.timezone
class Migration(migrations.Migration):
dependencies = [
('blogs', '0001_initial'),
]
operations = [
migrations.CreateModel(
name='ArticleComment',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('body', models.TextField()),
('username', models.CharField(max_length=50)),
('createtime', models.DateTimeField(default=django.utils.timezone.now, verbose_name='创建时间')),
],
),
]
| true | true |
f71bc746826b79bd70dea70d0dd90cfcbd116ff4 | 2,525 | py | Python | confirm.py | Pckool/GCG | cee786d04ea30f3995e910bca82635f442b2a6a8 | [
"MIT"
] | null | null | null | confirm.py | Pckool/GCG | cee786d04ea30f3995e910bca82635f442b2a6a8 | [
"MIT"
] | null | null | null | confirm.py | Pckool/GCG | cee786d04ea30f3995e910bca82635f442b2a6a8 | [
"MIT"
] | null | null | null | # -*- coding: utf-8 -*-
# Form implementation generated from reading ui file 'confirm.ui'
#
# Created by: PyQt5 UI code generator 5.9.1
#
# WARNING! All changes made in this file will be lost!
from PyQt5 import QtCore, QtGui, QtWidgets
class Ui_Dialog(object):
    """pyuic5-generated UI: a confirmation dialog with a centered label
    above a Yes/Cancel button box.  Do not hand-edit layout values --
    regenerate from confirm.ui instead (edits are lost on regeneration).
    """
    def setupUi(self, Dialog):
        """Build and wire the widgets onto *Dialog* (a QDialog)."""
        Dialog.setObjectName("Dialog")
        Dialog.resize(400, 120)
        # Allow growing but clamp between 400x120 and 400x150.
        sizePolicy = QtWidgets.QSizePolicy(QtWidgets.QSizePolicy.MinimumExpanding, QtWidgets.QSizePolicy.MinimumExpanding)
        sizePolicy.setHorizontalStretch(0)
        sizePolicy.setVerticalStretch(0)
        sizePolicy.setHeightForWidth(Dialog.sizePolicy().hasHeightForWidth())
        Dialog.setSizePolicy(sizePolicy)
        Dialog.setMinimumSize(QtCore.QSize(400, 120))
        Dialog.setMaximumSize(QtCore.QSize(400, 150))
        font = QtGui.QFont()
        font.setFamily("Gadugi")
        Dialog.setFont(font)
        self.verticalLayoutWidget = QtWidgets.QWidget(Dialog)
        self.verticalLayoutWidget.setGeometry(QtCore.QRect(10, 10, 381, 101))
        self.verticalLayoutWidget.setObjectName("verticalLayoutWidget")
        self.verticalLayout = QtWidgets.QVBoxLayout(self.verticalLayoutWidget)
        self.verticalLayout.setSizeConstraint(QtWidgets.QLayout.SetMinimumSize)
        self.verticalLayout.setContentsMargins(0, 0, 0, 0)
        self.verticalLayout.setObjectName("verticalLayout")
        # Message label; its text is filled in at runtime (see retranslateUi).
        self.label = QtWidgets.QLabel(self.verticalLayoutWidget)
        font = QtGui.QFont()
        font.setFamily("Gadugi")
        font.setPointSize(12)
        self.label.setFont(font)
        self.label.setAlignment(QtCore.Qt.AlignCenter)
        self.label.setObjectName("label")
        self.verticalLayout.addWidget(self.label)
        self.buttonBox = QtWidgets.QDialogButtonBox(self.verticalLayoutWidget)
        self.buttonBox.setOrientation(QtCore.Qt.Horizontal)
        self.buttonBox.setStandardButtons(QtWidgets.QDialogButtonBox.Cancel|QtWidgets.QDialogButtonBox.Yes)
        self.buttonBox.setCenterButtons(True)
        self.buttonBox.setObjectName("buttonBox")
        self.verticalLayout.addWidget(self.buttonBox)

        self.retranslateUi(Dialog)
        # Yes -> accept(), Cancel -> reject().
        self.buttonBox.accepted.connect(Dialog.accept)
        self.buttonBox.rejected.connect(Dialog.reject)
        QtCore.QMetaObject.connectSlotsByName(Dialog)

    def retranslateUi(self, Dialog):
        """Apply translatable strings; the label is a placeholder ("-")."""
        _translate = QtCore.QCoreApplication.translate
        Dialog.setWindowTitle(_translate("Dialog", "Dialog"))
        self.label.setText(_translate("Dialog", "-"))
| 44.298246 | 122 | 0.717624 |
from PyQt5 import QtCore, QtGui, QtWidgets
class Ui_Dialog(object):
def setupUi(self, Dialog):
Dialog.setObjectName("Dialog")
Dialog.resize(400, 120)
sizePolicy = QtWidgets.QSizePolicy(QtWidgets.QSizePolicy.MinimumExpanding, QtWidgets.QSizePolicy.MinimumExpanding)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(Dialog.sizePolicy().hasHeightForWidth())
Dialog.setSizePolicy(sizePolicy)
Dialog.setMinimumSize(QtCore.QSize(400, 120))
Dialog.setMaximumSize(QtCore.QSize(400, 150))
font = QtGui.QFont()
font.setFamily("Gadugi")
Dialog.setFont(font)
self.verticalLayoutWidget = QtWidgets.QWidget(Dialog)
self.verticalLayoutWidget.setGeometry(QtCore.QRect(10, 10, 381, 101))
self.verticalLayoutWidget.setObjectName("verticalLayoutWidget")
self.verticalLayout = QtWidgets.QVBoxLayout(self.verticalLayoutWidget)
self.verticalLayout.setSizeConstraint(QtWidgets.QLayout.SetMinimumSize)
self.verticalLayout.setContentsMargins(0, 0, 0, 0)
self.verticalLayout.setObjectName("verticalLayout")
self.label = QtWidgets.QLabel(self.verticalLayoutWidget)
font = QtGui.QFont()
font.setFamily("Gadugi")
font.setPointSize(12)
self.label.setFont(font)
self.label.setAlignment(QtCore.Qt.AlignCenter)
self.label.setObjectName("label")
self.verticalLayout.addWidget(self.label)
self.buttonBox = QtWidgets.QDialogButtonBox(self.verticalLayoutWidget)
self.buttonBox.setOrientation(QtCore.Qt.Horizontal)
self.buttonBox.setStandardButtons(QtWidgets.QDialogButtonBox.Cancel|QtWidgets.QDialogButtonBox.Yes)
self.buttonBox.setCenterButtons(True)
self.buttonBox.setObjectName("buttonBox")
self.verticalLayout.addWidget(self.buttonBox)
self.retranslateUi(Dialog)
self.buttonBox.accepted.connect(Dialog.accept)
self.buttonBox.rejected.connect(Dialog.reject)
QtCore.QMetaObject.connectSlotsByName(Dialog)
def retranslateUi(self, Dialog):
_translate = QtCore.QCoreApplication.translate
Dialog.setWindowTitle(_translate("Dialog", "Dialog"))
self.label.setText(_translate("Dialog", "-"))
| true | true |
f71bc76ba618eb3ac748680ddd30d14592adde8c | 3,556 | py | Python | src/bindings/python/DocStrings/CDLTransform.py | jmertic/OpenColorIO | 9b18fd69f981288a6a3640e283b8d9968a15423e | [
"BSD-3-Clause"
] | 1 | 2019-11-18T21:49:25.000Z | 2019-11-18T21:49:25.000Z | src/bindings/python/DocStrings/CDLTransform.py | KevinJW/OpenColorIO | 412aa7ba273616867e607de646e4975791198812 | [
"BSD-3-Clause"
] | 1 | 2020-06-12T19:10:09.000Z | 2020-06-12T19:10:09.000Z | src/bindings/python/DocStrings/CDLTransform.py | KevinJW/OpenColorIO | 412aa7ba273616867e607de646e4975791198812 | [
"BSD-3-Clause"
] | null | null | null | # SPDX-License-Identifier: BSD-3-Clause
# Copyright Contributors to the OpenColorIO Project.
class CDLTransform:
    """
    CDLTransform

    Documentation stub for the OpenColorIO ASC CDL (Color Decision List)
    transform: per-channel slope/offset/power (SOP) plus saturation (SAT).
    All methods are implemented natively; bodies here are placeholders.
    """
    def __init__(self):
        pass
    def equals(self, cdl):
        """
        equals(cdl)
        :param cdl: a cdl transform
        :type cdl: :py:class:`PyOpenColorIO.CDLTransform`
        Returns True if cdl is equal to :py:class:`PyOpenColorIO.CDLTransform`.
        """
        pass
    def validate(self):
        """
        validate()
        Throw if :py:class:`PyOpenColorIO.CDLTransform` contains illegal parameters.
        """
        pass
    def CreateFromFile(self, src, cccid):
        """
        CreateFromFile(src, cccid)
        Loads a :py:class:`PyOpenColorIO.CDLTransform` from a file.
        :param src: path to a .cc or .ccc file
        :type src: string
        :param cccid: id of the transform within a .ccc file
        :type cccid: string
        """
        pass
    def getXML(self):
        """
        getXML()
        Returns the CDL serialized as an XML string.
        :rtype: string
        """
        pass
    def setXML(self, xmltext):
        """
        setXML(str)
        Sets the CDL from an XML string.
        :param str: XML text
        :type str: string
        """
        pass
    def getSlope(self):
        """
        getSlope()
        Returns the slope ('S' part of SOP).
        :rtype: list of three floats
        """
        pass
    def getOffset(self):
        """
        getOffset()
        Returns the offset ('O' part of SOP).
        :rtype: list of three floats
        """
        pass
    def getPower(self):
        """
        getPower()
        Returns the power ('P' part of SOP).
        :rtype: list of three floats
        """
        pass
    def getSOP(self):
        """
        getSOP()
        Returns the combined slope, offset and power values.
        :rtype: list of nine floats
        """
        pass
    def getSat(self):
        """
        getSat()
        Returns the saturation.
        :rtype: float
        """
        pass
    def setSlope(self, slope):
        """
        setSlope(pyData)
        Sets the slope ('S' part of SOP) in :py:class:`PyOpenColorIO.CDLTransform`.
        :param pyData: list of three floats
        :type pyData: object
        """
        pass
    def setOffset(self, offset):
        """
        setOffset(pyData)
        Sets the offset ('O' part of SOP) in :py:class:`PyOpenColorIO.CDLTransform`.
        :param pyData: list of three floats
        :type pyData: object
        """
        pass
    def setPower(self, power):
        """
        setPower(pyData)
        Sets the power ('P' part of SOP) in :py:class:`PyOpenColorIO.CDLTransform`.
        :param pyData: list of three floats
        :type pyData: object
        """
        pass
    def setSOP(self, sop):
        """
        setSOP(pyData)
        Sets SOP in :py:class:`PyOpenColorIO.CDLTransform`.
        :param pyData: list of nine floats
        :type pyData: object
        """
        pass
    def setSat(self, sat):
        """
        setSat(pyData)
        Sets SAT (saturation) in :py:class:`PyOpenColorIO.CDLTransform`.
        :param pyData: saturation
        :type pyData: float
        """
        pass
    def getSatLumaCoefs(self):
        """
        getSatLumaCoefs()
        Returns the SAT (saturation) and luma coefficients in :py:class:`PyOpenColorIO.CDLTransform`.
        :return: saturation and luma coefficients
        :rtype: list of floats
        """
        pass
    def getID(self):
        """
        getID()
        Returns the ID from :py:class:`PyOpenColorIO.CDLTransform`.
        :return: ID
        :rtype: string
        """
        pass
    def setID(self, id):
        """
        setID(str)
        Sets the ID in :py:class:`PyOpenColorIO.CDLTransform`.
        :param str: ID
        :type str: string
        """
        pass
    def getDescription(self):
        """
        getDescription()
        Returns the description of :py:class:`PyOpenColorIO.CDLTransform`.
        :return: description
        :rtype: string
        """
        pass
    def setDescription(self, desc):
        """
        setDescription(str)
        Sets the description of :py:class:`PyOpenColorIO.CDLTransform`.
        :param str: description
        :type str: string
        """
        pass
| 21.815951 | 101 | 0.497469 |
class CDLTransform:
def __init__(self):
pass
def equals(self, cdl):
pass
def validate(self):
pass
def CreateFromFile(self, src, cccid):
pass
def getXML(self):
pass
def setXML(self, xmltext):
pass
def getSlope(self):
pass
def getOffset(self):
pass
def getPower(self):
pass
def getSOP(self):
pass
def getSat(self):
pass
def setSlope(self, slope):
pass
def setOffset(self, offset):
pass
def setPower(self, power):
pass
def setSOP(self, sop):
pass
def setSat(self, sat):
pass
def getSatLumaCoefs(self):
pass
def getID(self):
pass
def setID(self, id):
pass
def getDescription(self):
pass
def setDescription(self, desc):
pass
| true | true |
f71bc81b29ea3c83e679fc75c90333cfc9ee5e00 | 3,351 | py | Python | django_jinja/base.py | akx/django-jinja | 2f1c0a74990d6d564350079692a8307c8c6f5b9f | [
"BSD-3-Clause"
] | 210 | 2015-05-21T16:54:05.000Z | 2022-01-06T01:24:52.000Z | django_jinja/base.py | akx/django-jinja | 2f1c0a74990d6d564350079692a8307c8c6f5b9f | [
"BSD-3-Clause"
] | 139 | 2015-05-15T11:01:03.000Z | 2022-03-29T21:13:04.000Z | django_jinja/base.py | akx/django-jinja | 2f1c0a74990d6d564350079692a8307c8c6f5b9f | [
"BSD-3-Clause"
] | 84 | 2015-05-15T09:35:22.000Z | 2021-09-03T13:14:44.000Z | import re
import os.path as path
from importlib import import_module
from django.template.context import BaseContext
def dict_from_context(context):
    """
    Convert a template context (possibly a layered Django ``BaseContext``)
    into a plain python dict, with later layers overriding earlier ones.
    """
    if not isinstance(context, BaseContext):
        return dict(context)
    flattened = {}
    for layer in reversed(list(context)):
        flattened.update(dict_from_context(layer))
    return flattened
def _iter_templatetags_modules_list():
    """
    Yield ``(app_path, directory)`` pairs for every installed app that
    ships an importable ``templatetags`` submodule.
    """
    from django.apps import apps

    for app_path in (config.name for config in apps.get_app_configs()):
        try:
            mod = import_module(app_path + ".templatetags")
        except ImportError:
            continue
        # Empty folders can lead to unexpected behavior with Python 3.
        # We make sure to have the `__file__` attribute.
        if getattr(mod, '__file__', None) is not None:
            yield (app_path, path.dirname(mod.__file__))
def patch_django_for_autoescape():
    """
    Patch Django's safe-string and form classes so that Jinja2's
    autoescape machinery recognises them via the ``__html__`` protocol.
    """
    from django.utils import safestring
    from django.forms.boundfield import BoundField
    from django.forms.utils import ErrorList
    from django.forms.utils import ErrorDict

    def _html_self(self):
        return str(self)

    # Different Django versions expose different safe-string classes;
    # patch whichever ones exist and still lack __html__.  (Collapses the
    # previous eight near-identical hasattr blocks.)
    for name in ("SafeText", "SafeString", "SafeUnicode", "SafeBytes"):
        cls = getattr(safestring, name, None)
        if cls is not None and not hasattr(cls, "__html__"):
            cls.__html__ = _html_self

    for cls in (BoundField, ErrorList, ErrorDict):
        if not hasattr(cls, "__html__"):
            cls.__html__ = _html_self
def get_match_extension(using=None):
    """
    Return the template-name extension matched by the django-jinja
    loader (``Jinja2.match_extension``).

    ``using`` selects one backend from ``settings.TEMPLATES``; when it is
    None the single configured Jinja2 backend is used (Django raises
    ImproperlyConfigured if that choice is ambiguous).
    """
    from .backend import Jinja2
    from django.template import engines

    engine = Jinja2.get_default() if using is None else engines[using]
    return engine.match_extension
return engine.match_extension
def match_template(template_name, extension, regex):
    """
    Decide whether *template_name* belongs to this backend, filtering by
    filename *extension* and/or *regex* (either may be falsy to skip it).
    """
    if extension:
        if not template_name.endswith(extension):
            return False
        return re.match(regex, template_name) if regex else True
    if regex:
        return re.match(regex, template_name)
    return True
| 30.463636 | 74 | 0.675321 | import re
import os.path as path
from importlib import import_module
from django.template.context import BaseContext
def dict_from_context(context):
if isinstance(context, BaseContext):
new_dict = {}
for i in reversed(list(context)):
new_dict.update(dict_from_context(i))
return new_dict
return dict(context)
def _iter_templatetags_modules_list():
from django.apps import apps
all_modules = [x.name for x in apps.get_app_configs()]
for app_path in all_modules:
try:
mod = import_module(app_path + ".templatetags")
if getattr(mod, '__file__', None) is not None:
yield (app_path, path.dirname(mod.__file__))
except ImportError:
pass
def patch_django_for_autoescape():
from django.utils import safestring
from django.forms.boundfield import BoundField
from django.forms.utils import ErrorList
from django.forms.utils import ErrorDict
if hasattr(safestring, "SafeText"):
if not hasattr(safestring.SafeText, "__html__"):
safestring.SafeText.__html__ = lambda self: str(self)
if hasattr(safestring, "SafeString"):
if not hasattr(safestring.SafeString, "__html__"):
safestring.SafeString.__html__ = lambda self: str(self)
if hasattr(safestring, "SafeUnicode"):
if not hasattr(safestring.SafeUnicode, "__html__"):
safestring.SafeUnicode.__html__ = lambda self: str(self)
if hasattr(safestring, "SafeBytes"):
if not hasattr(safestring.SafeBytes, "__html__"):
safestring.SafeBytes.__html__ = lambda self: str(self)
if not hasattr(BoundField, "__html__"):
BoundField.__html__ = lambda self: str(self)
if not hasattr(ErrorList, "__html__"):
ErrorList.__html__ = lambda self: str(self)
if not hasattr(ErrorDict, "__html__"):
ErrorDict.__html__ = lambda self: str(self)
def get_match_extension(using=None):
from .backend import Jinja2
from django.template import engines
if using is None:
engine = Jinja2.get_default()
else:
engine = engines[using]
return engine.match_extension
def match_template(template_name, extension, regex):
if extension:
matches_extension = template_name.endswith(extension)
if regex:
return matches_extension and re.match(regex, template_name)
else:
return matches_extension
elif regex:
return re.match(regex, template_name)
else:
return True
| true | true |
f71bc829a4dec68143103f059f2183f57ed5bcb0 | 903 | py | Python | beacontools/structs/estimote.py | nasa-watchdog/beacontools-ucsb | 37e60b14f73935501c8edc3277917a6a40bcfdba | [
"MIT"
] | null | null | null | beacontools/structs/estimote.py | nasa-watchdog/beacontools-ucsb | 37e60b14f73935501c8edc3277917a6a40bcfdba | [
"MIT"
] | 1 | 2018-12-08T01:43:02.000Z | 2018-12-08T01:43:02.000Z | beacontools/structs/estimote.py | nasa-watchdog/beacontools-ucsb | 37e60b14f73935501c8edc3277917a6a40bcfdba | [
"MIT"
] | null | null | null | """All low level structures used for parsing Estimote packets."""
from construct import Struct, Byte, Switch, Int8sl, Array, Int8ul
from ..const import ESTIMOTE_TELEMETRY_SUBFRAME_A, ESTIMOTE_TELEMETRY_SUBFRAME_B
# pylint: disable=invalid-name
# Sub-frame A: motion telemetry (accelerometer + motion state).
EstimoteTelemetrySubFrameA = Struct(
    "acceleration" / Array(3, Int8sl),  # signed X/Y/Z samples
    "previous_motion" / Byte,
    "current_motion" / Byte,
    "combined_fields" / Array(5, Byte),  # packed bit-fields, decoded elsewhere
)

# Sub-frame B: environment and battery telemetry.
EstimoteTelemetrySubFrameB = Struct(
    "magnetic_field" / Array(3, Int8sl),  # signed X/Y/Z samples
    "ambient_light" / Int8ul,
    "combined_fields" / Array(5, Byte),  # packed bit-fields, decoded elsewhere
    "battery_level" / Int8ul,
)

# Outer telemetry frame: 8-byte device identifier, a sub-frame type byte,
# then the matching sub-frame payload (A or B) selected by the type byte.
EstimoteTelemetryFrame = Struct(
    "identifier" / Array(8, Byte),
    "subframe_type" / Byte,
    "sub_frame" / Switch(lambda ctx: ctx.subframe_type, {
        ESTIMOTE_TELEMETRY_SUBFRAME_A: EstimoteTelemetrySubFrameA,
        ESTIMOTE_TELEMETRY_SUBFRAME_B: EstimoteTelemetrySubFrameB,
    })
)
| 30.1 | 80 | 0.723145 | from construct import Struct, Byte, Switch, Int8sl, Array, Int8ul
from ..const import ESTIMOTE_TELEMETRY_SUBFRAME_A, ESTIMOTE_TELEMETRY_SUBFRAME_B
EstimoteTelemetrySubFrameA = Struct(
"acceleration" / Array(3, Int8sl),
"previous_motion" / Byte,
"current_motion" / Byte,
"combined_fields" / Array(5, Byte),
)
EstimoteTelemetrySubFrameB = Struct(
"magnetic_field" / Array(3, Int8sl),
"ambient_light" / Int8ul,
"combined_fields" / Array(5, Byte),
"battery_level" / Int8ul,
)
EstimoteTelemetryFrame = Struct(
"identifier" / Array(8, Byte),
"subframe_type" / Byte,
"sub_frame" / Switch(lambda ctx: ctx.subframe_type, {
ESTIMOTE_TELEMETRY_SUBFRAME_A: EstimoteTelemetrySubFrameA,
ESTIMOTE_TELEMETRY_SUBFRAME_B: EstimoteTelemetrySubFrameB,
})
)
| true | true |
f71bc88e1e773fe6f8bdbbed540d8fa994959788 | 1,394 | py | Python | setup.py | michaelimfeld/private-telegram-bot | f5a9cec3c430d46bab3f1e57faa4d62013f93fd1 | [
"MIT"
] | 37 | 2016-05-02T18:50:55.000Z | 2021-09-21T17:53:44.000Z | setup.py | michaelimfeld/private-telegram-bot | f5a9cec3c430d46bab3f1e57faa4d62013f93fd1 | [
"MIT"
] | 12 | 2016-04-23T21:58:42.000Z | 2020-03-25T18:31:29.000Z | setup.py | michaelimfeld/private-telegram-bot | f5a9cec3c430d46bab3f1e57faa4d62013f93fd1 | [
"MIT"
] | 8 | 2016-05-09T17:37:21.000Z | 2021-09-21T17:53:45.000Z | # -*- coding: utf-8 -*-
# pylint: disable=missing-docstring
from setuptools import setup
# Packaging metadata for the 'ownbot' distribution.
setup(
    name="ownbot",
    version="0.0.4",
    license="MIT",
    description="Python module to create private telegram bots.",
    author="Michael Imfeld",
    author_email="michaelimfeld@crooked.ch",
    maintainer="Michael Imfeld",
    maintainer_email="michaelimfeld@crooked.ch",
    platforms=["Linux", "Windows", "MAC OS X"],
    url="https://github.com/michaelimfeld/ownbot",
    download_url="https://github.com/michaelimfeld/ownbot",
    packages=["ownbot"],
    # Ship markdown files (e.g. README) found inside the package.
    package_data={"": ["*.md"]},
    # Runtime dependencies.
    install_requires=[
        "python-telegram-bot",
        "PyYAML"
    ],
    include_package_data=True,
    keywords=[
        "ownbot", "python",
        "telegram", "bot"
    ],
    classifiers=[
        "Programming Language :: Python",
        "Programming Language :: Python :: 2.7",
        "Programming Language :: Python :: 3",
        "Programming Language :: Python :: 3.3",
        "Programming Language :: Python :: 3.4",
        "Programming Language :: Python :: 3.5",
        "Operating System :: OS Independent",
        "Intended Audience :: Developers",
        "Intended Audience :: Education",
        "License :: OSI Approved :: MIT License",
        "Natural Language :: English",
        "Topic :: Education :: Testing",
        "Topic :: Software Development",
    ]
)
| 30.977778 | 65 | 0.60043 |
from setuptools import setup
setup(
name="ownbot",
version="0.0.4",
license="MIT",
description="Python module to create private telegram bots.",
author="Michael Imfeld",
author_email="michaelimfeld@crooked.ch",
maintainer="Michael Imfeld",
maintainer_email="michaelimfeld@crooked.ch",
platforms=["Linux", "Windows", "MAC OS X"],
url="https://github.com/michaelimfeld/ownbot",
download_url="https://github.com/michaelimfeld/ownbot",
packages=["ownbot"],
package_data={"": ["*.md"]},
install_requires=[
"python-telegram-bot",
"PyYAML"
],
include_package_data=True,
keywords=[
"ownbot", "python",
"telegram", "bot"
],
classifiers=[
"Programming Language :: Python",
"Programming Language :: Python :: 2.7",
"Programming Language :: Python :: 3",
"Programming Language :: Python :: 3.3",
"Programming Language :: Python :: 3.4",
"Programming Language :: Python :: 3.5",
"Operating System :: OS Independent",
"Intended Audience :: Developers",
"Intended Audience :: Education",
"License :: OSI Approved :: MIT License",
"Natural Language :: English",
"Topic :: Education :: Testing",
"Topic :: Software Development",
]
)
| true | true |
f71bc8aff2afd70932bd36b53d5ab0c39172c419 | 2,379 | py | Python | hw1/run_expert.py | Sebastianvarv/rl-homework | b7526ac3c86cbaae6b796856c31fc4c671a32663 | [
"MIT"
] | 1 | 2018-12-06T18:16:58.000Z | 2018-12-06T18:16:58.000Z | hw1/run_expert.py | Sebastianvarv/rl-homework | b7526ac3c86cbaae6b796856c31fc4c671a32663 | [
"MIT"
] | null | null | null | hw1/run_expert.py | Sebastianvarv/rl-homework | b7526ac3c86cbaae6b796856c31fc4c671a32663 | [
"MIT"
] | null | null | null | #!/usr/bin/env python
"""
Code to load an expert policy and generate roll-out data for behavioral cloning.
Example usage:
python run_expert.py experts/Humanoid-v1.pkl Humanoid-v1 --render \
--num_rollouts 20
Author of this script and included expert policies: Jonathan Ho (hoj@openai.com)
"""
import os
import pickle
import tensorflow as tf
import numpy as np
import tf_util
import gym
import load_policy
def main():
    """Load a pickled expert policy and roll it out in a Gym environment.

    Command-line arguments select the policy file, environment name,
    number of rollouts, an optional step cap and rendering.  Collected
    (observation, action) pairs are gathered into ``expert_data`` for
    behavioral-cloning experiments; per-rollout returns are printed.
    """
    import argparse
    parser = argparse.ArgumentParser()
    parser.add_argument('expert_policy_file', type=str)
    parser.add_argument('envname', type=str)
    parser.add_argument('--render', action='store_true')
    parser.add_argument("--max_timesteps", type=int)
    parser.add_argument('--num_rollouts', type=int, default=20,
                        help='Number of expert roll outs')
    args = parser.parse_args()

    print('loading and building expert policy')
    policy_fn = load_policy.load_policy(args.expert_policy_file)
    print('loaded and built')

    with tf.Session():
        tf_util.initialize()

        # NOTE: gym is already imported at module level; the original
        # re-imported it here redundantly.
        env = gym.make(args.envname)
        # Cap each rollout either by the CLI flag or the env's own limit.
        max_steps = args.max_timesteps or env.spec.timestep_limit

        returns = []
        observations = []
        actions = []
        for i in range(args.num_rollouts):
            print('iter', i)
            obs = env.reset()
            done = False
            totalr = 0.
            steps = 0
            while not done:
                # Policy expects a batch dimension, hence obs[None, :].
                action = policy_fn(obs[None, :])
                observations.append(obs)
                actions.append(action)
                obs, r, done, _ = env.step(action)
                totalr += r
                steps += 1
                if args.render:
                    env.render()
                if steps % 100 == 0:
                    print("%i/%i" % (steps, max_steps))
                if steps >= max_steps:
                    break
            returns.append(totalr)

        print('returns', returns)
        print('mean return', np.mean(returns))
        print('std of return', np.std(returns))

        # Rollout dataset for behavioral cloning; persist with pickle if
        # needed (e.g. dump to expert_data/<envname>.pkl).
        expert_data = {'observations': np.array(observations),
                       'actions': np.array(actions)}
# Script entry point: run expert rollouts when invoked directly.
if __name__ == '__main__':
    main()
| 30.5 | 83 | 0.580076 |
import os
import pickle
import tensorflow as tf
import numpy as np
import tf_util
import gym
import load_policy
def main():
import argparse
parser = argparse.ArgumentParser()
parser.add_argument('expert_policy_file', type=str)
parser.add_argument('envname', type=str)
parser.add_argument('--render', action='store_true')
parser.add_argument("--max_timesteps", type=int)
parser.add_argument('--num_rollouts', type=int, default=20,
help='Number of expert roll outs')
args = parser.parse_args()
print('loading and building expert policy')
policy_fn = load_policy.load_policy(args.expert_policy_file)
print('loaded and built')
with tf.Session():
tf_util.initialize()
import gym
env = gym.make(args.envname)
max_steps = args.max_timesteps or env.spec.timestep_limit
returns = []
observations = []
actions = []
for i in range(args.num_rollouts):
print('iter', i)
obs = env.reset()
done = False
totalr = 0.
steps = 0
while not done:
action = policy_fn(obs[None,:])
observations.append(obs)
actions.append(action)
obs, r, done, _ = env.step(action)
totalr += r
steps += 1
if args.render:
env.render()
if steps % 100 == 0: print("%i/%i"%(steps, max_steps))
if steps >= max_steps:
break
returns.append(totalr)
print('returns', returns)
print('mean return', np.mean(returns))
print('std of return', np.std(returns))
expert_data = {'observations': np.array(observations),
'actions': np.array(actions)}
if __name__ == '__main__':
main()
| true | true |
f71bc95531a6537981172b3ab30077c9090d2668 | 669 | py | Python | Latest/venv/Lib/site-packages/envisage/unknown_extension.py | adamcvj/SatelliteTracker | 49a8f26804422fdad6f330a5548e9f283d84a55d | [
"Apache-2.0"
] | 1 | 2022-01-09T20:04:31.000Z | 2022-01-09T20:04:31.000Z | Latest/venv/Lib/site-packages/envisage/unknown_extension.py | adamcvj/SatelliteTracker | 49a8f26804422fdad6f330a5548e9f283d84a55d | [
"Apache-2.0"
] | 1 | 2022-02-15T12:01:57.000Z | 2022-03-24T19:48:47.000Z | Latest/venv/Lib/site-packages/envisage/unknown_extension.py | adamcvj/SatelliteTracker | 49a8f26804422fdad6f330a5548e9f283d84a55d | [
"Apache-2.0"
] | null | null | null | # (C) Copyright 2007-2019 Enthought, Inc., Austin, TX
# All rights reserved.
#
# This software is provided without warranty under the terms of the BSD
# license included in LICENSE.txt and may be redistributed only
# under the conditions described in the aforementioned license. The license
# is also available online at http://www.enthought.com/licenses/BSD.txt
# Thanks for using Enthought open source!
""" The exception raised when an unknown extension is referenced. """
# NOTE(review): presumably raised by envisage's extension registry when a
# referenced extension point/id has no registration — verify against callers.
class UnknownExtension(Exception):
    """ The exception raised when an unknown extension is referenced. """
#### EOF ######################################################################
| 39.352941 | 79 | 0.678625 |
class UnknownExtension(Exception):
| true | true |
f71bc9ee1184e124fd8b7941320776f895fa5014 | 88,529 | py | Python | clearml/storage/helper.py | mmiller-max/clearml | fd2d6c6f5d46cad3e406e88eeb4d805455b5b3d8 | [
"Apache-2.0"
] | null | null | null | clearml/storage/helper.py | mmiller-max/clearml | fd2d6c6f5d46cad3e406e88eeb4d805455b5b3d8 | [
"Apache-2.0"
] | null | null | null | clearml/storage/helper.py | mmiller-max/clearml | fd2d6c6f5d46cad3e406e88eeb4d805455b5b3d8 | [
"Apache-2.0"
] | null | null | null | from __future__ import with_statement
import errno
import getpass
import itertools
import json
import os
import shutil
import sys
import threading
from abc import ABCMeta, abstractmethod
from collections import namedtuple
from concurrent.futures import ThreadPoolExecutor
from copy import copy
from datetime import datetime
from multiprocessing.pool import ThreadPool
from tempfile import mktemp
from time import time
from types import GeneratorType
import requests
import six
from _socket import gethostname
from attr import attrs, attrib, asdict
from furl import furl
from pathlib2 import Path
from requests.exceptions import ConnectionError
from six import binary_type, StringIO
from six.moves.queue import Queue, Empty
from six.moves.urllib.parse import urlparse
from six.moves.urllib.request import url2pathname
from .callbacks import UploadProgressReport, DownloadProgressReport
from .util import quote_url
from ..backend_api.utils import get_http_session_with_retry
from ..backend_config.bucket_config import S3BucketConfigurations, GSBucketConfigurations, AzureContainerConfigurations
from ..config import config, deferred_config
from ..debugging import get_logger
from ..errors import UsageError
class StorageError(Exception):
    """Raised when storage access or configuration fails (e.g. missing
    credentials or bucket/container configuration)."""
    pass
class DownloadError(Exception):
    """Raised when downloading an object from storage fails; propagated
    unchanged through the helper's generic exception handlers."""
    pass
@six.add_metaclass(ABCMeta)
class _Driver(object):
    """Abstract storage-backend driver interface.

    Concrete drivers (S3/boto3, Google Cloud Storage, Azure Blob, HTTP,
    local files) implement object upload/download/list/delete against a
    backend-specific "container" (bucket, blob container, base URL, ...).
    """

    @classmethod
    def get_logger(cls):
        # All drivers log through the shared 'storage' logger.
        return get_logger('storage')

    @abstractmethod
    def get_container(self, container_name, config=None, **kwargs):
        """Return a backend-specific container handle for ``container_name``."""
        pass

    @abstractmethod
    def test_upload(self, test_path, config, **kwargs):
        """Verify that uploading to ``test_path`` is possible with ``config``."""
        pass

    @abstractmethod
    def upload_object_via_stream(self, iterator, container, object_name, extra, **kwargs):
        """Upload data read from ``iterator`` to ``object_name`` in ``container``."""
        pass

    @abstractmethod
    def list_container_objects(self, container, ex_prefix, **kwargs):
        """List objects stored in ``container`` (optionally under ``ex_prefix``)."""
        pass

    @abstractmethod
    def get_direct_access(self, remote_path, **kwargs):
        """Return a directly accessible local path for ``remote_path``, or None."""
        pass

    @abstractmethod
    def download_object(self, obj, local_path, overwrite_existing, delete_on_failure, callback, **kwargs):
        """Download ``obj`` to ``local_path``, reporting progress via ``callback``."""
        pass

    @abstractmethod
    def download_object_as_stream(self, obj, chunk_size, **kwargs):
        """Return a stream/iterator over the bytes of ``obj``."""
        pass

    @abstractmethod
    def delete_object(self, obj, **kwargs):
        """Delete ``obj`` from its container."""
        pass

    @abstractmethod
    def upload_object(self, file_path, container, object_name, extra, **kwargs):
        """Upload the local file ``file_path`` as ``object_name`` in ``container``."""
        pass

    @abstractmethod
    def get_object(self, container_name, object_name, **kwargs):
        """Return a handle to ``object_name`` inside ``container_name``."""
        pass
class StorageHelper(object):
""" Storage helper.
Used by the entire system to download/upload files.
Supports both local and remote files (currently local files, network-mapped files, HTTP/S and Amazon S3)
"""
_temp_download_suffix = '.partially'
    @classmethod
    def _get_logger(cls):
        # Shared 'storage' logger used by all helper instances.
        return get_logger('storage')
    @attrs
    class _PathSubstitutionRule(object):
        """A single registered-prefix -> local-prefix URL substitution rule."""
        registered_prefix = attrib(type=str)  # prefix as registered in the backend
        local_prefix = attrib(type=str)  # prefix as found on this machine
        replace_windows_sep = attrib(type=bool)  # replace '\\' with os.sep after prefix
        replace_linux_sep = attrib(type=bool)  # replace '/' with os.sep after prefix

        # Configuration section the rules are loaded from.
        path_substitution_config = 'storage.path_substitution'

        @classmethod
        def load_list_from_config(cls):
            """Build the rule list from configuration, skipping malformed
            entries (missing prefixes, or both separator flags set)."""
            rules_list = []
            for index, sub_config in enumerate(config.get(cls.path_substitution_config, list())):
                rule = cls(
                    registered_prefix=sub_config.get('registered_prefix', None),
                    local_prefix=sub_config.get('local_prefix', None),
                    replace_windows_sep=sub_config.get('replace_windows_sep', False),
                    replace_linux_sep=sub_config.get('replace_linux_sep', False),
                )

                if any(prefix is None for prefix in (rule.registered_prefix, rule.local_prefix)):
                    StorageHelper._get_logger().warning(
                        "Illegal substitution rule configuration '{}[{}]': {}".format(
                            cls.path_substitution_config,
                            index,
                            asdict(rule),
                        ))
                    continue

                if all((rule.replace_windows_sep, rule.replace_linux_sep)):
                    StorageHelper._get_logger().warning(
                        "Only one of replace_windows_sep and replace_linux_sep flags may be set."
                        "'{}[{}]': {}".format(
                            cls.path_substitution_config,
                            index,
                            asdict(rule),
                        ))
                    continue

                rules_list.append(rule)

            return rules_list
    class _UploadData(object):
        """Immutable value object describing one queued (async) upload."""

        @property
        def src_path(self):
            # Local source file path.
            return self._src_path

        @property
        def dest_path(self):
            # Destination URL/object path.
            return self._dest_path

        @property
        def extra(self):
            # Extra driver-specific upload metadata (dict or None).
            return self._extra

        @property
        def callback(self):
            # Completion callback invoked with the final path.
            return self._callback

        @property
        def retries(self):
            # Number of upload attempts.
            return self._retries

        def __init__(self, src_path, dest_path, extra, callback, retries):
            self._src_path = src_path
            self._dest_path = dest_path
            self._extra = extra
            self._callback = callback
            self._retries = retries

        def __str__(self):
            return "src=%s" % self.src_path
_helpers = {} # cache of helper instances
# global terminate event for async upload threads
_terminate = threading.Event()
_async_upload_threads = set()
_upload_pool = None
# collect all bucket credentials that aren't empty (ignore entries with an empty key or secret)
_s3_configurations = deferred_config('aws.s3', {}, transform=S3BucketConfigurations.from_config)
_gs_configurations = deferred_config('google.storage', {}, transform=GSBucketConfigurations.from_config)
_azure_configurations = deferred_config('azure.storage', {}, transform=AzureContainerConfigurations.from_config)
_path_substitutions = deferred_config(transform=_PathSubstitutionRule.load_list_from_config)
    @property
    def log(self):
        # Logger used by this helper instance.
        return self._log

    @property
    def scheme(self):
        # URL scheme of the base URL ('s3', 'gs', 'azure', 'http(s)', 'file', ...).
        return self._scheme

    @property
    def secure(self):
        # Whether the backend connection is secure; __init__ sets this from
        # the S3 bucket configuration, and leaves True for other backends.
        return self._secure

    @property
    def base_url(self):
        # Base URL (bucket/container root) this helper was created for.
        return self._base_url
    @classmethod
    def get(cls, url, logger=None, **kwargs):
        """
        Get a storage helper instance for the given URL

        Instances are cached per (base URL, thread), so repeated calls for
        the same bucket/container reuse the driver and credentials.  Pass
        ``__force_create=True`` in kwargs to bypass the cache.

        :return: A StorageHelper instance, or None on failure (errors logged).
        """
        # Handle URL substitution etc before locating the correct storage driver
        url = cls._canonize_url(url)

        # Get the credentials we should use for this url
        base_url = cls._resolve_base_url(url)

        instance_key = '%s_%s' % (base_url, threading.current_thread().ident or 0)

        force_create = kwargs.pop('__force_create', False)
        if (instance_key in cls._helpers) and (not force_create):
            return cls._helpers[instance_key]

        # Don't canonize URL since we already did it
        try:
            instance = cls(base_url=base_url, url=url, logger=logger, canonize_url=False, **kwargs)
        except (StorageError, UsageError) as ex:
            cls._get_logger().error(str(ex))
            return None
        except Exception as ex:
            cls._get_logger().error("Failed creating storage object {} Reason: {}".format(
                base_url or url, ex))
            return None

        cls._helpers[instance_key] = instance
        return instance
    @classmethod
    def get_local_copy(cls, remote_url):
        """
        Download a file from remote URL to a local storage, and return path to local copy,

        :param remote_url: Remote URL. Example: https://example.com/file.jpg s3://bucket/folder/file.mp4 etc.
        :return: Path to local copy of the downloaded file. None if error occurred.
        """
        helper = cls.get(remote_url)
        if not helper:
            return None
        # create temp file with the requested file name
        # NOTE(review): mktemp only reserves a name (race-prone by itself);
        # download_to_file writes through a '.partially' temp and renames.
        file_name = '.' + remote_url.split('/')[-1].split(os.path.sep)[-1]
        local_path = mktemp(suffix=file_name)
        return helper.download_to_file(remote_url, local_path)
    def __init__(self, base_url, url, key=None, secret=None, region=None, verbose=False, logger=None, retries=5,
                 **kwargs):
        """Create a helper bound to the backend implied by the URL scheme.

        Resolves credentials from the configuration (Azure/S3/GS), creates
        the matching driver and container, and falls back to the local-file
        driver for unknown schemes.  Raises StorageError/ValueError when
        required credentials are missing.  ``canonize_url=False`` in kwargs
        skips path substitution (used by ``get()`` which already applied it).
        """
        level = config.get('storage.log.level', None)
        if level:
            try:
                self._get_logger().setLevel(level)
            except (TypeError, ValueError):
                self._get_logger().error('invalid storage log level in configuration: %s' % level)

        self._log = logger or self._get_logger()
        self._verbose = verbose
        self._retries = retries
        self._extra = {}
        self._base_url = base_url
        self._secure = True
        self._driver = None
        self._container = None
        self._conf = None

        if kwargs.get('canonize_url', True):
            url = self._canonize_url(url)

        parsed = urlparse(url)
        self._scheme = parsed.scheme

        if self._scheme == _AzureBlobServiceStorageDriver.scheme:
            self._conf = copy(self._azure_configurations.get_config_by_uri(url))
            if self._conf is None:
                raise StorageError("Missing Azure Blob Storage configuration for {}".format(url))

            if not self._conf.account_name or not self._conf.account_key:
                raise StorageError(
                    "Missing account name or key for Azure Blob Storage access for {}".format(base_url)
                )

            self._driver = _AzureBlobServiceStorageDriver()
            self._container = self._driver.get_container(config=self._conf)

        elif self._scheme == _Boto3Driver.scheme:
            self._conf = copy(self._s3_configurations.get_config_by_uri(url))
            self._secure = self._conf.secure

            final_region = region if region else self._conf.region
            if not final_region:
                final_region = None

            # explicit key/secret arguments override the configured ones
            self._conf.update(
                key=key or self._conf.key,
                secret=secret or self._conf.secret,
                multipart=self._conf.multipart,
                region=final_region,
                use_credentials_chain=self._conf.use_credentials_chain
            )

            if not self._conf.use_credentials_chain:
                if not self._conf.key or not self._conf.secret:
                    raise ValueError(
                        "Missing key and secret for S3 storage access (%s)" % base_url
                    )

            self._driver = _Boto3Driver()
            self._container = self._driver.get_container(container_name=self._base_url, retries=retries,
                                                         config=self._conf)

        elif self._scheme == _GoogleCloudStorageDriver.scheme:
            self._conf = copy(self._gs_configurations.get_config_by_uri(url))
            self._driver = _GoogleCloudStorageDriver()
            self._container = self._driver.get_container(
                container_name=self._base_url,
                config=self._conf
            )

        elif self._scheme in _HttpDriver.schemes:
            self._driver = _HttpDriver(retries=retries)
            self._container = self._driver.get_container(container_name=self._base_url)
        else:  # elif self._scheme == 'file':
            # if this is not a known scheme assume local file

            # If the scheme is file, use only the path segment, If not, use the entire URL
            if self._scheme == 'file':
                url = parsed.path

            url = url.replace("\\", "/")

            # url2pathname is specifically intended to operate on (urlparse result).path
            # and returns a cross-platform compatible result
            driver_uri = url2pathname(url)
            path_driver_uri = Path(driver_uri)

            self._driver = _FileStorageDriver(str(path_driver_uri.root))
            self._container = None
    @classmethod
    def terminate_uploads(cls, force=True, timeout=2.0):
        # Stop pending async uploads.  With force=True nothing is awaited:
        # uploader threads are daemons and die with the process.  Otherwise
        # signal termination and wait up to `timeout` seconds in total.
        if force:
            # since async uploaders are daemon threads, we can just return and let them close by themselves
            return
        # signal all threads to terminate and give them a chance for 'timeout' seconds (total, not per-thread)
        cls._terminate.set()
        remaining_timeout = timeout
        for thread in cls._async_upload_threads:
            t = time()
            try:
                thread.join(timeout=remaining_timeout)
            except Exception:
                pass
            remaining_timeout -= (time() - t)
    @classmethod
    def get_configuration(cls, bucket_config):
        # Look up the stored S3 configuration matching the bucket/host pair.
        return cls._s3_configurations.get_config_by_bucket(bucket_config.bucket, bucket_config.host)
    @classmethod
    def add_configuration(cls, bucket_config, log=None, _test_config=True):
        """Register (or replace) an S3 bucket configuration.

        If ``bucket_config`` carries no usable credentials, reuse an
        existing matching configuration or fall back to the global
        defaults.  ``_test_config`` controls whether the credentials are
        verified against the bucket before being stored.
        """
        # Try to use existing configuration if we have no key and secret
        use_existing = not bucket_config.is_valid()

        # Get existing config anyway (we'll either try to use it or alert we're replacing it
        existing = cls.get_configuration(bucket_config)

        configs = cls._s3_configurations

        if not use_existing:
            # Test bucket config, fails if unsuccessful
            if _test_config:
                _Boto3Driver._test_bucket_config(bucket_config, log)

            if existing:
                if log:
                    log.warning('Overriding existing configuration for %s/%s'
                                % (existing.host or 'AWS', existing.bucket))
                configs.remove_config(existing)
        else:
            # Try to use existing configuration
            good_config = False
            if existing:
                if log:
                    log.info('Using existing credentials for bucket %s/%s'
                             % (bucket_config.host or 'AWS', bucket_config.bucket))
                good_config = _Boto3Driver._test_bucket_config(existing, log, raise_on_error=False)

            if not good_config:
                # Try to use global key/secret
                configs.update_config_with_defaults(bucket_config)

                if log:
                    log.info('Using global credentials for bucket %s/%s'
                             % (bucket_config.host or 'AWS', bucket_config.bucket))
                if _test_config:
                    _Boto3Driver._test_bucket_config(bucket_config, log)
            else:
                # do not add anything, existing config is OK
                return

        configs.add_config(bucket_config)
    @classmethod
    def add_path_substitution(
            cls,
            registered_prefix,
            local_prefix,
            replace_windows_sep=False,
            replace_linux_sep=False,
    ):
        """
        Add a path substitution rule for storage paths.

        Useful for case where the data was registered under some path, and that
        path was later renamed. This may happen with local storage paths where
        each machine is has different mounts or network drives configurations

        :param registered_prefix: The prefix to search for and replace. This is
            the prefix of the path the data is registered under. This should be the
            exact url prefix, case sensitive, as the data is registered.
        :param local_prefix: The prefix to replace 'registered_prefix' with. This
            is the prefix of the path the data is actually saved under. This should be the
            exact url prefix, case sensitive, as the data is saved under.
        :param replace_windows_sep: If set to True, and the prefix matches, the rest
            of the url has all of the windows path separators (backslash '\\') replaced with
            the native os path separator.
        :param replace_linux_sep: If set to True, and the prefix matches, the rest
            of the url has all of the linux/unix path separators (slash '/') replaced with
            the native os path separator.

        :raises UsageError: On empty prefixes or both separator flags set.
        """
        # Validate before registering: empty prefixes or both separator
        # flags together are programming errors, not recoverable states.
        if not registered_prefix or not local_prefix:
            raise UsageError("Path substitution prefixes must be non empty strings")

        if replace_windows_sep and replace_linux_sep:
            raise UsageError("Only one of replace_windows_sep and replace_linux_sep may be set.")

        rule = cls._PathSubstitutionRule(
            registered_prefix=registered_prefix,
            local_prefix=local_prefix,
            replace_windows_sep=replace_windows_sep,
            replace_linux_sep=replace_linux_sep,
        )

        cls._path_substitutions.append(rule)
    @classmethod
    def clear_path_substitutions(cls):
        """
        Removes all path substitution rules, including ones from the configuration file.
        """
        # Rebind the class attribute, discarding the config-loaded rules.
        cls._path_substitutions = list()
    def verify_upload(self, folder_uri='', raise_on_error=True, log_on_error=True):
        """
        Verify that this helper can upload files to a folder.

        An upload is possible iff:
            1. the destination folder is under the base uri of the url used to create the helper
            2. the helper has credentials to write to the destination folder

        :param folder_uri: The destination folder to test. Must be an absolute
            url that begins with the base uri of the url used to create the helper.
        :param raise_on_error: Raise an exception if an upload is not possible
        :param log_on_error: Log an error if an upload is not possible
        :return: The conformed folder URI (verification for S3/GS/file may
            raise; for other schemes the URI is returned without a check).
        """
        folder_uri = self._canonize_url(folder_uri)

        folder_uri = self.conform_url(folder_uri, self._base_url)

        test_path = self._normalize_object_name(folder_uri)

        if self._scheme == _Boto3Driver.scheme:
            _Boto3Driver._test_bucket_config(
                self._conf,
                self._log,
                test_path=test_path,
                raise_on_error=raise_on_error,
                log_on_error=log_on_error,
            )
        elif self._scheme == _GoogleCloudStorageDriver.scheme:
            self._driver.test_upload(test_path, self._conf)

        elif self._scheme == 'file':
            # Check path exists
            Path(test_path).mkdir(parents=True, exist_ok=True)
            # check path permissions
            Path(test_path).touch(exist_ok=True)

        return folder_uri
    def upload_from_stream(self, stream, dest_path, extra=None, retries=1):
        """Upload a file-like object to ``dest_path`` and return the final URL.

        Retries up to ``retries`` times, rewinding the stream between
        attempts when it is seekable; the last exception is re-raised if
        all attempts fail.  URLs for http/https destinations are quoted.
        """
        dest_path = self._canonize_url(dest_path)
        object_name = self._normalize_object_name(dest_path)
        extra = extra.copy() if extra else {}
        extra.update(self._extra)
        last_ex = None
        cb = UploadProgressReport.from_stream(stream, object_name, self._verbose, self._log)
        for i in range(max(1, retries)):
            try:
                self._driver.upload_object_via_stream(
                    iterator=stream,
                    container=self._container,
                    object_name=object_name,
                    callback=cb,
                    extra=extra)
                last_ex = None
                break
            except Exception as ex:
                last_ex = ex
                # seek to beginning if possible
                # noinspection PyBroadException
                try:
                    stream.seek(0)
                except Exception:
                    pass
        if last_ex:
            raise last_ex

        if self.scheme in _HttpDriver.schemes:
            # quote link
            dest_path = quote_url(dest_path)

        return dest_path
    def upload(self, src_path, dest_path=None, extra=None, async_enable=False, cb=None, retries=1):
        """Upload a local file to storage.

        With ``async_enable`` the upload is queued on the shared thread
        pool and an AsyncResult is returned; otherwise the final (quoted)
        destination URL is returned synchronously.  ``cb`` is invoked with
        the resulting path on completion.
        """
        if not dest_path:
            dest_path = os.path.basename(src_path)

        dest_path = self._canonize_url(dest_path)

        if cb and self.scheme in _HttpDriver.schemes:
            # store original callback
            a_cb = cb

            # quote link
            def callback(a_path):
                return a_cb(quote_url(a_path) if a_path else a_path)
            # replace callback with wrapper
            cb = callback

        if async_enable:
            data = self._UploadData(src_path=src_path, dest_path=dest_path, extra=extra, callback=cb, retries=retries)
            StorageHelper._initialize_upload_pool()
            return StorageHelper._upload_pool.apply_async(self._do_async_upload, args=(data,))
        else:
            res = self._do_upload(src_path, dest_path, extra, cb, verbose=False, retries=retries)
            if res:
                res = quote_url(res)
            return res
    def list(self, prefix=None):
        """
        List entries in the helper base path.

        Return a list of names inside this helper base path. The base path is
        determined at creation time and is specific for each storage medium.
        For Google Storage and S3 it is the bucket of the path.
        For local files it is the root directory.

        This operation is not supported for http and https protocols.

        :param prefix: If None, return the list as described above. If not, it
            must be a string - the path of a sub directory under the base path.
            the returned list will include only objects under that subdir.

        :return: The paths of all the objects in the storage base
            path under prefix. Listed relative to the base path.
        """
        if prefix:
            if prefix.startswith(self._base_url):
                prefix = prefix[len(self.base_url):].lstrip("/")
            try:
                res = self._driver.list_container_objects(self._container, ex_prefix=prefix)
            except TypeError:
                # driver does not accept ex_prefix; list all and filter below
                res = self._driver.list_container_objects(self._container)

            return [
                obj.name
                for obj in res if
                obj.name.startswith(prefix) and obj.name != prefix
            ]
        else:
            return [obj.name for obj in self._driver.list_container_objects(self._container)]
    def download_to_file(self, remote_path, local_path, overwrite_existing=False, delete_on_failure=True, verbose=None):
        """Download a remote object to ``local_path`` and return that path.

        Writes to a temporary '<local_path>_<ts>.partially' file first and
        renames it into place, so a partial download is never mistaken for
        a complete file.  If the driver supports direct access (local
        files), the directly accessible path is returned without copying.
        Returns None on failure (error logged).
        """
        def next_chunk(astream):
            # Normalize the driver stream: a bytes object is one chunk; an
            # iterator yields chunks until StopIteration.
            if isinstance(astream, binary_type):
                chunk = astream
                astream = None
            elif astream:
                try:
                    chunk = next(astream)
                except StopIteration:
                    chunk = None
            else:
                chunk = None
            return chunk, astream

        remote_path = self._canonize_url(remote_path)
        verbose = self._verbose if verbose is None else verbose

        # Check if driver type supports direct access:
        direct_access_path = self._driver.get_direct_access(remote_path)
        if direct_access_path:
            return direct_access_path

        temp_local_path = None
        try:
            if verbose:
                self._log.info('Start downloading from %s' % remote_path)
            if not overwrite_existing and Path(local_path).is_file():
                self._log.warning(
                    'File {} already exists, no need to download, thread id = {}'.format(
                        local_path,
                        threading.current_thread().ident,
                    ),
                )
                return local_path
            # we download into temp_local_path so that if we accidentally stop in the middle,
            # we won't think we have the entire file
            temp_local_path = '{}_{}{}'.format(local_path, time(), self._temp_download_suffix)
            obj = self._get_object(remote_path)
            if not obj:
                return None

            # object size in bytes
            total_size_mb = -1
            dl_total_mb = 0.
            download_reported = False
            # chunks size is ignored and always 5Mb
            chunk_size_mb = 5

            # make sure we have the destination folder
            # noinspection PyBroadException
            Path(temp_local_path).parent.mkdir(parents=True, exist_ok=True)

            # try to get file size
            try:
                if isinstance(self._driver, _HttpDriver) and obj:
                    obj = self._driver._get_download_object(obj)
                    total_size_mb = float(obj.headers.get('Content-Length', 0)) / (1024 * 1024)
                elif hasattr(obj, 'size'):
                    size = obj.size
                    # Google storage has the option to reload the object to get the size
                    if size is None and hasattr(obj, 'reload'):
                        obj.reload()
                        size = obj.size
                    total_size_mb = 0 if size is None else float(size) / (1024 * 1024)
                elif hasattr(obj, 'content_length'):
                    total_size_mb = float(obj.content_length) / (1024 * 1024)
            except (ValueError, AttributeError, KeyError):
                pass

            # if driver supports download with callback, use it (it might be faster)
            if hasattr(self._driver, 'download_object'):
                # callback
                cb = DownloadProgressReport(total_size_mb, verbose, remote_path, self._log)
                self._driver.download_object(obj, temp_local_path, callback=cb)
                download_reported = bool(cb.last_reported)
                dl_total_mb = cb.current_status_mb
            else:
                stream = self._driver.download_object_as_stream(obj, chunk_size_mb * 1024 * 1024)
                if stream is None:
                    raise ValueError('Could not download %s' % remote_path)
                with open(temp_local_path, 'wb') as fd:
                    data, stream = next_chunk(stream)
                    while data:
                        fd.write(data)
                        data, stream = next_chunk(stream)

            if Path(temp_local_path).stat().st_size <= 0:
                raise Exception('downloaded a 0-sized file')

            # if we are on windows, we need to remove the target file before renaming
            # otherwise posix rename will overwrite the target
            if os.name != 'posix':
                try:
                    os.remove(local_path)
                except Exception:
                    pass

            # rename temp file to local_file
            # noinspection PyBroadException
            try:
                os.rename(temp_local_path, local_path)
            except Exception:
                # noinspection PyBroadException
                try:
                    os.unlink(temp_local_path)
                except Exception:
                    pass
                # file was downloaded by a parallel process, check we have the final output and delete the partial copy
                path_local_path = Path(local_path)
                if not path_local_path.is_file() or path_local_path.stat().st_size <= 0:
                    raise Exception('Failed renaming partial file, downloaded file exists and a 0-sized file')

            # report download if we are on the second chunk
            if verbose or download_reported:
                self._log.info(
                    'Downloaded %.2f MB successfully from %s , saved to %s' % (dl_total_mb, remote_path, local_path))
            return local_path
        except DownloadError:
            raise
        except Exception as e:
            self._log.error("Could not download {} , err: {} ".format(remote_path, e))
            if delete_on_failure:
                # noinspection PyBroadException
                try:
                    if temp_local_path:
                        os.remove(temp_local_path)
                except Exception:
                    pass
            return None
    def download_as_stream(self, remote_path, chunk_size=None):
        # Return a stream/iterator over the remote object's bytes, or None
        # on error (DownloadError is re-raised to the caller).
        remote_path = self._canonize_url(remote_path)
        try:
            obj = self._get_object(remote_path)
            return self._driver.download_object_as_stream(
                obj, chunk_size=chunk_size, verbose=self._verbose, log=self.log
            )
        except DownloadError:
            raise
        except Exception as e:
            self._log.error("Could not download file : %s, err:%s " % (remote_path, str(e)))
            return None
def download_as_nparray(self, remote_path, chunk_size=None):
try:
stream = self.download_as_stream(remote_path, chunk_size)
if stream is None:
return
# TODO: ugly py3 hack, please remove ASAP
if six.PY3 and not isinstance(stream, GeneratorType):
import numpy as np
return np.frombuffer(stream, dtype=np.uint8)
else:
import numpy as np
return np.asarray(bytearray(b''.join(stream)), dtype=np.uint8)
except Exception as e:
self._log.error("Could not download file : %s, err:%s " % (remote_path, str(e)))
def delete(self, path):
return self._driver.delete_object(self._get_object(path))
def check_write_permissions(self, dest_path=None):
# create a temporary file, then delete it
base_url = dest_path or self._base_url
dest_path = base_url + '/.clearml.test'
# do not check http/s connection permissions
if dest_path.startswith('http'):
return True
try:
self.upload_from_stream(stream=six.BytesIO(b'clearml'), dest_path=dest_path)
self.delete(path=dest_path)
except Exception:
raise ValueError('Insufficient permissions for {}'.format(base_url))
return True
@classmethod
def download_from_url(cls, remote_url, local_path, overwrite_existing=False):
"""
Download a file from remote URL to a local storage
:param remote_url: Remote URL. Example: https://example.com/image.jpg or s3://bucket/folder/file.mp4 etc.
:param local_path: target location for downloaded file. Example: /tmp/image.jpg
:param overwrite_existing: If True and local_path exists, it will overwrite it, otherwise print warning
:return: local_path if download was successful.
"""
helper = cls.get(remote_url)
if not helper:
return None
return helper.download_to_file(remote_url, local_path, overwrite_existing=overwrite_existing)
    @classmethod
    def _canonize_url(cls, url):
        # Apply any registered path-substitution rules to the url (see _apply_url_substitutions).
        return cls._apply_url_substitutions(url)
@classmethod
def _apply_url_substitutions(cls, url):
def replace_separator(_url, where, sep):
return _url[:where] + _url[where:].replace(sep, os.sep)
for index, rule in enumerate(cls._path_substitutions):
if url.startswith(rule.registered_prefix):
url = url.replace(
rule.registered_prefix,
rule.local_prefix,
1, # count. str.replace() does not support keyword arguments
)
if rule.replace_windows_sep:
url = replace_separator(url, len(rule.local_prefix), '\\')
if rule.replace_linux_sep:
url = replace_separator(url, len(rule.local_prefix), '/')
break
return url
@classmethod
def _resolve_base_url(cls, base_url):
parsed = urlparse(base_url)
if parsed.scheme == _Boto3Driver.scheme:
conf = cls._s3_configurations.get_config_by_uri(base_url)
bucket = conf.bucket
if not bucket:
parts = Path(parsed.path.strip('/')).parts
if parts:
bucket = parts[0]
return '/'.join(x for x in ('s3:/', conf.host, bucket) if x)
elif parsed.scheme == _AzureBlobServiceStorageDriver.scheme:
conf = cls._azure_configurations.get_config_by_uri(base_url)
if not conf:
raise StorageError("Can't find azure configuration for {}".format(base_url))
return str(furl(base_url).set(path=conf.container_name))
elif parsed.scheme == _GoogleCloudStorageDriver.scheme:
conf = cls._gs_configurations.get_config_by_uri(base_url)
return str(furl(scheme=parsed.scheme, netloc=conf.bucket))
elif parsed.scheme == 'http':
return 'http://'
elif parsed.scheme == 'https':
return 'https://'
else: # if parsed.scheme == 'file':
# if we do not know what it is, we assume file
return 'file://'
@classmethod
def conform_url(cls, folder_uri, base_url=None):
if not folder_uri:
return folder_uri
_base_url = cls._resolve_base_url(folder_uri) if not base_url else base_url
if not folder_uri.startswith(_base_url):
prev_folder_uri = folder_uri
if _base_url == 'file://':
folder_uri = str(Path(folder_uri).absolute())
if folder_uri.startswith('/'):
folder_uri = _base_url + folder_uri
else:
folder_uri = '/'.join((_base_url, folder_uri))
cls._get_logger().debug('Upload destination {} amended to {} for registration purposes'.format(
prev_folder_uri, folder_uri))
else:
raise ValueError('folder_uri: {} does not start with base url: {}'.format(folder_uri, _base_url))
return folder_uri
def _absolute_object_name(self, path):
""" Returns absolute remote path, including any prefix that is handled by the container """
if not path.startswith(self.base_url):
return self.base_url.rstrip('/') + '///' + path.lstrip('/')
return path
def _normalize_object_name(self, path):
""" Normalize remote path. Remove any prefix that is already handled by the container """
if path.startswith(self.base_url):
path = path[len(self.base_url):]
if path.startswith('/') and os.name == 'nt':
path = path[1:]
if self.scheme in (_Boto3Driver.scheme, _GoogleCloudStorageDriver.scheme,
_AzureBlobServiceStorageDriver.scheme):
path = path.lstrip('/')
return path
    def _do_async_upload(self, data):
        # upload-pool entry point: unpack the queued _UploadData and run the blocking upload
        assert isinstance(data, self._UploadData)
        return self._do_upload(data.src_path, data.dest_path, extra=data.extra, cb=data.callback,
                               verbose=True, retries=data.retries)
def _upload_from_file(self, local_path, dest_path, extra=None):
if not hasattr(self._driver, 'upload_object'):
with open(local_path, 'rb') as stream:
res = self.upload_from_stream(stream=stream, dest_path=dest_path, extra=extra)
else:
object_name = self._normalize_object_name(dest_path)
extra = extra.copy() if extra else {}
extra.update(self._extra)
cb = UploadProgressReport.from_file(local_path, self._verbose, self._log)
res = self._driver.upload_object(
file_path=local_path,
container=self._container,
object_name=object_name,
callback=cb,
extra=extra)
return res
    def _do_upload(self, src_path, dest_path, extra=None, cb=None, verbose=False, retries=1):
        """Blocking upload of *src_path* to *dest_path*, with retries and callback reporting.

        The callback *cb* is invoked with None on start, False on final failure, and the
        destination path on success.  The last exception is re-raised if all retries fail.
        """
        object_name = self._normalize_object_name(dest_path)
        # notify start of upload
        if cb:
            try:
                cb(None)
            except Exception as e:
                self._log.error("Calling upload callback when starting upload: %s" % str(e))
        if verbose:
            msg = 'Starting upload: {} => {}{}'.format(
                src_path,
                (self._container.name if self._container.name.endswith('/') else self._container.name + '/')
                if self._container and self._container.name else '', object_name)
            # local destinations are logged quietly; remote uploads at info level
            if object_name.startswith('file://') or object_name.startswith('/'):
                self._log.debug(msg)
            else:
                self._log.info(msg)
        last_ex = None
        for i in range(max(1, retries)):
            try:
                if not self._upload_from_file(local_path=src_path, dest_path=dest_path, extra=extra):
                    # retry if failed
                    last_ex = ValueError("Upload failed")
                    continue
                last_ex = None
                break
            except Exception as e:
                last_ex = e
        if last_ex:
            # all retries exhausted: report failure via callback, then re-raise
            self._log.error("Exception encountered while uploading %s" % str(last_ex))
            if cb:
                try:
                    cb(False)
                except Exception as e:
                    self._log.warning("Exception on upload callback: %s" % str(e))
            raise last_ex
        if verbose:
            self._log.debug("Finished upload: %s => %s" % (src_path, object_name))
        # notify success with the destination path
        if cb:
            try:
                cb(dest_path)
            except Exception as e:
                self._log.warning("Exception on upload callback: %s" % str(e))
        return dest_path
def _get_object(self, path):
object_name = self._normalize_object_name(path)
try:
return self._driver.get_object(
container_name=self._container.name if self._container else '', object_name=object_name)
except ConnectionError:
raise DownloadError
except Exception as e:
self.log.warning('Storage helper problem for {}: {}'.format(str(object_name), str(e)))
return None
    @staticmethod
    def _initialize_upload_pool():
        # lazily create the single shared async-upload thread pool
        if not StorageHelper._upload_pool:
            StorageHelper._upload_pool = ThreadPool(processes=1)
@staticmethod
def close_async_threads():
if StorageHelper._upload_pool:
pool = StorageHelper._upload_pool
StorageHelper._upload_pool = None
# noinspection PyBroadException
try:
pool.terminate()
pool.join()
except Exception:
pass
class _HttpDriver(_Driver):
    """ LibCloud http/https adapter (simple, enough for now) """

    # (connect, read) timeouts in seconds used for GET/DELETE requests
    timeout = (5.0, 30.)
    # assumed minimal upload speed; used to derive a POST timeout from the stream size
    min_kbps_speed = 50

    schemes = ('http', 'https')

    class _Container(object):
        # lazily-initialized shared backend session, used only to sign requests
        # aimed at our own files server
        _default_backend_session = None
        _default_files_server_host = None

        def __init__(self, name, retries=5, **kwargs):
            # name is the scheme+host prefix ('http://' or 'https://') this container serves
            self.name = name
            self.session = get_http_session_with_retry(total=retries, connect=retries, read=retries, redirect=retries)

        def get_headers(self, url):
            """Return auth headers when *url* targets the default files server, otherwise None."""
            if not self._default_backend_session:
                from ..backend_interface.base import InterfaceBase
                self._default_backend_session = InterfaceBase._get_default_session()
            if self._default_files_server_host is None:
                self._default_files_server_host = self._default_backend_session.get_files_server_host().rstrip('/')
            if url == self._default_files_server_host or url.startswith(self._default_files_server_host + '/'):
                return self._default_backend_session.add_auth_headers({})
            return None

    class _HttpSessionHandle(object):
        # lightweight handle returned by get_object(); resolved to a real HTTP
        # response lazily in _get_download_object()
        def __init__(self, url, is_stream, container_name, object_name):
            self.url, self.is_stream, self.container_name, self.object_name = \
                url, is_stream, container_name, object_name

    def __init__(self, retries=5):
        self._retries = retries
        # one cached _Container (i.e. retry-enabled session) per url prefix
        self._containers = {}

    def get_container(self, container_name, config=None, **kwargs):
        if container_name not in self._containers:
            self._containers[container_name] = self._Container(name=container_name, retries=self._retries, **kwargs)
        return self._containers[container_name]

    def upload_object_via_stream(self, iterator, container, object_name, extra=None, callback=None, **kwargs):
        # object_name is '<host-and-path>/<file-field>'; container.name holds the scheme prefix
        url = object_name[:object_name.index('/')]
        url_path = object_name[len(url) + 1:]
        full_url = container.name + url
        # when sending data in post, there is no connection timeout, just an entire upload timeout
        timeout = self.timeout[-1]
        stream_size = 0
        if hasattr(iterator, 'tell') and hasattr(iterator, 'seek'):
            # measure the stream size (restoring the position) to scale the timeout
            pos = iterator.tell()
            iterator.seek(0, 2)
            stream_size = iterator.tell() - pos
            iterator.seek(pos, 0)
            timeout = max(timeout, (stream_size / 1024) / float(self.min_kbps_speed))
        res = container.session.post(full_url, files={url_path: iterator}, timeout=timeout,
                                     headers=container.get_headers(full_url))
        if res.status_code != requests.codes.ok:
            raise ValueError('Failed uploading object %s (%d): %s' % (object_name, res.status_code, res.text))
        # call back is useless because we are not calling it while uploading...
        # if callback and stream_size:
        #     try:
        #         callback(stream_size)
        #     except Exception as ex:
        #         log.debug('Exception raised when running callback function: %s' % ex)
        return res

    def list_container_objects(self, *args, **kwargs):
        raise NotImplementedError('List is not implemented for http protocol')

    def delete_object(self, obj, *args, **kwargs):
        """Issue an HTTP DELETE for the object's url; returns True on HTTP 200."""
        assert isinstance(obj, self._HttpSessionHandle)
        container = self._containers[obj.container_name]
        res = container.session.delete(obj.url, headers=container.get_headers(obj.url))
        if res.status_code != requests.codes.ok:
            self._get_logger().warning('Failed deleting object %s (%d): %s' % (
                obj.object_name, res.status_code, res.text))
            return False
        return True

    def get_object(self, container_name, object_name, *args, **kwargs):
        # no network access here -- just build a handle; the GET happens on download
        is_stream = kwargs.get('stream', True)
        url = ''.join((container_name, object_name.lstrip('/')))
        return self._HttpSessionHandle(url, is_stream, container_name, object_name)

    def _get_download_object(self, obj):
        # bypass for session result
        if not isinstance(obj, self._HttpSessionHandle):
            return obj
        container = self._containers[obj.container_name]
        # set stream flag before we send the request
        container.session.stream = obj.is_stream
        res = container.session.get(obj.url, timeout=self.timeout, headers=container.get_headers(obj.url))
        if res.status_code != requests.codes.ok:
            raise ValueError('Failed getting object %s (%d): %s' % (obj.object_name, res.status_code, res.text))
        return res

    def download_object_as_stream(self, obj, chunk_size=64 * 1024, **_):
        # return iterable object
        obj = self._get_download_object(obj)
        return obj.iter_content(chunk_size=chunk_size)

    def download_object(self, obj, local_path, overwrite_existing=True, delete_on_failure=True, callback=None, **_):
        """Download the object to *local_path*; returns the number of bytes written."""
        obj = self._get_download_object(obj)
        p = Path(local_path)
        if not overwrite_existing and p.is_file():
            self.get_logger().warning('failed saving after download: overwrite=False and file exists (%s)' % str(p))
            return
        length = 0
        with p.open(mode='wb') as f:
            for chunk in obj.iter_content(chunk_size=5 * 1024 * 1024):
                # filter out keep-alive new chunks
                if not chunk:
                    continue
                chunk_size = len(chunk)
                f.write(chunk)
                length += chunk_size
                if callback:
                    callback(chunk_size)
        return length

    def get_direct_access(self, remote_path, **_):
        # http objects have no local filesystem equivalent
        return None

    def test_upload(self, test_path, config, **kwargs):
        # http uploads are assumed possible; actual failures surface on upload
        return True

    def upload_object(self, file_path, container, object_name, extra, callback=None, **kwargs):
        with open(file_path, 'rb') as stream:
            return self.upload_object_via_stream(iterator=stream, container=container,
                                                 object_name=object_name, extra=extra, callback=callback, **kwargs)
class _Stream(object):
encoding = None
mode = 'rw'
name = ''
newlines = '\n'
softspace = False
def __init__(self, input_iterator=None):
self.closed = False
self._buffer = Queue()
self._input_iterator = input_iterator
self._leftover = None
def __iter__(self):
return self
def __next__(self):
return self.next()
def close(self):
self.closed = True
def flush(self):
pass
def fileno(self):
return 87
def isatty(self):
return False
def next(self):
while not self.closed or not self._buffer.empty():
# input stream
if self._input_iterator:
try:
chunck = next(self._input_iterator)
return chunck
except StopIteration:
self.closed = True
raise StopIteration()
except Exception as ex:
_Driver.get_logger().error('Failed downloading: %s' % ex)
else:
# in/out stream
try:
return self._buffer.get(block=True, timeout=1.)
except Empty:
pass
raise StopIteration()
def read(self, size=None):
try:
data = self.next() if self._leftover is None else self._leftover
except StopIteration:
return six.b('')
self._leftover = None
try:
while size is None or not data or len(data) < size:
chunk = self.next()
if chunk is not None:
if data is not None:
data += chunk
else:
data = chunk
except StopIteration:
pass
if size is not None and data and len(data) > size:
self._leftover = data[size:]
return data[:size]
return data
def readline(self, size=None):
return self.read(size)
def readlines(self, sizehint=None):
pass
def truncate(self, size=None):
pass
def write(self, bytes):
self._buffer.put(bytes, block=True)
def writelines(self, sequence):
for s in sequence:
self.write(s)
class _Boto3Driver(_Driver):
    """ Boto3 storage adapter (simple, enough for now) """

    _min_pool_connections = 512
    _max_multipart_concurrency = deferred_config('aws.boto3.max_multipart_concurrency', 16)
    _pool_connections = deferred_config('aws.boto3.pool_connections', 512)

    _stream_download_pool_connections = 128
    _stream_download_pool = None

    # cache of _Container objects, keyed by the full 's3://host/bucket' url
    _containers = {}

    scheme = 's3'
    scheme_prefix = str(furl(scheme=scheme, netloc=''))

    # buckets for which a get-bucket-location failure was already reported (avoid log spam)
    _bucket_location_failure_reported = set()

    class _Container(object):
        _creation_lock = threading.Lock()

        def __init__(self, name, cfg):
            try:
                import boto3
                import botocore.client
                from botocore.exceptions import ClientError  # noqa: F401
            except ImportError:
                raise UsageError(
                    'AWS S3 storage driver (boto3) not found. '
                    'Please install driver using: pip install \"boto3>=1.9\"'
                )
            # skip 's3://'
            self.name = name[5:]
            endpoint = (('https://' if cfg.secure else 'http://') + cfg.host) if cfg.host else None
            # boto3 client creation isn't thread-safe (client itself is)
            with self._creation_lock:
                boto_kwargs = {
                    "endpoint_url": endpoint,
                    "use_ssl": cfg.secure,
                    "verify": cfg.verify,
                    "config": botocore.client.Config(
                        max_pool_connections=max(
                            _Boto3Driver._min_pool_connections,
                            _Boto3Driver._pool_connections)
                    )
                }
                # with use_credentials_chain, boto3 resolves credentials on its own
                if not cfg.use_credentials_chain:
                    boto_kwargs["aws_access_key_id"] = cfg.key
                    boto_kwargs["aws_secret_access_key"] = cfg.secret
                self.resource = boto3.resource(
                    's3',
                    **boto_kwargs
                )
            self.config = cfg
            # when a custom host is configured, the bucket name follows the host segment
            bucket_name = self.name[len(cfg.host) + 1:] if cfg.host else self.name
            self.bucket = self.resource.Bucket(bucket_name)

    @attrs
    class ListResult(object):
        # minimal listing record -- only the object key is exposed
        name = attrib(default=None)

    def __init__(self):
        # all state lives in class-level caches; nothing to initialize per instance
        pass

    def _get_stream_download_pool(self):
        # lazily create the shared streaming-download thread pool
        if self._stream_download_pool is None:
            self._stream_download_pool = ThreadPoolExecutor(max_workers=self._stream_download_pool_connections)
        return self._stream_download_pool

    def get_container(self, container_name, config=None, **kwargs):
        if container_name not in self._containers:
            self._containers[container_name] = self._Container(name=container_name, cfg=config)
        self._containers[container_name].config.retries = kwargs.get('retries', 5)
        return self._containers[container_name]

    def upload_object_via_stream(self, iterator, container, object_name, callback=None, extra=None, **kwargs):
        """Upload from a chunk iterator; returns True on success, False on failure.

        NOTE: *extra* is accepted for interface compatibility but unused here.
        """
        import boto3.s3.transfer
        stream = _Stream(iterator)
        try:
            container.bucket.upload_fileobj(stream, object_name, Config=boto3.s3.transfer.TransferConfig(
                use_threads=container.config.multipart,
                max_concurrency=self._max_multipart_concurrency if container.config.multipart else 1,
                num_download_attempts=container.config.retries),
                Callback=callback,
            )
        except Exception as ex:
            self.get_logger().error('Failed uploading: %s' % ex)
            return False
        return True

    def upload_object(self, file_path, container, object_name, callback=None, extra=None, **kwargs):
        """Upload a local file; returns True on success, False on failure."""
        import boto3.s3.transfer
        try:
            container.bucket.upload_file(file_path, object_name, Config=boto3.s3.transfer.TransferConfig(
                use_threads=container.config.multipart,
                max_concurrency=self._max_multipart_concurrency if container.config.multipart else 1,
                num_download_attempts=container.config.retries),
                Callback=callback)
        except Exception as ex:
            self.get_logger().error('Failed uploading: %s' % ex)
            return False
        return True

    def list_container_objects(self, container, ex_prefix=None, **kwargs):
        """Yield ListResult records for the bucket, optionally filtered by key prefix."""
        if ex_prefix:
            res = container.bucket.objects.filter(Prefix=ex_prefix)
        else:
            res = container.bucket.objects.all()
        # NOTE(review): loop variable shadows the collection name ('for res in res');
        # works because the iterable is evaluated once, but worth renaming.
        for res in res:
            yield self.ListResult(name=res.key)

    def delete_object(self, object, **kwargs):
        """Delete an S3 object; returns True only when a follow-up load() confirms a 404."""
        from botocore.exceptions import ClientError
        object.delete()
        try:
            # Try loading the file to verify deletion
            object.load()
            return False
        except ClientError as e:
            return int(e.response['Error']['Code']) == 404

    def get_object(self, container_name, object_name, *args, **kwargs):
        full_container_name = 's3://' + container_name
        container = self._containers[full_container_name]
        obj = container.resource.Object(container.bucket.name, object_name)
        # remember which cached container this object belongs to (used on download)
        obj.container_name = full_container_name
        return obj

    def download_object_as_stream(self, obj, chunk_size=64 * 1024, verbose=None, log=None, **_):
        """Start an async download into a _Stream and return the stream immediately."""
        def async_download(a_obj, a_stream, cb, cfg):
            try:
                a_obj.download_fileobj(a_stream, Callback=cb, Config=cfg)
            except Exception as ex:
                (log or self.get_logger()).error('Failed downloading: %s' % ex)
            # closing the stream unblocks any reader waiting for more chunks
            a_stream.close()

        import boto3.s3.transfer
        # return iterable object
        stream = _Stream()
        container = self._containers[obj.container_name]
        config = boto3.s3.transfer.TransferConfig(
            use_threads=container.config.multipart,
            max_concurrency=self._max_multipart_concurrency if container.config.multipart else 1,
            num_download_attempts=container.config.retries)
        total_size_mb = obj.content_length / (1024. * 1024.)
        remote_path = os.path.join(obj.container_name, obj.key)
        cb = DownloadProgressReport(total_size_mb, verbose, remote_path, log)
        self._get_stream_download_pool().submit(async_download, obj, stream, cb, config)
        return stream

    def download_object(self, obj, local_path, overwrite_existing=True, delete_on_failure=True, callback=None, **_):
        import boto3.s3.transfer
        p = Path(local_path)
        if not overwrite_existing and p.is_file():
            self.get_logger().warning('failed saving after download: overwrite=False and file exists (%s)' % str(p))
            return
        container = self._containers[obj.container_name]
        obj.download_file(str(p),
                          Callback=callback,
                          Config=boto3.s3.transfer.TransferConfig(
                              use_threads=container.config.multipart,
                              max_concurrency=self._max_multipart_concurrency if container.config.multipart else 1,
                              num_download_attempts=container.config.retries))

    @classmethod
    def _test_bucket_config(cls, conf, log, test_path='', raise_on_error=True, log_on_error=True):
        """Verify bucket access by uploading a small test object; optionally raise/log on failure.

        Returns True on success; on failure either raises (raise_on_error) or returns False.
        """
        try:
            import boto3
            from botocore.exceptions import ClientError
        except ImportError:
            return False
        if not conf.bucket:
            return False
        try:
            if not conf.is_valid():
                raise Exception('Missing credentials')
            fullname = furl(conf.bucket).add(path=test_path).add(path='%s-upload_test' % cls.__module__)
            bucket_name = str(fullname.path.segments[0])
            filename = str(furl(path=fullname.path.segments[1:]))
            # payload identifies who/where/when the test was run
            data = {
                'user': getpass.getuser(),
                'machine': gethostname(),
                'time': datetime.utcnow().isoformat()
            }
            boto_session = boto3.Session(conf.key, conf.secret)
            boto_resource = boto_session.resource('s3', conf.region)
            bucket = boto_resource.Bucket(bucket_name)
            bucket.put_object(Key=filename, Body=six.b(json.dumps(data)))
            region = cls._get_bucket_region(conf=conf, log=log, report_info=True)
            # a region mismatch is treated as a failure (falls through to the error path below)
            if region and ((conf.region and region != conf.region) or (not conf.region and region != 'us-east-1')):
                msg = "incorrect region specified for bucket %s (detected region %s)" % (conf.bucket, region)
            else:
                return True
        except ClientError as ex:
            msg = ex.response['Error']['Message']
            if log_on_error and log:
                log.error(msg)
            if raise_on_error:
                raise
        except Exception as ex:
            msg = str(ex)
            if log_on_error and log:
                log.error(msg)
            if raise_on_error:
                raise
        msg = ("Failed testing access to bucket %s: " % conf.bucket) + msg
        if log_on_error and log:
            log.error(msg)
        if raise_on_error:
            raise StorageError(msg)
        return False

    @classmethod
    def _get_bucket_region(cls, conf, log=None, report_info=False):
        """Return the bucket's region (LocationConstraint), or None when it cannot be determined."""
        import boto3
        from botocore.exceptions import ClientError
        if not conf.bucket:
            return None

        def report(msg):
            # log each bucket-host failure only once (see _bucket_location_failure_reported)
            if log and conf.get_bucket_host() not in cls._bucket_location_failure_reported:
                if report_info:
                    log.debug(msg)
                else:
                    log.warning(msg)
                cls._bucket_location_failure_reported.add(conf.get_bucket_host())

        try:
            boto_session = boto3.Session(conf.key, conf.secret)
            boto_resource = boto_session.resource('s3')
            return boto_resource.meta.client.get_bucket_location(Bucket=conf.bucket)["LocationConstraint"]
        except ClientError as ex:
            report("Failed getting bucket location (region) for bucket "
                   "%s: %s (%s, access_key=%s). Default region will be used. "
                   "This is normal if you do not have GET_BUCKET_LOCATION permission"
                   % (conf.bucket, ex.response['Error']['Message'], ex.response['Error']['Code'], conf.key))
        except Exception as ex:
            report("Failed getting bucket location (region) for bucket %s: %s. Default region will be used."
                   % (conf.bucket, str(ex)))
        return None

    def get_direct_access(self, remote_path, **_):
        # s3 objects have no local filesystem equivalent
        return None

    def test_upload(self, test_path, config, **_):
        # actual access verification is done via _test_bucket_config
        return True
class _GoogleCloudStorageDriver(_Driver):
    """Storage driver for google cloud storage"""

    _stream_download_pool_connections = 128
    _stream_download_pool = None

    # cache of _Container objects, keyed by the full 'gs://bucket' url
    _containers = {}

    scheme = 'gs'
    scheme_prefix = str(furl(scheme=scheme, netloc=''))

    class _Container(object):
        def __init__(self, name, cfg):
            """Wrap a google-cloud-storage client/bucket pair for bucket *name* using config *cfg*."""
            try:
                from google.cloud import storage
                from google.oauth2 import service_account
            except ImportError:
                raise UsageError(
                    'Google cloud driver not found. '
                    'Please install driver using: pip install \"google-cloud-storage>=1.13.2\"'
                )
            # strip the 'gs://' scheme prefix
            self.name = name[len(_GoogleCloudStorageDriver.scheme_prefix):]
            if cfg.credentials_json:
                credentials = service_account.Credentials.from_service_account_file(cfg.credentials_json)
            else:
                # fall back to application-default credentials
                credentials = None
            self.client = storage.Client(project=cfg.project, credentials=credentials)
            # tune the underlying HTTP adapters' connection pools when configured
            for adapter in self.client._http.adapters.values():
                if cfg.pool_connections:
                    adapter._pool_connections = cfg.pool_connections
                if cfg.pool_maxsize:
                    adapter._pool_maxsize = cfg.pool_maxsize
            self.config = cfg
            self.bucket = self.client.bucket(self.name)

    def _get_stream_download_pool(self):
        # lazily create the shared download thread pool
        if self._stream_download_pool is None:
            self._stream_download_pool = ThreadPoolExecutor(max_workers=self._stream_download_pool_connections)
        return self._stream_download_pool

    def get_container(self, container_name, config=None, **kwargs):
        if container_name not in self._containers:
            self._containers[container_name] = self._Container(name=container_name, cfg=config)
        self._containers[container_name].config.retries = kwargs.get('retries', 5)
        return self._containers[container_name]

    def upload_object_via_stream(self, iterator, container, object_name, extra=None, **kwargs):
        """Upload from a readable stream; returns True on success, False on failure."""
        try:
            blob = container.bucket.blob(object_name)
            blob.upload_from_file(iterator)
        except Exception as ex:
            self.get_logger().error('Failed uploading: %s' % ex)
            return False
        return True

    def upload_object(self, file_path, container, object_name, extra=None, **kwargs):
        """Upload a local file; returns True on success, False on failure."""
        try:
            blob = container.bucket.blob(object_name)
            blob.upload_from_filename(file_path)
        except Exception as ex:
            self.get_logger().error('Failed uploading: %s' % ex)
            return False
        return True

    def list_container_objects(self, container, **kwargs):
        return list(container.bucket.list_blobs())

    def delete_object(self, object, **kwargs):
        """Delete a blob; returns True only when the blob no longer exists afterwards."""
        try:
            object.delete()
        except Exception as ex:
            try:
                from google.cloud.exceptions import NotFound
                if isinstance(ex, NotFound):
                    return False
            except ImportError:
                pass
            name = getattr(object, "name", "")
            self.get_logger().warning("Failed deleting object {}: {}".format(name, ex))
            return False
        return not object.exists()

    def get_object(self, container_name, object_name, *args, **kwargs):
        full_container_name = str(furl(scheme=self.scheme, netloc=container_name))
        container = self._containers[full_container_name]
        obj = container.bucket.blob(object_name)
        # remember which cached container this blob came from
        obj.container_name = full_container_name
        return obj

    def download_object_as_stream(self, obj, chunk_size=256 * 1024, **_):
        # fix: removed the dead fallback implementation that followed this
        # unconditional raise -- it was unreachable. Use download_object() instead.
        raise NotImplementedError('Unsupported for google storage')

    def download_object(self, obj, local_path, overwrite_existing=True, delete_on_failure=True, callback=None, **_):
        p = Path(local_path)
        if not overwrite_existing and p.is_file():
            self.get_logger().warning('failed saving after download: overwrite=False and file exists (%s)' % str(p))
            return
        obj.download_to_filename(str(p))

    def test_upload(self, test_path, config, **_):
        """Check get/update IAM permissions on the target bucket (or a specific folder blob)."""
        bucket_url = str(furl(scheme=self.scheme, netloc=config.bucket, path=config.subdir))
        bucket = self.get_container(container_name=bucket_url, config=config).bucket
        test_obj = bucket
        if test_path:
            if not test_path.endswith('/'):
                test_path += '/'
            blob = bucket.blob(test_path)
            if blob.exists():
                test_obj = blob
        permissions_to_test = ('storage.objects.get', 'storage.objects.update')
        return set(test_obj.test_iam_permissions(permissions_to_test)) == set(permissions_to_test)

    def get_direct_access(self, remote_path, **_):
        # gs objects have no local filesystem equivalent
        return None
class _AzureBlobServiceStorageDriver(_Driver):
scheme = 'azure'
_containers = {}
class _Container(object):
def __init__(self, name, config):
try:
from azure.common import AzureHttpError # noqa: F401
from azure.storage.blob import BlockBlobService
except ImportError:
raise UsageError(
'Azure blob storage driver not found. '
'Please install driver using: pip install \"azure.storage.blob<=2.1.0\"'
)
self.name = name
self.config = config
self.blob_service = BlockBlobService(
account_name=config.account_name,
account_key=config.account_key,
)
@attrs
class _Object(object):
container = attrib()
blob_name = attrib()
content_length = attrib()
def get_container(self, container_name=None, config=None, **kwargs):
container_name = container_name or config.container_name
if container_name not in self._containers:
self._containers[container_name] = self._Container(name=container_name, config=config)
# self._containers[container_name].config.retries = kwargs.get('retries', 5)
return self._containers[container_name]
def upload_object_via_stream(self, iterator, container, object_name, callback=None, extra=None, **kwargs):
from azure.common import AzureHttpError # noqa
blob_name = self._blob_name_from_object_path(object_name, container.name) # noqa: F841
try:
container.blob_service.MAX_SINGLE_PUT_SIZE = 16 * 1024 * 1024
container.blob_service.socket_timeout = (300, 2000)
container.blob_service.create_blob_from_bytes(
container.name,
object_name,
iterator.read() if hasattr(iterator, "read") else bytes(iterator),
# timeout=300,
max_connections=2,
progress_callback=callback,
)
return True
except AzureHttpError as ex:
self.get_logger().error('Failed uploading (Azure error): %s' % ex)
except Exception as ex:
self.get_logger().error('Failed uploading: %s' % ex)
return False
def upload_object(self, file_path, container, object_name, callback=None, extra=None, **kwargs):
from azure.common import AzureHttpError # noqa
blob_name = self._blob_name_from_object_path(object_name, container.name)
stream = None
try:
from azure.storage.blob import ContentSettings # noqa
from mimetypes import guess_type
container.blob_service.MAX_SINGLE_PUT_SIZE = 16 * 1024 * 1024
container.blob_service.socket_timeout = (300, 2000)
container.blob_service.create_blob_from_path(
container.name,
blob_name,
file_path,
# timeout=300,
max_connections=2,
content_settings=ContentSettings(content_type=guess_type(file_path)),
progress_callback=callback,
)
return True
except AzureHttpError as ex:
self.get_logger().error('Failed uploading (Azure error): %s' % ex)
except Exception as ex:
self.get_logger().error('Failed uploading: %s' % ex)
finally:
if stream:
stream.close()
def list_container_objects(self, container, ex_prefix=None, **kwargs):
return list(container.blob_service.list_blobs(container_name=container.name, prefix=ex_prefix))
def delete_object(self, object, **kwargs):
container = object.container
container.blob_service.delete_blob(
container.name,
object.blob_name,
)
return not object.container.blob_service.exists(container.name, object.blob_name)
def get_object(self, container_name, object_name, *args, **kwargs):
container = self._containers.get(container_name)
if not container:
raise StorageError("Container `{}` not found for object {}".format(container_name, object_name))
# blob_name = self._blob_name_from_object_path(object_name, container_name)
blob = container.blob_service.get_blob_properties(container.name, object_name)
return self._Object(container=container, blob_name=blob.name, content_length=blob.properties.content_length)
def download_object_as_stream(self, obj, verbose, *_, **__):
container = obj.container
total_size_mb = obj.content_length / (1024. * 1024.)
remote_path = os.path.join(
"{}://".format(self.scheme),
container.config.account_name,
container.name,
obj.blob_name
)
cb = DownloadProgressReport(total_size_mb, verbose, remote_path, self.get_logger())
blob = container.blob_service.get_blob_to_bytes(
container.name,
obj.blob_name,
progress_callback=cb,
)
return blob.content
def download_object(self, obj, local_path, overwrite_existing=True, delete_on_failure=True, callback=None, **_):
p = Path(local_path)
if not overwrite_existing and p.is_file():
self.get_logger().warning('failed saving after download: overwrite=False and file exists (%s)' % str(p))
return
download_done = threading.Event()
download_done.counter = 0
def callback_func(current, total):
if callback:
chunk = current - download_done.counter
download_done.counter += chunk
callback(chunk)
if current >= total:
download_done.set()
container = obj.container
container.blob_service.MAX_SINGLE_GET_SIZE = 5 * 1024 * 1024
_ = container.blob_service.get_blob_to_path(
container.name,
obj.blob_name,
local_path,
max_connections=10,
progress_callback=callback_func,
)
download_done.wait()
def test_upload(self, test_path, config, **_):
container = self.get_container(config=config)
try:
container.blob_service.get_container_properties(container.name)
except Exception:
return False
else:
# Using the account Key, we can always upload...
return True
@classmethod
def _blob_name_from_object_path(cls, name, container_name):
scheme = urlparse(name).scheme
if scheme:
if scheme != cls.scheme:
raise StorageError(
"When using a URL, only the `{}` scheme is supported for Azure storage: {}",
cls.scheme,
name,
)
f = furl(name)
if not f.path.segments:
raise StorageError(
"Missing container name in URL {}",
name,
)
parsed_container_name = f.path.segments[0]
if parsed_container_name != container_name:
raise StorageError(
"Container name mismatch (expected {}, found {}) in {}",
container_name,
parsed_container_name,
name,
)
if len(f.path.segments) == 1:
raise StorageError(
"No path found following container name {} in {}",
container_name,
name,
)
return f.path.segments[0], os.path.join(*f.path.segments[1:])
return name
    def get_direct_access(self, remote_path, **_):
        # Azure blobs have no local filesystem representation, so direct access is never available
        return None
class _FileStorageDriver(_Driver):
    """
    Storage driver for the local file-system (``file://`` URLs).

    Mirrors the cloud-driver interface: a "container" is a directory directly
    under ``base_path`` and an "object" is a file stored below it.
    """

    scheme = "file"

    # NOTE(review): 8096 looks like a typo for 8192 - kept for compatibility
    CHUNK_SIZE = 8096

    # book-keeping folders that must never be reported as stored objects
    IGNORE_FOLDERS = ['.lock', '.hash']

    Object = namedtuple("Object", ['name', 'size', 'extra', 'driver', 'container', 'hash', 'meta_data'])

    class _Container(object):
        # lightweight stand-in for a cloud container (a named directory)
        def __init__(self, name, extra, driver):
            self.name = name
            self.extra = extra
            self.driver = driver

    def __init__(self, key, secret=None, secure=True, host=None, port=None,
                 **kwargs):
        # The access "key" doubles as the root path of the storage
        self.base_path = key

    def _make_path(self, path, ignore_existing=True):
        """
        Create a directory tree, optionally tolerating an already existing one.

        :param path: directory path to create
        :param ignore_existing: if False, re-raise when the path already exists
        """
        try:
            os.makedirs(path)
        except OSError:
            exp = sys.exc_info()[1]
            if exp.errno == errno.EEXIST and not ignore_existing:
                raise exp

    def _check_container_name(self, container_name):
        """
        Validate a container name (must not contain path separators).

        :param container_name: Container name
        :type container_name: ``str``
        :raises ValueError: if the name contains ``/`` or ``\\``
        """
        if '/' in container_name or '\\' in container_name:
            raise ValueError("Container name \"{}\" cannot contain \\ or / ".format(container_name))

    def _make_container(self, container_name):
        """
        Create a container instance for an existing directory.

        :param container_name: Container name.
        :type container_name: ``str``
        :return: A Container instance.
        :raises OSError: if the directory is missing or not a directory
        """
        container_name = container_name or '.'
        self._check_container_name(container_name)
        full_path = os.path.realpath(os.path.join(self.base_path, container_name))
        try:
            stat = os.stat(full_path)
            if not os.path.isdir(full_path):
                raise OSError("Target path \"{}\" is not a directory".format(full_path))
        except OSError:
            raise OSError("Target path \"{}\" is not accessible or does not exist".format(full_path))
        extra = {
            'creation_time': stat.st_ctime,
            'access_time': stat.st_atime,
            'modify_time': stat.st_mtime,
        }
        return self._Container(name=container_name, extra=extra, driver=self)

    def _make_object(self, container, object_name):
        """
        Create an object instance for an existing file.

        :param container: Container.
        :type container: :class:`Container`
        :param object_name: Object name.
        :type object_name: ``str``
        :return: A Object instance.
        :raises ValueError: if the path is a directory or inaccessible
        """
        full_path = os.path.realpath(os.path.join(self.base_path, container.name if container else '.', object_name))
        if os.path.isdir(full_path):
            raise ValueError("Target path \"{}\" already exist".format(full_path))
        try:
            stat = os.stat(full_path)
        except Exception:
            raise ValueError("Cannot access target path \"{}\"".format(full_path))
        extra = {
            'creation_time': stat.st_ctime,
            'access_time': stat.st_atime,
            'modify_time': stat.st_mtime,
        }
        return self.Object(name=object_name, size=stat.st_size, extra=extra,
                           driver=self, container=container, hash=None, meta_data=None)

    def iterate_containers(self):
        """
        Return a generator of containers (one per directory under base_path).

        :return: A generator of Container instances.
        """
        for container_name in os.listdir(self.base_path):
            full_path = os.path.join(self.base_path, container_name)
            if not os.path.isdir(full_path):
                continue
            yield self._make_container(container_name)

    def _get_objects(self, container):
        """
        Recursively walk the container directory and yield its objects,
        skipping the book-keeping folders in IGNORE_FOLDERS.
        """
        cpath = self.get_container_cdn_url(container, check=True)
        for folder, subfolders, files in os.walk(cpath, topdown=True):
            # prune unwanted subfolders in-place so os.walk does not descend into them
            for subf in self.IGNORE_FOLDERS:
                if subf in subfolders:
                    subfolders.remove(subf)
            for name in files:
                full_path = os.path.join(folder, name)
                object_name = os.path.relpath(full_path, start=cpath)
                yield self._make_object(container, object_name)

    def iterate_container_objects(self, container):
        """
        Return a generator of objects for the given container.

        :param container: Container instance
        :type container: :class:`Container`
        :return: A generator of Object instances.
        """
        return self._get_objects(container)

    def get_container(self, container_name, **_):
        """
        Return a container instance.

        :param container_name: Container name.
        :type container_name: ``str``
        :return: A Container instance.
        """
        return self._make_container(container_name)

    def get_container_cdn_url(self, container, check=False):
        """
        Return a container CDN URL (here: its absolute directory path).

        :param container: Container instance
        :type container: :class:`Container`
        :param check: Indicates if the path's existence must be checked
        :type check: ``bool``
        :return: A CDN URL for this container.
        :raises ValueError: if check=True and the directory does not exist
        """
        path = os.path.realpath(os.path.join(self.base_path, container.name if container else '.'))
        if check and not os.path.isdir(path):
            raise ValueError("Target path \"{}\" does not exist".format(path))
        return path

    def get_object(self, container_name, object_name, **_):
        """
        Return an object instance.

        :param container_name: Container name.
        :type container_name: ``str``
        :param object_name: Object name.
        :type object_name: ``str``
        :return: An Object instance.
        """
        container = self._make_container(container_name)
        return self._make_object(container, object_name)

    def get_object_cdn_url(self, obj):
        """
        Return an object CDN URL (here: its absolute file path).

        :param obj: Object instance
        :type obj: :class:`Object`
        :return: A CDN URL for this object.
        """
        return os.path.realpath(os.path.join(self.base_path, obj.container.name, obj.name))

    def download_object(self, obj, destination_path, overwrite_existing=False, delete_on_failure=True, **_):
        """
        Download (copy) an object to the specified destination path.

        :param obj: Object instance.
        :type obj: :class:`Object`
        :param destination_path: Full path to a file or a directory where the
                                 incoming file will be saved.
        :type destination_path: ``str``
        :param overwrite_existing: True to overwrite an existing file, defaults to False.
        :type overwrite_existing: ``bool``
        :param delete_on_failure: True to delete a partially copied file on failure.
        :type delete_on_failure: ``bool``
        :return: True if the object was successfully downloaded, False otherwise.
        """
        obj_path = self.get_object_cdn_url(obj)
        base_name = os.path.basename(destination_path)
        if not base_name and not os.path.exists(destination_path):
            raise ValueError('Path \"{}\" does not exist'.format(destination_path))
        # a trailing-separator destination means "save under this directory, keep the object name"
        if not base_name:
            file_path = os.path.join(destination_path, obj.name)
        else:
            file_path = destination_path
        if os.path.exists(file_path) and not overwrite_existing:
            raise ValueError('File \"{}\" already exists, but overwrite_existing=False'.format(file_path))
        try:
            shutil.copy(obj_path, file_path)
        except IOError:
            if delete_on_failure:
                # noinspection PyBroadException
                try:
                    os.unlink(file_path)
                except Exception:
                    pass
            return False
        return True

    def download_object_as_stream(self, obj, chunk_size=None, **_):
        """
        Return a generator which yields the object's data in chunks.

        :param obj: Object instance
        :type obj: :class:`Object`
        :param chunk_size: Optional chunk size (in bytes).
        :type chunk_size: ``int``
        :return: A stream of binary chunks of data.
        """
        path = self.get_object_cdn_url(obj)
        with open(path, 'rb') as obj_file:
            for data in self._read_in_chunks(obj_file, chunk_size=chunk_size):
                yield data

    def upload_object(self, file_path, container, object_name, extra=None, verify_hash=True, **_):
        """
        Upload (copy) an object currently located on a disk.

        :param file_path: Path to the object on disk.
        :type file_path: ``str``
        :param container: Destination container.
        :type container: :class:`Container`
        :param object_name: Object name.
        :type object_name: ``str``
        :param verify_hash: Verify hash (unused for the local driver)
        :type verify_hash: ``bool``
        :param extra: (optional) Extra attributes (driver specific).
        :type extra: ``dict``
        :return: the created Object instance
        """
        path = self.get_container_cdn_url(container, check=True)
        obj_path = os.path.join(path, object_name)
        base_path = os.path.dirname(obj_path)
        self._make_path(base_path)
        shutil.copy(file_path, obj_path)
        # rw-rw-r-- so other local processes/users can read the stored object
        os.chmod(obj_path, int('664', 8))
        return self._make_object(container, object_name)

    def upload_object_via_stream(self, iterator, container, object_name, extra=None, **kwargs):
        """
        Upload an object using an iterator or file-like object.

        Note: unlike a true streaming upload, the data source is exhausted in a
        single read()/bytes() call, so very large inputs are fully buffered in
        memory. Prefer upload_object() for on-disk files.

        :type iterator: ``object``
        :param iterator: An object which implements the iterator interface or a
            file-like object with a read method, yielding binary (or, for
            StringIO, text) data.
        :type container: :class:`Container`
        :param container: Destination container.
        :type object_name: ``str``
        :param object_name: Object name.
        :type extra: ``dict``
        :param extra: (optional) Extra attributes (driver specific).
        :return: the created Object instance
        """
        path = self.get_container_cdn_url(container, check=True)
        obj_path = os.path.join(path, object_name)
        base_path = os.path.dirname(obj_path)
        self._make_path(base_path)
        obj_path = os.path.realpath(obj_path)
        # StringIO carries text, everything else is written as binary
        with open(obj_path, 'wb' if not isinstance(iterator, StringIO) else 'wt') as obj_file:
            obj_file.write(iterator.read() if hasattr(iterator, 'read') else bytes(iterator))
        os.chmod(obj_path, int('664', 8))
        return self._make_object(container, object_name)

    def delete_object(self, obj, **_):
        """
        Delete an object.

        :type obj: :class:`Object`
        :param obj: Object instance.
        :return: True on success.
        """
        path = self.get_object_cdn_url(obj)
        # noinspection PyBroadException
        try:
            os.unlink(path)
        except Exception:
            return False
        return True

    def create_container(self, container_name):
        """
        Create a new container (directory).

        :type container_name: ``str``
        :param container_name: Container name.
        :return: A Container instance on success.
        :raises ValueError: if the container exists or cannot be created
        """
        container_name = container_name or '.'
        self._check_container_name(container_name)
        path = os.path.join(self.base_path, container_name)
        try:
            self._make_path(path, ignore_existing=False)
        except OSError:
            exp = sys.exc_info()[1]
            if exp.errno == errno.EEXIST:
                raise ValueError('Container \"{}\" with this name already exists. The name '
                                 'must be unique among all the containers in the '
                                 'system'.format(container_name))
            else:
                raise ValueError('Error creating container \"{}\"'.format(container_name))
        except Exception:
            raise ValueError('Error creating container \"{}\"'.format(container_name))
        return self._make_container(container_name)

    def delete_container(self, container):
        """
        Delete a container (its directory must be empty).

        :type container: :class:`Container`
        :param container: Container instance
        :return: True on success, False otherwise.
        :raises ValueError: if the container still holds objects
        """
        # refuse to delete a container that still holds any object
        if any(True for _ in self._get_objects(container)):
            raise ValueError('Container \"{}\" is not empty'.format(container.name))
        path = self.get_container_cdn_url(container, check=True)
        # noinspection PyBroadException
        try:
            shutil.rmtree(path)
        except Exception:
            return False
        return True

    def list_container_objects(self, container, **kwargs):
        """Return all container objects as a list (see iterate_container_objects)."""
        return list(self.iterate_container_objects(container))

    @staticmethod
    def _read_in_chunks(iterator, chunk_size=None, fill_size=False, yield_empty=False):
        """
        Return a generator which yields data in chunks.

        :param iterator: An object which implements an iterator interface
                         or a File like object with read method.
        :type iterator: :class:`object` which implements iterator interface.
        :param chunk_size: Optional chunk size (defaults to CHUNK_SIZE)
        :type chunk_size: ``int``
        :param fill_size: If True, make sure chunks are exactly chunk_size in
                          length (except for last chunk).
        :type fill_size: ``bool``
        :param yield_empty: If true and iterator returned no data, only yield empty
                            bytes object
        :type yield_empty: ``bool``
        """
        chunk_size = chunk_size or _FileStorageDriver.CHUNK_SIZE
        if six.PY3:
            from io import FileIO as file

        if isinstance(iterator, (file)):
            get_data = iterator.read
            args = (chunk_size,)
        else:
            get_data = next
            args = (iterator,)

        # use a bytes literal: bytes('') raises TypeError on Python 3
        data = b''
        empty = False

        while not empty or len(data) > 0:
            if not empty:
                try:
                    chunk = bytes(get_data(*args))
                    if len(chunk) > 0:
                        data += chunk
                    else:
                        empty = True
                except StopIteration:
                    empty = True

            if len(data) == 0:
                if empty and yield_empty:
                    yield b''
                return

            if fill_size:
                if empty or len(data) >= chunk_size:
                    yield data[:chunk_size]
                    data = data[chunk_size:]
            else:
                yield data
                data = b''

    def get_direct_access(self, remote_path, **_):
        """
        Return the direct local path for a file:// URL (always available locally).

        :raises ValueError: if the path does not exist
        """
        # this will always make sure we have full path and file:// prefix
        full_url = StorageHelper.conform_url(remote_path)
        # now get rid of the file:// prefix
        path = Path(full_url[7:])
        if not path.exists():
            raise ValueError("Requested path does not exist: {}".format(path))
        return path.as_posix()

    def test_upload(self, test_path, config, **kwargs):
        # local filesystem: the upload call itself will surface any permission error
        return True
# All URL schemes handled by any registered driver class: single-scheme drivers
# expose `scheme`, multi-scheme drivers (e.g. http/https) expose `schemes`.
driver_schemes = set(
    filter(
        None,
        itertools.chain(
            (getattr(cls, "scheme", None) for cls in _Driver.__subclasses__()),
            *(getattr(cls, "schemes", []) for cls in _Driver.__subclasses__())
        )
    )
)
# Every scheme except local file:// requires a remote transport/credentials
remote_driver_schemes = driver_schemes - {_FileStorageDriver.scheme}
| 37.369776 | 120 | 0.599702 | from __future__ import with_statement
import errno
import getpass
import itertools
import json
import os
import shutil
import sys
import threading
from abc import ABCMeta, abstractmethod
from collections import namedtuple
from concurrent.futures import ThreadPoolExecutor
from copy import copy
from datetime import datetime
from multiprocessing.pool import ThreadPool
from tempfile import mktemp
from time import time
from types import GeneratorType
import requests
import six
from _socket import gethostname
from attr import attrs, attrib, asdict
from furl import furl
from pathlib2 import Path
from requests.exceptions import ConnectionError
from six import binary_type, StringIO
from six.moves.queue import Queue, Empty
from six.moves.urllib.parse import urlparse
from six.moves.urllib.request import url2pathname
from .callbacks import UploadProgressReport, DownloadProgressReport
from .util import quote_url
from ..backend_api.utils import get_http_session_with_retry
from ..backend_config.bucket_config import S3BucketConfigurations, GSBucketConfigurations, AzureContainerConfigurations
from ..config import config, deferred_config
from ..debugging import get_logger
from ..errors import UsageError
class StorageError(Exception):
    """Raised for storage configuration or access errors."""
    pass
class DownloadError(Exception):
    """Raised when a storage download fails irrecoverably."""
    pass
@six.add_metaclass(ABCMeta)
class _Driver(object):
    """Abstract interface every storage backend driver must implement."""

    @classmethod
    def get_logger(cls):
        # single shared logger for all storage drivers
        return get_logger('storage')

    @abstractmethod
    def get_container(self, container_name, config=None, **kwargs):
        """Return a driver-specific container handle."""
        pass

    @abstractmethod
    def test_upload(self, test_path, config, **kwargs):
        """Return True if uploading to the configured storage is possible."""
        pass

    @abstractmethod
    def upload_object_via_stream(self, iterator, container, object_name, extra, **kwargs):
        """Upload an object from an iterator/file-like source."""
        pass

    @abstractmethod
    def list_container_objects(self, container, ex_prefix, **kwargs):
        """List the objects stored in a container (optionally by prefix)."""
        pass

    @abstractmethod
    def get_direct_access(self, remote_path, **kwargs):
        """Return a local path for direct access, or None if unavailable."""
        pass

    @abstractmethod
    def download_object(self, obj, local_path, overwrite_existing, delete_on_failure, callback, **kwargs):
        """Download an object to a local file path."""
        pass

    @abstractmethod
    def download_object_as_stream(self, obj, chunk_size, **kwargs):
        """Return a stream/generator of the object's binary data."""
        pass

    @abstractmethod
    def delete_object(self, obj, **kwargs):
        """Delete a stored object."""
        pass

    @abstractmethod
    def upload_object(self, file_path, container, object_name, extra, **kwargs):
        """Upload an on-disk file as an object."""
        pass

    @abstractmethod
    def get_object(self, container_name, object_name, **kwargs):
        """Return a handle to a stored object."""
        pass
class StorageHelper(object):
_temp_download_suffix = '.partially'
@classmethod
def _get_logger(cls):
return get_logger('storage')
@attrs
class _PathSubstitutionRule(object):
registered_prefix = attrib(type=str)
local_prefix = attrib(type=str)
replace_windows_sep = attrib(type=bool)
replace_linux_sep = attrib(type=bool)
path_substitution_config = 'storage.path_substitution'
@classmethod
def load_list_from_config(cls):
rules_list = []
for index, sub_config in enumerate(config.get(cls.path_substitution_config, list())):
rule = cls(
registered_prefix=sub_config.get('registered_prefix', None),
local_prefix=sub_config.get('local_prefix', None),
replace_windows_sep=sub_config.get('replace_windows_sep', False),
replace_linux_sep=sub_config.get('replace_linux_sep', False),
)
if any(prefix is None for prefix in (rule.registered_prefix, rule.local_prefix)):
StorageHelper._get_logger().warning(
"Illegal substitution rule configuration '{}[{}]': {}".format(
cls.path_substitution_config,
index,
asdict(rule),
))
continue
if all((rule.replace_windows_sep, rule.replace_linux_sep)):
StorageHelper._get_logger().warning(
"Only one of replace_windows_sep and replace_linux_sep flags may be set."
"'{}[{}]': {}".format(
cls.path_substitution_config,
index,
asdict(rule),
))
continue
rules_list.append(rule)
return rules_list
class _UploadData(object):
@property
def src_path(self):
return self._src_path
@property
def dest_path(self):
return self._dest_path
@property
def extra(self):
return self._extra
@property
def callback(self):
return self._callback
@property
def retries(self):
return self._retries
def __init__(self, src_path, dest_path, extra, callback, retries):
self._src_path = src_path
self._dest_path = dest_path
self._extra = extra
self._callback = callback
self._retries = retries
def __str__(self):
return "src=%s" % self.src_path
_helpers = {}
_terminate = threading.Event()
_async_upload_threads = set()
_upload_pool = None
_s3_configurations = deferred_config('aws.s3', {}, transform=S3BucketConfigurations.from_config)
_gs_configurations = deferred_config('google.storage', {}, transform=GSBucketConfigurations.from_config)
_azure_configurations = deferred_config('azure.storage', {}, transform=AzureContainerConfigurations.from_config)
_path_substitutions = deferred_config(transform=_PathSubstitutionRule.load_list_from_config)
@property
def log(self):
return self._log
@property
def scheme(self):
return self._scheme
@property
def secure(self):
return self._secure
@property
def base_url(self):
return self._base_url
@classmethod
def get(cls, url, logger=None, **kwargs):
# Handle URL substitution etc before locating the correct storage driver
url = cls._canonize_url(url)
# Get the credentials we should use for this url
base_url = cls._resolve_base_url(url)
instance_key = '%s_%s' % (base_url, threading.current_thread().ident or 0)
force_create = kwargs.pop('__force_create', False)
if (instance_key in cls._helpers) and (not force_create):
return cls._helpers[instance_key]
# Don't canonize URL since we already did it
try:
instance = cls(base_url=base_url, url=url, logger=logger, canonize_url=False, **kwargs)
except (StorageError, UsageError) as ex:
cls._get_logger().error(str(ex))
return None
except Exception as ex:
cls._get_logger().error("Failed creating storage object {} Reason: {}".format(
base_url or url, ex))
return None
cls._helpers[instance_key] = instance
return instance
    @classmethod
    def get_local_copy(cls, remote_url):
        """
        Download a remote object into a unique temporary file and return its local path.

        :param remote_url: full storage URL of the object
        :return: local file path, or None if no helper matches the URL
        """
        helper = cls.get(remote_url)
        if not helper:
            return None
        # keep the remote file name ('.'-prefixed) so the original suffix survives
        file_name = '.' + remote_url.split('/')[-1].split(os.path.sep)[-1]
        # NOTE(review): mktemp only reserves a name, not the file itself - race-prone;
        # consider NamedTemporaryFile(delete=False) if this ever handles untrusted dirs
        local_path = mktemp(suffix=file_name)
        return helper.download_to_file(remote_url, local_path)
def __init__(self, base_url, url, key=None, secret=None, region=None, verbose=False, logger=None, retries=5,
**kwargs):
level = config.get('storage.log.level', None)
if level:
try:
self._get_logger().setLevel(level)
except (TypeError, ValueError):
self._get_logger().error('invalid storage log level in configuration: %s' % level)
self._log = logger or self._get_logger()
self._verbose = verbose
self._retries = retries
self._extra = {}
self._base_url = base_url
self._secure = True
self._driver = None
self._container = None
self._conf = None
if kwargs.get('canonize_url', True):
url = self._canonize_url(url)
parsed = urlparse(url)
self._scheme = parsed.scheme
if self._scheme == _AzureBlobServiceStorageDriver.scheme:
self._conf = copy(self._azure_configurations.get_config_by_uri(url))
if self._conf is None:
raise StorageError("Missing Azure Blob Storage configuration for {}".format(url))
if not self._conf.account_name or not self._conf.account_key:
raise StorageError(
"Missing account name or key for Azure Blob Storage access for {}".format(base_url)
)
self._driver = _AzureBlobServiceStorageDriver()
self._container = self._driver.get_container(config=self._conf)
elif self._scheme == _Boto3Driver.scheme:
self._conf = copy(self._s3_configurations.get_config_by_uri(url))
self._secure = self._conf.secure
final_region = region if region else self._conf.region
if not final_region:
final_region = None
self._conf.update(
key=key or self._conf.key,
secret=secret or self._conf.secret,
multipart=self._conf.multipart,
region=final_region,
use_credentials_chain=self._conf.use_credentials_chain
)
if not self._conf.use_credentials_chain:
if not self._conf.key or not self._conf.secret:
raise ValueError(
"Missing key and secret for S3 storage access (%s)" % base_url
)
self._driver = _Boto3Driver()
self._container = self._driver.get_container(container_name=self._base_url, retries=retries,
config=self._conf)
elif self._scheme == _GoogleCloudStorageDriver.scheme:
self._conf = copy(self._gs_configurations.get_config_by_uri(url))
self._driver = _GoogleCloudStorageDriver()
self._container = self._driver.get_container(
container_name=self._base_url,
config=self._conf
)
elif self._scheme in _HttpDriver.schemes:
self._driver = _HttpDriver(retries=retries)
self._container = self._driver.get_container(container_name=self._base_url)
else:
if self._scheme == 'file':
url = parsed.path
url = url.replace("\\", "/")
driver_uri = url2pathname(url)
path_driver_uri = Path(driver_uri)
h_driver_uri.root))
self._container = None
    @classmethod
    def terminate_uploads(cls, force=True, timeout=2.0):
        """
        Stop pending asynchronous uploads.

        :param force: if True, return immediately without signalling or joining
            the upload threads.
            NOTE(review): this looks inverted - "force" terminating would normally be
            the aggressive path; confirm against callers before changing.
        :param timeout: total time budget (seconds) shared across all thread joins
        """
        if force:
            return
        # signal upload workers to stop, then give them a bounded time to finish
        cls._terminate.set()
        remaining_timeout = timeout
        for thread in cls._async_upload_threads:
            t = time()
            # noinspection PyBroadException
            try:
                thread.join(timeout=remaining_timeout)
            except Exception:
                pass
            # shrink the budget by however long this join actually took
            remaining_timeout -= (time() - t)
@classmethod
def get_configuration(cls, bucket_config):
return cls._s3_configurations.get_config_by_bucket(bucket_config.bucket, bucket_config.host)
@classmethod
def add_configuration(cls, bucket_config, log=None, _test_config=True):
use_existing = not bucket_config.is_valid()
existing = cls.get_configuration(bucket_config)
configs = cls._s3_configurations
if not use_existing:
if _test_config:
_Boto3Driver._test_bucket_config(bucket_config, log)
if existing:
if log:
log.warning('Overriding existing configuration for %s/%s'
% (existing.host or 'AWS', existing.bucket))
configs.remove_config(existing)
else:
good_config = False
if existing:
if log:
log.info('Using existing credentials for bucket %s/%s'
% (bucket_config.host or 'AWS', bucket_config.bucket))
good_config = _Boto3Driver._test_bucket_config(existing, log, raise_on_error=False)
if not good_config:
configs.update_config_with_defaults(bucket_config)
if log:
log.info('Using global credentials for bucket %s/%s'
% (bucket_config.host or 'AWS', bucket_config.bucket))
if _test_config:
_Boto3Driver._test_bucket_config(bucket_config, log)
else:
return
configs.add_config(bucket_config)
@classmethod
def add_path_substitution(
cls,
registered_prefix,
local_prefix,
replace_windows_sep=False,
replace_linux_sep=False,
):
if not registered_prefix or not local_prefix:
raise UsageError("Path substitution prefixes must be non empty strings")
if replace_windows_sep and replace_linux_sep:
raise UsageError("Only one of replace_windows_sep and replace_linux_sep may be set.")
rule = cls._PathSubstitutionRule(
registered_prefix=registered_prefix,
local_prefix=local_prefix,
replace_windows_sep=replace_windows_sep,
replace_linux_sep=replace_linux_sep,
)
cls._path_substitutions.append(rule)
@classmethod
def clear_path_substitutions(cls):
cls._path_substitutions = list()
def verify_upload(self, folder_uri='', raise_on_error=True, log_on_error=True):
folder_uri = self._canonize_url(folder_uri)
folder_uri = self.conform_url(folder_uri, self._base_url)
test_path = self._normalize_object_name(folder_uri)
if self._scheme == _Boto3Driver.scheme:
_Boto3Driver._test_bucket_config(
self._conf,
self._log,
test_path=test_path,
raise_on_error=raise_on_error,
log_on_error=log_on_error,
)
elif self._scheme == _GoogleCloudStorageDriver.scheme:
self._driver.test_upload(test_path, self._conf)
elif self._scheme == 'file':
Path(test_path).mkdir(parents=True, exist_ok=True)
Path(test_path).touch(exist_ok=True)
return folder_uri
def upload_from_stream(self, stream, dest_path, extra=None, retries=1):
dest_path = self._canonize_url(dest_path)
object_name = self._normalize_object_name(dest_path)
extra = extra.copy() if extra else {}
extra.update(self._extra)
last_ex = None
cb = UploadProgressReport.from_stream(stream, object_name, self._verbose, self._log)
for i in range(max(1, retries)):
try:
self._driver.upload_object_via_stream(
iterator=stream,
container=self._container,
object_name=object_name,
callback=cb,
extra=extra)
last_ex = None
break
except Exception as ex:
last_ex = ex
try:
stream.seek(0)
except Exception:
pass
if last_ex:
raise last_ex
if self.scheme in _HttpDriver.schemes:
dest_path = quote_url(dest_path)
return dest_path
def upload(self, src_path, dest_path=None, extra=None, async_enable=False, cb=None, retries=1):
if not dest_path:
dest_path = os.path.basename(src_path)
dest_path = self._canonize_url(dest_path)
if cb and self.scheme in _HttpDriver.schemes:
a_cb = cb
def callback(a_path):
return a_cb(quote_url(a_path) if a_path else a_path)
cb = callback
if async_enable:
data = self._UploadData(src_path=src_path, dest_path=dest_path, extra=extra, callback=cb, retries=retries)
StorageHelper._initialize_upload_pool()
return StorageHelper._upload_pool.apply_async(self._do_async_upload, args=(data,))
else:
res = self._do_upload(src_path, dest_path, extra, cb, verbose=False, retries=retries)
if res:
res = quote_url(res)
return res
def list(self, prefix=None):
if prefix:
if prefix.startswith(self._base_url):
prefix = prefix[len(self.base_url):].lstrip("/")
try:
res = self._driver.list_container_objects(self._container, ex_prefix=prefix)
except TypeError:
res = self._driver.list_container_objects(self._container)
return [
obj.name
for obj in res if
obj.name.startswith(prefix) and obj.name != prefix
]
else:
return [obj.name for obj in self._driver.list_container_objects(self._container)]
def download_to_file(self, remote_path, local_path, overwrite_existing=False, delete_on_failure=True, verbose=None):
def next_chunk(astream):
if isinstance(astream, binary_type):
chunk = astream
astream = None
elif astream:
try:
chunk = next(astream)
except StopIteration:
chunk = None
else:
chunk = None
return chunk, astream
remote_path = self._canonize_url(remote_path)
verbose = self._verbose if verbose is None else verbose
direct_access_path = self._driver.get_direct_access(remote_path)
if direct_access_path:
return direct_access_path
temp_local_path = None
try:
if verbose:
self._log.info('Start downloading from %s' % remote_path)
if not overwrite_existing and Path(local_path).is_file():
self._log.warning(
'File {} already exists, no need to download, thread id = {}'.format(
local_path,
threading.current_thread().ident,
),
)
return local_path
temp_local_path = '{}_{}{}'.format(local_path, time(), self._temp_download_suffix)
obj = self._get_object(remote_path)
if not obj:
return None
# object size in bytes
total_size_mb = -1
dl_total_mb = 0.
download_reported = False
# chunks size is ignored and always 5Mb
chunk_size_mb = 5
# make sure we have the destination folder
# noinspection PyBroadException
Path(temp_local_path).parent.mkdir(parents=True, exist_ok=True)
# try to get file size
try:
if isinstance(self._driver, _HttpDriver) and obj:
obj = self._driver._get_download_object(obj)
total_size_mb = float(obj.headers.get('Content-Length', 0)) / (1024 * 1024)
elif hasattr(obj, 'size'):
size = obj.size
# Google storage has the option to reload the object to get the size
if size is None and hasattr(obj, 'reload'):
obj.reload()
size = obj.size
total_size_mb = 0 if size is None else float(size) / (1024 * 1024)
elif hasattr(obj, 'content_length'):
total_size_mb = float(obj.content_length) / (1024 * 1024)
except (ValueError, AttributeError, KeyError):
pass
# if driver supports download with callback, use it (it might be faster)
if hasattr(self._driver, 'download_object'):
# callback
cb = DownloadProgressReport(total_size_mb, verbose, remote_path, self._log)
self._driver.download_object(obj, temp_local_path, callback=cb)
download_reported = bool(cb.last_reported)
dl_total_mb = cb.current_status_mb
else:
stream = self._driver.download_object_as_stream(obj, chunk_size_mb * 1024 * 1024)
if stream is None:
raise ValueError('Could not download %s' % remote_path)
with open(temp_local_path, 'wb') as fd:
data, stream = next_chunk(stream)
while data:
fd.write(data)
data, stream = next_chunk(stream)
if Path(temp_local_path).stat().st_size <= 0:
raise Exception('downloaded a 0-sized file')
# if we are on windows, we need to remove the target file before renaming
# otherwise posix rename will overwrite the target
if os.name != 'posix':
try:
os.remove(local_path)
except Exception:
pass
# rename temp file to local_file
# noinspection PyBroadException
try:
os.rename(temp_local_path, local_path)
except Exception:
# noinspection PyBroadException
try:
os.unlink(temp_local_path)
except Exception:
pass
# file was downloaded by a parallel process, check we have the final output and delete the partial copy
path_local_path = Path(local_path)
if not path_local_path.is_file() or path_local_path.stat().st_size <= 0:
raise Exception('Failed renaming partial file, downloaded file exists and a 0-sized file')
# report download if we are on the second chunk
if verbose or download_reported:
self._log.info(
'Downloaded %.2f MB successfully from %s , saved to %s' % (dl_total_mb, remote_path, local_path))
return local_path
except DownloadError:
raise
except Exception as e:
self._log.error("Could not download {} , err: {} ".format(remote_path, e))
if delete_on_failure:
# noinspection PyBroadException
try:
if temp_local_path:
os.remove(temp_local_path)
except Exception:
pass
return None
def download_as_stream(self, remote_path, chunk_size=None):
remote_path = self._canonize_url(remote_path)
try:
obj = self._get_object(remote_path)
return self._driver.download_object_as_stream(
obj, chunk_size=chunk_size, verbose=self._verbose, log=self.log
)
except DownloadError:
raise
except Exception as e:
self._log.error("Could not download file : %s, err:%s " % (remote_path, str(e)))
return None
def download_as_nparray(self, remote_path, chunk_size=None):
try:
stream = self.download_as_stream(remote_path, chunk_size)
if stream is None:
return
# TODO: ugly py3 hack, please remove ASAP
if six.PY3 and not isinstance(stream, GeneratorType):
import numpy as np
return np.frombuffer(stream, dtype=np.uint8)
else:
import numpy as np
return np.asarray(bytearray(b''.join(stream)), dtype=np.uint8)
except Exception as e:
self._log.error("Could not download file : %s, err:%s " % (remote_path, str(e)))
def delete(self, path):
return self._driver.delete_object(self._get_object(path))
    def check_write_permissions(self, dest_path=None):
        """
        Verify the helper can write to the storage by round-tripping a tiny test file.

        :param dest_path: optional base path to probe (defaults to the helper's base URL)
        :return: True when write access is confirmed (http/s is assumed writable)
        :raises ValueError: when the test upload or delete fails
        """
        # create a temporary file, then delete it
        base_url = dest_path or self._base_url
        dest_path = base_url + '/.clearml.test'
        # do not check http/s connection permissions
        if dest_path.startswith('http'):
            return True
        try:
            self.upload_from_stream(stream=six.BytesIO(b'clearml'), dest_path=dest_path)
            self.delete(path=dest_path)
        except Exception:
            raise ValueError('Insufficient permissions for {}'.format(base_url))
        return True
@classmethod
def download_from_url(cls, remote_url, local_path, overwrite_existing=False):
    """Download *remote_url* into *local_path* using a matching storage helper.

    :return: the downloaded local path on success, None if no helper matches.
    """
    storage_helper = cls.get(remote_url)
    if storage_helper:
        return storage_helper.download_to_file(
            remote_url, local_path, overwrite_existing=overwrite_existing)
    return None
@classmethod
def _canonize_url(cls, url):
    """Return *url* after applying the registered path substitution rules."""
    return cls._apply_url_substitutions(url)
@classmethod
def _apply_url_substitutions(cls, url):
    """Rewrite *url* according to the first matching path substitution rule.

    A rule maps a registered URL prefix onto a local prefix and may
    optionally normalize path separators past the substituted prefix.
    """
    def replace_separator(_url, where, sep):
        # replace `sep` with the OS separator, but only past position `where`
        return _url[:where] + _url[where:].replace(sep, os.sep)

    # plain iteration - the previous enumerate() index was never used
    for rule in cls._path_substitutions:
        if url.startswith(rule.registered_prefix):
            url = url.replace(
                rule.registered_prefix,
                rule.local_prefix,
                1,  # count. str.replace() does not support keyword arguments
            )
            if rule.replace_windows_sep:
                url = replace_separator(url, len(rule.local_prefix), '\\')
            if rule.replace_linux_sep:
                url = replace_separator(url, len(rule.local_prefix), '/')
            break
    return url
@classmethod
def _resolve_base_url(cls, base_url):
    """Resolve *base_url* to its scheme-level base (bucket/container root).

    For s3/gs/azure the bucket or container is resolved from the matching
    configuration; plain http(s) and local paths map to their scheme prefix.

    :raises StorageError: when an azure URL has no matching configuration.
    """
    parsed = urlparse(base_url)
    if parsed.scheme == _Boto3Driver.scheme:
        conf = cls._s3_configurations.get_config_by_uri(base_url)
        bucket = conf.bucket
        if not bucket:
            # no bucket configured - take the first path segment as the bucket
            parts = Path(parsed.path.strip('/')).parts
            if parts:
                bucket = parts[0]
        return '/'.join(x for x in ('s3:/', conf.host, bucket) if x)
    elif parsed.scheme == _AzureBlobServiceStorageDriver.scheme:
        conf = cls._azure_configurations.get_config_by_uri(base_url)
        if not conf:
            raise StorageError("Can't find azure configuration for {}".format(base_url))
        return str(furl(base_url).set(path=conf.container_name))
    elif parsed.scheme == _GoogleCloudStorageDriver.scheme:
        conf = cls._gs_configurations.get_config_by_uri(base_url)
        return str(furl(scheme=parsed.scheme, netloc=conf.bucket))
    elif parsed.scheme == 'http':
        return 'http://'
    elif parsed.scheme == 'https':
        return 'https://'
    else:
        # anything else (including no scheme) is treated as a local file path
        return 'file://'
@classmethod
def conform_url(cls, folder_uri, base_url=None):
    """Make sure *folder_uri* starts with its resolved base URL.

    Local paths are made absolute and prefixed with ``file://``; any other
    URI that does not already start with the base URL is rejected.

    :raises ValueError: if a non-file URI does not start with the base URL.
    :return: the conformed URI (unchanged when empty or already conforming).
    """
    if not folder_uri:
        return folder_uri
    _base_url = cls._resolve_base_url(folder_uri) if not base_url else base_url

    if not folder_uri.startswith(_base_url):
        prev_folder_uri = folder_uri
        if _base_url == 'file://':
            folder_uri = str(Path(folder_uri).absolute())
            if folder_uri.startswith('/'):
                folder_uri = _base_url + folder_uri
            else:
                # e.g. Windows drive paths - keep a separator between base and path
                folder_uri = '/'.join((_base_url, folder_uri))
            cls._get_logger().debug('Upload destination {} amended to {} for registration purposes'.format(
                prev_folder_uri, folder_uri))
        else:
            raise ValueError('folder_uri: {} does not start with base url: {}'.format(folder_uri, _base_url))
    return folder_uri
def _absolute_object_name(self, path):
    """Return *path* prefixed with the helper's base URL (when not already)."""
    if not path.startswith(self.base_url):
        # NOTE(review): the triple slash separator looks deliberate (later
        # stripped by _normalize_object_name's lstrip) - confirm before changing
        return self.base_url.rstrip('/') + '///' + path.lstrip('/')
    return path
def _normalize_object_name(self, path):
    """Strip the helper's base URL (and scheme-specific leading slashes)
    from *path*, yielding the driver-level object name."""
    base = self.base_url
    if path.startswith(base):
        path = path[len(base):]
        # on Windows a leading slash left over after the base URL is dropped
        if os.name == 'nt' and path.startswith('/'):
            path = path[1:]
    cloud_schemes = (
        _Boto3Driver.scheme,
        _GoogleCloudStorageDriver.scheme,
        _AzureBlobServiceStorageDriver.scheme,
    )
    if self.scheme in cloud_schemes:
        path = path.lstrip('/')
    return path
def _do_async_upload(self, data):
    """Worker entry point for asynchronous uploads (*data* is an _UploadData)."""
    assert isinstance(data, self._UploadData)
    return self._do_upload(
        data.src_path,
        data.dest_path,
        extra=data.extra,
        cb=data.callback,
        verbose=True,
        retries=data.retries,
    )
def _upload_from_file(self, local_path, dest_path, extra=None):
    """Upload the file at *local_path* to *dest_path*.

    Uses the driver's file-based upload API when available, otherwise falls
    back to a stream upload.
    """
    if not hasattr(self._driver, 'upload_object'):
        # driver exposes no file-path API - stream the file instead
        with open(local_path, 'rb') as stream:
            return self.upload_from_stream(stream=stream, dest_path=dest_path, extra=extra)
    object_name = self._normalize_object_name(dest_path)
    merged_extra = extra.copy() if extra else {}
    merged_extra.update(self._extra)
    progress_cb = UploadProgressReport.from_file(local_path, self._verbose, self._log)
    return self._driver.upload_object(
        file_path=local_path,
        container=self._container,
        object_name=object_name,
        callback=progress_cb,
        extra=merged_extra)
def _do_upload(self, src_path, dest_path, extra=None, cb=None, verbose=False, retries=1):
    """Upload *src_path* to *dest_path*, retrying on failure.

    Callback protocol: ``cb(None)`` before starting, ``cb(False)`` on final
    failure, ``cb(dest_path)`` on success.

    :raises Exception: re-raises the last upload error after retries run out.
    :return: *dest_path* on success.
    """
    object_name = self._normalize_object_name(dest_path)
    if cb:
        try:
            cb(None)
        except Exception as e:
            self._log.error("Calling upload callback when starting upload: %s" % str(e))
    if verbose:
        msg = 'Starting upload: {} => {}{}'.format(
            src_path,
            (self._container.name if self._container.name.endswith('/') else self._container.name + '/')
            if self._container and self._container.name else '', object_name)
        # local destinations log quietly; remote destinations at info level
        if object_name.startswith('file://') or object_name.startswith('/'):
            self._log.debug(msg)
        else:
            self._log.info(msg)
    last_ex = None
    for i in range(max(1, retries)):
        try:
            if not self._upload_from_file(local_path=src_path, dest_path=dest_path, extra=extra):
                # a falsy result counts as a failed attempt, retry
                last_ex = ValueError("Upload failed")
                continue
            last_ex = None
            break
        except Exception as e:
            last_ex = e

    if last_ex:
        self._log.error("Exception encountered while uploading %s" % str(last_ex))
        if cb:
            try:
                cb(False)
            except Exception as e:
                self._log.warning("Exception on upload callback: %s" % str(e))
        raise last_ex

    if verbose:
        self._log.debug("Finished upload: %s => %s" % (src_path, object_name))
    if cb:
        try:
            cb(dest_path)
        except Exception as e:
            self._log.warning("Exception on upload callback: %s" % str(e))

    return dest_path
def _get_object(self, path):
    """Fetch the driver-level object for *path*.

    :raises DownloadError: on connection problems.
    :return: the driver object, or None when the lookup fails.
    """
    object_name = self._normalize_object_name(path)
    container_name = self._container.name if self._container else ''
    try:
        return self._driver.get_object(
            container_name=container_name, object_name=object_name)
    except ConnectionError:
        raise DownloadError
    except Exception as e:
        self.log.warning('Storage helper problem for {}: {}'.format(str(object_name), str(e)))
        return None
@staticmethod
def _initialize_upload_pool():
    """Lazily create the shared single-worker upload thread pool."""
    if StorageHelper._upload_pool:
        return
    StorageHelper._upload_pool = ThreadPool(processes=1)
@staticmethod
def close_async_threads():
    """Terminate the shared upload pool (best effort) and clear it."""
    pool, StorageHelper._upload_pool = StorageHelper._upload_pool, None
    if not pool:
        return
    # noinspection PyBroadException
    try:
        pool.terminate()
        pool.join()
    except Exception:
        pass
class _HttpDriver(_Driver):
    """Storage driver for plain http(s) destinations (e.g. the files server)."""

    # (connect, read) timeouts in seconds
    timeout = (5.0, 30.)
    # minimal assumed upload speed, used to scale the upload timeout
    min_kbps_speed = 50

    schemes = ('http', 'https')

    class _Container(object):
        """A base URL plus a retrying HTTP session (with optional auth headers)."""
        _default_backend_session = None
        _default_files_server_host = None

        def __init__(self, name, retries=5, **kwargs):
            self.name = name
            self.session = get_http_session_with_retry(total=retries, connect=retries, read=retries, redirect=retries)

        def get_headers(self, url):
            """Return auth headers when *url* targets the default files server,
            None otherwise."""
            if not self._default_backend_session:
                from ..backend_interface.base import InterfaceBase
                self._default_backend_session = InterfaceBase._get_default_session()
            if self._default_files_server_host is None:
                self._default_files_server_host = self._default_backend_session.get_files_server_host().rstrip('/')

            if url == self._default_files_server_host or url.startswith(self._default_files_server_host + '/'):
                return self._default_backend_session.add_auth_headers({})
            return None

    class _HttpSessionHandle(object):
        """Lazy object handle: URL plus download-mode metadata."""
        def __init__(self, url, is_stream, container_name, object_name):
            self.url, self.is_stream, self.container_name, self.object_name = \
                url, is_stream, container_name, object_name

    def __init__(self, retries=5):
        self._retries = retries
        self._containers = {}

    def get_container(self, container_name, config=None, **kwargs):
        """Return (and cache) a _Container for *container_name*."""
        if container_name not in self._containers:
            self._containers[container_name] = self._Container(name=container_name, retries=self._retries, **kwargs)
        return self._containers[container_name]

    def upload_object_via_stream(self, iterator, container, object_name, extra=None, callback=None, **kwargs):
        """POST *iterator* as a multipart upload; timeout scales with its size.

        :raises ValueError: on a non-OK HTTP response.
        """
        url = object_name[:object_name.index('/')]
        url_path = object_name[len(url) + 1:]
        full_url = container.name + url
        # when the stream is seekable, enlarge the timeout for large payloads
        timeout = self.timeout[-1]
        stream_size = 0
        if hasattr(iterator, 'tell') and hasattr(iterator, 'seek'):
            pos = iterator.tell()
            iterator.seek(0, 2)
            stream_size = iterator.tell() - pos
            iterator.seek(pos, 0)
            timeout = max(timeout, (stream_size / 1024) / float(self.min_kbps_speed))

        res = container.session.post(full_url, files={url_path: iterator}, timeout=timeout,
                                     headers=container.get_headers(full_url))
        if res.status_code != requests.codes.ok:
            raise ValueError('Failed uploading object %s (%d): %s' % (object_name, res.status_code, res.text))
        return res

    def list_container_objects(self, *args, **kwargs):
        raise NotImplementedError('List is not implemented for http protocol')

    def delete_object(self, obj, *args, **kwargs):
        """HTTP DELETE the object; returns True on success, False otherwise."""
        assert isinstance(obj, self._HttpSessionHandle)
        container = self._containers[obj.container_name]
        res = container.session.delete(obj.url, headers=container.get_headers(obj.url))
        if res.status_code != requests.codes.ok:
            self._get_logger().warning('Failed deleting object %s (%d): %s' % (
                obj.object_name, res.status_code, res.text))
            return False
        return True

    def get_object(self, container_name, object_name, *args, **kwargs):
        """Return a lazy handle - no request is made until download time."""
        is_stream = kwargs.get('stream', True)
        url = ''.join((container_name, object_name.lstrip('/')))
        return self._HttpSessionHandle(url, is_stream, container_name, object_name)

    def _get_download_object(self, obj):
        """Resolve a session handle into an actual HTTP response object.

        :raises ValueError: on a non-OK HTTP response.
        """
        if not isinstance(obj, self._HttpSessionHandle):
            return obj
        container = self._containers[obj.container_name]
        container.session.stream = obj.is_stream
        res = container.session.get(obj.url, timeout=self.timeout, headers=container.get_headers(obj.url))
        if res.status_code != requests.codes.ok:
            raise ValueError('Failed getting object %s (%d): %s' % (obj.object_name, res.status_code, res.text))
        return res

    def download_object_as_stream(self, obj, chunk_size=64 * 1024, **_):
        """Return an iterator over the object's content in *chunk_size* chunks."""
        obj = self._get_download_object(obj)
        return obj.iter_content(chunk_size=chunk_size)

    def download_object(self, obj, local_path, overwrite_existing=True, delete_on_failure=True, callback=None, **_):
        """Stream the object into *local_path*.

        :return: the number of bytes written, or None when skipped because the
            file exists and *overwrite_existing* is False.
        """
        obj = self._get_download_object(obj)
        p = Path(local_path)
        if not overwrite_existing and p.is_file():
            self.get_logger().warning('failed saving after download: overwrite=False and file exists (%s)' % str(p))
            return
        length = 0
        with p.open(mode='wb') as f:
            for chunk in obj.iter_content(chunk_size=5 * 1024 * 1024):
                # skip empty chunks
                if not chunk:
                    continue
                chunk_size = len(chunk)
                f.write(chunk)
                length += chunk_size
                if callback:
                    callback(chunk_size)
        return length

    def get_direct_access(self, remote_path, **_):
        # http objects are never directly accessible on the local filesystem
        return None

    def test_upload(self, test_path, config, **kwargs):
        return True

    def upload_object(self, file_path, container, object_name, extra, callback=None, **kwargs):
        """Upload the file at *file_path* by streaming it."""
        with open(file_path, 'rb') as stream:
            return self.upload_object_via_stream(iterator=stream, container=container,
                                                 object_name=object_name, extra=extra, callback=callback, **kwargs)
class _Stream(object):
    """Minimal file-like adapter bridging producer/consumer byte streams.

    Used either as a write target backed by a queue (upload side) or as a
    reader wrapping a chunk iterator (download side).
    """
    encoding = None
    mode = 'rw'
    name = ''
    newlines = '\n'
    softspace = False

    def __init__(self, input_iterator=None):
        self.closed = False
        self._buffer = Queue()  # chunks enqueued via write()
        self._input_iterator = input_iterator  # optional chunk source for reads
        self._leftover = None  # remainder kept from an oversized read() chunk

    def __iter__(self):
        return self

    def __next__(self):
        return self.next()

    def close(self):
        self.closed = True

    def flush(self):
        pass

    def fileno(self):
        # dummy descriptor - some consumers require fileno() to exist
        return 87

    def isatty(self):
        return False

    def next(self):
        """Return the next chunk, from the input iterator or the write queue.

        :raises StopIteration: once the stream is closed and fully drained.
        """
        while not self.closed or not self._buffer.empty():
            if self._input_iterator:
                try:
                    chunck = next(self._input_iterator)
                    return chunck
                except StopIteration:
                    self.closed = True
                    raise StopIteration()
                except Exception as ex:
                    _Driver.get_logger().error('Failed downloading: %s' % ex)
            else:
                # block briefly so writers get a chance to enqueue more data
                try:
                    return self._buffer.get(block=True, timeout=1.)
                except Empty:
                    pass

        raise StopIteration()

    def read(self, size=None):
        """Read up to *size* bytes (all remaining data when *size* is None).

        Keeps any surplus from an oversized chunk for the following call.
        """
        try:
            data = self.next() if self._leftover is None else self._leftover
        except StopIteration:
            return six.b('')

        self._leftover = None
        try:
            while size is None or not data or len(data) < size:
                chunk = self.next()
                if chunk is not None:
                    if data is not None:
                        data += chunk
                    else:
                        data = chunk
        except StopIteration:
            pass

        if size is not None and data and len(data) > size:
            self._leftover = data[size:]
            return data[:size]

        return data

    def readline(self, size=None):
        # NOTE(review): does not actually split on newlines - returns a chunk
        return self.read(size)

    def readlines(self, sizehint=None):
        pass

    def truncate(self, size=None):
        pass

    def write(self, bytes):
        self._buffer.put(bytes, block=True)

    def writelines(self, sequence):
        for s in sequence:
            self.write(s)
class _Boto3Driver(_Driver):
    """Storage driver for AWS S3 (and S3-compatible endpoints) using boto3."""
    _min_pool_connections = 512
    _max_multipart_concurrency = deferred_config('aws.boto3.max_multipart_concurrency', 16)
    _pool_connections = deferred_config('aws.boto3.pool_connections', 512)

    _stream_download_pool_connections = 128
    _stream_download_pool = None

    # shared cache of _Container objects, keyed by full container name
    _containers = {}

    scheme = 's3'
    scheme_prefix = str(furl(scheme=scheme, netloc=''))

    # bucket hosts whose get-location failure was already reported (log once)
    _bucket_location_failure_reported = set()

    class _Container(object):
        """A boto3 resource + bucket pair bound to one S3 configuration."""
        _creation_lock = threading.Lock()

        def __init__(self, name, cfg):
            try:
                import boto3
                import botocore.client
                from botocore.exceptions import ClientError
            except ImportError:
                raise UsageError(
                    'AWS S3 storage driver (boto3) not found. '
                    'Please install driver using: pip install \"boto3>=1.9\"'
                )

            # strip the 's3://' scheme prefix
            self.name = name[5:]
            endpoint = (('https://' if cfg.secure else 'http://') + cfg.host) if cfg.host else None

            # boto3 resource creation is serialized for safety
            with self._creation_lock:
                boto_kwargs = {
                    "endpoint_url": endpoint,
                    "use_ssl": cfg.secure,
                    "verify": cfg.verify,
                    "config": botocore.client.Config(
                        max_pool_connections=max(
                            _Boto3Driver._min_pool_connections,
                            _Boto3Driver._pool_connections)
                    )
                }
                if not cfg.use_credentials_chain:
                    # explicit credentials (otherwise boto3's default chain applies)
                    boto_kwargs["aws_access_key_id"] = cfg.key
                    boto_kwargs["aws_secret_access_key"] = cfg.secret

                self.resource = boto3.resource(
                    's3',
                    **boto_kwargs
                )

            self.config = cfg
            bucket_name = self.name[len(cfg.host) + 1:] if cfg.host else self.name
            self.bucket = self.resource.Bucket(bucket_name)

    @attrs
    class ListResult(object):
        # single listing entry; only the object key is exposed
        name = attrib(default=None)

    def __init__(self):
        pass

    def _get_stream_download_pool(self):
        """Lazily create the thread pool used for streaming downloads."""
        if self._stream_download_pool is None:
            self._stream_download_pool = ThreadPoolExecutor(max_workers=self._stream_download_pool_connections)
        return self._stream_download_pool

    def get_container(self, container_name, config=None, **kwargs):
        """Return (and cache) the _Container for *container_name*."""
        if container_name not in self._containers:
            self._containers[container_name] = self._Container(name=container_name, cfg=config)
        self._containers[container_name].config.retries = kwargs.get('retries', 5)
        return self._containers[container_name]

    def upload_object_via_stream(self, iterator, container, object_name, callback=None, extra=None, **kwargs):
        """Upload chunks from *iterator*; True on success, False on error."""
        import boto3.s3.transfer
        stream = _Stream(iterator)
        try:
            container.bucket.upload_fileobj(stream, object_name, Config=boto3.s3.transfer.TransferConfig(
                use_threads=container.config.multipart,
                max_concurrency=self._max_multipart_concurrency if container.config.multipart else 1,
                num_download_attempts=container.config.retries),
                Callback=callback,
            )
        except Exception as ex:
            self.get_logger().error('Failed uploading: %s' % ex)
            return False
        return True

    def upload_object(self, file_path, container, object_name, callback=None, extra=None, **kwargs):
        """Upload the file at *file_path*; True on success, False on error."""
        import boto3.s3.transfer
        try:
            container.bucket.upload_file(file_path, object_name, Config=boto3.s3.transfer.TransferConfig(
                use_threads=container.config.multipart,
                max_concurrency=self._max_multipart_concurrency if container.config.multipart else 1,
                num_download_attempts=container.config.retries),
                Callback=callback)
        except Exception as ex:
            self.get_logger().error('Failed uploading: %s' % ex)
            return False
        return True

    def list_container_objects(self, container, ex_prefix=None, **kwargs):
        """Yield a ListResult per object in the bucket (optionally under a prefix)."""
        if ex_prefix:
            res = container.bucket.objects.filter(Prefix=ex_prefix)
        else:
            res = container.bucket.objects.all()
        for res in res:
            yield self.ListResult(name=res.key)

    def delete_object(self, object, **kwargs):
        """Delete *object*; verify deletion and return True when it is gone."""
        from botocore.exceptions import ClientError
        object.delete()
        try:
            # Try loading the file to verify deletion
            object.load()
            return False
        except ClientError as e:
            return int(e.response['Error']['Code']) == 404

    def get_object(self, container_name, object_name, *args, **kwargs):
        """Return a boto3 Object, annotated with its full container name."""
        full_container_name = 's3://' + container_name
        container = self._containers[full_container_name]
        obj = container.resource.Object(container.bucket.name, object_name)
        obj.container_name = full_container_name
        return obj

    def download_object_as_stream(self, obj, chunk_size=64 * 1024, verbose=None, log=None, **_):
        """Start an asynchronous download into a _Stream and return the stream."""
        def async_download(a_obj, a_stream, cb, cfg):
            try:
                a_obj.download_fileobj(a_stream, Callback=cb, Config=cfg)
            except Exception as ex:
                (log or self.get_logger()).error('Failed downloading: %s' % ex)
            a_stream.close()

        import boto3.s3.transfer
        # return iterable object
        stream = _Stream()
        container = self._containers[obj.container_name]
        config = boto3.s3.transfer.TransferConfig(
            use_threads=container.config.multipart,
            max_concurrency=self._max_multipart_concurrency if container.config.multipart else 1,
            num_download_attempts=container.config.retries)
        total_size_mb = obj.content_length / (1024. * 1024.)
        remote_path = os.path.join(obj.container_name, obj.key)
        cb = DownloadProgressReport(total_size_mb, verbose, remote_path, log)
        self._get_stream_download_pool().submit(async_download, obj, stream, cb, config)

        return stream

    def download_object(self, obj, local_path, overwrite_existing=True, delete_on_failure=True, callback=None, **_):
        """Download *obj* into *local_path* (skipped when the file exists and
        *overwrite_existing* is False)."""
        import boto3.s3.transfer
        p = Path(local_path)
        if not overwrite_existing and p.is_file():
            self.get_logger().warning('failed saving after download: overwrite=False and file exists (%s)' % str(p))
            return
        container = self._containers[obj.container_name]
        obj.download_file(str(p),
                          Callback=callback,
                          Config=boto3.s3.transfer.TransferConfig(
                              use_threads=container.config.multipart,
                              max_concurrency=self._max_multipart_concurrency if container.config.multipart else 1,
                              num_download_attempts=container.config.retries))

    @classmethod
    def _test_bucket_config(cls, conf, log, test_path='', raise_on_error=True, log_on_error=True):
        """Verify the bucket in *conf* is writable and its region matches.

        Uploads a small test object and compares the detected bucket region
        against the configured one.

        :return: True when the bucket is accessible and the region agrees;
            False (or raises, depending on *raise_on_error*) otherwise.
        """
        try:
            import boto3
            from botocore.exceptions import ClientError
        except ImportError:
            return False

        if not conf.bucket:
            return False
        try:
            if not conf.is_valid():
                raise Exception('Missing credentials')

            fullname = furl(conf.bucket).add(path=test_path).add(path='%s-upload_test' % cls.__module__)
            bucket_name = str(fullname.path.segments[0])
            filename = str(furl(path=fullname.path.segments[1:]))
            data = {
                'user': getpass.getuser(),
                'machine': gethostname(),
                'time': datetime.utcnow().isoformat()
            }

            boto_session = boto3.Session(conf.key, conf.secret)
            boto_resource = boto_session.resource('s3', conf.region)
            bucket = boto_resource.Bucket(bucket_name)
            bucket.put_object(Key=filename, Body=six.b(json.dumps(data)))

            region = cls._get_bucket_region(conf=conf, log=log, report_info=True)

            # us-east-1 is the implicit default region when none is configured
            if region and ((conf.region and region != conf.region) or (not conf.region and region != 'us-east-1')):
                msg = "incorrect region specified for bucket %s (detected region %s)" % (conf.bucket, region)
            else:
                return True

        except ClientError as ex:
            msg = ex.response['Error']['Message']
            if log_on_error and log:
                log.error(msg)

            if raise_on_error:
                raise

        except Exception as ex:
            msg = str(ex)
            if log_on_error and log:
                log.error(msg)

            if raise_on_error:
                raise

        msg = ("Failed testing access to bucket %s: " % conf.bucket) + msg

        if log_on_error and log:
            log.error(msg)

        if raise_on_error:
            raise StorageError(msg)

        return False

    @classmethod
    def _get_bucket_region(cls, conf, log=None, report_info=False):
        """Query S3 for the bucket's region; None when it cannot be determined.

        Failures are reported at most once per bucket host.
        """
        import boto3
        from botocore.exceptions import ClientError

        if not conf.bucket:
            return None

        def report(msg):
            if log and conf.get_bucket_host() not in cls._bucket_location_failure_reported:
                if report_info:
                    log.debug(msg)
                else:
                    log.warning(msg)
                cls._bucket_location_failure_reported.add(conf.get_bucket_host())

        try:
            boto_session = boto3.Session(conf.key, conf.secret)
            boto_resource = boto_session.resource('s3')
            return boto_resource.meta.client.get_bucket_location(Bucket=conf.bucket)["LocationConstraint"]

        except ClientError as ex:
            report("Failed getting bucket location (region) for bucket "
                   "%s: %s (%s, access_key=%s). Default region will be used. "
                   "This is normal if you do not have GET_BUCKET_LOCATION permission"
                   % (conf.bucket, ex.response['Error']['Message'], ex.response['Error']['Code'], conf.key))
        except Exception as ex:
            report("Failed getting bucket location (region) for bucket %s: %s. Default region will be used."
                   % (conf.bucket, str(ex)))

        return None

    def get_direct_access(self, remote_path, **_):
        # s3 objects are never directly accessible on the local filesystem
        return None

    def test_upload(self, test_path, config, **_):
        return True
class _GoogleCloudStorageDriver(_Driver):
    """Storage driver for Google Cloud Storage buckets."""
    _stream_download_pool_connections = 128
    _stream_download_pool = None

    # shared cache of _Container objects, keyed by full container name
    _containers = {}

    scheme = 'gs'
    scheme_prefix = str(furl(scheme=scheme, netloc=''))

    class _Container(object):
        """A google-cloud-storage client + bucket bound to one configuration."""
        def __init__(self, name, cfg):
            try:
                from google.cloud import storage
                from google.oauth2 import service_account
            except ImportError:
                raise UsageError(
                    'Google cloud driver not found. '
                    'Please install driver using: pip install \"google-cloud-storage>=1.13.2\"'
                )

            # strip the 'gs://' scheme prefix
            self.name = name[len(_GoogleCloudStorageDriver.scheme_prefix):]

            if cfg.credentials_json:
                credentials = service_account.Credentials.from_service_account_file(cfg.credentials_json)
            else:
                # fall back to application-default credentials
                credentials = None

            self.client = storage.Client(project=cfg.project, credentials=credentials)
            # tune the underlying HTTP connection pool when configured
            for adapter in self.client._http.adapters.values():
                if cfg.pool_connections:
                    adapter._pool_connections = cfg.pool_connections
                if cfg.pool_maxsize:
                    adapter._pool_maxsize = cfg.pool_maxsize

            self.config = cfg
            self.bucket = self.client.bucket(self.name)

    def _get_stream_download_pool(self):
        """Lazily create the thread pool used for streaming downloads."""
        if self._stream_download_pool is None:
            self._stream_download_pool = ThreadPoolExecutor(max_workers=self._stream_download_pool_connections)
        return self._stream_download_pool

    def get_container(self, container_name, config=None, **kwargs):
        """Return (and cache) the _Container for *container_name*."""
        if container_name not in self._containers:
            self._containers[container_name] = self._Container(name=container_name, cfg=config)
        self._containers[container_name].config.retries = kwargs.get('retries', 5)
        return self._containers[container_name]

    def upload_object_via_stream(self, iterator, container, object_name, extra=None, **kwargs):
        """Upload from a file-like *iterator*; True on success, False on error."""
        try:
            blob = container.bucket.blob(object_name)
            blob.upload_from_file(iterator)
        except Exception as ex:
            self.get_logger().error('Failed uploading: %s' % ex)
            return False
        return True

    def upload_object(self, file_path, container, object_name, extra=None, **kwargs):
        """Upload the file at *file_path*; True on success, False on error."""
        try:
            blob = container.bucket.blob(object_name)
            blob.upload_from_filename(file_path)
        except Exception as ex:
            self.get_logger().error('Failed uploading: %s' % ex)
            return False
        return True

    def list_container_objects(self, container, **kwargs):
        """Return a list of all blobs in the container's bucket."""
        return list(container.bucket.list_blobs())

    def delete_object(self, object, **kwargs):
        """Delete *object*; return True only when it no longer exists."""
        try:
            object.delete()
        except Exception as ex:
            try:
                from google.cloud.exceptions import NotFound
                if isinstance(ex, NotFound):
                    return False
            except ImportError:
                pass
            name = getattr(object, "name", "")
            self.get_logger().warning("Failed deleting object {}: {}".format(name, ex))
            return False

        return not object.exists()

    def get_object(self, container_name, object_name, *args, **kwargs):
        """Return a blob handle, annotated with its full container name."""
        full_container_name = str(furl(scheme=self.scheme, netloc=container_name))
        container = self._containers[full_container_name]
        obj = container.bucket.blob(object_name)
        obj.container_name = full_container_name
        return obj

    def download_object_as_stream(self, obj, chunk_size=256 * 1024, **_):
        """Streaming download is not supported for google storage.

        :raises NotImplementedError: always.
        """
        # the previous async-download body below this raise was unreachable
        # dead code and has been removed
        raise NotImplementedError('Unsupported for google storage')

    def download_object(self, obj, local_path, overwrite_existing=True, delete_on_failure=True, callback=None, **_):
        """Download *obj* into *local_path* (skipped when the file exists and
        *overwrite_existing* is False)."""
        p = Path(local_path)
        if not overwrite_existing and p.is_file():
            self.get_logger().warning('failed saving after download: overwrite=False and file exists (%s)' % str(p))
            return
        obj.download_to_filename(str(p))

    def test_upload(self, test_path, config, **_):
        """Check read/update IAM permissions on the configured bucket (or blob)."""
        bucket_url = str(furl(scheme=self.scheme, netloc=config.bucket, path=config.subdir))
        bucket = self.get_container(container_name=bucket_url, config=config).bucket

        test_obj = bucket

        if test_path:
            if not test_path.endswith('/'):
                test_path += '/'

            blob = bucket.blob(test_path)

            if blob.exists():
                test_obj = blob

        permissions_to_test = ('storage.objects.get', 'storage.objects.update')
        return set(test_obj.test_iam_permissions(permissions_to_test)) == set(permissions_to_test)

    def get_direct_access(self, remote_path, **_):
        # gs objects are never directly accessible on the local filesystem
        return None
class _AzureBlobServiceStorageDriver(_Driver):
    """Storage driver for Azure Blob Storage (azure-storage-blob <= 2.1.0 API)."""
    scheme = 'azure'

    # shared cache of _Container objects, keyed by container name
    _containers = {}

    class _Container(object):
        """A BlockBlobService bound to one account configuration."""
        def __init__(self, name, config):
            try:
                from azure.common import AzureHttpError  # noqa: F401
                from azure.storage.blob import BlockBlobService
            except ImportError:
                raise UsageError(
                    'Azure blob storage driver not found. '
                    'Please install driver using: pip install \"azure.storage.blob<=2.1.0\"'
                )
            self.name = name
            self.config = config
            self.blob_service = BlockBlobService(
                account_name=config.account_name,
                account_key=config.account_key,
            )

    @attrs
    class _Object(object):
        # blob handle: owning container, blob name, and size in bytes
        container = attrib()
        blob_name = attrib()
        content_length = attrib()

    def get_container(self, container_name=None, config=None, **kwargs):
        """Return (and cache) the _Container for *container_name* (falls back
        to the configured default container name)."""
        container_name = container_name or config.container_name
        if container_name not in self._containers:
            self._containers[container_name] = self._Container(name=container_name, config=config)
        # self._containers[container_name].config.retries = kwargs.get('retries', 5)
        return self._containers[container_name]

    def upload_object_via_stream(self, iterator, container, object_name, callback=None, extra=None, **kwargs):
        """Upload the stream content as a block blob; True on success.

        NOTE(review): the computed blob_name is unused - the raw object_name
        is what actually gets uploaded; confirm this is intended.
        """
        from azure.common import AzureHttpError  # noqa
        blob_name = self._blob_name_from_object_path(object_name, container.name)  # noqa: F841
        try:
            container.blob_service.MAX_SINGLE_PUT_SIZE = 16 * 1024 * 1024
            container.blob_service.socket_timeout = (300, 2000)
            container.blob_service.create_blob_from_bytes(
                container.name,
                object_name,
                iterator.read() if hasattr(iterator, "read") else bytes(iterator),
                # timeout=300,
                max_connections=2,
                progress_callback=callback,
            )
            return True
        except AzureHttpError as ex:
            self.get_logger().error('Failed uploading (Azure error): %s' % ex)
        except Exception as ex:
            self.get_logger().error('Failed uploading: %s' % ex)
        return False

    def upload_object(self, file_path, container, object_name, callback=None, extra=None, **kwargs):
        """Upload the file at *file_path* as a block blob.

        :return: True on success; None on failure (falsy for callers).
        """
        from azure.common import AzureHttpError  # noqa
        blob_name = self._blob_name_from_object_path(object_name, container.name)
        stream = None
        try:
            from azure.storage.blob import ContentSettings  # noqa
            from mimetypes import guess_type
            container.blob_service.MAX_SINGLE_PUT_SIZE = 16 * 1024 * 1024
            container.blob_service.socket_timeout = (300, 2000)
            container.blob_service.create_blob_from_path(
                container.name,
                blob_name,
                file_path,
                # timeout=300,
                max_connections=2,
                content_settings=ContentSettings(content_type=guess_type(file_path)),
                progress_callback=callback,
            )
            return True
        except AzureHttpError as ex:
            self.get_logger().error('Failed uploading (Azure error): %s' % ex)
        except Exception as ex:
            self.get_logger().error('Failed uploading: %s' % ex)
        finally:
            if stream:
                stream.close()

    def list_container_objects(self, container, ex_prefix=None, **kwargs):
        """Return the container's blobs (optionally filtered by prefix)."""
        return list(container.blob_service.list_blobs(container_name=container.name, prefix=ex_prefix))

    def delete_object(self, object, **kwargs):
        """Delete the blob; return True when it no longer exists."""
        container = object.container
        container.blob_service.delete_blob(
            container.name,
            object.blob_name,
        )
        return not object.container.blob_service.exists(container.name, object.blob_name)

    def get_object(self, container_name, object_name, *args, **kwargs):
        """Return an _Object for *object_name* (fetches the blob properties).

        :raises StorageError: if the container was never created via get_container.
        """
        container = self._containers.get(container_name)
        if not container:
            raise StorageError("Container `{}` not found for object {}".format(container_name, object_name))

        # blob_name = self._blob_name_from_object_path(object_name, container_name)
        blob = container.blob_service.get_blob_properties(container.name, object_name)

        return self._Object(container=container, blob_name=blob.name, content_length=blob.properties.content_length)

    def download_object_as_stream(self, obj, verbose, *_, **__):
        """Download the whole blob into memory and return its bytes content.

        NOTE(review): despite the name this is not lazy - the full blob is
        fetched before returning.
        """
        container = obj.container
        total_size_mb = obj.content_length / (1024. * 1024.)
        remote_path = os.path.join(
            "{}://".format(self.scheme),
            container.config.account_name,
            container.name,
            obj.blob_name
        )
        cb = DownloadProgressReport(total_size_mb, verbose, remote_path, self.get_logger())
        blob = container.blob_service.get_blob_to_bytes(
            container.name,
            obj.blob_name,
            progress_callback=cb,
        )
        return blob.content

    def download_object(self, obj, local_path, overwrite_existing=True, delete_on_failure=True, callback=None, **_):
        """Download the blob into *local_path*, reporting incremental progress
        through *callback* (skipped when the file exists and overwrite is off)."""
        p = Path(local_path)
        if not overwrite_existing and p.is_file():
            self.get_logger().warning('failed saving after download: overwrite=False and file exists (%s)' % str(p))
            return

        download_done = threading.Event()
        download_done.counter = 0

        def callback_func(current, total):
            if callback:
                # report only the newly downloaded delta
                chunk = current - download_done.counter
                download_done.counter += chunk
                callback(chunk)
            if current >= total:
                download_done.set()

        container = obj.container
        container.blob_service.MAX_SINGLE_GET_SIZE = 5 * 1024 * 1024
        _ = container.blob_service.get_blob_to_path(
            container.name,
            obj.blob_name,
            local_path,
            max_connections=10,
            progress_callback=callback_func,
        )
        download_done.wait()

    def test_upload(self, test_path, config, **_):
        """Check the configured container is reachable with the account key."""
        container = self.get_container(config=config)
        try:
            container.blob_service.get_container_properties(container.name)
        except Exception:
            return False
        else:
            # Using the account Key, we can always upload...
            return True

    @classmethod
    def _blob_name_from_object_path(cls, name, container_name):
        """Extract the blob name from *name*.

        NOTE(review): when *name* is an ``azure://`` URL a (container,
        blob-path) tuple is returned, otherwise *name* is returned unchanged -
        callers appear to rely on the non-URL path; confirm before changing.

        :raises StorageError: on scheme/container mismatch or a missing path.
        """
        scheme = urlparse(name).scheme
        if scheme:
            if scheme != cls.scheme:
                raise StorageError(
                    "When using a URL, only the `{}` scheme is supported for Azure storage: {}",
                    cls.scheme,
                    name,
                )

            f = furl(name)

            if not f.path.segments:
                raise StorageError(
                    "Missing container name in URL {}",
                    name,
                )

            parsed_container_name = f.path.segments[0]

            if parsed_container_name != container_name:
                raise StorageError(
                    "Container name mismatch (expected {}, found {}) in {}",
                    container_name,
                    parsed_container_name,
                    name,
                )

            if len(f.path.segments) == 1:
                raise StorageError(
                    "No path found following container name {} in {}",
                    container_name,
                    name,
                )

            return f.path.segments[0], os.path.join(*f.path.segments[1:])

        return name

    def get_direct_access(self, remote_path, **_):
        # azure blobs are never directly accessible on the local filesystem
        return None
class _FileStorageDriver(_Driver):
scheme = "file"
CHUNK_SIZE = 8096
IGNORE_FOLDERS = ['.lock', '.hash']
Object = namedtuple("Object", ['name', 'size', 'extra', 'driver', 'container', 'hash', 'meta_data'])
class _Container(object):
    """Lightweight handle for a sub-directory ("container") of the base path."""
    def __init__(self, name, extra, driver):
        self.name = name
        self.extra = extra
        self.driver = driver
def __init__(self, key, secret=None, secure=True, host=None, port=None,
             **kwargs):
    """*key* is the root path on the local filesystem; the remaining
    parameters exist only for driver-interface compatibility and are unused."""
    # Use the key as the path to the storage
    self.base_path = key
def _make_path(self, path, ignore_existing=True):
    """Create directory *path* (including parents).

    :param ignore_existing: when True, an already existing path is not an error.
    :raises OSError: when the path exists and *ignore_existing* is False.
    """
    try:
        os.makedirs(path)
    except OSError as exp:  # modern capture instead of sys.exc_info()[1]
        # NOTE(review): OSErrors other than a disallowed EEXIST are swallowed
        # here; persistent failures surface on the subsequent filesystem
        # access - confirm this is intended
        if exp.errno == errno.EEXIST and not ignore_existing:
            raise exp
def _check_container_name(self, container_name):
    """Reject container names that contain path separators.

    :raises ValueError: if *container_name* contains '/' or a backslash.
    """
    if any(sep in container_name for sep in ('/', '\\')):
        raise ValueError("Container name \"{}\" cannot contain \\ or / ".format(container_name))
def _make_container(self, container_name):
    """Build a _Container for *container_name* (``'.'`` when empty/None).

    :raises ValueError: if the name contains path separators.
    :raises OSError: if the path does not exist, is inaccessible, or is not
        a directory.
    """
    container_name = container_name or '.'
    self._check_container_name(container_name)
    full_path = os.path.realpath(os.path.join(self.base_path, container_name))
    try:
        stat = os.stat(full_path)
    except OSError:
        raise OSError("Target path \"{}\" is not accessible or does not exist".format(full_path))
    # previously this check sat inside the try block, so its OSError was
    # caught and re-raised with the misleading "not accessible" message
    if not os.path.isdir(full_path):
        raise OSError("Target path \"{}\" is not a directory".format(full_path))
    extra = {
        'creation_time': stat.st_ctime,
        'access_time': stat.st_atime,
        'modify_time': stat.st_mtime,
    }
    return self._Container(name=container_name, extra=extra, driver=self)
def _make_object(self, container, object_name):
    """Build an Object record for *object_name* inside *container*.

    :raises ValueError: if the target is a directory or cannot be accessed.
    """
    full_path = os.path.realpath(os.path.join(self.base_path, container.name if container else '.', object_name))
    if os.path.isdir(full_path):
        raise ValueError("Target path \"{}\" already exist".format(full_path))
    try:
        stat = os.stat(full_path)
    except Exception:
        raise ValueError("Cannot access target path \"{}\"".format(full_path))
    # file timestamps are reported through the `extra` mapping
    extra = {
        'creation_time': stat.st_ctime,
        'access_time': stat.st_atime,
        'modify_time': stat.st_mtime,
    }
    return self.Object(name=object_name, size=stat.st_size, extra=extra,
                       driver=self, container=container, hash=None, meta_data=None)
def iterate_containers(self):
    """Yield a _Container for every sub-directory directly under the base path."""
    for entry in os.listdir(self.base_path):
        if os.path.isdir(os.path.join(self.base_path, entry)):
            yield self._make_container(entry)
    def _get_objects(self, container):
        """Yield an Object for every file under *container*, walking recursively."""
        cpath = self.get_container_cdn_url(container, check=True)
        for folder, subfolders, files in os.walk(cpath, topdown=True):
            # Remove unwanted subfolders
            # (in-place pruning only works because os.walk is top-down).
            for subf in self.IGNORE_FOLDERS:
                if subf in subfolders:
                    subfolders.remove(subf)
            for name in files:
                full_path = os.path.join(folder, name)
                # Object names are paths relative to the container root.
                object_name = os.path.relpath(full_path, start=cpath)
                yield self._make_object(container, object_name)
    def iterate_container_objects(self, container):
        """Return a lazy iterator over all objects stored in *container*."""
        return self._get_objects(container)
    def get_container(self, container_name, **_):
        """Return a Container handle for *container_name* (must already exist)."""
        return self._make_container(container_name)
    def get_container_cdn_url(self, container, check=False):
        """Return the absolute filesystem path of *container*.

        With ``check=True``, raise ValueError if the directory does not exist.
        """
        path = os.path.realpath(os.path.join(self.base_path, container.name if container else '.'))
        if check and not os.path.isdir(path):
            raise ValueError("Target path \"{}\" does not exist".format(path))
        return path
    def get_object(self, container_name, object_name, **_):
        """Return an Object handle for *object_name* inside *container_name*."""
        container = self._make_container(container_name)
        return self._make_object(container, object_name)
    def get_object_cdn_url(self, obj):
        """Return the absolute filesystem path backing *obj*."""
        return os.path.realpath(os.path.join(self.base_path, obj.container.name, obj.name))
    def download_object(self, obj, destination_path, overwrite_existing=False, delete_on_failure=True, **_):
        """Copy *obj* to *destination_path*; return True on success.

        *destination_path* is either a full file path, or an existing
        directory (path ending in a separator) to which the object's name is
        appended. Raises ValueError if the destination directory is missing,
        or the target file exists and ``overwrite_existing`` is False. On a
        failed copy the partial file is removed when ``delete_on_failure`` is
        True and False is returned.
        """
        obj_path = self.get_object_cdn_url(obj)
        base_name = os.path.basename(destination_path)
        # An empty basename means the caller passed a directory path.
        if not base_name and not os.path.exists(destination_path):
            raise ValueError('Path \"{}\" does not exist'.format(destination_path))
        if not base_name:
            file_path = os.path.join(destination_path, obj.name)
        else:
            file_path = destination_path
        if os.path.exists(file_path) and not overwrite_existing:
            raise ValueError('File \"{}\" already exists, but overwrite_existing=False'.format(file_path))
        try:
            shutil.copy(obj_path, file_path)
        except IOError:
            if delete_on_failure:
                # Best-effort cleanup of a partially written file.
                # noinspection PyBroadException
                try:
                    os.unlink(file_path)
                except Exception:
                    pass
            return False
        return True
    def download_object_as_stream(self, obj, chunk_size=None, **_):
        """Yield the object's content as byte chunks of at most *chunk_size*."""
        path = self.get_object_cdn_url(obj)
        with open(path, 'rb') as obj_file:
            for data in self._read_in_chunks(obj_file, chunk_size=chunk_size):
                yield data
    def upload_object(self, file_path, container, object_name, extra=None, verify_hash=True, **_):
        """Copy the local file *file_path* into *container* as *object_name*.

        Intermediate directories are created as needed. ``extra`` and
        ``verify_hash`` are accepted for interface compatibility and ignored.
        """
        path = self.get_container_cdn_url(container, check=True)
        obj_path = os.path.join(path, object_name)
        base_path = os.path.dirname(obj_path)
        self._make_path(base_path)
        shutil.copy(file_path, obj_path)
        # rw-rw-r-- so other local users/services can read the uploaded file.
        os.chmod(obj_path, int('664', 8))
        return self._make_object(container, object_name)
    def upload_object_via_stream(self, iterator, container, object_name, extra=None, **kwargs):
        """Write *iterator*'s content into *container* as *object_name*.

        NOTE(review): despite the name, the stream is not consumed lazily --
        a file-like input is drained with a single read(), and anything else
        is passed to bytes(); a plain generator would raise TypeError here.
        Confirm callers only pass file-like or bytes-compatible inputs.
        """
        path = self.get_container_cdn_url(container, check=True)
        obj_path = os.path.join(path, object_name)
        base_path = os.path.dirname(obj_path)
        self._make_path(base_path)
        obj_path = os.path.realpath(obj_path)
        # StringIO carries text, so it must be written in text mode.
        with open(obj_path, 'wb' if not isinstance(iterator, StringIO) else 'wt') as obj_file:
            obj_file.write(iterator.read() if hasattr(iterator, 'read') else bytes(iterator))
        os.chmod(obj_path, int('664', 8))
        return self._make_object(container, object_name)
    def delete_object(self, obj, **_):
        """Delete the file backing *obj*; return True on success, False otherwise."""
        path = self.get_object_cdn_url(obj)
        try:
            os.unlink(path)
        except Exception:
            return False
        # # Check and delete all the empty parent folders
        # path = os.path.dirname(path)
        # container_url = obj.container.get_cdn_url()
        #
        # # Delete the empty parent folders till the container's level
        return True
    def create_container(self, container_name):
        """Create a new container directory and return its Container handle.

        Raises ValueError if a container with the same name already exists or
        the directory cannot be created.
        """
        container_name = container_name or '.'
        self._check_container_name(container_name)
        path = os.path.join(self.base_path, container_name)
        try:
            # ignore_existing=False so an existing directory raises EEXIST.
            self._make_path(path, ignore_existing=False)
        except OSError:
            exp = sys.exc_info()[1]
            if exp.errno == errno.EEXIST:
                raise ValueError('Container \"{}\" with this name already exists. The name '
                                 'must be unique among all the containers in the '
                                 'system'.format(container_name))
            else:
                raise ValueError('Error creating container \"{}\"'.format(container_name))
        except Exception:
            raise ValueError('Error creating container \"{}\"'.format(container_name))
        return self._make_container(container_name)
    def delete_container(self, container):
        """Delete an *empty* container directory; return True on success.

        Raises ValueError as soon as a single object is found inside.
        """
        # The loop body always raises, so this only probes for a first object.
        for obj in self._get_objects(container):
            raise ValueError('Container \"{}\" is not empty'.format(container.name))
        path = self.get_container_cdn_url(container, check=True)
        try:
            shutil.rmtree(path)
        except Exception:
            return False
        return True
    def list_container_objects(self, container, **kwargs):
        """Return all objects in *container* as a list (eager form of iterate_container_objects)."""
        return list(self.iterate_container_objects(container))
@staticmethod
def _read_in_chunks(iterator, chunk_size=None, fill_size=False, yield_empty=False):
chunk_size = chunk_size or _FileStorageDriver.CHUNK_SIZE
if six.PY3:
from io import FileIO as file
if isinstance(iterator, (file)):
get_data = iterator.read
args = (chunk_size,)
else:
get_data = next
args = (iterator,)
data = bytes('')
empty = False
while not empty or len(data) > 0:
if not empty:
try:
chunk = bytes(get_data(*args))
if len(chunk) > 0:
data += chunk
else:
empty = True
except StopIteration:
empty = True
if len(data) == 0:
if empty and yield_empty:
yield bytes('')
return
if fill_size:
if empty or len(data) >= chunk_size:
yield data[:chunk_size]
data = data[chunk_size:]
else:
yield data
data = bytes('')
    def get_direct_access(self, remote_path, **_):
        """Return the local filesystem path for a ``file://`` URL.

        Raises ValueError if the path does not exist.
        """
        full_url = StorageHelper.conform_url(remote_path)
        # Strip the leading "file://" scheme (7 characters) to get the path.
        path = Path(full_url[7:])
        if not path.exists():
            raise ValueError("Requested path does not exist: {}".format(path))
        return path.as_posix()
    def test_upload(self, test_path, config, **kwargs):
        """Local storage needs no connectivity check; always report success."""
        return True
# All URL schemes handled by any registered _Driver subclass (collects both
# the singular `scheme` attribute and the plural `schemes` lists; None and
# empty entries are filtered out).
driver_schemes = set(
    filter(
        None,
        itertools.chain(
            (getattr(cls, "scheme", None) for cls in _Driver.__subclasses__()),
            *(getattr(cls, "schemes", []) for cls in _Driver.__subclasses__())
        )
    )
)
# Schemes that require network access, i.e. everything except local files.
remote_driver_schemes = driver_schemes - {_FileStorageDriver.scheme}
| true | true |
f71bca0add55e9dbed05726a9b8f1b5d8f31a0fe | 544 | py | Python | loja.py | Felipe-Gs/Exerciccios-Python3 | bdbd49e7daa48df336b83ef3a2e36e42ede297ab | [
"MIT"
] | 3 | 2021-06-01T14:11:58.000Z | 2022-03-20T02:30:13.000Z | loja.py | Felipe-Gs/Exercicios-Python3 | bdbd49e7daa48df336b83ef3a2e36e42ede297ab | [
"MIT"
] | null | null | null | loja.py | Felipe-Gs/Exercicios-Python3 | bdbd49e7daa48df336b83ef3a2e36e42ede297ab | [
"MIT"
] | null | null | null | '''quantidade = int(input('quantos produtos o senhor comprou?'))
preco = quantidade * 1.99
print(preco)'''
print("====TABELA DE PREÇOS====")
for c in range(1, 51):
print(c, "- R$", c * 1.99)
quantidade = int(input('quantos produtos o senhor comprou?'))
preco = quantidade * 1.99
print(preco, " é o preço que voce tem que pagar!")
dinheiro = float(input('com quanto dinheiro vc pretende pagar?'))
if dinheiro <= preco:
print('dinheiro insuficiente')
else:
troco = dinheiro - preco
print("obrigado, esse é seu troco:",troco )
| 32 | 66 | 0.665441 | print("====TABELA DE PREÇOS====")
for c in range(1, 51):
print(c, "- R$", c * 1.99)
quantidade = int(input('quantos produtos o senhor comprou?'))
preco = quantidade * 1.99
print(preco, " é o preço que voce tem que pagar!")
dinheiro = float(input('com quanto dinheiro vc pretende pagar?'))
if dinheiro <= preco:
print('dinheiro insuficiente')
else:
troco = dinheiro - preco
print("obrigado, esse é seu troco:",troco )
| true | true |
f71bca13cf7f1910de2246a9b822851a12529735 | 11,981 | py | Python | downstream/tinypersons/mmdet/datasets/pipelines/formating.py | bwconrad/solo-learn | ec510d803a4428d7d8803b90fa1484c42cb9cb52 | [
"MIT"
] | 271 | 2021-06-19T08:41:11.000Z | 2022-03-31T05:42:42.000Z | downstream/tinypersons/mmdet/datasets/pipelines/formating.py | bwconrad/solo-learn | ec510d803a4428d7d8803b90fa1484c42cb9cb52 | [
"MIT"
] | 48 | 2021-07-06T07:17:12.000Z | 2022-03-14T11:38:36.000Z | downstream/tinypersons/mmdet/datasets/pipelines/formating.py | bwconrad/solo-learn | ec510d803a4428d7d8803b90fa1484c42cb9cb52 | [
"MIT"
] | 54 | 2021-07-07T08:40:49.000Z | 2022-03-16T05:02:35.000Z | from collections.abc import Sequence
import mmcv
import numpy as np
import torch
from mmcv.parallel import DataContainer as DC
from ..builder import PIPELINES
def to_tensor(data):
    """Convert *data* to a :obj:`torch.Tensor`.

    Supported input types: :class:`torch.Tensor` (returned unchanged),
    :class:`numpy.ndarray`, non-string :class:`Sequence`, :class:`int`
    and :class:`float`.
    """
    if isinstance(data, torch.Tensor):
        return data
    if isinstance(data, np.ndarray):
        return torch.from_numpy(data)
    if isinstance(data, Sequence) and not mmcv.is_str(data):
        return torch.tensor(data)
    if isinstance(data, int):
        return torch.LongTensor([data])
    if isinstance(data, float):
        return torch.FloatTensor([data])
    raise TypeError(f'type {type(data)} cannot be converted to tensor.')
@PIPELINES.register_module()
class ToTensor:
    """Convert the values stored under the given keys to :obj:`torch.Tensor`.

    Args:
        keys (Sequence[str]): Keys whose values should be converted.
    """

    def __init__(self, keys):
        self.keys = keys

    def __call__(self, results):
        """Convert each configured entry of ``results`` in place.

        Args:
            results (dict): Result dict holding the data to convert.

        Returns:
            dict: The same dict with the configured entries converted to
                :obj:`torch.Tensor`.
        """
        for field in self.keys:
            results[field] = to_tensor(results[field])
        return results

    def __repr__(self):
        return f'{self.__class__.__name__}(keys={self.keys})'
@PIPELINES.register_module()
class ImageToTensor:
    """Convert images under the given keys to :obj:`torch.Tensor` in
    channel-first (C, H, W) order.

    A 2-D (H, W) image is first expanded to (H, W, 1) so the output is
    always 3-D, i.e. (1, H, W) in that case.

    Args:
        keys (Sequence[str]): Keys of the images to convert.
    """

    def __init__(self, keys):
        self.keys = keys

    def __call__(self, results):
        """Convert each configured image to a channel-first tensor.

        Args:
            results (dict): Result dict holding the image data.

        Returns:
            dict: The same dict with images converted to (C, H, W) tensors.
        """
        for field in self.keys:
            image = results[field]
            if image.ndim < 3:
                image = np.expand_dims(image, -1)
            results[field] = to_tensor(image.transpose(2, 0, 1))
        return results

    def __repr__(self):
        return f'{self.__class__.__name__}(keys={self.keys})'
@PIPELINES.register_module()
class Transpose:
    """Transpose array-like entries of the results dict.

    Args:
        keys (Sequence[str]): Keys of the entries to transpose.
        order (Sequence[int]): Axis permutation passed to ``transpose``.
    """

    def __init__(self, keys, order):
        self.keys = keys
        self.order = order

    def __call__(self, results):
        """Apply the configured axis permutation to every configured entry.

        Args:
            results (dict): Result dict holding the data to transpose.

        Returns:
            dict: The same dict with the entries transposed to ``self.order``.
        """
        for field in self.keys:
            results[field] = results[field].transpose(self.order)
        return results

    def __repr__(self):
        return (f'{self.__class__.__name__}'
                f'(keys={self.keys}, order={self.order})')
@PIPELINES.register_module()
class ToDataContainer:
    """Convert results to :obj:`mmcv.DataContainer` by given fields.

    Args:
        fields (Sequence[dict]): Each field is a dict like
            ``dict(key='xxx', **kwargs)``. The ``key`` in result will
            be converted to :obj:`mmcv.DataContainer` with ``**kwargs``.
            Default: ``(dict(key='img', stack=True), dict(key='gt_bboxes'),
            dict(key='gt_labels'))``.
    """

    def __init__(self,
                 fields=(dict(key='img', stack=True), dict(key='gt_bboxes'),
                         dict(key='gt_labels'))):
        self.fields = fields

    def __call__(self, results):
        """Call function to convert data in results to
        :obj:`mmcv.DataContainer`.

        Args:
            results (dict): Result dict contains the data to convert.

        Returns:
            dict: The result dict contains the data converted to
                :obj:`mmcv.DataContainer`.
        """
        for field in self.fields:
            # Copy so popping 'key' does not mutate the configured field dict.
            field = field.copy()
            key = field.pop('key')
            results[key] = DC(results[key], **field)
        return results

    def __repr__(self):
        return self.__class__.__name__ + f'(fields={self.fields})'
@PIPELINES.register_module()
class DefaultFormatBundle:
    """Default formatting bundle.

    It simplifies the pipeline of formatting common fields, including "img",
    "proposals", "gt_bboxes", "gt_labels", "gt_masks" and "gt_semantic_seg".
    These fields are formatted as follows.

    - img: (1)transpose, (2)to tensor, (3)to DataContainer (stack=True)
    - proposals: (1)to tensor, (2)to DataContainer
    - gt_bboxes: (1)to tensor, (2)to DataContainer
    - gt_bboxes_ignore: (1)to tensor, (2)to DataContainer
    - gt_labels: (1)to tensor, (2)to DataContainer
    - gt_masks: (1)to tensor, (2)to DataContainer (cpu_only=True)
    - gt_semantic_seg: (1)unsqueeze dim-0 (2)to tensor, \
                       (3)to DataContainer (stack=True)
    """

    def __call__(self, results):
        """Call function to transform and format common fields in results.

        Args:
            results (dict): Result dict contains the data to convert.

        Returns:
            dict: The result dict contains the data that is formatted with \
                default bundle.
        """
        if 'img' in results:
            img = results['img']
            # add default meta keys
            results = self._add_default_meta_keys(results)
            if len(img.shape) < 3:
                img = np.expand_dims(img, -1)
            # HWC -> CHW; make the transposed array contiguous before
            # converting it to a tensor.
            img = np.ascontiguousarray(img.transpose(2, 0, 1))
            results['img'] = DC(to_tensor(img), stack=True)
        for key in ['proposals', 'gt_bboxes', 'gt_bboxes_ignore', 'gt_labels']:
            if key not in results:
                continue
            results[key] = DC(to_tensor(results[key]))
        if 'gt_masks' in results:
            # Masks are wrapped cpu_only, i.e. never converted/stacked.
            results['gt_masks'] = DC(results['gt_masks'], cpu_only=True)
        if 'gt_semantic_seg' in results:
            # Add a leading channel dim before stacking.
            results['gt_semantic_seg'] = DC(
                to_tensor(results['gt_semantic_seg'][None, ...]), stack=True)
        return results

    def _add_default_meta_keys(self, results):
        """Add default meta keys.

        We set default meta keys including `pad_shape`, `scale_factor` and
        `img_norm_cfg` to avoid the case where no `Resize`, `Normalize` and
        `Pad` are implemented during the whole pipeline.

        Args:
            results (dict): Result dict contains the data to convert.

        Returns:
            results (dict): Updated result dict contains the data to convert.
        """
        img = results['img']
        results.setdefault('pad_shape', img.shape)
        results.setdefault('scale_factor', 1.0)
        num_channels = 1 if len(img.shape) < 3 else img.shape[2]
        # Identity normalization (zero mean, unit std) as a safe default.
        results.setdefault(
            'img_norm_cfg',
            dict(
                mean=np.zeros(num_channels, dtype=np.float32),
                std=np.ones(num_channels, dtype=np.float32),
                to_rgb=False))
        return results

    def __repr__(self):
        return self.__class__.__name__
@PIPELINES.register_module()
class Collect:
    """Collect data from the loader relevant to the specific task.

    This is usually the last stage of the data loader pipeline. Typically keys
    is set to some subset of "img", "proposals", "gt_bboxes",
    "gt_bboxes_ignore", "gt_labels", and/or "gt_masks".

    The "img_meta" item is always populated. The contents of the "img_meta"
    dictionary depends on "meta_keys". By default this includes:

    - "img_shape": shape of the image input to the network as a tuple \
        (h, w, c). Note that images may be zero padded on the \
        bottom/right if the batch tensor is larger than this shape.
    - "scale_factor": a float indicating the preprocessing scale
    - "flip": a boolean indicating if image flip transform was used
    - "filename": path to the image file
    - "ori_shape": original shape of the image as a tuple (h, w, c)
    - "pad_shape": image shape after padding
    - "img_norm_cfg": a dict of normalization information:

        - mean - per channel mean subtraction
        - std - per channel std divisor
        - to_rgb - bool indicating if bgr was converted to rgb

    Args:
        keys (Sequence[str]): Keys of results to be collected in ``data``.
        meta_keys (Sequence[str], optional): Meta keys to be converted to
            ``mmcv.DataContainer`` and collected in ``data[img_metas]``.
            Default: ``('filename', 'ori_filename', 'ori_shape', 'img_shape',
            'pad_shape', 'scale_factor', 'flip', 'flip_direction',
            'img_norm_cfg')``
    """

    def __init__(self,
                 keys,
                 meta_keys=('filename', 'ori_filename', 'ori_shape',
                            'img_shape', 'pad_shape', 'scale_factor', 'flip',
                            'flip_direction', 'img_norm_cfg')):
        self.keys = keys
        self.meta_keys = meta_keys

    def __call__(self, results):
        """Call function to collect keys in results. The keys in ``meta_keys``
        will be converted to :obj:mmcv.DataContainer.

        Args:
            results (dict): Result dict contains the data to collect.

        Returns:
            dict: The result dict contains the following keys

                - keys in``self.keys``
                - ``img_metas``
        """
        data = {}
        img_meta = {}
        for key in self.meta_keys:
            img_meta[key] = results[key]
        # Meta info is wrapped cpu_only so it is never stacked or moved to GPU.
        data['img_metas'] = DC(img_meta, cpu_only=True)
        for key in self.keys:
            data[key] = results[key]
        return data

    def __repr__(self):
        return self.__class__.__name__ + \
            f'(keys={self.keys}, meta_keys={self.meta_keys})'
@PIPELINES.register_module()
class WrapFieldsToLists:
    """Wrap every field of the results dict into a single-element list.

    Intended as the final step of a test/validation pipeline so that
    single-image inference sees batch-like (list) values.

    Example:
        >>> test_pipeline = [
        >>>    dict(type='LoadImageFromFile'),
        >>>    dict(type='Normalize',
                    mean=[123.675, 116.28, 103.53],
                    std=[58.395, 57.12, 57.375],
                    to_rgb=True),
        >>>    dict(type='Pad', size_divisor=32),
        >>>    dict(type='ImageToTensor', keys=['img']),
        >>>    dict(type='Collect', keys=['img']),
        >>>    dict(type='WrapFieldsToLists')
        >>> ]
    """

    def __call__(self, results):
        """Wrap each value of ``results`` in a list, in place.

        Args:
            results (dict): Result dict contains the data to wrap.

        Returns:
            dict: The same dict with every value wrapped into a list.
        """
        for field in list(results):
            results[field] = [results[field]]
        return results

    def __repr__(self):
        return f'{self.__class__.__name__}()'
| 32.824658 | 79 | 0.591687 | from collections.abc import Sequence
import mmcv
import numpy as np
import torch
from mmcv.parallel import DataContainer as DC
from ..builder import PIPELINES
def to_tensor(data):
if isinstance(data, torch.Tensor):
return data
elif isinstance(data, np.ndarray):
return torch.from_numpy(data)
elif isinstance(data, Sequence) and not mmcv.is_str(data):
return torch.tensor(data)
elif isinstance(data, int):
return torch.LongTensor([data])
elif isinstance(data, float):
return torch.FloatTensor([data])
else:
raise TypeError(f'type {type(data)} cannot be converted to tensor.')
@PIPELINES.register_module()
class ToTensor:
def __init__(self, keys):
self.keys = keys
def __call__(self, results):
for key in self.keys:
results[key] = to_tensor(results[key])
return results
def __repr__(self):
return self.__class__.__name__ + f'(keys={self.keys})'
@PIPELINES.register_module()
class ImageToTensor:
def __init__(self, keys):
self.keys = keys
def __call__(self, results):
for key in self.keys:
img = results[key]
if len(img.shape) < 3:
img = np.expand_dims(img, -1)
results[key] = to_tensor(img.transpose(2, 0, 1))
return results
def __repr__(self):
return self.__class__.__name__ + f'(keys={self.keys})'
@PIPELINES.register_module()
class Transpose:
def __init__(self, keys, order):
self.keys = keys
self.order = order
def __call__(self, results):
for key in self.keys:
results[key] = results[key].transpose(self.order)
return results
def __repr__(self):
return self.__class__.__name__ + \
f'(keys={self.keys}, order={self.order})'
@PIPELINES.register_module()
class ToDataContainer:
def __init__(self,
fields=(dict(key='img', stack=True), dict(key='gt_bboxes'),
dict(key='gt_labels'))):
self.fields = fields
def __call__(self, results):
for field in self.fields:
field = field.copy()
key = field.pop('key')
results[key] = DC(results[key], **field)
return results
def __repr__(self):
return self.__class__.__name__ + f'(fields={self.fields})'
@PIPELINES.register_module()
class DefaultFormatBundle:
def __call__(self, results):
if 'img' in results:
img = results['img']
results = self._add_default_meta_keys(results)
if len(img.shape) < 3:
img = np.expand_dims(img, -1)
img = np.ascontiguousarray(img.transpose(2, 0, 1))
results['img'] = DC(to_tensor(img), stack=True)
for key in ['proposals', 'gt_bboxes', 'gt_bboxes_ignore', 'gt_labels']:
if key not in results:
continue
results[key] = DC(to_tensor(results[key]))
if 'gt_masks' in results:
results['gt_masks'] = DC(results['gt_masks'], cpu_only=True)
if 'gt_semantic_seg' in results:
results['gt_semantic_seg'] = DC(
to_tensor(results['gt_semantic_seg'][None, ...]), stack=True)
return results
def _add_default_meta_keys(self, results):
img = results['img']
results.setdefault('pad_shape', img.shape)
results.setdefault('scale_factor', 1.0)
num_channels = 1 if len(img.shape) < 3 else img.shape[2]
results.setdefault(
'img_norm_cfg',
dict(
mean=np.zeros(num_channels, dtype=np.float32),
std=np.ones(num_channels, dtype=np.float32),
to_rgb=False))
return results
def __repr__(self):
return self.__class__.__name__
@PIPELINES.register_module()
class Collect:
def __init__(self,
keys,
meta_keys=('filename', 'ori_filename', 'ori_shape',
'img_shape', 'pad_shape', 'scale_factor', 'flip',
'flip_direction', 'img_norm_cfg')):
self.keys = keys
self.meta_keys = meta_keys
def __call__(self, results):
data = {}
img_meta = {}
for key in self.meta_keys:
img_meta[key] = results[key]
data['img_metas'] = DC(img_meta, cpu_only=True)
for key in self.keys:
data[key] = results[key]
return data
def __repr__(self):
return self.__class__.__name__ + \
f'(keys={self.keys}, meta_keys={self.meta_keys})'
@PIPELINES.register_module()
class WrapFieldsToLists:
def __call__(self, results):
for key, val in results.items():
results[key] = [val]
return results
def __repr__(self):
return f'{self.__class__.__name__}()'
| true | true |
f71bcae3ada7450e03517099c8f34cabe1579cb5 | 1,307 | py | Python | trust_simple_three/tests.py | gerhardriener/cherry_picking_code | 5dbcd32ad2e9929eac785eb06e7d44c60b1ffc18 | [
"MIT"
] | null | null | null | trust_simple_three/tests.py | gerhardriener/cherry_picking_code | 5dbcd32ad2e9929eac785eb06e7d44c60b1ffc18 | [
"MIT"
] | null | null | null | trust_simple_three/tests.py | gerhardriener/cherry_picking_code | 5dbcd32ad2e9929eac785eb06e7d44c60b1ffc18 | [
"MIT"
] | null | null | null | from otree.api import Currency as c, currency_range, SubmissionMustFail
from . import pages
from ._builtin import Bot
from .models import Constants
class PlayerBot(Bot):
cases = [
{'offer': 1, 'return_1': 1, 'return_2': 1,
'return_2_A': 1, 'return_2_B': 1,
'p1_payoff': 5, 'p2_payoff': 5, 'p3_payoff': 5},
]
def play_round(self):
case = self.case
if self.player.id_in_group == 1:
yield (pages.Send, {"sent_amount": case['offer']})
elif self.player.id_in_group == 2:
yield (pages.SendBack1, {"sent_back_amount_1": case['return_1']})
else:
for invalid_return in [-1, case['offer']
* Constants.multiplier + 1]:
yield SubmissionMustFail(pages.SendBack,
{'sent_back_amount_1':
invalid_return})
yield (pages.SendBack, {'sent_back_amount_1': case['return']})
if self.player.id_in_group == 1:
expected_payoff = case['p1_payoff']
elif self.player.id_in_group == 2:
expected_payoff = case['p2_payoff']
else:
expected_payoff = case['p3_payoff']
assert self.player.payoff == expected_payoff
| 34.394737 | 77 | 0.55394 | from otree.api import Currency as c, currency_range, SubmissionMustFail
from . import pages
from ._builtin import Bot
from .models import Constants
class PlayerBot(Bot):
cases = [
{'offer': 1, 'return_1': 1, 'return_2': 1,
'return_2_A': 1, 'return_2_B': 1,
'p1_payoff': 5, 'p2_payoff': 5, 'p3_payoff': 5},
]
def play_round(self):
case = self.case
if self.player.id_in_group == 1:
yield (pages.Send, {"sent_amount": case['offer']})
elif self.player.id_in_group == 2:
yield (pages.SendBack1, {"sent_back_amount_1": case['return_1']})
else:
for invalid_return in [-1, case['offer']
* Constants.multiplier + 1]:
yield SubmissionMustFail(pages.SendBack,
{'sent_back_amount_1':
invalid_return})
yield (pages.SendBack, {'sent_back_amount_1': case['return']})
if self.player.id_in_group == 1:
expected_payoff = case['p1_payoff']
elif self.player.id_in_group == 2:
expected_payoff = case['p2_payoff']
else:
expected_payoff = case['p3_payoff']
assert self.player.payoff == expected_payoff
| true | true |
f71bcbceeb060b7a31ed8e3353c036d8c37f27b4 | 621 | py | Python | supriya/ugens/BRF.py | deeuu/supriya | 14fcb5316eccb4dafbe498932ceff56e1abb9d27 | [
"MIT"
] | null | null | null | supriya/ugens/BRF.py | deeuu/supriya | 14fcb5316eccb4dafbe498932ceff56e1abb9d27 | [
"MIT"
] | null | null | null | supriya/ugens/BRF.py | deeuu/supriya | 14fcb5316eccb4dafbe498932ceff56e1abb9d27 | [
"MIT"
] | null | null | null | import collections
from supriya import CalculationRate
from supriya.ugens.Filter import Filter
class BRF(Filter):
"""
A 2nd order Butterworth band-reject filter.
::
>>> source = supriya.ugens.In.ar(bus=0)
>>> b_r_f =supriya.ugens.BRF.ar(source=source)
>>> b_r_f
BRF.ar()
"""
### CLASS VARIABLES ###
__documentation_section__ = "Filter UGens"
_ordered_input_names = collections.OrderedDict(
[("source", None), ("frequency", 440.0), ("reciprocal_of_q", 1.0)]
)
_valid_calculation_rates = (CalculationRate.AUDIO, CalculationRate.CONTROL)
| 21.413793 | 79 | 0.645733 | import collections
from supriya import CalculationRate
from supriya.ugens.Filter import Filter
class BRF(Filter):
_ordered_input_names = collections.OrderedDict(
[("source", None), ("frequency", 440.0), ("reciprocal_of_q", 1.0)]
)
_valid_calculation_rates = (CalculationRate.AUDIO, CalculationRate.CONTROL)
| true | true |
f71bcc0ec5e7d7ea7b83b7093e62b94f604e0b6e | 1,989 | py | Python | visualizer.py | shaandesai1/transfer_diffeq | 29ab4f3ff16a58bc7b1751428e540a3bb135778c | [
"MIT"
] | null | null | null | visualizer.py | shaandesai1/transfer_diffeq | 29ab4f3ff16a58bc7b1751428e540a3bb135778c | [
"MIT"
] | null | null | null | visualizer.py | shaandesai1/transfer_diffeq | 29ab4f3ff16a58bc7b1751428e540a3bb135778c | [
"MIT"
] | null | null | null |
import numpy as np
import torch
import matplotlib.pyplot as plt
from neurodiffeq import diff # the differentiation operation
from neurodiffeq.conditions import IVP # the initial condition
from neurodiffeq.networks import FCNN # fully-connect neural network
from neurodiffeq.solvers import Solver1D
from neurodiffeq.callbacks import WeightCallback
from neurodiffeq.callbacks import WeightCallback1, SolutionCallback, SaddleCallback
from neurodiffeq.callbacks import PeriodLocal
from sklearn.metrics import mean_squared_error
# from sklearn.metrics.pairwise import cosine_similarity
import copy
import matplotlib.pyplot as plt
# Right-hand sides of the first-order ODE training tasks, in residual form:
# each entry maps a task name to a callable returning [residual], where
# residual == 0 defines the equation (e.g. 'exp' means u' + u = 0).
DIFFEQS_TRAIN = {
    'exp': lambda u, t: [diff(u, t) + u],
    'exp1': lambda u, t: [diff(u, t) - u],
    'tanh': lambda u, t: [diff(u, t) + u ** 2 - 1],
    'psig': lambda u, t: [diff(u, t) - 3 * u + u ** 2],
    'r1': lambda u, t: [diff(u, t) - u + u ** 2 + u ** 3],
    'r2': lambda u, t: [diff(u, t) + u + u ** 2],
    'r3': lambda u, t: [diff(u, t) + u ** 2],
    'r4': lambda u, t: [diff(u, t) - u ** 2],
    'q1': lambda u, t: [diff(u, t) - u + u ** 2],
    'q2': lambda u, t: [diff(u, t) - u + u ** 2 - u ** 3],
    'q3': lambda u, t: [diff(u, t) + u ** 2 + u ** 4],
    'q4': lambda u, t: [diff(u, t) - u ** 2 - u ** 4],
    'high_order1': lambda u, t: [diff(u, t) + u - u ** 2 + u ** 3 - u ** 4 + u ** 5],
    'high_order2': lambda u, t: [diff(u, t) - u + u ** 2 - u ** 3 + u ** 4 - u ** 5],
    'baseline': lambda u, t: [diff(u,t)]
}
# Compare the learned solutions against the analytical reference.
solsa = np.load('data/q3_train_solution/3000.npy')
solsb = np.load('data/baseline_train_solution/3000.npy')
analytical = np.load('data/q3_gt_test_solution/3000.npy')
# pre1 =np.load('data/q2_q2_pretrain_500_solution/500.npy')
# pre2 =np.load('data/baseline_q2_pretrain_500_solution/500.npy')
plt.figure()
# Fixed: the legend labels previously said 'q2'/'high_order_2'/'analytical_q2',
# which did not match the q3/baseline files actually loaded above.
plt.plot(solsa, label='q3')
plt.plot(solsb, label='baseline')
plt.plot(analytical, label='analytical_q3')
# plt.plot(pre1,label='pre_q2_q2')
# plt.plot(pre2,label='pre_baseline_q2')
plt.legend()
plt.show()
import numpy as np
import torch
import matplotlib.pyplot as plt
from neurodiffeq import diff
from neurodiffeq.conditions import IVP
from neurodiffeq.networks import FCNN
from neurodiffeq.solvers import Solver1D
from neurodiffeq.callbacks import WeightCallback
from neurodiffeq.callbacks import WeightCallback1, SolutionCallback, SaddleCallback
from neurodiffeq.callbacks import PeriodLocal
from sklearn.metrics import mean_squared_error
import copy
import matplotlib.pyplot as plt
DIFFEQS_TRAIN = {
'exp': lambda u, t: [diff(u, t) + u],
'exp1': lambda u, t: [diff(u, t) - u],
'tanh': lambda u, t: [diff(u, t) + u ** 2 - 1],
'psig': lambda u, t: [diff(u, t) - 3 * u + u ** 2],
'r1': lambda u, t: [diff(u, t) - u + u ** 2 + u ** 3],
'r2': lambda u, t: [diff(u, t) + u + u ** 2],
'r3': lambda u, t: [diff(u, t) + u ** 2],
'r4': lambda u, t: [diff(u, t) - u ** 2],
'q1': lambda u, t: [diff(u, t) - u + u ** 2],
'q2': lambda u, t: [diff(u, t) - u + u ** 2 - u ** 3],
'q3': lambda u, t: [diff(u, t) + u ** 2 + u ** 4],
'q4': lambda u, t: [diff(u, t) - u ** 2 - u ** 4],
'high_order1': lambda u, t: [diff(u, t) + u - u ** 2 + u ** 3 - u ** 4 + u ** 5],
'high_order2': lambda u, t: [diff(u, t) - u + u ** 2 - u ** 3 + u ** 4 - u ** 5],
'baseline': lambda u, t: [diff(u,t)]
}
solsa = np.load('data/q3_train_solution/3000.npy')
solsb = np.load('data/baseline_train_solution/3000.npy')
analytical =np.load('data/q3_gt_test_solution/3000.npy')
plt.figure()
plt.plot(solsa,label='q2')
plt.plot(solsb,label='high_order_2')
plt.plot(analytical,label='analytical_q2')
plt.legend()
plt.show() | true | true |
f71bcd5fb6a9e9c5a061b59f9eb248ca4c085954 | 5,479 | py | Python | create_7segment_dataset.py | Kazuhito00/7seg-image-generator | 2ab059814348800b289c033f839c7a255b72a1ac | [
"Apache-2.0"
] | null | null | null | create_7segment_dataset.py | Kazuhito00/7seg-image-generator | 2ab059814348800b289c033f839c7a255b72a1ac | [
"Apache-2.0"
] | null | null | null | create_7segment_dataset.py | Kazuhito00/7seg-image-generator | 2ab059814348800b289c033f839c7a255b72a1ac | [
"Apache-2.0"
] | null | null | null | #!/usr/bin/env python
# -*- coding: utf-8 -*-
import os
import random
import argparse
import cv2 as cv
import numpy as np
from tqdm import tqdm
from create_7segment_image import create_7segment_image
def get_args():
    """Build the CLI parser and return the parsed arguments.

    Defaults reproduce the original 7-segment dataset generator settings.
    """
    parser = argparse.ArgumentParser()

    parser.add_argument("--width", help='image width', type=int, default=96)
    parser.add_argument("--height", help='image height', type=int, default=96)

    # Every float option is exposed as a --<name>_min / --<name>_max pair.
    float_ranges = (
        ("number_width", 0.1, 0.9),
        ("number_height", 0.4, 0.9),
        ("thickness", 0.01, 0.25),
        ("blank_ratio", 0.0, 0.1),
    )
    for name, minimum, maximum in float_ranges:
        parser.add_argument("--{}_min".format(name), type=float, default=minimum)
        parser.add_argument("--{}_max".format(name), type=float, default=maximum)

    # Integer geometry-jitter ranges (shear/shift), also min/max pairs.
    int_ranges = (
        ("shear_x", -10, 30),
        ("shift_x", -10, 10),
        ("shift_y", -10, 10),
    )
    for name, minimum, maximum in int_ranges:
        parser.add_argument("--{}_min".format(name), type=int, default=minimum)
        parser.add_argument("--{}_max".format(name), type=int, default=maximum)

    parser.add_argument("--steps", help='create steps', type=int, default=3000)
    parser.add_argument('--erase_debug_window', action='store_true')
    parser.add_argument("--seed", help='random seed', type=int, default=42)
    parser.add_argument("--start_count", type=int, default=0)

    return parser.parse_args()
def main():
    """Generate a labelled dataset of synthetic 7-segment digit images.

    Each step samples one random rendering configuration (geometry, shear,
    shift and colour scheme) and renders one image per class label (0..11),
    saving it to dataset/<label>/<sequential count>.png.
    """
    # Argument parsing ########################################################
    args = get_args()
    image_width = args.width
    image_height = args.height
    number_width_min = args.number_width_min
    number_width_max = args.number_width_max
    number_height_min = args.number_height_min
    number_height_max = args.number_height_max
    thickness_min = args.thickness_min
    thickness_max = args.thickness_max
    blank_ratio_min = args.blank_ratio_min
    blank_ratio_max = args.blank_ratio_max
    shear_x_min = args.shear_x_min
    shear_x_max = args.shear_x_max
    shift_x_min = args.shift_x_min
    shift_x_max = args.shift_x_max
    shift_y_min = args.shift_y_min
    shift_y_max = args.shift_y_max
    steps = args.steps
    erase_debug_window = args.erase_debug_window
    seed = args.seed
    image_count = args.start_count
    random.seed(seed)
    # Create one output directory per class label (00..11)
    dataset_dir = 'dataset/'
    for number in range(12):
        os.makedirs(dataset_dir + '{:02}'.format(number), exist_ok=True)
    # Colour schemes to sample from
    color_set_list = [
        # bg_color, line_color, line_bg_color
        [(110, 120, 120), (10, 20, 20), (90, 100, 100)],
        [(113, 167, 154), (0, 6, 0), (104, 139, 129)],
        [(2, 5, 19), (246, 247, 247), (17, 20, 35)],
        [(242, 242, 242), (2, 2, 2), (222, 222, 222)],
        [(3, 0, 12), (39, 87, 211), (68, 71, 72)],
        [(3, 0, 12), (234, 157, 9), (68, 71, 72)],
        [(3, 1, 29), (6, 0, 105), (49, 56, 63)],
        [(14, 123, 0), (235, 235, 235), (14, 123, 0)],
        [(2, 197, 147), (37, 86, 70), (2, 197, 147)],
        [(200, 219, 211), (55, 55, 55), (147, 165, 158)],
        [(64, 64, 64), (35, 233, 155), (64, 64, 64)],
        [(30, 27, 85), (235, 240, 237), (32, 23, 183)],
        [(34, 15, 49), (247, 247, 240), (164, 131, 121)],
        [(7, 0, 3), (0, 215, 238), (66, 68, 68)],
        [(0, 161, 255), (21, 98, 195), (0, 161, 255)],
        [(253, 146, 64), (238, 9, 5), (253, 146, 64)],
    ]
    for _ in tqdm(range(steps)):
        # Sample one random rendering configuration for this step
        number_width = random.uniform(number_width_min, number_width_max)
        number_height = random.uniform(number_height_min, number_height_max)
        thickness = random.uniform(thickness_min, thickness_max)
        blank_ratio = random.uniform(blank_ratio_min, blank_ratio_max)
        shear_x = random.uniform(shear_x_min, shear_x_max)
        shift_x = random.uniform(shift_x_min, shift_x_max)
        shift_y = random.uniform(shift_y_min, shift_y_max)
        color_index = int(random.uniform(0, len(color_set_list)))
        for number_id in range(12):
            # Render the digit image for this class label
            image = create_7segment_image(
                number=number_id,
                image_size=(image_width, image_height),
                bg_color=color_set_list[color_index][0],
                line_color=color_set_list[color_index][1],
                line_bg_color=color_set_list[color_index][2],
                number_width=number_width,
                number_height=number_height,
                thickness=thickness,
                blank_ratio=blank_ratio,
                shear_x=shear_x,
                shift=(shift_x, shift_y),
            )
            # Show a debug preview window unless disabled via CLI flag
            if not erase_debug_window:
                cv.imshow('7seg generator', image)
                cv.waitKey(10)
            # Save the image under its class directory with a sequential name
            save_path = os.path.join(dataset_dir, '{:02}'.format(number_id),
                                     '{:08}.png'.format(image_count))
            cv.imwrite(save_path, image)
            image_count += 1
    cv.destroyAllWindows()
if __name__ == '__main__':
main() | 38.048611 | 80 | 0.592626 |
import os
import random
import argparse
import cv2 as cv
import numpy as np
from tqdm import tqdm
from create_7segment_image import create_7segment_image
def get_args():
parser = argparse.ArgumentParser()
parser.add_argument("--width", help='image width', type=int, default=96)
parser.add_argument("--height", help='image height', type=int, default=96)
parser.add_argument("--number_width_min", type=float, default=0.1)
parser.add_argument("--number_width_max", type=float, default=0.9)
parser.add_argument("--number_height_min", type=float, default=0.4)
parser.add_argument("--number_height_max", type=float, default=0.9)
parser.add_argument("--thickness_min", type=float, default=0.01)
parser.add_argument("--thickness_max", type=float, default=0.25)
parser.add_argument("--blank_ratio_min", type=float, default=0.0)
parser.add_argument("--blank_ratio_max", type=float, default=0.1)
parser.add_argument("--shear_x_min", type=int, default=-10)
parser.add_argument("--shear_x_max", type=int, default=30)
parser.add_argument("--shift_x_min", type=int, default=-10)
parser.add_argument("--shift_x_max", type=int, default=10)
parser.add_argument("--shift_y_min", type=int, default=-10)
parser.add_argument("--shift_y_max", type=int, default=10)
parser.add_argument("--steps", help='create steps', type=int, default=3000)
parser.add_argument('--erase_debug_window', action='store_true')
parser.add_argument("--seed", help='random seed', type=int, default=42)
parser.add_argument("--start_count", type=int, default=0)
args = parser.parse_args()
return args
def main():
blank_ratio_max)
shear_x = random.uniform(shear_x_min, shear_x_max)
shift_x = random.uniform(shift_x_min, shift_x_max)
shift_y = random.uniform(shift_y_min, shift_y_max)
color_index = int(random.uniform(0, len(color_set_list)))
for number_id in range(12):
image = create_7segment_image(
number=number_id,
image_size=(image_width, image_height),
bg_color=color_set_list[color_index][0],
line_color=color_set_list[color_index][1],
line_bg_color=color_set_list[color_index][2],
number_width=number_width,
number_height=number_height,
thickness=thickness,
blank_ratio=blank_ratio,
shear_x=shear_x,
shift=(shift_x, shift_y),
)
if not erase_debug_window:
cv.imshow('7seg generator', image)
cv.waitKey(10)
save_path = os.path.join(dataset_dir, '{:02}'.format(number_id),
'{:08}.png'.format(image_count))
cv.imwrite(save_path, image)
image_count += 1
cv.destroyAllWindows()
if __name__ == '__main__':
main() | true | true |
f71bcdce19046a219d824e7ae538b1d15a34fb6e | 27,412 | py | Python | sklearn/preprocessing/tests/test_polynomial.py | talahajeer/scikit-learn | d66b42708a5912039740cd08f747229433e579b5 | [
"BSD-3-Clause"
] | 1 | 2021-12-28T09:33:38.000Z | 2021-12-28T09:33:38.000Z | sklearn/preprocessing/tests/test_polynomial.py | talahajeer/scikit-learn | d66b42708a5912039740cd08f747229433e579b5 | [
"BSD-3-Clause"
] | null | null | null | sklearn/preprocessing/tests/test_polynomial.py | talahajeer/scikit-learn | d66b42708a5912039740cd08f747229433e579b5 | [
"BSD-3-Clause"
] | 2 | 2017-01-16T17:53:31.000Z | 2017-04-22T06:13:07.000Z | import numpy as np
import pytest
from scipy import sparse
from scipy.sparse import random as sparse_random
from sklearn.utils._testing import assert_array_almost_equal
from numpy.testing import assert_allclose, assert_array_equal
from scipy.interpolate import BSpline
from sklearn.linear_model import LinearRegression
from sklearn.pipeline import Pipeline
from sklearn.preprocessing import (
KBinsDiscretizer,
PolynomialFeatures,
SplineTransformer,
)
from sklearn.utils.fixes import linspace, sp_version, parse_version
@pytest.mark.parametrize("est", (PolynomialFeatures, SplineTransformer))
def test_polynomial_and_spline_array_order(est):
    """Check that the transformer honours the requested memory layout."""
    X = np.arange(10).reshape(5, 2)

    def is_c_contiguous(arr):
        # A C-contiguous array is Fortran-contiguous once transposed.
        return np.isfortran(arr.T)

    # Default order and explicit order="C" both yield C-ordered output.
    for transformer in (est(), est(order="C")):
        assert is_c_contiguous(transformer.fit_transform(X))
    assert np.isfortran(est(order="F").fit_transform(X))
@pytest.mark.parametrize(
"params, err_msg",
[
({"degree": -1}, "degree must be a non-negative integer"),
({"degree": 2.5}, "degree must be a non-negative integer"),
({"degree": "string"}, "degree must be a non-negative integer"),
({"n_knots": 1}, "n_knots must be a positive integer >= 2."),
({"n_knots": 1}, "n_knots must be a positive integer >= 2."),
({"n_knots": 2.5}, "n_knots must be a positive integer >= 2."),
({"n_knots": "string"}, "n_knots must be a positive integer >= 2."),
({"knots": 1}, "Expected 2D array, got scalar array instead:"),
({"knots": [1, 2]}, "Expected 2D array, got 1D array instead:"),
(
{"knots": [[1]]},
r"Number of knots, knots.shape\[0\], must be >= 2.",
),
(
{"knots": [[1, 5], [2, 6]]},
r"knots.shape\[1\] == n_features is violated.",
),
(
{"knots": [[1], [1], [2]]},
"knots must be sorted without duplicates.",
),
({"knots": [[2], [1]]}, "knots must be sorted without duplicates."),
(
{"extrapolation": None},
"extrapolation must be one of 'error', 'constant', 'linear', "
"'continue' or 'periodic'.",
),
(
{"extrapolation": 1},
"extrapolation must be one of 'error', 'constant', 'linear', "
"'continue' or 'periodic'.",
),
(
{"extrapolation": "string"},
"extrapolation must be one of 'error', 'constant', 'linear', "
"'continue' or 'periodic'.",
),
({"include_bias": None}, "include_bias must be bool."),
({"include_bias": 1}, "include_bias must be bool."),
({"include_bias": "string"}, "include_bias must be bool."),
(
{"extrapolation": "periodic", "n_knots": 3, "degree": 3},
"Periodic splines require degree < n_knots. Got n_knots=3 and degree=3.",
),
(
{"extrapolation": "periodic", "knots": [[0], [1]], "degree": 2},
"Periodic splines require degree < n_knots. Got n_knots=2 and degree=2.",
),
],
)
def test_spline_transformer_input_validation(params, err_msg):
"""Test that we raise errors for invalid input in SplineTransformer."""
X = [[1], [2]]
with pytest.raises(ValueError, match=err_msg):
SplineTransformer(**params).fit(X)
def test_spline_transformer_manual_knot_input():
    """
    Test that array-like knot positions in SplineTransformer are accepted.
    """
    X = np.arange(20).reshape(10, 2)
    knot_positions = [[0.5, 1], [1.5, 2], [5, 10]]
    # Fitting with a plain list and with an ndarray must give identical knots.
    splt_list = SplineTransformer(degree=3, knots=knot_positions, n_knots=None).fit(X)
    splt_array = SplineTransformer(
        degree=3, knots=np.asarray(knot_positions), n_knots=None
    ).fit(X)
    for spline_list, spline_array in zip(splt_list.bsplines_, splt_array.bsplines_):
        assert_allclose(spline_list.t, spline_array.t)
@pytest.mark.parametrize("extrapolation", ["continue", "periodic"])
def test_spline_transformer_integer_knots(extrapolation):
    """Integer-valued knot positions must be accepted without error."""
    X = np.arange(20).reshape(10, 2)
    integer_knots = [[0, 1], [1, 2], [5, 5], [11, 10], [12, 11]]
    transformer = SplineTransformer(
        degree=3, knots=integer_knots, extrapolation=extrapolation
    )
    transformer.fit_transform(X)
def test_spline_transformer_feature_names():
"""Test that SplineTransformer generates correct features name."""
X = np.arange(20).reshape(10, 2)
splt = SplineTransformer(n_knots=3, degree=3, include_bias=True).fit(X)
feature_names = splt.get_feature_names()
assert_array_equal(
feature_names,
[
"x0_sp_0",
"x0_sp_1",
"x0_sp_2",
"x0_sp_3",
"x0_sp_4",
"x1_sp_0",
"x1_sp_1",
"x1_sp_2",
"x1_sp_3",
"x1_sp_4",
],
)
splt = SplineTransformer(n_knots=3, degree=3, include_bias=False).fit(X)
feature_names = splt.get_feature_names(["a", "b"])
assert_array_equal(
feature_names,
[
"a_sp_0",
"a_sp_1",
"a_sp_2",
"a_sp_3",
"b_sp_0",
"b_sp_1",
"b_sp_2",
"b_sp_3",
],
)
@pytest.mark.parametrize("degree", range(1, 5))
@pytest.mark.parametrize("n_knots", range(3, 5))
@pytest.mark.parametrize("knots", ["uniform", "quantile"])
@pytest.mark.parametrize("extrapolation", ["constant", "periodic"])
def test_spline_transformer_unity_decomposition(degree, n_knots, knots, extrapolation):
"""Test that B-splines are indeed a decomposition of unity.
Splines basis functions must sum up to 1 per row, if we stay in between
boundaries.
"""
X = np.linspace(0, 1, 100)[:, None]
# make the boundaries 0 and 1 part of X_train, for sure.
X_train = np.r_[[[0]], X[::2, :], [[1]]]
X_test = X[1::2, :]
if extrapolation == "periodic":
n_knots = n_knots + degree # periodic splines require degree < n_knots
splt = SplineTransformer(
n_knots=n_knots,
degree=degree,
knots=knots,
include_bias=True,
extrapolation=extrapolation,
)
splt.fit(X_train)
for X in [X_train, X_test]:
assert_allclose(np.sum(splt.transform(X), axis=1), 1)
@pytest.mark.parametrize(["bias", "intercept"], [(True, False), (False, True)])
def test_spline_transformer_linear_regression(bias, intercept):
"""Test that B-splines fit a sinusodial curve pretty well."""
X = np.linspace(0, 10, 100)[:, None]
y = np.sin(X[:, 0]) + 2 # +2 to avoid the value 0 in assert_allclose
pipe = Pipeline(
steps=[
(
"spline",
SplineTransformer(
n_knots=15,
degree=3,
include_bias=bias,
extrapolation="constant",
),
),
("ols", LinearRegression(fit_intercept=intercept)),
]
)
pipe.fit(X, y)
assert_allclose(pipe.predict(X), y, rtol=1e-3)
@pytest.mark.parametrize(
"knots, n_knots, degree",
[
("uniform", 5, 3),
("uniform", 12, 8),
(
[[-1.0, 0.0], [0, 1.0], [0.1, 2.0], [0.2, 3.0], [0.3, 4.0], [1, 5.0]],
None,
3,
),
],
)
def test_spline_transformer_periodicity_of_extrapolation(knots, n_knots, degree):
"""Test that the SplineTransformer is periodic for multiple features."""
X_1 = linspace((-1, 0), (1, 5), 10)
X_2 = linspace((1, 5), (3, 10), 10)
splt = SplineTransformer(
knots=knots, n_knots=n_knots, degree=degree, extrapolation="periodic"
)
splt.fit(X_1)
assert_allclose(splt.transform(X_1), splt.transform(X_2))
@pytest.mark.parametrize(["bias", "intercept"], [(True, False), (False, True)])
def test_spline_transformer_periodic_linear_regression(bias, intercept):
"""Test that B-splines fit a periodic curve pretty well."""
# "+ 3" to avoid the value 0 in assert_allclose
def f(x):
return np.sin(2 * np.pi * x) - np.sin(8 * np.pi * x) + 3
X = np.linspace(0, 1, 101)[:, None]
pipe = Pipeline(
steps=[
(
"spline",
SplineTransformer(
n_knots=20,
degree=3,
include_bias=bias,
extrapolation="periodic",
),
),
("ols", LinearRegression(fit_intercept=intercept)),
]
)
pipe.fit(X, f(X[:, 0]))
# Generate larger array to check periodic extrapolation
X_ = np.linspace(-1, 2, 301)[:, None]
predictions = pipe.predict(X_)
assert_allclose(predictions, f(X_[:, 0]), atol=0.01, rtol=0.01)
assert_allclose(predictions[0:100], predictions[100:200], rtol=1e-3)
@pytest.mark.skipif(
    sp_version < parse_version("1.0.0"),
    reason="Periodic extrapolation not yet implemented for BSpline.",
)
def test_spline_transformer_periodic_spline_backport():
    """Test that the backport of extrapolate="periodic" works correctly"""
    X = np.linspace(-2, 3.5, 10)[:, None]
    degree = 2

    # SplineTransformer uses the periodic-extrapolation backport.
    transformer = SplineTransformer(
        degree=degree, extrapolation="periodic", knots=[[-1.0], [0.0], [1.0]]
    )
    Xt = transformer.fit_transform(X)

    # Reference: scipy's native BSpline with extrapolate="periodic".
    coef = np.array([[1.0, 0.0], [0.0, 1.0], [1.0, 0.0], [0.0, 1.0]])
    reference = BSpline(np.arange(-3, 4), coef, degree, "periodic")(X[:, 0])
    assert_allclose(Xt, reference)
def test_spline_transformer_periodic_splines_periodicity():
    """
    Test if shifted knots result in the same transformation up to permutation.
    """
    X = np.linspace(0, 10, 101)[:, None]
    knots_a = [[0.0], [1.0], [3.0], [4.0], [5.0], [8.0]]
    knots_b = [[1.0], [3.0], [4.0], [5.0], [8.0], [9.0]]
    Xt_a = SplineTransformer(
        degree=3, extrapolation="periodic", knots=knots_a
    ).fit_transform(X)
    Xt_b = SplineTransformer(
        degree=3, extrapolation="periodic", knots=knots_b
    ).fit_transform(X)
    # The second knot sequence is the first rotated by one position, so the
    # spline bases agree up to a cyclic permutation of the output columns.
    assert_allclose(Xt_a, Xt_b[:, [4, 0, 1, 2, 3]])
@pytest.mark.parametrize("degree", [3, 5])
def test_spline_transformer_periodic_splines_smoothness(degree):
"""Test that spline transformation is smooth at first / last knot."""
X = np.linspace(-2, 10, 10_000)[:, None]
transformer = SplineTransformer(
degree=degree,
extrapolation="periodic",
knots=[[0.0], [1.0], [3.0], [4.0], [5.0], [8.0]],
)
Xt = transformer.fit_transform(X)
delta = (X.max() - X.min()) / len(X)
tol = 10 * delta
dXt = Xt
# We expect splines of degree `degree` to be (`degree`-1) times
# continuously differentiable. I.e. for d = 0, ..., `degree` - 1 the d-th
# derivative should be continous. This is the case if the (d+1)-th
# numerical derivative is reasonably small (smaller than `tol` in absolute
# value). We thus compute d-th numeric derivatives for d = 1, ..., `degree`
# and compare them to `tol`.
#
# Note that the 0-th derivative is the function itself, such that we are
# also checking its continuity.
for d in range(1, degree + 1):
# Check continuity of the (d-1)-th derivative
diff = np.diff(dXt, axis=0)
assert np.abs(diff).max() < tol
# Compute d-th numeric derivative
dXt = diff / delta
# As degree `degree` splines are not `degree` times continously
# differentiable at the knots, the `degree + 1`-th numeric derivative
# should have spikes at the knots.
diff = np.diff(dXt, axis=0)
assert np.abs(diff).max() > 1
@pytest.mark.parametrize(["bias", "intercept"], [(True, False), (False, True)])
@pytest.mark.parametrize("degree", [1, 2, 3, 4, 5])
def test_spline_transformer_extrapolation(bias, intercept, degree):
"""Test that B-spline extrapolation works correctly."""
# we use a straight line for that
X = np.linspace(-1, 1, 100)[:, None]
y = X.squeeze()
# 'constant'
pipe = Pipeline(
[
[
"spline",
SplineTransformer(
n_knots=4,
degree=degree,
include_bias=bias,
extrapolation="constant",
),
],
["ols", LinearRegression(fit_intercept=intercept)],
]
)
pipe.fit(X, y)
assert_allclose(pipe.predict([[-10], [5]]), [-1, 1])
# 'linear'
pipe = Pipeline(
[
[
"spline",
SplineTransformer(
n_knots=4,
degree=degree,
include_bias=bias,
extrapolation="linear",
),
],
["ols", LinearRegression(fit_intercept=intercept)],
]
)
pipe.fit(X, y)
assert_allclose(pipe.predict([[-10], [5]]), [-10, 5])
# 'error'
splt = SplineTransformer(
n_knots=4, degree=degree, include_bias=bias, extrapolation="error"
)
splt.fit(X)
with pytest.raises(ValueError):
splt.transform([[-10]])
with pytest.raises(ValueError):
splt.transform([[5]])
def test_spline_transformer_kbindiscretizer():
    """Test that a B-spline of degree=0 is equivalent to KBinsDiscretizer."""
    rng = np.random.RandomState(97531)
    X = rng.randn(200).reshape(200, 1)
    n_bins = 5

    # Degree-0 splines with quantile knots are one-hot bin indicators.
    splines = SplineTransformer(
        n_knots=n_bins + 1, degree=0, knots="quantile", include_bias=True
    ).fit_transform(X)
    kbins = KBinsDiscretizer(
        n_bins=n_bins, encode="onehot-dense", strategy="quantile"
    ).fit_transform(X)
    # Exact equality is expected; compare with a very tight tolerance.
    assert_allclose(splines, kbins, rtol=1e-13)
@pytest.mark.parametrize("n_knots", [5, 10])
@pytest.mark.parametrize("include_bias", [True, False])
@pytest.mark.parametrize("degree", [3, 5])
def test_spline_transformer_n_features_out(n_knots, include_bias, degree):
    """Test that transform results in n_features_out_ features."""
    X = np.linspace(0, 1, 10)[:, None]
    transformer = SplineTransformer(
        n_knots=n_knots, degree=degree, include_bias=include_bias
    ).fit(X)
    assert transformer.transform(X).shape[1] == transformer.n_features_out_
@pytest.mark.parametrize(
"params, err_msg",
[
({"degree": -1}, "degree must be a non-negative integer"),
({"degree": 2.5}, "degree must be a non-negative int or tuple"),
({"degree": "12"}, r"degree=\(min_degree, max_degree\) must"),
({"degree": "string"}, "degree must be a non-negative int or tuple"),
({"degree": (-1, 2)}, r"degree=\(min_degree, max_degree\) must"),
({"degree": (0, 1.5)}, r"degree=\(min_degree, max_degree\) must"),
({"degree": (3, 2)}, r"degree=\(min_degree, max_degree\) must"),
],
)
def test_polynomial_features_input_validation(params, err_msg):
"""Test that we raise errors for invalid input in PolynomialFeatures."""
X = [[1], [2]]
with pytest.raises(ValueError, match=err_msg):
PolynomialFeatures(**params).fit(X)
@pytest.fixture()
def single_feature_degree3():
    """Single feature column with its full degree-3 polynomial expansion."""
    X = np.arange(6)[:, np.newaxis]
    # Columns are X**0 (all ones), X, X**2, X**3, in that order.
    P = np.hstack([X ** d for d in range(4)])
    return X, P
@pytest.mark.parametrize(
"degree, include_bias, interaction_only, indices",
[
(3, True, False, slice(None, None)),
(3, False, False, slice(1, None)),
(3, True, True, [0, 1]),
(3, False, True, [1]),
((2, 3), True, False, [0, 2, 3]),
((2, 3), False, False, [2, 3]),
((2, 3), True, True, [0]),
((2, 3), False, True, []),
],
)
@pytest.mark.parametrize(
"sparse_X",
[False, sparse.csr_matrix, sparse.csc_matrix],
)
def test_polynomial_features_one_feature(
single_feature_degree3,
degree,
include_bias,
interaction_only,
indices,
sparse_X,
):
"""Test PolynomialFeatures on single feature up to degree 3."""
X, P = single_feature_degree3
if sparse_X:
X = sparse_X(X)
tf = PolynomialFeatures(
degree=degree, include_bias=include_bias, interaction_only=interaction_only
).fit(X)
out = tf.transform(X)
if sparse_X:
out = out.toarray()
assert_allclose(out, P[:, indices])
if tf.n_output_features_ > 0:
assert tf.powers_.shape == (tf.n_output_features_, tf.n_features_in_)
@pytest.fixture()
def two_features_degree3():
    """Two feature columns with their full degree-3 polynomial expansion."""
    X = np.arange(6).reshape((3, 2))
    x1 = X[:, :1]
    x2 = X[:, 1:]
    # All monomials x1**i * x2**j with i + j <= 3, ordered by total degree
    # and, within each degree, by decreasing power of x1:
    # 1; x1, x2; x1^2, x1*x2, x2^2; x1^3, x1^2*x2, x1*x2^2, x2^3
    columns = [
        x1 ** i * x2 ** (total - i)
        for total in range(4)
        for i in range(total, -1, -1)
    ]
    return X, np.hstack(columns)
@pytest.mark.parametrize(
"degree, include_bias, interaction_only, indices",
[
(2, True, False, slice(0, 6)),
(2, False, False, slice(1, 6)),
(2, True, True, [0, 1, 2, 4]),
(2, False, True, [1, 2, 4]),
((2, 2), True, False, [0, 3, 4, 5]),
((2, 2), False, False, [3, 4, 5]),
((2, 2), True, True, [0, 4]),
((2, 2), False, True, [4]),
(3, True, False, slice(None, None)),
(3, False, False, slice(1, None)),
(3, True, True, [0, 1, 2, 4]),
(3, False, True, [1, 2, 4]),
((2, 3), True, False, [0, 3, 4, 5, 6, 7, 8, 9]),
((2, 3), False, False, slice(3, None)),
((2, 3), True, True, [0, 4]),
((2, 3), False, True, [4]),
((3, 3), True, False, [0, 6, 7, 8, 9]),
((3, 3), False, False, [6, 7, 8, 9]),
((3, 3), True, True, [0]),
((3, 3), False, True, []), # would need 3 input features
],
)
@pytest.mark.parametrize(
"sparse_X",
[False, sparse.csr_matrix, sparse.csc_matrix],
)
def test_polynomial_features_two_features(
two_features_degree3,
degree,
include_bias,
interaction_only,
indices,
sparse_X,
):
"""Test PolynomialFeatures on 2 features up to degree 3."""
X, P = two_features_degree3
if sparse_X:
X = sparse_X(X)
tf = PolynomialFeatures(
degree=degree, include_bias=include_bias, interaction_only=interaction_only
).fit(X)
out = tf.transform(X)
if sparse_X:
out = out.toarray()
assert_allclose(out, P[:, indices])
if tf.n_output_features_ > 0:
assert tf.powers_.shape == (tf.n_output_features_, tf.n_features_in_)
def test_polynomial_feature_names():
X = np.arange(30).reshape(10, 3)
poly = PolynomialFeatures(degree=2, include_bias=True).fit(X)
feature_names = poly.get_feature_names()
assert_array_equal(
["1", "x0", "x1", "x2", "x0^2", "x0 x1", "x0 x2", "x1^2", "x1 x2", "x2^2"],
feature_names,
)
assert len(feature_names) == poly.transform(X).shape[1]
poly = PolynomialFeatures(degree=3, include_bias=False).fit(X)
feature_names = poly.get_feature_names(["a", "b", "c"])
assert_array_equal(
[
"a",
"b",
"c",
"a^2",
"a b",
"a c",
"b^2",
"b c",
"c^2",
"a^3",
"a^2 b",
"a^2 c",
"a b^2",
"a b c",
"a c^2",
"b^3",
"b^2 c",
"b c^2",
"c^3",
],
feature_names,
)
assert len(feature_names) == poly.transform(X).shape[1]
poly = PolynomialFeatures(degree=(2, 3), include_bias=False).fit(X)
feature_names = poly.get_feature_names(["a", "b", "c"])
assert_array_equal(
[
"a^2",
"a b",
"a c",
"b^2",
"b c",
"c^2",
"a^3",
"a^2 b",
"a^2 c",
"a b^2",
"a b c",
"a c^2",
"b^3",
"b^2 c",
"b c^2",
"c^3",
],
feature_names,
)
assert len(feature_names) == poly.transform(X).shape[1]
poly = PolynomialFeatures(
degree=(3, 3), include_bias=True, interaction_only=True
).fit(X)
feature_names = poly.get_feature_names(["a", "b", "c"])
assert_array_equal(["1", "a b c"], feature_names)
assert len(feature_names) == poly.transform(X).shape[1]
# test some unicode
poly = PolynomialFeatures(degree=1, include_bias=True).fit(X)
feature_names = poly.get_feature_names(["\u0001F40D", "\u262E", "\u05D0"])
assert_array_equal(["1", "\u0001F40D", "\u262E", "\u05D0"], feature_names)
@pytest.mark.parametrize(
["deg", "include_bias", "interaction_only", "dtype"],
[
(1, True, False, int),
(2, True, False, int),
(2, True, False, np.float32),
(2, True, False, np.float64),
(3, False, False, np.float64),
(3, False, True, np.float64),
(4, False, False, np.float64),
(4, False, True, np.float64),
],
)
def test_polynomial_features_csc_X(deg, include_bias, interaction_only, dtype):
rng = np.random.RandomState(0)
X = rng.randint(0, 2, (100, 2))
X_csc = sparse.csc_matrix(X)
est = PolynomialFeatures(
deg, include_bias=include_bias, interaction_only=interaction_only
)
Xt_csc = est.fit_transform(X_csc.astype(dtype))
Xt_dense = est.fit_transform(X.astype(dtype))
assert isinstance(Xt_csc, sparse.csc_matrix)
assert Xt_csc.dtype == Xt_dense.dtype
assert_array_almost_equal(Xt_csc.A, Xt_dense)
@pytest.mark.parametrize(
["deg", "include_bias", "interaction_only", "dtype"],
[
(1, True, False, int),
(2, True, False, int),
(2, True, False, np.float32),
(2, True, False, np.float64),
(3, False, False, np.float64),
(3, False, True, np.float64),
],
)
def test_polynomial_features_csr_X(deg, include_bias, interaction_only, dtype):
rng = np.random.RandomState(0)
X = rng.randint(0, 2, (100, 2))
X_csr = sparse.csr_matrix(X)
est = PolynomialFeatures(
deg, include_bias=include_bias, interaction_only=interaction_only
)
Xt_csr = est.fit_transform(X_csr.astype(dtype))
Xt_dense = est.fit_transform(X.astype(dtype, copy=False))
assert isinstance(Xt_csr, sparse.csr_matrix)
assert Xt_csr.dtype == Xt_dense.dtype
assert_array_almost_equal(Xt_csr.A, Xt_dense)
@pytest.mark.parametrize("n_features", [1, 4, 5])
@pytest.mark.parametrize(
    "min_degree, max_degree", [(0, 1), (0, 2), (1, 3), (0, 4), (3, 4)]
)
@pytest.mark.parametrize("interaction_only", [True, False])
@pytest.mark.parametrize("include_bias", [True, False])
def test_num_combinations(
    n_features,
    min_degree,
    max_degree,
    interaction_only,
    include_bias,
):
    """
    Test that n_output_features_ is calculated correctly.
    """
    # Sparse input with a single nonzero entry in the last column, so the
    # estimator sees exactly n_features input features.
    x = sparse.csr_matrix(([1], ([0], [n_features - 1])))
    est = PolynomialFeatures(
        degree=max_degree,
        interaction_only=interaction_only,
        include_bias=include_bias,
    ).fit(x)
    combinations = PolynomialFeatures._combinations(
        n_features=n_features,
        min_degree=0,
        max_degree=max_degree,
        interaction_only=interaction_only,
        include_bias=include_bias,
    )
    assert est.n_output_features_ == sum(1 for _ in combinations)
@pytest.mark.parametrize(
["deg", "include_bias", "interaction_only", "dtype"],
[
(2, True, False, np.float32),
(2, True, False, np.float64),
(3, False, False, np.float64),
(3, False, True, np.float64),
],
)
def test_polynomial_features_csr_X_floats(deg, include_bias, interaction_only, dtype):
X_csr = sparse_random(1000, 10, 0.5, random_state=0).tocsr()
X = X_csr.toarray()
est = PolynomialFeatures(
deg, include_bias=include_bias, interaction_only=interaction_only
)
Xt_csr = est.fit_transform(X_csr.astype(dtype))
Xt_dense = est.fit_transform(X.astype(dtype))
assert isinstance(Xt_csr, sparse.csr_matrix)
assert Xt_csr.dtype == Xt_dense.dtype
assert_array_almost_equal(Xt_csr.A, Xt_dense)
@pytest.mark.parametrize(
["zero_row_index", "deg", "interaction_only"],
[
(0, 2, True),
(1, 2, True),
(2, 2, True),
(0, 3, True),
(1, 3, True),
(2, 3, True),
(0, 2, False),
(1, 2, False),
(2, 2, False),
(0, 3, False),
(1, 3, False),
(2, 3, False),
],
)
def test_polynomial_features_csr_X_zero_row(zero_row_index, deg, interaction_only):
X_csr = sparse_random(3, 10, 1.0, random_state=0).tocsr()
X_csr[zero_row_index, :] = 0.0
X = X_csr.toarray()
est = PolynomialFeatures(deg, include_bias=False, interaction_only=interaction_only)
Xt_csr = est.fit_transform(X_csr)
Xt_dense = est.fit_transform(X)
assert isinstance(Xt_csr, sparse.csr_matrix)
assert Xt_csr.dtype == Xt_dense.dtype
assert_array_almost_equal(Xt_csr.A, Xt_dense)
# This degree should always be one more than the highest degree supported by
# _csr_expansion.
@pytest.mark.parametrize(
["include_bias", "interaction_only"],
[(True, True), (True, False), (False, True), (False, False)],
)
def test_polynomial_features_csr_X_degree_4(include_bias, interaction_only):
X_csr = sparse_random(1000, 10, 0.5, random_state=0).tocsr()
X = X_csr.toarray()
est = PolynomialFeatures(
4, include_bias=include_bias, interaction_only=interaction_only
)
Xt_csr = est.fit_transform(X_csr)
Xt_dense = est.fit_transform(X)
assert isinstance(Xt_csr, sparse.csr_matrix)
assert Xt_csr.dtype == Xt_dense.dtype
assert_array_almost_equal(Xt_csr.A, Xt_dense)
@pytest.mark.parametrize(
["deg", "dim", "interaction_only"],
[
(2, 1, True),
(2, 2, True),
(3, 1, True),
(3, 2, True),
(3, 3, True),
(2, 1, False),
(2, 2, False),
(3, 1, False),
(3, 2, False),
(3, 3, False),
],
)
def test_polynomial_features_csr_X_dim_edges(deg, dim, interaction_only):
X_csr = sparse_random(1000, dim, 0.5, random_state=0).tocsr()
X = X_csr.toarray()
est = PolynomialFeatures(deg, interaction_only=interaction_only)
Xt_csr = est.fit_transform(X_csr)
Xt_dense = est.fit_transform(X)
assert isinstance(Xt_csr, sparse.csr_matrix)
assert Xt_csr.dtype == Xt_dense.dtype
assert_array_almost_equal(Xt_csr.A, Xt_dense)
def test_polynomial_features_deprecated_n_input_features():
    # Accessing `n_input_features_` must emit a FutureWarning.
    # FIXME: remove in 1.2
    depr_msg = (
        "The attribute `n_input_features_` was deprecated in version "
        "1.0 and will be removed in 1.2."
    )
    X = np.arange(10).reshape(5, 2)
    poly = PolynomialFeatures().fit(X)
    with pytest.warns(FutureWarning, match=depr_msg):
        poly.n_input_features_
| 31.948718 | 88 | 0.583212 | import numpy as np
import pytest
from scipy import sparse
from scipy.sparse import random as sparse_random
from sklearn.utils._testing import assert_array_almost_equal
from numpy.testing import assert_allclose, assert_array_equal
from scipy.interpolate import BSpline
from sklearn.linear_model import LinearRegression
from sklearn.pipeline import Pipeline
from sklearn.preprocessing import (
KBinsDiscretizer,
PolynomialFeatures,
SplineTransformer,
)
from sklearn.utils.fixes import linspace, sp_version, parse_version
@pytest.mark.parametrize("est", (PolynomialFeatures, SplineTransformer))
def test_polynomial_and_spline_array_order(est):
X = np.arange(10).reshape(5, 2)
def is_c_contiguous(a):
return np.isfortran(a.T)
assert is_c_contiguous(est().fit_transform(X))
assert is_c_contiguous(est(order="C").fit_transform(X))
assert np.isfortran(est(order="F").fit_transform(X))
@pytest.mark.parametrize(
"params, err_msg",
[
({"degree": -1}, "degree must be a non-negative integer"),
({"degree": 2.5}, "degree must be a non-negative integer"),
({"degree": "string"}, "degree must be a non-negative integer"),
({"n_knots": 1}, "n_knots must be a positive integer >= 2."),
({"n_knots": 1}, "n_knots must be a positive integer >= 2."),
({"n_knots": 2.5}, "n_knots must be a positive integer >= 2."),
({"n_knots": "string"}, "n_knots must be a positive integer >= 2."),
({"knots": 1}, "Expected 2D array, got scalar array instead:"),
({"knots": [1, 2]}, "Expected 2D array, got 1D array instead:"),
(
{"knots": [[1]]},
r"Number of knots, knots.shape\[0\], must be >= 2.",
),
(
{"knots": [[1, 5], [2, 6]]},
r"knots.shape\[1\] == n_features is violated.",
),
(
{"knots": [[1], [1], [2]]},
"knots must be sorted without duplicates.",
),
({"knots": [[2], [1]]}, "knots must be sorted without duplicates."),
(
{"extrapolation": None},
"extrapolation must be one of 'error', 'constant', 'linear', "
"'continue' or 'periodic'.",
),
(
{"extrapolation": 1},
"extrapolation must be one of 'error', 'constant', 'linear', "
"'continue' or 'periodic'.",
),
(
{"extrapolation": "string"},
"extrapolation must be one of 'error', 'constant', 'linear', "
"'continue' or 'periodic'.",
),
({"include_bias": None}, "include_bias must be bool."),
({"include_bias": 1}, "include_bias must be bool."),
({"include_bias": "string"}, "include_bias must be bool."),
(
{"extrapolation": "periodic", "n_knots": 3, "degree": 3},
"Periodic splines require degree < n_knots. Got n_knots=3 and degree=3.",
),
(
{"extrapolation": "periodic", "knots": [[0], [1]], "degree": 2},
"Periodic splines require degree < n_knots. Got n_knots=2 and degree=2.",
),
],
)
def test_spline_transformer_input_validation(params, err_msg):
X = [[1], [2]]
with pytest.raises(ValueError, match=err_msg):
SplineTransformer(**params).fit(X)
def test_spline_transformer_manual_knot_input():
X = np.arange(20).reshape(10, 2)
knots = [[0.5, 1], [1.5, 2], [5, 10]]
st1 = SplineTransformer(degree=3, knots=knots, n_knots=None).fit(X)
knots = np.asarray(knots)
st2 = SplineTransformer(degree=3, knots=knots, n_knots=None).fit(X)
for i in range(X.shape[1]):
assert_allclose(st1.bsplines_[i].t, st2.bsplines_[i].t)
@pytest.mark.parametrize("extrapolation", ["continue", "periodic"])
def test_spline_transformer_integer_knots(extrapolation):
X = np.arange(20).reshape(10, 2)
knots = [[0, 1], [1, 2], [5, 5], [11, 10], [12, 11]]
_ = SplineTransformer(
degree=3, knots=knots, extrapolation=extrapolation
).fit_transform(X)
def test_spline_transformer_feature_names():
    """Generated feature names follow the `<feature>_sp_<index>` pattern."""
    X = np.arange(20).reshape(10, 2)

    # With the bias column: n_knots + degree - 1 = 5 splines per feature.
    splt = SplineTransformer(n_knots=3, degree=3, include_bias=True).fit(X)
    expected = [f"x{i}_sp_{j}" for i in range(2) for j in range(5)]
    assert_array_equal(splt.get_feature_names(), expected)

    # Without the bias column one spline per feature is dropped and custom
    # input feature names are used as prefixes.
    splt = SplineTransformer(n_knots=3, degree=3, include_bias=False).fit(X)
    expected = [f"{name}_sp_{j}" for name in ("a", "b") for j in range(4)]
    assert_array_equal(splt.get_feature_names(["a", "b"]), expected)
@pytest.mark.parametrize("degree", range(1, 5))
@pytest.mark.parametrize("n_knots", range(3, 5))
@pytest.mark.parametrize("knots", ["uniform", "quantile"])
@pytest.mark.parametrize("extrapolation", ["constant", "periodic"])
def test_spline_transformer_unity_decomposition(degree, n_knots, knots, extrapolation):
X = np.linspace(0, 1, 100)[:, None]
X_train = np.r_[[[0]], X[::2, :], [[1]]]
X_test = X[1::2, :]
if extrapolation == "periodic":
n_knots = n_knots + degree
splt = SplineTransformer(
n_knots=n_knots,
degree=degree,
knots=knots,
include_bias=True,
extrapolation=extrapolation,
)
splt.fit(X_train)
for X in [X_train, X_test]:
assert_allclose(np.sum(splt.transform(X), axis=1), 1)
@pytest.mark.parametrize(["bias", "intercept"], [(True, False), (False, True)])
def test_spline_transformer_linear_regression(bias, intercept):
X = np.linspace(0, 10, 100)[:, None]
y = np.sin(X[:, 0]) + 2
pipe = Pipeline(
steps=[
(
"spline",
SplineTransformer(
n_knots=15,
degree=3,
include_bias=bias,
extrapolation="constant",
),
),
("ols", LinearRegression(fit_intercept=intercept)),
]
)
pipe.fit(X, y)
assert_allclose(pipe.predict(X), y, rtol=1e-3)
@pytest.mark.parametrize(
    "knots, n_knots, degree",
    [
        ("uniform", 5, 3),
        ("uniform", 12, 8),
        (
            [[-1.0, 0.0], [0, 1.0], [0.1, 2.0], [0.2, 3.0], [0.3, 4.0], [1, 5.0]],
            None,
            3,
        ),
    ],
)
def test_spline_transformer_periodicity_of_extrapolation(knots, n_knots, degree):
    """With periodic extrapolation, shifting X by one full period per feature
    must leave the transformation unchanged."""
    X_fit = linspace((-1, 0), (1, 5), 10)
    X_one_period_later = linspace((1, 5), (3, 10), 10)
    splt = SplineTransformer(
        knots=knots, n_knots=n_knots, degree=degree, extrapolation="periodic"
    ).fit(X_fit)
    assert_allclose(splt.transform(X_fit), splt.transform(X_one_period_later))
@pytest.mark.parametrize(["bias", "intercept"], [(True, False), (False, True)])
def test_spline_transformer_periodic_linear_regression(bias, intercept):
def f(x):
return np.sin(2 * np.pi * x) - np.sin(8 * np.pi * x) + 3
X = np.linspace(0, 1, 101)[:, None]
pipe = Pipeline(
steps=[
(
"spline",
SplineTransformer(
n_knots=20,
degree=3,
include_bias=bias,
extrapolation="periodic",
),
),
("ols", LinearRegression(fit_intercept=intercept)),
]
)
pipe.fit(X, f(X[:, 0]))
X_ = np.linspace(-1, 2, 301)[:, None]
predictions = pipe.predict(X_)
assert_allclose(predictions, f(X_[:, 0]), atol=0.01, rtol=0.01)
assert_allclose(predictions[0:100], predictions[100:200], rtol=1e-3)
@pytest.mark.skipif(
    sp_version < parse_version("1.0.0"),
    reason="Periodic extrapolation not yet implemented for BSpline.",
)
def test_spline_transformer_periodic_spline_backport():
    """The periodic-extrapolation backport must match scipy's own
    `BSpline(..., extrapolate="periodic")`."""
    X = np.linspace(-2, 3.5, 10)[:, None]
    degree = 2

    transformer = SplineTransformer(
        degree=degree, extrapolation="periodic", knots=[[-1.0], [0.0], [1.0]]
    )
    Xt = transformer.fit_transform(X)

    # Reference result computed directly with scipy's periodic BSpline.
    coef = np.array([[1.0, 0.0], [0.0, 1.0], [1.0, 0.0], [0.0, 1.0]])
    reference_spline = BSpline(np.arange(-3, 4), coef, degree, "periodic")
    assert_allclose(Xt, reference_spline(X[:, 0]))
def test_spline_transformer_periodic_splines_periodicity():
    """Shifting the knot sequence by one knot only permutes the output
    columns of a periodic spline basis."""
    X = np.linspace(0, 10, 101)[:, None]
    base_knots = [[0.0], [1.0], [3.0], [4.0], [5.0], [8.0]]
    shifted_knots = [[1.0], [3.0], [4.0], [5.0], [8.0], [9.0]]

    Xt_base = SplineTransformer(
        degree=3, extrapolation="periodic", knots=base_knots
    ).fit_transform(X)
    Xt_shifted = SplineTransformer(
        degree=3, extrapolation="periodic", knots=shifted_knots
    ).fit_transform(X)

    # The shifted basis is the base basis with its columns rotated by one.
    assert_allclose(Xt_base, Xt_shifted[:, [4, 0, 1, 2, 3]])
@pytest.mark.parametrize("degree", [3, 5])
def test_spline_transformer_periodic_splines_smoothness(degree):
X = np.linspace(-2, 10, 10_000)[:, None]
transformer = SplineTransformer(
degree=degree,
extrapolation="periodic",
knots=[[0.0], [1.0], [3.0], [4.0], [5.0], [8.0]],
)
Xt = transformer.fit_transform(X)
delta = (X.max() - X.min()) / len(X)
tol = 10 * delta
dXt = Xt
for d in range(1, degree + 1):
diff = np.diff(dXt, axis=0)
assert np.abs(diff).max() < tol
dXt = diff / delta
diff = np.diff(dXt, axis=0)
assert np.abs(diff).max() > 1
@pytest.mark.parametrize(["bias", "intercept"], [(True, False), (False, True)])
@pytest.mark.parametrize("degree", [1, 2, 3, 4, 5])
def test_spline_transformer_extrapolation(bias, intercept, degree):
X = np.linspace(-1, 1, 100)[:, None]
y = X.squeeze()
pipe = Pipeline(
[
[
"spline",
SplineTransformer(
n_knots=4,
degree=degree,
include_bias=bias,
extrapolation="constant",
),
],
["ols", LinearRegression(fit_intercept=intercept)],
]
)
pipe.fit(X, y)
assert_allclose(pipe.predict([[-10], [5]]), [-1, 1])
pipe = Pipeline(
[
[
"spline",
SplineTransformer(
n_knots=4,
degree=degree,
include_bias=bias,
extrapolation="linear",
),
],
["ols", LinearRegression(fit_intercept=intercept)],
]
)
pipe.fit(X, y)
assert_allclose(pipe.predict([[-10], [5]]), [-10, 5])
splt = SplineTransformer(
n_knots=4, degree=degree, include_bias=bias, extrapolation="error"
)
splt.fit(X)
with pytest.raises(ValueError):
splt.transform([[-10]])
with pytest.raises(ValueError):
splt.transform([[5]])
def test_spline_transformer_kbindiscretizer():
    """Degree-0 quantile splines are one-hot bin indicators and must match
    KBinsDiscretizer with quantile strategy."""
    rng = np.random.RandomState(97531)
    X = rng.randn(200).reshape(200, 1)
    n_bins = 5

    splines = SplineTransformer(
        n_knots=n_bins + 1, degree=0, knots="quantile", include_bias=True
    ).fit_transform(X)

    kbins = KBinsDiscretizer(
        n_bins=n_bins, encode="onehot-dense", strategy="quantile"
    ).fit_transform(X)

    # Mathematically identical; compare with a very tight tolerance.
    assert_allclose(splines, kbins, rtol=1e-13)
@pytest.mark.parametrize("n_knots", [5, 10])
@pytest.mark.parametrize("include_bias", [True, False])
@pytest.mark.parametrize("degree", [3, 5])
def test_spline_transformer_n_features_out(n_knots, include_bias, degree):
splt = SplineTransformer(n_knots=n_knots, degree=degree, include_bias=include_bias)
X = np.linspace(0, 1, 10)[:, None]
splt.fit(X)
assert splt.transform(X).shape[1] == splt.n_features_out_
@pytest.mark.parametrize(
    "params, err_msg",
    [
        ({"degree": -1}, "degree must be a non-negative integer"),
        ({"degree": 2.5}, "degree must be a non-negative int or tuple"),
        ({"degree": "12"}, r"degree=\(min_degree, max_degree\) must"),
        ({"degree": "string"}, "degree must be a non-negative int or tuple"),
        ({"degree": (-1, 2)}, r"degree=\(min_degree, max_degree\) must"),
        ({"degree": (0, 1.5)}, r"degree=\(min_degree, max_degree\) must"),
        ({"degree": (3, 2)}, r"degree=\(min_degree, max_degree\) must"),
    ],
)
def test_polynomial_features_input_validation(params, err_msg):
    """Each invalid parametrization must raise ValueError with its message."""
    poly = PolynomialFeatures(**params)
    with pytest.raises(ValueError, match=err_msg):
        poly.fit([[1], [2]])
@pytest.fixture()
def single_feature_degree3():
    """Single feature with all monomials up to degree 3 as reference."""
    X = np.arange(6)[:, np.newaxis]
    # Columns: 1, x, x^2, x^3
    P = np.hstack([X ** d for d in range(4)])
    return X, P
@pytest.mark.parametrize(
    "degree, include_bias, interaction_only, indices",
    [
        (3, True, False, slice(None, None)),
        (3, False, False, slice(1, None)),
        (3, True, True, [0, 1]),
        (3, False, True, [1]),
        ((2, 3), True, False, [0, 2, 3]),
        ((2, 3), False, False, [2, 3]),
        ((2, 3), True, True, [0]),
        ((2, 3), False, True, []),
    ],
)
@pytest.mark.parametrize(
    "sparse_X",
    [False, sparse.csr_matrix, sparse.csc_matrix],
)
def test_polynomial_features_one_feature(
    single_feature_degree3,
    degree,
    include_bias,
    interaction_only,
    indices,
    sparse_X,
):
    """The selected columns of the degree-3 reference must be reproduced."""
    X, P = single_feature_degree3
    if sparse_X:
        X = sparse_X(X)
    tf = PolynomialFeatures(
        degree=degree,
        include_bias=include_bias,
        interaction_only=interaction_only,
    )
    out = tf.fit_transform(X)
    if sparse_X:
        out = out.toarray()
    assert_allclose(out, P[:, indices])
    if tf.n_output_features_ > 0:
        assert tf.powers_.shape == (tf.n_output_features_, tf.n_features_in_)
@pytest.fixture()
def two_features_degree3():
    """Two features with all monomials up to total degree 3 as reference."""
    X = np.arange(6).reshape((3, 2))
    x1 = X[:, :1]
    x2 = X[:, 1:]
    # All monomials x1^i * x2^j with i + j <= 3, ordered by total degree d,
    # then by decreasing power of x1:
    # (0,0), (1,0), (0,1), (2,0), (1,1), (0,2), (3,0), (2,1), (1,2), (0,3)
    columns = [x1 ** i * x2 ** (d - i) for d in range(4) for i in range(d, -1, -1)]
    P = np.hstack(columns)
    return X, P
@pytest.mark.parametrize(
    "degree, include_bias, interaction_only, indices",
    [
        (2, True, False, slice(0, 6)),
        (2, False, False, slice(1, 6)),
        (2, True, True, [0, 1, 2, 4]),
        (2, False, True, [1, 2, 4]),
        ((2, 2), True, False, [0, 3, 4, 5]),
        ((2, 2), False, False, [3, 4, 5]),
        ((2, 2), True, True, [0, 4]),
        ((2, 2), False, True, [4]),
        (3, True, False, slice(None, None)),
        (3, False, False, slice(1, None)),
        (3, True, True, [0, 1, 2, 4]),
        (3, False, True, [1, 2, 4]),
        ((2, 3), True, False, [0, 3, 4, 5, 6, 7, 8, 9]),
        ((2, 3), False, False, slice(3, None)),
        ((2, 3), True, True, [0, 4]),
        ((2, 3), False, True, [4]),
        ((3, 3), True, False, [0, 6, 7, 8, 9]),
        ((3, 3), False, False, [6, 7, 8, 9]),
        ((3, 3), True, True, [0]),
        ((3, 3), False, True, []),
    ],
)
@pytest.mark.parametrize(
    "sparse_X",
    [False, sparse.csr_matrix, sparse.csc_matrix],
)
def test_polynomial_features_two_features(
    two_features_degree3,
    degree,
    include_bias,
    interaction_only,
    indices,
    sparse_X,
):
    """The selected columns of the degree-3 reference must be reproduced."""
    X, P = two_features_degree3
    if sparse_X:
        X = sparse_X(X)
    tf = PolynomialFeatures(
        degree=degree,
        include_bias=include_bias,
        interaction_only=interaction_only,
    )
    out = tf.fit_transform(X)
    if sparse_X:
        out = out.toarray()
    assert_allclose(out, P[:, indices])
    if tf.n_output_features_ > 0:
        assert tf.powers_.shape == (tf.n_output_features_, tf.n_features_in_)
def test_polynomial_feature_names():
    """Check the generated feature names for several configurations."""
    X = np.arange(30).reshape(10, 3)
    poly = PolynomialFeatures(degree=2, include_bias=True).fit(X)
    feature_names = poly.get_feature_names()
    assert_array_equal(
        ["1", "x0", "x1", "x2", "x0^2", "x0 x1", "x0 x2", "x1^2", "x1 x2", "x2^2"],
        feature_names,
    )
    assert len(feature_names) == poly.transform(X).shape[1]
    poly = PolynomialFeatures(degree=3, include_bias=False).fit(X)
    feature_names = poly.get_feature_names(["a", "b", "c"])
    assert_array_equal(
        [
            "a",
            "b",
            "c",
            "a^2",
            "a b",
            "a c",
            "b^2",
            "b c",
            "c^2",
            "a^3",
            "a^2 b",
            "a^2 c",
            "a b^2",
            "a b c",
            "a c^2",
            "b^3",
            "b^2 c",
            "b c^2",
            "c^3",
        ],
        feature_names,
    )
    assert len(feature_names) == poly.transform(X).shape[1]
    poly = PolynomialFeatures(degree=(2, 3), include_bias=False).fit(X)
    feature_names = poly.get_feature_names(["a", "b", "c"])
    assert_array_equal(
        [
            "a^2",
            "a b",
            "a c",
            "b^2",
            "b c",
            "c^2",
            "a^3",
            "a^2 b",
            "a^2 c",
            "a b^2",
            "a b c",
            "a c^2",
            "b^3",
            "b^2 c",
            "b c^2",
            "c^3",
        ],
        feature_names,
    )
    assert len(feature_names) == poly.transform(X).shape[1]
    poly = PolynomialFeatures(
        degree=(3, 3), include_bias=True, interaction_only=True
    ).fit(X)
    feature_names = poly.get_feature_names(["a", "b", "c"])
    assert_array_equal(["1", "a b c"], feature_names)
    assert len(feature_names) == poly.transform(X).shape[1]
    # test some unicode
    # BUG FIX: the original "\u0001F40D" was U+0001 followed by the literal
    # text "F40D" because a \u escape takes exactly 4 hex digits; the snake
    # emoji U+1F40D needs the 8-digit \U escape.
    poly = PolynomialFeatures(degree=1, include_bias=True).fit(X)
    feature_names = poly.get_feature_names(["\U0001F40D", "\u262E", "\u05D0"])
    assert_array_equal(["1", "\U0001F40D", "\u262E", "\u05D0"], feature_names)
@pytest.mark.parametrize(
    ["deg", "include_bias", "interaction_only", "dtype"],
    [
        (1, True, False, int),
        (2, True, False, int),
        (2, True, False, np.float32),
        (2, True, False, np.float64),
        (3, False, False, np.float64),
        (3, False, True, np.float64),
        (4, False, False, np.float64),
        (4, False, True, np.float64),
    ],
)
def test_polynomial_features_csc_X(deg, include_bias, interaction_only, dtype):
    """CSC input must give a CSC output equal to the dense result."""
    rng = np.random.RandomState(0)
    X_dense = rng.randint(0, 2, (100, 2))
    X_csc = sparse.csc_matrix(X_dense)

    poly = PolynomialFeatures(
        deg, include_bias=include_bias, interaction_only=interaction_only
    )
    out_sparse = poly.fit_transform(X_csc.astype(dtype))
    out_dense = poly.fit_transform(X_dense.astype(dtype))

    assert isinstance(out_sparse, sparse.csc_matrix)
    assert out_sparse.dtype == out_dense.dtype
    assert_array_almost_equal(out_sparse.A, out_dense)
@pytest.mark.parametrize(
    ["deg", "include_bias", "interaction_only", "dtype"],
    [
        (1, True, False, int),
        (2, True, False, int),
        (2, True, False, np.float32),
        (2, True, False, np.float64),
        (3, False, False, np.float64),
        (3, False, True, np.float64),
    ],
)
def test_polynomial_features_csr_X(deg, include_bias, interaction_only, dtype):
    """CSR input must give a CSR output equal to the dense result."""
    rng = np.random.RandomState(0)
    X_dense = rng.randint(0, 2, (100, 2))
    X_csr = sparse.csr_matrix(X_dense)

    poly = PolynomialFeatures(
        deg, include_bias=include_bias, interaction_only=interaction_only
    )
    out_sparse = poly.fit_transform(X_csr.astype(dtype))
    out_dense = poly.fit_transform(X_dense.astype(dtype, copy=False))

    assert isinstance(out_sparse, sparse.csr_matrix)
    assert out_sparse.dtype == out_dense.dtype
    assert_array_almost_equal(out_sparse.A, out_dense)
@pytest.mark.parametrize("n_features", [1, 4, 5])
@pytest.mark.parametrize(
"min_degree, max_degree", [(0, 1), (0, 2), (1, 3), (0, 4), (3, 4)]
)
@pytest.mark.parametrize("interaction_only", [True, False])
@pytest.mark.parametrize("include_bias", [True, False])
def test_num_combinations(
n_features,
min_degree,
max_degree,
interaction_only,
include_bias,
):
x = sparse.csr_matrix(([1], ([0], [n_features - 1])))
est = PolynomialFeatures(
degree=max_degree,
interaction_only=interaction_only,
include_bias=include_bias,
)
est.fit(x)
num_combos = est.n_output_features_
combos = PolynomialFeatures._combinations(
n_features=n_features,
min_degree=0,
max_degree=max_degree,
interaction_only=interaction_only,
include_bias=include_bias,
)
assert num_combos == sum([1 for _ in combos])
@pytest.mark.parametrize(
    ["deg", "include_bias", "interaction_only", "dtype"],
    [
        (2, True, False, np.float32),
        (2, True, False, np.float64),
        (3, False, False, np.float64),
        (3, False, True, np.float64),
    ],
)
def test_polynomial_features_csr_X_floats(deg, include_bias, interaction_only, dtype):
    """Random float CSR input must match the dense result."""
    X_csr = sparse_random(1000, 10, 0.5, random_state=0).tocsr()
    X_dense = X_csr.toarray()

    poly = PolynomialFeatures(
        deg, include_bias=include_bias, interaction_only=interaction_only
    )
    out_sparse = poly.fit_transform(X_csr.astype(dtype))
    out_dense = poly.fit_transform(X_dense.astype(dtype))

    assert isinstance(out_sparse, sparse.csr_matrix)
    assert out_sparse.dtype == out_dense.dtype
    assert_array_almost_equal(out_sparse.A, out_dense)
@pytest.mark.parametrize(
    ["zero_row_index", "deg", "interaction_only"],
    [
        (0, 2, True),
        (1, 2, True),
        (2, 2, True),
        (0, 3, True),
        (1, 3, True),
        (2, 3, True),
        (0, 2, False),
        (1, 2, False),
        (2, 2, False),
        (0, 3, False),
        (1, 3, False),
        (2, 3, False),
    ],
)
def test_polynomial_features_csr_X_zero_row(zero_row_index, deg, interaction_only):
    """CSR input with an all-zero row must still match the dense result."""
    X_csr = sparse_random(3, 10, 1.0, random_state=0).tocsr()
    X_csr[zero_row_index, :] = 0.0
    X_dense = X_csr.toarray()

    poly = PolynomialFeatures(deg, include_bias=False, interaction_only=interaction_only)
    out_sparse = poly.fit_transform(X_csr)
    out_dense = poly.fit_transform(X_dense)

    assert isinstance(out_sparse, sparse.csr_matrix)
    assert out_sparse.dtype == out_dense.dtype
    assert_array_almost_equal(out_sparse.A, out_dense)
@pytest.mark.parametrize(
    ["include_bias", "interaction_only"],
    [(True, True), (True, False), (False, True), (False, False)],
)
def test_polynomial_features_csr_X_degree_4(include_bias, interaction_only):
    """Degree-4 expansion of CSR input must match the dense result."""
    X_csr = sparse_random(1000, 10, 0.5, random_state=0).tocsr()
    X_dense = X_csr.toarray()

    poly = PolynomialFeatures(
        4, include_bias=include_bias, interaction_only=interaction_only
    )
    out_sparse = poly.fit_transform(X_csr)
    out_dense = poly.fit_transform(X_dense)

    assert isinstance(out_sparse, sparse.csr_matrix)
    assert out_sparse.dtype == out_dense.dtype
    assert_array_almost_equal(out_sparse.A, out_dense)
@pytest.mark.parametrize(
    ["deg", "dim", "interaction_only"],
    [
        (2, 1, True),
        (2, 2, True),
        (3, 1, True),
        (3, 2, True),
        (3, 3, True),
        (2, 1, False),
        (2, 2, False),
        (3, 1, False),
        (3, 2, False),
        (3, 3, False),
    ],
)
def test_polynomial_features_csr_X_dim_edges(deg, dim, interaction_only):
    """Edge cases where degree >= n_features must match the dense result."""
    X_csr = sparse_random(1000, dim, 0.5, random_state=0).tocsr()
    X_dense = X_csr.toarray()

    poly = PolynomialFeatures(deg, interaction_only=interaction_only)
    out_sparse = poly.fit_transform(X_csr)
    out_dense = poly.fit_transform(X_dense)

    assert isinstance(out_sparse, sparse.csr_matrix)
    assert out_sparse.dtype == out_dense.dtype
    assert_array_almost_equal(out_sparse.A, out_dense)
def test_polynomial_features_deprecated_n_input_features():
    """Accessing `n_input_features_` must emit the deprecation warning."""
    depr_msg = (
        "The attribute `n_input_features_` was deprecated in version "
        "1.0 and will be removed in 1.2."
    )
    fitted = PolynomialFeatures().fit(np.arange(10).reshape(5, 2))
    with pytest.warns(FutureWarning, match=depr_msg):
        fitted.n_input_features_
| true | true |
f71bce762094aac77e775115a4361cc778bec8f7 | 22,413 | py | Python | tests/test_iso_parsing.py | mtauban/OWSLib | 0b64e7a8f7eb9e1fca369716f9803821066bf0f3 | [
"BSD-3-Clause"
] | null | null | null | tests/test_iso_parsing.py | mtauban/OWSLib | 0b64e7a8f7eb9e1fca369716f9803821066bf0f3 | [
"BSD-3-Clause"
] | null | null | null | tests/test_iso_parsing.py | mtauban/OWSLib | 0b64e7a8f7eb9e1fca369716f9803821066bf0f3 | [
"BSD-3-Clause"
] | null | null | null | # -*- coding: utf-8 -*-
import io
from owslib import util
from owslib.etree import etree
from owslib.iso import (
MD_Metadata,
)
from owslib.namespaces import Namespaces
def get_md_resource(file_path):
    """Read the file and parse into an XML tree.
    Parameters
    ----------
    file_path : str
        Path of the file to read.
    Returns
    -------
    etree.ElementTree
        XML tree of the gmd:MD_Metadata or gmi:MI_Metadata element found in
        the resource on disk, or None if neither is present.
    """
    namespaces = Namespaces().get_namespaces(keys=('gmd', 'gmi'))
    with io.open(file_path, mode='r', encoding='utf-8') as f:
        data = f.read().encode('utf-8')
    data = etree.fromstring(data)
    # Use explicit `is None` checks instead of `find(...) or find(...)`:
    # XML Element objects are falsy when they have no child elements, so the
    # `or` chain could wrongly fall through on a matching but childless
    # element (and Element truth-testing is deprecated in ElementTree).
    mdelem = data.find('.//' + util.nspath_eval('gmd:MD_Metadata', namespaces))
    if mdelem is None:
        mdelem = data.find(
            './/' + util.nspath_eval('gmi:MI_Metadata', namespaces))
    if mdelem is None and data.tag in ['{http://www.isotc211.org/2005/gmd}MD_Metadata',
                                       '{http://www.isotc211.org/2005/gmi}MI_Metadata']:
        mdelem = data
    return mdelem
def assert_list(var, length):
    """Check that the variable is a list of exactly the expected size.
    Parameters
    ----------
    var : variable
        Variable to test (i.e. should be a list).
    length : int
        The length/size of the list.
    """
    # Exact type check on purpose: list subclasses should not pass.
    is_plain_list = type(var) is list
    assert is_plain_list
    assert len(var) == length
def test_md_parsing_dov():
    """Test the parsing of a metadatarecord from DOV
    GetRecordById response available in
    tests/resources/csw_dov_getrecordbyid.xml
    """
    md_resource = get_md_resource('tests/resources/csw_dov_getrecordbyid.xml')
    md = MD_Metadata(md_resource)
    assert type(md) is MD_Metadata
    # Metadata header fields
    assert md.identifier == '6c39d716-aecc-4fbc-bac8-4f05a49a78d5'
    assert md.dataseturi is None
    assert md.parentidentifier is None
    assert md.language is None
    assert md.languagecode == 'dut'
    assert md.charset == 'utf8'
    assert md.datestamp == '2018-02-21T16:14:24'
    assert md.hierarchy == 'dataset'
    # Metadata-level contact
    assert_list(md.contact, 1)
    contact = md.contact[0]
    assert contact.organization == 'Vlaamse overheid - Vlaamse ' \
                                   'MilieuMaatschappij - Afdeling ' \
                                   'Operationeel Waterbeheer'
    assert contact.address == 'Koning Albert II-laan 20 bus 16'
    assert contact.city == 'Brussel'
    assert contact.postcode == '1000'
    assert contact.country == u'België'
    assert contact.email == 'info@vmm.be'
    assert contact.onlineresource.url == 'https://www.vmm.be'
    assert contact.role == 'pointOfContact'
    assert md.stdname == 'ISO 19115/2003/Cor.1:2006'
    assert md.stdver == 'GDI-Vlaanderen Best Practices - versie 1.0'
    assert md.referencesystem.code == '31370'
    assert md.referencesystem.codeSpace == 'EPSG'
    # Identification info
    assert_list(md.identificationinfo, 1)
    iden = md.identificationinfo[0]
    assert iden.title == 'Grondwatermeetnetten'
    assert iden.alternatetitle == 'Grondwatermeetnetten beschikbaar op DOV'
    assert_list(iden.date, 2)
    assert iden.date[0].date == '2002-05-22'
    assert iden.date[0].type == 'creation'
    assert iden.date[1].date == '2002-05-22'
    assert iden.date[1].type == 'publication'
    assert_list(iden.uricode, 1)
    assert iden.uricode[0] == 'A64F073B-9FBE-91DD-36FDE7462BBAFA61'
    assert_list(iden.uricodespace, 1)
    assert iden.uricodespace[0] == 'DOV-be'
    assert_list(iden.uselimitation, 3)
    assert "Zie 'Overige beperkingen'" in iden.uselimitation
    assert "Bij het gebruik van de informatie die DOV aanbiedt, dient steeds " \
           "volgende standaardreferentie gebruikt te worden: Databank " \
           "Ondergrond Vlaanderen - (vermelding van de beheerder en de " \
           "specifieke geraadpleegde gegevens) - Geraadpleegd op dd/mm/jjjj, " \
           "op https://www.dov.vlaanderen.be" in iden.uselimitation
    assert "Volgende aansprakelijkheidsbepalingen gelden: " \
           "https://www.dov.vlaanderen.be/page/disclaimer" in iden.uselimitation
    assert_list(iden.uselimitation_url, 0)
    assert_list(iden.accessconstraints, 1)
    assert iden.accessconstraints[0] == 'otherRestrictions'
    assert_list(iden.classification, 0)
    assert_list(iden.otherconstraints, 1)
    assert iden.otherconstraints[
        0] == "Data beschikbaar voor hergebruik volgens de " \
              "Modellicentie Gratis Hergebruik. Toelichting " \
              "beschikbaar op " \
              "https://www.dov.vlaanderen.be/page/gebruiksvoorwaarden-dov-services"
    assert_list(iden.securityconstraints, 1)
    assert iden.securityconstraints[0] == 'unclassified'
    assert_list(iden.useconstraints, 0)
    assert_list(iden.denominators, 1)
    assert iden.denominators[0] == '10000'
    assert_list(iden.distance, 0)
    assert_list(iden.uom, 0)
    assert_list(iden.resourcelanguage, 0)
    assert_list(iden.resourcelanguagecode, 1)
    assert iden.resourcelanguagecode[0] == 'dut'
    assert_list(iden.creator, 0)
    assert_list(iden.publisher, 0)
    assert_list(iden.contributor, 0)
    assert iden.edition is None
    assert iden.abstract.startswith("In de Databank Ondergrond Vlaanderen "
                                    "zijn verschillende grondwatermeetnetten "
                                    "opgenomen.")
    assert iden.purpose.startswith(
        "Het doel van de meetnetten is inzicht krijgen in de kwaliteit en "
        "kwantiteit van de watervoerende lagen in de ondergrond van "
        "Vlaanderen. Algemeen kan gesteld worden dat de grondwatermeetnetten "
        "een belangrijk beleidsinstrument vormen")
    assert iden.status == 'onGoing'
    # Identification-level contacts
    assert_list(iden.contact, 2)
    assert iden.contact[0].organization == 'Vlaamse overheid - Vlaamse MilieuMaatschappij - Afdeling Operationeel Waterbeheer'
    assert iden.contact[0].address == 'Koning Albert II-laan 20 bus 16'
    assert iden.contact[0].city == 'Brussel'
    assert iden.contact[0].postcode == '1000'
    assert iden.contact[0].country == u'België'
    assert iden.contact[0].email == 'info@vmm.be'
    assert iden.contact[0].onlineresource.url == 'https://www.vmm.be'
    assert iden.contact[0].role == 'pointOfContact'
    assert iden.contact[1].organization == 'Databank Ondergrond Vlaanderen (' \
                                           'DOV)'
    assert iden.contact[1].address == 'Technologiepark Gebouw 905'
    assert iden.contact[1].city == 'Zwijnaarde'
    assert iden.contact[1].postcode == '9052'
    assert iden.contact[1].country == u'België'
    assert iden.contact[1].email == 'dov@vlaanderen.be'
    assert iden.contact[1].onlineresource.url == \
        'https://www.dov.vlaanderen.be'
    assert iden.contact[1].role == 'distributor'
    assert_list(iden.spatialrepresentationtype, 1)
    assert iden.spatialrepresentationtype[0] == 'vector'
    # Keywords (deprecated dict-based attribute)
    assert_list(iden.keywords, 5)
    assert type(iden.keywords[0]) is dict
    assert iden.keywords[0]['type'] == ''
    assert iden.keywords[0]['thesaurus']['title'] == "GEMET - INSPIRE thema's, versie 1.0"
    assert iden.keywords[0]['thesaurus']['date'] == '2008-06-01'
    assert iden.keywords[0]['thesaurus']['datetype'] == 'publication'
    assert_list(iden.keywords[0]['keywords'], 1)
    assert iden.keywords[0]['keywords'] == ['Geologie']
    assert type(iden.keywords[1]) is dict
    assert iden.keywords[1]['type'] == ''
    assert iden.keywords[1]['thesaurus'][
        'title'] == "GEMET - Concepten, versie 2.4"
    assert iden.keywords[1]['thesaurus']['date'] == '2010-01-13'
    assert iden.keywords[1]['thesaurus']['datetype'] == 'publication'
    assert_list(iden.keywords[1]['keywords'], 2)
    assert iden.keywords[1]['keywords'] == ['grondwater', 'meetnet(werk)']
    assert type(iden.keywords[2]) is dict
    assert iden.keywords[2]['type'] == ''
    assert iden.keywords[2]['thesaurus'][
        'title'] == "Vlaamse regio's"
    assert iden.keywords[2]['thesaurus']['date'] == '2013-09-25'
    assert iden.keywords[2]['thesaurus']['datetype'] == 'publication'
    assert_list(iden.keywords[2]['keywords'], 1)
    assert iden.keywords[2]['keywords'] == ['Vlaams Gewest']
    assert type(iden.keywords[3]) is dict
    assert iden.keywords[3]['type'] is None
    assert iden.keywords[3]['thesaurus'][
        'title'] == "GDI-Vlaanderen Trefwoorden"
    assert iden.keywords[3]['thesaurus']['date'] == '2014-02-26'
    assert iden.keywords[3]['thesaurus']['datetype'] == 'publication'
    assert_list(iden.keywords[3]['keywords'], 7)
    assert iden.keywords[3]['keywords'] == [
        'Toegevoegd GDI-Vl', 'Herbruikbaar', 'Vlaamse Open data',
        'Kosteloos', 'Lijst M&R INSPIRE', 'Metadata INSPIRE-conform',
        'Metadata GDI-Vl-conform']
    assert type(iden.keywords[4]) is dict
    assert iden.keywords[4]['type'] is None
    assert iden.keywords[4]['thesaurus']['title'] == "DOV"
    assert iden.keywords[4]['thesaurus']['date'] == '2010-12-01'
    assert iden.keywords[4]['thesaurus']['datetype'] == 'publication'
    assert_list(iden.keywords[4]['keywords'], 7)
    assert iden.keywords[4]['keywords'] == [
        'Ondergrond', 'DOV', 'Vlaanderen', 'monitoring', 'meetnetten',
        'Kaderrichtlijn Water', 'Decreet Integraal waterbeleid']
    # Keywords (object-based attribute); mirrors the dict checks above
    assert_list(iden.keywords2, 5)
    assert iden.keywords2[0].type == ''
    assert iden.keywords2[0].thesaurus[
        'title'] == "GEMET - INSPIRE thema's, versie 1.0"
    assert iden.keywords2[0].thesaurus['date'] == '2008-06-01'
    assert iden.keywords2[0].thesaurus['datetype'] == 'publication'
    assert_list(iden.keywords2[0].keywords, 1)
    assert iden.keywords2[0].keywords == ['Geologie']
    assert iden.keywords2[1].type == ''
    assert iden.keywords2[1].thesaurus[
        'title'] == "GEMET - Concepten, versie 2.4"
    assert iden.keywords2[1].thesaurus['date'] == '2010-01-13'
    assert iden.keywords2[1].thesaurus['datetype'] == 'publication'
    assert_list(iden.keywords2[1].keywords, 2)
    assert iden.keywords2[1].keywords == ['grondwater', 'meetnet(werk)']
    assert iden.keywords2[2].type == ''
    assert iden.keywords2[2].thesaurus[
        'title'] == "Vlaamse regio's"
    assert iden.keywords2[2].thesaurus['date'] == '2013-09-25'
    assert iden.keywords2[2].thesaurus['datetype'] == 'publication'
    assert_list(iden.keywords2[2].keywords, 1)
    assert iden.keywords2[2].keywords == ['Vlaams Gewest']
    assert iden.keywords2[3].type is None
    assert iden.keywords2[3].thesaurus[
        'title'] == "GDI-Vlaanderen Trefwoorden"
    assert iden.keywords2[3].thesaurus['date'] == '2014-02-26'
    assert iden.keywords2[3].thesaurus['datetype'] == 'publication'
    assert_list(iden.keywords2[3].keywords, 7)
    assert iden.keywords2[3].keywords == [
        'Toegevoegd GDI-Vl', 'Herbruikbaar', 'Vlaamse Open data',
        'Kosteloos', 'Lijst M&R INSPIRE', 'Metadata INSPIRE-conform',
        'Metadata GDI-Vl-conform']
    assert iden.keywords2[4].type is None
    assert iden.keywords2[4].thesaurus['title'] == "DOV"
    assert iden.keywords2[4].thesaurus['date'] == '2010-12-01'
    assert iden.keywords2[4].thesaurus['datetype'] == 'publication'
    assert_list(iden.keywords2[4].keywords, 7)
    assert iden.keywords2[4].keywords == [
        'Ondergrond', 'DOV', 'Vlaanderen', 'monitoring', 'meetnetten',
        'Kaderrichtlijn Water', 'Decreet Integraal waterbeleid']
    assert_list(iden.topiccategory, 1)
    assert iden.topiccategory[0] == 'geoscientificInformation'
    assert iden.supplementalinformation == \
        "https://www.dov.vlaanderen.be/page/grondwatermeetnet"
    # Content info
    assert_list(md.contentinfo, 1)
    ci = md.contentinfo[0]
    assert ci.compliancecode is None
    assert_list(ci.language, 0)
    # Plain truth-test instead of "== True" (PEP 8 / flake8 E712).
    assert ci.includedwithdataset
    assert_list(ci.featuretypenames, 0)
    assert_list(ci.featurecatalogues, 1)
    assert ci.featurecatalogues[0] == 'b142965f-b2aa-429e-86ff-a7cb0e065d48'
def test_md_parsing_geobretagne():
"""Test the parsing of a metadatarecord from GéoBretagne
MD_Metadata record available in
tests/resources/csw_geobretagne_mdmetadata.xml
"""
md_resource = get_md_resource(
'tests/resources/csw_geobretagne_mdmetadata.xml')
md = MD_Metadata(md_resource)
assert type(md) is MD_Metadata
assert md.identifier == '955c3e47-411e-4969-b61b-3556d1b9f879'
assert md.dataseturi is None
assert md.parentidentifier is None
assert md.language == 'fre'
assert md.languagecode is None
assert md.charset == 'utf8'
assert md.datestamp == '2018-07-30T14:19:40'
assert md.hierarchy == 'dataset'
assert_list(md.contact, 1)
contact = md.contact[0]
assert contact.organization == 'DIRECTION GENERALE DES FINANCES ' \
'PUBLIQUES BUREAU GF-3A'
assert contact.address is None
assert contact.city is None
assert contact.postcode is None
assert contact.country is None
assert contact.email == 'bureau.gf3a@dgfip.finances.gouv.fr'
assert contact.onlineresource is None
assert contact.role == 'pointOfContact'
assert md.stdname == 'ISO 19115'
assert md.stdver == '1.0'
assert md.referencesystem.code == 'RGF93 / CC48 (EPSG:3948)'
assert md.referencesystem.codeSpace == 'EPSG'
assert_list(md.identificationinfo, 1)
iden = md.identificationinfo[0]
assert iden.title == 'Cadastre 2018 en Bretagne'
assert iden.alternatetitle is None
assert_list(iden.date, 1)
assert iden.date[0].date == '2018-09-01'
assert iden.date[0].type == 'revision'
assert_list(iden.uricode, 1)
assert iden.uricode[0] == 'https://geobretagne.fr/geonetwork/apps/georchestra/?uuid=363e3a8e-d0ce-497d-87a9-2a2d58d82772'
assert_list(iden.uricodespace, 0)
assert_list(iden.uselimitation, 2)
assert u"le plan cadastral décrit les limites apparentes de la " \
u"propriété." in iden.uselimitation
assert_list(iden.uselimitation_url, 0)
assert_list(iden.accessconstraints, 1)
assert iden.accessconstraints[0] == 'otherRestrictions'
assert_list(iden.classification, 0)
assert_list(iden.otherconstraints, 1)
assert iden.otherconstraints[
0] == u'Usage libre sous réserve des mentions obligatoires ' \
u'sur tout document de diffusion : "Source : DGFIP"'
assert_list(iden.securityconstraints, 0)
assert_list(iden.useconstraints, 1)
assert iden.useconstraints[0] == 'copyright'
assert_list(iden.denominators, 1)
assert iden.denominators[0] == '500'
assert_list(iden.distance, 0)
assert_list(iden.uom, 0)
assert_list(iden.resourcelanguage, 1)
assert iden.resourcelanguage[0] == 'fre'
assert_list(iden.resourcelanguagecode, 0)
assert_list(iden.creator, 0)
assert_list(iden.publisher, 0)
assert_list(iden.contributor, 0)
assert iden.edition is None
assert iden.abstract.startswith(
u"Le plan du cadastre est un document administratif qui propose "
u"l’unique plan parcellaire à grande échelle couvrant le territoire "
u"national.")
assert iden.purpose.startswith(
u"Le but premier du plan cadastral est d'identifier, de localiser et "
u"représenter la propriété foncière, ainsi que de servir à l'assise "
u"de la fiscalité locale des propriétés non bâties.")
assert iden.status == 'completed'
assert_list(iden.contact, 1)
assert iden.contact[0].organization == 'DGFIP Bretagne'
assert iden.contact[0].name == 'DIRECTION GENERALE DES FINANCES PUBLIQUES'
assert iden.contact[0].address is None
assert iden.contact[0].city is None
assert iden.contact[0].postcode is None
assert iden.contact[0].country is None
assert iden.contact[0].email == 'bureau.gf3a@dgfip.finances.gouv.fr'
assert iden.contact[0].onlineresource is None
assert iden.contact[0].role == 'pointOfContact'
assert_list(iden.spatialrepresentationtype, 1)
assert iden.spatialrepresentationtype[0] == 'vector'
assert_list(iden.keywords, 7)
assert type(iden.keywords[0]) is dict
assert iden.keywords[0]['type'] == 'place'
assert iden.keywords[0]['thesaurus']['title'] is None
assert iden.keywords[0]['thesaurus']['date'] is None
assert iden.keywords[0]['thesaurus']['datetype'] is None
assert_list(iden.keywords[0]['keywords'], 1)
assert iden.keywords[0]['keywords'] == ['France']
assert type(iden.keywords[1]) is dict
assert iden.keywords[1]['type'] is None
assert iden.keywords[1]['thesaurus']['title'] is None
assert iden.keywords[1]['thesaurus']['date'] is None
assert iden.keywords[1]['thesaurus']['datetype'] is None
assert_list(iden.keywords[1]['keywords'], 0)
assert type(iden.keywords[2]) is dict
assert iden.keywords[2]['type'] == 'theme'
assert iden.keywords[2]['thesaurus']['title'] is None
assert iden.keywords[2]['thesaurus']['date'] is None
assert iden.keywords[2]['thesaurus']['datetype'] is None
assert_list(iden.keywords[2]['keywords'], 7)
assert iden.keywords[2]['keywords'] == [
u'bâtiments', 'adresses', 'parcelles cadastrales', 'hydrographie',
u'réseaux de transport', u'unités administratives',
u'référentiels de coordonnées']
assert type(iden.keywords[3]) is dict
assert iden.keywords[3]['type'] == 'theme'
assert iden.keywords[3]['thesaurus']['title'] is None
assert iden.keywords[3]['thesaurus']['date'] is None
assert iden.keywords[3]['thesaurus']['datetype'] is None
assert_list(iden.keywords[3]['keywords'], 5)
assert iden.keywords[3]['keywords'] == [
u'bâtis', 'sections', 'parcelles', 'cadastre', 'cadastrale']
assert type(iden.keywords[4]) is dict
assert iden.keywords[4]['type'] == 'theme'
assert iden.keywords[4]['thesaurus']['title'] == u"GéoBretagne v 2.0"
assert iden.keywords[4]['thesaurus']['date'] == '2014-01-13'
assert iden.keywords[4]['thesaurus']['datetype'] == 'publication'
assert_list(iden.keywords[4]['keywords'], 1)
assert iden.keywords[4]['keywords'] == [u'référentiels : cadastre']
assert type(iden.keywords[5]) is dict
assert iden.keywords[5]['type'] == 'theme'
assert iden.keywords[5]['thesaurus']['title'] == "INSPIRE themes"
assert iden.keywords[5]['thesaurus']['date'] == '2008-06-01'
assert iden.keywords[5]['thesaurus']['datetype'] == 'publication'
assert_list(iden.keywords[5]['keywords'], 1)
assert iden.keywords[5]['keywords'] == ['Parcelles cadastrales']
assert type(iden.keywords[6]) is dict
assert iden.keywords[6]['type'] == 'theme'
assert iden.keywords[6]['thesaurus']['title'] == "GEMET"
assert iden.keywords[6]['thesaurus']['date'] == '2012-07-20'
assert iden.keywords[6]['thesaurus']['datetype'] == 'publication'
assert_list(iden.keywords[6]['keywords'], 2)
assert iden.keywords[6]['keywords'] == ['cadastre', u'bâtiment']
assert_list(iden.keywords2, 6)
assert iden.keywords2[0].type == 'place'
assert iden.keywords2[0].thesaurus is None
assert_list(iden.keywords2[0].keywords, 1)
assert iden.keywords2[0].keywords == ['France']
assert iden.keywords2[1].type == 'theme'
assert iden.keywords2[1].thesaurus is None
assert_list(iden.keywords2[1].keywords, 7)
assert iden.keywords2[1].keywords == [
u'bâtiments', 'adresses', 'parcelles cadastrales', 'hydrographie',
u'réseaux de transport', u'unités administratives',
u'référentiels de coordonnées']
assert iden.keywords2[2].type == 'theme'
assert iden.keywords2[2].thesaurus is None
assert_list(iden.keywords2[2].keywords, 5)
assert iden.keywords2[2].keywords == [
u'bâtis', 'sections', 'parcelles', 'cadastre', 'cadastrale']
assert iden.keywords2[3].type == 'theme'
assert iden.keywords2[3].thesaurus['title'] == u"GéoBretagne v 2.0"
assert iden.keywords2[3].thesaurus['date'] == '2014-01-13'
assert iden.keywords2[3].thesaurus['datetype'] == 'publication'
assert_list(iden.keywords2[3].keywords, 1)
assert iden.keywords2[3].keywords == [u'référentiels : cadastre']
assert iden.keywords2[4].type == 'theme'
assert iden.keywords2[4].thesaurus['title'] == "INSPIRE themes"
assert iden.keywords2[4].thesaurus['date'] == '2008-06-01'
assert iden.keywords2[4].thesaurus['datetype'] == 'publication'
assert_list(iden.keywords2[4].keywords, 1)
assert iden.keywords2[4].keywords == ['Parcelles cadastrales']
assert iden.keywords2[5].type == 'theme'
assert iden.keywords2[5].thesaurus['title'] == "GEMET"
assert iden.keywords2[5].thesaurus['date'] == '2012-07-20'
assert iden.keywords2[5].thesaurus['datetype'] == 'publication'
assert_list(iden.keywords2[5].keywords, 2)
assert iden.keywords2[5].keywords == ['cadastre', u'bâtiment']
assert_list(iden.topiccategory, 1)
assert iden.topiccategory[0] == 'planningCadastre'
assert iden.supplementalinformation == \
u"La légende du plan cadastral est consultable sur: " \
"http://www.cadastre.gouv.fr/scpc/pdf/legendes/FR_fr/Legende%20du" \
"%20plan%20sur%20internet.pdf"
assert_list(md.contentinfo, 1)
ci = md.contentinfo[0]
assert ci.compliancecode is None
assert_list(ci.language, 0)
assert ci.includedwithdataset == False
assert_list(ci.featuretypenames, 0)
assert_list(ci.featurecatalogues, 0)
def test_md_parsing_19115_2():
"""Test the parsing of a 19115-2 document
MD_Metadata record available in
tests/resources/iso_mi.xml
"""
md_resource = get_md_resource(
'tests/resources/iso_mi.xml')
md = MD_Metadata(md_resource)
assert type(md) is MD_Metadata
assert md.identifier == '3f342f64-9348-11df-ba6a-0014c2c00eab'
ci = md.contentinfo[0]
assert ci.type == 'image'
assert ci.cloud_cover == '72'
assert ci.processing_level == '1.0'
band = ci.bands[0]
assert band.id == 'B1'
assert band.units == 'nm'
assert band.min == '932'
assert band.max == '958'
plt = md.acquisition.platforms[0]
assert plt.identifier == 'LANDSAT_8'
assert plt.description == 'Landsat 8'
inst = plt.instruments[0]
assert inst.identifier == 'OLI_TIRS'
assert inst.type == 'INS-NOBS'
| 37.923858 | 126 | 0.664659 |
import io
from owslib import util
from owslib.etree import etree
from owslib.iso import (
MD_Metadata,
)
from owslib.namespaces import Namespaces
def get_md_resource(file_path):
namespaces = Namespaces().get_namespaces(keys=('gmd', 'gmi'))
with io.open(file_path, mode='r', encoding='utf-8') as f:
data = f.read().encode('utf-8')
data = etree.fromstring(data)
mdelem = data.find('.//' + util.nspath_eval(
'gmd:MD_Metadata', namespaces)) or data.find(
'.//' + util.nspath_eval('gmi:MI_Metadata', namespaces))
if mdelem is None and data.tag in ['{http://www.isotc211.org/2005/gmd}MD_Metadata',
'{http://www.isotc211.org/2005/gmi}MI_Metadata']:
mdelem = data
return mdelem
def assert_list(var, length):
assert type(var) is list
assert len(var) == length
def test_md_parsing_dov():
md_resource = get_md_resource('tests/resources/csw_dov_getrecordbyid.xml')
md = MD_Metadata(md_resource)
assert type(md) is MD_Metadata
assert md.identifier == '6c39d716-aecc-4fbc-bac8-4f05a49a78d5'
assert md.dataseturi is None
assert md.parentidentifier is None
assert md.language is None
assert md.languagecode == 'dut'
assert md.charset == 'utf8'
assert md.datestamp == '2018-02-21T16:14:24'
assert md.hierarchy == 'dataset'
assert_list(md.contact, 1)
contact = md.contact[0]
assert contact.organization == 'Vlaamse overheid - Vlaamse ' \
'MilieuMaatschappij - Afdeling ' \
'Operationeel Waterbeheer'
assert contact.address == 'Koning Albert II-laan 20 bus 16'
assert contact.city == 'Brussel'
assert contact.postcode == '1000'
assert contact.country == u'België'
assert contact.email == 'info@vmm.be'
assert contact.onlineresource.url == 'https://www.vmm.be'
assert contact.role == 'pointOfContact'
assert md.stdname == 'ISO 19115/2003/Cor.1:2006'
assert md.stdver == 'GDI-Vlaanderen Best Practices - versie 1.0'
assert md.referencesystem.code == '31370'
assert md.referencesystem.codeSpace == 'EPSG'
assert_list(md.identificationinfo, 1)
iden = md.identificationinfo[0]
assert iden.title == 'Grondwatermeetnetten'
assert iden.alternatetitle == 'Grondwatermeetnetten beschikbaar op DOV'
assert_list(iden.date, 2)
assert iden.date[0].date == '2002-05-22'
assert iden.date[0].type == 'creation'
assert iden.date[1].date == '2002-05-22'
assert iden.date[1].type == 'publication'
assert_list(iden.uricode, 1)
assert iden.uricode[0] == 'A64F073B-9FBE-91DD-36FDE7462BBAFA61'
assert_list(iden.uricodespace, 1)
assert iden.uricodespace[0] == 'DOV-be'
assert_list(iden.uselimitation, 3)
assert "Zie 'Overige beperkingen'" in iden.uselimitation
assert "Bij het gebruik van de informatie die DOV aanbiedt, dient steeds " \
"volgende standaardreferentie gebruikt te worden: Databank " \
"Ondergrond Vlaanderen - (vermelding van de beheerder en de " \
"specifieke geraadpleegde gegevens) - Geraadpleegd op dd/mm/jjjj, " \
"op https://www.dov.vlaanderen.be" in iden.uselimitation
assert "Volgende aansprakelijkheidsbepalingen gelden: " \
"https://www.dov.vlaanderen.be/page/disclaimer" in iden.uselimitation
assert_list(iden.uselimitation_url, 0)
assert_list(iden.accessconstraints, 1)
assert iden.accessconstraints[0] == 'otherRestrictions'
assert_list(iden.classification, 0)
assert_list(iden.otherconstraints, 1)
assert iden.otherconstraints[
0] == "Data beschikbaar voor hergebruik volgens de " \
"Modellicentie Gratis Hergebruik. Toelichting " \
"beschikbaar op " \
"https://www.dov.vlaanderen.be/page/gebruiksvoorwaarden-dov-services"
assert_list(iden.securityconstraints, 1)
assert iden.securityconstraints[0] == 'unclassified'
assert_list(iden.useconstraints, 0)
assert_list(iden.denominators, 1)
assert iden.denominators[0] == '10000'
assert_list(iden.distance, 0)
assert_list(iden.uom, 0)
assert_list(iden.resourcelanguage, 0)
assert_list(iden.resourcelanguagecode, 1)
assert iden.resourcelanguagecode[0] == 'dut'
assert_list(iden.creator, 0)
assert_list(iden.publisher, 0)
assert_list(iden.contributor, 0)
assert iden.edition is None
assert iden.abstract.startswith("In de Databank Ondergrond Vlaanderen "
"zijn verschillende grondwatermeetnetten "
"opgenomen.")
assert iden.purpose.startswith(
"Het doel van de meetnetten is inzicht krijgen in de kwaliteit en "
"kwantiteit van de watervoerende lagen in de ondergrond van "
"Vlaanderen. Algemeen kan gesteld worden dat de grondwatermeetnetten "
"een belangrijk beleidsinstrument vormen")
assert iden.status == 'onGoing'
assert_list(iden.contact, 2)
assert iden.contact[0].organization == 'Vlaamse overheid - Vlaamse MilieuMaatschappij - Afdeling Operationeel Waterbeheer'
assert iden.contact[0].address == 'Koning Albert II-laan 20 bus 16'
assert iden.contact[0].city == 'Brussel'
assert iden.contact[0].postcode == '1000'
assert iden.contact[0].country == u'België'
assert iden.contact[0].email == 'info@vmm.be'
assert iden.contact[0].onlineresource.url == 'https://www.vmm.be'
assert iden.contact[0].role == 'pointOfContact'
assert iden.contact[1].organization == 'Databank Ondergrond Vlaanderen (' \
'DOV)'
assert iden.contact[1].address == 'Technologiepark Gebouw 905'
assert iden.contact[1].city == 'Zwijnaarde'
assert iden.contact[1].postcode == '9052'
assert iden.contact[1].country == u'België'
assert iden.contact[1].email == 'dov@vlaanderen.be'
assert iden.contact[1].onlineresource.url == \
'https://www.dov.vlaanderen.be'
assert iden.contact[1].role == 'distributor'
assert_list(iden.spatialrepresentationtype, 1)
assert iden.spatialrepresentationtype[0] == 'vector'
assert_list(iden.keywords, 5)
assert type(iden.keywords[0]) is dict
assert iden.keywords[0]['type'] == ''
assert iden.keywords[0]['thesaurus']['title'] == "GEMET - INSPIRE thema's, versie 1.0"
assert iden.keywords[0]['thesaurus']['date'] == '2008-06-01'
assert iden.keywords[0]['thesaurus']['datetype'] == 'publication'
assert_list(iden.keywords[0]['keywords'], 1)
assert iden.keywords[0]['keywords'] == ['Geologie']
assert type(iden.keywords[1]) is dict
assert iden.keywords[1]['type'] == ''
assert iden.keywords[1]['thesaurus'][
'title'] == "GEMET - Concepten, versie 2.4"
assert iden.keywords[1]['thesaurus']['date'] == '2010-01-13'
assert iden.keywords[1]['thesaurus']['datetype'] == 'publication'
assert_list(iden.keywords[1]['keywords'], 2)
assert iden.keywords[1]['keywords'] == ['grondwater', 'meetnet(werk)']
assert type(iden.keywords[2]) is dict
assert iden.keywords[2]['type'] == ''
assert iden.keywords[2]['thesaurus'][
'title'] == "Vlaamse regio's"
assert iden.keywords[2]['thesaurus']['date'] == '2013-09-25'
assert iden.keywords[2]['thesaurus']['datetype'] == 'publication'
assert_list(iden.keywords[2]['keywords'], 1)
assert iden.keywords[2]['keywords'] == ['Vlaams Gewest']
assert type(iden.keywords[3]) is dict
assert iden.keywords[3]['type'] is None
assert iden.keywords[3]['thesaurus'][
'title'] == "GDI-Vlaanderen Trefwoorden"
assert iden.keywords[3]['thesaurus']['date'] == '2014-02-26'
assert iden.keywords[3]['thesaurus']['datetype'] == 'publication'
assert_list(iden.keywords[3]['keywords'], 7)
assert iden.keywords[3]['keywords'] == [
'Toegevoegd GDI-Vl', 'Herbruikbaar', 'Vlaamse Open data',
'Kosteloos', 'Lijst M&R INSPIRE', 'Metadata INSPIRE-conform',
'Metadata GDI-Vl-conform']
assert type(iden.keywords[4]) is dict
assert iden.keywords[4]['type'] is None
assert iden.keywords[4]['thesaurus']['title'] == "DOV"
assert iden.keywords[4]['thesaurus']['date'] == '2010-12-01'
assert iden.keywords[4]['thesaurus']['datetype'] == 'publication'
assert_list(iden.keywords[4]['keywords'], 7)
assert iden.keywords[4]['keywords'] == [
'Ondergrond', 'DOV', 'Vlaanderen', 'monitoring', 'meetnetten',
'Kaderrichtlijn Water', 'Decreet Integraal waterbeleid']
assert_list(iden.keywords2, 5)
assert iden.keywords2[0].type == ''
assert iden.keywords2[0].thesaurus[
'title'] == "GEMET - INSPIRE thema's, versie 1.0"
assert iden.keywords2[0].thesaurus['date'] == '2008-06-01'
assert iden.keywords2[0].thesaurus['datetype'] == 'publication'
assert_list(iden.keywords2[0].keywords, 1)
assert iden.keywords2[0].keywords == ['Geologie']
assert iden.keywords2[1].type == ''
assert iden.keywords2[1].thesaurus[
'title'] == "GEMET - Concepten, versie 2.4"
assert iden.keywords2[1].thesaurus['date'] == '2010-01-13'
assert iden.keywords2[1].thesaurus['datetype'] == 'publication'
assert_list(iden.keywords2[1].keywords, 2)
assert iden.keywords2[1].keywords == ['grondwater', 'meetnet(werk)']
assert iden.keywords2[2].type == ''
assert iden.keywords2[2].thesaurus[
'title'] == "Vlaamse regio's"
assert iden.keywords2[2].thesaurus['date'] == '2013-09-25'
assert iden.keywords2[2].thesaurus['datetype'] == 'publication'
assert_list(iden.keywords2[2].keywords, 1)
assert iden.keywords2[2].keywords == ['Vlaams Gewest']
assert iden.keywords2[3].type is None
assert iden.keywords2[3].thesaurus[
'title'] == "GDI-Vlaanderen Trefwoorden"
assert iden.keywords2[3].thesaurus['date'] == '2014-02-26'
assert iden.keywords2[3].thesaurus['datetype'] == 'publication'
assert_list(iden.keywords2[3].keywords, 7)
assert iden.keywords2[3].keywords == [
'Toegevoegd GDI-Vl', 'Herbruikbaar', 'Vlaamse Open data',
'Kosteloos', 'Lijst M&R INSPIRE', 'Metadata INSPIRE-conform',
'Metadata GDI-Vl-conform']
assert iden.keywords2[4].type is None
assert iden.keywords2[4].thesaurus['title'] == "DOV"
assert iden.keywords2[4].thesaurus['date'] == '2010-12-01'
assert iden.keywords2[4].thesaurus['datetype'] == 'publication'
assert_list(iden.keywords2[4].keywords, 7)
assert iden.keywords2[4].keywords == [
'Ondergrond', 'DOV', 'Vlaanderen', 'monitoring', 'meetnetten',
'Kaderrichtlijn Water', 'Decreet Integraal waterbeleid']
assert_list(iden.topiccategory, 1)
assert iden.topiccategory[0] == 'geoscientificInformation'
assert iden.supplementalinformation == \
"https://www.dov.vlaanderen.be/page/grondwatermeetnet"
assert_list(md.contentinfo, 1)
ci = md.contentinfo[0]
assert ci.compliancecode is None
assert_list(ci.language, 0)
assert ci.includedwithdataset == True
assert_list(ci.featuretypenames, 0)
assert_list(ci.featurecatalogues, 1)
assert ci.featurecatalogues[0] == 'b142965f-b2aa-429e-86ff-a7cb0e065d48'
def test_md_parsing_geobretagne():
md_resource = get_md_resource(
'tests/resources/csw_geobretagne_mdmetadata.xml')
md = MD_Metadata(md_resource)
assert type(md) is MD_Metadata
assert md.identifier == '955c3e47-411e-4969-b61b-3556d1b9f879'
assert md.dataseturi is None
assert md.parentidentifier is None
assert md.language == 'fre'
assert md.languagecode is None
assert md.charset == 'utf8'
assert md.datestamp == '2018-07-30T14:19:40'
assert md.hierarchy == 'dataset'
assert_list(md.contact, 1)
contact = md.contact[0]
assert contact.organization == 'DIRECTION GENERALE DES FINANCES ' \
'PUBLIQUES BUREAU GF-3A'
assert contact.address is None
assert contact.city is None
assert contact.postcode is None
assert contact.country is None
assert contact.email == 'bureau.gf3a@dgfip.finances.gouv.fr'
assert contact.onlineresource is None
assert contact.role == 'pointOfContact'
assert md.stdname == 'ISO 19115'
assert md.stdver == '1.0'
assert md.referencesystem.code == 'RGF93 / CC48 (EPSG:3948)'
assert md.referencesystem.codeSpace == 'EPSG'
assert_list(md.identificationinfo, 1)
iden = md.identificationinfo[0]
assert iden.title == 'Cadastre 2018 en Bretagne'
assert iden.alternatetitle is None
assert_list(iden.date, 1)
assert iden.date[0].date == '2018-09-01'
assert iden.date[0].type == 'revision'
assert_list(iden.uricode, 1)
assert iden.uricode[0] == 'https://geobretagne.fr/geonetwork/apps/georchestra/?uuid=363e3a8e-d0ce-497d-87a9-2a2d58d82772'
assert_list(iden.uricodespace, 0)
assert_list(iden.uselimitation, 2)
assert u"le plan cadastral décrit les limites apparentes de la " \
u"propriété." in iden.uselimitation
assert_list(iden.uselimitation_url, 0)
assert_list(iden.accessconstraints, 1)
assert iden.accessconstraints[0] == 'otherRestrictions'
assert_list(iden.classification, 0)
assert_list(iden.otherconstraints, 1)
assert iden.otherconstraints[
0] == u'Usage libre sous réserve des mentions obligatoires ' \
u'sur tout document de diffusion : "Source : DGFIP"'
assert_list(iden.securityconstraints, 0)
assert_list(iden.useconstraints, 1)
assert iden.useconstraints[0] == 'copyright'
assert_list(iden.denominators, 1)
assert iden.denominators[0] == '500'
assert_list(iden.distance, 0)
assert_list(iden.uom, 0)
assert_list(iden.resourcelanguage, 1)
assert iden.resourcelanguage[0] == 'fre'
assert_list(iden.resourcelanguagecode, 0)
assert_list(iden.creator, 0)
assert_list(iden.publisher, 0)
assert_list(iden.contributor, 0)
assert iden.edition is None
assert iden.abstract.startswith(
u"Le plan du cadastre est un document administratif qui propose "
u"l’unique plan parcellaire à grande échelle couvrant le territoire "
u"national.")
assert iden.purpose.startswith(
u"Le but premier du plan cadastral est d'identifier, de localiser et "
u"représenter la propriété foncière, ainsi que de servir à l'assise "
u"de la fiscalité locale des propriétés non bâties.")
assert iden.status == 'completed'
assert_list(iden.contact, 1)
assert iden.contact[0].organization == 'DGFIP Bretagne'
assert iden.contact[0].name == 'DIRECTION GENERALE DES FINANCES PUBLIQUES'
assert iden.contact[0].address is None
assert iden.contact[0].city is None
assert iden.contact[0].postcode is None
assert iden.contact[0].country is None
assert iden.contact[0].email == 'bureau.gf3a@dgfip.finances.gouv.fr'
assert iden.contact[0].onlineresource is None
assert iden.contact[0].role == 'pointOfContact'
assert_list(iden.spatialrepresentationtype, 1)
assert iden.spatialrepresentationtype[0] == 'vector'
assert_list(iden.keywords, 7)
assert type(iden.keywords[0]) is dict
assert iden.keywords[0]['type'] == 'place'
assert iden.keywords[0]['thesaurus']['title'] is None
assert iden.keywords[0]['thesaurus']['date'] is None
assert iden.keywords[0]['thesaurus']['datetype'] is None
assert_list(iden.keywords[0]['keywords'], 1)
assert iden.keywords[0]['keywords'] == ['France']
assert type(iden.keywords[1]) is dict
assert iden.keywords[1]['type'] is None
assert iden.keywords[1]['thesaurus']['title'] is None
assert iden.keywords[1]['thesaurus']['date'] is None
assert iden.keywords[1]['thesaurus']['datetype'] is None
assert_list(iden.keywords[1]['keywords'], 0)
assert type(iden.keywords[2]) is dict
assert iden.keywords[2]['type'] == 'theme'
assert iden.keywords[2]['thesaurus']['title'] is None
assert iden.keywords[2]['thesaurus']['date'] is None
assert iden.keywords[2]['thesaurus']['datetype'] is None
assert_list(iden.keywords[2]['keywords'], 7)
assert iden.keywords[2]['keywords'] == [
u'bâtiments', 'adresses', 'parcelles cadastrales', 'hydrographie',
u'réseaux de transport', u'unités administratives',
u'référentiels de coordonnées']
assert type(iden.keywords[3]) is dict
assert iden.keywords[3]['type'] == 'theme'
assert iden.keywords[3]['thesaurus']['title'] is None
assert iden.keywords[3]['thesaurus']['date'] is None
assert iden.keywords[3]['thesaurus']['datetype'] is None
assert_list(iden.keywords[3]['keywords'], 5)
assert iden.keywords[3]['keywords'] == [
u'bâtis', 'sections', 'parcelles', 'cadastre', 'cadastrale']
assert type(iden.keywords[4]) is dict
assert iden.keywords[4]['type'] == 'theme'
assert iden.keywords[4]['thesaurus']['title'] == u"GéoBretagne v 2.0"
assert iden.keywords[4]['thesaurus']['date'] == '2014-01-13'
assert iden.keywords[4]['thesaurus']['datetype'] == 'publication'
assert_list(iden.keywords[4]['keywords'], 1)
assert iden.keywords[4]['keywords'] == [u'référentiels : cadastre']
assert type(iden.keywords[5]) is dict
assert iden.keywords[5]['type'] == 'theme'
assert iden.keywords[5]['thesaurus']['title'] == "INSPIRE themes"
assert iden.keywords[5]['thesaurus']['date'] == '2008-06-01'
assert iden.keywords[5]['thesaurus']['datetype'] == 'publication'
assert_list(iden.keywords[5]['keywords'], 1)
assert iden.keywords[5]['keywords'] == ['Parcelles cadastrales']
assert type(iden.keywords[6]) is dict
assert iden.keywords[6]['type'] == 'theme'
assert iden.keywords[6]['thesaurus']['title'] == "GEMET"
assert iden.keywords[6]['thesaurus']['date'] == '2012-07-20'
assert iden.keywords[6]['thesaurus']['datetype'] == 'publication'
assert_list(iden.keywords[6]['keywords'], 2)
assert iden.keywords[6]['keywords'] == ['cadastre', u'bâtiment']
assert_list(iden.keywords2, 6)
assert iden.keywords2[0].type == 'place'
assert iden.keywords2[0].thesaurus is None
assert_list(iden.keywords2[0].keywords, 1)
assert iden.keywords2[0].keywords == ['France']
assert iden.keywords2[1].type == 'theme'
assert iden.keywords2[1].thesaurus is None
assert_list(iden.keywords2[1].keywords, 7)
assert iden.keywords2[1].keywords == [
u'bâtiments', 'adresses', 'parcelles cadastrales', 'hydrographie',
u'réseaux de transport', u'unités administratives',
u'référentiels de coordonnées']
assert iden.keywords2[2].type == 'theme'
assert iden.keywords2[2].thesaurus is None
assert_list(iden.keywords2[2].keywords, 5)
assert iden.keywords2[2].keywords == [
u'bâtis', 'sections', 'parcelles', 'cadastre', 'cadastrale']
assert iden.keywords2[3].type == 'theme'
assert iden.keywords2[3].thesaurus['title'] == u"GéoBretagne v 2.0"
assert iden.keywords2[3].thesaurus['date'] == '2014-01-13'
assert iden.keywords2[3].thesaurus['datetype'] == 'publication'
assert_list(iden.keywords2[3].keywords, 1)
assert iden.keywords2[3].keywords == [u'référentiels : cadastre']
assert iden.keywords2[4].type == 'theme'
assert iden.keywords2[4].thesaurus['title'] == "INSPIRE themes"
assert iden.keywords2[4].thesaurus['date'] == '2008-06-01'
assert iden.keywords2[4].thesaurus['datetype'] == 'publication'
assert_list(iden.keywords2[4].keywords, 1)
assert iden.keywords2[4].keywords == ['Parcelles cadastrales']
assert iden.keywords2[5].type == 'theme'
assert iden.keywords2[5].thesaurus['title'] == "GEMET"
assert iden.keywords2[5].thesaurus['date'] == '2012-07-20'
assert iden.keywords2[5].thesaurus['datetype'] == 'publication'
assert_list(iden.keywords2[5].keywords, 2)
assert iden.keywords2[5].keywords == ['cadastre', u'bâtiment']
assert_list(iden.topiccategory, 1)
assert iden.topiccategory[0] == 'planningCadastre'
assert iden.supplementalinformation == \
u"La légende du plan cadastral est consultable sur: " \
"http://www.cadastre.gouv.fr/scpc/pdf/legendes/FR_fr/Legende%20du" \
"%20plan%20sur%20internet.pdf"
assert_list(md.contentinfo, 1)
ci = md.contentinfo[0]
assert ci.compliancecode is None
assert_list(ci.language, 0)
assert ci.includedwithdataset == False
assert_list(ci.featuretypenames, 0)
assert_list(ci.featurecatalogues, 0)
def test_md_parsing_19115_2():
md_resource = get_md_resource(
'tests/resources/iso_mi.xml')
md = MD_Metadata(md_resource)
assert type(md) is MD_Metadata
assert md.identifier == '3f342f64-9348-11df-ba6a-0014c2c00eab'
ci = md.contentinfo[0]
assert ci.type == 'image'
assert ci.cloud_cover == '72'
assert ci.processing_level == '1.0'
band = ci.bands[0]
assert band.id == 'B1'
assert band.units == 'nm'
assert band.min == '932'
assert band.max == '958'
plt = md.acquisition.platforms[0]
assert plt.identifier == 'LANDSAT_8'
assert plt.description == 'Landsat 8'
inst = plt.instruments[0]
assert inst.identifier == 'OLI_TIRS'
assert inst.type == 'INS-NOBS'
| true | true |
f71bcee66968f36d4da1975fc465b3d5f8cf8ea5 | 1,789 | py | Python | tegaki/core.py | RShirohara/handwriting_detection | f24aba8ac695fef064d090db78229ab482f342cd | [
"MIT"
] | null | null | null | tegaki/core.py | RShirohara/handwriting_detection | f24aba8ac695fef064d090db78229ab482f342cd | [
"MIT"
] | 1 | 2021-02-24T00:31:12.000Z | 2021-02-26T00:11:24.000Z | tegaki/core.py | RShirohara/handwriting_detection | f24aba8ac695fef064d090db78229ab482f342cd | [
"MIT"
] | null | null | null | # -*- coding: utf-8 -*-
# author: @RShirohara
# TODO: #8
from time import sleep
from .detect import DetectArea
from .googleapis import DetectText, GetTTS
from .send import PlayMP3
from .util import VideoStream, QueueConnector
class Tegaki:
"""HandWriting Detection core class.
Attributes:
capture (VideoStream): Video stream from source device or file.
cap_params (CapParams): infomation of source device or file.
"""
def __init__(self, model_dir, src=0, width=None, height=None, maxsize=0):
"""Initilize core class.
Args:
model_dir (str): Path to protocol buffer (.pb) file.
src (str, int): Path to capture device or file.
width (int): Width of the frames in stream.
height (int): Height of the frames in stream.
maxsize (int): Upperbound limit on the item in the queue.
"""
self.capture = VideoStream(
src=src, width=width, height=height
).start()
self.cap_params = self.capture.info()
self.th_play = PlayMP3(daemon=True, maxsize=maxsize)
self.th_tts = GetTTS(
QueueConnector([self.th_play]),
daemon=True,
maxsize=maxsize
)
self.th_ocr = DetectText(
self.th_tts,
daemon=True,
maxsize=maxsize
)
self.th_det = DetectArea(
self.th_ocr,
model_dir,
self.cap_params,
daemon=True,
maxsize=maxsize
)
def run(self):
"""Exec."""
self.th_play.start()
self.th_tts.start()
self.th_ocr.start()
self.th_det.start()
while True:
self.th_det.put(self.capture.read())
sleep(5)
| 26.308824 | 77 | 0.57071 |
from time import sleep
from .detect import DetectArea
from .googleapis import DetectText, GetTTS
from .send import PlayMP3
from .util import VideoStream, QueueConnector
class Tegaki:
def __init__(self, model_dir, src=0, width=None, height=None, maxsize=0):
self.capture = VideoStream(
src=src, width=width, height=height
).start()
self.cap_params = self.capture.info()
self.th_play = PlayMP3(daemon=True, maxsize=maxsize)
self.th_tts = GetTTS(
QueueConnector([self.th_play]),
daemon=True,
maxsize=maxsize
)
self.th_ocr = DetectText(
self.th_tts,
daemon=True,
maxsize=maxsize
)
self.th_det = DetectArea(
self.th_ocr,
model_dir,
self.cap_params,
daemon=True,
maxsize=maxsize
)
def run(self):
self.th_play.start()
self.th_tts.start()
self.th_ocr.start()
self.th_det.start()
while True:
self.th_det.put(self.capture.read())
sleep(5)
| true | true |
f71bceeb4b6085939b05dc07c82d4eea47511809 | 865 | py | Python | OpenPGPAbs/gpgBackends/__init__.py | KOLANICH/OpenPGPAbs | 1052422a74c3970990491972f81be8eb142d2dd7 | [
"Unlicense"
] | null | null | null | OpenPGPAbs/gpgBackends/__init__.py | KOLANICH/OpenPGPAbs | 1052422a74c3970990491972f81be8eb142d2dd7 | [
"Unlicense"
] | null | null | null | OpenPGPAbs/gpgBackends/__init__.py | KOLANICH/OpenPGPAbs | 1052422a74c3970990491972f81be8eb142d2dd7 | [
"Unlicense"
] | null | null | null | _backendsNames = ("bouncyCastle", "pgpy")
from pathlib import Path
from os.path import expanduser
from enum import IntFlag
from abc import ABC, abstractmethod
keyringPath = Path(expanduser("~/.gnupg/pubring.kbx"))
class SecurityIssues(IntFlag):
OK = 0
wrongSig = (1 << 0)
expired = (1 << 1)
disabled = (1 << 2)
revoked = (1 << 3)
invalid = (1 << 4)
brokenAssymetricFunc = (1 << 5)
hashFunctionNotCollisionResistant = (1 << 6)
hashFunctionNotSecondPreimageResistant = (1 << 7)
assymetricKeyLengthIsTooShort = (1 << 8)
insecureCurve = (1 << 9)
noSelfSignature = (1 << 10)
class Backend(ABC):
__slots__ = ()
@abstractmethod
def verifyBlob(signedData: bytes, signature: bytes, *, keyFingerprint: str = None, keyFile: Path = None, subkeyFingerprint: str = None):
raise NotImplementedError
def isConsideredInsecure(k):
raise NotImplementedError
| 26.212121 | 137 | 0.713295 | _backendsNames = ("bouncyCastle", "pgpy")
from pathlib import Path
from os.path import expanduser
from enum import IntFlag
from abc import ABC, abstractmethod
keyringPath = Path(expanduser("~/.gnupg/pubring.kbx"))
class SecurityIssues(IntFlag):
OK = 0
wrongSig = (1 << 0)
expired = (1 << 1)
disabled = (1 << 2)
revoked = (1 << 3)
invalid = (1 << 4)
brokenAssymetricFunc = (1 << 5)
hashFunctionNotCollisionResistant = (1 << 6)
hashFunctionNotSecondPreimageResistant = (1 << 7)
assymetricKeyLengthIsTooShort = (1 << 8)
insecureCurve = (1 << 9)
noSelfSignature = (1 << 10)
class Backend(ABC):
__slots__ = ()
@abstractmethod
def verifyBlob(signedData: bytes, signature: bytes, *, keyFingerprint: str = None, keyFile: Path = None, subkeyFingerprint: str = None):
raise NotImplementedError
def isConsideredInsecure(k):
raise NotImplementedError
| true | true |
f71bcf92d85264acc7dde6e6008f78b323781cda | 417 | py | Python | pinaxcon/hooks.py | n6151h/pyconau2017 | 092de5fd60d2b0dd207242cf2585e16ec6843392 | [
"MIT"
] | 7 | 2015-12-15T22:54:42.000Z | 2018-12-29T03:31:51.000Z | pinaxcon/hooks.py | n6151h/pyconau2017 | 092de5fd60d2b0dd207242cf2585e16ec6843392 | [
"MIT"
] | 59 | 2017-08-09T02:19:42.000Z | 2021-11-30T03:16:58.000Z | config/hooks.py | pyung/pycon-ng | fc7d1709e5da6f3013886d7a3099bd3d617b0df1 | [
"MIT"
] | 11 | 2016-01-03T18:04:58.000Z | 2021-09-19T06:01:25.000Z | import markdown
import pinax.boxes.hooks
import pinax.pages.hooks
def markup_renderer(content):
return markdown.markdown(content)
class PinaxBoxesHookSet(pinax.boxes.hooks.DefaultHookSet):
def parse_content(self, content):
return markup_renderer(content)
class PinaxPagesHookSet(pinax.pages.hooks.DefaultHookSet):
def parse_content(self, content):
return markup_renderer(content)
| 19.857143 | 58 | 0.772182 | import markdown
import pinax.boxes.hooks
import pinax.pages.hooks
def markup_renderer(content):
return markdown.markdown(content)
class PinaxBoxesHookSet(pinax.boxes.hooks.DefaultHookSet):
def parse_content(self, content):
return markup_renderer(content)
class PinaxPagesHookSet(pinax.pages.hooks.DefaultHookSet):
def parse_content(self, content):
return markup_renderer(content)
| true | true |
f71bd131825e1190622a69fa2bed7b9d67ded123 | 7,429 | py | Python | selam/prepdata.py | tsoonjin/selam | fbbb355490271bf09056e05b23245be1b75ae24d | [
"MIT"
] | 3 | 2020-10-14T06:05:26.000Z | 2021-07-21T15:43:54.000Z | selam/prepdata.py | tsoonjin/selam | fbbb355490271bf09056e05b23245be1b75ae24d | [
"MIT"
] | null | null | null | selam/prepdata.py | tsoonjin/selam | fbbb355490271bf09056e05b23245be1b75ae24d | [
"MIT"
] | 1 | 2020-05-08T12:59:35.000Z | 2020-05-08T12:59:35.000Z | #!/bin/bash
import os
import sys
import random
import cv2
import numpy as np
import xgboost as xgb
from sklearn import preprocessing
from sklearn.decomposition import PCA, NMF
from sklearn.model_selection import train_test_split, GridSearchCV
from sklearn.svm import SVC
from sklearn.ensemble import RandomForestClassifier
from sklearn.gaussian_process import GaussianProcessClassifier
from sklearn.gaussian_process.kernels import RBF
from keras.preprocessing.image import ImageDataGenerator
from selam.utils import img
def sample_negative(img, rect, n=1, size=(100, 100)):
""" Sample n negative samples randomly
@param rect: [x1, y1, x2, y2]
@param n: number of negative samples
@param size: size of negative window
"""
samples = []
maxHeight, maxWidth = img.shape[:-1]
width = abs(rect[0] - rect[2])
height = abs(rect[1] - rect[3])
while len(samples) != n:
tmpX = int(random.random() * (maxWidth - width))
tmpY = int(random.random() * (maxHeight - height))
isNotOverlapX = tmpX + width < rect[0] or tmpX > rect[2]
isNotOverlapY = tmpY + height < rect[1] or tmpY > rect[3]
# Only accepts sample that does not overlap with ground truth
if isNotOverlapX and isNotOverlapY:
samples.append(cv2.resize(
img[tmpY: tmpY + height, tmpX: tmpX + width], size))
return samples
def get_roi(img, rect, size=(100, 100)):
    """ Extract the axis-aligned bounding box spanned by a rectangle's corners.

        @param rect: flat sequence of corner coordinates [x1, y1, x2, y2, ...]
        @param size: output patch size
        @return resized patch, [x_min, y_min, x_max, y_max]
    """
    xs = rect[0::2]
    ys = rect[1::2]
    x_min, x_max = int(min(xs)), int(max(xs))
    y_min, y_max = int(min(ys)), int(max(ys))
    patch = img[y_min:y_max, x_min:x_max]
    return cv2.resize(patch, size), [x_min, y_min, x_max, y_max]
def get_jpgs(dirpath, skip=0, resize=None):
    """ Returns all .jpg/.png images located in dirpath, sorted numerically.

        @param skip: keep every `skip`-th frame to reduce computation time
                     (0 keeps all frames)
        @param resize: downscale factor applied to both dimensions
        @return list of images (BGR arrays as read by cv2.imread)
    """
    filenames = os.listdir(dirpath)
    # Only attempt to parse and sort files that end with .jpg/.png
    filenames = [filename for filename in filenames
                 if filename.endswith(".jpg") or filename.endswith(".png")]
    # Filenames are expected to look like '<number>.<ext>'
    filenames.sort(key=lambda x: int(x.split('.', 1)[0]))
    frames = [cv2.imread('{}/{}'.format(dirpath, filename))
              for filename in filenames]
    out = frames[0::skip] if skip > 0 else frames
    print('Read {} images from {}'.format(len(out), dirpath))
    if resize:
        # cv2.resize needs integer dimensions; under Python 3 `/` yields floats
        new_size = (int(out[0].shape[1] / resize), int(out[0].shape[0] / resize))
        # Return a real list (a lazy map() would break len()/indexing callers)
        return [cv2.resize(x, new_size) for x in out]
    return out
def extract_training(dataset_path, annotation):
    """ Returns a list of labelled images as positive training data

        Uses default size of 100 x 100 as training patch.
        @param annotation: text file with one comma-separated rectangle per frame
        @return positive samples, negative samples
    """
    positives = []
    negatives = []
    imgs = get_jpgs(dataset_path)
    with open(annotation) as ann:
        for i, label in zip(imgs, ann):
            # Materialize parsed coordinates: under Python 3, map() is a lazy
            # iterator and the rect[0] indexing below would raise TypeError.
            rect = [float(v) for v in label.rstrip().split(',')]
            # Non-positive x1 presumably marks an unlabelled frame — skip it
            if rect[0] > 0:
                roi, coord = get_roi(i, rect)
                negatives.extend(sample_negative(i, coord))
                positives.append(roi)
    print("{} positive samples".format(len(positives)))
    print("{} negative samples".format(len(negatives)))
    return positives, negatives
def augment_data(imgs, augment_dir, prefix, n=20):
    """ Augment imgs with various random transformations

        @param augment_dir: directory to save augmented images
        @param prefix: prefix of filename
        @param n: number of transformations saved per image
    """
    datagen = ImageDataGenerator(
        rotation_range=90,
        width_shift_range=0.2,
        height_shift_range=0.2,
        rescale=1./255,
        shear_range=0.2,
        zoom_range=0.2,
        horizontal_flip=True,
        fill_mode='nearest')
    for i in imgs:
        # Keras expects RGB with a leading batch axis
        selected = cv2.cvtColor(i, cv2.COLOR_BGR2RGB)
        selected = selected.reshape((1, ) + selected.shape)
        for x, batch in enumerate(datagen.flow(selected, batch_size=1,
                                               save_to_dir=augment_dir,
                                               save_prefix=prefix,
                                               save_format='jpeg')):
            # flow() is an endless generator, so stop after exactly n files
            # (the previous `if x > n` condition produced n + 2 images).
            if x + 1 >= n:
                break
def kfold(x, y, eval_size=0.10):
    """ Partition a dataset into a training split and a validation split.

        @param eval_size: fraction of the data held out for evaluation
        @return X_train, X_valid, Y_train, Y_valid
    """
    # Fixed random_state keeps the split reproducible across runs
    split = train_test_split(x, y, test_size=eval_size, random_state=0)
    return split
def std_zscore(X):
    """ Z-score standardization: subtract the feature mean and divide by the
        feature standard deviation of the dataset.
    """
    return preprocessing.StandardScaler().fit(X).transform(X)
def std_minmax(X):
    """ Rescale each feature of X to the [0, 1] range (min-max scaling). """
    return preprocessing.MinMaxScaler().fit(X).transform(X)
def reduce_pca(X, h, w, n=15, display=True):
    """ Project X onto its first n principal components.

        @param h, w: spatial dimensions used to reshape components for display
        @param display: if True, show each component as a grayscale image
        @return the transformed (n-dimensional) representation of X
    """
    pca = PCA(n_components=n, svd_solver='randomized', whiten=True).fit(X)
    components = pca.components_.reshape((n, h, w, -1))
    if display:
        for component in components:
            # Average over channels, normalize to [0,1], scale to 8-bit
            gray = np.mean(component, axis=2)
            cv2.imshow('PC', np.uint8(img.normUnity(gray) * 255))
            cv2.waitKey(0)
    return pca.transform(X)
def reduce_nmf(X, h, w, n=15, display=False):
    """ Factorize X with non-negative matrix factorization into n components.

        @param h, w: spatial dimensions used to reshape components for display
        @param display: if True, show each component as a grayscale image
        @return the transformed (n-dimensional) representation of X
    """
    model = NMF(n_components=n, init='random', random_state=0).fit(X)
    components = model.components_.reshape((n, h, w, -1))
    if display:
        for component in components:
            gray = np.mean(component, axis=2)
            cv2.imshow('PC', np.uint8(img.normUnity(gray) * 255))
            cv2.waitKey(0)
    return model.transform(X)
def classify_svm(X_train, Y_train):
    """ Grid-search an RBF-kernel SVM over C and gamma; returns the fitted search. """
    grid = {'C': [1e3, 5e3, 1e4, 5e4, 1e5],
            'gamma': [0.0001, 0.0005, 0.001, 0.005, 0.01, 0.1]}
    search = GridSearchCV(SVC(kernel='rbf', class_weight='balanced'), grid)
    search.fit(X_train, Y_train)
    return search
def classify_rf(X_train, Y_train):
    """ Grid-search a random forest over tree count and feature sampling. """
    grid = {'n_estimators': [50, 200, 700],
            'max_features': ['auto', 'sqrt', 'log2']}
    search = GridSearchCV(RandomForestClassifier(n_estimators=500, oob_score=True), grid)
    search.fit(X_train, Y_train)
    return search
def classify_gp(X, Y):
    """ Fit a Gaussian-process classifier with an RBF kernel.

        A single lengthscale is shared across all features.
    """
    rbf_kernel = 1.0 * RBF([1.0])
    return GaussianProcessClassifier(kernel=rbf_kernel).fit(X, Y)
def classify_xgb(X, Y):
    """ Fit an XGBoost classifier via GridSearchCV.

        Every hyper-parameter list holds a single value, so the "grid" is a
        single point; increase n_estimators (e.g. to 1000) for better results.
    """
    grid = {
        'nthread': [4],
        'objective': ['binary:logistic'],
        'learning_rate': [0.05],
        'max_depth': [6],
        'min_child_weight': [11],
        'silent': [1],
        'subsample': [0.8],
        'colsample_bytree': [0.7],
        'n_estimators': [5],
        'missing': [-999],
        'seed': [1337],
    }
    search = GridSearchCV(xgb.XGBClassifier(), grid)
    search.fit(X, Y)
    return search
if __name__ == '__main__':
    # CLI entry point: <dataset directory> <annotation file> <prefix>
    if len(sys.argv) < 4:
        # Use the actual invoked name (the old message said extract_region.py,
        # which is not this script's filename).
        print("Usage: python {} <dataset directory> <annotation file> <prefix> \n"
              .format(os.path.basename(sys.argv[0])))
        sys.exit(1)
    positives, negatives = extract_training(sys.argv[1], sys.argv[2])
| 34.714953 | 98 | 0.629964 |
import os
import sys
import random
import cv2
import numpy as np
import xgboost as xgb
from sklearn import preprocessing
from sklearn.decomposition import PCA, NMF
from sklearn.model_selection import train_test_split, GridSearchCV
from sklearn.svm import SVC
from sklearn.ensemble import RandomForestClassifier
from sklearn.gaussian_process import GaussianProcessClassifier
from sklearn.gaussian_process.kernels import RBF
from keras.preprocessing.image import ImageDataGenerator
from selam.utils import img
def sample_negative(img, rect, n=1, size=(100, 100)):
    """Randomly sample n patches of rect's size that do not overlap rect."""
    samples = []
    maxHeight, maxWidth = img.shape[:-1]
    width = abs(rect[0] - rect[2])
    height = abs(rect[1] - rect[3])
    # NOTE(review): loops forever if no non-overlapping placement exists
    while len(samples) != n:
        tmpX = int(random.random() * (maxWidth - width))
        tmpY = int(random.random() * (maxHeight - height))
        isNotOverlapX = tmpX + width < rect[0] or tmpX > rect[2]
        isNotOverlapY = tmpY + height < rect[1] or tmpY > rect[3]
        if isNotOverlapX and isNotOverlapY:
            samples.append(cv2.resize(
                img[tmpY: tmpY + height, tmpX: tmpX + width], size))
    return samples
def get_roi(img, rect, size=(100, 100)):
    """Extract the bounding box spanned by rect's corners, resized to `size`."""
    xpos = rect[0::2]
    ypos = rect[1::2]
    y = [int(min(ypos)), int(max(ypos))]
    x = [int(min(xpos)), int(max(xpos))]
    roi = img[y[0]:y[1], x[0]:x[1]]
    return cv2.resize(roi, size), [x[0], y[0], x[1], y[1]]
def get_jpgs(dirpath, skip=0, resize=None):
    """Read all .jpg/.png images in dirpath, numerically sorted by filename."""
    filenames = os.listdir(dirpath)
    filenames = [filename for filename in filenames
                 if filename.endswith(".jpg") or filename.endswith(".png")]
    filenames.sort(key=lambda x: int(x.split('.', 1)[0]))
    frames = [cv2.imread('{}/{}'.format(dirpath, filename))
              for filename in filenames]
    out = frames[0::skip] if skip > 0 else frames
    print('Read {} images from {}'.format(len(out), dirpath))
    if resize:
        # NOTE(review): under Python 3, `/` yields floats and map() is lazy;
        # this branch assumes Python 2 semantics.
        new_size = (out[0].shape[1] / resize, out[0].shape[0] / resize)
        return map(lambda x: cv2.resize(x, new_size), out)
    return out
def extract_training(dataset_path, annotation):
    """Build positive/negative training patches from frames + annotation file."""
    positives = []
    negatives = []
    imgs = get_jpgs(dataset_path)
    with open(annotation) as ann:
        for i, label in zip(imgs, ann):
            # NOTE(review): map() is a lazy iterator on Python 3; rect[0]
            # below assumes Python 2.
            rect = map(float, label.rstrip().split(','))
            if rect[0] > 0:
                roi, coord = get_roi(i, rect)
                negatives.extend(sample_negative(i, coord))
                positives.append(roi)
    print("{} positive samples".format(len(positives)))
    print("{} negative samples".format(len(negatives)))
    return positives, negatives
def augment_data(imgs, augment_dir, prefix, n=20):
    """Save randomly transformed variants of each image into augment_dir."""
    # NOTE(review): n_samples is unused
    n_samples = len(imgs)
    datagen = ImageDataGenerator(
        rotation_range=90,
        width_shift_range=0.2,
        height_shift_range=0.2,
        rescale=1./255,
        shear_range=0.2,
        zoom_range=0.2,
        horizontal_flip=True,
        fill_mode='nearest')
    for i in imgs:
        # Keras expects RGB with a leading batch axis
        selected = cv2.cvtColor(i, cv2.COLOR_BGR2RGB)
        selected = selected.reshape((1, ) + selected.shape)
        for x, batch in enumerate(datagen.flow(selected, batch_size=1,
                                               save_to_dir=augment_dir,
                                               save_prefix=prefix,
                                               save_format='jpeg')):
            # NOTE(review): this break condition yields n + 2 images per input
            if x > n:
                break
def kfold(x, y, eval_size=0.10):
    """Split into train/validation sets; eval_size fraction is held out."""
    return train_test_split(x, y, test_size=eval_size, random_state=0)
def std_zscore(X):
    """Standardize features of X to zero mean and unit variance."""
    scaler = preprocessing.StandardScaler().fit(X)
    return scaler.transform(X)
def std_minmax(X):
    """Rescale each feature of X to the [0, 1] range."""
    scaler = preprocessing.MinMaxScaler().fit(X)
    return scaler.transform(X)
def reduce_pca(X, h, w, n=15, display=True):
    """Project X onto its first n principal components (optionally display them)."""
    pca = PCA(n_components=n, svd_solver='randomized',
              whiten=True).fit(X)
    eigenfaces = pca.components_.reshape((n, h, w, -1))
    if display:
        for i in eigenfaces:
            cv2.imshow('PC', np.uint8(img.normUnity(np.mean(i, axis=2)) * 255))
            cv2.waitKey(0)
    return pca.transform(X)
def reduce_nmf(X, h, w, n=15, display=False):
    """Factorize X with NMF into n components (optionally display them)."""
    model = NMF(n_components=n, init='random', random_state=0).fit(X)
    components = model.components_.reshape((n, h, w, -1))
    if display:
        for i in components:
            cv2.imshow('PC', np.uint8(img.normUnity(np.mean(i, axis=2)) * 255))
            cv2.waitKey(0)
    return model.transform(X)
def classify_svm(X_train, Y_train):
    """Grid-search an RBF-kernel SVM over C and gamma."""
    param_grid = {'C': [1e3, 5e3, 1e4, 5e4, 1e5],
                  'gamma': [0.0001, 0.0005, 0.001, 0.005, 0.01, 0.1]}
    clf = GridSearchCV(SVC(kernel='rbf', class_weight='balanced'), param_grid)
    clf.fit(X_train, Y_train)
    return clf
def classify_rf(X_train, Y_train):
    """Grid-search a random forest over tree count and feature sampling."""
    param_grid = {'n_estimators': [50, 200, 700],
                  'max_features': ['auto', 'sqrt', 'log2']}
    clf = GridSearchCV(RandomForestClassifier(n_estimators=500, oob_score=True), param_grid)
    clf.fit(X_train, Y_train)
    return clf
def classify_gp(X, Y):
    """Fit a Gaussian-process classifier with a shared-lengthscale RBF kernel."""
    kernel = 1.0 * RBF([1.0])
    gpc_rbf = GaussianProcessClassifier(kernel=kernel).fit(X, Y)
    return gpc_rbf
def classify_xgb(X, Y):
    """Fit an XGBoost classifier via a single-point GridSearchCV."""
    xgb_model = xgb.XGBClassifier()
    parameters = {'nthread':[4],
                  'objective':['binary:logistic'],
                  'learning_rate': [0.05],
                  'max_depth': [6],
                  'min_child_weight': [11],
                  'silent': [1],
                  'subsample': [0.8],
                  'colsample_bytree': [0.7],
                  'n_estimators': [5],
                  'missing':[-999],
                  'seed': [1337]}
    clf = GridSearchCV(xgb_model, parameters)
    clf.fit(X, Y)
    return clf
if __name__ == '__main__':
    # CLI: <dataset directory> <annotation file> <prefix>
    if len(sys.argv) < 4:
        print("Usage: python extract_region.py <dataset directory> <annotation file> <prefix> \n")
        exit()
    positives, negatives = extract_training(sys.argv[1], sys.argv[2])
| true | true |
f71bd2ea8e759bf953972aa21026569e55e95cd0 | 3,360 | py | Python | RLAgents/lib_common/WrapperSuperMario.py | michalnand/reinforcement_learning_agents | 45f02c23b1135c87311dce5a52f6e643e4313fc3 | [
"MIT"
] | 2 | 2021-08-05T20:50:41.000Z | 2021-12-25T11:00:38.000Z | RLAgents/lib_common/WrapperSuperMario.py | michalnand/reinforcement_learning_agents | 45f02c23b1135c87311dce5a52f6e643e4313fc3 | [
"MIT"
] | null | null | null | RLAgents/lib_common/WrapperSuperMario.py | michalnand/reinforcement_learning_agents | 45f02c23b1135c87311dce5a52f6e643e4313fc3 | [
"MIT"
] | null | null | null | import gym
import numpy
from PIL import Image
from nes_py.wrappers import JoypadSpace
from gym_super_mario_bros.actions import COMPLEX_MOVEMENT
class NopOpsEnv(gym.Wrapper):
    """Performs a random number (1..max_count) of no-op steps (action 0) on
    reset, so episodes start from varied initial states.
    """
    def __init__(self, env=None, max_count=30):
        super(NopOpsEnv, self).__init__(env)
        self.max_count = max_count
    def reset(self):
        # The observation from the underlying reset is discarded; the
        # observation after the last no-op step is returned instead.
        self.env.reset()
        noops = numpy.random.randint(1, self.max_count + 1)
        for _ in range(noops):
            obs, _, _, _ = self.env.step(0)
        return obs
class SkipEnv(gym.Wrapper):
    """Repeats each agent action `skip` times, summing the rewards and
    returning the last observed state (frame-skipping).
    """
    def __init__(self, env, skip = 4):
        gym.Wrapper.__init__(self, env)
        self._skip = skip
    def step(self, action):
        total_reward = 0.0
        done = None
        for i in range(self._skip):
            state, reward, done, info = self.env.step(action)
            total_reward+= reward
            # Stop repeating once the episode terminates
            if done:
                break
        # NOTE(review): if self._skip were 0, `state`/`info` would be unbound
        return state, total_reward, done, info
class ResizeEnv(gym.ObservationWrapper):
    """Converts observations to grayscale, resizes them to (height, width),
    normalizes to [0, 1], and maintains a rolling stack of the last
    `frame_stacking` frames (newest at index 0).
    """
    def __init__(self, env, height = 96, width = 96, frame_stacking = 4):
        super(ResizeEnv, self).__init__(env)
        self.height = height
        self.width = width
        self.frame_stacking = frame_stacking
        state_shape = (self.frame_stacking, self.height, self.width)
        self.dtype = numpy.float32
        self.observation_space = gym.spaces.Box(low=0.0, high=1.0, shape=state_shape, dtype=self.dtype)
        self.state = numpy.zeros(state_shape, dtype=self.dtype)
    def observation(self, state):
        img = Image.fromarray(state)
        img = img.convert('L')
        # PIL's Image.resize takes (width, height); the previous code passed
        # (height, width), which only worked for square targets.
        img = img.resize((self.width, self.height))
        # Shift the stack: oldest frame drops out, newest goes to index 0
        for i in reversed(range(self.frame_stacking-1)):
            self.state[i+1] = self.state[i].copy()
        self.state[0] = numpy.array(img).astype(self.dtype)/255.0
        return self.state
class ClipRewardEnv(gym.Wrapper):
    """Scales rewards by 1/15 (or zeroes them when no_rewards is True) and
    tracks raw-score statistics across episodes.
    """
    def __init__(self, env, no_rewards = False):
        gym.Wrapper.__init__(self, env)
        self.raw_episodes = 0
        self.raw_score = 0.0
        self.raw_score_per_episode = 0.0
        self.raw_score_total = 0.0
        self.no_rewards = no_rewards
    def step(self, action):
        obs, reward, done, info = self.env.step(action)
        self.raw_score+= reward
        self.raw_score_total+= reward
        if done:
            self.raw_episodes+= 1
            # Exponential moving average of the per-episode raw score
            k = 0.1
            self.raw_score_per_episode = (1.0 - k)*self.raw_score_per_episode + k*self.raw_score
            self.raw_score = 0.0
        # Scale reward down by 15 — presumably the env's max per-step reward;
        # TODO confirm against gym-super-mario-bros reward range
        reward = reward/15.0
        if self.no_rewards:
            reward = 0.0
        return obs, reward, done, info
def WrapperSuperMario(env, height = 96, width = 96, frame_stacking=4, frame_skipping=4):
    """Standard preprocessing pipeline for Super Mario Bros: complex action
    space, random no-ops on reset, frame skipping, grayscale resize + frame
    stacking, and scaled rewards. Returns the wrapped (and reset) env.
    """
    env = JoypadSpace(env, COMPLEX_MOVEMENT)
    env = NopOpsEnv(env)
    env = SkipEnv(env, frame_skipping)
    env = ResizeEnv(env, height, width, frame_stacking)
    env = ClipRewardEnv(env, False)
    env.reset()
    return env
def WrapperSuperMarioNoRewards(env, height = 96, width = 96, frame_stacking=4, frame_skipping=4):
    """Same pipeline as WrapperSuperMario, but with all rewards zeroed out
    (ClipRewardEnv(no_rewards=True)) — e.g. for intrinsic-reward training.
    Returns the wrapped (and reset) env.
    """
    env = JoypadSpace(env, COMPLEX_MOVEMENT)
    env = NopOpsEnv(env)
    env = SkipEnv(env, frame_skipping)
    env = ResizeEnv(env, height, width, frame_stacking)
    env = ClipRewardEnv(env, True)
    env.reset()
    # The original final line was fused with extraction junk; restored to the
    # clean `return env` (matching WrapperSuperMario above).
    return env
import numpy
from PIL import Image
from nes_py.wrappers import JoypadSpace
from gym_super_mario_bros.actions import COMPLEX_MOVEMENT
class NopOpsEnv(gym.Wrapper):
    """Performs a random number (1..max_count) of no-op steps on reset."""
    def __init__(self, env=None, max_count=30):
        super(NopOpsEnv, self).__init__(env)
        self.max_count = max_count
    def reset(self):
        self.env.reset()
        noops = numpy.random.randint(1, self.max_count + 1)
        for _ in range(noops):
            obs, _, _, _ = self.env.step(0)
        return obs
class SkipEnv(gym.Wrapper):
    """Repeats each action `skip` times, summing rewards (frame-skipping)."""
    def __init__(self, env, skip = 4):
        gym.Wrapper.__init__(self, env)
        self._skip = skip
    def step(self, action):
        total_reward = 0.0
        done = None
        for i in range(self._skip):
            state, reward, done, info = self.env.step(action)
            total_reward+= reward
            if done:
                break
        return state, total_reward, done, info
class ResizeEnv(gym.ObservationWrapper):
    """Grayscale + resize observations and maintain a rolling frame stack."""
    def __init__(self, env, height = 96, width = 96, frame_stacking = 4):
        super(ResizeEnv, self).__init__(env)
        self.height = height
        self.width = width
        self.frame_stacking = frame_stacking
        state_shape = (self.frame_stacking, self.height, self.width)
        self.dtype = numpy.float32
        self.observation_space = gym.spaces.Box(low=0.0, high=1.0, shape=state_shape, dtype=self.dtype)
        self.state = numpy.zeros(state_shape, dtype=self.dtype)
    def observation(self, state):
        img = Image.fromarray(state)
        img = img.convert('L')
        # NOTE(review): PIL's resize takes (width, height); (height, width)
        # here only works for square targets — verify.
        img = img.resize((self.height, self.width))
        for i in reversed(range(self.frame_stacking-1)):
            self.state[i+1] = self.state[i].copy()
        self.state[0] = numpy.array(img).astype(self.dtype)/255.0
        return self.state
class ClipRewardEnv(gym.Wrapper):
    """Scales rewards by 1/15 (or zeroes them) and tracks raw-score stats."""
    def __init__(self, env, no_rewards = False):
        gym.Wrapper.__init__(self, env)
        self.raw_episodes = 0
        self.raw_score = 0.0
        self.raw_score_per_episode = 0.0
        self.raw_score_total = 0.0
        self.no_rewards = no_rewards
    def step(self, action):
        obs, reward, done, info = self.env.step(action)
        self.raw_score+= reward
        self.raw_score_total+= reward
        if done:
            self.raw_episodes+= 1
            # EMA of the per-episode raw score
            k = 0.1
            self.raw_score_per_episode = (1.0 - k)*self.raw_score_per_episode + k*self.raw_score
            self.raw_score = 0.0
        reward = reward/15.0
        if self.no_rewards:
            reward = 0.0
        return obs, reward, done, info
def WrapperSuperMario(env, height = 96, width = 96, frame_stacking=4, frame_skipping=4):
    """Full Super Mario preprocessing pipeline with scaled rewards."""
    env = JoypadSpace(env, COMPLEX_MOVEMENT)
    env = NopOpsEnv(env)
    env = SkipEnv(env, frame_skipping)
    env = ResizeEnv(env, height, width, frame_stacking)
    env = ClipRewardEnv(env, False)
    env.reset()
    return env
def WrapperSuperMarioNoRewards(env, height = 96, width = 96, frame_stacking=4, frame_skipping=4):
    """Same pipeline but with all rewards zeroed out."""
    env = JoypadSpace(env, COMPLEX_MOVEMENT)
    env = NopOpsEnv(env)
    env = SkipEnv(env, frame_skipping)
    env = ResizeEnv(env, height, width, frame_stacking)
    env = ClipRewardEnv(env, True)
    env.reset()
    return env
f71bd2ef2fd1cafd83ccb890af0c057046ca9fa8 | 46,217 | py | Python | facebook_business/adobjects/advideo.py | alternativshik/facebook-python-business-sdk | 83be60d162ae34ffca186104597fdbb7d1fb7cf2 | [
"CNRI-Python"
] | null | null | null | facebook_business/adobjects/advideo.py | alternativshik/facebook-python-business-sdk | 83be60d162ae34ffca186104597fdbb7d1fb7cf2 | [
"CNRI-Python"
] | null | null | null | facebook_business/adobjects/advideo.py | alternativshik/facebook-python-business-sdk | 83be60d162ae34ffca186104597fdbb7d1fb7cf2 | [
"CNRI-Python"
] | 1 | 2021-04-09T22:44:52.000Z | 2021-04-09T22:44:52.000Z | # Copyright 2014 Facebook, Inc.
# You are hereby granted a non-exclusive, worldwide, royalty-free license to
# use, copy, modify, and distribute this software in source code or binary
# form for use in connection with the web services and APIs provided by
# Facebook.
# As with any software that integrates with the Facebook platform, your use
# of this software is subject to the Facebook Developer Principles and
# Policies [http://developers.facebook.com/policy/]. This copyright notice
# shall be included in all copies or substantial portions of the software.
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
# THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
# FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
# DEALINGS IN THE SOFTWARE.
from facebook_business.adobjects.abstractobject import AbstractObject
from facebook_business.adobjects.abstractcrudobject import AbstractCrudObject
from facebook_business.adobjects.objectparser import ObjectParser
from facebook_business.api import FacebookRequest
from facebook_business.typechecker import TypeChecker
"""
This class is auto-generated.
For any issues or feature requests related to this class, please let us know on
github and we'll fix in our codegen framework. We'll not be able to accept
pull request for this class.
"""
class AdVideo(
AbstractCrudObject,
):
    def __init__(self, fbid=None, parent_id=None, api=None):
        """Create a handle for an AdVideo node.

        @param fbid: Graph API id of the video (None for a not-yet-created one)
        @param parent_id: id of the owning object (deprecated upstream pattern)
        @param api: FacebookAdsApi instance to use for requests
        """
        self._isAdVideo = True
        super(AdVideo, self).__init__(fbid, parent_id, api)
    class Field(AbstractObject.Field):
        """Graph API field/parameter names for the AdVideo node.

        Each attribute value is the literal string used on the wire; `from`
        is a Python keyword, hence the `field_from` alias.
        """
        ad_breaks = 'ad_breaks'
        backdated_time = 'backdated_time'
        backdated_time_granularity = 'backdated_time_granularity'
        content_category = 'content_category'
        content_tags = 'content_tags'
        copyright = 'copyright'
        copyright_monitoring_status = 'copyright_monitoring_status'
        created_time = 'created_time'
        custom_labels = 'custom_labels'
        description = 'description'
        embed_html = 'embed_html'
        embeddable = 'embeddable'
        event = 'event'
        expiration = 'expiration'
        format = 'format'
        field_from = 'from'
        icon = 'icon'
        id = 'id'
        is_crosspost_video = 'is_crosspost_video'
        is_crossposting_eligible = 'is_crossposting_eligible'
        is_episode = 'is_episode'
        is_instagram_eligible = 'is_instagram_eligible'
        is_reference_only = 'is_reference_only'
        length = 'length'
        live_audience_count = 'live_audience_count'
        live_status = 'live_status'
        music_video_copyright = 'music_video_copyright'
        permalink_url = 'permalink_url'
        picture = 'picture'
        place = 'place'
        premiere_living_room_status = 'premiere_living_room_status'
        privacy = 'privacy'
        published = 'published'
        scheduled_publish_time = 'scheduled_publish_time'
        source = 'source'
        spherical = 'spherical'
        status = 'status'
        title = 'title'
        universal_video_id = 'universal_video_id'
        updated_time = 'updated_time'
        adaptive_type = 'adaptive_type'
        animated_effect_id = 'animated_effect_id'
        application_id = 'application_id'
        asked_fun_fact_prompt_id = 'asked_fun_fact_prompt_id'
        audio_story_wave_animation_handle = 'audio_story_wave_animation_handle'
        chunk_session_id = 'chunk_session_id'
        composer_entry_picker = 'composer_entry_picker'
        composer_entry_point = 'composer_entry_point'
        composer_entry_time = 'composer_entry_time'
        composer_session_events_log = 'composer_session_events_log'
        composer_session_id = 'composer_session_id'
        composer_source_surface = 'composer_source_surface'
        composer_type = 'composer_type'
        container_type = 'container_type'
        creative_tools = 'creative_tools'
        end_offset = 'end_offset'
        fbuploader_video_file_chunk = 'fbuploader_video_file_chunk'
        file_size = 'file_size'
        file_url = 'file_url'
        fisheye_video_cropped = 'fisheye_video_cropped'
        formatting = 'formatting'
        fov = 'fov'
        front_z_rotation = 'front_z_rotation'
        fun_fact_prompt_id = 'fun_fact_prompt_id'
        fun_fact_toastee_id = 'fun_fact_toastee_id'
        guide = 'guide'
        guide_enabled = 'guide_enabled'
        has_nickname = 'has_nickname'
        holiday_card = 'holiday_card'
        initial_heading = 'initial_heading'
        initial_pitch = 'initial_pitch'
        instant_game_entry_point_data = 'instant_game_entry_point_data'
        is_boost_intended = 'is_boost_intended'
        is_group_linking_post = 'is_group_linking_post'
        is_voice_clip = 'is_voice_clip'
        location_source_id = 'location_source_id'
        name = 'name'
        offer_like_post_id = 'offer_like_post_id'
        og_action_type_id = 'og_action_type_id'
        og_icon_id = 'og_icon_id'
        og_object_id = 'og_object_id'
        og_phrase = 'og_phrase'
        og_suggestion_mechanism = 'og_suggestion_mechanism'
        original_fov = 'original_fov'
        original_projection_type = 'original_projection_type'
        publish_event_id = 'publish_event_id'
        react_mode_metadata = 'react_mode_metadata'
        referenced_sticker_id = 'referenced_sticker_id'
        replace_video_id = 'replace_video_id'
        sales_promo_id = 'sales_promo_id'
        slideshow_spec = 'slideshow_spec'
        source_instagram_media_id = 'source_instagram_media_id'
        start_offset = 'start_offset'
        swap_mode = 'swap_mode'
        text_format_metadata = 'text_format_metadata'
        throwback_camera_roll_media = 'throwback_camera_roll_media'
        thumb = 'thumb'
        time_since_original_post = 'time_since_original_post'
        transcode_setting_properties = 'transcode_setting_properties'
        unpublished_content_type = 'unpublished_content_type'
        upload_phase = 'upload_phase'
        upload_session_id = 'upload_session_id'
        upload_setting_properties = 'upload_setting_properties'
        video_file_chunk = 'video_file_chunk'
        video_id_original = 'video_id_original'
        video_start_time_ms = 'video_start_time_ms'
        waterfall_id = 'waterfall_id'
        filename = 'filename'
        filepath = 'filepath'
    class ContainerType:
        """Allowed string values for the video `container_type` field."""
        aco_autoextracted_video = 'ACO_AUTOEXTRACTED_VIDEO'
        aco_video_variation = 'ACO_VIDEO_VARIATION'
        ad_break_preview = 'AD_BREAK_PREVIEW'
        ad_derivative = 'AD_DERIVATIVE'
        age_up = 'AGE_UP'
        album_multimedia_post = 'ALBUM_MULTIMEDIA_POST'
        aloha_call_video = 'ALOHA_CALL_VIDEO'
        aloha_superframe = 'ALOHA_SUPERFRAME'
        app_review_screencast = 'APP_REVIEW_SCREENCAST'
        atlas_video = 'ATLAS_VIDEO'
        audio_broadcast = 'AUDIO_BROADCAST'
        bell_poll = 'BELL_POLL'
        brand_equity_poll_video = 'BRAND_EQUITY_POLL_VIDEO'
        broadcast = 'BROADCAST'
        candidate_videos = 'CANDIDATE_VIDEOS'
        canvas = 'CANVAS'
        cfc_video = 'CFC_VIDEO'
        cms_media_manager = 'CMS_MEDIA_MANAGER'
        contained_post_attachment = 'CONTAINED_POST_ATTACHMENT'
        contained_post_audio_broadcast = 'CONTAINED_POST_AUDIO_BROADCAST'
        contained_post_broadcast = 'CONTAINED_POST_BROADCAST'
        copyright_reference_broadcast = 'COPYRIGHT_REFERENCE_BROADCAST'
        copyright_reference_video = 'COPYRIGHT_REFERENCE_VIDEO'
        cultural_moment_deprecated = 'CULTURAL_MOMENT_DEPRECATED'
        dco_ad_asset_feed = 'DCO_AD_ASSET_FEED'
        dco_autogen_video = 'DCO_AUTOGEN_VIDEO'
        dco_trimmed_video = 'DCO_TRIMMED_VIDEO'
        dim_sum = 'DIM_SUM'
        directed_post_attachment = 'DIRECTED_POST_ATTACHMENT'
        direct_inbox = 'DIRECT_INBOX'
        direct_inbox_reaction = 'DIRECT_INBOX_REACTION'
        dynamic_item_display_bundle = 'DYNAMIC_ITEM_DISPLAY_BUNDLE'
        dynamic_item_video = 'DYNAMIC_ITEM_VIDEO'
        dynamic_template_video = 'DYNAMIC_TEMPLATE_VIDEO'
        event_cover_video = 'EVENT_COVER_VIDEO'
        event_tour = 'EVENT_TOUR'
        facecast_dvr = 'FACECAST_DVR'
        fb_shorts = 'FB_SHORTS'
        fb_shorts_group_post = 'FB_SHORTS_GROUP_POST'
        fb_shorts_post = 'FB_SHORTS_POST'
        fundraiser_cover_video = 'FUNDRAISER_COVER_VIDEO'
        game_clip = 'GAME_CLIP'
        gemstone = 'GEMSTONE'
        goodwill_anniversary_deprecated = 'GOODWILL_ANNIVERSARY_DEPRECATED'
        goodwill_anniversary_promotion_deprecated = 'GOODWILL_ANNIVERSARY_PROMOTION_DEPRECATED'
        goodwill_video_contained_share = 'GOODWILL_VIDEO_CONTAINED_SHARE'
        goodwill_video_promotion = 'GOODWILL_VIDEO_PROMOTION'
        goodwill_video_share = 'GOODWILL_VIDEO_SHARE'
        goodwill_video_token_required = 'GOODWILL_VIDEO_TOKEN_REQUIRED'
        group_post = 'GROUP_POST'
        heuristic_cluster_video = 'HEURISTIC_CLUSTER_VIDEO'
        heuristic_preview = 'HEURISTIC_PREVIEW'
        highlight_clip_video = 'HIGHLIGHT_CLIP_VIDEO'
        ig_reels_xpv = 'IG_REELS_XPV'
        ig_stories_reader = 'IG_STORIES_READER'
        inspiration_video = 'INSPIRATION_VIDEO'
        instagram_video_copy = 'INSTAGRAM_VIDEO_COPY'
        instant_application_preview = 'INSTANT_APPLICATION_PREVIEW'
        instant_article = 'INSTANT_ARTICLE'
        instant_game_clip = 'INSTANT_GAME_CLIP'
        issue_module = 'ISSUE_MODULE'
        job_application_video = 'JOB_APPLICATION_VIDEO'
        job_opening_video = 'JOB_OPENING_VIDEO'
        kototoro = 'KOTOTORO'
        learn = 'LEARN'
        legacy = 'LEGACY'
        live_creative_kit_video = 'LIVE_CREATIVE_KIT_VIDEO'
        live_linear_video_channel_internal_broadcast = 'LIVE_LINEAR_VIDEO_CHANNEL_INTERNAL_BROADCAST'
        live_photo = 'LIVE_PHOTO'
        look_now_deprecated = 'LOOK_NOW_DEPRECATED'
        marketplace_listing_video = 'MARKETPLACE_LISTING_VIDEO'
        marketplace_pre_recorded_video = 'MARKETPLACE_PRE_RECORDED_VIDEO'
        moments_video = 'MOMENTS_VIDEO'
        neo_async_game_video = 'NEO_ASYNC_GAME_VIDEO'
        no_story = 'NO_STORY'
        no_story_with_entpost = 'NO_STORY_WITH_ENTPOST'
        oculus_creator_portal = 'OCULUS_CREATOR_PORTAL'
        oculus_venues_broadcast = 'OCULUS_VENUES_BROADCAST'
        offers_video = 'OFFERS_VIDEO'
        pages_cover_video = 'PAGES_COVER_VIDEO'
        page_review_screencast = 'PAGE_REVIEW_SCREENCAST'
        page_slideshow_video = 'PAGE_SLIDESHOW_VIDEO'
        pixelcloud = 'PIXELCLOUD'
        premiere_source = 'PREMIERE_SOURCE'
        private_gallery_video = 'PRIVATE_GALLERY_VIDEO'
        product_video = 'PRODUCT_VIDEO'
        profile_cover_video = 'PROFILE_COVER_VIDEO'
        profile_intro_card = 'PROFILE_INTRO_CARD'
        profile_to_page_uploaded_video = 'PROFILE_TO_PAGE_UPLOADED_VIDEO'
        profile_video = 'PROFILE_VIDEO'
        proton = 'PROTON'
        quick_promotion = 'QUICK_PROMOTION'
        replace_video = 'REPLACE_VIDEO'
        sales_client_interaction = 'SALES_CLIENT_INTERACTION'
        say_thanks_deprecated = 'SAY_THANKS_DEPRECATED'
        showreel_native_dummy_video = 'SHOWREEL_NATIVE_DUMMY_VIDEO'
        slideshow_animoto = 'SLIDESHOW_ANIMOTO'
        slideshow_shakr = 'SLIDESHOW_SHAKR'
        slideshow_variation_video = 'SLIDESHOW_VARIATION_VIDEO'
        sotto_content = 'SOTTO_CONTENT'
        sound_platform_stream = 'SOUND_PLATFORM_STREAM'
        stories_video = 'STORIES_VIDEO'
        storyline = 'STORYLINE'
        storyline_with_external_music = 'STORYLINE_WITH_EXTERNAL_MUSIC'
        story_archive_video = 'STORY_ARCHIVE_VIDEO'
        story_card_template = 'STORY_CARD_TEMPLATE'
        stream_highlights_video = 'STREAM_HIGHLIGHTS_VIDEO'
        tarot_digest = 'TAROT_DIGEST'
        temp_multimedia_post = 'TEMP_MULTIMEDIA_POST'
        unlisted = 'UNLISTED'
        video_comment = 'VIDEO_COMMENT'
        video_creative_editor_autogen_ad_video = 'VIDEO_CREATIVE_EDITOR_AUTOGEN_AD_VIDEO'
        video_superres = 'VIDEO_SUPERRES'
        vu_generated_video = 'VU_GENERATED_VIDEO'
        woodhenge = 'WOODHENGE'
        work_knowledge_video = 'WORK_KNOWLEDGE_VIDEO'
        your_day = 'YOUR_DAY'
    class ContentCategory:
        """Allowed values for `content_category` (used by api_update's enum check)."""
        beauty_fashion = 'BEAUTY_FASHION'
        business = 'BUSINESS'
        cars_trucks = 'CARS_TRUCKS'
        comedy = 'COMEDY'
        cute_animals = 'CUTE_ANIMALS'
        entertainment = 'ENTERTAINMENT'
        family = 'FAMILY'
        food_health = 'FOOD_HEALTH'
        home = 'HOME'
        lifestyle = 'LIFESTYLE'
        music = 'MUSIC'
        news = 'NEWS'
        other = 'OTHER'
        politics = 'POLITICS'
        science = 'SCIENCE'
        sports = 'SPORTS'
        technology = 'TECHNOLOGY'
        video_gaming = 'VIDEO_GAMING'
    class Formatting:
        """Allowed values for the `formatting` parameter."""
        markdown = 'MARKDOWN'
        plaintext = 'PLAINTEXT'
    class OriginalProjectionType:
        """Allowed values for the `original_projection_type` parameter."""
        cubemap = 'cubemap'
        equirectangular = 'equirectangular'
        half_equirectangular = 'half_equirectangular'
    class SwapMode:
        """Allowed values for the `swap_mode` parameter."""
        replace = 'replace'
    class UnpublishedContentType:
        """Allowed values for the `unpublished_content_type` parameter."""
        ads_post = 'ADS_POST'
        draft = 'DRAFT'
        inline_created = 'INLINE_CREATED'
        published = 'PUBLISHED'
        reviewable_branded_content = 'REVIEWABLE_BRANDED_CONTENT'
        scheduled = 'SCHEDULED'
        scheduled_recurring = 'SCHEDULED_RECURRING'
    class UploadPhase:
        """Allowed values for `upload_phase` in the chunked-upload protocol."""
        cancel = 'cancel'
        finish = 'finish'
        start = 'start'
        transfer = 'transfer'
    class Type:
        """Allowed values for the `type` parameter."""
        tagged = 'tagged'
        uploaded = 'uploaded'
    class BackdatedTimeGranularity:
        """Allowed values for `backdated_time_granularity` (see api_update)."""
        day = 'day'
        hour = 'hour'
        min = 'min'
        month = 'month'
        none = 'none'
        year = 'year'
    # @deprecated get_endpoint function is deprecated
    @classmethod
    def get_endpoint(cls):
        """Return the Graph API edge name under which AdVideos are created."""
        return 'advideos'
    # @deprecated api_create is being deprecated
    def api_create(self, parent_id, fields=None, params=None, batch=None, success=None, failure=None, pending=False):
        """Create this video under the given ad account (delegates to
        AdAccount.create_ad_video). Prefer calling that method directly.
        """
        from facebook_business.adobjects.adaccount import AdAccount
        return AdAccount(api=self._api, fbid=parent_id).create_ad_video(fields, params, batch, success, failure, pending)
    def api_delete(self, fields=None, params=None, batch=None, success=None, failure=None, pending=False):
        """Delete this video node (Graph API `DELETE /{video-id}`).

        With `batch`, the request is queued on the batch and returned; with
        `pending=True` the unexecuted FacebookRequest is returned; otherwise
        the request is executed immediately.
        """
        from facebook_business.utils import api_utils
        if batch is None and (success is not None or failure is not None):
          api_utils.warning('`success` and `failure` callback only work for batch call.')
        param_types = {
        }
        enums = {
        }
        request = FacebookRequest(
            node_id=self['id'],
            method='DELETE',
            endpoint='/',
            api=self._api,
            param_checker=TypeChecker(param_types, enums),
            target_class=AbstractCrudObject,
            api_type='NODE',
            response_parser=ObjectParser(reuse_object=self),
        )
        request.add_params(params)
        request.add_fields(fields)

        if batch is not None:
            request.add_to_batch(batch, success=success, failure=failure)
            return request
        elif pending:
            return request
        else:
            self.assure_call()
            return request.execute()
    def api_get(self, fields=None, params=None, batch=None, success=None, failure=None, pending=False):
        """Fetch this video node (Graph API `GET /{video-id}`), refreshing
        this object in place via ObjectParser(reuse_object=self).
        """
        from facebook_business.utils import api_utils
        if batch is None and (success is not None or failure is not None):
          api_utils.warning('`success` and `failure` callback only work for batch call.')
        param_types = {
        }
        enums = {
        }
        request = FacebookRequest(
            node_id=self['id'],
            method='GET',
            endpoint='/',
            api=self._api,
            param_checker=TypeChecker(param_types, enums),
            target_class=AdVideo,
            api_type='NODE',
            response_parser=ObjectParser(reuse_object=self),
        )
        request.add_params(params)
        request.add_fields(fields)

        if batch is not None:
            request.add_to_batch(batch, success=success, failure=failure)
            return request
        elif pending:
            return request
        else:
            self.assure_call()
            return request.execute()
    def api_update(self, fields=None, params=None, batch=None, success=None, failure=None, pending=False):
        """Update this video's metadata (Graph API `POST /{video-id}`).

        `param_types` below lists the accepted parameters; enum-typed ones are
        validated against the nested constant classes (BackdatedTimeGranularity,
        ContentCategory).
        """
        from facebook_business.utils import api_utils
        if batch is None and (success is not None or failure is not None):
          api_utils.warning('`success` and `failure` callback only work for batch call.')
        param_types = {
            'ad_breaks': 'list',
            'allow_bm_crossposting': 'bool',
            'allow_crossposting_for_pages': 'list<Object>',
            'backdated_time': 'datetime',
            'backdated_time_granularity': 'backdated_time_granularity_enum',
            'call_to_action': 'Object',
            'content_category': 'content_category_enum',
            'content_tags': 'list<string>',
            'custom_labels': 'list<string>',
            'description': 'string',
            'direct_share_status': 'unsigned int',
            'embeddable': 'bool',
            'expiration': 'Object',
            'expire_now': 'bool',
            'increment_play_count': 'bool',
            'name': 'string',
            'preferred_thumbnail_id': 'string',
            'privacy': 'string',
            'publish_to_news_feed': 'bool',
            'publish_to_videos_tab': 'bool',
            'published': 'bool',
            'scheduled_publish_time': 'unsigned int',
            'social_actions': 'bool',
            'sponsor_id': 'string',
            'sponsor_relationship': 'unsigned int',
            'tags': 'list<string>',
            'target': 'string',
            'universal_video_id': 'string',
        }
        enums = {
            'backdated_time_granularity_enum': AdVideo.BackdatedTimeGranularity.__dict__.values(),
            'content_category_enum': AdVideo.ContentCategory.__dict__.values(),
        }
        request = FacebookRequest(
            node_id=self['id'],
            method='POST',
            endpoint='/',
            api=self._api,
            param_checker=TypeChecker(param_types, enums),
            target_class=AdVideo,
            api_type='NODE',
            response_parser=ObjectParser(reuse_object=self),
        )
        request.add_params(params)
        request.add_fields(fields)

        if batch is not None:
            request.add_to_batch(batch, success=success, failure=failure)
            return request
        elif pending:
            return request
        else:
            self.assure_call()
            return request.execute()
def get_captions(self, fields=None, params=None, batch=None, success=None, failure=None, pending=False):
    """Read the /captions edge of this video.

    Returns the prepared FacebookRequest when `batch` or `pending` is
    given; otherwise executes it immediately and returns the result.
    """
    from facebook_business.utils import api_utils
    if batch is None and (success is not None or failure is not None):
        api_utils.warning('`success` and `failure` callback only work for batch call.')
    # This edge takes no typed parameters and uses no enums.
    request = FacebookRequest(
        node_id=self['id'],
        method='GET',
        endpoint='/captions',
        api=self._api,
        param_checker=TypeChecker({}, {}),
        target_class=AbstractCrudObject,
        api_type='EDGE',
        response_parser=ObjectParser(target_class=AbstractCrudObject, api=self._api),
    )
    request.add_params(params)
    request.add_fields(fields)
    if batch is not None:
        request.add_to_batch(batch, success=success, failure=failure)
        return request
    if pending:
        return request
    self.assure_call()
    return request.execute()
def create_caption(self, fields=None, params=None, batch=None, success=None, failure=None, pending=False):
    """Upload captions to the /captions edge of this video (POST).

    Returns the prepared FacebookRequest when `batch` or `pending` is
    given; otherwise executes it immediately and returns the result.
    """
    from facebook_business.utils import api_utils
    if batch is None and (success is not None or failure is not None):
        api_utils.warning('`success` and `failure` callback only work for batch call.')
    param_spec = {
        'captions_file': 'file',
        'default_locale': 'string',
        'locales_to_delete': 'list<string>',
    }
    request = FacebookRequest(
        node_id=self['id'],
        method='POST',
        endpoint='/captions',
        api=self._api,
        param_checker=TypeChecker(param_spec, {}),
        target_class=AdVideo,
        api_type='EDGE',
        response_parser=ObjectParser(target_class=AdVideo, api=self._api),
    )
    request.add_params(params)
    request.add_fields(fields)
    if batch is not None:
        request.add_to_batch(batch, success=success, failure=failure)
        return request
    if pending:
        return request
    self.assure_call()
    return request.execute()
def get_comments(self, fields=None, params=None, batch=None, success=None, failure=None, pending=False):
    """Read the /comments edge of this video.

    Supports filter/live_filter/order enum parameters and a `since`
    datetime. Returns the prepared FacebookRequest when `batch` or
    `pending` is given; otherwise executes it immediately.
    """
    from facebook_business.utils import api_utils
    if batch is None and (success is not None or failure is not None):
        api_utils.warning('`success` and `failure` callback only work for batch call.')
    from facebook_business.adobjects.comment import Comment
    param_spec = {
        'filter': 'filter_enum',
        'live_filter': 'live_filter_enum',
        'order': 'order_enum',
        'since': 'datetime',
    }
    enum_spec = {
        'filter_enum': Comment.Filter.__dict__.values(),
        'live_filter_enum': Comment.LiveFilter.__dict__.values(),
        'order_enum': Comment.Order.__dict__.values(),
    }
    request = FacebookRequest(
        node_id=self['id'],
        method='GET',
        endpoint='/comments',
        api=self._api,
        param_checker=TypeChecker(param_spec, enum_spec),
        target_class=Comment,
        api_type='EDGE',
        response_parser=ObjectParser(target_class=Comment, api=self._api),
    )
    request.add_params(params)
    request.add_fields(fields)
    if batch is not None:
        request.add_to_batch(batch, success=success, failure=failure)
        return request
    if pending:
        return request
    self.assure_call()
    return request.execute()
def create_comment(self, fields=None, params=None, batch=None, success=None, failure=None, pending=False):
    """Post a comment to the /comments edge of this video.

    Returns the prepared FacebookRequest when `batch` or `pending` is
    given; otherwise executes it immediately and returns the result.
    """
    from facebook_business.utils import api_utils
    if batch is None and (success is not None or failure is not None):
        api_utils.warning('`success` and `failure` callback only work for batch call.')
    from facebook_business.adobjects.comment import Comment
    param_spec = {
        'attachment_id': 'string',
        'attachment_share_url': 'string',
        'attachment_url': 'string',
        'comment_privacy_value': 'comment_privacy_value_enum',
        'facepile_mentioned_ids': 'list<string>',
        'feedback_source': 'string',
        'is_offline': 'bool',
        'message': 'string',
        'nectar_module': 'string',
        'object_id': 'string',
        'parent_comment_id': 'Object',
        'text': 'string',
        'tracking': 'string',
    }
    enum_spec = {
        'comment_privacy_value_enum': Comment.CommentPrivacyValue.__dict__.values(),
    }
    request = FacebookRequest(
        node_id=self['id'],
        method='POST',
        endpoint='/comments',
        api=self._api,
        param_checker=TypeChecker(param_spec, enum_spec),
        target_class=Comment,
        api_type='EDGE',
        response_parser=ObjectParser(target_class=Comment, api=self._api),
    )
    request.add_params(params)
    request.add_fields(fields)
    if batch is not None:
        request.add_to_batch(batch, success=success, failure=failure)
        return request
    if pending:
        return request
    self.assure_call()
    return request.execute()
def get_crosspost_shared_pages(self, fields=None, params=None, batch=None, success=None, failure=None, pending=False):
    """Read the /crosspost_shared_pages edge (Pages this video is crossposted to).

    Returns the prepared FacebookRequest when `batch` or `pending` is
    given; otherwise executes it immediately and returns the result.
    """
    from facebook_business.utils import api_utils
    if batch is None and (success is not None or failure is not None):
        api_utils.warning('`success` and `failure` callback only work for batch call.')
    from facebook_business.adobjects.page import Page
    # No typed parameters or enums on this edge.
    request = FacebookRequest(
        node_id=self['id'],
        method='GET',
        endpoint='/crosspost_shared_pages',
        api=self._api,
        param_checker=TypeChecker({}, {}),
        target_class=Page,
        api_type='EDGE',
        response_parser=ObjectParser(target_class=Page, api=self._api),
    )
    request.add_params(params)
    request.add_fields(fields)
    if batch is not None:
        request.add_to_batch(batch, success=success, failure=failure)
        return request
    if pending:
        return request
    self.assure_call()
    return request.execute()
def get_likes(self, fields=None, params=None, batch=None, success=None, failure=None, pending=False):
    """Read the /likes edge (profiles that liked this video).

    Returns the prepared FacebookRequest when `batch` or `pending` is
    given; otherwise executes it immediately and returns the result.
    """
    from facebook_business.utils import api_utils
    if batch is None and (success is not None or failure is not None):
        api_utils.warning('`success` and `failure` callback only work for batch call.')
    from facebook_business.adobjects.profile import Profile
    # No typed parameters or enums on this edge.
    request = FacebookRequest(
        node_id=self['id'],
        method='GET',
        endpoint='/likes',
        api=self._api,
        param_checker=TypeChecker({}, {}),
        target_class=Profile,
        api_type='EDGE',
        response_parser=ObjectParser(target_class=Profile, api=self._api),
    )
    request.add_params(params)
    request.add_fields(fields)
    if batch is not None:
        request.add_to_batch(batch, success=success, failure=failure)
        return request
    if pending:
        return request
    self.assure_call()
    return request.execute()
def create_like(self, fields=None, params=None, batch=None, success=None, failure=None, pending=False):
    """Like this video on behalf of the caller (POST /likes).

    Returns the prepared FacebookRequest when `batch` or `pending` is
    given; otherwise executes it immediately and returns the result.
    """
    from facebook_business.utils import api_utils
    if batch is None and (success is not None or failure is not None):
        api_utils.warning('`success` and `failure` callback only work for batch call.')
    param_spec = {
        'feedback_source': 'string',
        'nectar_module': 'string',
        'notify': 'bool',
        'tracking': 'string',
    }
    request = FacebookRequest(
        node_id=self['id'],
        method='POST',
        endpoint='/likes',
        api=self._api,
        param_checker=TypeChecker(param_spec, {}),
        target_class=AdVideo,
        api_type='EDGE',
        response_parser=ObjectParser(target_class=AdVideo, api=self._api),
    )
    request.add_params(params)
    request.add_fields(fields)
    if batch is not None:
        request.add_to_batch(batch, success=success, failure=failure)
        return request
    if pending:
        return request
    self.assure_call()
    return request.execute()
def get_poll_settings(self, fields=None, params=None, batch=None, success=None, failure=None, pending=False):
    """Read the /poll_settings edge of this video.

    Returns the prepared FacebookRequest when `batch` or `pending` is
    given; otherwise executes it immediately and returns the result.
    """
    from facebook_business.utils import api_utils
    if batch is None and (success is not None or failure is not None):
        api_utils.warning('`success` and `failure` callback only work for batch call.')
    # No typed parameters or enums on this edge.
    request = FacebookRequest(
        node_id=self['id'],
        method='GET',
        endpoint='/poll_settings',
        api=self._api,
        param_checker=TypeChecker({}, {}),
        target_class=AbstractCrudObject,
        api_type='EDGE',
        response_parser=ObjectParser(target_class=AbstractCrudObject, api=self._api),
    )
    request.add_params(params)
    request.add_fields(fields)
    if batch is not None:
        request.add_to_batch(batch, success=success, failure=failure)
        return request
    if pending:
        return request
    self.assure_call()
    return request.execute()
def get_polls(self, fields=None, params=None, batch=None, success=None, failure=None, pending=False):
    """Read the /polls edge (VideoPoll objects attached to this video).

    Returns the prepared FacebookRequest when `batch` or `pending` is
    given; otherwise executes it immediately and returns the result.
    """
    from facebook_business.utils import api_utils
    if batch is None and (success is not None or failure is not None):
        api_utils.warning('`success` and `failure` callback only work for batch call.')
    from facebook_business.adobjects.videopoll import VideoPoll
    # No typed parameters or enums on this edge.
    request = FacebookRequest(
        node_id=self['id'],
        method='GET',
        endpoint='/polls',
        api=self._api,
        param_checker=TypeChecker({}, {}),
        target_class=VideoPoll,
        api_type='EDGE',
        response_parser=ObjectParser(target_class=VideoPoll, api=self._api),
    )
    request.add_params(params)
    request.add_fields(fields)
    if batch is not None:
        request.add_to_batch(batch, success=success, failure=failure)
        return request
    if pending:
        return request
    self.assure_call()
    return request.execute()
def create_poll(self, fields=None, params=None, batch=None, success=None, failure=None, pending=False):
    """Create a VideoPoll on this video (POST /polls).

    Returns the prepared FacebookRequest when `batch` or `pending` is
    given; otherwise executes it immediately and returns the result.
    """
    from facebook_business.utils import api_utils
    if batch is None and (success is not None or failure is not None):
        api_utils.warning('`success` and `failure` callback only work for batch call.')
    from facebook_business.adobjects.videopoll import VideoPoll
    param_spec = {
        'close_after_voting': 'bool',
        'correct_option': 'unsigned int',
        'default_open': 'bool',
        'options': 'list<string>',
        'question': 'string',
        'show_gradient': 'bool',
        'show_results': 'bool',
    }
    request = FacebookRequest(
        node_id=self['id'],
        method='POST',
        endpoint='/polls',
        api=self._api,
        param_checker=TypeChecker(param_spec, {}),
        target_class=VideoPoll,
        api_type='EDGE',
        response_parser=ObjectParser(target_class=VideoPoll, api=self._api),
    )
    request.add_params(params)
    request.add_fields(fields)
    if batch is not None:
        request.add_to_batch(batch, success=success, failure=failure)
        return request
    if pending:
        return request
    self.assure_call()
    return request.execute()
def get_sponsor_tags(self, fields=None, params=None, batch=None, success=None, failure=None, pending=False):
    """Read the /sponsor_tags edge (sponsoring Pages tagged on this video).

    Returns the prepared FacebookRequest when `batch` or `pending` is
    given; otherwise executes it immediately and returns the result.
    """
    from facebook_business.utils import api_utils
    if batch is None and (success is not None or failure is not None):
        api_utils.warning('`success` and `failure` callback only work for batch call.')
    from facebook_business.adobjects.page import Page
    # No typed parameters or enums on this edge.
    request = FacebookRequest(
        node_id=self['id'],
        method='GET',
        endpoint='/sponsor_tags',
        api=self._api,
        param_checker=TypeChecker({}, {}),
        target_class=Page,
        api_type='EDGE',
        response_parser=ObjectParser(target_class=Page, api=self._api),
    )
    request.add_params(params)
    request.add_fields(fields)
    if batch is not None:
        request.add_to_batch(batch, success=success, failure=failure)
        return request
    if pending:
        return request
    self.assure_call()
    return request.execute()
def get_tags(self, fields=None, params=None, batch=None, success=None, failure=None, pending=False):
    """Read the /tags edge of this video.

    Returns the prepared FacebookRequest when `batch` or `pending` is
    given; otherwise executes it immediately and returns the result.
    """
    from facebook_business.utils import api_utils
    if batch is None and (success is not None or failure is not None):
        api_utils.warning('`success` and `failure` callback only work for batch call.')
    # No typed parameters or enums on this edge.
    request = FacebookRequest(
        node_id=self['id'],
        method='GET',
        endpoint='/tags',
        api=self._api,
        param_checker=TypeChecker({}, {}),
        target_class=AbstractCrudObject,
        api_type='EDGE',
        response_parser=ObjectParser(target_class=AbstractCrudObject, api=self._api),
    )
    request.add_params(params)
    request.add_fields(fields)
    if batch is not None:
        request.add_to_batch(batch, success=success, failure=failure)
        return request
    if pending:
        return request
    self.assure_call()
    return request.execute()
def create_tag(self, fields=None, params=None, batch=None, success=None, failure=None, pending=False):
    """Tag a user on this video (POST /tags).

    Returns the prepared FacebookRequest when `batch` or `pending` is
    given; otherwise executes it immediately and returns the result.
    """
    from facebook_business.utils import api_utils
    if batch is None and (success is not None or failure is not None):
        api_utils.warning('`success` and `failure` callback only work for batch call.')
    param_spec = {
        'tag_uid': 'int',
        'uid': 'int',
        'vid': 'string',
    }
    request = FacebookRequest(
        node_id=self['id'],
        method='POST',
        endpoint='/tags',
        api=self._api,
        param_checker=TypeChecker(param_spec, {}),
        target_class=AdVideo,
        api_type='EDGE',
        response_parser=ObjectParser(target_class=AdVideo, api=self._api),
    )
    request.add_params(params)
    request.add_fields(fields)
    if batch is not None:
        request.add_to_batch(batch, success=success, failure=failure)
        return request
    if pending:
        return request
    self.assure_call()
    return request.execute()
def get_thumbnails(self, fields=None, params=None, batch=None, success=None, failure=None, pending=False):
    """Read the /thumbnails edge (VideoThumbnail objects for this video).

    Returns the prepared FacebookRequest when `batch` or `pending` is
    given; otherwise executes it immediately and returns the result.
    """
    from facebook_business.utils import api_utils
    if batch is None and (success is not None or failure is not None):
        api_utils.warning('`success` and `failure` callback only work for batch call.')
    from facebook_business.adobjects.videothumbnail import VideoThumbnail
    # No typed parameters or enums on this edge.
    request = FacebookRequest(
        node_id=self['id'],
        method='GET',
        endpoint='/thumbnails',
        api=self._api,
        param_checker=TypeChecker({}, {}),
        target_class=VideoThumbnail,
        api_type='EDGE',
        response_parser=ObjectParser(target_class=VideoThumbnail, api=self._api),
    )
    request.add_params(params)
    request.add_fields(fields)
    if batch is not None:
        request.add_to_batch(batch, success=success, failure=failure)
        return request
    if pending:
        return request
    self.assure_call()
    return request.execute()
def create_thumbnail(self, fields=None, params=None, batch=None, success=None, failure=None, pending=False):
    """Upload a thumbnail image for this video (POST /thumbnails).

    Returns the prepared FacebookRequest when `batch` or `pending` is
    given; otherwise executes it immediately and returns the result.
    """
    from facebook_business.utils import api_utils
    if batch is None and (success is not None or failure is not None):
        api_utils.warning('`success` and `failure` callback only work for batch call.')
    param_spec = {
        'is_preferred': 'bool',
        'source': 'file',
    }
    request = FacebookRequest(
        node_id=self['id'],
        method='POST',
        endpoint='/thumbnails',
        api=self._api,
        param_checker=TypeChecker(param_spec, {}),
        target_class=AdVideo,
        api_type='EDGE',
        response_parser=ObjectParser(target_class=AdVideo, api=self._api),
    )
    request.add_params(params)
    request.add_fields(fields)
    if batch is not None:
        request.add_to_batch(batch, success=success, failure=failure)
        return request
    if pending:
        return request
    self.assure_call()
    return request.execute()
def get_video_insights(self, fields=None, params=None, batch=None, success=None, failure=None, pending=False):
    """Read the /video_insights edge (InsightsResult metrics for this video).

    Accepts a metric list, a period enum, and a since/until window.
    Returns the prepared FacebookRequest when `batch` or `pending` is
    given; otherwise executes it immediately and returns the result.
    """
    from facebook_business.utils import api_utils
    if batch is None and (success is not None or failure is not None):
        api_utils.warning('`success` and `failure` callback only work for batch call.')
    from facebook_business.adobjects.insightsresult import InsightsResult
    param_spec = {
        'metric': 'list<Object>',
        'period': 'period_enum',
        'since': 'datetime',
        'until': 'datetime',
    }
    enum_spec = {
        'period_enum': InsightsResult.Period.__dict__.values(),
    }
    request = FacebookRequest(
        node_id=self['id'],
        method='GET',
        endpoint='/video_insights',
        api=self._api,
        param_checker=TypeChecker(param_spec, enum_spec),
        target_class=InsightsResult,
        api_type='EDGE',
        response_parser=ObjectParser(target_class=InsightsResult, api=self._api),
    )
    request.add_params(params)
    request.add_fields(fields)
    if batch is not None:
        request.add_to_batch(batch, success=success, failure=failure)
        return request
    if pending:
        return request
    self.assure_call()
    return request.execute()
# Wire-format type declarations for every AdVideo field, keyed by field
# name.  Values are Graph API type strings consumed by the request
# type-checking machinery; enum-typed entries (e.g. 'ContainerType')
# name the nested helper classes surfaced via _get_field_enum_info().
# NOTE(review): this table appears auto-generated — insertion order is
# preserved as-is; do not reorder or edit by hand.
_field_types = {
    'ad_breaks': 'list<int>',
    'backdated_time': 'datetime',
    'backdated_time_granularity': 'string',
    'content_category': 'string',
    'content_tags': 'list<string>',
    'copyright': 'VideoCopyright',
    'copyright_monitoring_status': 'string',
    'created_time': 'datetime',
    'custom_labels': 'list<string>',
    'description': 'string',
    'embed_html': 'string',
    'embeddable': 'bool',
    'event': 'Event',
    'expiration': 'Object',
    'format': 'list<Object>',
    'from': 'Object',
    'icon': 'string',
    'id': 'string',
    'is_crosspost_video': 'bool',
    'is_crossposting_eligible': 'bool',
    'is_episode': 'bool',
    'is_instagram_eligible': 'bool',
    'is_reference_only': 'bool',
    'length': 'float',
    'live_audience_count': 'unsigned int',
    'live_status': 'string',
    'music_video_copyright': 'MusicVideoCopyright',
    'permalink_url': 'string',
    'picture': 'string',
    'place': 'Place',
    'premiere_living_room_status': 'string',
    'privacy': 'Privacy',
    'published': 'bool',
    'scheduled_publish_time': 'datetime',
    'source': 'string',
    'spherical': 'bool',
    'status': 'Object',
    'title': 'string',
    'universal_video_id': 'string',
    'updated_time': 'datetime',
    'adaptive_type': 'string',
    'animated_effect_id': 'unsigned int',
    'application_id': 'string',
    'asked_fun_fact_prompt_id': 'unsigned int',
    'audio_story_wave_animation_handle': 'string',
    'chunk_session_id': 'string',
    'composer_entry_picker': 'string',
    'composer_entry_point': 'string',
    'composer_entry_time': 'unsigned int',
    'composer_session_events_log': 'string',
    'composer_session_id': 'string',
    'composer_source_surface': 'string',
    'composer_type': 'string',
    'container_type': 'ContainerType',
    'creative_tools': 'string',
    'end_offset': 'unsigned int',
    'fbuploader_video_file_chunk': 'string',
    'file_size': 'unsigned int',
    'file_url': 'string',
    'fisheye_video_cropped': 'bool',
    'formatting': 'Formatting',
    'fov': 'unsigned int',
    'front_z_rotation': 'float',
    'fun_fact_prompt_id': 'unsigned int',
    'fun_fact_toastee_id': 'unsigned int',
    'guide': 'list<list<unsigned int>>',
    'guide_enabled': 'bool',
    'has_nickname': 'bool',
    'holiday_card': 'string',
    'initial_heading': 'unsigned int',
    'initial_pitch': 'unsigned int',
    'instant_game_entry_point_data': 'string',
    'is_boost_intended': 'bool',
    'is_group_linking_post': 'bool',
    'is_voice_clip': 'bool',
    'location_source_id': 'string',
    'name': 'string',
    'offer_like_post_id': 'unsigned int',
    'og_action_type_id': 'string',
    'og_icon_id': 'string',
    'og_object_id': 'string',
    'og_phrase': 'string',
    'og_suggestion_mechanism': 'string',
    'original_fov': 'unsigned int',
    'original_projection_type': 'OriginalProjectionType',
    'publish_event_id': 'unsigned int',
    'react_mode_metadata': 'string',
    'referenced_sticker_id': 'string',
    'replace_video_id': 'string',
    'sales_promo_id': 'unsigned int',
    'slideshow_spec': 'map',
    'source_instagram_media_id': 'string',
    'start_offset': 'unsigned int',
    'swap_mode': 'SwapMode',
    'text_format_metadata': 'string',
    'throwback_camera_roll_media': 'string',
    'thumb': 'file',
    'time_since_original_post': 'unsigned int',
    'transcode_setting_properties': 'string',
    'unpublished_content_type': 'UnpublishedContentType',
    'upload_phase': 'UploadPhase',
    'upload_session_id': 'string',
    'upload_setting_properties': 'string',
    'video_file_chunk': 'string',
    'video_id_original': 'string',
    'video_start_time_ms': 'unsigned int',
    'waterfall_id': 'string',
    'filename': 'file'
}
@classmethod
def _get_field_enum_info(cls):
    """Return a mapping of enum field names to their permitted values."""
    enum_sources = {
        'ContainerType': AdVideo.ContainerType,
        'ContentCategory': AdVideo.ContentCategory,
        'Formatting': AdVideo.Formatting,
        'OriginalProjectionType': AdVideo.OriginalProjectionType,
        'SwapMode': AdVideo.SwapMode,
        'UnpublishedContentType': AdVideo.UnpublishedContentType,
        'UploadPhase': AdVideo.UploadPhase,
        'Type': AdVideo.Type,
        'BackdatedTimeGranularity': AdVideo.BackdatedTimeGranularity,
    }
    # Expose each helper class's attribute values (the allowed enum strings).
    return {name: source.__dict__.values() for name, source in enum_sources.items()}
def remote_create(
    self,
    batch=None,
    failure=None,
    params=None,
    success=None,
):
    """Upload this video's media and populate the object from the response.

    Mirrors AbstractCrudObject.remote_create, except that instead of a
    `files` argument the payload comes from the object's 'filepath'
    property (or, alternatively, its 'slideshow_spec' property).

    Raises FacebookBadObjectError when neither property is set.
    """
    from facebook_business.exceptions import FacebookBadObjectError
    from facebook_business.video_uploader import (
        VideoUploader,
        VideoUploadRequest,
    )
    has_slideshow = (
        self.Field.slideshow_spec in self
        and self[self.Field.slideshow_spec] is not None
    )
    if has_slideshow:
        # Slideshow videos are created server-side from a list of images.
        spec = self[self.Field.slideshow_spec]
        upload_request = VideoUploadRequest(self.get_api_assured())
        upload_request.setParams(params={'slideshow_spec': {
            'images_urls': spec['images_urls'],
            'duration_ms': spec['duration_ms'],
            'transition_ms': spec['transition_ms'],
        }})
        response = upload_request.send(
            (self.get_parent_id_assured(), 'advideos'),
        ).json()
    elif self.Field.filepath not in self:
        raise FacebookBadObjectError(
            "AdVideo requires a filepath or slideshow_spec to be defined.",
        )
    else:
        # Regular path: chunked upload of the local file.
        response = VideoUploader().upload(self)
    self._set_data(response)
    return response
def waitUntilEncodingReady(self, interval=30, timeout=600):
    """Block until this video finishes encoding.

    Polls every `interval` seconds for at most `timeout` seconds.
    Raises FacebookError when the object has no 'id' yet (i.e. it was
    never uploaded).
    """
    from facebook_business.video_uploader import VideoEncodingStatusChecker
    from facebook_business.exceptions import FacebookError
    if not ('id' in self):
        raise FacebookError(
            'Invalid Video ID',
        )
    VideoEncodingStatusChecker.waitUntilReady(
        self.get_api_assured(), self['id'], interval, timeout)
| 39.400682 | 122 | 0.624857 |
from facebook_business.adobjects.abstractobject import AbstractObject
from facebook_business.adobjects.abstractcrudobject import AbstractCrudObject
from facebook_business.adobjects.objectparser import ObjectParser
from facebook_business.api import FacebookRequest
from facebook_business.typechecker import TypeChecker
class AdVideo(
AbstractCrudObject,
):
def __init__(self, fbid=None, parent_id=None, api=None):
self._isAdVideo = True
super(AdVideo, self).__init__(fbid, parent_id, api)
class Field(AbstractObject.Field):
ad_breaks = 'ad_breaks'
backdated_time = 'backdated_time'
backdated_time_granularity = 'backdated_time_granularity'
content_category = 'content_category'
content_tags = 'content_tags'
copyright = 'copyright'
copyright_monitoring_status = 'copyright_monitoring_status'
created_time = 'created_time'
custom_labels = 'custom_labels'
description = 'description'
embed_html = 'embed_html'
embeddable = 'embeddable'
event = 'event'
expiration = 'expiration'
format = 'format'
field_from = 'from'
icon = 'icon'
id = 'id'
is_crosspost_video = 'is_crosspost_video'
is_crossposting_eligible = 'is_crossposting_eligible'
is_episode = 'is_episode'
is_instagram_eligible = 'is_instagram_eligible'
is_reference_only = 'is_reference_only'
length = 'length'
live_audience_count = 'live_audience_count'
live_status = 'live_status'
music_video_copyright = 'music_video_copyright'
permalink_url = 'permalink_url'
picture = 'picture'
place = 'place'
premiere_living_room_status = 'premiere_living_room_status'
privacy = 'privacy'
published = 'published'
scheduled_publish_time = 'scheduled_publish_time'
source = 'source'
spherical = 'spherical'
status = 'status'
title = 'title'
universal_video_id = 'universal_video_id'
updated_time = 'updated_time'
adaptive_type = 'adaptive_type'
animated_effect_id = 'animated_effect_id'
application_id = 'application_id'
asked_fun_fact_prompt_id = 'asked_fun_fact_prompt_id'
audio_story_wave_animation_handle = 'audio_story_wave_animation_handle'
chunk_session_id = 'chunk_session_id'
composer_entry_picker = 'composer_entry_picker'
composer_entry_point = 'composer_entry_point'
composer_entry_time = 'composer_entry_time'
composer_session_events_log = 'composer_session_events_log'
composer_session_id = 'composer_session_id'
composer_source_surface = 'composer_source_surface'
composer_type = 'composer_type'
container_type = 'container_type'
creative_tools = 'creative_tools'
end_offset = 'end_offset'
fbuploader_video_file_chunk = 'fbuploader_video_file_chunk'
file_size = 'file_size'
file_url = 'file_url'
fisheye_video_cropped = 'fisheye_video_cropped'
formatting = 'formatting'
fov = 'fov'
front_z_rotation = 'front_z_rotation'
fun_fact_prompt_id = 'fun_fact_prompt_id'
fun_fact_toastee_id = 'fun_fact_toastee_id'
guide = 'guide'
guide_enabled = 'guide_enabled'
has_nickname = 'has_nickname'
holiday_card = 'holiday_card'
initial_heading = 'initial_heading'
initial_pitch = 'initial_pitch'
instant_game_entry_point_data = 'instant_game_entry_point_data'
is_boost_intended = 'is_boost_intended'
is_group_linking_post = 'is_group_linking_post'
is_voice_clip = 'is_voice_clip'
location_source_id = 'location_source_id'
name = 'name'
offer_like_post_id = 'offer_like_post_id'
og_action_type_id = 'og_action_type_id'
og_icon_id = 'og_icon_id'
og_object_id = 'og_object_id'
og_phrase = 'og_phrase'
og_suggestion_mechanism = 'og_suggestion_mechanism'
original_fov = 'original_fov'
original_projection_type = 'original_projection_type'
publish_event_id = 'publish_event_id'
react_mode_metadata = 'react_mode_metadata'
referenced_sticker_id = 'referenced_sticker_id'
replace_video_id = 'replace_video_id'
sales_promo_id = 'sales_promo_id'
slideshow_spec = 'slideshow_spec'
source_instagram_media_id = 'source_instagram_media_id'
start_offset = 'start_offset'
swap_mode = 'swap_mode'
text_format_metadata = 'text_format_metadata'
throwback_camera_roll_media = 'throwback_camera_roll_media'
thumb = 'thumb'
time_since_original_post = 'time_since_original_post'
transcode_setting_properties = 'transcode_setting_properties'
unpublished_content_type = 'unpublished_content_type'
upload_phase = 'upload_phase'
upload_session_id = 'upload_session_id'
upload_setting_properties = 'upload_setting_properties'
video_file_chunk = 'video_file_chunk'
video_id_original = 'video_id_original'
video_start_time_ms = 'video_start_time_ms'
waterfall_id = 'waterfall_id'
filename = 'filename'
filepath = 'filepath'
class ContainerType:
aco_autoextracted_video = 'ACO_AUTOEXTRACTED_VIDEO'
aco_video_variation = 'ACO_VIDEO_VARIATION'
ad_break_preview = 'AD_BREAK_PREVIEW'
ad_derivative = 'AD_DERIVATIVE'
age_up = 'AGE_UP'
album_multimedia_post = 'ALBUM_MULTIMEDIA_POST'
aloha_call_video = 'ALOHA_CALL_VIDEO'
aloha_superframe = 'ALOHA_SUPERFRAME'
app_review_screencast = 'APP_REVIEW_SCREENCAST'
atlas_video = 'ATLAS_VIDEO'
audio_broadcast = 'AUDIO_BROADCAST'
bell_poll = 'BELL_POLL'
brand_equity_poll_video = 'BRAND_EQUITY_POLL_VIDEO'
broadcast = 'BROADCAST'
candidate_videos = 'CANDIDATE_VIDEOS'
canvas = 'CANVAS'
cfc_video = 'CFC_VIDEO'
cms_media_manager = 'CMS_MEDIA_MANAGER'
contained_post_attachment = 'CONTAINED_POST_ATTACHMENT'
contained_post_audio_broadcast = 'CONTAINED_POST_AUDIO_BROADCAST'
contained_post_broadcast = 'CONTAINED_POST_BROADCAST'
copyright_reference_broadcast = 'COPYRIGHT_REFERENCE_BROADCAST'
copyright_reference_video = 'COPYRIGHT_REFERENCE_VIDEO'
cultural_moment_deprecated = 'CULTURAL_MOMENT_DEPRECATED'
dco_ad_asset_feed = 'DCO_AD_ASSET_FEED'
dco_autogen_video = 'DCO_AUTOGEN_VIDEO'
dco_trimmed_video = 'DCO_TRIMMED_VIDEO'
dim_sum = 'DIM_SUM'
directed_post_attachment = 'DIRECTED_POST_ATTACHMENT'
direct_inbox = 'DIRECT_INBOX'
direct_inbox_reaction = 'DIRECT_INBOX_REACTION'
dynamic_item_display_bundle = 'DYNAMIC_ITEM_DISPLAY_BUNDLE'
dynamic_item_video = 'DYNAMIC_ITEM_VIDEO'
dynamic_template_video = 'DYNAMIC_TEMPLATE_VIDEO'
event_cover_video = 'EVENT_COVER_VIDEO'
event_tour = 'EVENT_TOUR'
facecast_dvr = 'FACECAST_DVR'
fb_shorts = 'FB_SHORTS'
fb_shorts_group_post = 'FB_SHORTS_GROUP_POST'
fb_shorts_post = 'FB_SHORTS_POST'
fundraiser_cover_video = 'FUNDRAISER_COVER_VIDEO'
game_clip = 'GAME_CLIP'
gemstone = 'GEMSTONE'
goodwill_anniversary_deprecated = 'GOODWILL_ANNIVERSARY_DEPRECATED'
goodwill_anniversary_promotion_deprecated = 'GOODWILL_ANNIVERSARY_PROMOTION_DEPRECATED'
goodwill_video_contained_share = 'GOODWILL_VIDEO_CONTAINED_SHARE'
goodwill_video_promotion = 'GOODWILL_VIDEO_PROMOTION'
goodwill_video_share = 'GOODWILL_VIDEO_SHARE'
goodwill_video_token_required = 'GOODWILL_VIDEO_TOKEN_REQUIRED'
group_post = 'GROUP_POST'
heuristic_cluster_video = 'HEURISTIC_CLUSTER_VIDEO'
heuristic_preview = 'HEURISTIC_PREVIEW'
highlight_clip_video = 'HIGHLIGHT_CLIP_VIDEO'
ig_reels_xpv = 'IG_REELS_XPV'
ig_stories_reader = 'IG_STORIES_READER'
inspiration_video = 'INSPIRATION_VIDEO'
instagram_video_copy = 'INSTAGRAM_VIDEO_COPY'
instant_application_preview = 'INSTANT_APPLICATION_PREVIEW'
instant_article = 'INSTANT_ARTICLE'
instant_game_clip = 'INSTANT_GAME_CLIP'
issue_module = 'ISSUE_MODULE'
job_application_video = 'JOB_APPLICATION_VIDEO'
job_opening_video = 'JOB_OPENING_VIDEO'
kototoro = 'KOTOTORO'
learn = 'LEARN'
legacy = 'LEGACY'
live_creative_kit_video = 'LIVE_CREATIVE_KIT_VIDEO'
live_linear_video_channel_internal_broadcast = 'LIVE_LINEAR_VIDEO_CHANNEL_INTERNAL_BROADCAST'
live_photo = 'LIVE_PHOTO'
look_now_deprecated = 'LOOK_NOW_DEPRECATED'
marketplace_listing_video = 'MARKETPLACE_LISTING_VIDEO'
marketplace_pre_recorded_video = 'MARKETPLACE_PRE_RECORDED_VIDEO'
moments_video = 'MOMENTS_VIDEO'
neo_async_game_video = 'NEO_ASYNC_GAME_VIDEO'
no_story = 'NO_STORY'
no_story_with_entpost = 'NO_STORY_WITH_ENTPOST'
oculus_creator_portal = 'OCULUS_CREATOR_PORTAL'
oculus_venues_broadcast = 'OCULUS_VENUES_BROADCAST'
offers_video = 'OFFERS_VIDEO'
pages_cover_video = 'PAGES_COVER_VIDEO'
page_review_screencast = 'PAGE_REVIEW_SCREENCAST'
page_slideshow_video = 'PAGE_SLIDESHOW_VIDEO'
pixelcloud = 'PIXELCLOUD'
premiere_source = 'PREMIERE_SOURCE'
private_gallery_video = 'PRIVATE_GALLERY_VIDEO'
product_video = 'PRODUCT_VIDEO'
profile_cover_video = 'PROFILE_COVER_VIDEO'
profile_intro_card = 'PROFILE_INTRO_CARD'
profile_to_page_uploaded_video = 'PROFILE_TO_PAGE_UPLOADED_VIDEO'
profile_video = 'PROFILE_VIDEO'
proton = 'PROTON'
quick_promotion = 'QUICK_PROMOTION'
replace_video = 'REPLACE_VIDEO'
sales_client_interaction = 'SALES_CLIENT_INTERACTION'
say_thanks_deprecated = 'SAY_THANKS_DEPRECATED'
showreel_native_dummy_video = 'SHOWREEL_NATIVE_DUMMY_VIDEO'
slideshow_animoto = 'SLIDESHOW_ANIMOTO'
slideshow_shakr = 'SLIDESHOW_SHAKR'
slideshow_variation_video = 'SLIDESHOW_VARIATION_VIDEO'
sotto_content = 'SOTTO_CONTENT'
sound_platform_stream = 'SOUND_PLATFORM_STREAM'
stories_video = 'STORIES_VIDEO'
storyline = 'STORYLINE'
storyline_with_external_music = 'STORYLINE_WITH_EXTERNAL_MUSIC'
story_archive_video = 'STORY_ARCHIVE_VIDEO'
story_card_template = 'STORY_CARD_TEMPLATE'
stream_highlights_video = 'STREAM_HIGHLIGHTS_VIDEO'
tarot_digest = 'TAROT_DIGEST'
temp_multimedia_post = 'TEMP_MULTIMEDIA_POST'
unlisted = 'UNLISTED'
video_comment = 'VIDEO_COMMENT'
video_creative_editor_autogen_ad_video = 'VIDEO_CREATIVE_EDITOR_AUTOGEN_AD_VIDEO'
video_superres = 'VIDEO_SUPERRES'
vu_generated_video = 'VU_GENERATED_VIDEO'
woodhenge = 'WOODHENGE'
work_knowledge_video = 'WORK_KNOWLEDGE_VIDEO'
your_day = 'YOUR_DAY'
class ContentCategory:
beauty_fashion = 'BEAUTY_FASHION'
business = 'BUSINESS'
cars_trucks = 'CARS_TRUCKS'
comedy = 'COMEDY'
cute_animals = 'CUTE_ANIMALS'
entertainment = 'ENTERTAINMENT'
family = 'FAMILY'
food_health = 'FOOD_HEALTH'
home = 'HOME'
lifestyle = 'LIFESTYLE'
music = 'MUSIC'
news = 'NEWS'
other = 'OTHER'
politics = 'POLITICS'
science = 'SCIENCE'
sports = 'SPORTS'
technology = 'TECHNOLOGY'
video_gaming = 'VIDEO_GAMING'
class Formatting:
markdown = 'MARKDOWN'
plaintext = 'PLAINTEXT'
class OriginalProjectionType:
cubemap = 'cubemap'
equirectangular = 'equirectangular'
half_equirectangular = 'half_equirectangular'
class SwapMode:
replace = 'replace'
class UnpublishedContentType:
ads_post = 'ADS_POST'
draft = 'DRAFT'
inline_created = 'INLINE_CREATED'
published = 'PUBLISHED'
reviewable_branded_content = 'REVIEWABLE_BRANDED_CONTENT'
scheduled = 'SCHEDULED'
scheduled_recurring = 'SCHEDULED_RECURRING'
class UploadPhase:
cancel = 'cancel'
finish = 'finish'
start = 'start'
transfer = 'transfer'
class Type:
tagged = 'tagged'
uploaded = 'uploaded'
class BackdatedTimeGranularity:
day = 'day'
hour = 'hour'
min = 'min'
month = 'month'
none = 'none'
year = 'year'
@classmethod
def get_endpoint(cls):
return 'advideos'
def api_create(self, parent_id, fields=None, params=None, batch=None, success=None, failure=None, pending=False):
from facebook_business.adobjects.adaccount import AdAccount
return AdAccount(api=self._api, fbid=parent_id).create_ad_video(fields, params, batch, success, failure, pending)
def api_delete(self, fields=None, params=None, batch=None, success=None, failure=None, pending=False):
from facebook_business.utils import api_utils
if batch is None and (success is not None or failure is not None):
api_utils.warning('`success` and `failure` callback only work for batch call.')
param_types = {
}
enums = {
}
request = FacebookRequest(
node_id=self['id'],
method='DELETE',
endpoint='/',
api=self._api,
param_checker=TypeChecker(param_types, enums),
target_class=AbstractCrudObject,
api_type='NODE',
response_parser=ObjectParser(reuse_object=self),
)
request.add_params(params)
request.add_fields(fields)
if batch is not None:
request.add_to_batch(batch, success=success, failure=failure)
return request
elif pending:
return request
else:
self.assure_call()
return request.execute()
def api_get(self, fields=None, params=None, batch=None, success=None, failure=None, pending=False):
from facebook_business.utils import api_utils
if batch is None and (success is not None or failure is not None):
api_utils.warning('`success` and `failure` callback only work for batch call.')
param_types = {
}
enums = {
}
request = FacebookRequest(
node_id=self['id'],
method='GET',
endpoint='/',
api=self._api,
param_checker=TypeChecker(param_types, enums),
target_class=AdVideo,
api_type='NODE',
response_parser=ObjectParser(reuse_object=self),
)
request.add_params(params)
request.add_fields(fields)
if batch is not None:
request.add_to_batch(batch, success=success, failure=failure)
return request
elif pending:
return request
else:
self.assure_call()
return request.execute()
def api_update(self, fields=None, params=None, batch=None, success=None, failure=None, pending=False):
from facebook_business.utils import api_utils
if batch is None and (success is not None or failure is not None):
api_utils.warning('`success` and `failure` callback only work for batch call.')
param_types = {
'ad_breaks': 'list',
'allow_bm_crossposting': 'bool',
'allow_crossposting_for_pages': 'list<Object>',
'backdated_time': 'datetime',
'backdated_time_granularity': 'backdated_time_granularity_enum',
'call_to_action': 'Object',
'content_category': 'content_category_enum',
'content_tags': 'list<string>',
'custom_labels': 'list<string>',
'description': 'string',
'direct_share_status': 'unsigned int',
'embeddable': 'bool',
'expiration': 'Object',
'expire_now': 'bool',
'increment_play_count': 'bool',
'name': 'string',
'preferred_thumbnail_id': 'string',
'privacy': 'string',
'publish_to_news_feed': 'bool',
'publish_to_videos_tab': 'bool',
'published': 'bool',
'scheduled_publish_time': 'unsigned int',
'social_actions': 'bool',
'sponsor_id': 'string',
'sponsor_relationship': 'unsigned int',
'tags': 'list<string>',
'target': 'string',
'universal_video_id': 'string',
}
enums = {
'backdated_time_granularity_enum': AdVideo.BackdatedTimeGranularity.__dict__.values(),
'content_category_enum': AdVideo.ContentCategory.__dict__.values(),
}
request = FacebookRequest(
node_id=self['id'],
method='POST',
endpoint='/',
api=self._api,
param_checker=TypeChecker(param_types, enums),
target_class=AdVideo,
api_type='NODE',
response_parser=ObjectParser(reuse_object=self),
)
request.add_params(params)
request.add_fields(fields)
if batch is not None:
request.add_to_batch(batch, success=success, failure=failure)
return request
elif pending:
return request
else:
self.assure_call()
return request.execute()
def get_captions(self, fields=None, params=None, batch=None, success=None, failure=None, pending=False):
from facebook_business.utils import api_utils
if batch is None and (success is not None or failure is not None):
api_utils.warning('`success` and `failure` callback only work for batch call.')
param_types = {
}
enums = {
}
request = FacebookRequest(
node_id=self['id'],
method='GET',
endpoint='/captions',
api=self._api,
param_checker=TypeChecker(param_types, enums),
target_class=AbstractCrudObject,
api_type='EDGE',
response_parser=ObjectParser(target_class=AbstractCrudObject, api=self._api),
)
request.add_params(params)
request.add_fields(fields)
if batch is not None:
request.add_to_batch(batch, success=success, failure=failure)
return request
elif pending:
return request
else:
self.assure_call()
return request.execute()
def create_caption(self, fields=None, params=None, batch=None, success=None, failure=None, pending=False):
from facebook_business.utils import api_utils
if batch is None and (success is not None or failure is not None):
api_utils.warning('`success` and `failure` callback only work for batch call.')
param_types = {
'captions_file': 'file',
'default_locale': 'string',
'locales_to_delete': 'list<string>',
}
enums = {
}
request = FacebookRequest(
node_id=self['id'],
method='POST',
endpoint='/captions',
api=self._api,
param_checker=TypeChecker(param_types, enums),
target_class=AdVideo,
api_type='EDGE',
response_parser=ObjectParser(target_class=AdVideo, api=self._api),
)
request.add_params(params)
request.add_fields(fields)
if batch is not None:
request.add_to_batch(batch, success=success, failure=failure)
return request
elif pending:
return request
else:
self.assure_call()
return request.execute()
def get_comments(self, fields=None, params=None, batch=None, success=None, failure=None, pending=False):
from facebook_business.utils import api_utils
if batch is None and (success is not None or failure is not None):
api_utils.warning('`success` and `failure` callback only work for batch call.')
from facebook_business.adobjects.comment import Comment
param_types = {
'filter': 'filter_enum',
'live_filter': 'live_filter_enum',
'order': 'order_enum',
'since': 'datetime',
}
enums = {
'filter_enum': Comment.Filter.__dict__.values(),
'live_filter_enum': Comment.LiveFilter.__dict__.values(),
'order_enum': Comment.Order.__dict__.values(),
}
request = FacebookRequest(
node_id=self['id'],
method='GET',
endpoint='/comments',
api=self._api,
param_checker=TypeChecker(param_types, enums),
target_class=Comment,
api_type='EDGE',
response_parser=ObjectParser(target_class=Comment, api=self._api),
)
request.add_params(params)
request.add_fields(fields)
if batch is not None:
request.add_to_batch(batch, success=success, failure=failure)
return request
elif pending:
return request
else:
self.assure_call()
return request.execute()
def create_comment(self, fields=None, params=None, batch=None, success=None, failure=None, pending=False):
from facebook_business.utils import api_utils
if batch is None and (success is not None or failure is not None):
api_utils.warning('`success` and `failure` callback only work for batch call.')
from facebook_business.adobjects.comment import Comment
param_types = {
'attachment_id': 'string',
'attachment_share_url': 'string',
'attachment_url': 'string',
'comment_privacy_value': 'comment_privacy_value_enum',
'facepile_mentioned_ids': 'list<string>',
'feedback_source': 'string',
'is_offline': 'bool',
'message': 'string',
'nectar_module': 'string',
'object_id': 'string',
'parent_comment_id': 'Object',
'text': 'string',
'tracking': 'string',
}
enums = {
'comment_privacy_value_enum': Comment.CommentPrivacyValue.__dict__.values(),
}
request = FacebookRequest(
node_id=self['id'],
method='POST',
endpoint='/comments',
api=self._api,
param_checker=TypeChecker(param_types, enums),
target_class=Comment,
api_type='EDGE',
response_parser=ObjectParser(target_class=Comment, api=self._api),
)
request.add_params(params)
request.add_fields(fields)
if batch is not None:
request.add_to_batch(batch, success=success, failure=failure)
return request
elif pending:
return request
else:
self.assure_call()
return request.execute()
def get_crosspost_shared_pages(self, fields=None, params=None, batch=None, success=None, failure=None, pending=False):
from facebook_business.utils import api_utils
if batch is None and (success is not None or failure is not None):
api_utils.warning('`success` and `failure` callback only work for batch call.')
from facebook_business.adobjects.page import Page
param_types = {
}
enums = {
}
request = FacebookRequest(
node_id=self['id'],
method='GET',
endpoint='/crosspost_shared_pages',
api=self._api,
param_checker=TypeChecker(param_types, enums),
target_class=Page,
api_type='EDGE',
response_parser=ObjectParser(target_class=Page, api=self._api),
)
request.add_params(params)
request.add_fields(fields)
if batch is not None:
request.add_to_batch(batch, success=success, failure=failure)
return request
elif pending:
return request
else:
self.assure_call()
return request.execute()
def get_likes(self, fields=None, params=None, batch=None, success=None, failure=None, pending=False):
from facebook_business.utils import api_utils
if batch is None and (success is not None or failure is not None):
api_utils.warning('`success` and `failure` callback only work for batch call.')
from facebook_business.adobjects.profile import Profile
param_types = {
}
enums = {
}
request = FacebookRequest(
node_id=self['id'],
method='GET',
endpoint='/likes',
api=self._api,
param_checker=TypeChecker(param_types, enums),
target_class=Profile,
api_type='EDGE',
response_parser=ObjectParser(target_class=Profile, api=self._api),
)
request.add_params(params)
request.add_fields(fields)
if batch is not None:
request.add_to_batch(batch, success=success, failure=failure)
return request
elif pending:
return request
else:
self.assure_call()
return request.execute()
def create_like(self, fields=None, params=None, batch=None, success=None, failure=None, pending=False):
from facebook_business.utils import api_utils
if batch is None and (success is not None or failure is not None):
api_utils.warning('`success` and `failure` callback only work for batch call.')
param_types = {
'feedback_source': 'string',
'nectar_module': 'string',
'notify': 'bool',
'tracking': 'string',
}
enums = {
}
request = FacebookRequest(
node_id=self['id'],
method='POST',
endpoint='/likes',
api=self._api,
param_checker=TypeChecker(param_types, enums),
target_class=AdVideo,
api_type='EDGE',
response_parser=ObjectParser(target_class=AdVideo, api=self._api),
)
request.add_params(params)
request.add_fields(fields)
if batch is not None:
request.add_to_batch(batch, success=success, failure=failure)
return request
elif pending:
return request
else:
self.assure_call()
return request.execute()
def get_poll_settings(self, fields=None, params=None, batch=None, success=None, failure=None, pending=False):
from facebook_business.utils import api_utils
if batch is None and (success is not None or failure is not None):
api_utils.warning('`success` and `failure` callback only work for batch call.')
param_types = {
}
enums = {
}
request = FacebookRequest(
node_id=self['id'],
method='GET',
endpoint='/poll_settings',
api=self._api,
param_checker=TypeChecker(param_types, enums),
target_class=AbstractCrudObject,
api_type='EDGE',
response_parser=ObjectParser(target_class=AbstractCrudObject, api=self._api),
)
request.add_params(params)
request.add_fields(fields)
if batch is not None:
request.add_to_batch(batch, success=success, failure=failure)
return request
elif pending:
return request
else:
self.assure_call()
return request.execute()
def get_polls(self, fields=None, params=None, batch=None, success=None, failure=None, pending=False):
from facebook_business.utils import api_utils
if batch is None and (success is not None or failure is not None):
api_utils.warning('`success` and `failure` callback only work for batch call.')
from facebook_business.adobjects.videopoll import VideoPoll
param_types = {
}
enums = {
}
request = FacebookRequest(
node_id=self['id'],
method='GET',
endpoint='/polls',
api=self._api,
param_checker=TypeChecker(param_types, enums),
target_class=VideoPoll,
api_type='EDGE',
response_parser=ObjectParser(target_class=VideoPoll, api=self._api),
)
request.add_params(params)
request.add_fields(fields)
if batch is not None:
request.add_to_batch(batch, success=success, failure=failure)
return request
elif pending:
return request
else:
self.assure_call()
return request.execute()
def create_poll(self, fields=None, params=None, batch=None, success=None, failure=None, pending=False):
from facebook_business.utils import api_utils
if batch is None and (success is not None or failure is not None):
api_utils.warning('`success` and `failure` callback only work for batch call.')
from facebook_business.adobjects.videopoll import VideoPoll
param_types = {
'close_after_voting': 'bool',
'correct_option': 'unsigned int',
'default_open': 'bool',
'options': 'list<string>',
'question': 'string',
'show_gradient': 'bool',
'show_results': 'bool',
}
enums = {
}
request = FacebookRequest(
node_id=self['id'],
method='POST',
endpoint='/polls',
api=self._api,
param_checker=TypeChecker(param_types, enums),
target_class=VideoPoll,
api_type='EDGE',
response_parser=ObjectParser(target_class=VideoPoll, api=self._api),
)
request.add_params(params)
request.add_fields(fields)
if batch is not None:
request.add_to_batch(batch, success=success, failure=failure)
return request
elif pending:
return request
else:
self.assure_call()
return request.execute()
def get_sponsor_tags(self, fields=None, params=None, batch=None, success=None, failure=None, pending=False):
from facebook_business.utils import api_utils
if batch is None and (success is not None or failure is not None):
api_utils.warning('`success` and `failure` callback only work for batch call.')
from facebook_business.adobjects.page import Page
param_types = {
}
enums = {
}
request = FacebookRequest(
node_id=self['id'],
method='GET',
endpoint='/sponsor_tags',
api=self._api,
param_checker=TypeChecker(param_types, enums),
target_class=Page,
api_type='EDGE',
response_parser=ObjectParser(target_class=Page, api=self._api),
)
request.add_params(params)
request.add_fields(fields)
if batch is not None:
request.add_to_batch(batch, success=success, failure=failure)
return request
elif pending:
return request
else:
self.assure_call()
return request.execute()
def get_tags(self, fields=None, params=None, batch=None, success=None, failure=None, pending=False):
from facebook_business.utils import api_utils
if batch is None and (success is not None or failure is not None):
api_utils.warning('`success` and `failure` callback only work for batch call.')
param_types = {
}
enums = {
}
request = FacebookRequest(
node_id=self['id'],
method='GET',
endpoint='/tags',
api=self._api,
param_checker=TypeChecker(param_types, enums),
target_class=AbstractCrudObject,
api_type='EDGE',
response_parser=ObjectParser(target_class=AbstractCrudObject, api=self._api),
)
request.add_params(params)
request.add_fields(fields)
if batch is not None:
request.add_to_batch(batch, success=success, failure=failure)
return request
elif pending:
return request
else:
self.assure_call()
return request.execute()
def create_tag(self, fields=None, params=None, batch=None, success=None, failure=None, pending=False):
from facebook_business.utils import api_utils
if batch is None and (success is not None or failure is not None):
api_utils.warning('`success` and `failure` callback only work for batch call.')
param_types = {
'tag_uid': 'int',
'uid': 'int',
'vid': 'string',
}
enums = {
}
request = FacebookRequest(
node_id=self['id'],
method='POST',
endpoint='/tags',
api=self._api,
param_checker=TypeChecker(param_types, enums),
target_class=AdVideo,
api_type='EDGE',
response_parser=ObjectParser(target_class=AdVideo, api=self._api),
)
request.add_params(params)
request.add_fields(fields)
if batch is not None:
request.add_to_batch(batch, success=success, failure=failure)
return request
elif pending:
return request
else:
self.assure_call()
return request.execute()
def get_thumbnails(self, fields=None, params=None, batch=None, success=None, failure=None, pending=False):
from facebook_business.utils import api_utils
if batch is None and (success is not None or failure is not None):
api_utils.warning('`success` and `failure` callback only work for batch call.')
from facebook_business.adobjects.videothumbnail import VideoThumbnail
param_types = {
}
enums = {
}
request = FacebookRequest(
node_id=self['id'],
method='GET',
endpoint='/thumbnails',
api=self._api,
param_checker=TypeChecker(param_types, enums),
target_class=VideoThumbnail,
api_type='EDGE',
response_parser=ObjectParser(target_class=VideoThumbnail, api=self._api),
)
request.add_params(params)
request.add_fields(fields)
if batch is not None:
request.add_to_batch(batch, success=success, failure=failure)
return request
elif pending:
return request
else:
self.assure_call()
return request.execute()
def create_thumbnail(self, fields=None, params=None, batch=None, success=None, failure=None, pending=False):
from facebook_business.utils import api_utils
if batch is None and (success is not None or failure is not None):
api_utils.warning('`success` and `failure` callback only work for batch call.')
param_types = {
'is_preferred': 'bool',
'source': 'file',
}
enums = {
}
request = FacebookRequest(
node_id=self['id'],
method='POST',
endpoint='/thumbnails',
api=self._api,
param_checker=TypeChecker(param_types, enums),
target_class=AdVideo,
api_type='EDGE',
response_parser=ObjectParser(target_class=AdVideo, api=self._api),
)
request.add_params(params)
request.add_fields(fields)
if batch is not None:
request.add_to_batch(batch, success=success, failure=failure)
return request
elif pending:
return request
else:
self.assure_call()
return request.execute()
def get_video_insights(self, fields=None, params=None, batch=None, success=None, failure=None, pending=False):
from facebook_business.utils import api_utils
if batch is None and (success is not None or failure is not None):
api_utils.warning('`success` and `failure` callback only work for batch call.')
from facebook_business.adobjects.insightsresult import InsightsResult
param_types = {
'metric': 'list<Object>',
'period': 'period_enum',
'since': 'datetime',
'until': 'datetime',
}
enums = {
'period_enum': InsightsResult.Period.__dict__.values(),
}
request = FacebookRequest(
node_id=self['id'],
method='GET',
endpoint='/video_insights',
api=self._api,
param_checker=TypeChecker(param_types, enums),
target_class=InsightsResult,
api_type='EDGE',
response_parser=ObjectParser(target_class=InsightsResult, api=self._api),
)
request.add_params(params)
request.add_fields(fields)
if batch is not None:
request.add_to_batch(batch, success=success, failure=failure)
return request
elif pending:
return request
else:
self.assure_call()
return request.execute()
_field_types = {
'ad_breaks': 'list<int>',
'backdated_time': 'datetime',
'backdated_time_granularity': 'string',
'content_category': 'string',
'content_tags': 'list<string>',
'copyright': 'VideoCopyright',
'copyright_monitoring_status': 'string',
'created_time': 'datetime',
'custom_labels': 'list<string>',
'description': 'string',
'embed_html': 'string',
'embeddable': 'bool',
'event': 'Event',
'expiration': 'Object',
'format': 'list<Object>',
'from': 'Object',
'icon': 'string',
'id': 'string',
'is_crosspost_video': 'bool',
'is_crossposting_eligible': 'bool',
'is_episode': 'bool',
'is_instagram_eligible': 'bool',
'is_reference_only': 'bool',
'length': 'float',
'live_audience_count': 'unsigned int',
'live_status': 'string',
'music_video_copyright': 'MusicVideoCopyright',
'permalink_url': 'string',
'picture': 'string',
'place': 'Place',
'premiere_living_room_status': 'string',
'privacy': 'Privacy',
'published': 'bool',
'scheduled_publish_time': 'datetime',
'source': 'string',
'spherical': 'bool',
'status': 'Object',
'title': 'string',
'universal_video_id': 'string',
'updated_time': 'datetime',
'adaptive_type': 'string',
'animated_effect_id': 'unsigned int',
'application_id': 'string',
'asked_fun_fact_prompt_id': 'unsigned int',
'audio_story_wave_animation_handle': 'string',
'chunk_session_id': 'string',
'composer_entry_picker': 'string',
'composer_entry_point': 'string',
'composer_entry_time': 'unsigned int',
'composer_session_events_log': 'string',
'composer_session_id': 'string',
'composer_source_surface': 'string',
'composer_type': 'string',
'container_type': 'ContainerType',
'creative_tools': 'string',
'end_offset': 'unsigned int',
'fbuploader_video_file_chunk': 'string',
'file_size': 'unsigned int',
'file_url': 'string',
'fisheye_video_cropped': 'bool',
'formatting': 'Formatting',
'fov': 'unsigned int',
'front_z_rotation': 'float',
'fun_fact_prompt_id': 'unsigned int',
'fun_fact_toastee_id': 'unsigned int',
'guide': 'list<list<unsigned int>>',
'guide_enabled': 'bool',
'has_nickname': 'bool',
'holiday_card': 'string',
'initial_heading': 'unsigned int',
'initial_pitch': 'unsigned int',
'instant_game_entry_point_data': 'string',
'is_boost_intended': 'bool',
'is_group_linking_post': 'bool',
'is_voice_clip': 'bool',
'location_source_id': 'string',
'name': 'string',
'offer_like_post_id': 'unsigned int',
'og_action_type_id': 'string',
'og_icon_id': 'string',
'og_object_id': 'string',
'og_phrase': 'string',
'og_suggestion_mechanism': 'string',
'original_fov': 'unsigned int',
'original_projection_type': 'OriginalProjectionType',
'publish_event_id': 'unsigned int',
'react_mode_metadata': 'string',
'referenced_sticker_id': 'string',
'replace_video_id': 'string',
'sales_promo_id': 'unsigned int',
'slideshow_spec': 'map',
'source_instagram_media_id': 'string',
'start_offset': 'unsigned int',
'swap_mode': 'SwapMode',
'text_format_metadata': 'string',
'throwback_camera_roll_media': 'string',
'thumb': 'file',
'time_since_original_post': 'unsigned int',
'transcode_setting_properties': 'string',
'unpublished_content_type': 'UnpublishedContentType',
'upload_phase': 'UploadPhase',
'upload_session_id': 'string',
'upload_setting_properties': 'string',
'video_file_chunk': 'string',
'video_id_original': 'string',
'video_start_time_ms': 'unsigned int',
'waterfall_id': 'string',
'filename': 'file'
}
@classmethod
def _get_field_enum_info(cls):
field_enum_info = {}
field_enum_info['ContainerType'] = AdVideo.ContainerType.__dict__.values()
field_enum_info['ContentCategory'] = AdVideo.ContentCategory.__dict__.values()
field_enum_info['Formatting'] = AdVideo.Formatting.__dict__.values()
field_enum_info['OriginalProjectionType'] = AdVideo.OriginalProjectionType.__dict__.values()
field_enum_info['SwapMode'] = AdVideo.SwapMode.__dict__.values()
field_enum_info['UnpublishedContentType'] = AdVideo.UnpublishedContentType.__dict__.values()
field_enum_info['UploadPhase'] = AdVideo.UploadPhase.__dict__.values()
field_enum_info['Type'] = AdVideo.Type.__dict__.values()
field_enum_info['BackdatedTimeGranularity'] = AdVideo.BackdatedTimeGranularity.__dict__.values()
return field_enum_info
def remote_create(
self,
batch=None,
failure=None,
params=None,
success=None,
):
from facebook_business.exceptions import FacebookBadObjectError
from facebook_business.video_uploader import (
VideoUploader,
VideoUploadRequest,
)
if (self.Field.slideshow_spec in self and
self[self.Field.slideshow_spec] is not None):
request = VideoUploadRequest(self.get_api_assured())
request.setParams(params={'slideshow_spec': {
'images_urls': self[self.Field.slideshow_spec]['images_urls'],
'duration_ms': self[self.Field.slideshow_spec]['duration_ms'],
'transition_ms': self[self.Field.slideshow_spec]['transition_ms'],
}})
response = request.send((self.get_parent_id_assured(), 'advideos')).json()
elif not (self.Field.filepath in self):
raise FacebookBadObjectError(
"AdVideo requires a filepath or slideshow_spec to be defined.",
)
else:
video_uploader = VideoUploader()
response = video_uploader.upload(self)
self._set_data(response)
return response
def waitUntilEncodingReady(self, interval=30, timeout=600):
from facebook_business.video_uploader import VideoEncodingStatusChecker
from facebook_business.exceptions import FacebookError
if 'id' not in self:
raise FacebookError(
'Invalid Video ID',
)
VideoEncodingStatusChecker.waitUntilReady(
self.get_api_assured(),
self['id'],
interval,
timeout,
)
| true | true |
f71bd3a0366aa96de99eb6814ff7287178cc8592 | 1,849 | py | Python | solver.py | gtxn/wordle-solver | 48e8317bd1930884fa65d5bd0559b0341af456b6 | [
"MIT"
] | 1 | 2022-02-25T06:11:26.000Z | 2022-02-25T06:11:26.000Z | solver.py | gtxn/wordle-solver | 48e8317bd1930884fa65d5bd0559b0341af456b6 | [
"MIT"
] | null | null | null | solver.py | gtxn/wordle-solver | 48e8317bd1930884fa65d5bd0559b0341af456b6 | [
"MIT"
] | null | null | null | from multiprocessing.sharedctypes import Value
from global_vars import *
from utils import *
def state_inp_to_str(state_inp):
state_str = ''
for ch in state_inp:
state_str += str((['b', 'y', 'g'].index(ch)))
return state_str
def get_state_input():
valid = False
while not valid:
valid = True
state_inp = input(
"Fill in colours in the following format: b for black, y for yellow and g for green. For example, if a word had no correct letters, the input would be 'bbbbb': ").strip().lower()
if len(state_inp) != 5:
print("Please make sure exactly 5 colours were put.")
valid = False
for ch in state_inp:
if ch not in 'byg':
print("Only letters b, y, and g are allowed")
valid = False
print()
state_str = state_inp_to_str(state_inp)
return state_str
def get_confirm(msg):
yes_no = input(
f"{msg} (y/n): ").strip().lower()
if yes_no == 'y':
return True
elif yes_no == 'n':
return False
print("Invalid input.")
get_confirm(msg)
print('='*9)
rnd = 1
did_win = False
avail_words = GUESS_ARR
while rnd <= 5 and not did_win:
guess_rec, entropy = get_guess(avail_words)
print(f"Recommended word: {guess_rec.upper()}")
did_follow_rec = get_confirm('Did you follow the recommended word?')
if not did_follow_rec:
guess = input("What word did you input: ")
else:
guess = guess_rec
did_win = get_confirm('Did you win?')
if did_win:
break
print()
state = get_state_input()
avail_words = set(STATE_MAP[guess][state]) & set(avail_words)
print("---")
rnd += 1
if did_win:
print('Congrats! You won!')
else:
print('No... Sorry about that. Better luck next time!')
| 22.277108 | 190 | 0.60411 | from multiprocessing.sharedctypes import Value
from global_vars import *
from utils import *
def state_inp_to_str(state_inp):
state_str = ''
for ch in state_inp:
state_str += str((['b', 'y', 'g'].index(ch)))
return state_str
def get_state_input():
valid = False
while not valid:
valid = True
state_inp = input(
"Fill in colours in the following format: b for black, y for yellow and g for green. For example, if a word had no correct letters, the input would be 'bbbbb': ").strip().lower()
if len(state_inp) != 5:
print("Please make sure exactly 5 colours were put.")
valid = False
for ch in state_inp:
if ch not in 'byg':
print("Only letters b, y, and g are allowed")
valid = False
print()
state_str = state_inp_to_str(state_inp)
return state_str
def get_confirm(msg):
yes_no = input(
f"{msg} (y/n): ").strip().lower()
if yes_no == 'y':
return True
elif yes_no == 'n':
return False
print("Invalid input.")
get_confirm(msg)
print('='*9)
rnd = 1
did_win = False
avail_words = GUESS_ARR
while rnd <= 5 and not did_win:
guess_rec, entropy = get_guess(avail_words)
print(f"Recommended word: {guess_rec.upper()}")
did_follow_rec = get_confirm('Did you follow the recommended word?')
if not did_follow_rec:
guess = input("What word did you input: ")
else:
guess = guess_rec
did_win = get_confirm('Did you win?')
if did_win:
break
print()
state = get_state_input()
avail_words = set(STATE_MAP[guess][state]) & set(avail_words)
print("---")
rnd += 1
if did_win:
print('Congrats! You won!')
else:
print('No... Sorry about that. Better luck next time!')
| true | true |
f71bd4c71e4fbf7580c2aedbde090cc21504f482 | 1,367 | py | Python | mysite/urls.py | vansjyo/OSVI-RemoteControl | 6d3dd6aa1cceac2254171d57b33975df08cda2a8 | [
"MIT"
] | null | null | null | mysite/urls.py | vansjyo/OSVI-RemoteControl | 6d3dd6aa1cceac2254171d57b33975df08cda2a8 | [
"MIT"
] | null | null | null | mysite/urls.py | vansjyo/OSVI-RemoteControl | 6d3dd6aa1cceac2254171d57b33975df08cda2a8 | [
"MIT"
] | null | null | null | """mysite URL Configuration
The `urlpatterns` list routes URLs to views. For more information please see:
https://docs.djangoproject.com/en/2.0/topics/http/urls/
Examples:
Function views
1. Add an import: from my_app import views
2. Add a URL to urlpatterns: path('', views.home, name='home')
Class-based views
1. Add an import: from other_app.views import Home
2. Add a URL to urlpatterns: path('', Home.as_view(), name='home')
Including another URLconf
1. Import the include() function: from django.urls import include, path
2. Add a URL to urlpatterns: path('runcode/', include('runcode.urls'))
"""
from django.contrib import admin
from django.urls import path, include, re_path
from django.views.generic.base import TemplateView
from django.conf import settings
from django.views.static import serve
from django.conf.urls.static import static
urlpatterns = [
path('admin/', admin.site.urls),
path('accounts/', include('accounts.urls')),
path('accounts/', include('django.contrib.auth.urls')),
path('',TemplateView.as_view(template_name='home.html'), name='home'),
path('runcode/', include('runcode.urls')),
]#+static(settings.MEDIA_URL, document_root=settings.MEDIA_ROOT)
urlpatterns += [
re_path(r'^media/(?P<path>.*)$', serve, {
'document_root': settings.MEDIA_ROOT,
}),
]
| 37.972222 | 77 | 0.702999 | from django.contrib import admin
from django.urls import path, include, re_path
from django.views.generic.base import TemplateView
from django.conf import settings
from django.views.static import serve
from django.conf.urls.static import static
urlpatterns = [
path('admin/', admin.site.urls),
path('accounts/', include('accounts.urls')),
path('accounts/', include('django.contrib.auth.urls')),
path('',TemplateView.as_view(template_name='home.html'), name='home'),
path('runcode/', include('runcode.urls')),
]
urlpatterns += [
re_path(r'^media/(?P<path>.*)$', serve, {
'document_root': settings.MEDIA_ROOT,
}),
]
| true | true |
f71bd4cab6d4e1f08fe404d5664c23ec2a6f827b | 319 | py | Python | openapi2ceres/main.py | laulin/openapi2ceres | 16622d399b0baed0159f62bd44f405ab2565126c | [
"Apache-2.0"
] | null | null | null | openapi2ceres/main.py | laulin/openapi2ceres | 16622d399b0baed0159f62bd44f405ab2565126c | [
"Apache-2.0"
] | null | null | null | openapi2ceres/main.py | laulin/openapi2ceres | 16622d399b0baed0159f62bd44f405ab2565126c | [
"Apache-2.0"
] | null | null | null | from pprint import pprint
from .args import get_args
from .openapifile import OpenAPIFile
from .ceresfile import CeresFile
def main():
args = get_args()
input_file = OpenAPIFile()
input_file.load(args.input)
output_producer = CeresFile(input_file, args.output_dir)
output_producer.process()
| 22.785714 | 60 | 0.742947 | from pprint import pprint
from .args import get_args
from .openapifile import OpenAPIFile
from .ceresfile import CeresFile
def main():
args = get_args()
input_file = OpenAPIFile()
input_file.load(args.input)
output_producer = CeresFile(input_file, args.output_dir)
output_producer.process()
| true | true |
f71bd4d65ed44d77bfd69c5414311187e9b51c70 | 1,447 | py | Python | setup.py | fusion-energy/openmc_mesh_tally_to_vtk | b0a9077da79363dbf758d951ca68e5e5365d09ad | [
"MIT"
] | null | null | null | setup.py | fusion-energy/openmc_mesh_tally_to_vtk | b0a9077da79363dbf758d951ca68e5e5365d09ad | [
"MIT"
] | 3 | 2021-11-19T23:32:23.000Z | 2022-02-15T19:58:40.000Z | setup.py | fusion-energy/openmc_mesh_tally_to_vtk | b0a9077da79363dbf758d951ca68e5e5365d09ad | [
"MIT"
] | null | null | null | import setuptools
# Build metadata for the openmc_mesh_tally_to_vtk package.
# NOTE(review): `setuptools` is imported at the top of this file.

# Read the README for the PyPI long description. The encoding is pinned so
# the build does not depend on the machine's locale default (the README may
# contain non-ASCII characters).
with open("README.md", "r", encoding="utf-8") as fh:
    long_description = fh.read()

setuptools.setup(
    name="openmc_mesh_tally_to_vtk",
    version="develop",
    author="The Regular Mesh Plotter Development Team",
    author_email="mail@jshimwell.com",
    description="A Python package for converting OpenMC mesh tallies to VTK files and optionally converting the units",
    long_description=long_description,
    long_description_content_type="text/markdown",
    url="https://github.com/fusion-energy/openmc_mesh_tally_to_vtk",
    packages=setuptools.find_packages(),
    classifiers=[
        "Natural Language :: English",
        "Topic :: Scientific/Engineering",
        "Programming Language :: Python :: 3",
        "Programming Language :: Python :: 3.6",
        "Programming Language :: Python :: 3.7",
        "Programming Language :: Python :: 3.8",
        "Programming Language :: Python :: 3.9",
        "License :: OSI Approved :: MIT License",
        "Operating System :: OS Independent",
    ],
    python_requires=">=3.6",
    # Non-code files shipped inside the package.
    package_data={
        "openmc_mesh_tally_to_vtk": [
            "README.md",
            "LICENSE",
        ]
    },
    install_requires=[
        "numpy>=1.21.1",
        "matplotlib>=3.4.2",
        "trimesh",
        "shapely",
        "scipy",
        "dagmc_geometry_slice_plotter",
        "openmc_tally_unit_converter",
        "vtk",
    ],
)
| 31.456522 | 119 | 0.612301 | import setuptools
with open("README.md", "r") as fh:
long_description = fh.read()
setuptools.setup(
name="openmc_mesh_tally_to_vtk",
version="develop",
author="The Regular Mesh Plotter Development Team",
author_email="mail@jshimwell.com",
description="A Python package for converting OpenMC mesh tallies to VTK files and optionally converting the units",
long_description=long_description,
long_description_content_type="text/markdown",
url="https://github.com/fusion-energy/openmc_mesh_tally_to_vtk",
packages=setuptools.find_packages(),
classifiers=[
"Natural Language :: English",
"Topic :: Scientific/Engineering",
"Programming Language :: Python :: 3",
"Programming Language :: Python :: 3.6",
"Programming Language :: Python :: 3.7",
"Programming Language :: Python :: 3.8",
"Programming Language :: Python :: 3.9",
"License :: OSI Approved :: MIT License",
"Operating System :: OS Independent",
],
python_requires=">=3.6",
package_data={
"openmc_mesh_tally_to_vtk": [
"README.md",
"LICENSE",
]
},
install_requires=[
"numpy>=1.21.1",
"matplotlib>=3.4.2",
"trimesh",
"shapely",
"scipy",
"dagmc_geometry_slice_plotter",
"openmc_tally_unit_converter",
"vtk",
],
)
| true | true |
f71bd54ef148217f90cdec13b843fb943c0543ba | 18,592 | py | Python | saleor/order/models.py | vantrong291/saleor | d4820d53f7f9825510c7ea0c41cacbda6b612452 | [
"CC-BY-4.0"
] | null | null | null | saleor/order/models.py | vantrong291/saleor | d4820d53f7f9825510c7ea0c41cacbda6b612452 | [
"CC-BY-4.0"
] | null | null | null | saleor/order/models.py | vantrong291/saleor | d4820d53f7f9825510c7ea0c41cacbda6b612452 | [
"CC-BY-4.0"
] | null | null | null | from decimal import Decimal
from operator import attrgetter
from uuid import uuid4
from django.conf import settings
from django.contrib.postgres.fields import JSONField
from django.core.validators import MinValueValidator
from django.db import models
from django.db.models import F, Max, Sum
from django.urls import reverse
from django.utils.timezone import now
from django.utils.translation import pgettext_lazy
from django_measurement.models import MeasurementField
from django_prices.models import MoneyField, TaxedMoneyField
from measurement.measures import Weight
from prices import Money
from ..account.models import Address
from ..core.taxes import zero_money, zero_taxed_money
from ..core.utils.json_serializer import CustomJsonEncoder
from ..core.weight import WeightUnits, zero_weight
from ..discount.models import Voucher
from ..giftcard.models import GiftCard
from ..payment import ChargeStatus, TransactionKind
from ..shipping.models import ShippingMethod
from . import FulfillmentStatus, OrderEvents, OrderStatus
class OrderQueryset(models.QuerySet):
    """Custom queryset exposing common order filters."""

    def confirmed(self):
        """Return non-draft orders."""
        return self.exclude(status=OrderStatus.DRAFT)

    def drafts(self):
        """Return draft orders."""
        return self.filter(status=OrderStatus.DRAFT)

    def ready_to_fulfill(self):
        """Return orders that can be fulfilled.

        Orders ready to fulfill are fully paid but unfulfilled (or partially
        fulfilled).
        """
        statuses = {OrderStatus.UNFULFILLED, OrderStatus.PARTIALLY_FULFILLED}
        qs = self.filter(status__in=statuses, payments__is_active=True)
        # Fully paid == the sum of captured payment amounts covers the
        # order's gross total.
        qs = qs.annotate(amount_paid=Sum("payments__captured_amount"))
        return qs.filter(total_gross_amount__lte=F("amount_paid"))

    def ready_to_capture(self):
        """Return orders with payments to capture.

        Orders ready to capture are those which are not draft or canceled and
        have a preauthorized payment. The preauthorized payment can not
        already be partially or fully captured.
        """
        qs = self.filter(
            payments__is_active=True, payments__charge_status=ChargeStatus.NOT_CHARGED
        )
        # Bug fix: the previous `exclude(status={...})` compared the status
        # field for equality against a set literal instead of performing a
        # membership test, so draft/canceled orders were never excluded.
        qs = qs.exclude(status__in=[OrderStatus.DRAFT, OrderStatus.CANCELED])
        return qs.distinct()
class Order(models.Model):
    """A customer order.

    Stores denormalized billing/shipping snapshots, money amounts as
    net/gross pairs exposed through ``TaxedMoneyField`` wrappers, and links
    to payments, lines, fulfillments and events.
    """

    created = models.DateTimeField(default=now, editable=False)
    status = models.CharField(
        max_length=32, default=OrderStatus.UNFULFILLED, choices=OrderStatus.CHOICES
    )
    # Nullable so an order survives the deletion of its customer account.
    user = models.ForeignKey(
        settings.AUTH_USER_MODEL,
        blank=True,
        null=True,
        related_name="orders",
        on_delete=models.SET_NULL,
    )
    language_code = models.CharField(max_length=35, default=settings.LANGUAGE_CODE)
    tracking_client_id = models.CharField(max_length=36, blank=True, editable=False)
    billing_address = models.ForeignKey(
        Address, related_name="+", editable=False, null=True, on_delete=models.SET_NULL
    )
    shipping_address = models.ForeignKey(
        Address, related_name="+", editable=False, null=True, on_delete=models.SET_NULL
    )
    # Used for guest checkouts where no user account exists.
    user_email = models.EmailField(blank=True, default="")
    currency = models.CharField(
        max_length=settings.DEFAULT_CURRENCY_CODE_LENGTH,
        default=settings.DEFAULT_CURRENCY,
    )
    shipping_method = models.ForeignKey(
        ShippingMethod,
        blank=True,
        null=True,
        related_name="orders",
        on_delete=models.SET_NULL,
    )
    # Snapshot of the method name at purchase time.
    shipping_method_name = models.CharField(
        max_length=255, null=True, default=None, blank=True, editable=False
    )
    shipping_price_net_amount = models.DecimalField(
        max_digits=settings.DEFAULT_MAX_DIGITS,
        decimal_places=settings.DEFAULT_DECIMAL_PLACES,
        default=0,
        editable=False,
    )
    shipping_price_net = MoneyField(
        amount_field="shipping_price_net_amount", currency_field="currency"
    )
    shipping_price_gross_amount = models.DecimalField(
        max_digits=settings.DEFAULT_MAX_DIGITS,
        decimal_places=settings.DEFAULT_DECIMAL_PLACES,
        default=0,
        editable=False,
    )
    shipping_price_gross = MoneyField(
        amount_field="shipping_price_gross_amount", currency_field="currency"
    )
    shipping_price = TaxedMoneyField(
        net_amount_field="shipping_price_net_amount",
        gross_amount_field="shipping_price_gross_amount",
        currency_field="currency",
    )
    # Public identifier used in URLs; generated lazily in save().
    token = models.CharField(max_length=36, unique=True, blank=True)
    # Token of a checkout instance that this order was created from
    checkout_token = models.CharField(max_length=36, blank=True)
    total_net_amount = models.DecimalField(
        max_digits=settings.DEFAULT_MAX_DIGITS,
        decimal_places=settings.DEFAULT_DECIMAL_PLACES,
        default=0,
    )
    total_net = MoneyField(amount_field="total_net_amount", currency_field="currency")
    total_gross_amount = models.DecimalField(
        max_digits=settings.DEFAULT_MAX_DIGITS,
        decimal_places=settings.DEFAULT_DECIMAL_PLACES,
        default=0,
    )
    total_gross = MoneyField(
        amount_field="total_gross_amount", currency_field="currency"
    )
    total = TaxedMoneyField(
        net_amount_field="total_net_amount",
        gross_amount_field="total_gross_amount",
        currency_field="currency",
    )
    voucher = models.ForeignKey(
        Voucher, blank=True, null=True, related_name="+", on_delete=models.SET_NULL
    )
    gift_cards = models.ManyToManyField(GiftCard, blank=True, related_name="orders")
    discount_amount = models.DecimalField(
        max_digits=settings.DEFAULT_MAX_DIGITS,
        decimal_places=settings.DEFAULT_DECIMAL_PLACES,
        default=0,
    )
    discount = MoneyField(amount_field="discount_amount", currency_field="currency")
    discount_name = models.CharField(max_length=255, default="", blank=True)
    translated_discount_name = models.CharField(max_length=255, default="", blank=True)
    display_gross_prices = models.BooleanField(default=True)
    customer_note = models.TextField(blank=True, default="")
    weight = MeasurementField(
        measurement=Weight, unit_choices=WeightUnits.CHOICES, default=zero_weight
    )
    objects = OrderQueryset.as_manager()

    class Meta:
        ordering = ("-pk",)
        permissions = (
            (
                "manage_orders",
                pgettext_lazy("Permission description", "Manage orders."),
            ),
        )

    def save(self, *args, **kwargs):
        # Assign a unique public token on first save.
        if not self.token:
            self.token = str(uuid4())
        return super().save(*args, **kwargs)

    def is_fully_paid(self):
        """Return True when captured payments cover the gross total."""
        total_paid = self._total_paid()
        return total_paid.gross >= self.total.gross

    def is_partly_paid(self):
        """Return True when any amount at all has been captured."""
        total_paid = self._total_paid()
        return total_paid.gross.amount > 0

    def get_customer_email(self):
        # Prefer the account email; fall back to the guest-checkout email.
        return self.user.email if self.user else self.user_email

    def _total_paid(self):
        """Sum captured amounts across all (partially) charged payments."""
        # Get total paid amount from partially charged,
        # fully charged and partially refunded payments
        payments = self.payments.filter(
            charge_status__in=[
                ChargeStatus.PARTIALLY_CHARGED,
                ChargeStatus.FULLY_CHARGED,
                ChargeStatus.PARTIALLY_REFUNDED,
            ]
        )
        return sum(
            (payment.get_captured_amount() for payment in payments),
            zero_taxed_money(),
        )

    def _index_billing_phone(self):
        return self.billing_address.phone

    def _index_shipping_phone(self):
        return self.shipping_address.phone

    def __iter__(self):
        # Iterating an order yields its OrderLine rows.
        return iter(self.lines.all())

    def __repr__(self):
        return "<Order #%r>" % (self.id,)

    def __str__(self):
        return "#%d" % (self.id,)

    def get_absolute_url(self):
        return reverse("order:details", kwargs={"token": self.token})

    def get_last_payment(self):
        """Return the most recently created payment (highest pk) or None."""
        return max(self.payments.all(), default=None, key=attrgetter("pk"))

    def get_payment_status(self):
        last_payment = self.get_last_payment()
        if last_payment:
            return last_payment.charge_status
        return ChargeStatus.NOT_CHARGED

    def get_payment_status_display(self):
        last_payment = self.get_last_payment()
        if last_payment:
            return last_payment.get_charge_status_display()
        return dict(ChargeStatus.CHOICES).get(ChargeStatus.NOT_CHARGED)

    def is_pre_authorized(self):
        """Return True if an active payment has a successful AUTH transaction."""
        return (
            self.payments.filter(
                is_active=True, transactions__kind=TransactionKind.AUTH
            )
            .filter(transactions__is_success=True)
            .exists()
        )

    @property
    def quantity_fulfilled(self):
        return sum(line.quantity_fulfilled for line in self)

    def is_shipping_required(self):
        return any(line.is_shipping_required for line in self)

    def get_subtotal(self):
        subtotal_iterator = (line.get_total() for line in self)
        return sum(subtotal_iterator, zero_taxed_money())

    def get_total_quantity(self):
        return sum(line.quantity for line in self)

    def is_draft(self):
        return self.status == OrderStatus.DRAFT

    def is_open(self):
        statuses = {OrderStatus.UNFULFILLED, OrderStatus.PARTIALLY_FULFILLED}
        return self.status in statuses

    def can_cancel(self):
        return self.status not in {OrderStatus.CANCELED, OrderStatus.DRAFT}

    def can_capture(self, payment=None):
        if not payment:
            payment = self.get_last_payment()
        if not payment:
            return False
        order_status_ok = self.status not in {OrderStatus.DRAFT, OrderStatus.CANCELED}
        return payment.can_capture() and order_status_ok

    def can_charge(self, payment=None):
        if not payment:
            payment = self.get_last_payment()
        if not payment:
            return False
        order_status_ok = self.status not in {OrderStatus.DRAFT, OrderStatus.CANCELED}
        return payment.can_charge() and order_status_ok

    def can_void(self, payment=None):
        if not payment:
            payment = self.get_last_payment()
        if not payment:
            return False
        return payment.can_void()

    def can_refund(self, payment=None):
        if not payment:
            payment = self.get_last_payment()
        if not payment:
            return False
        return payment.can_refund()

    def can_mark_as_paid(self):
        """Only an order without any payments can be manually marked paid."""
        # exists() issues a cheap EXISTS query instead of fetching every
        # payment row just to count them.
        return not self.payments.exists()

    @property
    def total_authorized(self):
        payment = self.get_last_payment()
        if payment:
            return payment.get_authorized_amount()
        return zero_money()

    @property
    def total_captured(self):
        payment = self.get_last_payment()
        if payment and payment.charge_status in (
            ChargeStatus.PARTIALLY_CHARGED,
            ChargeStatus.FULLY_CHARGED,
            ChargeStatus.PARTIALLY_REFUNDED,
        ):
            return Money(payment.captured_amount, payment.currency)
        return zero_money()

    @property
    def total_balance(self):
        return self.total_captured - self.total.gross

    def get_total_weight(self):
        return self.weight

    def product_to_string(self):
        """Return all line products/variants as one display string."""
        # Preserves the historical " <product> <variant>, " chunk format.
        return "".join(
            " %s %s, " % (line.product_name, line.variant_name)
            for line in self.lines.all()
        )

    def product_to_list(self):
        """Return the product name of every line."""
        return [line.product_name for line in self.lines.all()]

    def variant_to_list(self):
        """Return per-line variant display names with stock information."""
        # NOTE(review): assumes line.variant is not null — lines whose
        # variant was deleted (SET_NULL) would raise here; TODO confirm.
        return [
            {
                "variant": line.product_name + " " + line.variant_name,
                "quantity": line.variant.quantity,
                "quantity_allocated": line.variant.quantity_allocated,
            }
            for line in self.lines.all()
        ]

    def product_category_to_list(self):
        """Return the category name of every line's product."""
        return [line.variant.product.category.name for line in self.lines.all()]

    def product_type_to_list(self):
        """Return the product-type name of every line's product."""
        return [line.variant.product.product_type.name for line in self.lines.all()]

    def variant_to_string(self):
        """Return all line variant names as one display string."""
        return "".join(" %s" % line.variant_name for line in self.lines.all())
class OrderLineQueryset(models.QuerySet):
    """Queryset helpers for splitting lines by product kind."""

    def digital(self):
        """Return an iterator over lines with digital products."""
        return (line for line in self.all() if line.is_digital)

    def physical(self):
        """Return an iterator over lines with physical products."""
        return (line for line in self.all() if not line.is_digital)
class OrderLine(models.Model):
    """A single purchased item of an order.

    Product and variant data are snapshotted onto the line so the order
    history survives later catalog changes or deletions.
    """

    order = models.ForeignKey(
        Order, related_name="lines", editable=False, on_delete=models.CASCADE
    )
    # Nullable: the line keeps its snapshot even if the variant is deleted.
    variant = models.ForeignKey(
        "product.ProductVariant",
        related_name="order_lines",
        on_delete=models.SET_NULL,
        blank=True,
        null=True,
    )
    # max_length is as produced by ProductVariant's display_product method
    product_name = models.CharField(max_length=386)
    variant_name = models.CharField(max_length=255, default="", blank=True)
    translated_product_name = models.CharField(max_length=386, default="", blank=True)
    translated_variant_name = models.CharField(max_length=255, default="", blank=True)
    product_sku = models.CharField(max_length=32)
    is_shipping_required = models.BooleanField()
    quantity = models.IntegerField(validators=[MinValueValidator(1)])
    quantity_fulfilled = models.IntegerField(
        validators=[MinValueValidator(0)], default=0
    )
    currency = models.CharField(
        max_length=settings.DEFAULT_CURRENCY_CODE_LENGTH,
        default=settings.DEFAULT_CURRENCY,
    )
    # Per-unit price stored as a net/gross pair, combined below.
    unit_price_net_amount = models.DecimalField(
        max_digits=settings.DEFAULT_MAX_DIGITS,
        decimal_places=settings.DEFAULT_DECIMAL_PLACES,
    )
    unit_price_net = MoneyField(
        amount_field="unit_price_net_amount", currency_field="currency"
    )
    unit_price_gross_amount = models.DecimalField(
        max_digits=settings.DEFAULT_MAX_DIGITS,
        decimal_places=settings.DEFAULT_DECIMAL_PLACES,
    )
    unit_price_gross = MoneyField(
        amount_field="unit_price_gross_amount", currency_field="currency"
    )
    unit_price = TaxedMoneyField(
        net_amount_field="unit_price_net_amount",
        gross_amount_field="unit_price_gross_amount",
        currency="currency",
    )
    tax_rate = models.DecimalField(
        max_digits=5, decimal_places=2, default=Decimal("0.0")
    )

    objects = OrderLineQueryset.as_manager()

    class Meta:
        ordering = ("pk",)

    def __str__(self):
        return (
            f"{self.product_name} ({self.variant_name})"
            if self.variant_name
            else self.product_name
        )

    def get_total(self):
        """Return the taxed total for this line (unit price x quantity)."""
        return self.unit_price * self.quantity

    @property
    def quantity_unfulfilled(self):
        # How many units still await fulfillment.
        return self.quantity - self.quantity_fulfilled

    @property
    def is_digital(self) -> bool:
        """Check if a variant is digital and contains digital content."""
        # NOTE(review): raises AttributeError when self.variant is None
        # (deleted variant) — callers appear to rely on the variant existing.
        is_digital = self.variant.is_digital()
        has_digital = hasattr(self.variant, "digital_content")
        return is_digital and has_digital
class Fulfillment(models.Model):
    """A shipment of some (or all) lines of an order.

    An order may have several fulfillments; each receives a sequential
    ``fulfillment_order`` number scoped to its order (assigned in save()).
    """

    fulfillment_order = models.PositiveIntegerField(editable=False)
    order = models.ForeignKey(
        Order, related_name="fulfillments", editable=False, on_delete=models.CASCADE
    )
    status = models.CharField(
        max_length=32,
        default=FulfillmentStatus.FULFILLED,
        choices=FulfillmentStatus.CHOICES,
    )
    tracking_number = models.CharField(max_length=255, default="", blank=True)
    shipping_date = models.DateTimeField(default=now, editable=False)

    def __str__(self):
        # Human-readable label, e.g. "Fulfillment #12-1".
        return pgettext_lazy("Fulfillment str", "Fulfillment #%s") % (self.composed_id,)

    def __iter__(self):
        # Iterating a fulfillment yields its FulfillmentLine rows.
        return iter(self.lines.all())

    def save(self, *args, **kwargs):
        """Assign an auto incremented value as a fulfillment order."""
        # Only on first save: next number = max within this order, plus one.
        if not self.pk:
            groups = self.order.fulfillments.all()
            existing_max = groups.aggregate(Max("fulfillment_order"))
            existing_max = existing_max.get("fulfillment_order__max")
            self.fulfillment_order = existing_max + 1 if existing_max is not None else 1
        return super().save(*args, **kwargs)

    @property
    def composed_id(self):
        # "<order id>-<fulfillment number>", used by __str__.
        return "%s-%s" % (self.order.id, self.fulfillment_order)

    def can_edit(self):
        # Canceled fulfillments are immutable.
        return self.status != FulfillmentStatus.CANCELED

    def get_total_quantity(self):
        # Total unit count across all fulfillment lines.
        return sum([line.quantity for line in self])
class FulfillmentLine(models.Model):
    """Quantity of a single order line included in one fulfillment."""

    order_line = models.ForeignKey(
        OrderLine, related_name="+", on_delete=models.CASCADE
    )
    fulfillment = models.ForeignKey(
        Fulfillment, related_name="lines", on_delete=models.CASCADE
    )
    # Number of units of order_line shipped in this fulfillment.
    quantity = models.PositiveIntegerField()
class OrderEvent(models.Model):
    """Model used to store events that happened during the order lifecycle.

    Args:
        parameters: Values needed to display the event on the storefront
        type: Type of an order
    """

    date = models.DateTimeField(default=now, editable=False)
    # Choices are derived from OrderEvents.CHOICES with upper-cased keys.
    type = models.CharField(
        max_length=255,
        choices=[
            (type_name.upper(), type_name) for type_name, _ in OrderEvents.CHOICES
        ],
    )
    order = models.ForeignKey(Order, related_name="events", on_delete=models.CASCADE)
    # Free-form event payload; serialized with the project's JSON encoder.
    parameters = JSONField(blank=True, default=dict, encoder=CustomJsonEncoder)
    # The staff member or customer who triggered the event, if any.
    user = models.ForeignKey(
        settings.AUTH_USER_MODEL,
        blank=True,
        null=True,
        on_delete=models.SET_NULL,
        related_name="+",
    )

    class Meta:
        # Chronological order: oldest event first.
        ordering = ("date",)

    def __repr__(self):
        return f"{self.__class__.__name__}(type={self.type!r}, user={self.user!r})"
| 34.113761 | 186 | 0.677765 | from decimal import Decimal
from operator import attrgetter
from uuid import uuid4
from django.conf import settings
from django.contrib.postgres.fields import JSONField
from django.core.validators import MinValueValidator
from django.db import models
from django.db.models import F, Max, Sum
from django.urls import reverse
from django.utils.timezone import now
from django.utils.translation import pgettext_lazy
from django_measurement.models import MeasurementField
from django_prices.models import MoneyField, TaxedMoneyField
from measurement.measures import Weight
from prices import Money
from ..account.models import Address
from ..core.taxes import zero_money, zero_taxed_money
from ..core.utils.json_serializer import CustomJsonEncoder
from ..core.weight import WeightUnits, zero_weight
from ..discount.models import Voucher
from ..giftcard.models import GiftCard
from ..payment import ChargeStatus, TransactionKind
from ..shipping.models import ShippingMethod
from . import FulfillmentStatus, OrderEvents, OrderStatus
class OrderQueryset(models.QuerySet):
def confirmed(self):
return self.exclude(status=OrderStatus.DRAFT)
def drafts(self):
return self.filter(status=OrderStatus.DRAFT)
def ready_to_fulfill(self):
statuses = {OrderStatus.UNFULFILLED, OrderStatus.PARTIALLY_FULFILLED}
qs = self.filter(status__in=statuses, payments__is_active=True)
qs = qs.annotate(amount_paid=Sum("payments__captured_amount"))
return qs.filter(total_gross_amount__lte=F("amount_paid"))
def ready_to_capture(self):
qs = self.filter(
payments__is_active=True, payments__charge_status=ChargeStatus.NOT_CHARGED
)
qs = qs.exclude(status={OrderStatus.DRAFT, OrderStatus.CANCELED})
return qs.distinct()
class Order(models.Model):
created = models.DateTimeField(default=now, editable=False)
status = models.CharField(
max_length=32, default=OrderStatus.UNFULFILLED, choices=OrderStatus.CHOICES
)
user = models.ForeignKey(
settings.AUTH_USER_MODEL,
blank=True,
null=True,
related_name="orders",
on_delete=models.SET_NULL,
)
language_code = models.CharField(max_length=35, default=settings.LANGUAGE_CODE)
tracking_client_id = models.CharField(max_length=36, blank=True, editable=False)
billing_address = models.ForeignKey(
Address, related_name="+", editable=False, null=True, on_delete=models.SET_NULL
)
shipping_address = models.ForeignKey(
Address, related_name="+", editable=False, null=True, on_delete=models.SET_NULL
)
user_email = models.EmailField(blank=True, default="")
currency = models.CharField(
max_length=settings.DEFAULT_CURRENCY_CODE_LENGTH,
default=settings.DEFAULT_CURRENCY,
)
shipping_method = models.ForeignKey(
ShippingMethod,
blank=True,
null=True,
related_name="orders",
on_delete=models.SET_NULL,
)
shipping_method_name = models.CharField(
max_length=255, null=True, default=None, blank=True, editable=False
)
shipping_price_net_amount = models.DecimalField(
max_digits=settings.DEFAULT_MAX_DIGITS,
decimal_places=settings.DEFAULT_DECIMAL_PLACES,
default=0,
editable=False,
)
shipping_price_net = MoneyField(
amount_field="shipping_price_net_amount", currency_field="currency"
)
shipping_price_gross_amount = models.DecimalField(
max_digits=settings.DEFAULT_MAX_DIGITS,
decimal_places=settings.DEFAULT_DECIMAL_PLACES,
default=0,
editable=False,
)
shipping_price_gross = MoneyField(
amount_field="shipping_price_gross_amount", currency_field="currency"
)
shipping_price = TaxedMoneyField(
net_amount_field="shipping_price_net_amount",
gross_amount_field="shipping_price_gross_amount",
currency_field="currency",
)
token = models.CharField(max_length=36, unique=True, blank=True)
checkout_token = models.CharField(max_length=36, blank=True)
total_net_amount = models.DecimalField(
max_digits=settings.DEFAULT_MAX_DIGITS,
decimal_places=settings.DEFAULT_DECIMAL_PLACES,
default=0,
)
total_net = MoneyField(amount_field="total_net_amount", currency_field="currency")
total_gross_amount = models.DecimalField(
max_digits=settings.DEFAULT_MAX_DIGITS,
decimal_places=settings.DEFAULT_DECIMAL_PLACES,
default=0,
)
total_gross = MoneyField(
amount_field="total_gross_amount", currency_field="currency"
)
total = TaxedMoneyField(
net_amount_field="total_net_amount",
gross_amount_field="total_gross_amount",
currency_field="currency",
)
voucher = models.ForeignKey(
Voucher, blank=True, null=True, related_name="+", on_delete=models.SET_NULL
)
gift_cards = models.ManyToManyField(GiftCard, blank=True, related_name="orders")
discount_amount = models.DecimalField(
max_digits=settings.DEFAULT_MAX_DIGITS,
decimal_places=settings.DEFAULT_DECIMAL_PLACES,
default=0,
)
discount = MoneyField(amount_field="discount_amount", currency_field="currency")
discount_name = models.CharField(max_length=255, default="", blank=True)
translated_discount_name = models.CharField(max_length=255, default="", blank=True)
display_gross_prices = models.BooleanField(default=True)
customer_note = models.TextField(blank=True, default="")
weight = MeasurementField(
measurement=Weight, unit_choices=WeightUnits.CHOICES, default=zero_weight
)
objects = OrderQueryset.as_manager()
class Meta:
ordering = ("-pk",)
permissions = (
(
"manage_orders",
pgettext_lazy("Permission description", "Manage orders."),
),
)
def save(self, *args, **kwargs):
if not self.token:
self.token = str(uuid4())
return super().save(*args, **kwargs)
def is_fully_paid(self):
total_paid = self._total_paid()
return total_paid.gross >= self.total.gross
def is_partly_paid(self):
total_paid = self._total_paid()
return total_paid.gross.amount > 0
def get_customer_email(self):
return self.user.email if self.user else self.user_email
def _total_paid(self):
payments = self.payments.filter(
charge_status__in=[
ChargeStatus.PARTIALLY_CHARGED,
ChargeStatus.FULLY_CHARGED,
ChargeStatus.PARTIALLY_REFUNDED,
]
)
total_captured = [payment.get_captured_amount() for payment in payments]
total_paid = sum(total_captured, zero_taxed_money())
return total_paid
def _index_billing_phone(self):
return self.billing_address.phone
def _index_shipping_phone(self):
return self.shipping_address.phone
def __iter__(self):
return iter(self.lines.all())
def __repr__(self):
return "<Order #%r>" % (self.id,)
def __str__(self):
return "#%d" % (self.id,)
def get_absolute_url(self):
return reverse("order:details", kwargs={"token": self.token})
def get_last_payment(self):
return max(self.payments.all(), default=None, key=attrgetter("pk"))
def get_payment_status(self):
last_payment = self.get_last_payment()
if last_payment:
return last_payment.charge_status
return ChargeStatus.NOT_CHARGED
def get_payment_status_display(self):
last_payment = self.get_last_payment()
if last_payment:
return last_payment.get_charge_status_display()
return dict(ChargeStatus.CHOICES).get(ChargeStatus.NOT_CHARGED)
def is_pre_authorized(self):
return (
self.payments.filter(
is_active=True, transactions__kind=TransactionKind.AUTH
)
.filter(transactions__is_success=True)
.exists()
)
@property
def quantity_fulfilled(self):
return sum([line.quantity_fulfilled for line in self])
def is_shipping_required(self):
return any(line.is_shipping_required for line in self)
def get_subtotal(self):
subtotal_iterator = (line.get_total() for line in self)
return sum(subtotal_iterator, zero_taxed_money())
def get_total_quantity(self):
return sum([line.quantity for line in self])
def is_draft(self):
return self.status == OrderStatus.DRAFT
def is_open(self):
statuses = {OrderStatus.UNFULFILLED, OrderStatus.PARTIALLY_FULFILLED}
return self.status in statuses
def can_cancel(self):
return self.status not in {OrderStatus.CANCELED, OrderStatus.DRAFT}
def can_capture(self, payment=None):
if not payment:
payment = self.get_last_payment()
if not payment:
return False
order_status_ok = self.status not in {OrderStatus.DRAFT, OrderStatus.CANCELED}
return payment.can_capture() and order_status_ok
def can_charge(self, payment=None):
if not payment:
payment = self.get_last_payment()
if not payment:
return False
order_status_ok = self.status not in {OrderStatus.DRAFT, OrderStatus.CANCELED}
return payment.can_charge() and order_status_ok
def can_void(self, payment=None):
if not payment:
payment = self.get_last_payment()
if not payment:
return False
return payment.can_void()
def can_refund(self, payment=None):
if not payment:
payment = self.get_last_payment()
if not payment:
return False
return payment.can_refund()
def can_mark_as_paid(self):
return len(self.payments.all()) == 0
@property
def total_authorized(self):
payment = self.get_last_payment()
if payment:
return payment.get_authorized_amount()
return zero_money()
@property
def total_captured(self):
payment = self.get_last_payment()
if payment and payment.charge_status in (
ChargeStatus.PARTIALLY_CHARGED,
ChargeStatus.FULLY_CHARGED,
ChargeStatus.PARTIALLY_REFUNDED,
):
return Money(payment.captured_amount, payment.currency)
return zero_money()
@property
def total_balance(self):
return self.total_captured - self.total.gross
def get_total_weight(self):
return self.weight
def product_to_string(self):
str_product_result = ""
for line in self.lines.all():
str_product_result = str_product_result + " " + str(line.product_name) + " " + str(line.variant_name) + ", "
return str_product_result
def product_to_list(self):
list_product_result = []
for line in self.lines.all():
list_product_result.append(line.product_name)
return list_product_result
def variant_to_list(self):
list_variant_result = []
for line in self.lines.all():
list_variant_result.append({"variant": line.product_name + " " + line.variant_name, "quantity": line.variant.quantity, "quantity_allocated": line.variant.quantity_allocated})
return list_variant_result
def product_category_to_list(self):
list_product_category_result = []
for line in self.lines.all():
list_product_category_result.append(line.variant.product.category.name)
return list_product_category_result
def product_type_to_list(self):
list_product_type_result = []
for line in self.lines.all():
list_product_type_result.append(line.variant.product.product_type.name)
return list_product_type_result
def variant_to_string(self):
str_variant_result = ""
for line in self.lines.all():
str_variant_result = str_variant_result + " " + str(line.variant_name)
return str_variant_result
class OrderLineQueryset(models.QuerySet):
def digital(self):
for line in self.all():
if line.is_digital:
yield line
def physical(self):
for line in self.all():
if not line.is_digital:
yield line
class OrderLine(models.Model):
order = models.ForeignKey(
Order, related_name="lines", editable=False, on_delete=models.CASCADE
)
variant = models.ForeignKey(
"product.ProductVariant",
related_name="order_lines",
on_delete=models.SET_NULL,
blank=True,
null=True,
)
product_name = models.CharField(max_length=386)
variant_name = models.CharField(max_length=255, default="", blank=True)
translated_product_name = models.CharField(max_length=386, default="", blank=True)
translated_variant_name = models.CharField(max_length=255, default="", blank=True)
product_sku = models.CharField(max_length=32)
is_shipping_required = models.BooleanField()
quantity = models.IntegerField(validators=[MinValueValidator(1)])
quantity_fulfilled = models.IntegerField(
validators=[MinValueValidator(0)], default=0
)
currency = models.CharField(
max_length=settings.DEFAULT_CURRENCY_CODE_LENGTH,
default=settings.DEFAULT_CURRENCY,
)
unit_price_net_amount = models.DecimalField(
max_digits=settings.DEFAULT_MAX_DIGITS,
decimal_places=settings.DEFAULT_DECIMAL_PLACES,
)
unit_price_net = MoneyField(
amount_field="unit_price_net_amount", currency_field="currency"
)
unit_price_gross_amount = models.DecimalField(
max_digits=settings.DEFAULT_MAX_DIGITS,
decimal_places=settings.DEFAULT_DECIMAL_PLACES,
)
unit_price_gross = MoneyField(
amount_field="unit_price_gross_amount", currency_field="currency"
)
unit_price = TaxedMoneyField(
net_amount_field="unit_price_net_amount",
gross_amount_field="unit_price_gross_amount",
currency="currency",
)
tax_rate = models.DecimalField(
max_digits=5, decimal_places=2, default=Decimal("0.0")
)
objects = OrderLineQueryset.as_manager()
class Meta:
ordering = ("pk",)
def __str__(self):
return (
f"{self.product_name} ({self.variant_name})"
if self.variant_name
else self.product_name
)
def get_total(self):
return self.unit_price * self.quantity
@property
def quantity_unfulfilled(self):
return self.quantity - self.quantity_fulfilled
@property
def is_digital(self) -> bool:
is_digital = self.variant.is_digital()
has_digital = hasattr(self.variant, "digital_content")
return is_digital and has_digital
class Fulfillment(models.Model):
    """A shipment (full or partial) of an order's lines."""

    # Per-order sequence number (1-based); assigned automatically in save().
    fulfillment_order = models.PositiveIntegerField(editable=False)
    order = models.ForeignKey(
        Order, related_name="fulfillments", editable=False, on_delete=models.CASCADE
    )
    status = models.CharField(
        max_length=32,
        default=FulfillmentStatus.FULFILLED,
        choices=FulfillmentStatus.CHOICES,
    )
    tracking_number = models.CharField(max_length=255, default="", blank=True)
    shipping_date = models.DateTimeField(default=now, editable=False)

    def __str__(self):
        """Return a translatable label, e.g. 'Fulfillment #12-1'."""
        return pgettext_lazy("Fulfillment str", "Fulfillment #%s") % (self.composed_id,)

    def __iter__(self):
        """Iterate over this fulfillment's lines."""
        return iter(self.lines.all())

    def save(self, *args, **kwargs):
        # On first save, assign the next sequence number within the order.
        # NOTE(review): max+1 is not concurrency-safe; two simultaneous first
        # saves could compute the same number — confirm callers serialize this.
        if not self.pk:
            groups = self.order.fulfillments.all()
            existing_max = groups.aggregate(Max("fulfillment_order"))
            existing_max = existing_max.get("fulfillment_order__max")
            self.fulfillment_order = existing_max + 1 if existing_max is not None else 1
        return super().save(*args, **kwargs)

    @property
    def composed_id(self):
        """Identifier '<order id>-<sequence>' used in display labels."""
        return "%s-%s" % (self.order.id, self.fulfillment_order)

    def can_edit(self):
        """Whether this fulfillment may still be modified (not canceled)."""
        return self.status != FulfillmentStatus.CANCELED

    def get_total_quantity(self):
        """Total units across all lines of this fulfillment."""
        return sum([line.quantity for line in self])
class FulfillmentLine(models.Model):
    """Associates an order line with a fulfillment and the quantity shipped."""

    order_line = models.ForeignKey(
        OrderLine, related_name="+", on_delete=models.CASCADE
    )
    fulfillment = models.ForeignKey(
        Fulfillment, related_name="lines", on_delete=models.CASCADE
    )
    # Units of the order line covered by this fulfillment.
    quantity = models.PositiveIntegerField()
class OrderEvent(models.Model):
    """Audit-trail entry recording something that happened to an order.

    'parameters' carries the event-specific payload as JSON; 'user' is the
    actor and is nullable so events survive account deletion (SET_NULL).
    """

    date = models.DateTimeField(default=now, editable=False)
    type = models.CharField(
        max_length=255,
        choices=[
            (type_name.upper(), type_name) for type_name, _ in OrderEvents.CHOICES
        ],
    )
    order = models.ForeignKey(Order, related_name="events", on_delete=models.CASCADE)
    parameters = JSONField(blank=True, default=dict, encoder=CustomJsonEncoder)
    user = models.ForeignKey(
        settings.AUTH_USER_MODEL,
        blank=True,
        null=True,
        on_delete=models.SET_NULL,
        related_name="+",
    )

    class Meta:
        # Chronological order for event timelines.
        ordering = ("date",)

    def __repr__(self):
        return f"{self.__class__.__name__}(type={self.type!r}, user={self.user!r})"
| true | true |
f71bd651d2401162ae8d5fd7fa574451af7b41e4 | 3,518 | py | Python | check_updates.py | kunansy/CheckDependencyUpdates | 56ef905b7baf568e38c66ce39ca71115438a758b | [
"MIT"
] | null | null | null | check_updates.py | kunansy/CheckDependencyUpdates | 56ef905b7baf568e38c66ce39ca71115438a758b | [
"MIT"
] | null | null | null | check_updates.py | kunansy/CheckDependencyUpdates | 56ef905b7baf568e38c66ce39ca71115438a758b | [
"MIT"
] | null | null | null | #!/usr/bin/env python3
import argparse
import asyncio
import re
import sys
from itertools import groupby
from pathlib import Path
from typing import Optional, NamedTuple, AsyncIterable
import aiofiles
import aiohttp
PYPI_URL = "https://pypi.org/pypi/{package_name}/json"
class Package(NamedTuple):
    """A requirements.txt entry: pinned version plus latest PyPI release.

    'last_version' is None until (or unless) the PyPI lookup succeeds.
    """

    name: str
    required_version: str
    last_version: Optional[str] = None

    def __str__(self) -> str:
        parts = (
            f"Package: '{self.name}'",
            f"Required version: {self.required_version}",
            f"Last version: {self.last_version}",
        )
        return "\n".join(parts)
async def get_last_version(session: aiohttp.ClientSession,
                           package_name: str) -> Optional[str]:
    """Fetch the latest released version of 'package_name' from PyPI.

    Returns None on a connection error or a non-200 response; errors are
    printed to stderr instead of raised so one bad package does not abort
    the whole run.
    """
    url = PYPI_URL.format(package_name=package_name)
    try:
        resp = await session.get(url)
    except Exception as e:
        print(f"{e.__class__.__name__}({e!r})", file=sys.stderr)
        return None
    # Bug fix: the original only closed the response on the happy path; if
    # resp.json() raised, the connection was never released.
    try:
        if resp.status == 200:
            json = await resp.json()
            return json['info']['version']
        return None
    finally:
        resp.close()
async def worker(args: asyncio.Queue,
                 results: asyncio.Queue,
                 session: aiohttp.ClientSession) -> None:
    """Consume packages from 'args', enrich each with its latest PyPI
    version, and push the result onto 'results'.

    Loops forever; the caller cancels the task after args.join() completes.
    """
    while True:
        package = await args.get()
        last_version = await get_last_version(session, package.name)
        # NamedTuples are immutable, so build a new enriched Package.
        package = Package(
            name=package.name,
            required_version=package.required_version,
            last_version=last_version
        )
        await results.put(package)
        args.task_done()
async def get_packages(project_path: Path) -> AsyncIterable[Package]:
    """Yield a Package for each pinned requirement in requirements.txt.

    Bug fix: the original called .groups() on the result of pattern.search()
    unconditionally, so any line without a version spec (blank line, comment,
    '-e' option) raised AttributeError. Such lines are now skipped.
    """
    requirements_path = project_path / 'requirements.txt'
    if not requirements_path.exists():
        print("Requirements file not found", file=sys.stderr)
        return
    # Captures name and version around a 2-4 char specifier such as '=='.
    pattern = re.compile(r'([^<>= ]+)[<>= ]{2,4}(.+)')
    async with aiofiles.open(requirements_path) as r:
        async for requirement in r:
            match = pattern.search(requirement)
            if match is None:
                continue  # blank line, comment or unpinned requirement
            name, version = match.groups()
            yield Package(name=name, required_version=version)
async def bound(project_path: Path) -> list[Package]:
    """Resolve the latest PyPI versions for all project requirements.

    Fan-out/fan-in: requirements are queued, five worker tasks query PyPI
    concurrently, and results are drained once the input queue is empty.
    """
    timeout = aiohttp.ClientTimeout(60)
    args = asyncio.Queue(maxsize=-1)  # unbounded input queue
    results = asyncio.Queue(maxsize=-1)  # unbounded output queue
    async with aiohttp.ClientSession(timeout=timeout) as ses:
        async for package in get_packages(project_path):
            await args.put(package)
        tasks = []
        for _ in range(5):
            task = asyncio.create_task(worker(args, results, ses))
            tasks += [task]
        # Block until every queued package has been task_done()'d ...
        await args.join()
        # ... then stop the now-idle, otherwise-infinite workers.
        for task in tasks:
            task.cancel()
        return [
            results.get_nowait()
            for _ in range(results.qsize())
        ]
def main() -> None:
    """CLI entry point: parse --path, fetch versions, print a grouped report."""
    parser = argparse.ArgumentParser(
        description="Check updates of the requirements"
    )
    parser.add_argument(
        '--path',
        type=Path,
        help="Path to the project",
        default=Path('.'),
        dest='path'
    )
    args = parser.parse_args()
    packages = asyncio.run(bound(args.path))

    # PEP 8: a named def instead of a lambda bound to a name (E731).
    def key(item):
        # True when the pin differs from the latest release.  NOTE: a failed
        # lookup (last_version is None) also compares unequal, so it lands in
        # the "with updates" group.
        return item.last_version != item.required_version

    # groupby only groups adjacent items, so sort by the same key first;
    # False (up to date) sorts before True (outdated).
    packages.sort(key=key)
    for has_update, packages_ in groupby(packages, key=key):
        if has_update:
            print("Packages with updates: ")
        else:
            print("Packages without updates: ")
        for num, package in enumerate(packages_, 1):
            print(f"{num}.\n{package}", end='\n-------------\n')
        print()
if __name__ == "__main__":
main()
| 26.451128 | 69 | 0.610006 |
import argparse
import asyncio
import re
import sys
from itertools import groupby
from pathlib import Path
from typing import Optional, NamedTuple, AsyncIterable
import aiofiles
import aiohttp
PYPI_URL = "https://pypi.org/pypi/{package_name}/json"
class Package(NamedTuple):
name: str
required_version: str
last_version: Optional[str] = None
def __str__(self) -> str:
return f"Package: '{self.name}'\n" \
f"Required version: {self.required_version}\n" \
f"Last version: {self.last_version}"
async def get_last_version(session: aiohttp.ClientSession,
package_name: str) -> Optional[str]:
url = PYPI_URL.format(package_name=package_name)
try:
resp = await session.get(url)
except Exception as e:
print(f"{e.__class__.__name__}({e!r})", file=sys.stderr)
return
if resp.status == 200:
json = await resp.json()
resp.close()
return json['info']['version']
resp.close()
async def worker(args: asyncio.Queue,
results: asyncio.Queue,
session: aiohttp.ClientSession) -> None:
while True:
package = await args.get()
last_version = await get_last_version(session, package.name)
package = Package(
name=package.name,
required_version=package.required_version,
last_version=last_version
)
await results.put(package)
args.task_done()
async def get_packages(project_path: Path) -> AsyncIterable[Package]:
requirements_path = project_path / 'requirements.txt'
if not requirements_path.exists():
print("Requirements file not found", file=sys.stderr)
return
pattern = re.compile(r'([^<>= ]+)[<>= ]{2,4}(.+)')
async with aiofiles.open(requirements_path) as r:
async for requirement in r:
name, version = pattern.search(requirement).groups()
yield Package(name=name, required_version=version)
async def bound(project_path: Path) -> list[Package]:
timeout = aiohttp.ClientTimeout(60)
args = asyncio.Queue(maxsize=-1)
results = asyncio.Queue(maxsize=-1)
async with aiohttp.ClientSession(timeout=timeout) as ses:
async for package in get_packages(project_path):
await args.put(package)
tasks = []
for _ in range(5):
task = asyncio.create_task(worker(args, results, ses))
tasks += [task]
await args.join()
for task in tasks:
task.cancel()
return [
results.get_nowait()
for _ in range(results.qsize())
]
def main() -> None:
parser = argparse.ArgumentParser(
description="Check updates of the requirements"
)
parser.add_argument(
'--path',
type=Path,
help="Path to the project",
default=Path('.'),
dest='path'
)
args = parser.parse_args()
packages = asyncio.run(bound(args.path))
key = lambda item: item.last_version != item.required_version
packages.sort(key=key)
for has_update, packages_ in groupby(packages, key=key):
if has_update:
print("Packages with updates: ")
else:
print("Packages without updates: ")
for num, package in enumerate(packages_, 1):
print(f"{num}.\n{package}", end='\n-------------\n')
print()
if __name__ == "__main__":
main()
| true | true |
f71bd6747f7fe25f8bd69690c6d90ecfa7797cb2 | 27,045 | py | Python | amos/observe/traits.py | WithPrecedent/amos | 35b2f5b8d493eac946b583dfcd9d0553e7565292 | [
"Apache-2.0"
] | null | null | null | amos/observe/traits.py | WithPrecedent/amos | 35b2f5b8d493eac946b583dfcd9d0553e7565292 | [
"Apache-2.0"
] | null | null | null | amos/observe/traits.py | WithPrecedent/amos | 35b2f5b8d493eac946b583dfcd9d0553e7565292 | [
"Apache-2.0"
] | null | null | null | """
traits: tools for examining classes, instances, and other python objects
Corey Rayburn Yung <coreyrayburnyung@gmail.com>
Copyright 2021, Corey Rayburn Yung
License: Apache-2.0
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
Contents:
contains
contains_dict
contains_list
contains_set
contains_tuple
parallel_contains
serial_contains
get_annotations
get_attributes
get_contents
get_contents_types
get_methods
get_name
get_properties
get_signatures
get_types
get_variables
has_attributes
has_methods
has_properties
has_signatures
has_traits
has_types
has_types_dict
has_types_list
has_types_sequence
is_class_attribute
is_container
is_function
is_iterable
is_method
is_nested
is_sequence
name_attributes
name_methods
name_parameters
name_properties
name_variables
ToDo:
    Adding parsing functionality to signature-related functions to find
equivalence when one signature has subtypes of the other signature
(e.g., one type annotation is 'dict' and the other is 'MutableMapping').
It might be necessary to create a separate Signature-like class to
implement this functionality. This includes fixing or abandoning
'has_annotations' due to issues matching type annotations.
Add support for nagata Kinds once that system is complete.
Add support for types (using type annotations) in the 'contains' function so
that 'contains' can be applied to classes and not just instances.
Add 'dispatcher' framework to 'contains' once the dispatcher framework is
completed in the 'bobbie' package and the Kind system is completed in
the nagata package. This should replace existing usages of python's
        singledispatch, which doesn't properly deal with subtypes.
"""
from __future__ import annotations
from collections.abc import (
Container, Hashable, Iterable, Mapping, MutableSequence, Sequence, Set)
import functools
import inspect
import types
from typing import Any, Optional, Type, Union
from ..repair import modify
@functools.singledispatch
def contains(
    item: object,
    contents: Union[Type[Any], tuple[Type[Any], ...]]) -> bool:
    """Returns whether 'item' contains the type(s) in 'contents'.

    Generic singledispatch fallback: concrete overloads for Mapping,
    MutableSequence, Set, tuple, Sequence and Container are registered
    elsewhere in this module; anything else lands here and raises.

    Args:
        item (object): item to examine.
        contents (Union[Type[Any], tuple[Type[Any], ...]]): types to check for
            in 'item' contents.

    Raises:
        TypeError: if no overload is registered for type(item).

    Returns:
        bool: whether 'item' holds the types in 'contents'.

    """
    raise TypeError(f'item {item} is not supported by {__name__}')
@contains.register(Mapping)
def dict_contains(
    item: Mapping[Hashable, Any],
    contents: tuple[Union[Type[Any], tuple[Type[Any], ...]],
                    Union[Type[Any], tuple[Type[Any], ...]]]) -> bool:
    """Returns whether dict 'item' contains the type(s) in 'contents'.

    Args:
        item (Mapping[Hashable, Any]): item to examine.
        contents: two-tuple where contents[0] is the type(s) required of every
            key and contents[1] is the type(s) required of every value.

    Returns:
        bool: whether all keys and values match their required types.

    """
    return (
        serial_contains(item = item.keys(), contents = contents[0])
        and serial_contains(item = item.values(), contents = contents[1]))
@contains.register(MutableSequence)
def list_contains(
    item: MutableSequence[Any],
    contents: Union[Type[Any], tuple[Type[Any], ...]]) -> bool:
    """Returns whether list 'item' contains the type(s) in 'contents'.

    Thin delegate: every element must match 'contents' (see serial_contains).

    Args:
        item (MutableSequence[Any]): item to examine.
        contents (Union[Type[Any], tuple[Type[Any], ...]]): types to check for
            in 'item' contents.

    Returns:
        bool: whether 'item' holds the types in 'contents'.

    """
    return serial_contains(item = item, contents = contents)
@contains.register(Set)
def set_contains(
    item: Set[Any],
    contents: Union[Type[Any], tuple[Type[Any], ...]]) -> bool:
    """Returns whether set 'item' contains the type(s) in 'contents'.

    Thin delegate: every element must match 'contents' (see serial_contains).

    Args:
        item (Set[Any]): item to examine.
        contents (Union[Type[Any], tuple[Type[Any], ...]]): types to check for
            in 'item' contents.

    Returns:
        bool: whether 'item' holds the types in 'contents'.

    """
    return serial_contains(item = item, contents = contents)
@contains.register(tuple)
def tuple_contains(
    item: tuple[Any, ...],
    contents: Union[Type[Any], tuple[Type[Any], ...]]) -> bool:
    """Returns whether tuple 'item' contains the type(s) in 'contents'.

    When 'contents' is itself a tuple of the same length as 'item', each
    position of 'item' is matched against the corresponding entry of
    'contents'; otherwise every element must match 'contents'.

    Args:
        item (tuple[Any, ...]): item to examine.
        contents (Union[Type[Any], tuple[Type[Any], ...]]): types to check for
            in 'item' contents.

    Returns:
        bool: whether 'item' holds the types in 'contents'.

    """
    positional = isinstance(contents, tuple) and len(item) == len(contents)
    checker = parallel_contains if positional else serial_contains
    return checker(item = item, contents = contents)
@contains.register(Sequence)
def parallel_contains(
    item: Sequence[Any],
    contents: tuple[Type[Any], ...]) -> bool:
    """Returns whether each element of 'item' matches the type at the same
    position in 'contents'.

    Args:
        item (Sequence[Any]): item to examine.
        contents (tuple[Type[Any], ...]): one type (or tuple of types) per
            element of 'item'; assumed to be the same length as 'item'.

    Returns:
        bool: whether every item[i] is an instance of contents[i].

    """
    # Bug fix: the original iterated 'for i in enumerate(item)', which yields
    # (index, element) tuples, so 'item[i]' raised TypeError on any non-empty
    # input.  Pair elements with their expected types positionally instead.
    return all(
        isinstance(element, kind) for element, kind in zip(item, contents))
@contains.register(Container)
def serial_contains(
    item: Container[Any],
    contents: Union[Type[Any], tuple[Type[Any], ...]]) -> bool:
    """Returns whether every element of 'item' matches 'contents'.

    Args:
        item (Container[Any]): iterable container to examine.
        contents (Union[Type[Any], tuple[Type[Any], ...]]): type (or tuple of
            acceptable types) that every element must be an instance of.

    Returns:
        bool: whether 'item' holds only the types in 'contents'.

    """
    return all(isinstance(i, contents) for i in item)
def get_annotations(
    item: object,
    include_private: bool = False) -> dict[str, Type[Any]]:
    """Returns the type annotations declared on 'item'.

    Args:
        item (object): instance or class to examine.
        include_private (bool): whether names beginning with '_' are kept
            (True) or filtered out (False). Defaults to False.

    Returns:
        dict[str, Type[Any]]: mapping of annotated attribute names to their
            annotations.

    """
    found = item.__annotations__
    if not include_private:
        found = {
            key: kind for key, kind in found.items()
            if not key.startswith('_')}
    return found
def get_attributes(
    item: object,
    include_private: bool = False) -> dict[str, Any]:
    """Returns dict of attributes of 'item'.

    Args:
        item (object): item to examine.
        include_private (bool): whether to include items that begin with '_'
            (True) or to exclude them (False). Defaults to False.

    Returns:
        dict[str, Any]: dict of attributes in 'item' (keys are attribute names
            and values are attribute values), including methods/properties.

    """
    attributes = name_attributes(item = item, include_private = include_private)
    values = [getattr(item, m) for m in attributes]
    return dict(zip(attributes, values))
def get_methods(
    item: Union[object, Type[Any]],
    include_private: bool = False) -> dict[str, types.MethodType]:
    """Returns dict of methods of 'item'.

    Args:
        item (Union[object, Type[Any]]): class or instance to examine.
        include_private (bool): whether to include items that begin with '_'
            (True) or to exclude them (False). Defaults to False.

    Returns:
        dict[str, types.MethodType]: dict of methods in 'item' (keys are method
            names and values are bound methods).

    """
    methods = name_methods(item = item, include_private = include_private)
    # Bug fix: the original returned a bare list despite the name, return
    # annotation and docstring all promising a name -> method dict (matching
    # the sibling get_attributes / get_properties / get_signatures).
    return {name: getattr(item, name) for name in methods}
def get_name(item: Any, default: Optional[str] = None) -> Optional[str]:
    """Returns str name representation of 'item'.

    Resolution order matters: a str is returned verbatim; then an instance's
    'name' attribute; then a snake_case form of '__name__'; then a snake_case
    form of the class name; finally 'default'.

    Args:
        item (Any): item to determine a str name.
        default (Optional[str]): fallback name if all other strategies fail.

    Returns:
        str: a name representation of 'item.'

    """
    if isinstance(item, str):
        return item
    elif (
        hasattr(item, 'name')
        and not inspect.isclass(item)
        and isinstance(item.name, str)):
        return item.name
    else:
        try:
            # Classes and functions carry __name__; instances usually do not.
            return modify.snakify(item.__name__)
        except AttributeError:
            if item.__class__.__name__ is not None:
                return modify.snakify(item.__class__.__name__)
            else:
                return default
def get_properties(
    item: object,
    include_private: bool = False) -> dict[str, Any]:
    """Returns properties of 'item'.

    Args:
        item (object): instance to examine.
        include_private (bool): whether to include items that begin with '_'
            (True) or to exclude them (False). Defaults to False.

    Returns:
        dict[str, Any]: dict of properties in 'item' (keys are property names
            and values are the properties' current values).

    """
    properties = name_properties(item = item, include_private = include_private)
    values = [getattr(item, p) for p in properties]
    return dict(zip(properties, values))
def get_signatures(
    item: Union[object, Type[Any]],
    include_private: bool = False) -> dict[str, inspect.Signature]:
    """Returns dict of method signatures of 'item'.

    Args:
        item (Union[object, Type[Any]]): class or instance to examine.
        include_private (bool): whether to include items that begin with '_'
            (True) or to exclude them (False). Defaults to False.

    Returns:
        dict[str, inspect.Signature]: dict of method signatures in 'item' (keys
            are method names and values are method signatures).

    """
    methods = name_methods(item = item, include_private = include_private)
    signatures = [inspect.signature(getattr(item, m)) for m in methods]
    return dict(zip(methods, signatures))
def get_variables(
    item: object,
    include_private: bool = False) -> dict[str, Any]:
    """Returns dict of attributes of 'item' that are not methods or properties.

    Args:
        item (object): instance to examine.
        include_private (bool): whether to include items that begin with '_'
            (True) or to exclude them (False). Defaults to False.

    Returns:
        dict[str, Any]: dict of attributes in 'item' (keys are attribute names
            and values are attribute values) that are not methods or properties.

    """
    attributes = name_attributes(item = item, include_private = include_private)
    methods = name_methods(item = item, include_private = include_private)
    properties = name_properties(item = item, include_private = include_private)
    # Keep only plain data attributes.
    variables = [
        a for a in attributes if a not in methods and a not in properties]
    values = [getattr(item, m) for m in variables]
    return dict(zip(variables, values))
def has_attributes(
    item: Union[object, Type[Any]],
    attributes: MutableSequence[str]) -> bool:
    """Returns whether every name in 'attributes' exists on 'item'.

    Args:
        item (Union[object, Type[Any]]): class or instance to examine.
        attributes (MutableSequence[str]): attribute names to look for.

    Returns:
        bool: whether all 'attributes' exist on 'item' (vacuously True for an
            empty list).

    """
    for name in attributes:
        if not hasattr(item, name):
            return False
    return True
def has_methods(
    item: Union[object, Type[Any]],
    methods: Union[str, MutableSequence[str]]) -> bool:
    """Returns whether 'item' has 'methods' which are methods.

    Args:
        item (Union[object, Type[Any]]): class or instance to examine.
        methods (Union[str, MutableSequence[str]]): name(s) of methods to check
            to see if they exist in 'item' and are types.MethodType.

    Returns:
        bool: whether all 'methods' exist in 'item' and are methods.

    """
    # Bug fix: the original called convert.iterify, but 'convert' is never
    # imported in this module (only 'modify' is), raising NameError at call
    # time.  A lone name is wrapped into a list inline instead.
    if isinstance(methods, str):
        methods = [methods]
    return all(is_method(item = item, attribute = m) for m in methods)
def has_properties(
    item: Union[object, Type[Any]],
    properties: Union[str, MutableSequence[str]]) -> bool:
    """Returns whether 'item' has 'properties' which are properties.

    Args:
        item (Union[object, Type[Any]]): class or instance to examine.
        properties (Union[str, MutableSequence[str]]): name(s) of properties to
            check to see if they exist in 'item' and are property type.

    Returns:
        bool: whether all 'properties' exist in 'item' as properties.

    """
    # Bug fix: the original called convert.iterify, but 'convert' is never
    # imported in this module (only 'modify' is), raising NameError at call
    # time.  A lone name is wrapped into a list inline instead.
    if isinstance(properties, str):
        properties = [properties]
    return all(is_property(item = item, attribute = p) for p in properties)
def has_signatures(
    item: Union[object, Type[Any]],
    signatures: Mapping[str, inspect.Signature]) -> bool:
    """Returns whether 'item' has methods matching 'signatures'.

    Args:
        item (Union[object, Type[Any]]): class or instance to examine.
        signatures (Mapping[str, inspect.Signature]): keys are the names of
            methods and values are the corresponding expected signatures.

    Returns:
        bool: whether every named method exists on 'item' with an equal
            signature.

    """
    existing = get_signatures(item = item, include_private = True)
    return all(
        name in existing and existing[name] == expected
        for name, expected in signatures.items())
def has_traits(
    item: Union[object, Type[Any]],
    attributes: Optional[MutableSequence[str]] = None,
    methods: Optional[MutableSequence[str]] = None,
    properties: Optional[MutableSequence[str]] = None,
    signatures: Optional[Mapping[str, inspect.Signature]] = None) -> bool:
    """Returns if 'item' has 'attributes', 'methods' and 'properties'.

    Any argument left as None is treated as an empty (vacuously satisfied)
    requirement.

    Args:
        item (Union[object, Type[Any]]): class or instance to examine.
        attributes (MutableSequence[str]): names of attributes to check to see
            if they exist in 'item'.
        methods (MutableSequence[str]): name(s) of methods to check to see if
            they exist in 'item' and are types.MethodType.
        properties (MutableSequence[str]): names of properties to check to see
            if they exist in 'item' and are property type.
        signatures (Mapping[str, inspect.Signature]): keys are the names of
            methods and values are the corresponding method signatures.

    Returns:
        bool: whether all passed requirements are satisfied by 'item'.

    """
    # Normalize to the class so the individual checks behave uniformly.
    if not inspect.isclass(item):
        item = item.__class__
    attributes = attributes or []
    methods = methods or []
    properties = properties or []
    signatures = signatures or {}
    return (
        has_attributes(item = item, attributes = attributes)
        and has_methods(item = item, methods = methods)
        and has_properties(item = item, properties = properties)
        and has_signatures(item = item, signatures = signatures))
@functools.singledispatch
def has_types(item: object) -> Optional[Union[
    tuple[Type[Any], ...],
    tuple[tuple[Type[Any], ...], tuple[Type[Any], ...]]]]:
    """Returns types contained in 'item'.

    Generic singledispatch fallback: overloads for Mapping, MutableSequence
    and Sequence are registered elsewhere in this module; anything else lands
    here and raises.

    Args:
        item (object): item to examine.

    Raises:
        TypeError: if no overload is registered for type(item).

    Returns:
        Optional[Union[tuple[Type[Any], ...], tuple[tuple[Type[Any], ...],
            tuple[Type[Any], ...]]]]: the types of things contained in 'item'.

    """
    raise TypeError(f'item {item} is not supported by {__name__}')
@has_types.register(Mapping)
def has_types_dict(
    item: Mapping[Hashable, Any]) -> Optional[
        tuple[tuple[Type[Any], ...], tuple[Type[Any], ...]]]:
    """Returns types contained in mapping 'item'.

    Args:
        item (Mapping[Hashable, Any]): item to examine.

    Returns:
        Optional[tuple[tuple[Type[Any], ...], tuple[Type[Any], ...]]]: a pair
            of (key types, value types), or None if 'item' is not a Mapping.

    """
    if isinstance(item, Mapping):
        # Bug fix: dict views are not Sequences, so passing them straight to
        # has_types_sequence always produced (None, None); materialize the
        # views as lists first.
        key_types = has_types_sequence(item = list(item.keys()))
        value_types = has_types_sequence(item = list(item.values()))
        return tuple([key_types, value_types])
    else:
        return None
@has_types.register(MutableSequence)
def has_types_list(item: list[Any]) -> Optional[tuple[Type[Any], ...]]:
    """Returns types contained in list 'item'.

    Args:
        item (list[Any]): item to examine.

    Returns:
        Optional[tuple[Type[Any], ...]]: unique element types in first-seen
            order, or None if 'item' is not a list.

    """
    # Bug fix: the original was copy-pasted from has_types_dict and called
    # item.keys()/item.values() on a list, raising AttributeError.  Delegate
    # element-type collection to the Sequence overload instead.
    if isinstance(item, list):
        return has_types_sequence(item = item)
    else:
        return None
@has_types.register(Sequence)
def has_types_sequence(item: Sequence[Any]) -> Optional[tuple[Type[Any], ...]]:
    """Returns types contained in sequence 'item'.

    Args:
        item (Sequence[Any]): item to examine.

    Returns:
        Optional[tuple[Type[Any], ...]]: unique element types in first-seen
            order, or None if 'item' is not a Sequence.

    """
    if not isinstance(item, Sequence):
        return None
    # dict.fromkeys de-duplicates while preserving first-seen order.
    return tuple(dict.fromkeys(type(element) for element in item))
def is_class_attribute(item: Union[object, Type[Any]], attribute: str) -> bool:
    """Returns if 'attribute' is a class attribute of 'item'.

    An attribute qualifies when it exists on the class and is neither a bound
    method nor a property.
    """
    if not inspect.isclass(item):
        item = item.__class__
    return (
        hasattr(item, attribute)
        and not is_method(item = item, attribute = attribute)
        and not is_property(item = item, attribute = attribute))
def is_container(item: Union[object, Type[Any]]) -> bool:
    """Returns if 'item' is a container and not a str.

    Args:
        item (Union[object, Type[Any]]): class or instance to examine.

    Returns:
        bool: if 'item' is a container but not a str.

    """
    kind = item if inspect.isclass(item) else item.__class__
    return issubclass(kind, Container) and not issubclass(kind, str)
def is_function(item: Union[object, Type[Any]], attribute: Any) -> bool:
    """Returns if 'attribute' is a plain function.

    'attribute' may be the object itself or the name of an attribute on
    'item'; a missing name yields False rather than raising.
    """
    target = attribute
    if isinstance(target, str):
        try:
            target = getattr(item, target)
        except AttributeError:
            return False
    return isinstance(target, types.FunctionType)
def is_iterable(item: Union[object, Type[Any]]) -> bool:
    """Returns if 'item' is iterable and is NOT a str type.

    Args:
        item (Union[object, Type[Any]]): class or instance to examine.

    Returns:
        bool: if 'item' is iterable but not a str.

    """
    # Normalize instances to their class for the issubclass checks.
    if not inspect.isclass(item):
        item = item.__class__
    return issubclass(item, Iterable) and not issubclass(item, str)
def is_method(item: Union[object, Type[Any]], attribute: Any) -> bool:
    """Returns if 'attribute' is a bound method of 'item'.

    'attribute' may be the object itself or the name of an attribute on
    'item'; a missing name yields False rather than raising.
    """
    target = attribute
    if isinstance(target, str):
        try:
            target = getattr(item, target)
        except AttributeError:
            return False
    return inspect.ismethod(target)
def is_nested(item: Mapping[Any, Any]) -> bool:
    """Returns if 'item' is a mapping nested at least one level.

    Args:
        item (Mapping[Any, Any]): item to examine.

    Returns:
        bool: if 'item' is a Mapping with at least one Mapping value.

    """
    if not isinstance(item, Mapping):
        return False
    for value in item.values():
        if isinstance(value, Mapping):
            return True
    return False
def is_property(item: Union[object, Type[Any]], attribute: Any) -> bool:
    """Returns if 'attribute' is a property of 'item'.

    Properties live on the class, so instances are normalized to their class
    before lookup; a missing name yields False rather than raising.
    """
    owner = item if inspect.isclass(item) else item.__class__
    target = attribute
    if isinstance(target, str):
        try:
            target = getattr(owner, target)
        except AttributeError:
            return False
    return isinstance(target, property)
def is_sequence(item: Union[object, Type[Any]]) -> bool:
    """Returns if 'item' is a sequence and is NOT a str type.

    Args:
        item (Union[object, Type[Any]]): class or instance to examine.

    Returns:
        bool: if 'item' is a sequence but not a str.

    """
    # Normalize instances to their class for the issubclass checks.
    if not inspect.isclass(item):
        item = item.__class__
    return issubclass(item, Sequence) and not issubclass(item, str)
def is_variable(item: Union[object, Type[Any]], attribute: str) -> bool:
    """Returns if 'attribute' is a simple data attribute of 'item'.

    Args:
        item (Union[object, Type[Any]]): class or instance to examine.
        attribute (str): name of the attribute to check.

    Returns:
        bool: whether 'attribute' exists on 'item' and is neither a function
            nor a property.

    """
    return (
        hasattr(item, attribute)
        and not is_function(item = item, attribute = attribute)
        and not is_property(item = item, attribute = attribute))
def name_attributes(
    item: Union[object, Type[Any]],
    include_private: bool = False) -> list[str]:
    """Returns attribute names of 'item'.

    Args:
        item (Union[object, Type[Any]]): item to examine.
        include_private (bool): whether to include items that begin with '_'
            (True) or to exclude them (False). Defaults to False.

    Returns:
        list[str]: names of attributes in 'item' (everything dir() reports).

    """
    names = dir(item)
    if not include_private:
        names = modify.drop_privates(item = names)
    return names
def name_methods(
    item: Union[object, Type[Any]],
    include_private: bool = False) -> list[str]:
    """Returns method names of 'item'.

    Args:
        item (Union[object, Type[Any]]): item to examine.
        include_private (bool): whether to include items that begin with '_'
            (True) or to exclude them (False). Defaults to False.

    Returns:
        list[str]: names of methods in 'item'.

    """
    methods = [
        a for a in dir(item)
        if is_method(item = item, attribute = a)]
    if not include_private:
        methods = modify.drop_privates(item = methods)
    return methods
def name_parameters(item: Type[Any]) -> list[str]:
    """Returns list of parameters based on annotations of 'item'.

    Args:
        item (Type[Any]): class to get parameters of.

    Returns:
        list[str]: the class's annotated attribute names, in declaration
            order.

    """
    return list(item.__annotations__)
def name_properties(
    item: Union[object, Type[Any]],
    include_private: bool = False) -> list[str]:
    """Returns property names of 'item'.

    Args:
        item (Union[object, Type[Any]]): item to examine.
        include_private (bool): whether to include items that begin with '_'
            (True) or to exclude them (False). Defaults to False.

    Returns:
        list[str]: names of properties in 'item'.

    """
    # Properties live on the class, so normalize instances first.
    if not inspect.isclass(item):
        item = item.__class__
    properties = [
        a for a in dir(item)
        if is_property(item = item, attribute = a)]
    if not include_private:
        properties = modify.drop_privates(item = properties)
    return properties
def name_variables(
    item: Union[object, Type[Any]],
    include_private: bool = False) -> list[str]:
    """Returns variable names of 'item'.

    Args:
        item (Union[object, Type[Any]]): item to examine.
        include_private (bool): whether to include items that begin with '_'
            (True) or to exclude them (False). Defaults to False.

    Returns:
        list[str]: names of attributes in 'item' that are neither methods nor
            properties.

    """
    names = [a for a in dir(item) if is_variable(item = item, attribute = a)]
    if not include_private:
        names = modify.drop_privates(item = names)
    return names
# def has_annotations(
# item: Union[object, Type[Any]],
# attributes: Mapping[str, Type[Any]]) -> bool:
# """Returns whether 'attributes' exist in 'item' and are the right type.
# Args:
# item (Union[object, Type[Any]]): class or instance to examine.
# attributes (dict[str, Type[Any]]): dict where keys are the attribute
# names and values are the expected types of whose named attributes.
# Returns
# bool: whether all of the 'attributes' exist in 'item' and are of the
# proper type.
# """
# matched = True
# if inspect.isclass(item):
# for attribute, value in attributes.items():
# if value is not None:
# try:
# testing = getattr(item, attribute)
# testing = item.__annotations__[testing]
# except AttributeError:
# return False
# try:
# if not issubclass(testing, value):
# return False
# except TypeError:
# pass
# else:
# for attribute, value in attributes.items():
# if value is not None:
# try:
# testing = getattr(item, attribute)
# except AttributeError:
# return False
# try:
# if not isinstance(testing, value):
# return False
# except TypeError:
# pass
# return matched
| 34.147727 | 80 | 0.614346 | from __future__ import annotations
from collections.abc import (
Container, Hashable, Iterable, Mapping, MutableSequence, Sequence, Set)
import functools
import inspect
import types
from typing import Any, Optional, Type, Union
from ..repair import modify
@functools.singledispatch
def contains(
item: object,
contents: Union[Type[Any], tuple[Type[Any], ...]]) -> bool:
raise TypeError(f'item {item} is not supported by {__name__}')
@contains.register(Mapping)
def dict_contains(
item: Mapping[Hashable, Any],
contents: tuple[Union[Type[Any], tuple[Type[Any], ...]],
Union[Type[Any], tuple[Type[Any], ...]]]) -> bool:
return (
serial_contains(item = item.keys(), contents = contents[0])
and serial_contains(item = item.values(), contents = contents[1]))
@contains.register(MutableSequence)
def list_contains(
item: MutableSequence[Any],
contents: Union[Type[Any], tuple[Type[Any], ...]]) -> bool:
return serial_contains(item = item, contents = contents)
@contains.register(Set)
def set_contains(
item: Set[Any],
contents: Union[Type[Any], tuple[Type[Any], ...]]) -> bool:
return serial_contains(item = item, contents = contents)
@contains.register(tuple)
def tuple_contains(
item: tuple[Any, ...],
contents: Union[Type[Any], tuple[Type[Any], ...]]) -> bool:
if isinstance(contents, tuple) and len(item) == len(contents):
technique = parallel_contains
else:
technique = serial_contains
return technique(item = item, contents = contents)
@contains.register(Sequence)
def parallel_contains(
    item: Sequence[Any],
    contents: tuple[Type[Any], ...]) -> bool:
    """Return whether each element of `item` matches the type at the same
    position in `contents`.

    Bug fix: the original iterated `for i in enumerate(item)`, which yields
    (index, value) tuples, so `item[i]` / `contents[i]` raised TypeError on
    every non-empty input. Now pairs each element with its positional type.
    """
    return all(
        isinstance(element, contents[i]) for i, element in enumerate(item))
@contains.register(Container)
def serial_contains(
    item: Container[Any],
    contents: Union[Type[Any], tuple[Type[Any], ...]]) -> bool:
    """Return whether every element of `item` is an instance of `contents`
    (a type or tuple of types)."""
    for element in item:
        if not isinstance(element, contents):
            return False
    return True
def get_annotations(
    item: object,
    include_private: bool = False) -> dict[str, Type[Any]]:
    """Return the `__annotations__` of `item`.

    When `include_private` is False, names with a leading underscore are
    filtered out (a fresh dict is returned in that case).
    """
    found = item.__annotations__
    if include_private:
        return found
    return {
        name: annotation for name, annotation in found.items()
        if not name.startswith('_')}
def get_attributes(
    item: object,
    include_private: bool = False) -> dict[str, Any]:
    """Return a dict mapping attribute names of `item` to their values.

    Includes everything `dir()` reports (methods and properties too);
    private names are excluded unless `include_private` is True.
    """
    attributes = name_attributes(item = item, include_private = include_private)
    values = [getattr(item, m) for m in attributes]
    return dict(zip(attributes, values))
def get_methods(
    item: Union[object, Type[Any]],
    include_private: bool = False) -> dict[str, types.MethodType]:
    """Return a dict mapping method names of `item` to the bound methods.

    Bug fix: the original returned a bare list of methods, contradicting
    its declared `dict[str, types.MethodType]` return type and the dict
    convention of the sibling `get_attributes` / `get_signatures` helpers.
    """
    methods = name_methods(item = item, include_private = include_private)
    values = [getattr(item, m) for m in methods]
    return dict(zip(methods, values))
def get_name(item: Any, default: Optional[str] = None) -> Optional[str]:
    """Return a snake_case name for `item`.

    Resolution order: `item` itself if it is a str; a str `name` attribute
    on an instance (classes are skipped so a class-level `name` descriptor
    is not mistaken for a name); a snakified `__name__`; a snakified class
    name; finally `default`.
    """
    if isinstance(item, str):
        return item
    elif (
        hasattr(item, 'name')
        and not inspect.isclass(item)
        and isinstance(item.name, str)):
        return item.name
    else:
        try:
            return modify.snakify(item.__name__)
        except AttributeError:
            # Instances lack __name__; fall back to their class's name.
            if item.__class__.__name__ is not None:
                return modify.snakify(item.__class__.__name__)
            else:
                return default
def get_properties(
    item: object,
    include_private: bool = False) -> dict[str, Any]:
    """Return a dict mapping property names of `item` to their current
    values (evaluating each property getter once)."""
    properties = name_properties(item = item, include_private = include_private)
    values = [getattr(item, p) for p in properties]
    return dict(zip(properties, values))
def get_signatures(
    item: Union[object, Type[Any]],
    include_private: bool = False) -> dict[str, inspect.Signature]:
    """Return a dict mapping method names of `item` to their
    `inspect.Signature` objects."""
    methods = name_methods(item = item, include_private = include_private)
    signatures = [inspect.signature(getattr(item, m)) for m in methods]
    return dict(zip(methods, signatures))
def get_variables(
    item: object,
    include_private: bool = False) -> dict[str, Any]:
    """Return a dict of `item`'s plain data attributes and their values.

    "Variables" are attributes that are neither methods nor properties.
    """
    attributes = name_attributes(item = item, include_private = include_private)
    methods = name_methods(item = item, include_private = include_private)
    properties = name_properties(item = item, include_private = include_private)
    variables = [
        a for a in attributes if a not in methods and a not in properties]
    values = [getattr(item, m) for m in variables]
    return dict(zip(variables, values))
def has_attributes(
    item: Union[object, Type[Any]],
    attributes: MutableSequence[str]) -> bool:
    """Return whether `item` has every attribute named in `attributes`."""
    for name in attributes:
        if not hasattr(item, name):
            return False
    return True
def has_methods(
    item: Union[object, Type[Any]],
    methods: Union[str, MutableSequence[str]]) -> bool:
    """Return whether `item` has every method named in `methods`.

    `methods` may be a single name or a sequence of names.

    Bug fix: the original called `convert.iterify`, but `convert` is never
    imported in this module (only `modify` is), making every call raise
    NameError. The single-name/sequence normalization is now done inline.
    """
    if isinstance(methods, str):
        methods = [methods]
    else:
        methods = list(methods)
    return all(is_method(item = item, attribute = m) for m in methods)
def has_properties(
    item: Union[object, Type[Any]],
    properties: Union[str, MutableSequence[str]]) -> bool:
    """Return whether `item` has every property named in `properties`.

    `properties` may be a single name or a sequence of names.

    Bug fix: the original called `convert.iterify`, but `convert` is never
    imported in this module (only `modify` is), making every call raise
    NameError. The single-name/sequence normalization is now done inline.
    """
    if isinstance(properties, str):
        properties = [properties]
    else:
        properties = list(properties)
    return all(is_property(item = item, attribute = p) for p in properties)
def has_signatures(
    item: Union[object, Type[Any]],
    signatures: Mapping[str, inspect.Signature]) -> bool:
    """Return whether `item` exposes every method named in `signatures`
    with exactly the given `inspect.Signature`."""
    existing = get_signatures(item = item, include_private = True)
    for name, parameters in signatures.items():
        if name not in existing:
            return False
        if existing[name] != parameters:
            return False
    return True
def has_traits(
    item: Union[object, Type[Any]],
    attributes: Optional[MutableSequence[str]] = None,
    methods: Optional[MutableSequence[str]] = None,
    properties: Optional[MutableSequence[str]] = None,
    signatures: Optional[Mapping[str, inspect.Signature]] = None) -> bool:
    """Return whether `item` (or its class) has all the named attributes,
    methods, properties, and exact method signatures.

    Any argument left as None is treated as an empty requirement, so
    `has_traits(x)` is trivially True.
    """
    if not inspect.isclass(item):
        item = item.__class__
    attributes = attributes or []
    methods = methods or []
    properties = properties or []
    signatures = signatures or {}
    return (
        has_attributes(item = item, attributes = attributes)
        and has_methods(item = item, methods = methods)
        and has_properties(item = item, properties = properties)
        and has_signatures(item = item, signatures = signatures))
@functools.singledispatch
def has_types(item: object) -> Optional[Union[
    tuple[Type[Any], ...],
    tuple[tuple[Type[Any], ...], tuple[Type[Any], ...]]]]:
    """Return the distinct element types contained in `item`.

    Generic fallback of the singledispatch: types without a registered
    implementation (Mapping, MutableSequence, Sequence below) are rejected.

    Raises:
        TypeError: always, for unregistered item types.
    """
    raise TypeError(f'item {item} is not supported by {__name__}')
@has_types.register(Mapping)
def has_types_dict(
    item: Mapping[Hashable, Any]) -> Optional[
        tuple[tuple[Type[Any], ...], tuple[Type[Any], ...]]]:
    """Return a 2-tuple of (distinct key types, distinct value types) for a
    mapping, or None if `item` is not a Mapping."""
    if isinstance(item, Mapping):
        key_types = has_types_sequence(item = item.keys())
        value_types = has_types_sequence(item = item.values())
        return tuple([key_types, value_types])
    else:
        return None
@has_types.register(MutableSequence)
def has_types_list(item: list[Any]) -> Optional[tuple[Type[Any], ...]]:
    """Return a tuple of the distinct element types in the list `item`, or
    None if `item` is not a list.

    Bug fix: the original called `item.keys()` / `item.values()` on a list
    (copy-paste from the Mapping variant), which raised AttributeError on
    every call. Lists are flat, so the element types are gathered directly.
    """
    if isinstance(item, list):
        return has_types_sequence(item = item)
    else:
        return None
@has_types.register(Sequence)
def has_types_sequence(item: Sequence[Any]) -> Optional[tuple[Type[Any], ...]]:
    """Return a tuple of the distinct element types in `item`, preserving
    first-appearance order, or None if `item` is not a Sequence."""
    if not isinstance(item, Sequence):
        return None
    kinds = []
    for element in item:
        element_kind = type(element)
        if element_kind not in kinds:
            kinds.append(element_kind)
    return tuple(kinds)
def is_class_attribute(item: Union[object, Type[Any]], attribute: str) -> bool:
    """Return whether `attribute` names a plain class-level attribute of
    `item`'s class (exists, and is neither a method nor a property)."""
    if not inspect.isclass(item):
        item = item.__class__
    return (
        hasattr(item, attribute)
        and not is_method(item = item, attribute = attribute)
        and not is_property(item = item, attribute = attribute))
def is_container(item: Union[object, Type[Any]]) -> bool:
    """Return whether `item` (a class or an instance) is a Container type,
    excluding str (strings are containers but rarely treated as such)."""
    kind = item if inspect.isclass(item) else item.__class__
    return issubclass(kind, Container) and not issubclass(kind, str)
def is_function(item: Union[object, Type[Any]], attribute: Any) -> bool:
    """Return whether `attribute` (a name on `item`, or an object) is a
    plain Python function. A missing attribute name yields False."""
    target = attribute
    if isinstance(target, str):
        try:
            target = getattr(item, target)
        except AttributeError:
            return False
    return isinstance(target, types.FunctionType)
def is_iterable(item: Union[object, Type[Any]]) -> bool:
    """Return whether `item` (a class or an instance) is an Iterable type,
    excluding str."""
    if not inspect.isclass(item):
        item = item.__class__
    return issubclass(item, Iterable) and not issubclass(item, str)
def is_method(item: Union[object, Type[Any]], attribute: Any) -> bool:
    """Return whether `attribute` (a name on `item`, or an object) is a
    bound method. A missing attribute name yields False."""
    target = attribute
    if isinstance(target, str):
        try:
            target = getattr(item, target)
        except AttributeError:
            return False
    return inspect.ismethod(target)
def is_nested(item: Mapping[Any, Any]) -> bool:
    """Return whether `item` is a mapping with at least one mapping among
    its values."""
    if not isinstance(item, Mapping):
        return False
    for value in item.values():
        if isinstance(value, Mapping):
            return True
    return False
def is_property(item: Union[object, Type[Any]], attribute: Any) -> bool:
    """Return whether `attribute` (a name, or an object) is a property of
    `item`'s class. A missing attribute name yields False."""
    if not inspect.isclass(item):
        # Properties live on the class, not the instance.
        item = item.__class__
    if isinstance(attribute, str):
        try:
            attribute = getattr(item, attribute)
        except AttributeError:
            return False
    return isinstance(attribute, property)
def is_sequence(item: Union[object, Type[Any]]) -> bool:
    """Return whether `item` (a class or an instance) is a Sequence type,
    excluding str."""
    if not inspect.isclass(item):
        item = item.__class__
    return issubclass(item, Sequence) and not issubclass(item, str)
def is_variable(item: Union[object, Type[Any]], attribute: str) -> bool:
    """Return whether `attribute` names a plain data attribute of `item`
    (exists, and is neither a function nor a property)."""
    return (
        hasattr(item, attribute)
        and not is_function(item = item, attribute = attribute)
        and not is_property(item = item, attribute = attribute))
def name_attributes(
    item: Union[object, Type[Any]],
    include_private: bool = False) -> list[str]:
    """Return the attribute names of `item` (everything `dir()` reports),
    dropping private names unless `include_private` is True."""
    names = dir(item)
    if not include_private:
        names = modify.drop_privates(item = names)
    return names
def name_methods(
    item: Union[object, Type[Any]],
    include_private: bool = False) -> list[str]:
    """Return the names of `item`'s bound methods, dropping private names
    unless `include_private` is True."""
    methods = [
        a for a in dir(item)
        if is_method(item = item, attribute = a)]
    if not include_private:
        methods = modify.drop_privates(item = methods)
    return methods
def name_parameters(item: Type[Any]) -> list[str]:
    """Return the names of `item`'s annotated attributes, in declaration
    order (taken from the class's `__annotations__`)."""
    return [*item.__annotations__]
def name_properties(
    item: Union[object, Type[Any]],
    include_private: bool = False) -> list[str]:
    """Return the property names of `item`'s class, dropping private names
    unless `include_private` is True."""
    if not inspect.isclass(item):
        # Properties live on the class, not the instance.
        item = item.__class__
    properties = [
        a for a in dir(item)
        if is_property(item = item, attribute = a)]
    if not include_private:
        properties = modify.drop_privates(item = properties)
    return properties
def name_variables(
    item: Union[object, Type[Any]],
    include_private: bool = False) -> list[str]:
    """Return the names of `item`'s plain data attributes (not functions,
    not properties), dropping private names unless `include_private` is
    True."""
    names = [a for a in dir(item) if is_variable(item = item, attribute = a)]
    if not include_private:
        names = modify.drop_privates(item = names)
    return names
# Args:
# item (Union[object, Type[Any]]): class or instance to examine.
# attributes (dict[str, Type[Any]]): dict where keys are the attribute
# names and values are the expected types of whose named attributes.
# Returns
# bool: whether all of the 'attributes' exist in 'item' and are of the
# proper type.
# """
| true | true |
f71bd6d48a0ed9d4e130a00c843f740473f02ef1 | 14,332 | py | Python | TaskList/Task.py | CaptainDesAstres/Blender-Render-Manager | 39082e7833383bbe7dd414381f1b295e3b778439 | [
"MIT"
] | 5 | 2015-07-22T03:02:17.000Z | 2018-10-11T10:07:42.000Z | TaskList/Task.py | CaptainDesAstres/Blender-Render-Manager | 39082e7833383bbe7dd414381f1b295e3b778439 | [
"MIT"
] | null | null | null | TaskList/Task.py | CaptainDesAstres/Blender-Render-Manager | 39082e7833383bbe7dd414381f1b295e3b778439 | [
"MIT"
] | 1 | 2018-10-11T10:07:43.000Z | 2018-10-11T10:07:43.000Z | #!/usr/bin/python3.4
# -*-coding:Utf-8 -*
'''module to manage task settings'''
import xml.etree.ElementTree as xmlMod
import os, uuid, subprocess, shlex, time, datetime, threading
from save import *
from usefullFunctions import *
from Preferences.PresetList.Preset.Preset import *
from TaskList.FileInfo.FileInfo import *
from TaskList.TaskLog.TaskLog import *
class Task:
'''class to manage task settings'''
def __init__(self, path = None, scene = None, preset = None,\
fileInfo = None, xml= None):
'''initialize task object with default settings or saved settings'''
self.running = False
if xml is None:
self.defaultInit(path, scene, preset, fileInfo)
else:
self.fromXml(xml)
def defaultInit(self, path, scene, preset, fileInfo):
'''initialize Task object with default settings'''
self.path = path
self.scene = scene
self.preset = preset
self.info = fileInfo
self.uid = uuid.uuid4().hex
self.log = None
self.status = 'waiting'
# self.status possible values:
# waiting > the task have been set and is waiting to be run
# lock > the task is protected against running
# pendinglock> same thing for a task that already have been started
# ready > the task have been run once and task.log is set
# running > the task is running
# pause > the task have been started but is now waiting to be continued
# ended > the task have been totaly rendered
# erased > the task have been erased
def fromXml(self, xml):
'''initialize Task object with savedd settings'''
self.path = xml.get('path')
self.scene = xml.get('scene')
self.preset = xml.get('preset')
self.uid = xml.get('uid', uuid.uuid4().hex)
self.status = xml.get('status')
self.info = FileInfo(xml.find('fileInfo'))
node = xml.find('log')
if node is not None:
self.log = TaskLog(xml = node)
else:
self.log = None
def toXml(self):
'''export task settings into xml syntaxed string'''
xml = '<task path="'+XML.encode(self.path)+'" scene="'+XML.encode(self.scene)\
+'" preset="'+self.preset+'" uid="'+self.uid\
+'" status="'+self.status+'" >\n'\
+self.info.toXml()
if self.log is not None:
xml += self.log.toXml()
xml += '</task>\n'
return xml
def menu(self, log, index, tasks, preferences):
'''method to edit task settings'''
log.menuIn('Task n°'+str(index))
change = False
started = self.log is not None
if started:
menu = '''
Menu :
(TASK ALREADY STARTED : SOME OPTIONS IS NOT AVAILABLE!)
5- Change list row
6- Lock/Unlock task
7- Erase task
8- Copy task
9- See Rendering Log
0- Quit and save
'''
else:
menu = '''
Menu :
1- Change scene
2- Change preset
3- Edit preset
4- Active/desactive Renderlayer
5- Change list row
6- Lock/Unlock task
7- Erase task
8- Copy task
0- Quit and save
'''
while True:
log.print()
print('\n Edit Task n°'+str(index)+' :')
self.print()
print(menu)
choice= input('action : ').strip().lower()
if choice in ['0', 'q', 'quit', 'cancel']:
log.menuOut()
return change
elif choice == '1' and not started:
scene = self.info.sceneChoice(log, allChoice = False)
if scene is not None:
self.scene = scene[0]
log.write('Task n°'+str(index)+' : Scene set to «'+self.scene+'»')
change = True
elif choice == '2' and not started:
preset = Task.presetChoice(log, preferences)
if preset is not None :
self.preset = preset
log.write('Task n°'+str(index)+' : Preset set to «'+self.preset+'»')
change = True
elif choice == '3' and not started:
self.editPreset(log, preferences)
elif choice == '4' and not started:
confirm = self.info.scenes[self.scene].renderlayerActivator(log)
if confirm:
log.write('change task n°'+str(index)+' active renderlayer')
change = True
elif choice == '5':
confirm, select = tasks.move(log, [index])
if confirm:
change = True
index = select[0]
elif choice == '6':
if self.status in ['ready', 'pause']:
self.status = 'pendinglock'
change = True
log.write('Task n°'+str(index)+' locked')
elif self.status == 'waiting':
self.status = 'lock'
change = True
log.write('Task n°'+str(index)+' locked')
elif self.status == 'pendinglock':
self.status = 'pause'
change = True
log.write('Task n°'+str(index)+' unlocked')
elif self.status == 'lock':
self.status = 'waiting'
change = True
log.write('Task n°'+str(index)+' unlocked')
else:
log.error('Task n°'+str(index)+' is not lockable/unlockable')
elif choice == '7':
if tasks.remove(log, [index]):
log.menuOut()
log.write('Task n°'+str(index)+' removed')
return True
elif choice == '8':
new = self.copy()
new.status = 'waiting'
new.log = None
tasks.tasks.append(new)
log.write('a copy of the task n°'+str(index)+' have been added at the bottom of the task list')
change = True
elif choice == '9' and started:
self.log.menu(log, index)
else:
log.error('Unknow request!', False)
def menuArchive(self, log, index, tasks):
'''method to edit task settings'''
log.menuIn('Archived Task n°'+str(index))
change = False
while True:
log.print()
print('\n Task n°'+str(index)+' Log :')
self.print()
choice = input('''
Menu :
1- See Rendering Log
2- Copy Task In Rendering List
3- Erase Archived Task
0- Quit and save
action : ''').strip().lower()
if choice in ['0', 'q', 'quit', 'cancel']:
log.menuOut()
return change
elif choice == '1':
self.log.menu(log, index)
elif choice == '2':
new = self.copy()
new.status = 'waiting'
new.log = None
tasks.tasks.append(new)
log.write('A copy of the archived task n°'+str(index)+' have been added at the bottom of the pending task list.')
change = True
elif choice == '3':
conf = input('\n\nThe task gone be definitly erased. Confirm action (y) :').strip().lower()
if conf in ['y', 'yes']:
tasks.archive.pop(index)
log.write('The archived task n°'+str(index)+' have been erased.')
log.menuOut()
return True
else:
log.error('Unknow request!', False)
def print(self):
'''A method to print task information'''
print('\n\nStatus : '+self.status)
print('Path : '+self.path)
print('File Name : '+self.path.split('/').pop())
print('Scene : '+self.scene)
print('Preset : '+self.preset+'\n')
print('\033[4mActive Renderlayer :\033[0m')
self.info.scenes[self.scene].printActiveRenderlayer()
print('\n')
def renamePreset(self, old, new):
'''a method to rename used preset'''
if self.preset == old:
self.preset = new
def erasePreset(self, preset):
'''a method to stop using preset'''
if self.preset == preset:
self.preset = '[default]'
def getRow(self):
'''A method to get row to print task list'''
name = self.path.split('/').pop()
return columnLimit(' '+name, 25, 5)\
+columnLimit(' '+self.scene, 25, 5)\
+columnLimit(' '+self.preset, 25, 5)
def presetChoice(log, preferences):
'''A method to choose a preset'''
# preset choice
log.menuIn('Preset Choice')
log.print()
print('\n\n \033[4mPreset Choice :\033[0m\n\n')
confirm = input('Use «'+preferences.presets.default+'» default preset? (type anything else that y or yes to choose another one)')
if confirm in ['', 'y', 'yes']:
log.menuOut()
return '[default]'
else:
preset = preferences.presets.choose(log)
log.menuOut()
return preset
def editPreset(self, log, preferences):
'''A method to edit the preset used by the task'''
log.error('Warning : all change made to the preset will be effective for all task that use it…')
if self.preset == '[default]' :
name = preferences.presets.default
preset = preferences.presets.presets[name]
else:
name = self.preset
preset = preferences.presets.presets[name]
if type(preset) is Preset:
confirm = preset.menu(log, name, preferences.blenderVersion)
else:
confirm = preset.menu(log, name, preferences.presets)
if confirm:
savePreferences(preferences)
def copy(self):
xml = '<?xml version="1.0" encoding="UTF-8"?>\n'+self.toXml()
xml = xmlMod.fromstring(xml)
copy = Task(xml = xml)
copy.uid = uuid.uuid4().hex
return copy
def printRunMenu(self, index, count, log):
'''print current runninge state'''
log.print()
print('\n\nRun task n°'+str(index)+' of '+str(count)+' :\n\n')
if self.log is not None:
self.log.print()
log.runPrint()
def run(self, index, taskList, scriptPath, log, preferences):
'''A method to execute the task'''
log.menuIn('run Task '+str(index)+' from '+str(len(taskList.tasks)))
if self.log is None:
# task never have been run before
self.log = TaskLog(pref = preferences, task = self)
preferences.output.checkAndCreate(self, preferences, taskList)
self.printRunMenu(index, len(taskList.tasks), log)
metapreset = self.log.preset
if type(metapreset) is Preset:
if self.log.groups[0].remaining() > 0:
versions = { metapreset.engine.version : '[default]' }
else:
versions = {}
for group in self.log.groups:
if group.remaining() > 0:
if group.preset.engine.version in versions.keys():
versions[group.preset.engine.version].append(group.name)
else:
versions[group.preset.engine.version] = [group.name]
scripts = self.createTaskScript(scriptPath, preferences, versions, metapreset)
results = ''
for version in versions.keys():
try:
l = threading.Thread(target = self.socketAcceptClient,
args=(taskList, index, log))
l.start()
taskList.listenerThreads.append(l)
sub = subprocess.Popen(\
shlex.split(\
preferences.blenderVersion.getVersionPath(version)\
+' -b "'+self.path+'" -P "'\
+scripts[version]+'"'),\
stdout = subprocess.PIPE,\
stdin = subprocess.PIPE,\
stderr = subprocess.PIPE)
taskList.renderingSubprocess.append(sub)
result = sub.communicate()
taskList.renderingSubprocess.remove(sub)
results += result[0].decode()+result[1].decode()+'\n\n\n'
except FileNotFoundError:
log.write('\033[31mTask n°'+str(index)+' : Blender version call error! Try to verify the path of «'+version+'» blender version!\033[0m')
if taskList.runningMode in [taskList.UNTIL_GROUP_END,\
taskList.UNTIL_FRAME_END,\
taskList.STOP_NOW,\
taskList.STOP_FORCED]:
break
self.eraseTaskScript(scripts)
log.menuOut()
return True
def socketAcceptClient(self, taskList, index, log):
'''A method to manage client connexion when running'''
client = taskList.socket.accept()[0]
taskList.listenerSockets.append(
{
'socket':client,
'uid':self.uid
}
)
msg = ''
while taskList.runningMode < taskList.STOP_NOW:
msg += client.recv(1024).decode()
if msg == '':
time.sleep(1)
elif msg == self.uid+' VersionEnded EOS':
break
else:
msg = self.treatSocketMessage(msg, taskList, index, log)
client.close()
def treatSocketMessage(self, msg, taskList, index, log):
'''a method to interpret socket message'''
if msg[-4:] != ' EOS':
return msg
messages = msg.split(' EOS')
messages.pop()
for m in messages:
# normally, the message is to confirm the rendering of a frame, it must follow this sytaxe:
#uid action(group,frame,date,computingTime) EOS
#fc9b9d6fd2af4e0fb3f09066f9902f90 ConfirmFrame(groupe1,15,10:09:2014:10:30:40,11111111111111) EOS
uid = m[0:32]
action = m[33:m.find('(')]
info = m[46:-1]
if uid == self.uid and action == 'debugMsg':
log.write(info)
elif uid == self.uid and action == 'ConfirmFrame':
info = info.split(',')
group = info[0]
frame = int(info[1])
computingTime = float(info[3])
date = info[2].split(':')
date = datetime.datetime(
year = int(date[2]),
month = int(date[1]),
day = int(date[0]),
hour = int(date[3]),
minute = int(date[4]),
second = int(date[5])
)
self.log.getGroup(group).confirmFrame(frame, date, computingTime)
self.printRunMenu(index, len(taskList.tasks), log)
if messages[-1] == self.uid+' VersionEnded':
return messages[-1]+' EOS'
else:
return ''
def createTaskScript(self, scriptPath, preferences, versions, preset):
'''create a script for each blender versions to run tfhe task'''
start = '''#!/usr/bin/python3.4
# -*-coding:Utf-8 -*
''\'module to manage metapreset''\'
import sys
sys.path.append("'''+scriptPath+'''")
import xml.etree.ElementTree as xmlMod
from Preferences.Preferences import *
from Preferences.PresetList.Preset.Preset import *
from Preferences.PresetList.Preset.Metapreset import *
from TaskList.RenderingTask.RenderingTask import *
from TaskList.Task import *
preferences = Preferences( xml = xmlMod.fromstring(''\''''+preferences.toXml(False)+'''''\') )
task = Task( xml = xmlMod.fromstring(''\'<?xml version="1.0" encoding="UTF-8"?>\n'''+self.toXml()+'''''\'))
'''
end = '\nRenderingTask(task, preferences, groups)'
paths = {}
for v, g in versions.items():
script = start\
+'groups = ["'+('", "'.join(g) )+'"]\n'\
+end
paths[v] = scriptPath+'/TaskList/RenderingTask/TaskScripts/'+self.uid+'-'+v+'.py'
with open(paths[v],'w') as taskScriptFile:
taskScriptFile.write( script )
return paths
def eraseTaskScript(self, scripts):
'''erase Task Script files'''
for path in scripts.values():
os.remove(path)
def getUsefullGroup(self, groups, preferences):
'''return only usefull group from the list, excluding those who have no renderlayer in this task'''
renderlayers = self.info.scenes[self.scene].getActiveRenderlayers()
confirmed = []
for group in groups:
for RL in renderlayers:
if preferences.presets.renderlayers.groups[group].belongTo(RL.name):
confirmed.append(group)
break
return confirmed
| 25.321555 | 140 | 0.629431 |
import xml.etree.ElementTree as xmlMod
import os, uuid, subprocess, shlex, time, datetime, threading
from save import *
from usefullFunctions import *
from Preferences.PresetList.Preset.Preset import *
from TaskList.FileInfo.FileInfo import *
from TaskList.TaskLog.TaskLog import *
class Task:
def __init__(self, path = None, scene = None, preset = None,\
fileInfo = None, xml= None):
self.running = False
if xml is None:
self.defaultInit(path, scene, preset, fileInfo)
else:
self.fromXml(xml)
def defaultInit(self, path, scene, preset, fileInfo):
self.path = path
self.scene = scene
self.preset = preset
self.info = fileInfo
self.uid = uuid.uuid4().hex
self.log = None
self.status = 'waiting'
def fromXml(self, xml):
self.path = xml.get('path')
self.scene = xml.get('scene')
self.preset = xml.get('preset')
self.uid = xml.get('uid', uuid.uuid4().hex)
self.status = xml.get('status')
self.info = FileInfo(xml.find('fileInfo'))
node = xml.find('log')
if node is not None:
self.log = TaskLog(xml = node)
else:
self.log = None
def toXml(self):
xml = '<task path="'+XML.encode(self.path)+'" scene="'+XML.encode(self.scene)\
+'" preset="'+self.preset+'" uid="'+self.uid\
+'" status="'+self.status+'" >\n'\
+self.info.toXml()
if self.log is not None:
xml += self.log.toXml()
xml += '</task>\n'
return xml
def menu(self, log, index, tasks, preferences):
log.menuIn('Task n°'+str(index))
change = False
started = self.log is not None
if started:
menu = '''
Menu :
(TASK ALREADY STARTED : SOME OPTIONS IS NOT AVAILABLE!)
5- Change list row
6- Lock/Unlock task
7- Erase task
8- Copy task
9- See Rendering Log
0- Quit and save
'''
else:
menu = '''
Menu :
1- Change scene
2- Change preset
3- Edit preset
4- Active/desactive Renderlayer
5- Change list row
6- Lock/Unlock task
7- Erase task
8- Copy task
0- Quit and save
'''
while True:
log.print()
print('\n Edit Task n°'+str(index)+' :')
self.print()
print(menu)
choice= input('action : ').strip().lower()
if choice in ['0', 'q', 'quit', 'cancel']:
log.menuOut()
return change
elif choice == '1' and not started:
scene = self.info.sceneChoice(log, allChoice = False)
if scene is not None:
self.scene = scene[0]
log.write('Task n°'+str(index)+' : Scene set to «'+self.scene+'»')
change = True
elif choice == '2' and not started:
preset = Task.presetChoice(log, preferences)
if preset is not None :
self.preset = preset
log.write('Task n°'+str(index)+' : Preset set to «'+self.preset+'»')
change = True
elif choice == '3' and not started:
self.editPreset(log, preferences)
elif choice == '4' and not started:
confirm = self.info.scenes[self.scene].renderlayerActivator(log)
if confirm:
log.write('change task n°'+str(index)+' active renderlayer')
change = True
elif choice == '5':
confirm, select = tasks.move(log, [index])
if confirm:
change = True
index = select[0]
elif choice == '6':
if self.status in ['ready', 'pause']:
self.status = 'pendinglock'
change = True
log.write('Task n°'+str(index)+' locked')
elif self.status == 'waiting':
self.status = 'lock'
change = True
log.write('Task n°'+str(index)+' locked')
elif self.status == 'pendinglock':
self.status = 'pause'
change = True
log.write('Task n°'+str(index)+' unlocked')
elif self.status == 'lock':
self.status = 'waiting'
change = True
log.write('Task n°'+str(index)+' unlocked')
else:
log.error('Task n°'+str(index)+' is not lockable/unlockable')
elif choice == '7':
if tasks.remove(log, [index]):
log.menuOut()
log.write('Task n°'+str(index)+' removed')
return True
elif choice == '8':
new = self.copy()
new.status = 'waiting'
new.log = None
tasks.tasks.append(new)
log.write('a copy of the task n°'+str(index)+' have been added at the bottom of the task list')
change = True
elif choice == '9' and started:
self.log.menu(log, index)
else:
log.error('Unknow request!', False)
def menuArchive(self, log, index, tasks):
log.menuIn('Archived Task n°'+str(index))
change = False
while True:
log.print()
print('\n Task n°'+str(index)+' Log :')
self.print()
choice = input('''
Menu :
1- See Rendering Log
2- Copy Task In Rendering List
3- Erase Archived Task
0- Quit and save
action : ''').strip().lower()
if choice in ['0', 'q', 'quit', 'cancel']:
log.menuOut()
return change
elif choice == '1':
self.log.menu(log, index)
elif choice == '2':
new = self.copy()
new.status = 'waiting'
new.log = None
tasks.tasks.append(new)
log.write('A copy of the archived task n°'+str(index)+' have been added at the bottom of the pending task list.')
change = True
elif choice == '3':
conf = input('\n\nThe task gone be definitly erased. Confirm action (y) :').strip().lower()
if conf in ['y', 'yes']:
tasks.archive.pop(index)
log.write('The archived task n°'+str(index)+' have been erased.')
log.menuOut()
return True
else:
log.error('Unknow request!', False)
def print(self):
print('\n\nStatus : '+self.status)
print('Path : '+self.path)
print('File Name : '+self.path.split('/').pop())
print('Scene : '+self.scene)
print('Preset : '+self.preset+'\n')
print('\033[4mActive Renderlayer :\033[0m')
self.info.scenes[self.scene].printActiveRenderlayer()
print('\n')
def renamePreset(self, old, new):
if self.preset == old:
self.preset = new
def erasePreset(self, preset):
if self.preset == preset:
self.preset = '[default]'
def getRow(self):
name = self.path.split('/').pop()
return columnLimit(' '+name, 25, 5)\
+columnLimit(' '+self.scene, 25, 5)\
+columnLimit(' '+self.preset, 25, 5)
def presetChoice(log, preferences):
log.menuIn('Preset Choice')
log.print()
print('\n\n \033[4mPreset Choice :\033[0m\n\n')
confirm = input('Use «'+preferences.presets.default+'» default preset? (type anything else that y or yes to choose another one)')
if confirm in ['', 'y', 'yes']:
log.menuOut()
return '[default]'
else:
preset = preferences.presets.choose(log)
log.menuOut()
return preset
def editPreset(self, log, preferences):
log.error('Warning : all change made to the preset will be effective for all task that use it…')
if self.preset == '[default]' :
name = preferences.presets.default
preset = preferences.presets.presets[name]
else:
name = self.preset
preset = preferences.presets.presets[name]
if type(preset) is Preset:
confirm = preset.menu(log, name, preferences.blenderVersion)
else:
confirm = preset.menu(log, name, preferences.presets)
if confirm:
savePreferences(preferences)
def copy(self):
xml = '<?xml version="1.0" encoding="UTF-8"?>\n'+self.toXml()
xml = xmlMod.fromstring(xml)
copy = Task(xml = xml)
copy.uid = uuid.uuid4().hex
return copy
def printRunMenu(self, index, count, log):
log.print()
print('\n\nRun task n°'+str(index)+' of '+str(count)+' :\n\n')
if self.log is not None:
self.log.print()
log.runPrint()
def run(self, index, taskList, scriptPath, log, preferences):
log.menuIn('run Task '+str(index)+' from '+str(len(taskList.tasks)))
if self.log is None:
self.log = TaskLog(pref = preferences, task = self)
preferences.output.checkAndCreate(self, preferences, taskList)
self.printRunMenu(index, len(taskList.tasks), log)
metapreset = self.log.preset
if type(metapreset) is Preset:
if self.log.groups[0].remaining() > 0:
versions = { metapreset.engine.version : '[default]' }
else:
versions = {}
for group in self.log.groups:
if group.remaining() > 0:
if group.preset.engine.version in versions.keys():
versions[group.preset.engine.version].append(group.name)
else:
versions[group.preset.engine.version] = [group.name]
scripts = self.createTaskScript(scriptPath, preferences, versions, metapreset)
results = ''
for version in versions.keys():
try:
l = threading.Thread(target = self.socketAcceptClient,
args=(taskList, index, log))
l.start()
taskList.listenerThreads.append(l)
sub = subprocess.Popen(\
shlex.split(\
preferences.blenderVersion.getVersionPath(version)\
+' -b "'+self.path+'" -P "'\
+scripts[version]+'"'),\
stdout = subprocess.PIPE,\
stdin = subprocess.PIPE,\
stderr = subprocess.PIPE)
taskList.renderingSubprocess.append(sub)
result = sub.communicate()
taskList.renderingSubprocess.remove(sub)
results += result[0].decode()+result[1].decode()+'\n\n\n'
except FileNotFoundError:
log.write('\033[31mTask n°'+str(index)+' : Blender version call error! Try to verify the path of «'+version+'» blender version!\033[0m')
if taskList.runningMode in [taskList.UNTIL_GROUP_END,\
taskList.UNTIL_FRAME_END,\
taskList.STOP_NOW,\
taskList.STOP_FORCED]:
break
self.eraseTaskScript(scripts)
log.menuOut()
return True
def socketAcceptClient(self, taskList, index, log):
client = taskList.socket.accept()[0]
taskList.listenerSockets.append(
{
'socket':client,
'uid':self.uid
}
)
msg = ''
while taskList.runningMode < taskList.STOP_NOW:
msg += client.recv(1024).decode()
if msg == '':
time.sleep(1)
elif msg == self.uid+' VersionEnded EOS':
break
else:
msg = self.treatSocketMessage(msg, taskList, index, log)
client.close()
def treatSocketMessage(self, msg, taskList, index, log):
if msg[-4:] != ' EOS':
return msg
messages = msg.split(' EOS')
messages.pop()
for m in messages:
uid = m[0:32]
action = m[33:m.find('(')]
info = m[46:-1]
if uid == self.uid and action == 'debugMsg':
log.write(info)
elif uid == self.uid and action == 'ConfirmFrame':
info = info.split(',')
group = info[0]
frame = int(info[1])
computingTime = float(info[3])
date = info[2].split(':')
date = datetime.datetime(
year = int(date[2]),
month = int(date[1]),
day = int(date[0]),
hour = int(date[3]),
minute = int(date[4]),
second = int(date[5])
)
self.log.getGroup(group).confirmFrame(frame, date, computingTime)
self.printRunMenu(index, len(taskList.tasks), log)
if messages[-1] == self.uid+' VersionEnded':
return messages[-1]+' EOS'
else:
return ''
	def createTaskScript(self, scriptPath, preferences, versions, preset):
		'''Write one standalone rendering script per Blender version and
		return a {version: script_path} mapping. Each generated script
		re-creates the preferences and this task from inline XML, then runs
		RenderingTask on that version's renderlayer groups.
		NOTE(review): the `preset` parameter is unused in this body — confirm
		whether callers still need to pass it.'''
		# Shared script header; the escaped ''\' sequences emit literal
		# triple quotes inside the generated file.
		start = '''#!/usr/bin/python3.4
# -*-coding:Utf-8 -*
''\'module to manage metapreset''\'
import sys
sys.path.append("'''+scriptPath+'''")
import xml.etree.ElementTree as xmlMod
from Preferences.Preferences import *
from Preferences.PresetList.Preset.Preset import *
from Preferences.PresetList.Preset.Metapreset import *
from TaskList.RenderingTask.RenderingTask import *
from TaskList.Task import *
preferences = Preferences( xml = xmlMod.fromstring(''\''''+preferences.toXml(False)+'''''\') )
task = Task( xml = xmlMod.fromstring(''\'<?xml version="1.0" encoding="UTF-8"?>\n'''+self.toXml()+'''''\'))
'''
		end = '\nRenderingTask(task, preferences, groups)'
		paths = {}
		for v, g in versions.items():
			# Splice this version's group list between header and footer.
			script = start\
					+'groups = ["'+('", "'.join(g) )+'"]\n'\
					+end
			paths[v] = scriptPath+'/TaskList/RenderingTask/TaskScripts/'+self.uid+'-'+v+'.py'
			with open(paths[v],'w') as taskScriptFile:
				taskScriptFile.write( script )
		return paths
def eraseTaskScript(self, scripts):
for path in scripts.values():
os.remove(path)
def getUsefullGroup(self, groups, preferences):
renderlayers = self.info.scenes[self.scene].getActiveRenderlayers()
confirmed = []
for group in groups:
for RL in renderlayers:
if preferences.presets.renderlayers.groups[group].belongTo(RL.name):
confirmed.append(group)
break
return confirmed
| true | true |
f71bd874cf71759c3ef1a835fde21dc3e3e05f61 | 3,201 | py | Python | Finetune/cosface_finetune.py | yuhaoooo/FaceAdv | 73e27b7ca01243a9a3d115f5fabd1008b2afb34a | [
"MIT"
] | 1 | 2022-03-04T09:18:04.000Z | 2022-03-04T09:18:04.000Z | Finetune/cosface_finetune.py | yuhaoooo/FaceAdv | 73e27b7ca01243a9a3d115f5fabd1008b2afb34a | [
"MIT"
] | null | null | null | Finetune/cosface_finetune.py | yuhaoooo/FaceAdv | 73e27b7ca01243a9a3d115f5fabd1008b2afb34a | [
"MIT"
] | null | null | null | import os
import torch
import random
import numpy as np
import torch.optim as optim
from torch.utils.data import DataLoader
from torchvision import transforms, datasets
from module.units.cosface_module import CosFace
def accuracy(logits, y):
    """Mean top-1 accuracy: fraction of rows in `logits` whose argmax equals `y`."""
    predictions = torch.max(logits, dim=1).indices
    return predictions.eq(y).float().mean()
if __name__ == "__main__":
    # Seed every RNG in play so the split and the training run are reproducible.
    random.seed(117)
    np.random.seed(117)
    torch.manual_seed(117)
    torch.cuda.manual_seed(117)
    transform = transforms.Compose([
        transforms.Resize((112, 96)),
        transforms.ToTensor(),
        transforms.Normalize([0.5, 0.5, 0.5], [0.5, 0.5, 0.5])
    ])
    device = torch.device('cuda:0' if torch.cuda.is_available() else 'cpu')
    # Pretrained feature extractor plus a fresh 156-way classification head.
    cosface = CosFace(classnum=156, pretrained=r'..\Auxiliary\PretrainedFeatureExtractor\ACC99.28.pth').to(device)
    dataset_dir = r'..\Auxiliary\ClippedFaceBank'
    dataset = datasets.ImageFolder(
        dataset_dir, transform=transform)
    # Hold out 20% of the images for evaluation.
    len_imgs = int(len(dataset) * 0.2)
    train_dataset, test_dataset = torch.utils.data.random_split(dataset, [len(dataset) - len_imgs, len_imgs])
    batch_size = 32
    workers = 0 if os.name == 'nt' else 8  # no loader subprocesses on Windows
    epochs = 20
    train_loader = DataLoader(
        train_dataset,
        num_workers=workers,
        batch_size=batch_size,
        shuffle=True
    )
    test_loader = DataLoader(
        test_dataset,
        num_workers=workers,
        batch_size=1,
        shuffle=False
    )
    # Fine-tune only the classification head; the backbone stays frozen in eval mode.
    optimizer = optim.Adam(cosface.logits.parameters(), lr=1e-3)
    loss_fn = torch.nn.CrossEntropyLoss()
    cosface.backbone.eval()
    best_acc, best_state_dict = 0., {}
    for epoch in range(epochs):
        print('\nEpoch {}/{}'.format(epoch + 1, epochs))
        print('-' * 10)
        cosface.logits.train()
        loss = 0.0
        acc = 0.0
        for i_batch, (x, y) in enumerate(train_loader):
            x = x.to(device)
            y = y.to(device)
            optimizer.zero_grad()
            y_pred = cosface(x)
            loss_batch = loss_fn(y_pred, y)
            # update
            loss_batch.backward()
            optimizer.step()
            loss += loss_batch.detach().cpu().numpy()
            acc += accuracy(y_pred, y).detach().cpu().numpy()
        loss /= (i_batch + 1)
        acc /= (i_batch + 1)
        print('The train loss is {}, The accuracy is {}'.format(loss, acc))
        # Evaluation pass: no gradients are needed, so skip autograd bookkeeping.
        cosface.logits.eval()
        loss, acc = 0.0, 0.0
        with torch.no_grad():
            for i_batch, (x, y) in enumerate(test_loader):
                x = x.to(device)
                y = y.to(device)
                y_pred = cosface(x)
                loss_batch = loss_fn(y_pred, y)
                loss += loss_batch.detach().cpu().numpy()
                acc += accuracy(y_pred, y).detach().cpu().numpy()
        loss /= (i_batch + 1)
        acc /= (i_batch + 1)
        print('The test loss is {}, The accuracy is {}'.format(loss, acc))
        if best_acc < acc:
            best_acc = acc
            # Snapshot by value: state_dict() returns references to the live
            # parameters, so without copying, later epochs would overwrite the
            # "best" weights and the final save would just be the last model.
            best_state_dict = {k: v.detach().clone()
                               for k, v in cosface.state_dict().items()}
    os.makedirs(r'..\Auxiliary\PretrainedFaceRecognizer', exist_ok=True)
    torch.save(best_state_dict, r'..\Auxiliary\PretrainedFaceRecognizer\finetuned_cosface.pt')
import torch
import random
import numpy as np
import torch.optim as optim
from torch.utils.data import DataLoader
from torchvision import transforms, datasets
from module.units.cosface_module import CosFace
def accuracy(logits, y):
_, preds = torch.max(logits, 1)
return (preds == y).float().mean()
if __name__ == "__main__":
random.seed(117)
np.random.seed(117)
torch.manual_seed(117)
torch.cuda.manual_seed(117)
transform = transforms.Compose([
transforms.Resize((112, 96)),
transforms.ToTensor(),
transforms.Normalize([0.5, 0.5, 0.5], [0.5, 0.5, 0.5])
])
device = torch.device('cuda:0' if torch.cuda.is_available() else 'cpu')
cosface = CosFace(classnum=156, pretrained=r'..\Auxiliary\PretrainedFeatureExtractor\ACC99.28.pth').to(device)
dataset_dir = r'..\Auxiliary\ClippedFaceBank'
dataset = datasets.ImageFolder(
dataset_dir, transform=transform)
len_imgs = int(len(dataset) * 0.2)
train_dataset, test_dataset = torch.utils.data.random_split(dataset, [len(dataset) - len_imgs, len_imgs])
batch_size = 32
workers = 0 if os.name == 'nt' else 8
epochs = 20
train_loader = DataLoader(
train_dataset,
num_workers=workers,
batch_size=batch_size,
shuffle=True
)
test_loader = DataLoader(
test_dataset,
num_workers=workers,
batch_size=1,
shuffle=False
)
optimizer = optim.Adam(cosface.logits.parameters(), lr=1e-3)
loss_fn = torch.nn.CrossEntropyLoss()
cosface.backbone.eval()
best_acc, best_state_dict = 0., {}
for epoch in range(epochs):
print('\nEpoch {}/{}'.format(epoch + 1, epochs))
print('-' * 10)
cosface.logits.train()
loss = 0.0
acc = 0.0
for i_batch, (x, y) in enumerate(train_loader):
x = x.to(device)
y = y.to(device)
optimizer.zero_grad()
y_pred = cosface(x)
loss_batch = loss_fn(y_pred, y)
loss_batch.backward()
optimizer.step()
loss += loss_batch.detach().cpu().numpy()
acc += accuracy(y_pred, y).detach().cpu().numpy()
loss /= (i_batch + 1)
acc /= (i_batch + 1)
print('The train loss is {}, The accuracy is {}'.format(loss, acc))
cosface.logits.eval()
loss, acc = 0.0, 0.0
for i_batch, (x, y) in enumerate(test_loader):
x = x.to(device)
y = y.to(device)
y_pred = cosface(x)
loss_batch = loss_fn(y_pred, y)
loss += loss_batch.detach().cpu().numpy()
acc += accuracy(y_pred, y).detach().cpu().numpy()
loss /= (i_batch + 1)
acc /= (i_batch + 1)
print('The test loss is {}, The accuracy is {}'.format(loss, acc))
if best_acc < acc:
best_acc = acc
best_state_dict = cosface.state_dict()
os.makedirs(r'..\Auxiliary\PretrainedFaceRecognizer', exist_ok=True)
torch.save(best_state_dict, r'..\Auxiliary\PretrainedFaceRecognizer\finetuned_cosface.pt')
| true | true |
f71bd9829999b0a992bd60e9355172969776fbbe | 881 | py | Python | tests.py | gmr/ecs-pipeline-deploy | 1e3e93a2497f4697c01de093c73a5cd78eebad63 | [
"BSD-3-Clause"
] | 2 | 2018-08-13T17:48:58.000Z | 2018-10-29T20:10:11.000Z | tests.py | gmr/ecs-pipeline-deploy | 1e3e93a2497f4697c01de093c73a5cd78eebad63 | [
"BSD-3-Clause"
] | null | null | null | tests.py | gmr/ecs-pipeline-deploy | 1e3e93a2497f4697c01de093c73a5cd78eebad63 | [
"BSD-3-Clause"
] | 1 | 2018-08-09T18:34:58.000Z | 2018-08-09T18:34:58.000Z | # coding=utf-8
import unittest
from ecs_pipeline_deploy import cli
class TestImageParsing(unittest.TestCase):
    """Checks for cli.ECSPipeline.parse_image's (registry, name, tag) splitting."""

    # image reference -> expected (registry, image name, tag) triple
    IMAGES = {
        'alpine': (None, 'alpine', 'latest'),
        'alpine:3.7': (None, 'alpine', '3.7'),
        'docker.aweber.io/_/alpine:3.7':
            ('docker.aweber.io', '_/alpine', '3.7'),
        'docker.aweber.io/pse/anabroker:0.1.0':
            ('docker.aweber.io', 'pse/anabroker', '0.1.0'),
        'docker.aweber.io:8000/pse/anabroker:latest':
            ('docker.aweber.io:8000', 'pse/anabroker', 'latest')
    }

    def test_parsing_expectations(self):
        for image, expected in self.IMAGES.items():
            with self.subTest(image=image):
                self.assertEqual(cli.ECSPipeline.parse_image(image), expected)

    def test_parsing_exceptions(self):
        self.assertRaises(ValueError, cli.ECSPipeline.parse_image, None)
| 32.62963 | 64 | 0.61521 |
import unittest
from ecs_pipeline_deploy import cli
class TestImageParsing(unittest.TestCase):
IMAGES = {
'alpine': (None, 'alpine', 'latest'),
'alpine:3.7': (None, 'alpine', '3.7'),
'docker.aweber.io/_/alpine:3.7':
('docker.aweber.io', '_/alpine', '3.7'),
'docker.aweber.io/pse/anabroker:0.1.0':
('docker.aweber.io', 'pse/anabroker', '0.1.0'),
'docker.aweber.io:8000/pse/anabroker:latest':
('docker.aweber.io:8000', 'pse/anabroker', 'latest')
}
def test_parsing_expectations(self):
for image, expectation in self.IMAGES.items():
result = cli.ECSPipeline.parse_image(image)
self.assertEqual(result, expectation)
def test_parsing_exceptions(self):
with self.assertRaises(ValueError):
cli.ECSPipeline.parse_image(None)
| true | true |
f71bd9cfe2794743457c4c581ce15146ca56acf0 | 3,438 | py | Python | trac_captcha/release.py | FelixSchwarz/trac-captcha | 90eb4d3b4dae297e23f09a99a91bcfabcd099dc6 | [
"MIT"
] | 1 | 2020-10-23T14:59:42.000Z | 2020-10-23T14:59:42.000Z | trac_captcha/release.py | FelixSchwarz/trac-captcha | 90eb4d3b4dae297e23f09a99a91bcfabcd099dc6 | [
"MIT"
] | null | null | null | trac_captcha/release.py | FelixSchwarz/trac-captcha | 90eb4d3b4dae297e23f09a99a91bcfabcd099dc6 | [
"MIT"
] | null | null | null | # -*- coding: UTF-8 -*-
# Distribution metadata constants for the TracCaptcha Trac plugin.
name = 'TracCaptcha'
version = '0.4dev'
description = 'pluggable captcha infrastructure for Trac with reCAPTCHA included'
long_description = '''
TracCaptcha is a Trac plugin to embed a captcha in the ticket page in addition
to Trac's regular permission checks so that spammers are kept out.
**"It just works":** Installation and configuration is very simple, just
install the egg and put two configuration options in your trac.ini. No
database changes required.
**Batteries included:** The popular reCAPTCHA system is supported out of the box.
Technically it's a plugin - if you don't like it you're free to use any other
plugin while still leverage the benefits from the general captcha
infrastructure.
**Does not annoy users:** After the user entered the captcha once, he does not have
to solve the captcha again for the same ticket when he just clicks 'preview'.
Also you can configure exempt certain users or groups (e.g. 'all authenticated
users') from the captchas just by using Trac's permission system.
**Easy to extend:** Protecting an additional page with a captcha is very
simple. Implementing captchas for the ticket module took only 20 lines of code!
Captchas for the DiscussionPlugin needed 21 lines of code!
**Easy to write custom captchas:** If you don't like reCAPTCHA, you can still
use the generic infrastructure with all its features: You implement the code to
generate the captcha and validate the user's input. TracCaptcha will take care
of displaying your plugin in all supported pages!
Changelog
******************************
0.?.? (??.??.2012)
====================
- fixed a compatibility issue for Python 2.4 with separately installed
hashlib module
- reCAPTCHA: sometimes scripts were included with HTTP even though the page was
served with HTTPS
0.3.1 (30.03.2011)
====================
- add config option to omit noscript section for recaptcha to enforce Javascript
support for users
0.3 (25.03.2011)
====================
- add more debug logging about CAPTCHA display and accepted/rejected solutions
to identify better how spammers managed to file a spam ticket
0.2.2 (04.02.2011)
====================
- fix tests on current Trac trunk (0.13dev)
- fix: TICKET_ADMIN looses other ticket-related permissions on Trac < 0.13
thanks to Anton V. for reporting
0.2.1 (10.11.2010)
====================
- fix "installation" as egg file in Trac plugins folder
0.2 (10.07.2010)
====================
- integration in 3rd party Trac plugins: TracDiscussionPlugin and
AccountManager (registration only)
- reCAPTCHA: select widget theme via trac.ini (requires simplejson for
Python 2.3-2.5)
- reCAPTCHA: display the widget in the user's locale (if translation is provided
by the reCAPTCHA service)
- reCAPTCHA: use HTTPS to include script files if Trac page was served with
HTTPS
- reCAPTCHA: show link for reCAPTCHA signup if no keys configured
- reCAPTCHA: use new Google URLs
0.1 (25.06.2010)
==================
- initial release
'''
author = 'Felix Schwarz'
author_email = 'felix.schwarz@oss.schwarz.eu'
url = 'http://www.schwarz.eu/opensource/projects/trac_captcha/'
# Release tarball URL, interpolated from the name/version constants at the
# top of this module.
download_url = 'http://www.schwarz.eu/opensource/projects/trac_captcha/download/%(version)s/%(name)s-%(version)s.tar.gz' % dict(name=name, version=version)
# prefix it with '_' so the symbol is not passed to setuptools.setup()
_copyright = u'2010 Felix Schwarz'
license='MIT'
| 38.629213 | 155 | 0.726294 |
name = 'TracCaptcha'
version = '0.4dev'
description = 'pluggable captcha infrastructure for Trac with reCAPTCHA included'
long_description = '''
TracCaptcha is a Trac plugin to embed a captcha in the ticket page in addition
to Trac's regular permission checks so that spammers are kept out.
**"It just works":** Installation and configuration is very simple, just
install the egg and put two configuration options in your trac.ini. No
database changes required.
**Batteries included:** The popular reCAPTCHA system is supported out of the box.
Technically it's a plugin - if you don't like it you're free to use any other
plugin while still leverage the benefits from the general captcha
infrastructure.
**Does not annoy users:** After the user entered the captcha once, he does not have
to solve the captcha again for the same ticket when he just clicks 'preview'.
Also you can configure exempt certain users or groups (e.g. 'all authenticated
users') from the captchas just by using Trac's permission system.
**Easy to extend:** Protecting an additional page with a captcha is very
simple. Implementing captchas for the ticket module took only 20 lines of code!
Captchas for the DiscussionPlugin needed 21 lines of code!
**Easy to write custom captchas:** If you don't like reCAPTCHA, you can still
use the generic infrastructure with all its features: You implement the code to
generate the captcha and validate the user's input. TracCaptcha will take care
of displaying your plugin in all supported pages!
Changelog
******************************
0.?.? (??.??.2012)
====================
- fixed a compatibility issue for Python 2.4 with separately installed
hashlib module
- reCAPTCHA: sometimes scripts were included with HTTP even though the page was
served with HTTPS
0.3.1 (30.03.2011)
====================
- add config option to omit noscript section for recaptcha to enforce Javascript
support for users
0.3 (25.03.2011)
====================
- add more debug logging about CAPTCHA display and accepted/rejected solutions
to identify better how spammers managed to file a spam ticket
0.2.2 (04.02.2011)
====================
- fix tests on current Trac trunk (0.13dev)
- fix: TICKET_ADMIN looses other ticket-related permissions on Trac < 0.13
thanks to Anton V. for reporting
0.2.1 (10.11.2010)
====================
- fix "installation" as egg file in Trac plugins folder
0.2 (10.07.2010)
====================
- integration in 3rd party Trac plugins: TracDiscussionPlugin and
AccountManager (registration only)
- reCAPTCHA: select widget theme via trac.ini (requires simplejson for
Python 2.3-2.5)
- reCAPTCHA: display the widget in the user's locale (if translation is provided
by the reCAPTCHA service)
- reCAPTCHA: use HTTPS to include script files if Trac page was served with
HTTPS
- reCAPTCHA: show link for reCAPTCHA signup if no keys configured
- reCAPTCHA: use new Google URLs
0.1 (25.06.2010)
==================
- initial release
'''
author = 'Felix Schwarz'
author_email = 'felix.schwarz@oss.schwarz.eu'
url = 'http://www.schwarz.eu/opensource/projects/trac_captcha/'
download_url = 'http://www.schwarz.eu/opensource/projects/trac_captcha/download/%(version)s/%(name)s-%(version)s.tar.gz' % dict(name=name, version=version)
_copyright = u'2010 Felix Schwarz'
license='MIT'
| true | true |
f71bdaca1e11f3f5800e7701806de0a2ef37a0d7 | 4,092 | py | Python | setup.py | greedyuser/kur | ba6588ebfa5dec66d1e462c180618cc115fd38ef | [
"Apache-2.0"
] | 867 | 2016-12-05T20:24:23.000Z | 2022-02-18T09:07:14.000Z | setup.py | greedyuser/kur | ba6588ebfa5dec66d1e462c180618cc115fd38ef | [
"Apache-2.0"
] | 90 | 2017-01-14T22:46:23.000Z | 2021-02-09T13:32:27.000Z | setup.py | greedyuser/kur | ba6588ebfa5dec66d1e462c180618cc115fd38ef | [
"Apache-2.0"
] | 135 | 2017-01-18T19:21:20.000Z | 2022-01-24T16:57:59.000Z | """
Copyright 2016 Deepgram
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
###############################################################################
from __future__ import print_function
import sys
###############################################################################
def error_message(msg):
	""" Print a centered, boxed error message to stderr and exit with status 1.

	Each entry in `lines` is centered to a fixed width; `msg` appears between
	the banner and the pointer to the troubleshooting documentation.
	"""
	line_width = 60
	format_spec = '{{: ^{width}}}'.format(width=line_width)
	lines = [
		'', '',
		'='*line_width, '',
		'ERROR', '',
		# The trailing comma after '' was missing here, so the empty separator
		# was implicitly concatenated with the next string literal and the
		# blank line after the message was silently lost.
		msg, '',
		'See our troubleshooting page to get started:', '',
		'https://kur.deepgram.com/troubleshooting.html#installation', '',
		'='*line_width, '',
		"Uh, oh. There was an error. Look up there ^^^^ and you'll be",
		'training awesome models in no time!'
	]
	for line in lines:
		print(format_spec.format(line), file=sys.stderr)
	sys.exit(1)
###############################################################################
# Fail fast, with the formatted stderr banner, on unsupported interpreters.
if sys.version_info < (3, 4):
	error_message('Kur requires Python 3.4 or later.')
###############################################################################
# pylint: disable=wrong-import-position
import os
from setuptools import setup, find_packages
# pylint: enable=wrong-import-position
################################################################################
def readme():
	""" Return the package long description: README.rst trimmed to the
		region delimited by the package_readme marker tokens.
	"""
	with open('README.rst', 'rb') as fh:
		text = fh.read().decode('utf-8')
	# Cut everything from the end marker onwards.
	end_token = '.. package_readme_ends_here'
	end = text.find(end_token)
	if end >= 0:
		text = text[:end]
	# Cut everything up to and including the start marker.
	start_token = '.. package_readme_starts_here'
	start = text.find(start_token)
	if start >= 0:
		text = text[start+len(start_token):]
	# Drop each ignore marker together with the single paragraph after it.
	kept = []
	drop_next = False
	for paragraph in text.split('\n\n'):
		if not paragraph:
			continue
		if paragraph.strip().startswith('.. package_readme_ignore'):
			drop_next = True
		elif drop_next:
			drop_next = False
		else:
			kept.append(paragraph)
	return '\n\n'.join(kept)
################################################################################
def get_version():
	""" Return the version string declared in kur/version.py.

		The file is scanned textually rather than imported.
	"""
	version_py = os.path.join(os.path.dirname(__file__), 'kur', 'version.py')
	with open(version_py, 'r') as fh:
		for line in fh:
			if not line.startswith('__version__'):
				continue
			return line.split('=')[-1].strip().replace('"', '')
	raise ValueError('Failed to parse version from: {}'.format(version_py))
################################################################################
setup(
	# Package information
	name='kur',
	version=get_version(),
	description='Descriptive deep learning',
	long_description=readme(),
	keywords='deep learning',
	# No trove classifiers declared.
	classifiers=[
	],
	# Author information
	url='https://github.com/deepgram/kur',
	author='Adam Sypniewski',
	author_email='adam@deepgram.com',
	license='Apache Software License '
		'(http://www.apache.org/licenses/LICENSE-2.0)',
	# What is packaged here.
	packages=find_packages(),
	# What to include.
	package_data={
		'': ['*.txt', '*.rst', '*.md']
	},
	# Dependencies
	install_requires=[
		'pyyaml>=3.12',
		'jinja2>=2.8',
		'numpy>=1.11.2',
		'tqdm>=4.10.0',
		# Keras - the default backend (with Theano)
		'keras>=1.2.2',
		'theano>=0.8.2',
		'scipy>=0.18.1',
		'python-magic>=0.4.12',
		'pydub>=0.16.6',
		'python_speech_features>=0.4',
		'matplotlib>=1.5.3'
	],
	dependency_links=[
	],
	# Testing
	test_suite='tests',
	tests_require=[
		'pytest',
		'tensorflow'
	],
	setup_requires=['pytest-runner'],
	# Console entry point: installs the `kur` command.
	entry_points={
		'console_scripts' : ['kur=kur.__main__:main']
	},
	zip_safe=False
)
| 25.575 | 80 | 0.585533 | true | true | |
f71bdbabe6cef239bb72385c05db3589ac1298ec | 1,226 | py | Python | redash/cli/queries.py | zero1number/redash | caabc4afa4e60e273782a46d84099857821c6500 | [
"BSD-2-Clause"
] | 20,680 | 2015-11-16T15:38:37.000Z | 2022-03-31T21:43:43.000Z | redash/cli/queries.py | zero1number/redash | caabc4afa4e60e273782a46d84099857821c6500 | [
"BSD-2-Clause"
] | 3,934 | 2015-11-16T14:46:49.000Z | 2022-03-31T13:22:31.000Z | redash/cli/queries.py | zero1number/redash | caabc4afa4e60e273782a46d84099857821c6500 | [
"BSD-2-Clause"
] | 4,147 | 2015-11-17T15:57:23.000Z | 2022-03-31T11:57:43.000Z | from click import argument
from flask.cli import AppGroup
from sqlalchemy.orm.exc import NoResultFound
# Flask CLI command group; the commands below attach via @manager.command().
manager = AppGroup(help="Queries management commands.")
@manager.command()
@argument("query_id")
@argument("tag")
def add_tag(query_id, tag):
    """CLI command: attach `tag` to the query with id `query_id`."""
    from redash import models

    try:
        query = models.Query.get_by_id(int(query_id))
    except NoResultFound:
        print("Query not found.")
        exit(1)

    tag_list = query.tags if query.tags is not None else []
    tag_list.append(tag)
    # De-duplicate before persisting.
    query.tags = list(set(tag_list))

    models.db.session.add(query)
    models.db.session.commit()
    print("Tag added.")
@manager.command()
@argument("query_id")
@argument("tag")
def remove_tag(query_id, tag):
    """CLI command: detach `tag` from the query with id `query_id`."""
    from redash import models

    try:
        query = models.Query.get_by_id(int(query_id))
    except NoResultFound:
        print("Query not found.")
        exit(1)

    tag_list = query.tags
    if tag_list is None:
        print("Tag is empty.")
        exit(1)

    if tag not in tag_list:
        print("Tag not found.")
        exit(1)
    tag_list.remove(tag)

    # De-duplicate before persisting.
    query.tags = list(set(tag_list))
    models.db.session.add(query)
    models.db.session.commit()
    print("Tag removed.")
| 18.861538 | 55 | 0.620718 | from click import argument
from flask.cli import AppGroup
from sqlalchemy.orm.exc import NoResultFound
manager = AppGroup(help="Queries management commands.")
@manager.command()
@argument("query_id")
@argument("tag")
def add_tag(query_id, tag):
from redash import models
query_id = int(query_id)
try:
q = models.Query.get_by_id(query_id)
except NoResultFound:
print("Query not found.")
exit(1)
tags = q.tags
if tags is None:
tags = []
tags.append(tag)
q.tags = list(set(tags))
models.db.session.add(q)
models.db.session.commit()
print("Tag added.")
@manager.command()
@argument("query_id")
@argument("tag")
def remove_tag(query_id, tag):
from redash import models
query_id = int(query_id)
try:
q = models.Query.get_by_id(query_id)
except NoResultFound:
print("Query not found.")
exit(1)
tags = q.tags
if tags is None:
print("Tag is empty.")
exit(1)
try:
tags.remove(tag)
except ValueError:
print("Tag not found.")
exit(1)
q.tags = list(set(tags))
models.db.session.add(q)
models.db.session.commit()
print("Tag removed.")
| true | true |
f71bdc17b0e03a9554e767de798e80d4152e68b0 | 4,093 | py | Python | scripts/mk_copy_coords.py | diogomart/Meeko | 8af7466fd45a5d5de00a2d188ee3d4531b8372a5 | [
"Apache-2.0"
] | 4 | 2021-08-04T07:25:48.000Z | 2021-09-22T09:57:35.000Z | scripts/mk_copy_coords.py | diogomart/Meeko | 8af7466fd45a5d5de00a2d188ee3d4531b8372a5 | [
"Apache-2.0"
] | 4 | 2021-08-16T19:05:19.000Z | 2021-10-04T22:21:35.000Z | scripts/mk_copy_coords.py | diogomart/Meeko | 8af7466fd45a5d5de00a2d188ee3d4531b8372a5 | [
"Apache-2.0"
] | 2 | 2021-09-18T12:10:12.000Z | 2021-09-22T06:13:02.000Z | #!/usr/bin/env python
# -*- coding: utf-8 -*-
#
#
import argparse
import os
import sys
from rdkit import Chem
from rdkit.six import StringIO
from meeko import PDBQTMolecule
def cmd_lineparser():
    """Build and parse the command line for copying docked coordinates back
    into the original molecule file format."""
    parser = argparse.ArgumentParser(description='Copy atom coordinates from PDBQT (or DLG) file \
                                                    to original molecule file format (SDF or MOL2)')
    parser.add_argument(dest='docking_results_filename',
                        action='store', help='Docking output file to get coordinates. Either a PDBQT \
                        file from Vina or a DLG file from AD-GPU.')
    parser.add_argument('-i', '--original_input', dest='template_filename',
                        action='store', help='Template molecule file, i.e. the original file that was \
                        used to prepare the PDBQT filename (hopefully SDF). If no template is provided, \
                        the SMILES string in the PDBQT remarks will be used to generate an SDF file.')
    parser.add_argument('-o', '--output_filename', dest='output_filename',
                        action='store', help='Output molecule filename. If not specified, suffix _docked is \
                        added to the filename based on the input molecule file, and using the same \
                        molecule file format')
    parser.add_argument('-s', '--suffix', dest='suffix_name', default='_docked',
                        action='store', help='Add suffix to output filename if -o/--output_filename \
                        not specified. WARNING: If specified as empty string (\'\'), this will overwrite \
                        the original molecule input file (default: _docked).')
    # Bare '-'/'--' flags select printing to STDOUT instead of writing a file.
    parser.add_argument('-', '--', dest='redirect_stdout', action='store_true',
                        help='do not write file, redirect output to STDOUT. Arguments -o/--output_filename \
                        is ignored.')
    return parser.parse_args()
if __name__ == '__main__':
    args = cmd_lineparser()
    docking_results_filename = args.docking_results_filename
    template_filename = args.template_filename
    output_filename = args.output_filename
    suffix_name = args.suffix_name
    redirect_stdout = args.redirect_stdout
    output_string = ''
    # AD-GPU writes .dlg files; Vina writes plain PDBQT (per the CLI help).
    is_dlg = docking_results_filename.endswith('.dlg')
    pdbqt_mol = PDBQTMolecule.from_file(docking_results_filename, is_dlg=is_dlg, skip_typing=True)
    if template_filename is not None: # OBMol from template_filename
        # Output format follows the explicit output name when given,
        # otherwise the template file's extension.
        if output_filename is not None:
            output_format = os.path.splitext(output_filename)[1][1:]
        else:
            output_format = os.path.splitext(template_filename)[1][1:]
        # NOTE(review): `ob` and `obutils` are used below but never imported
        # in this file, so this branch raises NameError as written —
        # presumably an Open Babel import was removed; confirm before relying
        # on -i/--original_input.
        conv = ob.OBConversion()
        success = conv.SetOutFormat(output_format)
        if not success:
            raise RuntimeError('Input molecule file format %s not recognized by OpenBabel.' % output_format)
        ori_obmol = obutils.load_molecule_from_file(template_filename)
        for pose in pdbqt_mol:
            copy_obmol = ob.OBMol(ori_obmol) # connectivity may be corrupted by removing and adding Hs multiple times
            pose.copy_coordinates_to_obmol(copy_obmol)
            output_string += conv.WriteString(copy_obmol)
    else: # RDKit mol from SMILES in docking output PDBQT remarks
        if pdbqt_mol._pose_data['smiles'] is None:
            msg = "\n\n \"REMARK SMILES\" not found in %s.\n" % docking_results_filename
            msg += " Consider using -i/--original_input\n"
            raise RuntimeError(msg)
        # Serialize every pose into one multi-record SDF string.
        sio = StringIO()
        f = Chem.SDWriter(sio)
        for pose in pdbqt_mol:
            rdmol = pose.export_rdkit_mol()
            f.write(rdmol)
        f.close()
        output_string += sio.getvalue()
        output_format = 'sdf'
    if not redirect_stdout:
        if output_filename is None:
            # Derive "<input stem><suffix>.<format>" next to the input file.
            output_filename = '%s%s.%s' % (os.path.splitext(docking_results_filename)[0], suffix_name, output_format)
        # NOTE(review): this file handle is never closed explicitly; it relies
        # on interpreter shutdown to flush and close.
        print(output_string, file=open(output_filename, 'w'))
    else:
        print(output_string)
| 46.511364 | 117 | 0.636697 |
import argparse
import os
import sys
from rdkit import Chem
from rdkit.six import StringIO
from meeko import PDBQTMolecule
def cmd_lineparser():
parser = argparse.ArgumentParser(description='Copy atom coordinates from PDBQT (or DLG) file \
to original molecule file format (SDF or MOL2)')
parser.add_argument(dest='docking_results_filename',
action='store', help='Docking output file to get coordinates. Either a PDBQT \
file from Vina or a DLG file from AD-GPU.')
parser.add_argument('-i', '--original_input', dest='template_filename',
action='store', help='Template molecule file, i.e. the original file that was \
used to prepare the PDBQT filename (hopefully SDF). If no template is provided, \
the SMILES string in the PDBQT remarks will be used to generate an SDF file.')
parser.add_argument('-o', '--output_filename', dest='output_filename',
action='store', help='Output molecule filename. If not specified, suffix _docked is \
added to the filename based on the input molecule file, and using the same \
molecule file format')
parser.add_argument('-s', '--suffix', dest='suffix_name', default='_docked',
action='store', help='Add suffix to output filename if -o/--output_filename \
not specified. WARNING: If specified as empty string (\'\'), this will overwrite \
the original molecule input file (default: _docked).')
parser.add_argument('-', '--', dest='redirect_stdout', action='store_true',
help='do not write file, redirect output to STDOUT. Arguments -o/--output_filename \
is ignored.')
return parser.parse_args()
if __name__ == '__main__':
args = cmd_lineparser()
docking_results_filename = args.docking_results_filename
template_filename = args.template_filename
output_filename = args.output_filename
suffix_name = args.suffix_name
redirect_stdout = args.redirect_stdout
output_string = ''
is_dlg = docking_results_filename.endswith('.dlg')
pdbqt_mol = PDBQTMolecule.from_file(docking_results_filename, is_dlg=is_dlg, skip_typing=True)
if template_filename is not None:
if output_filename is not None:
output_format = os.path.splitext(output_filename)[1][1:]
else:
output_format = os.path.splitext(template_filename)[1][1:]
conv = ob.OBConversion()
success = conv.SetOutFormat(output_format)
if not success:
raise RuntimeError('Input molecule file format %s not recognized by OpenBabel.' % output_format)
ori_obmol = obutils.load_molecule_from_file(template_filename)
for pose in pdbqt_mol:
copy_obmol = ob.OBMol(ori_obmol)
pose.copy_coordinates_to_obmol(copy_obmol)
output_string += conv.WriteString(copy_obmol)
else:
if pdbqt_mol._pose_data['smiles'] is None:
msg = "\n\n \"REMARK SMILES\" not found in %s.\n" % docking_results_filename
msg += " Consider using -i/--original_input\n"
raise RuntimeError(msg)
sio = StringIO()
f = Chem.SDWriter(sio)
for pose in pdbqt_mol:
rdmol = pose.export_rdkit_mol()
f.write(rdmol)
f.close()
output_string += sio.getvalue()
output_format = 'sdf'
if not redirect_stdout:
if output_filename is None:
output_filename = '%s%s.%s' % (os.path.splitext(docking_results_filename)[0], suffix_name, output_format)
print(output_string, file=open(output_filename, 'w'))
else:
print(output_string)
| true | true |
f71bdcb3835402e5205a96a5e1e0df7c469df44e | 160 | py | Python | bidfx/__init__.py | bidfx/bidfx-api-py | 6b5e2c5efaa547b2d97a5556ef8d21d1de807f68 | [
"Apache-2.0"
] | 3 | 2020-04-29T09:19:56.000Z | 2021-03-08T11:12:05.000Z | bidfx/__init__.py | bidfx/bidfx-api-py | 6b5e2c5efaa547b2d97a5556ef8d21d1de807f68 | [
"Apache-2.0"
] | 3 | 2020-03-08T21:54:02.000Z | 2021-02-02T22:33:51.000Z | bidfx/__init__.py | bidfx/bidfx-api-py | 6b5e2c5efaa547b2d97a5556ef8d21d1de807f68 | [
"Apache-2.0"
] | 2 | 2020-06-13T10:52:18.000Z | 2022-03-02T17:29:45.000Z | from .exceptions import *
from .pricing import *
from .session import *
# Flat public namespace: everything each submodule exports via its __all__.
__all__ = session.__all__ + pricing.__all__ + exceptions.__all__
# Package version string.
__version__ = "1.0.2"
| 22.857143 | 64 | 0.75 | from .exceptions import *
from .pricing import *
from .session import *
__all__ = session.__all__ + pricing.__all__ + exceptions.__all__
__version__ = "1.0.2"
| true | true |
f71bdcb6632ab0a396bd37cc7b250ad677eaebe8 | 12,992 | py | Python | examples/PPO_super_mario_bros/env.py | hybug/RL_Lab | 0748e143a0fb60b9912ca28fbebc25e8f97a2fe4 | [
"Unlicense"
] | 3 | 2020-12-31T02:20:15.000Z | 2021-11-16T02:26:57.000Z | examples/PPO_super_mario_bros/env.py | hybug/RL_Lab | 0748e143a0fb60b9912ca28fbebc25e8f97a2fe4 | [
"Unlicense"
] | null | null | null | examples/PPO_super_mario_bros/env.py | hybug/RL_Lab | 0748e143a0fb60b9912ca28fbebc25e8f97a2fe4 | [
"Unlicense"
] | null | null | null | '''
Author: hanyu
Date: 2020-11-06 13:04:12
LastEditTime: 2021-01-09 09:07:08
LastEditors: hanyu
Description: environment
FilePath: /test_ppo/examples/PPO_super_mario_bros/env.py
'''
import logging
import numpy as np
from collections import namedtuple
# todo, to common
def padding(input, seqlen, dtype):
    """Zero-pad `input` along axis 0 up to `seqlen` rows.

    Returns the array unchanged (as `dtype`) when it is already at least
    `seqlen` long; otherwise appends zero rows shaped like the first row.
    """
    arr = np.array(input, dtype=dtype)
    length = arr.shape[0]
    if length >= seqlen:
        return arr
    zero_row = np.zeros_like(arr[0:1], dtype=dtype)
    filler = np.tile(zero_row, [seqlen - length] + [1] * (arr.ndim - 1))
    return np.concatenate([arr, filler], axis=0)
# One rollout segment: states, actions, action logits, rewards, generalized
# advantage estimates (see utils.get_gaes), current value estimates, and the
# network input state recorded at collection time.
Seg = namedtuple("Seg", ["s", "a", "a_logits",
                         "r", "gaes", "v_cur", "state_in"])
def _warp_env():
    """Factory that builds and returns the ``Envs`` class.

    The heavy game dependencies (gym_super_mario_bros, nes_py, PIL) are
    imported lazily inside this function so importing the module itself
    does not require them.
    """
    import random
    from utils.get_gaes import get_gaes
    import gym_super_mario_bros
    from PIL import Image
    from gym_super_mario_bros.actions import SIMPLE_MOVEMENT, COMPLEX_MOVEMENT
    from nes_py.wrappers import JoypadSpace

    class Env(object):
        """
        Raw single environment of one Super Mario Bros game.
        """

        def __init__(self, act_space, act_repeats, frames, state_size, burn_in, seqlen, game):
            '''
            description: init basic params settings.
            param {
                act_space: size of the agent's action space (7 or 12).
                act_repeats: list of candidate action-repeat counts; one is
                    sampled per repeat cycle.
                frames: number of stacked frames for each state.
                state_size: state_size calculated in build_policy_evaluator().
                burn_in: sequence length of each burn-in (dropped) segment.
                seqlen: sequence length of each training segment.
                game: gym game id string.
            }
            return {None}
            '''
            self.act_space = act_space
            self.act_repeats = act_repeats
            self.act_repeat = random.choice(self.act_repeats)
            self.frames = frames
            self.state_size = state_size
            self.game = game
            self.burn_in = burn_in
            self.seqlen = seqlen
            self.max_pos = -10000
            self.count = 0
            # make gym env from gym_super_mario_bros
            env = gym_super_mario_bros.make(game)
            # wrap the raw env through JoypadSpace according to act_space
            if self.act_space == 7:
                self.env = JoypadSpace(env, SIMPLE_MOVEMENT)
            elif self.act_space == 12:
                self.env = JoypadSpace(env, COMPLEX_MOVEMENT)
            # resize the output image to 84*84 & normalize the pixels
            # input: (240, 256, 3)
            # output: (84, 84, 1)
            s_t = self.resize_image(self.env.reset())
            # stack the initial frame `frames` times
            # output: (84, 84, frames)
            self.s_t = np.tile(s_t, [1, 1, frames])
            # trajectory buffer of states
            self.s = [self.s_t]
            # current action (scalar in [0, act_space))
            self.a_t = random.randint(0, act_space - 1)
            self.a = [self.a_t]
            # action logits, shape per step: (act_space,)
            self.a_logits = []
            self.r = [0]
            self.pos = []
            self.v_cur = []
            # shape decided according to build_policy_evaluator()
            state_in = np.zeros(self.state_size, dtype=np.float32)
            # recurrent state trajectory buffer
            self.state_in = [state_in]
            self.done = False

        def step(self, a, a_logits, v_cur, state_in, force=False):
            '''
            description: step function
            param {
                a: step action
                a_logits: action logits
                v_cur: current value estimate
                state_in: recurrent state
                force: force-flush flag
            }
            return {
                segs: list of ["s", "a", "a_logits", "r", "gaes", "v_cur", "state_in"]
            }
            '''
            # repeat the last action or adopt the new action
            # according to the sampled act_repeat
            self.count += 1
            if self.count % self.act_repeat == 0:
                self.a_t = a
                self.count = 0
                self.act_repeat = random.choice(self.act_repeats)
            # step the action twice (2-frame skip) and average the rewards,
            # unless the first sub-step already terminated the episode
            gs_t1, gr_t, gdone, ginfo = self.env.step(self.a_t)
            if not gdone:
                s_t1, r_t, done, info = self.env.step(self.a_t)
                r_t += gr_t
                r_t /= 2.
            else:
                s_t1 = gs_t1
                r_t = gr_t
                done = gdone
                info = ginfo
            # reward scaling
            r_t /= 15.
            s_t1 = self.resize_image(s_t1)
            channels = s_t1.shape[-1]
            # concatenate s_t1 (the newest frame) in front and
            # drop the oldest stacked frame
            self.s_t = np.concatenate(
                [s_t1, self.s_t[:, :, :-channels]], axis=-1)
            self.s.append(self.s_t)
            self.a.append(self.a_t)
            self.a_logits.append(a_logits)
            self.r.append(r_t)
            self.max_pos = max(self.max_pos, info["x_pos"])
            self.pos.append(info["x_pos"])
            # terminate if Mario has been stuck within +-5 of the same
            # x position for 100 steps
            if (len(self.pos) > 100) and (
                    info["x_pos"] - self.pos[-100] < 5) and (
                    self.pos[-100] - info["x_pos"] < 5):
                done = True
            self.done = done
            self.v_cur.append(v_cur)
            self.state_in.append(state_in)
            """
            get segs
            """
            segs = self.get_history(force)
            """
            reset env
            """
            self.reset(force)
            return segs

        def reset(self, force=False):
            # re-initialize all trajectory buffers when the episode is
            # done or a flush is forced; otherwise a no-op
            if self.done or force:
                max_pos = self.max_pos
                self.max_pos = -10000
                print(" Max Position %s : %d" % (self.game, max_pos))
                self.count = 0
                self.act_repeat = random.choice(self.act_repeats)
                s_t = self.resize_image(self.env.reset())
                self.s_t = np.tile(s_t, [1, 1, self.frames])
                self.s = [self.s_t]
                self.a_t = random.randint(0, self.act_space - 1)
                self.a = [self.a_t]
                self.a_logits = []
                self.r = [0]
                self.pos = []
                self.v_cur = []
                state_in = np.zeros(self.state_size, dtype=np.float32)
                self.state_in = [state_in]
                self.done = False

        def get_state(self):
            # latest stacked observation, shape (84, 84, frames)
            return self.s_t

        def get_act(self):
            # latest executed action
            return self.a_t

        def get_max_pos(self):
            return self.max_pos

        def reset_max_pos(self):
            self.max_pos = -10000

        def get_state_in(self):
            # latest recurrent state
            return self.state_in[-1]

        def get_history(self, force=False):
            # Package the buffered trajectory into a Seg when the episode
            # ended (bootstrapping terminal value as 0) or when forced
            # (bootstrapping from the next value estimates); returns None
            # otherwise.
            if self.done or force:
                if self.done:
                    # estimate advantages with Generalized Advantage Estimation
                    gaes, _ = get_gaes(None, self.r, self.v_cur,
                                       self.v_cur[1:] + [0], 0.99, 0.95)
                    seg = Seg(self.s, self.a, self.a_logits, self.r,
                              gaes, self.v_cur, self.state_in)
                    return self.postprocess(seg)
                if force and len(self.r) > 1:
                    gaes, _ = get_gaes(
                        None, self.r[:-1], self.v_cur[:-1], self.v_cur[1:], 0.99, 0.95)
                    seg = Seg(self.s[:-1], self.a[:-1], self.a_logits[:-1], self.r[:-1], gaes,
                              self.v_cur[:-1], self.state_in[:-1])
                    return self.postprocess(seg)
            return None

        @staticmethod
        def resize_image(image, size=84):
            '''
            description: grayscale, resize and normalize a frame.
            param {
                image: image as np.array
                size: target side length after resize
            }
            return {float32 array of shape (size, size, 1) in [0, 1]}
            '''
            image = Image.fromarray(image)
            image = image.convert("L")
            image = image.resize((size, size))
            image = np.array(image)
            image = image / 255.
            image = np.array(image, np.float32)
            return image[:, :, None]

        def postprocess(self, seg):
            """
            postprocess the seg for training:
            split the raw seg into several overlapping segments of
            (burn_in + seqlen) steps, sliding by burn_in each time.
            """
            burn_in = self.burn_in
            seqlen = self.seqlen + burn_in
            seg_results = []
            if seg is not None:
                while len(seg[0]) > burn_in:
                    next_seg = dict()
                    # states: (seqlen, 84, 84, frames), zero-padded at the tail
                    next_seg["s"] = padding(seg.s[:seqlen], seqlen, np.float32)
                    # actions/rewards are shifted by one step relative to
                    # their "prev_" counterparts
                    next_seg["a"] = padding(
                        seg.a[1:seqlen + 1], seqlen, np.int32)
                    next_seg["prev_a"] = padding(
                        seg.a[:seqlen], seqlen, np.int32)
                    next_seg["a_logits"] = padding(
                        seg.a_logits[:seqlen], seqlen, np.float32)
                    next_seg["r"] = padding(
                        seg.r[1:seqlen + 1], seqlen, np.float32)
                    next_seg["prev_r"] = padding(
                        seg.r[:seqlen], seqlen, np.float32)
                    next_seg["adv"] = padding(
                        seg.gaes[:seqlen], seqlen, np.float32)
                    next_seg["v_cur"] = padding(
                        seg.v_cur[:seqlen], seqlen, np.float32)
                    # recurrent state at the start of this segment
                    next_seg["state_in"] = np.array(
                        seg.state_in[0], np.float32)
                    # mask of valid (non-padded) timesteps
                    next_seg["slots"] = padding(
                        len(seg.s[:seqlen]) * [1], seqlen, np.int32)
                    seg_results.append(next_seg)
                    seg = Seg(*[t[burn_in:] for t in seg])
            if any(seg_results):
                return seg_results
            else:
                return None

    class Envs(object):
        def __init__(self, act_space, act_repeats, frames,
                     state_size, burn_in, seqlen, games):
            '''
            description: hold one Env per entry in `games`.
            param {see Env.__init__}
            return {None}
            '''
            self.envs = []
            for game in games:
                env = Env(act_space, act_repeats, frames,
                          state_size, burn_in, seqlen, game)
                self.envs.append(env)

        def step(self, sess, model):
            '''
            description: step every env with actions predicted by the model.
            param {
                sess: tensorflow session
                model: the neural network model
            }
            return {flattened list of training segment dicts}
            '''
            feed_dict = self.get_feed_dict(model)
            # get predicted actions/values from the model for all envs at once
            a, a_logits, v_cur, state_in = sess.run(
                [model.current_act, model.current_act_logits,
                 model.current_value, model.state_out],
                feed_dict=feed_dict
            )
            # step each env with its predicted action
            segs = [env.step(
                a[i][0],
                a_logits[i][0],
                v_cur[i][0],
                state_in[i]
            ) for (i, env) in enumerate(self.envs)]
            # drop Nones and flatten the per-env segment lists
            segs = [t2 for t1 in segs if t1 is not None for t2 in t1]
            return segs

        def get_feed_dict(self, model):
            '''
            description: build the model feed_dict from every env's
            latest state, action, reward and recurrent state.
            param {model: the neural network model}
            return {feed_dict keyed by the model's input placeholders}
            '''
            feed_dict = dict()
            feed_dict[model.s_t] = [[env.get_state()] for env in self.envs]
            feed_dict[model.previous_actions] = [[env.get_act()]
                                                 for env in self.envs]
            feed_dict[model.prev_r] = [[env.r[-1]] for env in self.envs]
            feed_dict[model.state_in] = [env.get_state_in()
                                         for env in self.envs]
            return feed_dict

    return Envs
def build_env(kwargs):
    """Construct the vectorised Super Mario Bros environments.

    *kwargs* is a config dict providing act_space, action_repeats, frames,
    state_size, burn_in, seqlen and parallel; the game list covers all
    8 worlds x 4 stages and is tiled to roughly `parallel` entries.
    """
    Envs = _warp_env()
    games = ["SuperMarioBros-%d-%d-v0" % (world, stage)
             for world in range(1, 9)
             for stage in range(1, 5)]
    games = games * (kwargs['parallel'] // len(games))
    return Envs(kwargs['act_space'],
                kwargs['action_repeats'],
                kwargs["frames"],
                kwargs['state_size'],
                kwargs['burn_in'],
                kwargs['seqlen'],
                games)
| 34.831099 | 94 | 0.491379 | import logging
import numpy as np
from collections import namedtuple
def padding(input, seqlen, dtype):
input = np.array(input, dtype=dtype)
if len(input) >= seqlen:
return input
shape = input.shape
pad = np.tile(
np.zeros_like(input[0:1], dtype=dtype),
[seqlen - shape[0]] + (len(shape) - 1) * [1])
return np.concatenate([input, pad], axis=0)
Seg = namedtuple("Seg", ["s", "a", "a_logits",
"r", "gaes", "v_cur", "state_in"])
def _warp_env():
import random
from utils.get_gaes import get_gaes
import gym_super_mario_bros
from PIL import Image
from gym_super_mario_bros.actions import SIMPLE_MOVEMENT, COMPLEX_MOVEMENT
from nes_py.wrappers import JoypadSpace
class Env(object):
def __init__(self, act_space, act_repeats, frames, state_size, burn_in, seqlen, game):
self.act_space = act_space
self.act_repeats = act_repeats
self.act_repeat = random.choice(self.act_repeats)
self.frames = frames
self.state_size = state_size
self.game = game
self.burn_in = burn_in
self.seqlen = seqlen
self.max_pos = -10000
self.count = 0
env = gym_super_mario_bros.make(game)
if self.act_space == 7:
self.env = JoypadSpace(env, SIMPLE_MOVEMENT)
elif self.act_space == 12:
self.env = JoypadSpace(env, COMPLEX_MOVEMENT)
s_t = self.resize_image(self.env.reset())
self.s_t = np.tile(s_t, [1, 1, frames])
self.s = [self.s_t]
self.a_t = random.randint(0, act_space - 1)
self.a = [self.a_t]
self.a_logits = []
self.r = [0]
self.pos = []
self.v_cur = []
state_in = np.zeros(self.state_size, dtype=np.float32)
self.state_in = [state_in]
self.done = False
def step(self, a, a_logits, v_cur, state_in, force=False):
self.count += 1
if self.count % self.act_repeat == 0:
self.a_t = a
self.count = 0
self.act_repeat = random.choice(self.act_repeats)
gs_t1, gr_t, gdone, ginfo = self.env.step(self.a_t)
if not gdone:
s_t1, r_t, done, info = self.env.step(self.a_t)
r_t += gr_t
r_t /= 2.
else:
s_t1 = gs_t1
r_t = gr_t
done = gdone
info = ginfo
r_t /= 15.
s_t1 = self.resize_image(s_t1)
channels = s_t1.shape[-1]
self.s_t = np.concatenate(
[s_t1, self.s_t[:, :, :-channels]], axis=-1)
self.s.append(self.s_t)
self.a.append(self.a_t)
self.a_logits.append(a_logits)
self.r.append(r_t)
self.max_pos = max(self.max_pos, info["x_pos"])
self.pos.append(info["x_pos"])
if (len(self.pos) > 100) and (
info["x_pos"] - self.pos[-100] < 5) and (
self.pos[-100] - info["x_pos"] < 5):
done = True
self.done = done
self.v_cur.append(v_cur)
self.state_in.append(state_in)
segs = self.get_history(force)
self.reset(force)
return segs
def reset(self, force=False):
if self.done or force:
max_pos = self.max_pos
self.max_pos = -10000
print(" Max Position %s : %d" % (self.game, max_pos))
self.count = 0
self.act_repeat = random.choice(self.act_repeats)
s_t = self.resize_image(self.env.reset())
self.s_t = np.tile(s_t, [1, 1, self.frames])
self.s = [self.s_t]
self.a_t = random.randint(0, self.act_space - 1)
self.a = [self.a_t]
self.a_logits = []
self.r = [0]
self.pos = []
self.v_cur = []
state_in = np.zeros(self.state_size, dtype=np.float32)
self.state_in = [state_in]
self.done = False
def get_state(self):
return self.s_t
def get_act(self):
return self.a_t
def get_max_pos(self):
return self.max_pos
def reset_max_pos(self):
self.max_pos = -10000
def get_state_in(self):
return self.state_in[-1]
def get_history(self, force=False):
if self.done or force:
if self.done:
gaes, _ = get_gaes(None, self.r, self.v_cur,
self.v_cur[1:] + [0], 0.99, 0.95)
seg = Seg(self.s, self.a, self.a_logits, self.r,
gaes, self.v_cur, self.state_in)
return self.postprocess(seg)
if force and len(self.r) > 1:
gaes, _ = get_gaes(
None, self.r[:-1], self.v_cur[:-1], self.v_cur[1:], 0.99, 0.95)
seg = Seg(self.s[:-1], self.a[:-1], self.a_logits[:-1], self.r[:-1], gaes,
self.v_cur[:-1], self.state_in[:-1])
return self.postprocess(seg)
return None
@staticmethod
def resize_image(image, size=84):
image = Image.fromarray(image)
image = image.convert("L")
image = image.resize((size, size))
image = np.array(image)
image = image / 255.
image = np.array(image, np.float32)
return image[:, :, None]
def postprocess(self, seg):
burn_in = self.burn_in
seqlen = self.seqlen + burn_in
seg_results = []
if seg is not None:
while len(seg[0]) > burn_in:
next_seg = dict()
next_seg["s"] = padding(seg.s[:seqlen], seqlen, np.float32)
next_seg["a"] = padding(
seg.a[1:seqlen + 1], seqlen, np.int32)
next_seg["prev_a"] = padding(
seg.a[:seqlen], seqlen, np.int32)
next_seg["a_logits"] = padding(
seg.a_logits[:seqlen], seqlen, np.float32)
next_seg["r"] = padding(
seg.r[1:seqlen + 1], seqlen, np.float32)
next_seg["prev_r"] = padding(
seg.r[:seqlen], seqlen, np.float32)
next_seg["adv"] = padding(
seg.gaes[:seqlen], seqlen, np.float32)
next_seg["v_cur"] = padding(
seg.v_cur[:seqlen], seqlen, np.float32)
next_seg["state_in"] = np.array(
seg.state_in[0], np.float32)
next_seg["slots"] = padding(
len(seg.s[:seqlen]) * [1], seqlen, np.int32)
seg_results.append(next_seg)
seg = Seg(*[t[burn_in:] for t in seg])
if any(seg_results):
return seg_results
else:
return None
class Envs(object):
def __init__(self, act_space, act_repeats, frames,
state_size, burn_in, seqlen, games):
self.envs = []
for game in games:
env = Env(act_space, act_repeats, frames,
state_size, burn_in, seqlen, game)
self.envs.append(env)
def step(self, sess, model):
feed_dict = self.get_feed_dict(model)
a, a_logits, v_cur, state_in = sess.run(
[model.current_act, model.current_act_logits,
model.current_value, model.state_out],
feed_dict=feed_dict
)
segs = [env.step(
a[i][0],
a_logits[i][0],
v_cur[i][0],
state_in[i]
) for (i, env) in enumerate(self.envs)]
segs = [t2 for t1 in segs if t1 is not None for t2 in t1]
return segs
def get_feed_dict(self, model):
feed_dict = dict()
feed_dict[model.s_t] = [[env.get_state()] for env in self.envs]
feed_dict[model.previous_actions] = [[env.get_act()]
for env in self.envs]
feed_dict[model.prev_r] = [[env.r[-1]] for env in self.envs]
feed_dict[model.state_in] = [env.get_state_in()
for env in self.envs]
return feed_dict
return Envs
def build_env(kwargs):
Envs = _warp_env()
state_size = kwargs['state_size']
action_repeats = kwargs['action_repeats']
frames = kwargs["frames"]
parallel = kwargs['parallel']
act_space = kwargs['act_space']
burn_in = kwargs['burn_in']
seqlen = kwargs['seqlen']
games = ["SuperMarioBros-%d-%d-v0" %
(i, j) for i in range(1, 9) for j in range(1, 5)]
games = games * (parallel // len(games))
envs = Envs(act_space, action_repeats, frames,
state_size, burn_in, seqlen, games)
return envs
| true | true |
f71bdd99c5f2350a22133188e60716a5121147ae | 403 | py | Python | SentyectorAPI/SentyectorAPI/wsgi.py | vaibhavarora102/Sentyector | c9023fe38e3517bd39b932a3282f5aebe5e84fbf | [
"MIT"
] | null | null | null | SentyectorAPI/SentyectorAPI/wsgi.py | vaibhavarora102/Sentyector | c9023fe38e3517bd39b932a3282f5aebe5e84fbf | [
"MIT"
] | null | null | null | SentyectorAPI/SentyectorAPI/wsgi.py | vaibhavarora102/Sentyector | c9023fe38e3517bd39b932a3282f5aebe5e84fbf | [
"MIT"
] | 3 | 2021-04-10T06:33:07.000Z | 2021-04-10T12:04:19.000Z | """
WSGI config for SentyectorAPI project.
It exposes the WSGI callable as a module-level variable named ``application``.
For more information on this file, see
https://docs.djangoproject.com/en/3.0/howto/deployment/wsgi/
"""
import os
from django.core.wsgi import get_wsgi_application
os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'SentyectorAPI.settings')
application = get_wsgi_application()
| 23.705882 | 78 | 0.791563 |
import os
from django.core.wsgi import get_wsgi_application
os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'SentyectorAPI.settings')
application = get_wsgi_application()
| true | true |
f71bddb9bef15222e2b3b6e6039b80e283a19bf6 | 11,278 | py | Python | src/model/dataProcessing/coco_utils.py | monkeypants/CartridgeOCR | a2cdaa72e3839a881118b85f5ff7b4515579004b | [
"MIT"
] | 2 | 2021-07-12T02:37:46.000Z | 2021-12-28T23:03:20.000Z | src/model/dataProcessing/coco_utils.py | monkeypants/CartridgeOCR | a2cdaa72e3839a881118b85f5ff7b4515579004b | [
"MIT"
] | 28 | 2021-12-29T00:51:24.000Z | 2022-03-24T08:03:59.000Z | src/model/dataProcessing/coco_utils.py | monkeypants/CartridgeOCR | a2cdaa72e3839a881118b85f5ff7b4515579004b | [
"MIT"
] | 4 | 2021-09-24T16:13:43.000Z | 2022-03-09T17:52:35.000Z | import copy
import os
import torch
import torch.utils.data
import torchvision
from pycocotools import mask as coco_mask
from pycocotools.coco import COCO
import dataProcessing.transforms as T
import logging
class FilterAndRemapCocoCategories(object):
    """Transform that keeps only annotations whose ``category_id`` is in
    *categories* and, when *remap* is true, rewrites each kept id to its
    index within *categories*."""

    def __init__(self, categories, remap=True):
        self.categories = categories
        self.remap = remap

    def __call__(self, image, target):
        kept = [ann for ann in target["annotations"]
                if ann["category_id"] in self.categories]
        if not self.remap:
            # filtering only: store and return without touching the ids
            target["annotations"] = kept
            return image, target
        # deep-copy so the original annotation dicts are never mutated
        kept = copy.deepcopy(kept)
        for ann in kept:
            ann["category_id"] = self.categories.index(ann["category_id"])
        target["annotations"] = kept
        return image, target
def convert_polygons(polygons, height, width):
    """Scale normalized polygon coordinates to pixel space.

    Each polygon is a flat list ``[x0, y0, x1, y1, ...]`` with values in
    [0, 1]; x coordinates are multiplied by *width* and y coordinates by
    *height*. Returns a new list of scaled polygons.
    """
    max_width = 1080
    if width > max_width:
        # Fix: `logging.warn` is a deprecated alias; use `logging.warning`.
        logging.warning('invalid width needs normalizing')
    polyout = []
    for p in polygons:
        # interleave [width, height] to match the flat x,y layout
        mult = [width, height] * (len(p) // 2)
        assert len(mult) == len(p)
        polyout.append([x * y for x, y in zip(p, mult)])
    return polyout
def transform_coco_polygon(segmentations, height, width):
    """Apply :func:`convert_polygons` to every segmentation in the list,
    scaling normalized coordinates to pixel space."""
    return [convert_polygons(polygons, height, width)
            for polygons in segmentations]
def convert_coco_poly_to_mask(segmentations, height, width):
    """Rasterize COCO polygon segmentations into a boolean mask tensor.

    Returns a uint8 tensor of shape (N, height, width); when
    *segmentations* is empty, an empty (0, height, width) tensor.
    """
    mask_list = []
    for polygons in segmentations:
        # polygons arrive normalized; scale them to pixel coordinates first
        scaled = convert_polygons(polygons, height, width)
        rles = coco_mask.frPyObjects(scaled, height, width)
        decoded = coco_mask.decode(rles)
        if len(decoded.shape) < 3:
            decoded = decoded[..., None]
        tensor = torch.as_tensor(decoded, dtype=torch.uint8)
        # collapse the per-polygon channel axis into a single mask
        mask_list.append(tensor.any(dim=2))
    if mask_list:
        return torch.stack(mask_list, dim=0)
    return torch.zeros((0, height, width), dtype=torch.uint8)
def transform_coco_annotation(anno, height, width):
    """Mutate one annotation in place: scale its normalized segmentation
    and bbox to pixel space, and convert the bbox from [x, y, w, h]
    to [x0, y0, x1, y1] form."""
    anno['segmentation'] = convert_polygons(anno['segmentation'], height, width)
    scale = [width, height, width, height]
    anno['bbox'] = [value * factor for value, factor in zip(anno['bbox'], scale)]
    # add x0/y0 onto w/h so the last two entries become x1/y1
    for i in range(2, len(anno['bbox'])):
        anno['bbox'][i] += anno['bbox'][i - 2]
class ConvertCocoPolysToMask(object):
    """Transform converting raw COCO ``annotations`` into the tensor target
    dict (boxes, labels, masks, optional keypoints) used by detection
    models. Assumes incoming bboxes/polygons are normalized to [0, 1]
    and scales them by the PIL image size."""

    def __call__(self, image, target):
        w, h = image.size
        image_id = target["image_id"]
        image_id = torch.tensor([image_id])
        anno = target["annotations"]
        # TODO: now fixed in the conversion script.
        # for obj in anno:
        #    obj['iscrowd']=0
        # drop crowd annotations
        anno = [obj for obj in anno if obj['iscrowd'] == 0]
        boxes = [obj["bbox"] for obj in anno]
        # guard against no boxes via resizing
        boxes = torch.as_tensor(boxes, dtype=torch.float32).reshape(-1, 4)
        # convert [x, y, w, h] -> [x0, y0, x1, y1], scale to pixels, clamp
        boxes[:, 2:] += boxes[:, :2]
        boxes *= torch.as_tensor([w, h, w, h])
        boxes[:, 0::2].clamp_(min=0, max=w)
        boxes[:, 1::2].clamp_(min=0, max=h)
        classes = [obj["category_id"] for obj in anno]
        classes = torch.tensor(classes, dtype=torch.int64)
        segmentations = [obj["segmentation"] for obj in anno]
        masks = convert_coco_poly_to_mask(segmentations, h, w)
        keypoints = None
        if anno and "keypoints" in anno[0]:
            keypoints = [obj["keypoints"] for obj in anno]
            keypoints = torch.as_tensor(keypoints, dtype=torch.float32)
            num_keypoints = keypoints.shape[0]
            if num_keypoints:
                # reshape flat triples into (N, K, 3) = (x, y, visibility)
                keypoints = keypoints.view(num_keypoints, -1, 3)
        # keep only non-degenerate boxes (positive width and height)
        keep = (boxes[:, 3] > boxes[:, 1]) & (boxes[:, 2] > boxes[:, 0])
        boxes = boxes[keep]
        classes = classes[keep]
        masks = masks[keep]
        if keypoints is not None:
            keypoints = keypoints[keep]
        target = {}
        target["boxes"] = boxes
        target["labels"] = classes
        target["masks"] = masks
        target["image_id"] = image_id
        if keypoints is not None:
            target["keypoints"] = keypoints
        # for conversion to coco api
        # NOTE(review): area/iscrowd are built from the unfiltered `anno`,
        # so their length can exceed boxes/labels when degenerate boxes
        # were dropped by `keep` above — confirm this is intended.
        area = torch.tensor([obj["area"] for obj in anno])
        iscrowd = torch.tensor([obj["iscrowd"] for obj in anno])
        # iscrowd = torch.tensor([0 for obj in anno])
        target["area"] = area
        target["iscrowd"] = iscrowd
        return image, target
def _coco_remove_images_without_annotations(dataset, cat_list=None):
    """Return a ``Subset`` of *dataset* containing only images with at
    least one usable annotation (optionally restricted to *cat_list*
    categories). Keypoint datasets additionally require a minimum number
    of visible keypoints per image."""

    def _has_only_empty_bbox(anno):
        # every annotation has a box whose width or height is <= 1 pixel
        return all(any(o <= 1 for o in obj["bbox"][2:]) for obj in anno)

    def _count_visible_keypoints(anno):
        # keypoints are flat (x, y, v) triples; v > 0 means visible/labeled
        return sum(sum(1 for v in ann["keypoints"][2::3] if v > 0) for ann in anno)

    min_keypoints_per_image = 10

    def _has_valid_annotation(anno):
        # if it's empty, there is no annotation
        if len(anno) == 0:
            return False
        # if all boxes have close to zero area, there is no annotation
        if _has_only_empty_bbox(anno):
            return False
        # keypoints tasks have a slightly different criterion for
        # considering an annotation valid
        if "keypoints" not in anno[0]:
            return True
        # for keypoint detection tasks, only consider images valid when
        # they contain at least min_keypoints_per_image visible keypoints
        if _count_visible_keypoints(anno) >= min_keypoints_per_image:
            return True
        return False

    assert isinstance(dataset, torchvision.datasets.CocoDetection)
    ids = []
    for ds_idx, img_id in enumerate(dataset.ids):
        ann_ids = dataset.coco.getAnnIds(imgIds=img_id, iscrowd=None)
        anno = dataset.coco.loadAnns(ann_ids)
        if cat_list:
            anno = [obj for obj in anno if obj["category_id"] in cat_list]
        if _has_valid_annotation(anno):
            ids.append(ds_idx)
    dataset = torch.utils.data.Subset(dataset, ids)
    return dataset
def convert_to_coco_api(ds):
    """Build an in-memory pycocotools ``COCO`` object from a dataset whose
    items are ``(image_tensor, target_dict)`` pairs, so COCO evaluation
    can run against arbitrary datasets."""
    coco_ds = COCO()
    ann_id = 0
    dataset = {'images': [], 'categories': [], 'annotations': []}
    categories = set()
    for img_idx in range(len(ds)):
        # find better way to get target
        # targets = ds.get_annotations(img_idx)
        img, targets = ds[img_idx]
        image_id = targets["image_id"].item()
        img_dict = {}
        img_dict['id'] = image_id
        # tensor layout is (C, H, W)
        img_dict['height'] = img.shape[-2]
        img_dict['width'] = img.shape[-1]
        img_dict['image'] = img
        dataset['images'].append(img_dict)
        bboxes = targets["boxes"]
        # convert [x0, y0, x1, y1] back to COCO's [x, y, w, h]
        bboxes[:, 2:] -= bboxes[:, :2]
        bboxes = bboxes.tolist()
        labels = targets['labels'].tolist()
        areas = targets['area'].tolist()
        iscrowd = targets['iscrowd'].tolist()
        if 'masks' in targets:
            masks = targets['masks']
            # make masks Fortran contiguous for coco_mask
            masks = masks.permute(0, 2, 1).contiguous().permute(0, 2, 1)
        if 'keypoints' in targets:
            keypoints = targets['keypoints']
            keypoints = keypoints.reshape(keypoints.shape[0], -1).tolist()
        num_objs = len(bboxes)
        for i in range(num_objs):
            ann = {}
            ann['image_id'] = image_id
            ann['bbox'] = bboxes[i]
            ann['category_id'] = labels[i]
            categories.add(labels[i])
            ann['area'] = areas[i]
            ann['iscrowd'] = iscrowd[i]
            ann['id'] = ann_id
            if 'masks' in targets:
                ann["segmentation"] = coco_mask.encode(masks[i].numpy())
            if 'keypoints' in targets:
                ann['keypoints'] = keypoints[i]
                # count labeled keypoints (visibility flag != 0)
                ann['num_keypoints'] = sum(k != 0 for k in keypoints[i][2::3])
            dataset['annotations'].append(ann)
            ann_id += 1
    dataset['categories'] = [{'id': i} for i in sorted(categories)]
    coco_ds.dataset = dataset
    coco_ds.createIndex()
    return coco_ds
def get_coco_api_from_dataset(dataset):
    """Return the pycocotools COCO object behind *dataset*.

    Unwraps up to ten levels of ``torch.utils.data.Subset`` nesting; if
    no ``CocoDetection`` is found, falls back to building an in-memory
    COCO object via :func:`convert_to_coco_api`.
    """
    candidate = dataset
    for _ in range(10):
        if isinstance(candidate, torchvision.datasets.CocoDetection):
            break
        if isinstance(candidate, torch.utils.data.Subset):
            candidate = candidate.dataset
    if isinstance(candidate, torchvision.datasets.CocoDetection):
        return candidate.coco
    return convert_to_coco_api(candidate)
class CocoDetection(torchvision.datasets.CocoDetection):
    """``torchvision`` CocoDetection subclass that wraps each sample's
    annotations in a ``{"image_id", "annotations"}`` dict and applies a
    joint (image, target) transform pipeline."""

    def __init__(self, img_folder, ann_file, transforms):
        super(CocoDetection, self).__init__(img_folder, ann_file)
        self._transforms = transforms

    def __getitem__(self, idx):
        img, target = super(CocoDetection, self).__getitem__(idx)
        image_id = self.ids[idx]
        target = dict(image_id=image_id, annotations=target)
        if self._transforms is not None:
            img, target = self._transforms(img, target)
        return img, target

    @staticmethod
    def get_coco_api(dataset, transform=False):
        """Unwrap Subset nesting and return the dataset's COCO API,
        optionally re-scaled to pixel coordinates via transform_coco_api."""
        for i in range(10):
            if isinstance(dataset, torchvision.datasets.CocoDetection):
                break
            if isinstance(dataset, torch.utils.data.Subset):
                dataset = dataset.dataset
        if isinstance(dataset, torchvision.datasets.CocoDetection):
            if not transform:
                return dataset.coco
            else:
                return dataset.transform_coco_api()
        raise Exception("No instance of CocoDetection found")

    def transform_coco_api(self):
        """Return a deep copy of the COCO API whose image sizes and
        annotations are rewritten from normalized to pixel coordinates,
        using the actual transformed tensor sizes of each sample.

        NOTE(review): iterates the whole dataset once to collect sizes,
        so this is O(dataset) and triggers every transform."""
        coco = copy.deepcopy(self.coco)
        image_sizes = {}
        for img, target in self:
            image_sizes[target['image_id'].item()] = img.size()[1:]  # TODO: width vs height. Always len 3?
        for img in coco.dataset['images']:
            (h, w) = image_sizes[img['id']]
            img['width'] = w
            img['height'] = h
        for ann in coco.dataset['annotations']:
            id = ann['image_id']
            (h, w) = image_sizes[id]
            transform_coco_annotation(ann, h, w)
        coco.createIndex()
        return coco
def get_coco(root, image_set, transforms, mode='instances'):
    """Build a COCO-style detection dataset rooted at *root*.

    *image_set* selects "train" or "val"; *mode* picks the annotation
    flavor (e.g. "instances" or "person_keypoints"). Training sets are
    filtered to images that actually carry annotations.
    """
    anno_file_template = "{}_{}2017.json"
    PATHS = {
        "train": ("train2017", os.path.join("annotations", anno_file_template.format(mode, "train"))),
        "val": ("val2017", os.path.join("annotations", anno_file_template.format(mode, "val"))),
    }
    # always convert polygons to masks first, then any user transforms
    transform_list = [ConvertCocoPolysToMask()]
    if transforms is not None:
        transform_list.append(transforms)
    composed = T.Compose(transform_list)
    img_dir, ann_name = PATHS[image_set]
    dataset = CocoDetection(os.path.join(root, img_dir),
                            os.path.join(root, ann_name),
                            transforms=composed)
    if image_set == "train":
        dataset = _coco_remove_images_without_annotations(dataset)
    return dataset
def get_coco_kp(root, image_set, transforms):
    """Keypoint-detection variant of :func:`get_coco` (person_keypoints annotations)."""
    return get_coco(root, image_set, transforms, mode="person_keypoints")
| 34.916409 | 107 | 0.605604 | import copy
import os
import torch
import torch.utils.data
import torchvision
from pycocotools import mask as coco_mask
from pycocotools.coco import COCO
import dataProcessing.transforms as T
import logging
class FilterAndRemapCocoCategories(object):
def __init__(self, categories, remap=True):
self.categories = categories
self.remap = remap
def __call__(self, image, target):
anno = target["annotations"]
anno = [obj for obj in anno if obj["category_id"] in self.categories]
if not self.remap:
target["annotations"] = anno
return image, target
anno = copy.deepcopy(anno)
for obj in anno:
obj["category_id"] = self.categories.index(obj["category_id"])
target["annotations"] = anno
return image, target
def convert_polygons(polygons, height, width):
max_width = 1080
if width > max_width:
logging.warn('invalid width needs normalizing')
polyout = []
for p in polygons:
mult = [width, height] * (len(p) // 2)
assert(len(mult) == len(p))
polyout.append([x * y for x, y in zip(p, mult)])
return polyout
def transform_coco_polygon(segmentations, height, width):
result = []
for polygons in segmentations:
polyout = convert_polygons(polygons, height, width)
result.append(polyout)
return result
def convert_coco_poly_to_mask(segmentations, height, width):
masks = []
for polygons in segmentations:
polygons = convert_polygons(polygons, height, width)
rles = coco_mask.frPyObjects(polygons, height, width)
mask = coco_mask.decode(rles)
if len(mask.shape) < 3:
mask = mask[..., None]
mask = torch.as_tensor(mask, dtype=torch.uint8)
mask = mask.any(dim=2)
masks.append(mask)
if masks:
masks = torch.stack(masks, dim=0)
else:
masks = torch.zeros((0, height, width), dtype=torch.uint8)
return masks
def transform_coco_annotation(anno, height, width):
anno['segmentation'] = convert_polygons(anno['segmentation'], height, width)
anno['bbox'] = [x * y for (x, y) in zip(anno['bbox'], [width, height, width, height])]
for i in range(2, len(anno['bbox'])):
anno['bbox'][i] += anno['bbox'][i - 2]
class ConvertCocoPolysToMask(object):
def __call__(self, image, target):
w, h = image.size
image_id = target["image_id"]
image_id = torch.tensor([image_id])
anno = target["annotations"]
anno = [obj for obj in anno if obj['iscrowd'] == 0]
boxes = [obj["bbox"] for obj in anno]
boxes = torch.as_tensor(boxes, dtype=torch.float32).reshape(-1, 4)
boxes[:, 2:] += boxes[:, :2]
boxes *= torch.as_tensor([w, h, w, h])
boxes[:, 0::2].clamp_(min=0, max=w)
boxes[:, 1::2].clamp_(min=0, max=h)
classes = [obj["category_id"] for obj in anno]
classes = torch.tensor(classes, dtype=torch.int64)
segmentations = [obj["segmentation"] for obj in anno]
masks = convert_coco_poly_to_mask(segmentations, h, w)
keypoints = None
if anno and "keypoints" in anno[0]:
keypoints = [obj["keypoints"] for obj in anno]
keypoints = torch.as_tensor(keypoints, dtype=torch.float32)
num_keypoints = keypoints.shape[0]
if num_keypoints:
keypoints = keypoints.view(num_keypoints, -1, 3)
keep = (boxes[:, 3] > boxes[:, 1]) & (boxes[:, 2] > boxes[:, 0])
boxes = boxes[keep]
classes = classes[keep]
masks = masks[keep]
if keypoints is not None:
keypoints = keypoints[keep]
target = {}
target["boxes"] = boxes
target["labels"] = classes
target["masks"] = masks
target["image_id"] = image_id
if keypoints is not None:
target["keypoints"] = keypoints
area = torch.tensor([obj["area"] for obj in anno])
iscrowd = torch.tensor([obj["iscrowd"] for obj in anno])
target["area"] = area
target["iscrowd"] = iscrowd
return image, target
def _coco_remove_images_without_annotations(dataset, cat_list=None):
def _has_only_empty_bbox(anno):
return all(any(o <= 1 for o in obj["bbox"][2:]) for obj in anno)
def _count_visible_keypoints(anno):
return sum(sum(1 for v in ann["keypoints"][2::3] if v > 0) for ann in anno)
min_keypoints_per_image = 10
def _has_valid_annotation(anno):
if len(anno) == 0:
return False
# if all boxes have close to zero area, there is no annotation
if _has_only_empty_bbox(anno):
return False
# keypoints task have a slight different critera for considering
# if an annotation is valid
if "keypoints" not in anno[0]:
return True
# for keypoint detection tasks, only consider valid images those
# containing at least min_keypoints_per_image
if _count_visible_keypoints(anno) >= min_keypoints_per_image:
return True
return False
assert isinstance(dataset, torchvision.datasets.CocoDetection)
ids = []
for ds_idx, img_id in enumerate(dataset.ids):
ann_ids = dataset.coco.getAnnIds(imgIds=img_id, iscrowd=None)
anno = dataset.coco.loadAnns(ann_ids)
if cat_list:
anno = [obj for obj in anno if obj["category_id"] in cat_list]
if _has_valid_annotation(anno):
ids.append(ds_idx)
dataset = torch.utils.data.Subset(dataset, ids)
return dataset
def convert_to_coco_api(ds):
coco_ds = COCO()
ann_id = 0
dataset = {'images': [], 'categories': [], 'annotations': []}
categories = set()
for img_idx in range(len(ds)):
# find better way to get target
# targets = ds.get_annotations(img_idx)
img, targets = ds[img_idx]
image_id = targets["image_id"].item()
img_dict = {}
img_dict['id'] = image_id
img_dict['height'] = img.shape[-2]
img_dict['width'] = img.shape[-1]
img_dict['image'] = img
dataset['images'].append(img_dict)
bboxes = targets["boxes"]
bboxes[:, 2:] -= bboxes[:, :2]
bboxes = bboxes.tolist()
labels = targets['labels'].tolist()
areas = targets['area'].tolist()
iscrowd = targets['iscrowd'].tolist()
if 'masks' in targets:
masks = targets['masks']
# make masks Fortran contiguous for coco_mask
masks = masks.permute(0, 2, 1).contiguous().permute(0, 2, 1)
if 'keypoints' in targets:
keypoints = targets['keypoints']
keypoints = keypoints.reshape(keypoints.shape[0], -1).tolist()
num_objs = len(bboxes)
for i in range(num_objs):
ann = {}
ann['image_id'] = image_id
ann['bbox'] = bboxes[i]
ann['category_id'] = labels[i]
categories.add(labels[i])
ann['area'] = areas[i]
ann['iscrowd'] = iscrowd[i]
ann['id'] = ann_id
if 'masks' in targets:
ann["segmentation"] = coco_mask.encode(masks[i].numpy())
if 'keypoints' in targets:
ann['keypoints'] = keypoints[i]
ann['num_keypoints'] = sum(k != 0 for k in keypoints[i][2::3])
dataset['annotations'].append(ann)
ann_id += 1
dataset['categories'] = [{'id': i} for i in sorted(categories)]
coco_ds.dataset = dataset
coco_ds.createIndex()
return coco_ds
def get_coco_api_from_dataset(dataset):
    """Return a pycocotools ``COCO`` object for *dataset*.

    Unwraps up to 10 levels of ``torch.utils.data.Subset`` wrappers; if the
    underlying dataset is a ``CocoDetection``, its existing COCO index is
    reused, otherwise one is built via ``convert_to_coco_api``.
    """
    inner = dataset
    depth = 0
    while depth < 10 and not isinstance(inner, torchvision.datasets.CocoDetection):
        if isinstance(inner, torch.utils.data.Subset):
            inner = inner.dataset
        depth += 1
    if isinstance(inner, torchvision.datasets.CocoDetection):
        return inner.coco
    return convert_to_coco_api(inner)
class CocoDetection(torchvision.datasets.CocoDetection):
    """COCO detection dataset that re-packages each sample's raw annotation
    list as ``{'image_id': ..., 'annotations': [...]}`` and runs an optional
    joint ``(img, target)`` transform pipeline."""
    def __init__(self, img_folder, ann_file, transforms):
        # ``transforms`` here is a joint (img, target) pipeline, kept separate
        # from torchvision's own per-image ``transform`` attribute.
        super(CocoDetection, self).__init__(img_folder, ann_file)
        self._transforms = transforms
    def __getitem__(self, idx):
        """Return ``(img, target)`` for sample ``idx``, where ``target`` is a
        dict holding the COCO image id and the raw annotation list (both
        possibly rewritten by the transform pipeline)."""
        img, target = super(CocoDetection, self).__getitem__(idx)
        image_id = self.ids[idx]
        # print(image_id)
        target = dict(image_id=image_id, annotations=target)
        if self._transforms is not None:
            img, target = self._transforms(img, target)
        return img, target
    @staticmethod
    def get_coco_api(dataset, transform=False):
        """Unwrap up to 10 levels of ``torch.utils.data.Subset`` and return
        the underlying dataset's pycocotools ``COCO`` object.

        With ``transform=True`` the returned COCO index is rebuilt in the
        post-transform coordinate space (see ``transform_coco_api``).
        Raises ``Exception`` if no ``CocoDetection`` instance is found.
        """
        for i in range(10):
            if isinstance(dataset, torchvision.datasets.CocoDetection):
                break
            if isinstance(dataset, torch.utils.data.Subset):
                dataset = dataset.dataset
        if isinstance(dataset, torchvision.datasets.CocoDetection):
            if not transform:
                return dataset.coco
            else:
                return dataset.transform_coco_api()
        raise Exception("No instance of CocoDetection found")
    def transform_coco_api(self):
        """Return a deep-copied ``COCO`` object whose image sizes and
        annotations reflect this dataset's transform pipeline.

        Iterates the whole dataset once to record the transformed image sizes,
        then rewrites each image entry and annotation before rebuilding the
        COCO index. ``transform_coco_annotation`` is defined elsewhere in this
        module — presumably it rescales boxes/masks/keypoints to the new
        ``(h, w)``; confirm against its definition.
        """
        coco = copy.deepcopy(self.coco)
        image_sizes = {}
        for img, target in self:
            image_sizes[target['image_id'].item()] = img.size()[1:]  # TODO: width vs height. Always len 3?
        for ann in coco.dataset['annotations']:
            id = ann['image_id']
            (h, w) = image_sizes[id]
            transform_coco_annotation(ann, h, w)
        for img in coco.dataset['images']:
            (h, w) = image_sizes[img['id']]
            img['width'] = w
            img['height'] = h
        coco.createIndex()
        return coco
def get_coco(root, image_set, transforms, mode='instances'):
    """Build the COCO-2017 detection dataset for *image_set*.

    :param root: dataset root directory containing ``train2017``/``val2017``
        image folders and an ``annotations`` folder.
    :param image_set: ``"train"`` or ``"val"``.
    :param transforms: optional joint (img, target) transform, appended after
        the polygon-to-mask conversion step.
    :param mode: annotation flavour prefix, e.g. ``"instances"`` or
        ``"person_keypoints"``.
    """
    split_dirs = {"train": "train2017", "val": "val2017"}
    img_folder = os.path.join(root, split_dirs[image_set])
    ann_file = os.path.join(
        root, "annotations", "{}_{}2017.json".format(mode, image_set))
    # Polygon-to-mask conversion always runs first; user transforms follow.
    pipeline = [ConvertCocoPolysToMask()]
    if transforms is not None:
        pipeline.append(transforms)
    dataset = CocoDetection(img_folder, ann_file, transforms=T.Compose(pipeline))
    if image_set == "train":
        # Training images without usable annotations contribute no signal.
        dataset = _coco_remove_images_without_annotations(dataset)
    return dataset
def get_coco_kp(root, image_set, transforms):
    """Keypoint-detection variant of ``get_coco``: same pipeline, but loads
    the ``person_keypoints`` annotation files."""
    return get_coco(root, image_set, transforms, mode="person_keypoints")
| true | true |
f71bdfb4494b0ea7e8661dce26911937bbed2de0 | 1,952 | py | Python | facility_management/facility_management/doctype/fm_dashboard/fm_dashboard.py | odoochain/facility_management | 545146db4e58e90311934a9d39c77def2d2a3e70 | [
"MIT"
] | 13 | 2020-06-23T23:44:16.000Z | 2022-03-19T14:40:49.000Z | facility_management/facility_management/doctype/fm_dashboard/fm_dashboard.py | pazari/fmt | 649618d47cd5cdefce93b5dc7efe5c25c299ad9c | [
"MIT"
] | 2 | 2021-06-04T06:18:09.000Z | 2021-06-06T08:41:36.000Z | facility_management/facility_management/doctype/fm_dashboard/fm_dashboard.py | pazari/fmt | 649618d47cd5cdefce93b5dc7efe5c25c299ad9c | [
"MIT"
] | 18 | 2020-02-18T10:57:13.000Z | 2022-01-26T09:01:21.000Z | # -*- coding: utf-8 -*-
# Copyright (c) 2020, 9T9IT and contributors
# For license information, please see license.txt
from __future__ import unicode_literals
import frappe
from frappe.model.document import Document
class FMDashboard(Document):
    """Dashboard document showing outstanding tenant balances for the
    properties at a given location."""

    def make_outstanding_balances(self):
        """Populate the ``outstanding_balances`` child table for display."""
        # Clear any previously rendered rows before repopulating.
        self.outstanding_balances = None
        properties = _get_properties(self.real_estate_property)
        for row in _get_outstanding_balances(properties):
            self.append(
                'outstanding_balances',
                {
                    'property_name': row.get('property_name'),
                    'sales_invoice': row.get('sales_invoice'),
                    'outstanding_amount': row.get('outstanding_amount'),
                },
            )
def _get_properties(real_estate_property):
    """Return the names of all Property records located at *real_estate_property*."""
    records = frappe.get_all('Property', {'property_location': real_estate_property})
    return [record['name'] for record in records]
def _get_outstanding_balances(filter_properties):
    """Return outstanding submitted-invoice balances restricted to invoices
    whose rented property is in *filter_properties*.

    Each result dict has ``property_name``, ``sales_invoice`` and
    ``outstanding_amount`` keys.
    """
    invoices = frappe.db.sql("""
        SELECT
            si.name,
            si.pm_tenant_renting,
            si.outstanding_amount,
            tr.property
        FROM `tabSales Invoice` si
        LEFT JOIN `tabTenant Renting` tr ON si.pm_tenant_renting = tr.name
        WHERE si.docstatus = 1
        AND si.outstanding_amount > 0
        AND si.pm_tenant_renting != ''
    """, as_dict=True)
    results = []
    for invoice in invoices:
        if invoice['property'] not in filter_properties:
            continue
        results.append({
            'property_name': _get_property_name(invoice.get('pm_tenant_renting')),
            'sales_invoice': invoice.get('name'),
            'outstanding_amount': invoice.get('outstanding_amount'),
        })
    return results
def _get_property_name(tenant_renting):
    """Return the title of the property linked to *tenant_renting*, or None
    if the renting record has no matching property."""
    rows = frappe.db.sql("""
        SELECT p.title
        FROM `tabTenant Renting` tr
        JOIN `tabProperty` p
        ON tr.property = p.name
        WHERE tr.name = %s
    """, tenant_renting, as_dict=True)
    if not rows:
        return None
    return rows[0]['title']
| 29.134328 | 111 | 0.749488 |
from __future__ import unicode_literals
import frappe
from frappe.model.document import Document
class FMDashboard(Document):
def make_outstanding_balances(self):
self.outstanding_balances = None
outstanding_balances = _get_outstanding_balances(_get_properties(self.real_estate_property))
for outstanding_balance in outstanding_balances:
self.append('outstanding_balances', {
'property_name': outstanding_balance.get('property_name'),
'sales_invoice': outstanding_balance.get('sales_invoice'),
'outstanding_amount': outstanding_balance.get('outstanding_amount')
})
def _get_properties(real_estate_property):
return list(map(lambda x: x['name'], frappe.get_all('Property', {'property_location': real_estate_property})))
def _get_outstanding_balances(filter_properties):
def make_data(balance):
property_name = _get_property_name(balance.get('pm_tenant_renting'))
return {
'property_name': property_name,
'sales_invoice': balance.get('name'),
'outstanding_amount': balance.get('outstanding_amount')
}
outstanding = frappe.db.sql("""
SELECT
si.name,
si.pm_tenant_renting,
si.outstanding_amount,
tr.property
FROM `tabSales Invoice` si
LEFT JOIN `tabTenant Renting` tr ON si.pm_tenant_renting = tr.name
WHERE si.docstatus = 1
AND si.outstanding_amount > 0
AND si.pm_tenant_renting != ''
""", as_dict=True)
outstanding = filter(lambda x: x['property'] in filter_properties, outstanding)
return list(map(make_data, outstanding))
def _get_property_name(tenant_renting):
data = frappe.db.sql("""
SELECT p.title
FROM `tabTenant Renting` tr
JOIN `tabProperty` p
ON tr.property = p.name
WHERE tr.name = %s
""", tenant_renting, as_dict=True)
return data[0]['title'] if data else None
| true | true |
f71be05c5a13840628f874af73f7d2dd87d4c2db | 765 | py | Python | ImagePoster/urls.py | AllyxMiko/ImagePoster | 70b95a74b3ddb639e658f6b780dae5351947d6d6 | [
"MIT"
] | null | null | null | ImagePoster/urls.py | AllyxMiko/ImagePoster | 70b95a74b3ddb639e658f6b780dae5351947d6d6 | [
"MIT"
] | null | null | null | ImagePoster/urls.py | AllyxMiko/ImagePoster | 70b95a74b3ddb639e658f6b780dae5351947d6d6 | [
"MIT"
] | null | null | null | """ImagePoster URL Configuration
The `urlpatterns` list routes URLs to views. For more information please see:
https://docs.djangoproject.com/en/3.2/topics/http/urls/
Examples:
Function views
1. Add an import: from my_app import views
2. Add a URL to urlpatterns: path('', views.home, name='home')
Class-based views
1. Add an import: from other_app.views import Home
2. Add a URL to urlpatterns: path('', Home.as_view(), name='home')
Including another URLconf
1. Import the include() function: from django.urls import include, path
2. Add a URL to urlpatterns: path('blog/', include('blog.urls'))
"""
# ``include`` is publicly documented under django.urls; importing it from the
# internal ``django.urls.conf`` module relied on an implementation detail.
from django.urls import include, path

urlpatterns = [
    # Delegate everything under /images/ to the images app's URLconf.
    path('images/', include('images.urls')),
]
| 34.772727 | 77 | 0.709804 | from django.urls import path
from django.urls.conf import include
urlpatterns = [
path('images/', include('images.urls')),
]
| true | true |
f71be08bb68624aa77a36c61980a89daf8d0c77e | 1,386 | py | Python | setup.py | audiosamsung/brentvollebregt | 077bbb8601b6c179d7b750a7e31ee141bd5b0644 | [
"MIT"
] | 1 | 2021-11-09T10:24:40.000Z | 2021-11-09T10:24:40.000Z | setup.py | audiosamsung/brentvollebregt | 077bbb8601b6c179d7b750a7e31ee141bd5b0644 | [
"MIT"
] | null | null | null | setup.py | audiosamsung/brentvollebregt | 077bbb8601b6c179d7b750a7e31ee141bd5b0644 | [
"MIT"
] | null | null | null | from io import open
from setuptools import setup
from auto_py_to_exe import __version__ as version
setup(
name='auto-py-to-exe',
version=version,
url='https://github.com/brentvollebregt/auto-py-to-exe',
license='MIT',
author='Brent Vollebregt',
author_email='brent@nitratine.net',
description='Converts .py to .exe using a simple graphical interface.',
long_description=''.join(open('README.md', encoding='utf-8').readlines()),
long_description_content_type='text/markdown',
keywords=['gui', 'executable'],
packages=['auto_py_to_exe'],
include_package_data=True,
install_requires=['Eel==0.12.4', 'pyinstaller>=4.1'],
python_requires='>=3.5',
classifiers=[
'License :: OSI Approved :: MIT License',
'Programming Language :: Python',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.5',
'Programming Language :: Python :: 3.6',
'Programming Language :: Python :: 3.7',
'Programming Language :: Python :: 3.8',
'Programming Language :: Python :: 3.9',
'Operating System :: Microsoft :: Windows',
'Operating System :: POSIX :: Linux',
],
entry_points={
'console_scripts': [
'autopytoexe=auto_py_to_exe.__main__:run',
'auto-py-to-exe=auto_py_to_exe.__main__:run'
],
},
)
| 35.538462 | 78 | 0.629149 | from io import open
from setuptools import setup
from auto_py_to_exe import __version__ as version
setup(
name='auto-py-to-exe',
version=version,
url='https://github.com/brentvollebregt/auto-py-to-exe',
license='MIT',
author='Brent Vollebregt',
author_email='brent@nitratine.net',
description='Converts .py to .exe using a simple graphical interface.',
long_description=''.join(open('README.md', encoding='utf-8').readlines()),
long_description_content_type='text/markdown',
keywords=['gui', 'executable'],
packages=['auto_py_to_exe'],
include_package_data=True,
install_requires=['Eel==0.12.4', 'pyinstaller>=4.1'],
python_requires='>=3.5',
classifiers=[
'License :: OSI Approved :: MIT License',
'Programming Language :: Python',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.5',
'Programming Language :: Python :: 3.6',
'Programming Language :: Python :: 3.7',
'Programming Language :: Python :: 3.8',
'Programming Language :: Python :: 3.9',
'Operating System :: Microsoft :: Windows',
'Operating System :: POSIX :: Linux',
],
entry_points={
'console_scripts': [
'autopytoexe=auto_py_to_exe.__main__:run',
'auto-py-to-exe=auto_py_to_exe.__main__:run'
],
},
)
| true | true |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.