hexsha stringlengths 40 40 | size int64 2 1.02M | ext stringclasses 10
values | lang stringclasses 1
value | max_stars_repo_path stringlengths 4 245 | max_stars_repo_name stringlengths 6 130 | max_stars_repo_head_hexsha stringlengths 40 40 | max_stars_repo_licenses listlengths 1 10 | max_stars_count int64 1 191k ⌀ | max_stars_repo_stars_event_min_datetime stringlengths 24 24 ⌀ | max_stars_repo_stars_event_max_datetime stringlengths 24 24 ⌀ | max_issues_repo_path stringlengths 4 245 | max_issues_repo_name stringlengths 6 130 | max_issues_repo_head_hexsha stringlengths 40 40 | max_issues_repo_licenses listlengths 1 10 | max_issues_count int64 1 67k ⌀ | max_issues_repo_issues_event_min_datetime stringlengths 24 24 ⌀ | max_issues_repo_issues_event_max_datetime stringlengths 24 24 ⌀ | max_forks_repo_path stringlengths 4 245 | max_forks_repo_name stringlengths 6 130 | max_forks_repo_head_hexsha stringlengths 40 40 | max_forks_repo_licenses listlengths 1 10 | max_forks_count int64 1 105k ⌀ | max_forks_repo_forks_event_min_datetime stringlengths 24 24 ⌀ | max_forks_repo_forks_event_max_datetime stringlengths 24 24 ⌀ | content stringlengths 2 1.02M | avg_line_length float64 1 417k | max_line_length int64 1 987k | alphanum_fraction float64 0 1 | content_no_comment stringlengths 0 1.01M | is_comment_constant_removed bool 1
class | is_sharp_comment_removed bool 1
class |
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
f737936a6a56fbaeef9388d0c2890f297b18d367 | 925 | py | Python | kubernetes/test/test_v1_projected_volume_source.py | jraby/kubernetes-client-python | e6e7b710d0b15fbde686bc9dccf00da5951bef84 | [
"Apache-2.0"
] | null | null | null | kubernetes/test/test_v1_projected_volume_source.py | jraby/kubernetes-client-python | e6e7b710d0b15fbde686bc9dccf00da5951bef84 | [
"Apache-2.0"
] | null | null | null | kubernetes/test/test_v1_projected_volume_source.py | jraby/kubernetes-client-python | e6e7b710d0b15fbde686bc9dccf00da5951bef84 | [
"Apache-2.0"
] | null | null | null | # coding: utf-8
"""
Kubernetes
No description provided (generated by Swagger Codegen https://github.com/swagger-api/swagger-codegen)
OpenAPI spec version: v1.7.1
Generated by: https://github.com/swagger-api/swagger-codegen.git
"""
from __future__ import absolute_import
import os
import sys
import unittest
import kubernetes.client
from kubernetes.client.rest import ApiException
from kubernetes.client.models.v1_projected_volume_source import V1ProjectedVolumeSource
class TestV1ProjectedVolumeSource(unittest.TestCase):
    """Unit test stubs for the generated V1ProjectedVolumeSource model."""

    def setUp(self):
        # No shared fixtures are needed for these generated stubs.
        pass

    def tearDown(self):
        # Nothing to clean up.
        pass

    def testV1ProjectedVolumeSource(self):
        """Construct V1ProjectedVolumeSource and verify instantiation succeeds."""
        model = kubernetes.client.models.v1_projected_volume_source.V1ProjectedVolumeSource()
        # The generated stub built the model without checking anything;
        # at minimum assert that construction yielded the expected type.
        self.assertIsInstance(model, V1ProjectedVolumeSource)
if __name__ == '__main__':
unittest.main()
| 21.511628 | 105 | 0.72973 |
from __future__ import absolute_import
import os
import sys
import unittest
import kubernetes.client
from kubernetes.client.rest import ApiException
from kubernetes.client.models.v1_projected_volume_source import V1ProjectedVolumeSource
class TestV1ProjectedVolumeSource(unittest.TestCase):
def setUp(self):
pass
def tearDown(self):
pass
def testV1ProjectedVolumeSource(self):
model = kubernetes.client.models.v1_projected_volume_source.V1ProjectedVolumeSource()
if __name__ == '__main__':
unittest.main()
| true | true |
f73794471a1d5903494c9844b55e13a364aceb5f | 3,491 | py | Python | deepchem/molnet/__init__.py | hsjang001205/deepchem | 02fce35729826b1ef12a1cfa6519b491510217be | [
"MIT"
] | 1 | 2021-03-24T09:28:05.000Z | 2021-03-24T09:28:05.000Z | deepchem/molnet/__init__.py | hsjang001205/deepchem | 02fce35729826b1ef12a1cfa6519b491510217be | [
"MIT"
] | null | null | null | deepchem/molnet/__init__.py | hsjang001205/deepchem | 02fce35729826b1ef12a1cfa6519b491510217be | [
"MIT"
] | null | null | null | from deepchem.molnet.load_function.bace_datasets import load_bace_classification, load_bace_regression
from deepchem.molnet.load_function.bbbc_datasets import load_bbbc001, load_bbbc002
from deepchem.molnet.load_function.bbbp_datasets import load_bbbp
from deepchem.molnet.load_function.cell_counting_datasets import load_cell_counting
from deepchem.molnet.load_function.chembl_datasets import load_chembl
from deepchem.molnet.load_function.clearance_datasets import load_clearance
from deepchem.molnet.load_function.clintox_datasets import load_clintox
from deepchem.molnet.load_function.delaney_datasets import load_delaney
from deepchem.molnet.load_function.hiv_datasets import load_hiv
from deepchem.molnet.load_function.hopv_datasets import load_hopv
from deepchem.molnet.load_function.kaggle_datasets import load_kaggle
from deepchem.molnet.load_function.lipo_datasets import load_lipo
from deepchem.molnet.load_function.muv_datasets import load_muv
from deepchem.molnet.load_function.nci_datasets import load_nci
from deepchem.molnet.load_function.pcba_datasets import load_pcba, load_pcba_146, load_pcba_2475
from deepchem.molnet.load_function.pdbbind_datasets import load_pdbbind_grid, load_pdbbind, load_pdbbind_from_dir
from deepchem.molnet.load_function.ppb_datasets import load_ppb
from deepchem.molnet.load_function.qm7_datasets import load_qm7
from deepchem.molnet.load_function.qm7_datasets import load_qm7_from_mat, load_qm7b_from_mat
from deepchem.molnet.load_function.qm8_datasets import load_qm8
from deepchem.molnet.load_function.qm9_datasets import load_qm9
from deepchem.molnet.load_function.sampl_datasets import load_sampl
from deepchem.molnet.load_function.sider_datasets import load_sider
from deepchem.molnet.load_function.sweetlead_datasets import load_sweet
from deepchem.molnet.load_function.tox21_datasets import load_tox21
from deepchem.molnet.load_function.toxcast_datasets import load_toxcast
from deepchem.molnet.load_function.uspto_datasets import load_uspto
from deepchem.molnet.load_function.uv_datasets import load_uv
from deepchem.molnet.load_function.factors_datasets import load_factors
from deepchem.molnet.load_function.kinase_datasets import load_kinase
from deepchem.molnet.load_function.thermosol_datasets import load_thermosol
from deepchem.molnet.load_function.hppb_datasets import load_hppb
from deepchem.molnet.load_function.chembl25_datasets import load_chembl25
from deepchem.molnet.load_function.zinc15_datasets import load_zinc15
from deepchem.molnet.load_function.material_datasets.load_bandgap import load_bandgap
from deepchem.molnet.load_function.material_datasets.load_perovskite import load_perovskite
from deepchem.molnet.load_function.material_datasets.load_mp_formation_energy import load_mp_formation_energy
from deepchem.molnet.load_function.material_datasets.load_mp_metallicity import load_mp_metallicity
from deepchem.molnet.load_function.molnet_loader import featurizers, splitters, transformers, TransformerGenerator, _MolnetLoader
from deepchem.molnet.dnasim import simulate_motif_density_localization
from deepchem.molnet.dnasim import simulate_motif_counting
from deepchem.molnet.dnasim import simple_motif_embedding
from deepchem.molnet.dnasim import motif_density
from deepchem.molnet.dnasim import simulate_single_motif_detection
from deepchem.molnet.run_benchmark import run_benchmark
#from deepchem.molnet.run_benchmark_low_data import run_benchmark_low_data
from deepchem.molnet import run_benchmark_models
| 68.45098 | 129 | 0.899742 | from deepchem.molnet.load_function.bace_datasets import load_bace_classification, load_bace_regression
from deepchem.molnet.load_function.bbbc_datasets import load_bbbc001, load_bbbc002
from deepchem.molnet.load_function.bbbp_datasets import load_bbbp
from deepchem.molnet.load_function.cell_counting_datasets import load_cell_counting
from deepchem.molnet.load_function.chembl_datasets import load_chembl
from deepchem.molnet.load_function.clearance_datasets import load_clearance
from deepchem.molnet.load_function.clintox_datasets import load_clintox
from deepchem.molnet.load_function.delaney_datasets import load_delaney
from deepchem.molnet.load_function.hiv_datasets import load_hiv
from deepchem.molnet.load_function.hopv_datasets import load_hopv
from deepchem.molnet.load_function.kaggle_datasets import load_kaggle
from deepchem.molnet.load_function.lipo_datasets import load_lipo
from deepchem.molnet.load_function.muv_datasets import load_muv
from deepchem.molnet.load_function.nci_datasets import load_nci
from deepchem.molnet.load_function.pcba_datasets import load_pcba, load_pcba_146, load_pcba_2475
from deepchem.molnet.load_function.pdbbind_datasets import load_pdbbind_grid, load_pdbbind, load_pdbbind_from_dir
from deepchem.molnet.load_function.ppb_datasets import load_ppb
from deepchem.molnet.load_function.qm7_datasets import load_qm7
from deepchem.molnet.load_function.qm7_datasets import load_qm7_from_mat, load_qm7b_from_mat
from deepchem.molnet.load_function.qm8_datasets import load_qm8
from deepchem.molnet.load_function.qm9_datasets import load_qm9
from deepchem.molnet.load_function.sampl_datasets import load_sampl
from deepchem.molnet.load_function.sider_datasets import load_sider
from deepchem.molnet.load_function.sweetlead_datasets import load_sweet
from deepchem.molnet.load_function.tox21_datasets import load_tox21
from deepchem.molnet.load_function.toxcast_datasets import load_toxcast
from deepchem.molnet.load_function.uspto_datasets import load_uspto
from deepchem.molnet.load_function.uv_datasets import load_uv
from deepchem.molnet.load_function.factors_datasets import load_factors
from deepchem.molnet.load_function.kinase_datasets import load_kinase
from deepchem.molnet.load_function.thermosol_datasets import load_thermosol
from deepchem.molnet.load_function.hppb_datasets import load_hppb
from deepchem.molnet.load_function.chembl25_datasets import load_chembl25
from deepchem.molnet.load_function.zinc15_datasets import load_zinc15
from deepchem.molnet.load_function.material_datasets.load_bandgap import load_bandgap
from deepchem.molnet.load_function.material_datasets.load_perovskite import load_perovskite
from deepchem.molnet.load_function.material_datasets.load_mp_formation_energy import load_mp_formation_energy
from deepchem.molnet.load_function.material_datasets.load_mp_metallicity import load_mp_metallicity
from deepchem.molnet.load_function.molnet_loader import featurizers, splitters, transformers, TransformerGenerator, _MolnetLoader
from deepchem.molnet.dnasim import simulate_motif_density_localization
from deepchem.molnet.dnasim import simulate_motif_counting
from deepchem.molnet.dnasim import simple_motif_embedding
from deepchem.molnet.dnasim import motif_density
from deepchem.molnet.dnasim import simulate_single_motif_detection
from deepchem.molnet.run_benchmark import run_benchmark
from deepchem.molnet import run_benchmark_models
| true | true |
f73794a235f314ddb7e307e688f102290a6a5311 | 654 | py | Python | ws/handler/event/enum/enable.py | fabaff/automate-ws | a9442f287692787e3f253e1ff23758bec8f3902e | [
"MIT"
] | null | null | null | ws/handler/event/enum/enable.py | fabaff/automate-ws | a9442f287692787e3f253e1ff23758bec8f3902e | [
"MIT"
] | 1 | 2021-12-21T11:34:47.000Z | 2021-12-21T11:34:47.000Z | ws/handler/event/enum/enable.py | fabaff/automate-ws | a9442f287692787e3f253e1ff23758bec8f3902e | [
"MIT"
] | 1 | 2021-12-21T10:10:13.000Z | 2021-12-21T10:10:13.000Z | import home
from ws.handler.event.enum import Handler as Parent
class Handler(Parent):
    """Presents ``home.event.enable`` events as an enabled/disabled state."""

    KLASS = home.event.enable.Event
    TEMPLATE = "event/enum.html"
    LABEL = "Detach logic is"
    ENABLED = "enabled"
    DISABLED = "disabled"

    def _get_str(self, e):
        """Return the display label for an enable event; unknown values pass through."""
        on, off = home.event.enable.Event.On, home.event.enable.Event.Off
        if e == on:
            return self.ENABLED
        return self.DISABLED if e == off else e

    def get_icon(self, e):
        """Return the Font Awesome toggle icon for an enable event; unknown values pass through."""
        on, off = home.event.enable.Event.On, home.event.enable.Event.Off
        if e == on:
            return "fas fa-toggle-on"
        return "fas fa-toggle-off" if e == off else e
| 24.222222 | 51 | 0.597859 | import home
from ws.handler.event.enum import Handler as Parent
class Handler(Parent):
KLASS = home.event.enable.Event
TEMPLATE = "event/enum.html"
LABEL = "Detach logic is"
ENABLED = "enabled"
DISABLED = "disabled"
def _get_str(self, e):
if e == home.event.enable.Event.On:
return self.ENABLED
elif e == home.event.enable.Event.Off:
return self.DISABLED
return e
def get_icon(self, e):
if e == home.event.enable.Event.On:
return "fas fa-toggle-on"
elif e == home.event.enable.Event.Off:
return "fas fa-toggle-off"
return e
| true | true |
f737954b6b6e59fd4ada184d6b50c10829ed9110 | 403 | py | Python | Exercicios em python/ex52.py | GabrielSantos25/Python | 208eec0144587aa4e0aa7fa00da29ffa0478eac8 | [
"MIT"
] | null | null | null | Exercicios em python/ex52.py | GabrielSantos25/Python | 208eec0144587aa4e0aa7fa00da29ffa0478eac8 | [
"MIT"
] | null | null | null | Exercicios em python/ex52.py | GabrielSantos25/Python | 208eec0144587aa4e0aa7fa00da29ffa0478eac8 | [
"MIT"
] | null | null | null | #Identificar números primos!
# Prime check: count the divisors of n while printing each candidate,
# coloured with ANSI escape codes (yellow = divisor, red = not a divisor).
n = int(input('Digite um número: '))
tot = 0  # how many divisors of n were found
for c in range(1, n + 1):
    if n % c == 0:
        print('\033[33m', end='')  # yellow: c divides n
        tot += 1
    else:
        print('\033[31m', end='')  # red: c is not a divisor
    print('{} '.format(c), end='')
# '\033[m' resets the terminal colour before the summary line.
print('\n\033[mO número {} foi divisível {} vezes'.format(n, tot))
# A prime has exactly two divisors: 1 and itself.
if tot == 2:
    print('Número primo!')
else:
    print('Não é número primo!')
| 25.1875 | 66 | 0.523573 |
n = int(input('Digite um número: '))
tot = 0
for c in range(1, n + 1):
if n % c == 0:
print('\033[33m', end='')
tot += 1
else:
print('\033[31m', end='')
print('{} '.format(c), end='')
print('\n\033[mO número {} foi divisível {} vezes'.format(n, tot))
if tot == 2:
print('Número primo!')
else:
print('Não é número primo!')
| true | true |
f73795a45cbd4951c7250875958b8d9bd03156db | 913 | py | Python | noloco/constants.py | noloco-io/python-sdk | 0ae6c8ff026abb60fdd2fc6eaf67f08f786f15a2 | [
"MIT"
] | null | null | null | noloco/constants.py | noloco-io/python-sdk | 0ae6c8ff026abb60fdd2fc6eaf67f08f786f15a2 | [
"MIT"
] | null | null | null | noloco/constants.py | noloco-io/python-sdk | 0ae6c8ff026abb60fdd2fc6eaf67f08f786f15a2 | [
"MIT"
] | null | null | null | ###############################################################################
# Field Types
###############################################################################
# String constants naming the supported field data types.
TEXT = 'TEXT'
DATE = 'DATE'
INTEGER = 'INTEGER'
DECIMAL = 'DECIMAL'
DURATION = 'DURATION'
BOOLEAN = 'BOOLEAN'
SINGLE_OPTION = 'SINGLE_OPTION'
MULTIPLE_OPTION = 'MULTIPLE_OPTION'
###############################################################################
# Relationship Types
###############################################################################
# Cardinality of relationships between records.
ONE_TO_ONE = 'ONE_TO_ONE'
ONE_TO_MANY = 'ONE_TO_MANY'
MANY_TO_ONE = 'MANY_TO_ONE'
MANY_TO_MANY = 'MANY_TO_MANY'
###############################################################################
# GraphQL Error Codes
###############################################################################
# Error code returned when GraphQL query validation fails.
GRAPHQL_VALIDATION_FAILED = 'GRAPHQL_VALIDATION_FAILED'
| 17.226415 | 79 | 0.341731 | true | true | |
f73795a96479817ec6ec09a28f1d10ecd495825d | 7,291 | py | Python | outrun/tests/test_filesystem/test_caching/test_service.py | Jacke/outrun | c67779b4c8c3f1095e84158b10a5307a443936a2 | [
"Apache-2.0"
] | 3,070 | 2020-07-14T21:43:05.000Z | 2022-03-30T05:10:35.000Z | outrun/tests/test_filesystem/test_caching/test_service.py | Jacke/outrun | c67779b4c8c3f1095e84158b10a5307a443936a2 | [
"Apache-2.0"
] | 17 | 2020-07-19T21:46:13.000Z | 2021-12-27T16:18:38.000Z | outrun/tests/test_filesystem/test_caching/test_service.py | Jacke/outrun | c67779b4c8c3f1095e84158b10a5307a443936a2 | [
"Apache-2.0"
] | 61 | 2020-07-23T23:34:00.000Z | 2022-02-13T01:28:25.000Z | import os
from pathlib import Path
import shutil
import stat
from unittest import mock
import pytest
from outrun.filesystem.common import Attributes
from outrun.filesystem.caching.common import Metadata
from outrun.filesystem.caching.prefetching import PrefetchSuggestion
from outrun.filesystem.caching.service import LocalCacheService
@pytest.fixture
def service():
    """Provide a fresh LocalCacheService for each test."""
    return LocalCacheService()
def test_get_metadata_error(service, tmp_path):
    """Metadata for a missing path carries the FileNotFoundError and no attrs."""
    meta = service.get_metadata(str(tmp_path / "nonexistent"))
    assert meta.attr is None
    assert meta.link is None
    assert isinstance(meta.error, FileNotFoundError)
def test_get_metadata_dir(service, tmp_path):
    """Metadata for a directory reports a directory mode and no link/error."""
    (tmp_path / "dir").mkdir()
    meta = service.get_metadata(str(tmp_path / "dir"))
    assert meta.error is None
    assert meta.link is None
    assert stat.S_ISDIR(meta.attr.st_mode)
def test_get_metadata_file(service, tmp_path):
    """Metadata for a regular file reports a regular-file mode and no link/error."""
    (tmp_path / "file").write_text("")
    meta = service.get_metadata(str(tmp_path / "file"))
    assert meta.error is None
    assert meta.link is None
    assert stat.S_ISREG(meta.attr.st_mode)
def test_get_metadata_symlink(service, tmp_path):
    """Metadata for a symlink reports link mode plus the (dangling) target path."""
    os.symlink(tmp_path / "nonexistent", tmp_path / "link")
    meta = service.get_metadata(str(tmp_path / "link"))
    assert meta.error is None
    assert meta.link == str(tmp_path / "nonexistent")
    assert stat.S_ISLNK(meta.attr.st_mode)
def test_changed_metadata(service, tmp_path):
    """get_changed_metadata returns exactly the entries whose metadata changed."""
    (tmp_path / "a").mkdir()
    (tmp_path / "b").mkdir()
    meta = {
        str(tmp_path / "a"): service.get_metadata(str(tmp_path / "a")),
        str(tmp_path / "b"): service.get_metadata(str(tmp_path / "b")),
        str(tmp_path / "c"): service.get_metadata(str(tmp_path / "c")),
    }
    # No changes yet
    assert list(service.get_changed_metadata(meta).keys()) == []
    # Make changes to metadata
    os.utime(tmp_path / "a", (0, 0))
    os.makedirs(tmp_path / "c")
    # Expect to receive changes
    changed_meta = service.get_changed_metadata(meta)
    assert list(changed_meta.keys()) == [
        str(tmp_path / "a"),
        str(tmp_path / "c"),
    ]
    assert changed_meta[str(tmp_path / "a")] == service.get_metadata(tmp_path / "a")
    assert changed_meta[str(tmp_path / "c")] == service.get_metadata(tmp_path / "c")
def test_access_time_changes_ignored(service):
    """A metadata diff that only touches st_atime is not reported as a change."""
    metadata = service.get_metadata("/")
    cached_metadata = {"/": metadata}
    with mock.patch(
        "outrun.filesystem.caching.service.LocalCacheService.get_metadata"
    ) as mock_meta:
        # Serve back the same attributes with only the access time altered.
        new_attr = Attributes(**metadata.attr.__dict__)
        new_attr.st_atime = 0.0
        new_metadata = Metadata(attr=new_attr)
        mock_meta.return_value = new_metadata
        assert service.get_changed_metadata(cached_metadata) == {}
def test_readfile(service, tmp_path):
    """readfile returns current contents, and checksums track the data."""
    (tmp_path / "file").write_text("abc")
    contents = service.readfile(str(tmp_path / "file"))
    assert contents.data == b"abc"
    (tmp_path / "file").write_text("def")
    new_contents = service.readfile(str(tmp_path / "file"))
    assert new_contents.data == b"def"
    assert contents.checksum != new_contents.checksum
def test_readfile_conditional(service, tmp_path):
    """readfile_conditional returns None while the caller's checksum still matches."""
    (tmp_path / "file").write_text("abc")
    contents = service.readfile_conditional(str(tmp_path / "file"), "")
    assert contents.data == b"abc"
    new_contents = service.readfile_conditional(
        str(tmp_path / "file"), contents.checksum
    )
    assert new_contents is None
    (tmp_path / "file").write_text("def")
    new_contents = service.readfile_conditional(
        str(tmp_path / "file"), contents.checksum
    )
    assert new_contents.data == b"def"
    assert contents.checksum != new_contents.checksum
def test_machine_id_consistent(service):
    """The app-specific machine id is stable across calls."""
    machine_id_1 = service.get_app_specific_machine_id()
    machine_id_2 = service.get_app_specific_machine_id()
    assert machine_id_1 == machine_id_2
def test_original_machine_id_not_being_exposed(service):
    """The derived id must differ from the raw /etc/machine-id value."""
    machine_id = service.get_app_specific_machine_id()
    assert machine_id.strip() != Path("/etc/machine-id").read_text().strip()
def test_get_metadata_prefetch_symlink(service, tmp_path):
    """Fetching a symlink's metadata also prefetches its (missing) target."""
    os.symlink("foo", tmp_path / "link")
    metadata, prefetches = service.get_metadata_prefetch(str(tmp_path / "link"))
    assert metadata.link is not None
    assert len(prefetches) == 1
    assert prefetches[0].path == str(tmp_path / "foo")
    assert isinstance(prefetches[0].metadata.error, FileNotFoundError)
def test_get_metadata_prefetch_symlink_with_previously_fetched_target(
    service, tmp_path
):
    """A target already fetched once is not prefetched again."""
    os.symlink("foo", tmp_path / "link")
    service.get_metadata(str(tmp_path / "foo"))
    _metadata, prefetches = service.get_metadata_prefetch(str(tmp_path / "link"))
    assert len(prefetches) == 0
def test_readfile_prefetch_executable(service):
    """Reading an executable suggests prefetching its shared libraries."""
    # NOTE(review): relies on `ssh` being installed and dynamically linked.
    sh_path = shutil.which("ssh")
    _metadata, prefetches = service.readfile_prefetch(sh_path)
    assert len(prefetches) > 0
    assert all(".so" in p.path for p in prefetches)
def test_readfile_prefetch_executable_with_previously_fetched_contents():
    """Library contents are suggested only until marked as already fetched."""
    sh_path = shutil.which("ssh")
    service = LocalCacheService()
    _metadata, prefetches = service.readfile_prefetch(sh_path)
    assert any(p.contents for p in prefetches)
    service = LocalCacheService()
    service.mark_previously_fetched_contents([p.path for p in prefetches])
    _metadata, prefetches = service.readfile_prefetch(sh_path)
    assert not any(p.contents for p in prefetches)
def test_prefetch_inside_prefetchable_paths(service, tmp_path):
    """Prefetching happens for paths under the configured prefetchable roots."""
    os.symlink("foo", tmp_path / "link")
    service.set_prefetchable_paths([str(tmp_path)])
    _metadata, prefetches = service.get_metadata_prefetch(str(tmp_path / "link"))
    assert len(prefetches) != 0
def test_prefetch_outside_prefetchable_paths(service, tmp_path):
    """No prefetching for paths outside the configured prefetchable roots."""
    os.symlink("foo", tmp_path / "link")
    service.set_prefetchable_paths(["/nonexistent"])
    _metadata, prefetches = service.get_metadata_prefetch(str(tmp_path / "link"))
    assert len(prefetches) == 0
def test_get_metadata_prefetch_failure_handling(service, tmp_path):
    """A failing prefetch heuristic must not break get_metadata_prefetch."""
    with mock.patch(
        "outrun.filesystem.caching.prefetching.file_access"
    ) as mock_prefetch:
        mock_prefetch.side_effect = Exception()
        service.get_metadata_prefetch(str(tmp_path))
def test_readfile_prefetch_failure_handling(service, tmp_path):
    """A failing prefetch heuristic must not break readfile_prefetch."""
    (tmp_path / "foo").write_text("bar")
    with mock.patch("outrun.filesystem.caching.prefetching.file_read") as mock_prefetch:
        mock_prefetch.side_effect = Exception()
        service.readfile_prefetch(str(tmp_path / "foo"))
def test_prefetching_unreadable_file(service, tmp_path):
    """Unreadable suggested files still get their metadata prefetched."""
    with mock.patch(
        "outrun.filesystem.caching.prefetching.file_access"
    ) as mock_prefetch:
        mock_prefetch.return_value = [
            PrefetchSuggestion(str(tmp_path / "nonexistent"), contents=True)
        ]
        _metadata, prefetches = service.get_metadata_prefetch("/")
        # Assert that the metadata (non-contents) are still successfully prefetched
        assert len(prefetches) == 1
        assert isinstance(prefetches[0].metadata.error, FileNotFoundError)
        assert prefetches[0].path == str(tmp_path / "nonexistent")
| 29.881148 | 88 | 0.712934 | import os
from pathlib import Path
import shutil
import stat
from unittest import mock
import pytest
from outrun.filesystem.common import Attributes
from outrun.filesystem.caching.common import Metadata
from outrun.filesystem.caching.prefetching import PrefetchSuggestion
from outrun.filesystem.caching.service import LocalCacheService
@pytest.fixture
def service():
return LocalCacheService()
def test_get_metadata_error(service, tmp_path):
meta = service.get_metadata(str(tmp_path / "nonexistent"))
assert meta.attr is None
assert meta.link is None
assert isinstance(meta.error, FileNotFoundError)
def test_get_metadata_dir(service, tmp_path):
(tmp_path / "dir").mkdir()
meta = service.get_metadata(str(tmp_path / "dir"))
assert meta.error is None
assert meta.link is None
assert stat.S_ISDIR(meta.attr.st_mode)
def test_get_metadata_file(service, tmp_path):
(tmp_path / "file").write_text("")
meta = service.get_metadata(str(tmp_path / "file"))
assert meta.error is None
assert meta.link is None
assert stat.S_ISREG(meta.attr.st_mode)
def test_get_metadata_symlink(service, tmp_path):
os.symlink(tmp_path / "nonexistent", tmp_path / "link")
meta = service.get_metadata(str(tmp_path / "link"))
assert meta.error is None
assert meta.link == str(tmp_path / "nonexistent")
assert stat.S_ISLNK(meta.attr.st_mode)
def test_changed_metadata(service, tmp_path):
(tmp_path / "a").mkdir()
(tmp_path / "b").mkdir()
meta = {
str(tmp_path / "a"): service.get_metadata(str(tmp_path / "a")),
str(tmp_path / "b"): service.get_metadata(str(tmp_path / "b")),
str(tmp_path / "c"): service.get_metadata(str(tmp_path / "c")),
}
assert list(service.get_changed_metadata(meta).keys()) == []
os.utime(tmp_path / "a", (0, 0))
os.makedirs(tmp_path / "c")
changed_meta = service.get_changed_metadata(meta)
assert list(changed_meta.keys()) == [
str(tmp_path / "a"),
str(tmp_path / "c"),
]
assert changed_meta[str(tmp_path / "a")] == service.get_metadata(tmp_path / "a")
assert changed_meta[str(tmp_path / "c")] == service.get_metadata(tmp_path / "c")
def test_access_time_changes_ignored(service):
metadata = service.get_metadata("/")
cached_metadata = {"/": metadata}
with mock.patch(
"outrun.filesystem.caching.service.LocalCacheService.get_metadata"
) as mock_meta:
new_attr = Attributes(**metadata.attr.__dict__)
new_attr.st_atime = 0.0
new_metadata = Metadata(attr=new_attr)
mock_meta.return_value = new_metadata
assert service.get_changed_metadata(cached_metadata) == {}
def test_readfile(service, tmp_path):
(tmp_path / "file").write_text("abc")
contents = service.readfile(str(tmp_path / "file"))
assert contents.data == b"abc"
(tmp_path / "file").write_text("def")
new_contents = service.readfile(str(tmp_path / "file"))
assert new_contents.data == b"def"
assert contents.checksum != new_contents.checksum
def test_readfile_conditional(service, tmp_path):
(tmp_path / "file").write_text("abc")
contents = service.readfile_conditional(str(tmp_path / "file"), "")
assert contents.data == b"abc"
new_contents = service.readfile_conditional(
str(tmp_path / "file"), contents.checksum
)
assert new_contents is None
(tmp_path / "file").write_text("def")
new_contents = service.readfile_conditional(
str(tmp_path / "file"), contents.checksum
)
assert new_contents.data == b"def"
assert contents.checksum != new_contents.checksum
def test_machine_id_consistent(service):
machine_id_1 = service.get_app_specific_machine_id()
machine_id_2 = service.get_app_specific_machine_id()
assert machine_id_1 == machine_id_2
def test_original_machine_id_not_being_exposed(service):
machine_id = service.get_app_specific_machine_id()
assert machine_id.strip() != Path("/etc/machine-id").read_text().strip()
def test_get_metadata_prefetch_symlink(service, tmp_path):
os.symlink("foo", tmp_path / "link")
metadata, prefetches = service.get_metadata_prefetch(str(tmp_path / "link"))
assert metadata.link is not None
assert len(prefetches) == 1
assert prefetches[0].path == str(tmp_path / "foo")
assert isinstance(prefetches[0].metadata.error, FileNotFoundError)
def test_get_metadata_prefetch_symlink_with_previously_fetched_target(
service, tmp_path
):
os.symlink("foo", tmp_path / "link")
service.get_metadata(str(tmp_path / "foo"))
_metadata, prefetches = service.get_metadata_prefetch(str(tmp_path / "link"))
assert len(prefetches) == 0
def test_readfile_prefetch_executable(service):
sh_path = shutil.which("ssh")
_metadata, prefetches = service.readfile_prefetch(sh_path)
assert len(prefetches) > 0
assert all(".so" in p.path for p in prefetches)
def test_readfile_prefetch_executable_with_previously_fetched_contents():
sh_path = shutil.which("ssh")
service = LocalCacheService()
_metadata, prefetches = service.readfile_prefetch(sh_path)
assert any(p.contents for p in prefetches)
service = LocalCacheService()
service.mark_previously_fetched_contents([p.path for p in prefetches])
_metadata, prefetches = service.readfile_prefetch(sh_path)
assert not any(p.contents for p in prefetches)
def test_prefetch_inside_prefetchable_paths(service, tmp_path):
os.symlink("foo", tmp_path / "link")
service.set_prefetchable_paths([str(tmp_path)])
_metadata, prefetches = service.get_metadata_prefetch(str(tmp_path / "link"))
assert len(prefetches) != 0
def test_prefetch_outside_prefetchable_paths(service, tmp_path):
os.symlink("foo", tmp_path / "link")
service.set_prefetchable_paths(["/nonexistent"])
_metadata, prefetches = service.get_metadata_prefetch(str(tmp_path / "link"))
assert len(prefetches) == 0
def test_get_metadata_prefetch_failure_handling(service, tmp_path):
with mock.patch(
"outrun.filesystem.caching.prefetching.file_access"
) as mock_prefetch:
mock_prefetch.side_effect = Exception()
service.get_metadata_prefetch(str(tmp_path))
def test_readfile_prefetch_failure_handling(service, tmp_path):
(tmp_path / "foo").write_text("bar")
with mock.patch("outrun.filesystem.caching.prefetching.file_read") as mock_prefetch:
mock_prefetch.side_effect = Exception()
service.readfile_prefetch(str(tmp_path / "foo"))
def test_prefetching_unreadable_file(service, tmp_path):
with mock.patch(
"outrun.filesystem.caching.prefetching.file_access"
) as mock_prefetch:
mock_prefetch.return_value = [
PrefetchSuggestion(str(tmp_path / "nonexistent"), contents=True)
]
_metadata, prefetches = service.get_metadata_prefetch("/")
assert len(prefetches) == 1
assert isinstance(prefetches[0].metadata.error, FileNotFoundError)
assert prefetches[0].path == str(tmp_path / "nonexistent")
| true | true |
f73796022a8a1b4123640ee7560369d57637ddb7 | 352 | py | Python | PARTE_1/EX012/index.py | 0Fernando0/CursoPython | 1dcfdb6556e41c6dedcba2857aa4382b2f81aa59 | [
"MIT"
] | null | null | null | PARTE_1/EX012/index.py | 0Fernando0/CursoPython | 1dcfdb6556e41c6dedcba2857aa4382b2f81aa59 | [
"MIT"
] | null | null | null | PARTE_1/EX012/index.py | 0Fernando0/CursoPython | 1dcfdb6556e41c6dedcba2857aa4382b2f81aa59 | [
"MIT"
] | null | null | null | '''
script reduz o valor do um produto com base no desconto(%)
'''
# Read the product price and discount percentage, then show the discounted price.
produto = float(input('preço do produto: '))
desconto = float(input('porcentagem de desconto: '))
# New price = price minus (price / 100) * percentage.
novo_preço = produto - ((produto / 100) * desconto)
print(f'o produto custa {produto:.2f}')
# NOTE(review): this prints the discount *percentage*, although the label
# reads "valor do desconto" (discount amount) — confirm which was intended.
print(f'o valor do desconto é {desconto:.2f}')
print(f'o produto custará {novo_preço:.2f}')
produto = float(input('preço do produto: '))
desconto = float(input('porcentagem de desconto: '))
novo_preço = produto - ((produto / 100) * desconto)
print(f'o produto custa {produto:.2f}')
print(f'o valor do desconto é {desconto:.2f}')
print(f'o produto custará {novo_preço:.2f}') | true | true |
f73796113f70b1acc1ee507553e29b09111da657 | 1,866 | py | Python | wiki/views.py | javiermms/makewiki | 53cd7d22a36608f1edabb5199262cbb36c75ffc1 | [
"MIT"
] | null | null | null | wiki/views.py | javiermms/makewiki | 53cd7d22a36608f1edabb5199262cbb36c75ffc1 | [
"MIT"
] | 5 | 2021-03-19T03:12:58.000Z | 2022-02-10T11:07:11.000Z | wiki/views.py | javiermms/makewiki | 53cd7d22a36608f1edabb5199262cbb36c75ffc1 | [
"MIT"
] | null | null | null | from django.shortcuts import render
from wiki.models import Page
from django.views.generic.list import ListView
from django.views.generic.detail import DetailView
# Create your views here.
class PageList(ListView):
    """Homepage view that lists wiki pages.

    NOTE(review): ``get`` returns only the five most recently published pages
    and renders ``wiki/index.html``; an earlier challenge note asked for a
    template named ``list.html`` — confirm which template is intended.
    """
    model = Page
    def get(self, request):
        """ Render the five most recently published pages. """
        # assumes Page has a `pub_date` field — newest first, capped at 5
        latest_question_list = Page.objects.order_by('-pub_date')[:5]
        context = {'latest_question_list': latest_question_list}
        return render(request, 'wiki/index.html', context)
class PageDetailView(DetailView):
    """Detail view for a single wiki page, looked up by slug.

    NOTE(review): both handlers below are unimplemented stubs and return
    ``None``, so Django will fail when this view is routed. The original
    challenge notes asked for: GET rendering a ``page.html`` template, and
    POST validating a ``PageForm`` (from forms.py), saving on success or
    re-displaying errors, redirecting via ``reverse`` and flashing a
    success message with Django Messages.
    """
    model = Page
    def get(self, request, slug):
        """ Return a specific wiki page by slug (stub). """
        # TODO: render the Page matching `slug` (e.g. via a `page.html` template).
        pass
    def post(self, request, slug):
        # TODO: validate submitted form data; save and redirect, or show errors.
        pass
| 38.081633 | 103 | 0.682744 | from django.shortcuts import render
from wiki.models import Page
from django.views.generic.list import ListView
from django.views.generic.detail import DetailView
class PageList(ListView):
model = Page
def get(self, request):
latest_question_list = Page.objects.order_by('-pub_date')[:5]
context = {'latest_question_list': latest_question_list}
return render(request, 'wiki/index.html', context)
class PageDetailView(DetailView):
model = Page
def get(self, request, slug):
pass
def post(self, request, slug):
pass
| true | true |
f73797de561c0f93602ad3a48a3e1e59cae031c5 | 4,535 | py | Python | downstream/OpenPCDet/pcdet/utils/calibration_kitti.py | YurongYou/MODEST | cfc0465ed737f6c3166e6b5d08231880073b4552 | [
"MIT"
] | 5 | 2022-03-31T02:30:06.000Z | 2022-03-31T12:34:08.000Z | downstream/OpenPCDet/pcdet/utils/calibration_kitti.py | YurongYou/MODEST | cfc0465ed737f6c3166e6b5d08231880073b4552 | [
"MIT"
] | null | null | null | downstream/OpenPCDet/pcdet/utils/calibration_kitti.py | YurongYou/MODEST | cfc0465ed737f6c3166e6b5d08231880073b4552 | [
"MIT"
] | null | null | null | import numpy as np
def get_calib_from_file(calib_file):
    """Parse a KITTI-format calibration file into its matrices.

    Relies on the standard KITTI layout: P2 on line index 2, P3 on 3,
    R0_rect on 4 and Tr_velo_to_cam on 5.  Each line has the form
    "<key>: v0 v1 ...", so the leading key token is dropped before the
    numbers are parsed as float32.

    :param calib_file: path to the calibration text file
    :return: dict with 'P2' (3x4), 'P3' (3x4), 'R0' (3x3), 'Tr_velo2cam' (3x4)
    """
    with open(calib_file) as fh:
        raw_lines = fh.readlines()

    def matrix_at(line_no, rows, cols):
        # Drop the "<key>:" token, parse the remainder, and reshape.
        fields = raw_lines[line_no].strip().split(' ')[1:]
        return np.array(fields, dtype=np.float32).reshape(rows, cols)

    return {'P2': matrix_at(2, 3, 4),
            'P3': matrix_at(3, 3, 4),
            'R0': matrix_at(4, 3, 3),
            'Tr_velo2cam': matrix_at(5, 3, 4)}
class Calibration(object):
    """Camera/LiDAR calibration helper in the KITTI convention.

    Holds the P2 projection matrix, the R0 rectification rotation and the
    Tr_velo_to_cam extrinsics, and converts points between the LiDAR frame,
    the rectified camera frame ("rect") and image pixel coordinates.
    """
    def __init__(self, calib_file):
        # Accept either a path to a KITTI calibration file or a pre-parsed
        # dict with keys 'P2', 'R0' and 'Tr_velo2cam'.
        if not isinstance(calib_file, dict):
            calib = get_calib_from_file(calib_file)
        else:
            calib = calib_file

        self.P2 = calib['P2']  # 3 x 4 projection matrix (rect camera -> image)
        self.R0 = calib['R0']  # 3 x 3 rectification rotation
        self.V2C = calib['Tr_velo2cam']  # 3 x 4 LiDAR -> camera transform

        # Camera intrinsics and extrinsics derived from P2
        self.cu = self.P2[0, 2]  # principal point, u
        self.cv = self.P2[1, 2]  # principal point, v
        self.fu = self.P2[0, 0]  # focal length, u
        self.fv = self.P2[1, 1]  # focal length, v
        self.tx = self.P2[0, 3] / (-self.fu)  # baseline offset along x
        self.ty = self.P2[1, 3] / (-self.fv)  # baseline offset along y

    def cart_to_hom(self, pts):
        """Append a column of ones (Cartesian -> homogeneous coordinates).

        :param pts: (N, 3 or 2)
        :return pts_hom: (N, 4 or 3)
        """
        pts_hom = np.hstack((pts, np.ones((pts.shape[0], 1), dtype=np.float32)))
        return pts_hom

    def rect_to_lidar(self, pts_rect):
        """Transform points from the rectified camera frame back to LiDAR.

        Pads R0 and V2C to 4x4 and inverts their composition.

        :param pts_rect: (N, 3)
        :return pts_lidar: (N, 3)
        """
        pts_rect_hom = self.cart_to_hom(pts_rect)  # (N, 4)
        R0_ext = np.hstack((self.R0, np.zeros((3, 1), dtype=np.float32)))  # (3, 4)
        R0_ext = np.vstack((R0_ext, np.zeros((1, 4), dtype=np.float32)))  # (4, 4)
        R0_ext[3, 3] = 1
        V2C_ext = np.vstack((self.V2C, np.zeros((1, 4), dtype=np.float32)))  # (4, 4)
        V2C_ext[3, 3] = 1

        pts_lidar = np.dot(pts_rect_hom, np.linalg.inv(np.dot(R0_ext, V2C_ext).T))
        return pts_lidar[:, 0:3]

    def lidar_to_rect(self, pts_lidar):
        """Transform points from the LiDAR frame to the rectified camera frame.

        :param pts_lidar: (N, 3)
        :return pts_rect: (N, 3)
        """
        pts_lidar_hom = self.cart_to_hom(pts_lidar)
        pts_rect = np.dot(pts_lidar_hom, np.dot(self.V2C.T, self.R0.T))
        return pts_rect

    def rect_to_img(self, pts_rect):
        """Project rectified-frame points onto the image plane.

        :param pts_rect: (N, 3)
        :return pts_img: (N, 2) pixel coordinates
        :return pts_rect_depth: (N,) depth in the rect camera frame
        """
        pts_rect_hom = self.cart_to_hom(pts_rect)
        pts_2d_hom = np.dot(pts_rect_hom, self.P2.T)
        # NOTE(review): perspective divide by z -- points with z == 0 yield
        # inf/nan; callers are expected to pass points in front of the camera.
        pts_img = (pts_2d_hom[:, 0:2].T / pts_rect_hom[:, 2]).T  # (N, 2)
        pts_rect_depth = pts_2d_hom[:, 2] - self.P2.T[3, 2]  # depth in rect camera coord
        return pts_img, pts_rect_depth

    def lidar_to_img(self, pts_lidar):
        """Project LiDAR-frame points onto the image plane.

        :param pts_lidar: (N, 3)
        :return pts_img: (N, 2)
        :return pts_depth: (N,)
        """
        pts_rect = self.lidar_to_rect(pts_lidar)
        pts_img, pts_depth = self.rect_to_img(pts_rect)
        return pts_img, pts_depth

    def img_to_rect(self, u, v, depth_rect):
        """Back-project pixels with known depth into the rectified frame.

        :param u: (N) pixel column
        :param v: (N) pixel row
        :param depth_rect: (N) depth in the rect camera frame
        :return pts_rect: (N, 3)
        """
        x = ((u - self.cu) * depth_rect) / self.fu + self.tx
        y = ((v - self.cv) * depth_rect) / self.fv + self.ty
        pts_rect = np.concatenate((x.reshape(-1, 1), y.reshape(-1, 1), depth_rect.reshape(-1, 1)), axis=1)
        return pts_rect

    def corners3d_to_img_boxes(self, corners3d):
        """Project 3D box corners into axis-aligned image bounding boxes.

        :param corners3d: (N, 8, 3) corners in rect coordinate
        :return: boxes: (N, 4) [x1, y1, x2, y2] in image coordinates
        :return: boxes_corner: (N, 8, 2) per-corner [xi, yi] image coordinates
        """
        sample_num = corners3d.shape[0]
        corners3d_hom = np.concatenate((corners3d, np.ones((sample_num, 8, 1))), axis=2)  # (N, 8, 4)

        img_pts = np.matmul(corners3d_hom, self.P2.T)  # (N, 8, 3)

        x, y = img_pts[:, :, 0] / img_pts[:, :, 2], img_pts[:, :, 1] / img_pts[:, :, 2]
        x1, y1 = np.min(x, axis=1), np.min(y, axis=1)
        x2, y2 = np.max(x, axis=1), np.max(y, axis=1)

        # Axis-aligned box = min/max over the 8 projected corners.
        boxes = np.concatenate((x1.reshape(-1, 1), y1.reshape(-1, 1), x2.reshape(-1, 1), y2.reshape(-1, 1)), axis=1)
        boxes_corner = np.concatenate((x.reshape(-1, 8, 1), y.reshape(-1, 8, 1)), axis=2)

        return boxes, boxes_corner
| 35.708661 | 116 | 0.543991 | import numpy as np
def get_calib_from_file(calib_file):
with open(calib_file) as f:
lines = f.readlines()
obj = lines[2].strip().split(' ')[1:]
P2 = np.array(obj, dtype=np.float32)
obj = lines[3].strip().split(' ')[1:]
P3 = np.array(obj, dtype=np.float32)
obj = lines[4].strip().split(' ')[1:]
R0 = np.array(obj, dtype=np.float32)
obj = lines[5].strip().split(' ')[1:]
Tr_velo_to_cam = np.array(obj, dtype=np.float32)
return {'P2': P2.reshape(3, 4),
'P3': P3.reshape(3, 4),
'R0': R0.reshape(3, 3),
'Tr_velo2cam': Tr_velo_to_cam.reshape(3, 4)}
class Calibration(object):
def __init__(self, calib_file):
if not isinstance(calib_file, dict):
calib = get_calib_from_file(calib_file)
else:
calib = calib_file
self.P2 = calib['P2']
self.R0 = calib['R0']
self.V2C = calib['Tr_velo2cam']
self.cu = self.P2[0, 2]
self.cv = self.P2[1, 2]
self.fu = self.P2[0, 0]
self.fv = self.P2[1, 1]
self.tx = self.P2[0, 3] / (-self.fu)
self.ty = self.P2[1, 3] / (-self.fv)
def cart_to_hom(self, pts):
pts_hom = np.hstack((pts, np.ones((pts.shape[0], 1), dtype=np.float32)))
return pts_hom
def rect_to_lidar(self, pts_rect):
pts_rect_hom = self.cart_to_hom(pts_rect)
R0_ext = np.hstack((self.R0, np.zeros((3, 1), dtype=np.float32)))
R0_ext = np.vstack((R0_ext, np.zeros((1, 4), dtype=np.float32)))
R0_ext[3, 3] = 1
V2C_ext = np.vstack((self.V2C, np.zeros((1, 4), dtype=np.float32)))
V2C_ext[3, 3] = 1
pts_lidar = np.dot(pts_rect_hom, np.linalg.inv(np.dot(R0_ext, V2C_ext).T))
return pts_lidar[:, 0:3]
def lidar_to_rect(self, pts_lidar):
pts_lidar_hom = self.cart_to_hom(pts_lidar)
pts_rect = np.dot(pts_lidar_hom, np.dot(self.V2C.T, self.R0.T))
return pts_rect
def rect_to_img(self, pts_rect):
pts_rect_hom = self.cart_to_hom(pts_rect)
pts_2d_hom = np.dot(pts_rect_hom, self.P2.T)
pts_img = (pts_2d_hom[:, 0:2].T / pts_rect_hom[:, 2]).T
pts_rect_depth = pts_2d_hom[:, 2] - self.P2.T[3, 2]
return pts_img, pts_rect_depth
def lidar_to_img(self, pts_lidar):
pts_rect = self.lidar_to_rect(pts_lidar)
pts_img, pts_depth = self.rect_to_img(pts_rect)
return pts_img, pts_depth
def img_to_rect(self, u, v, depth_rect):
x = ((u - self.cu) * depth_rect) / self.fu + self.tx
y = ((v - self.cv) * depth_rect) / self.fv + self.ty
pts_rect = np.concatenate((x.reshape(-1, 1), y.reshape(-1, 1), depth_rect.reshape(-1, 1)), axis=1)
return pts_rect
def corners3d_to_img_boxes(self, corners3d):
sample_num = corners3d.shape[0]
corners3d_hom = np.concatenate((corners3d, np.ones((sample_num, 8, 1))), axis=2)
img_pts = np.matmul(corners3d_hom, self.P2.T)
x, y = img_pts[:, :, 0] / img_pts[:, :, 2], img_pts[:, :, 1] / img_pts[:, :, 2]
x1, y1 = np.min(x, axis=1), np.min(y, axis=1)
x2, y2 = np.max(x, axis=1), np.max(y, axis=1)
boxes = np.concatenate((x1.reshape(-1, 1), y1.reshape(-1, 1), x2.reshape(-1, 1), y2.reshape(-1, 1)), axis=1)
boxes_corner = np.concatenate((x.reshape(-1, 8, 1), y.reshape(-1, 8, 1)), axis=2)
return boxes, boxes_corner
| true | true |
f7379885b7e68ab94c856752a65a99f0cb05dfaf | 326 | py | Python | Python Spider/requests/07 requests timeout.py | CodingGorit/Coding-with-Python | b0f1d5d704b816a85b0ae57b46d00314de2a67b9 | [
"Apache-2.0"
] | 1 | 2020-01-31T15:57:29.000Z | 2020-01-31T15:57:29.000Z | Python Spider/requests/07 requests timeout.py | CodingGorit/Coding-with-Python | b0f1d5d704b816a85b0ae57b46d00314de2a67b9 | [
"Apache-2.0"
] | null | null | null | Python Spider/requests/07 requests timeout.py | CodingGorit/Coding-with-Python | b0f1d5d704b816a85b0ae57b46d00314de2a67b9 | [
"Apache-2.0"
] | null | null | null | #!/usr/bin/python
# -*- coding: utf-8 -*-
#@File: 07 requests timeout.py
#@author: Gorit
#@contact: gorit@qq.com
#@time: 2020/5/25 20:52
'''
Timeout handling: the ``timeout`` parameter guards against a server that
never responds, raising an exception instead of blocking forever.
'''
import requests
# Time out after 1 s (covers both connect and read); pass timeout=None to wait forever.
r = requests.get("https://httpbin.org/get", timeout = 1)
print(r.status_code) | 20.375 | 56 | 0.662577 |
import requests
r = requests.get("https://httpbin.org/get", timeout = 1)
print(r.status_code) | true | true |
f737990a9b720caa889d499551a9a922db2b4c1a | 884 | bzl | Python | src/main/kotlin/org/wfanet/measurement/gcloud/spanner/testing/macros.bzl | VideoAmp/cross-media-measurement | 66fd8f1a81b7d93f23d769f7be76d529b6a8d222 | [
"Apache-2.0"
] | 4 | 2021-05-17T17:48:16.000Z | 2021-05-17T18:03:59.000Z | src/main/kotlin/org/wfanet/measurement/gcloud/spanner/testing/macros.bzl | VideoAmp/cross-media-measurement | 66fd8f1a81b7d93f23d769f7be76d529b6a8d222 | [
"Apache-2.0"
] | null | null | null | src/main/kotlin/org/wfanet/measurement/gcloud/spanner/testing/macros.bzl | VideoAmp/cross-media-measurement | 66fd8f1a81b7d93f23d769f7be76d529b6a8d222 | [
"Apache-2.0"
] | 1 | 2022-02-21T09:39:05.000Z | 2022-02-21T09:39:05.000Z | # Copyright 2020 The Cross-Media Measurement Authors
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Macros for Spanner test targets."""
load("@io_bazel_rules_kotlin//kotlin:kotlin.bzl", "kt_jvm_test")
def spanner_emulator_test(name, data = [], **kwargs):
    """Defines a kt_jvm_test that bundles the Cloud Spanner emulator binary.

    Args:
      name: name of the test target.
      data: additional data dependencies; the emulator target is always appended.
      **kwargs: forwarded unchanged to kt_jvm_test.
    """
    kt_jvm_test(
        name = name,
        data = data + ["@cloud_spanner_emulator//:emulator"],
        **kwargs
    )
| 35.36 | 74 | 0.722851 |
load("@io_bazel_rules_kotlin//kotlin:kotlin.bzl", "kt_jvm_test")
def spanner_emulator_test(name, data = [], **kwargs):
kt_jvm_test(
name = name,
data = data + ["@cloud_spanner_emulator//:emulator"],
**kwargs
)
| true | true |
f737992c266c7cceb4ca208322c4341e502d2914 | 503 | py | Python | data_analysis_byPro/Day07-01.py | yunjung-lee/class_python_data | 67ceab73e67ec63d408894a6ab016a8d25a4e30b | [
"MIT"
] | null | null | null | data_analysis_byPro/Day07-01.py | yunjung-lee/class_python_data | 67ceab73e67ec63d408894a6ab016a8d25a4e30b | [
"MIT"
] | null | null | null | data_analysis_byPro/Day07-01.py | yunjung-lee/class_python_data | 67ceab73e67ec63d408894a6ab016a8d25a4e30b | [
"MIT"
] | null | null | null | # JSON 데이터도 처리하기.
import json

# Read the JSON document.  NOTE: the handle is deliberately left open here;
# the script closes it after printing, further down.
filereader = open('TEST01.json', 'r', encoding='utf-8')
jsonDic = json.load(filereader)

# The document has a single top-level key whose value is a list of records.
csvName = list(jsonDic.keys())
jsonList = jsonDic[csvName[0]]

# First CSV row: the column names, taken from the first record's keys.
header_list = list(jsonList[0].keys())
csvList = [header_list]

# One row per record, values ordered to match the header.
for record in jsonList:
    csvList.append([record[column] for column in header_list])

print(csvList)
filereader.close() | 21.869565 | 55 | 0.673956 |
import json
jsonDic = {}
jsonList = []
csvList = []
filereader = open('TEST01.json', 'r', encoding='utf-8')
jsonDic = json.load(filereader)
csvName = list(jsonDic.keys())
jsonList = jsonDic[ csvName[0]]
header_list = list(jsonList[0].keys())
csvList.append(header_list)
for tmpDic in jsonList :
tmpList = []
for header in header_list :
data = tmpDic[header]
tmpList.append(data)
csvList.append(tmpList)
print(csvList)
filereader.close() | true | true |
f7379b0878bc82b5b66fe61e3324022d7479dcbf | 65 | py | Python | ephypype/pipelines/__init__.py | jasmainak/ephypype | 257603cbb099cef7847a96c8eb141332fb85ebfa | [
"BSD-3-Clause"
] | null | null | null | ephypype/pipelines/__init__.py | jasmainak/ephypype | 257603cbb099cef7847a96c8eb141332fb85ebfa | [
"BSD-3-Clause"
] | null | null | null | ephypype/pipelines/__init__.py | jasmainak/ephypype | 257603cbb099cef7847a96c8eb141332fb85ebfa | [
"BSD-3-Clause"
] | null | null | null | from . import brain_vision_to_conmat
from . import ts_to_conmat
| 16.25 | 36 | 0.830769 | from . import brain_vision_to_conmat
from . import ts_to_conmat
| true | true |
f7379b356a89fe71720ab4c69c58e4366aa15b6b | 2,157 | py | Python | modules/python/package/setup.py | BananaHemic/opencv | 13d88239b7744a2097d0e1f343457da5ce276e90 | [
"Apache-2.0"
] | 3 | 2021-08-20T08:35:42.000Z | 2021-08-20T08:43:07.000Z | modules/python/package/setup.py | BananaHemic/opencv | 13d88239b7744a2097d0e1f343457da5ce276e90 | [
"Apache-2.0"
] | 1 | 2021-02-13T09:35:19.000Z | 2021-02-13T11:23:50.000Z | modules/python/package/setup.py | BananaHemic/opencv | 13d88239b7744a2097d0e1f343457da5ce276e90 | [
"Apache-2.0"
] | 4 | 2021-02-01T11:07:17.000Z | 2021-11-26T08:11:23.000Z | import os
import sys
import platform
import setuptools
SCRIPT_DIR=os.path.dirname(os.path.abspath(__file__))
def main():
    """Configure and run the setuptools build for the OpenCV Python bindings."""
    # Build from the directory containing this script, regardless of CWD.
    os.chdir(SCRIPT_DIR)

    # Version may be overridden via the environment (e.g. by CI).
    package_version = os.environ.get('OPENCV_VERSION', '4.5.1')  # TODO

    trove_classifiers = [
        'Development Status :: 5 - Production/Stable',
        'Environment :: Console',
        'Intended Audience :: Developers',
        'Intended Audience :: Education',
        'Intended Audience :: Information Technology',
        'Intended Audience :: Science/Research',
        'License :: Apache 2.0 License',
        'Operating System :: MacOS',
        'Operating System :: Microsoft :: Windows',
        'Operating System :: POSIX',
        'Operating System :: Unix',
        'Programming Language :: Python',
        'Programming Language :: Python :: 2',
        'Programming Language :: Python :: 2.7',
        'Programming Language :: Python :: 3',
        'Programming Language :: Python :: 3.4',
        'Programming Language :: Python :: 3.5',
        'Programming Language :: Python :: 3.6',
        'Programming Language :: Python :: 3.7',
        'Programming Language :: Python :: 3.8',
        'Programming Language :: Python :: 3.9',
        'Programming Language :: C++',
        'Programming Language :: Python :: Implementation :: CPython',
        'Topic :: Scientific/Engineering',
        'Topic :: Scientific/Engineering :: Image Recognition',
        'Topic :: Software Development',
        'Topic :: Software Development :: Libraries',
    ]

    setuptools.setup(
        name='opencv',
        version=package_version,
        url='https://github.com/opencv/opencv',
        license='Apache 2.0',
        description='OpenCV python bindings',
        long_description='Open Source Computer Vision Library Python bindings',  # TODO
        long_description_content_type="text/markdown",
        packages=setuptools.find_packages(),
        maintainer="OpenCV Team",
        install_requires="numpy",
        classifiers=trove_classifiers,
    )
if __name__ == '__main__':
main()
| 35.95 | 84 | 0.59898 | import os
import sys
import platform
import setuptools
SCRIPT_DIR=os.path.dirname(os.path.abspath(__file__))
def main():
os.chdir(SCRIPT_DIR)
package_name = 'opencv'
package_version = os.environ.get('OPENCV_VERSION', '4.5.1')
long_description = 'Open Source Computer Vision Library Python bindings'
setuptools.setup(
name=package_name,
version=package_version,
url='https://github.com/opencv/opencv',
license='Apache 2.0',
description='OpenCV python bindings',
long_description=long_description,
long_description_content_type="text/markdown",
packages=setuptools.find_packages(),
maintainer="OpenCV Team",
install_requires="numpy",
classifiers=[
'Development Status :: 5 - Production/Stable',
'Environment :: Console',
'Intended Audience :: Developers',
'Intended Audience :: Education',
'Intended Audience :: Information Technology',
'Intended Audience :: Science/Research',
'License :: Apache 2.0 License',
'Operating System :: MacOS',
'Operating System :: Microsoft :: Windows',
'Operating System :: POSIX',
'Operating System :: Unix',
'Programming Language :: Python',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: 3.5',
'Programming Language :: Python :: 3.6',
'Programming Language :: Python :: 3.7',
'Programming Language :: Python :: 3.8',
'Programming Language :: Python :: 3.9',
'Programming Language :: C++',
'Programming Language :: Python :: Implementation :: CPython',
'Topic :: Scientific/Engineering',
'Topic :: Scientific/Engineering :: Image Recognition',
'Topic :: Software Development',
'Topic :: Software Development :: Libraries',
],
)
if __name__ == '__main__':
main()
| true | true |
f7379b5956ff2d8fe9dc03c33e6667605516f54b | 1,214 | py | Python | Section_7/Exercise_19.py | Szymon-Budziak/WDI_exercises_solutions | 51ffc9ec8b3cd6809bd55e98ecb8aed759c2d460 | [
"MIT"
] | null | null | null | Section_7/Exercise_19.py | Szymon-Budziak/WDI_exercises_solutions | 51ffc9ec8b3cd6809bd55e98ecb8aed759c2d460 | [
"MIT"
] | null | null | null | Section_7/Exercise_19.py | Szymon-Budziak/WDI_exercises_solutions | 51ffc9ec8b3cd6809bd55e98ecb8aed759c2d460 | [
"MIT"
] | 1 | 2021-11-21T09:38:33.000Z | 2021-11-21T09:38:33.000Z | """
The list elements are ordered by key value. Write a function that removes the
elements with a non-unique key from the list. The function receives a reference
to the first element of the list and should return the number of removed elements.
"""
class Node:
    # Singly linked list node: a key plus a pointer to the successor.
    def __init__(self, value):
        self.value = value  # key stored in this node
        self.next = None    # successor node; None marks the end of the list
def remove_non_unique_elements(head):
    """Drop every node whose key occurs more than once in the sorted list.

    The list is assumed sorted by key, so equal keys are adjacent.  The
    previous version only deduplicated (it kept one copy of each repeated
    key and could never drop the head node); per the exercise statement at
    the top of the module, *all* copies of a non-unique key are removed.

    :param head: first node of the sorted list (or None)
    :return: head of the filtered list (kept as the return value for
             compatibility with existing callers; may be None)
    """
    # Strip duplicate runs from the front so that `head` itself is unique.
    while head is not None and head.next is not None and head.value == head.next.value:
        duplicated = head.value
        while head is not None and head.value == duplicated:
            head = head.next
    if head is None:
        return None

    # `prev` is the last node known to have a unique key.
    prev = head
    current = head.next
    while current is not None:
        if current.next is not None and current.value == current.next.value:
            duplicated = current.value
            while current is not None and current.value == duplicated:
                current = current.next
            prev.next = current  # unlink the whole run of duplicates
        else:
            prev = current
            current = current.next
    return head
def create_linked_list(T):
    """Build a singly linked list holding the elements of T, in order."""
    head = None
    # Prepend elements back-to-front so the list ends up in T's order.
    for value in reversed(T):
        node = Node(value)
        node.next = head
        head = node
    return head
def create_list(p):
    """Collect the node values of the linked list starting at p into a list."""
    values = []
    node = p
    while node is not None:
        values.append(node.value)
        node = node.next
    return values
# Smoke test: filter a sorted list containing duplicated keys and print the result.
T = [3, 9, 9, 12, 18, 18, 18, 26, 26, 65, 80, 80, 93]
ll = create_linked_list(T)
head = remove_non_unique_elements(ll)
print(create_list(head))
| 24.28 | 99 | 0.621087 |
class Node:
def __init__(self, value):
self.value = value
self.next = None
def remove_non_unique_elements(head):
if head is None:
return None
current = head
while current is not None and current.next is not None:
if current.value == current.next.value:
actual = current.next
while actual is not None and actual.value == current.value:
actual = actual.next
current.next = actual
else:
current = current.next
return head
def create_linked_list(T):
p = None
for i in range(len(T) - 1, -1, -1):
q = Node(T[i])
q.next = p
p = q
return p
def create_list(p):
T = []
while p is not None:
T.append(p.value)
p = p.next
return T
T = [3, 9, 9, 12, 18, 18, 18, 26, 26, 65, 80, 80, 93]
ll = create_linked_list(T)
head = remove_non_unique_elements(ll)
print(create_list(head))
| true | true |
f7379b5a4d92f740d0e941f62a5474e9d907dfdb | 1,650 | py | Python | model/src/DataModel.py | roman-baldaev/course-project | b65ba018c16697224f15916b08ce7f09634d1f8c | [
"MIT"
] | null | null | null | model/src/DataModel.py | roman-baldaev/course-project | b65ba018c16697224f15916b08ce7f09634d1f8c | [
"MIT"
] | null | null | null | model/src/DataModel.py | roman-baldaev/course-project | b65ba018c16697224f15916b08ce7f09634d1f8c | [
"MIT"
] | null | null | null | import numpy as np
import pandas as pd
class DataModel:
"""
This class implements a data model - values at time points and provides methods for working with these data.
"""
def __init__(self, n=0, values=None, times=None):
"""
A constructor that takes values and a time point.
:param values: Array of values process
:param times: Array of a time points
"""
if (values is None) or (times is None):
self._times = np.zeros((n, ))
self._values = np.zeros((n, ))
else:
if len(values) != len(times):
print("Different size of values and times")
else:
self._times = np.array(times, dtype=float)
self._values = np.array(values, dtype=float)
def print(self, n=None):
if n is not None:
_n = n
elif self._times.shape:
_n = self._times.shape[0]
for i in range(_n):
print("Time: {}___Value: {}".format(self._times[i], self._values[i]))
@property
def mean(self):
"""
:return: Mean of values
"""
return self._times.mean()
    def get_values(self):
        """Return the underlying values array."""
        return self._values
    def get_times(self):
        """Return the underlying time-points array."""
        return self._times
    def add_value(self, value, index):
        """Overwrite the value stored at position ``index``."""
        self._values[index] = value
    def add_time(self, time, index):
        """Overwrite the time point stored at position ``index``."""
        self._times[index] = time
    def get_value(self, index):
        """Return the value stored at position ``index``."""
        return self._values[index]
def get_time(self, index):
return self._times[index] | 27.966102 | 112 | 0.562424 | import numpy as np
import pandas as pd
class DataModel:
def __init__(self, n=0, values=None, times=None):
if (values is None) or (times is None):
self._times = np.zeros((n, ))
self._values = np.zeros((n, ))
else:
if len(values) != len(times):
print("Different size of values and times")
else:
self._times = np.array(times, dtype=float)
self._values = np.array(values, dtype=float)
def print(self, n=None):
if n is not None:
_n = n
elif self._times.shape:
_n = self._times.shape[0]
for i in range(_n):
print("Time: {}___Value: {}".format(self._times[i], self._values[i]))
@property
def mean(self):
return self._times.mean()
def get_values(self):
return self._values
def get_times(self):
return self._times
def add_value(self, value, index):
self._values[index] = value
def add_time(self, time, index):
self._times[index] = time
def get_value(self, index):
return self._values[index]
def get_time(self, index):
return self._times[index] | true | true |
f7379b8f4f3ad2bc4fa7c81f28d494a2a5dc2644 | 1,252 | py | Python | django/apps/stories/management/commands/frontpage_stories.py | Haakenlid/tassen | 911a2541c77eca522ba5a723f175786f4f9eb481 | [
"Apache-2.0"
] | 16 | 2017-03-21T03:53:37.000Z | 2021-08-14T06:28:02.000Z | django/apps/stories/management/commands/frontpage_stories.py | universitas/universitas.no | 911a2541c77eca522ba5a723f175786f4f9eb481 | [
"Apache-2.0"
] | 104 | 2017-03-25T00:12:46.000Z | 2021-03-09T22:40:58.000Z | django/apps/stories/management/commands/frontpage_stories.py | Haakenlid/tassen | 911a2541c77eca522ba5a723f175786f4f9eb481 | [
"Apache-2.0"
] | 6 | 2017-03-21T03:53:40.000Z | 2020-06-07T14:15:38.000Z | import logging
from django.core.management.base import BaseCommand
from apps.frontpage.models import FrontpageStory
from apps.stories.models import Story
logger = logging.getLogger(__name__)
class Command(BaseCommand):
    """Create FrontpageStory entries for published stories lacking one."""
    help = 'Populates frontpage'

    def add_arguments(self, parser):
        """Register --number/-n (max stories, 0 = unlimited) and --delete/-d."""
        parser.add_argument(
            '--number',
            '-n',
            type=int,
            dest='number',
            default=0,
            help='Number of stories.'
        )
        parser.add_argument(
            '--delete',
            '-d',
            action='store_true',
            dest='delete',
            default=False,
            help='Delete old stories'
        )

    def handle(self, *args, **options):
        """Populate the frontpage, processing at most --number stories.

        Stories are visited oldest-first by publication date; a
        FrontpageStory is created only for stories that have none yet.
        """
        if options['delete']:
            FrontpageStory.objects.all().delete()
        stories = Story.objects.published().order_by('publication_date')
        counter = 0
        limit = options.get('number')
        for story in stories:
            # Stop once `limit` stories have been handled.  Off-by-one fixed:
            # the old `counter > limit` test let limit + 1 stories through.
            if limit and counter >= limit:
                break
            counter += 1
            if story.frontpagestory_set.count() == 0:
                FrontpageStory.objects.create_for_story(story)
                self.stdout.write(story.title)
| 25.55102 | 72 | 0.551118 | import logging
from django.core.management.base import BaseCommand
from apps.frontpage.models import FrontpageStory
from apps.stories.models import Story
logger = logging.getLogger(__name__)
class Command(BaseCommand):
help = 'Populates frontpage'
def add_arguments(self, parser):
parser.add_argument(
'--number',
'-n',
type=int,
dest='number',
default=0,
help='Number of stories.'
)
parser.add_argument(
'--delete',
'-d',
action='store_true',
dest='delete',
default=False,
help='Delete old stories'
)
def handle(self, *args, **options):
if options['delete']:
FrontpageStory.objects.all().delete()
stories = Story.objects.published().order_by('publication_date')
counter = 0
limit = options.get('number')
for story in stories:
if limit and counter > limit:
break
else:
counter += 1
if story.frontpagestory_set.count() == 0:
FrontpageStory.objects.create_for_story(story)
self.stdout.write(story.title)
| true | true |
f7379c4455be7a4cbb702bd6f60e6eca937b6a10 | 5,681 | py | Python | run.py | gatran/DSC180B-Face-Mask-Detection | 1ccce9b9a50d8e833ca3b7478045d9f1fb01c2fa | [
"BSD-2-Clause-FreeBSD",
"MIT"
] | null | null | null | run.py | gatran/DSC180B-Face-Mask-Detection | 1ccce9b9a50d8e833ca3b7478045d9f1fb01c2fa | [
"BSD-2-Clause-FreeBSD",
"MIT"
] | null | null | null | run.py | gatran/DSC180B-Face-Mask-Detection | 1ccce9b9a50d8e833ca3b7478045d9f1fb01c2fa | [
"BSD-2-Clause-FreeBSD",
"MIT"
] | 1 | 2021-06-02T19:42:11.000Z | 2021-06-02T19:42:11.000Z | #!/usr/bin/env python
import argparse
import os
import sys
import json
import shutil
#from src.gradcam import *
# Paths to the per-stage JSON parameter files consumed by main().
data_ingest_params = './config/data-params.json'
fp_params = './config/file_path.json'
gradcam_params = './config/gradcam_params.json'
ig_params = './config/ig_params.json'
train_params = './config/train_params.json'
test_params = './config/test_params.json'
def load_params(fp):
    """Load a JSON parameter file and return its contents as a dict."""
    with open(fp) as config_file:
        return json.load(config_file)
def main(targets):
    """Dispatch pipeline stages based on the requested targets.

    Supported targets: 'clean', 'gradcam', 'training', 'testing', 'ig'.
    Each stage shells out to a stage script (path taken from
    config/file_path.json) with flags assembled from the matching
    config/*.json parameter file.

    NOTE(review): commands are built by string concatenation and executed
    via os.system, so config values containing spaces or shell
    metacharacters will break (or inject into) the command line --
    consider subprocess.run with an argument list.  os.system exit codes
    are also ignored, so a failed stage does not stop later ones.
    """
    # --- clean: wipe and recreate all result directories -----------------
    if 'clean' in targets:
        shutil.rmtree('results/gradcam/', ignore_errors=True)
        shutil.rmtree('results/model_prediction/', ignore_errors=True)
        shutil.rmtree('results/integrated_gradient/', ignore_errors=True)
        os.mkdir('results/gradcam')
        os.mkdir('results/model_prediction')
        os.mkdir('results/integrated_gradient')

    # --- gradcam: run Grad-CAM visualization on the configured images ----
    if "gradcam" in targets:
        # Check if directory "results" is created
        if not os.path.isdir('results/gradcam'):
            os.makedirs('results/gradcam')

        gradcam_fp = load_params(fp_params)['gradcam_path']
        input_gradcam_params = load_params(gradcam_params)
        input_images = input_gradcam_params["load_image_path"]["image_input_path_train_covered"]
        save_images = input_gradcam_params['save_image_path']
        model_path = input_gradcam_params['model_path']
        # A custom image path, when configured, is forwarded as an extra flag.
        if "custom_image_path" in input_gradcam_params:
            custom_image_path = input_gradcam_params['custom_image_path']
            os.system("python " + gradcam_fp + " --image-path " + input_images + " --custom-image-path " + custom_image_path + " --save-path-gb " + save_images['gb_path'] + " --save-path-cam-gb " + save_images['cam_gb_path'] + " --save-path-cam " + save_images['cam_path'] + " --model-path " + model_path + " --use-cuda")
        else:
            os.system("python " + gradcam_fp + " --image-path " + input_images + " --save-path-gb " + save_images['gb_path'] + " --save-path-cam-gb " + save_images['cam_gb_path'] + " --save-path-cam " + save_images['cam_path'] + " --model-path " + model_path + " --use-cuda")

    # --- training: train a model with the configured hyperparameters -----
    if "training" in targets:
        if not os.path.isdir('models'):
            os.makedirs('models')

        train_fp = load_params(fp_params)['train_path']
        input_train_params = load_params(train_params)
        model_name = input_train_params['model_name']
        feature_extract = input_train_params['feature_extracting']
        batch_size = input_train_params['batch_size']
        learning_rate = input_train_params['learning_rate']
        num_epochs = input_train_params['num_epochs']
        # --feature-extracting is a boolean flag, so it selects the command.
        if feature_extract:
            os.system("python " + train_fp + " --model-name " + model_name + " --batch-size " + str(batch_size) + " --learning-rate " + str(learning_rate) + " --num-epochs " + str(num_epochs) + " --use-cuda --feature-extracting")
        else:
            os.system("python " + train_fp + " --model-name " + model_name + " --batch-size " + str(batch_size) + " --learning-rate " + str(learning_rate) + " --num-epochs " + str(num_epochs) + " --use-cuda")

    # --- testing: evaluate a trained model ------------------------------
    if "testing" in targets:
        if not os.path.isdir('models'):
            print("No models available. Train a model first")
            sys.exit(0)
        if not os.path.isdir('results/model_prediction'):
            os.mkdir('results/model_prediction')

        test_fp = load_params(fp_params)['test_path']
        input_test_params = load_params(test_params)
        model_name = input_test_params['model_name']
        model_path = input_test_params['model_path']
        batch_size = input_test_params['batch_size']
        test_size = input_test_params['test_size']
        # Sanity check: the checkpoint path should embed the model name.
        if model_name not in model_path:
            print("Model name and model path mismatch, please check your parameters again!")
            sys.exit(0)
        if "custom_image_path" in input_test_params:
            custom_image_path = input_test_params['custom_image_path']
            os.system("python " + test_fp + " --model-name " + model_name + " --model-path " + model_path + " --custom-image-path " + custom_image_path + " --batch-size " + str(batch_size) + " --use-cuda")
        else:
            os.system("python " + test_fp + " --model-name " + model_name + " --model-path " + model_path + " --batch-size " + str(batch_size) + " --test-size " + str(test_size) + " --use-cuda")

    # --- ig: integrated-gradients attribution ---------------------------
    if "ig" in targets:
        if not os.path.isdir('models'):
            print("No models available. Train a model first")
            sys.exit(0)
        if not os.path.isdir('results/integrated_gradient'):
            os.mkdir('results/integrated_gradient')

        ig_fp = load_params(fp_params)['ig_path']
        input_ig_params = load_params(ig_params)
        img_load_path = input_ig_params['image_load_path']
        img_save_path = input_ig_params['image_save_path']
        model_path = input_ig_params['model_path']
        if "custom_image_path" in input_ig_params:
            custom_image_path = input_ig_params['custom_image_path']
            os.system("python " + ig_fp + " --custom-image-path " + custom_image_path + " --img-load-path " + img_load_path + " --img-save-path " + img_save_path + " --model-path " + model_path + " --use-cuda")
        else:
            os.system("python " + ig_fp + " --img-load-path " + img_load_path + " --img-save-path " + img_save_path + " --model-path " + model_path + " --use-cuda")
# Script entry point: make sure the top-level results directory exists,
# then run the stages named on the command line (e.g. `python run.py training testing`).
if __name__ == '__main__':
    if not os.path.isdir('results'):
        os.makedirs('results')

    targets = sys.argv[1:]
    main(targets)
| 48.974138 | 321 | 0.628939 |
import argparse
import os
import sys
import json
import shutil
data_ingest_params = './config/data-params.json'
fp_params = './config/file_path.json'
gradcam_params = './config/gradcam_params.json'
ig_params = './config/ig_params.json'
train_params = './config/train_params.json'
test_params = './config/test_params.json'
def load_params(fp):
with open(fp) as fh:
param = json.load(fh)
return param
def main(targets):
    """Dispatch the pipeline stages named in ``targets``.

    Recognized targets (checked in this fixed order when several are given):
    ``clean``, ``gradcam``, ``training``, ``testing``, ``ig``. Each stage
    reads its settings from the module-level JSON config paths and shells
    out to the corresponding script via ``os.system``.

    NOTE(review): commands are assembled by plain string concatenation, so
    config values containing spaces or shell metacharacters will break the
    call (and are a potential shell-injection vector) -- confirm the config
    files are trusted.
    """
    # "clean": wipe and recreate the per-stage result directories.
    if 'clean' in targets:
        shutil.rmtree('results/gradcam/', ignore_errors=True)
        shutil.rmtree('results/model_prediction/', ignore_errors=True)
        shutil.rmtree('results/integrated_gradient/', ignore_errors=True)
        os.mkdir('results/gradcam')
        os.mkdir('results/model_prediction')
        os.mkdir('results/integrated_gradient')
    # "gradcam": run Grad-CAM visualization with paths from the config.
    if "gradcam" in targets:
        if not os.path.isdir('results/gradcam'):
            os.makedirs('results/gradcam')
        gradcam_fp = load_params(fp_params)['gradcam_path']
        input_gradcam_params = load_params(gradcam_params)
        input_images = input_gradcam_params["load_image_path"]["image_input_path_train_covered"]
        save_images = input_gradcam_params['save_image_path']
        model_path = input_gradcam_params['model_path']
        # Optional override: visualize a user-supplied image instead of the default set.
        if "custom_image_path" in input_gradcam_params:
            custom_image_path = input_gradcam_params['custom_image_path']
            os.system("python " + gradcam_fp + " --image-path " + input_images + " --custom-image-path " + custom_image_path + " --save-path-gb " + save_images['gb_path'] + " --save-path-cam-gb " + save_images['cam_gb_path'] + " --save-path-cam " + save_images['cam_path'] + " --model-path " + model_path + " --use-cuda")
        else:
            os.system("python " + gradcam_fp + " --image-path " + input_images + " --save-path-gb " + save_images['gb_path'] + " --save-path-cam-gb " + save_images['cam_gb_path'] + " --save-path-cam " + save_images['cam_path'] + " --model-path " + model_path + " --use-cuda")
    # "training": train a model with hyperparameters from the config.
    if "training" in targets:
        if not os.path.isdir('models'):
            os.makedirs('models')
        train_fp = load_params(fp_params)['train_path']
        input_train_params = load_params(train_params)
        model_name = input_train_params['model_name']
        feature_extract = input_train_params['feature_extracting']
        batch_size = input_train_params['batch_size']
        learning_rate = input_train_params['learning_rate']
        num_epochs = input_train_params['num_epochs']
        # --feature-extracting is a flag (no value), hence the two command variants.
        if feature_extract:
            os.system("python " + train_fp + " --model-name " + model_name + " --batch-size " + str(batch_size) + " --learning-rate " + str(learning_rate) + " --num-epochs " + str(num_epochs) + " --use-cuda --feature-extracting")
        else:
            os.system("python " + train_fp + " --model-name " + model_name + " --batch-size " + str(batch_size) + " --learning-rate " + str(learning_rate) + " --num-epochs " + str(num_epochs) + " --use-cuda")
    # "testing": evaluate a trained model; requires models/ to exist.
    if "testing" in targets:
        if not os.path.isdir('models'):
            print("No models available. Train a model first")
            sys.exit(0)
        if not os.path.isdir('results/model_prediction'):
            os.mkdir('results/model_prediction')
        test_fp = load_params(fp_params)['test_path']
        input_test_params = load_params(test_params)
        model_name = input_test_params['model_name']
        model_path = input_test_params['model_path']
        batch_size = input_test_params['batch_size']
        test_size = input_test_params['test_size']
        # Sanity check: the weights file path should embed the model name.
        if model_name not in model_path:
            print("Model name and model path mismatch, please check your parameters again!")
            sys.exit(0)
        if "custom_image_path" in input_test_params:
            custom_image_path = input_test_params['custom_image_path']
            os.system("python " + test_fp + " --model-name " + model_name + " --model-path " + model_path + " --custom-image-path " + custom_image_path + " --batch-size " + str(batch_size) + " --use-cuda")
        else:
            os.system("python " + test_fp + " --model-name " + model_name + " --model-path " + model_path + " --batch-size " + str(batch_size) + " --test-size " + str(test_size) + " --use-cuda")
    # "ig": integrated-gradients attribution for a trained model.
    if "ig" in targets:
        if not os.path.isdir('models'):
            print("No models available. Train a model first")
            sys.exit(0)
        if not os.path.isdir('results/integrated_gradient'):
            os.mkdir('results/integrated_gradient')
        ig_fp = load_params(fp_params)['ig_path']
        input_ig_params = load_params(ig_params)
        img_load_path = input_ig_params['image_load_path']
        img_save_path = input_ig_params['image_save_path']
        model_path = input_ig_params['model_path']
        if "custom_image_path" in input_ig_params:
            custom_image_path = input_ig_params['custom_image_path']
            os.system("python " + ig_fp + " --custom-image-path " + custom_image_path + " --img-load-path " + img_load_path + " --img-save-path " + img_save_path + " --model-path " + model_path + " --use-cuda")
        else:
            os.system("python " + ig_fp + " --img-load-path " + img_load_path + " --img-save-path " + img_save_path + " --model-path " + model_path + " --use-cuda")
if __name__ == '__main__':
    # Make sure the shared output directory exists before any stage runs.
    if not os.path.isdir('results'):
        os.makedirs('results')
    # Pipeline targets come from the command line, e.g. "clean gradcam training".
    targets = sys.argv[1:]
    main(targets)
| true | true |
f7379e122a84b507b399b4dbc7b52402dd7e2908 | 214 | py | Python | aergo/herapy/errors/__init__.py | aspiers/herapy | cf8a6ab0fd50c12b0f5f3ca85ff015eda5108863 | [
"MIT"
] | 4 | 2019-02-22T09:33:30.000Z | 2021-03-26T14:22:04.000Z | aergo/herapy/errors/__init__.py | aspiers/herapy | cf8a6ab0fd50c12b0f5f3ca85ff015eda5108863 | [
"MIT"
] | 50 | 2019-03-10T02:45:55.000Z | 2022-02-01T15:00:44.000Z | aergo/herapy/errors/__init__.py | aspiers/herapy | cf8a6ab0fd50c12b0f5f3ca85ff015eda5108863 | [
"MIT"
] | 4 | 2019-08-03T11:01:29.000Z | 2021-03-31T08:31:23.000Z | # -*- coding: utf-8 -*-
__all__ = ["exception"]
from .exception import AergoException, CommunicationException
from .conversion_exception import ConversionException
from .general_exception import GeneralException
| 26.75 | 61 | 0.808411 |
__all__ = ["exception"]
from .exception import AergoException, CommunicationException
from .conversion_exception import ConversionException
from .general_exception import GeneralException
| true | true |
f7379f0a932918d82994fe95749b56a6277fc071 | 13,020 | py | Python | gans/experiments/emnist/preprocessing/filtered_emnist_data_utils.py | alshedivat/federated | 100f0e0940282818c42c39156407ae419f26de50 | [
"Apache-2.0"
] | 2 | 2021-10-19T13:55:11.000Z | 2021-11-11T11:26:05.000Z | federated/gans/experiments/emnist/preprocessing/filtered_emnist_data_utils.py | luke-who/TFF | fe9f44a504bc51b603a3ab9a181148da0aa9612f | [
"MIT"
] | null | null | null | federated/gans/experiments/emnist/preprocessing/filtered_emnist_data_utils.py | luke-who/TFF | fe9f44a504bc51b603a3ab9a181148da0aa9612f | [
"MIT"
] | null | null | null | # Copyright 2019, Google LLC.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Utility for filtering (via class. accuracy) the Federated EMNIST dataset."""
import csv
import functools
import os.path
import tensorflow as tf
import tensorflow_federated as tff
from gans.experiments.emnist import emnist_data_utils
# Root URL of the public bucket hosting the precomputed filtering csv files.
BASE_URL = 'https://storage.googleapis.com/tff-experiments-public/'
# Relative path (under BASE_URL and under the local cache dir) to the csvs.
CSVS_BASE_PATH = 'gans/csvs/'
@functools.lru_cache(maxsize=1)
def get_unfiltered_client_data_for_training(batch_size):
  r"""Returns `tff.simulation.datasets.ClientData` of unfiltered Federated EMNIST data.

  No filtering is applied, neither by user nor by example: every user and
  every example of each user is available for training.

  Args:
    batch_size: Batch size of output dataset. If None, don't batch.

  Returns:
    A `tff.simulation.datasets.ClientData` of real images of numbers/letters,
    with no filtering applied.
  """
  # Delegate to the shared builder with no inversion csv and no
  # example-index csv, i.e. no filtering of any kind.
  return get_filtered_client_data_for_training(
      None, None, batch_size)
@functools.lru_cache(maxsize=1)
def get_filtered_by_user_client_data_for_training(invert_imagery_probability,
                                                  accuracy_threshold,
                                                  batch_size,
                                                  cache_dir=None):
  r"""Returns `tff.simulation.datasets.ClientData` of filtered Federated EMNIST data.

  Input data gets filtered on a per-user basis; users get selected via the
  `accuracy_threshold` criterion, and then training can take place with all
  examples from only the selected users.

  Args:
    invert_imagery_probability: The probability that a user\'s image data has
      pixel intensity inverted, e.g. `0p1` corresponds to 0.1 (a 10%
      probability a user\'s data is flipped). Precomputed via the
      ./filter_users.py script; this argument selects which file to read.
    accuracy_threshold: Classification threshold by which a user is included
      in the training population, e.g. `lt0p882` (accuracy < 0.882) or
      `gt0p939` (accuracy > 0.939). Precomputed via the ./filter_users.py
      script; this argument selects which file to read.
    batch_size: Batch size of output dataset. If None, don't batch.
    cache_dir: (Optional) base directory to cache the downloaded files. If
      None, caches in Keras' default cache directory.

  Returns:
    A `tff.simulation.datasets.ClientData` of real images of numbers/letters,
    filtered by user classification accuracy as per the input arguments.

  Raises:
    ValueError: If the precomputed csv file could not be fetched.
  """
  path_to_data = os.path.join(CSVS_BASE_PATH,
                              'inv_prob_{}'.format(invert_imagery_probability),
                              'filter_by_user',
                              'acc_{}'.format(accuracy_threshold))
  try:
    filename = 'client_ids.csv'
    path_to_read_inversions_csv = tf.keras.utils.get_file(
        fname=filename,
        cache_subdir=path_to_data,
        cache_dir=cache_dir,
        origin=os.path.join(BASE_URL, path_to_data, filename))
  except Exception as err:
    msg = ('A URL fetch failure was encountered when trying to retrieve '
           'filter-by-user generated csv file with invert_imagery_probability '
           '`{}` and accuracy_threshold `{}`. Please run the ./filter_users.py '
           'script to generate the missing data, and use the `cache_dir` '
           'argument to this method to specify the location of the generated '
           'data csv file.'.format(invert_imagery_probability,
                                   accuracy_threshold))
    # Explicitly chain the underlying fetch error (PEP 3134) so the root
    # cause is preserved as __cause__ rather than just implicit context.
    raise ValueError(msg) from err
  return get_filtered_client_data_for_training(path_to_read_inversions_csv,
                                               None, batch_size)
@functools.lru_cache(maxsize=1)
def get_filtered_by_example_client_data_for_training(invert_imagery_probability,
                                                     min_num_examples,
                                                     example_class_selection,
                                                     batch_size,
                                                     cache_dir=None):
  r"""Returns `tff.simulation.datasets.ClientData` of filtered Federated EMNIST data.

  Input data gets filtered on a per-example basis. Any user meeting the
  `min_num_examples` criterion is included; their examples are limited to
  those matching the `example_class_selection` criterion.

  Args:
    invert_imagery_probability: The probability that a user\'s image data has
      pixel intensity inverted, e.g. `0p1` corresponds to 0.1. Precomputed
      via the ./filter_examples.py script; this argument selects which file
      to read.
    min_num_examples: Minimum number of examples that are either correct or
      incorrect (as set by `example_class_selection`) in a client\'s local
      dataset for that client to join the training sub-population.
      Precomputed via ./filter_examples.py.
    example_class_selection: Whether to train on a client\'s correct or
      incorrect examples. Precomputed via ./filter_examples.py.
    batch_size: Batch size of output dataset. If None, don't batch.
    cache_dir: (Optional) base directory to cache the downloaded files. If
      None, caches in Keras' default cache directory.

  Returns:
    A `tff.simulation.datasets.ClientData` of real images of numbers/letters,
    filtered by example classification correctness as per the input
    arguments.

  Raises:
    ValueError: If the precomputed csv files could not be fetched.
  """
  path_to_data = os.path.join(CSVS_BASE_PATH,
                              'inv_prob_{}'.format(invert_imagery_probability),
                              'filter_by_example',
                              'min_num_examples_{}'.format(min_num_examples),
                              '{}'.format(example_class_selection))
  try:
    filename = 'client_ids.csv'
    path_to_read_inversions_csv = tf.keras.utils.get_file(
        fname=filename,
        cache_subdir=path_to_data,
        cache_dir=cache_dir,
        origin=os.path.join(BASE_URL, path_to_data, filename))
    filename = 'example_indices_map.csv'
    path_to_read_example_indices_csv = tf.keras.utils.get_file(
        fname=filename,
        cache_subdir=path_to_data,
        cache_dir=cache_dir,
        origin=os.path.join(BASE_URL, path_to_data, filename))
  except Exception as err:
    msg = ('A URL fetch failure was encountered when trying to retrieve '
           'filter-by-example generated csv files with '
           'invert_imagery_probability `{}`, min_num_examples `{}`, and '
           'example_class_selection `{}`. Please run the ./filter_examples.py '
           'script to generate the missing data, and use the `cache_dir` '
           'argument to this method to specify the location of the generated '
           'data csv files.'.format(invert_imagery_probability,
                                    min_num_examples, example_class_selection))
    # Explicitly chain the underlying fetch error (PEP 3134) so the root
    # cause is preserved as __cause__ rather than just implicit context.
    raise ValueError(msg) from err
  return get_filtered_client_data_for_training(
      path_to_read_inversions_csv, path_to_read_example_indices_csv, batch_size)
def get_filtered_client_data_for_training(path_to_read_inversions_csv,
                                          path_to_read_example_indices_csv,
                                          batch_size):
  """Form ClientData using paths to pixel inversion, example selection data.

  Args:
    path_to_read_inversions_csv: csv mapping client id -> whether that
      client's images are pixel-inverted; None means no filtering at all.
    path_to_read_example_indices_csv: optional csv mapping client id -> the
      example indices to keep; None means keep all examples per client.
    batch_size: Batch size of output dataset. If None, don't batch.

  Returns:
    A `tff.simulation.datasets.ClientData` over the selected clients.
  """
  raw_client_data = emnist_data_utils.create_real_images_tff_client_data(
      'train')
  client_ids = raw_client_data.client_ids
  selected_client_ids_inversion_map = None
  client_ids_example_indices_map = None
  # If filter-by-user or filter-by-example, load the csv data into maps, and
  # update the client IDs to just the users that will be part of training.
  if path_to_read_inversions_csv is not None:
    selected_client_ids_inversion_map, client_ids_example_indices_map = (
        _get_client_ids_inversion_and_example_indices_maps(
            path_to_read_inversions_csv, path_to_read_example_indices_csv))
    client_ids = list(selected_client_ids_inversion_map.keys())
  def _get_dataset(client_id):
    """Retrieve/preprocess a tf.data.Dataset for a given client_id."""
    raw_ds = raw_client_data.create_tf_dataset_for_client(client_id)
    # Per-client inversion flag read from the csv map (False if unfiltered).
    invert_imagery = False
    if selected_client_ids_inversion_map:
      invert_imagery = selected_client_ids_inversion_map[client_id]
    # If filter-by-example, do it here.
    if client_ids_example_indices_map:
      raw_ds = _filter_by_example(raw_ds, client_ids_example_indices_map,
                                  client_id)
    return emnist_data_utils.preprocess_img_dataset(
        raw_ds,
        invert_imagery=invert_imagery,
        include_label=False,
        batch_size=batch_size,
        shuffle=True,
        repeat=False)
  return tff.simulation.datasets.ClientData.from_clients_and_fn(
      client_ids, _get_dataset)
def _filter_by_example(raw_ds, client_ids_example_indices_map, client_id):
  """Form a tf.data.Dataset from the examples in the map for the client_id.

  Args:
    raw_ds: the client's raw tf.data.Dataset.
    client_ids_example_indices_map: map of client id -> stringified list of
      the example indices to keep.
    client_id: the client whose dataset is being filtered.

  Returns:
    A tf.data.Dataset containing only the selected examples, in order.
  """
  example_indices = client_ids_example_indices_map[client_id]
  # B/c the csv stores the list as a string, we need to do some slightly
  # klugey conversion from a string to a collection: strip off the first and
  # last characters ([ and ]), then split on commas. A set (rather than the
  # original list) makes the per-element membership test below O(1).
  example_indices = {int(s) for s in example_indices[1:-1].split(',')}
  # Collect the elements (OrderedDicts) in the raw data which are at the
  # selected indices. enumerate replaces the original manual index counter.
  elements = [
      element for index, element in enumerate(raw_ds)
      if index in example_indices
  ]
  # Bind the elements (via a generator fn) into a new tf.data.Dataset.
  def _generator():
    for element in elements:
      yield element
  return tf.data.Dataset.from_generator(_generator, raw_ds.output_types,
                                        raw_ds.output_shapes)
def _get_client_ids_inversion_and_example_indices_maps(
    path_to_read_inversions_csv, path_to_read_example_indices_csv):
  """Load the inversion map (and optional example-index map) from csv files."""
  if path_to_read_inversions_csv is None:
    raise ValueError(
        'No path provided to the CSV file that stores map from client ids to '
        'image inversion data.')
  # Map of client id -> whether that client's images are pixel-inverted.
  selected_client_ids_inversion_map = {}
  with tf.io.gfile.GFile(path_to_read_inversions_csv, 'r') as csvfile:
    for client_id, inverted in csv.reader(csvfile):
      selected_client_ids_inversion_map[client_id] = inverted == 'True'
  # Optional map of client id -> (stringified) list of example indices that
  # the GAN will be trained on.
  client_ids_example_indices_map = None
  if path_to_read_example_indices_csv:
    client_ids_example_indices_map = {}
    with tf.io.gfile.GFile(path_to_read_example_indices_csv, 'r') as csvfile:
      for client_id, indices in csv.reader(csvfile):
        client_ids_example_indices_map[client_id] = indices
    # Both csv files must describe exactly the same set of clients.
    if set(client_ids_example_indices_map) ^ set(selected_client_ids_inversion_map):
      raise ValueError(
          'The CSV files at path_to_read_inversions_csv and '
          'path_to_read_example_indices_csv contain different keys.')
  return selected_client_ids_inversion_map, client_ids_example_indices_map
| 45.365854 | 87 | 0.701613 |
import csv
import functools
import os.path
import tensorflow as tf
import tensorflow_federated as tff
from gans.experiments.emnist import emnist_data_utils
BASE_URL = 'https://storage.googleapis.com/tff-experiments-public/'
CSVS_BASE_PATH = 'gans/csvs/'
@functools.lru_cache(maxsize=1)
def get_unfiltered_client_data_for_training(batch_size):
return get_filtered_client_data_for_training(None, None, batch_size)
@functools.lru_cache(maxsize=1)
def get_filtered_by_user_client_data_for_training(invert_imagery_probability,
accuracy_threshold,
batch_size,
cache_dir=None):
path_to_data = os.path.join(CSVS_BASE_PATH,
'inv_prob_{}'.format(invert_imagery_probability),
'filter_by_user',
'acc_{}'.format(accuracy_threshold))
try:
filename = 'client_ids.csv'
path_to_read_inversions_csv = tf.keras.utils.get_file(
fname=filename,
cache_subdir=path_to_data,
cache_dir=cache_dir,
origin=os.path.join(BASE_URL, path_to_data, filename))
except Exception:
msg = ('A URL fetch failure was encountered when trying to retrieve '
'filter-by-user generated csv file with invert_imagery_probability '
'`{}` and accuracy_threshold `{}`. Please run the ./filter_users.py '
'script to generate the missing data, and use the `cache_dir` '
'argument to this method to specify the location of the generated '
'data csv file.'.format(invert_imagery_probability,
accuracy_threshold))
raise ValueError(msg)
return get_filtered_client_data_for_training(path_to_read_inversions_csv,
None, batch_size)
@functools.lru_cache(maxsize=1)
def get_filtered_by_example_client_data_for_training(invert_imagery_probability,
min_num_examples,
example_class_selection,
batch_size,
cache_dir=None):
path_to_data = os.path.join(CSVS_BASE_PATH,
'inv_prob_{}'.format(invert_imagery_probability),
'filter_by_example',
'min_num_examples_{}'.format(min_num_examples),
'{}'.format(example_class_selection))
try:
filename = 'client_ids.csv'
path_to_read_inversions_csv = tf.keras.utils.get_file(
fname=filename,
cache_subdir=path_to_data,
cache_dir=cache_dir,
origin=os.path.join(BASE_URL, path_to_data, filename))
filename = 'example_indices_map.csv'
path_to_read_example_indices_csv = tf.keras.utils.get_file(
fname=filename,
cache_subdir=path_to_data,
cache_dir=cache_dir,
origin=os.path.join(BASE_URL, path_to_data, filename))
except Exception:
msg = ('A URL fetch failure was encountered when trying to retrieve '
'filter-by-example generated csv files with '
'invert_imagery_probability `{}`, min_num_examples `{}`, and '
'example_class_selection `{}`. Please run the ./filter_examples.py '
'script to generate the missing data, and use the `cache_dir` '
'argument to this method to specify the location of the generated '
'data csv files.'.format(invert_imagery_probability,
min_num_examples, example_class_selection))
raise ValueError(msg)
return get_filtered_client_data_for_training(
path_to_read_inversions_csv, path_to_read_example_indices_csv, batch_size)
def get_filtered_client_data_for_training(path_to_read_inversions_csv,
path_to_read_example_indices_csv,
batch_size):
raw_client_data = emnist_data_utils.create_real_images_tff_client_data(
'train')
client_ids = raw_client_data.client_ids
selected_client_ids_inversion_map = None
client_ids_example_indices_map = None
if path_to_read_inversions_csv is not None:
selected_client_ids_inversion_map, client_ids_example_indices_map = (
_get_client_ids_inversion_and_example_indices_maps(
path_to_read_inversions_csv, path_to_read_example_indices_csv))
client_ids = list(selected_client_ids_inversion_map.keys())
def _get_dataset(client_id):
raw_ds = raw_client_data.create_tf_dataset_for_client(client_id)
invert_imagery = False
if selected_client_ids_inversion_map:
invert_imagery = selected_client_ids_inversion_map[client_id]
if client_ids_example_indices_map:
raw_ds = _filter_by_example(raw_ds, client_ids_example_indices_map,
client_id)
return emnist_data_utils.preprocess_img_dataset(
raw_ds,
invert_imagery=invert_imagery,
include_label=False,
batch_size=batch_size,
shuffle=True,
repeat=False)
return tff.simulation.datasets.ClientData.from_clients_and_fn(
client_ids, _get_dataset)
def _filter_by_example(raw_ds, client_ids_example_indices_map, client_id):
example_indices = client_ids_example_indices_map[client_id]
example_indices = [int(s) for s in example_indices[1:-1].split(',')]
elements = []
index = 0
for element in raw_ds:
if index in example_indices:
elements.append(element)
index += 1
def _generator():
for element in elements:
yield element
return tf.data.Dataset.from_generator(_generator, raw_ds.output_types,
raw_ds.output_shapes)
def _get_client_ids_inversion_and_example_indices_maps(
path_to_read_inversions_csv, path_to_read_example_indices_csv):
if path_to_read_inversions_csv is None:
raise ValueError(
'No path provided to the CSV file that stores map from client ids to '
'image inversion data.')
selected_client_ids_inversion_map = {}
with tf.io.gfile.GFile(path_to_read_inversions_csv, 'r') as csvfile:
csvreader = csv.reader(csvfile)
for [key, val] in csvreader:
selected_client_ids_inversion_map[key] = (val == 'True')
client_ids_example_indices_map = None
if path_to_read_example_indices_csv:
client_ids_example_indices_map = {}
with tf.io.gfile.GFile(path_to_read_example_indices_csv, 'r') as csvfile:
csvreader = csv.reader(csvfile)
for [key, val] in csvreader:
client_ids_example_indices_map[key] = val
set_1 = set(client_ids_example_indices_map.keys())
set_2 = set(selected_client_ids_inversion_map.keys())
symmetric_diff = set_1 ^ set_2
if symmetric_diff:
raise ValueError(
'The CSV files at path_to_read_inversions_csv and '
'path_to_read_example_indices_csv contain different keys.')
return selected_client_ids_inversion_map, client_ids_example_indices_map
| true | true |
f7379f7c6b6ddd780c88ebd76500c40a01cae31c | 2,892 | py | Python | get_grok_repos.py | icecube-pixel/grok-auto-complete | 747aab90f846410f444914713d238034fcf767a2 | [
"MIT"
] | null | null | null | get_grok_repos.py | icecube-pixel/grok-auto-complete | 747aab90f846410f444914713d238034fcf767a2 | [
"MIT"
] | null | null | null | get_grok_repos.py | icecube-pixel/grok-auto-complete | 747aab90f846410f444914713d238034fcf767a2 | [
"MIT"
] | null | null | null | import logging
from github import Github
from typing import Dict, Tuple, List
import os
import argparse
import traceback
from collections import Counter
from tenacity import retry, stop_after_attempt, wait_exponential
from time import sleep
import pandas as pd
logging.basicConfig(format='%(asctime)s,%(msecs)d %(levelname)-8s [%(filename)s:%(lineno)d] %(message)s',
datefmt='%Y-%m-%d:%H:%M:%S',
level=logging.INFO)
logger = logging.getLogger(__name__)
# https://docs.github.com/en/github/searching-for-information-on-github/searching-on-github/searching-for-repositories#search-by-when-a-repository-was-created-or-last-updated
def get_query_string_to_exclude() -> str:
    """
    Build the language-exclusion clause appended to the GitHub search query.

    :return: a string beginning with a space, e.g.
        " NOT language:Jinja NOT language:Shell ..."
    """
    logger.info("Inside function to generate query to hit API")
    languages_to_exclude = ['Jinja', 'Shell', 'YAML', 'INI', 'Perl', 'Haskell']
    # f-string + generator keeps this consistent with the file's f-string use.
    exclude_languages = " ".join(f"NOT language:{language}" for language in languages_to_exclude)
    return " " + exclude_languages
def get_matching_code(args: Dict)->None:
    """
    Search GitHub for .j2 files containing "grok" (excluding templating and
    shell languages), dump each matching file under ./data/, and write the
    raw repository metadata to RepoData.csv.

    :param args: dict with key ``token`` holding the GitHub OAuth token.
    """
    logger.info("Inside to get top repositories function")
    master_data = []
    observed_licences = []
    try:
        g_obj = Github(args['token'], timeout=3000) # Overriding timeout of 3000 seconds
        pattern_file_extension = '"grok" in:file extension:j2'
        lang_to_exclude = get_query_string_to_exclude()
        _query_str = f"{pattern_file_extension}{lang_to_exclude}"
        logger.info(f"Processing query {_query_str}")
        # Pause before querying -- presumably to stay clear of API rate
        # limits. NOTE(review): the tenacity retry imports at the top of the
        # file suggest retrying was planned here but is not wired up.
        sleep(10)
        results = g_obj.search_code(_query_str)
        for repo in results:
            master_data.append(vars(repo))
            observed_licences.append(repo.license)
            # Derive a flat on-disk filename from the result's repr, e.g.
            # ContentFile(path="a/b.j2") -> a_b.j2. NOTE(review): fragile --
            # breaks if PyGithub's repr format ever changes.
            file_name = str(repo).split("ContentFile(path=")[1].replace('"',"")[:-1].replace("/", "_")
            path_to_dump = os.path.join(os.getcwd(), "data", file_name)
            logger.info("Dumping file {}".format(file_name))
            with open(path_to_dump, "wb") as f:
                f.write(repo.decoded_content)
        logger.info(Counter(observed_licences))
    except Exception as e:
        # Best-effort: log any failure and still write whatever was collected.
        logger.error(e)
        logger.error(traceback.format_exc())
    pd.DataFrame(master_data).to_csv("RepoData.csv", index=False)
def get_inputs() -> Dict:
    """Parse command-line arguments and return them as a dict.

    :return: dict with key ``token`` holding the GitHub OAuth token
        (required; argparse exits with an error if it is missing).
    """
    parser = argparse.ArgumentParser()
    parser.add_argument("--token", dest="token", help="Enter the oAuth token", required=True)
    return vars(parser.parse_args())
def main():
    """CLI entry point: parse the --token argument and run the repo search."""
    logger.info("Inside Main")
    args = get_inputs()
    get_matching_code(args=args)
if __name__ == '__main__':
    main()
| 37.558442 | 175 | 0.662863 | import logging
from github import Github
from typing import Dict, Tuple, List
import os
import argparse
import traceback
from collections import Counter
from tenacity import retry, stop_after_attempt, wait_exponential
from time import sleep
import pandas as pd
logging.basicConfig(format='%(asctime)s,%(msecs)d %(levelname)-8s [%(filename)s:%(lineno)d] %(message)s',
datefmt='%Y-%m-%d:%H:%M:%S',
level=logging.INFO)
logger = logging.getLogger(__name__)
fo("Inside function to generate query to hit API")
languages_to_exclude = ['Jinja', 'Shell', 'YAML', 'INI', 'Perl', 'Haskell']
exclude_languages = " ".join(["NOT language:{}".format(language) for language in languages_to_exclude])
return " " + exclude_languages
def get_matching_code(args: Dict)->None:
logger.info("Inside to get top repositories function")
master_data = []
observed_licences = []
try:
g_obj = Github(args['token'], timeout=3000)
pattern_file_extension = '"grok" in:file extension:j2'
lang_to_exclude = get_query_string_to_exclude()
_query_str = f"{pattern_file_extension}{lang_to_exclude}"
logger.info(f"Processing query {_query_str}")
sleep(10)
results = g_obj.search_code(_query_str)
for repo in results:
master_data.append(vars(repo))
observed_licences.append(repo.license)
file_name = str(repo).split("ContentFile(path=")[1].replace('"',"")[:-1].replace("/", "_")
path_to_dump = os.path.join(os.getcwd(), "data", file_name)
logger.info("Dumping file {}".format(file_name))
with open(path_to_dump, "wb") as f:
f.write(repo.decoded_content)
logger.info(Counter(observed_licences))
except Exception as e:
logger.error(e)
logger.error(traceback.format_exc())
pd.DataFrame(master_data).to_csv("RepoData.csv", index=False)
def get_inputs()->Dict:
parser = argparse.ArgumentParser()
parser.add_argument("--token", dest="token", help="Enter the oAuth token", required=True)
args = vars(parser.parse_args())
return args
def main():
logger.info("Inside Main")
args = get_inputs()
get_matching_code(args=args)
if __name__ == '__main__':
main()
| true | true |
f737a0875ea46f345c5fb511aa7fbf04d063db12 | 7,473 | py | Python | foiamachine/apps/government/models.py | dwillis/foiamachine | 26d3b02870227696cdaab639c39d47b2a7a42ae5 | [
"Unlicense",
"MIT"
] | 9 | 2017-08-02T16:28:10.000Z | 2021-07-19T09:51:46.000Z | foiamachine/apps/government/models.py | dwillis/foiamachine | 26d3b02870227696cdaab639c39d47b2a7a42ae5 | [
"Unlicense",
"MIT"
] | null | null | null | foiamachine/apps/government/models.py | dwillis/foiamachine | 26d3b02870227696cdaab639c39d47b2a7a42ae5 | [
"Unlicense",
"MIT"
] | 5 | 2017-10-10T23:15:02.000Z | 2021-07-19T09:51:48.000Z | from django.db import models
from django_extensions.db.fields import AutoSlugField
from apps.core.models import BaseData
from django.contrib.auth.models import User, Group
from django.utils.html import escape
import pytz
import datetime
import bleach
from django.utils import timezone
class Language(BaseData):
    """A (human) language; referenced by Nation's language fields."""
    name = models.CharField(max_length=255)
    # Slug auto-derived from name; overwrite=False keeps it stable once set.
    slug = AutoSlugField(populate_from=('name', ), overwrite=False)
    class Meta:
        verbose_name_plural = 'Languages'
    def __unicode__(self):
        return self.name
class AdminName(BaseData):
    """Name for an administrative level; referenced by Nation's admin_*_name fields."""
    name = models.CharField(max_length=255)
    # Optional plural form; blank/null when no plural has been recorded.
    name_plural = models.CharField(max_length=255, blank=True, null=True)
    def __unicode__(self):
        return self.name
class Update(BaseData):
    """An authored news/update item (attached to statutes via Statute.updates)."""
    author = models.ForeignKey(User)
    # Publication timestamp; null presumably means "not yet published" -- confirm.
    pubbed = models.DateTimeField(null=True)
    headline = models.CharField(max_length=1024, default="The latest")
    text = models.TextField()
class FeeExemptionOtherManager(models.Manager):
    """Manager that hides deprecated FeeExemptionOther rows by default."""
    def all_them(self):
        # Unfiltered queryset, including deprecated rows.
        return super(FeeExemptionOtherManager, self).get_query_set()
    def get_query_set(self):
        # Default queryset: only rows whose `deprecated` field is unset.
        return super(FeeExemptionOtherManager, self).get_query_set().filter(deprecated__isnull=True)
class FeeExemptionOther(BaseData):
    """A statute-related provision: an exemption, a fee, or other."""
    # Allowed values for `typee` (one-letter code -> human-readable label).
    statute_relation_types = (
        ('E', 'Exemption'),
        ('F', 'Fee'),
        ('O', 'Other'),
    )
    name = models.CharField(max_length=512)
    slug = AutoSlugField(populate_from=('name', ), overwrite=False)
    description = models.TextField(blank=True, null=True)
    source = models.URLField(blank=True, null=True)
    typee = models.CharField(max_length=1, choices=statute_relation_types,)
    # Default manager hides deprecated rows (see FeeExemptionOtherManager).
    objects = FeeExemptionOtherManager()
    def __unicode__(self):
        return self.name
    @property
    def deleted(self):
        # Soft-delete flag: a row counts as deleted once `deprecated` is set.
        return self.deprecated is not None
    @property
    def get_name(self):
        # Sanitized (HTML tags stripped) name, safe for display.
        return bleach.clean(self.name, strip=True)
    @property
    def get_description(self):
        # Sanitized (HTML tags stripped) description, safe for display.
        return bleach.clean(self.description, strip=True)
    @property
    def get_source(self):
        # Sanitized (HTML tags stripped) source URL, safe for display.
        return bleach.clean(self.source, strip=True)
#Nation (for example, names for admin 1, 2, etc. levels, language modules)
class Nation(BaseData):
    """A nation: its FOI languages and local names for each admin level."""
    name = models.CharField(max_length=255)
    slug = AutoSlugField(populate_from=('name', ), overwrite=False)
    primary_language = models.ForeignKey(Language, related_name='primary_language_nations', blank=True, null=True)
    # Languages in which FOI requests may be made in this nation.
    foi_languages = models.ManyToManyField(Language, blank=True, null=True)
    # Local names for the administrative levels 0-3 (national down to
    # city/municipality -- see Government.GOV_LEVELS).
    admin_0_name = models.ForeignKey(AdminName, null=True, blank=True, related_name='admin_0_nations')
    admin_1_name = models.ForeignKey(AdminName, null=True, blank=True, related_name='admin_1_nations')
    admin_2_name = models.ForeignKey(AdminName, null=True, blank=True, related_name='admin_2_nations')
    admin_3_name = models.ForeignKey(AdminName, null=True, blank=True, related_name='admin_3_nations')
    class Meta:
        verbose_name_plural = 'Nations'
    def __unicode__(self):
        return self.name
class StatuteManager(models.Manager):
    """Manager that hides deprecated Statute rows by default."""
    def all_them(self):
        # Unfiltered queryset, including deprecated rows.
        return super(StatuteManager, self).get_query_set()
    def get_query_set(self):
        # Default queryset: only rows whose `deprecated` field is unset.
        return super(StatuteManager, self).get_query_set().filter(deprecated__isnull=True)
class Statute(BaseData):
    """A freedom-of-information statute, with response-deadline metadata."""
    short_title = models.CharField(max_length=255)
    designator = models.CharField(max_length=255, blank=True, null=True)
    text = models.TextField(blank=True, null=True)
    # Response deadline in days; -1 (the default) means "not set" and is
    # surfaced as None by get_days_till_due below.
    days_till_due = models.IntegerField(default=-1)
    slug = AutoSlugField(populate_from=('short_title', ), overwrite=False)
    fees_exemptions = models.ManyToManyField(FeeExemptionOther, null=True, blank=True)
    updates = models.ManyToManyField(Update, null=True, blank=True)
    # Default manager hides deprecated rows (see StatuteManager).
    objects = StatuteManager()
    class Meta:
        verbose_name_plural = 'Statutes'
    def __unicode__(self):
        return self.short_title
    @property
    def deleted(self):
        # Soft-delete flag: a row counts as deleted once `deprecated` is set.
        return self.deprecated is not None
    @property
    def get_short_title(self):
        # Sanitized (HTML tags stripped) title, safe for display.
        return bleach.clean(self.short_title, strip=True)
    @property
    def get_designator(self):
        # Sanitized (HTML tags stripped) designator, safe for display.
        return bleach.clean(self.designator, strip=True)
    @property
    def get_governments(self):
        # Reverse side of Government.statutes (related_name='related_statutes').
        return self.related_statutes.all()
    @property
    def get_text(self):
        # BUG FIX: Python str has no .trim(); the original `self.text.trim()`
        # always raised AttributeError. .strip() has the intended semantics.
        return escape(self.text.strip())
    @property
    def get_days_till_due(self):
        # Negative sentinel (default -1) means no deadline recorded.
        if self.days_till_due < 0:
            return None
        return self.days_till_due
class Holiday(BaseData):
name = models.CharField(max_length = 255) # e.g. Christmas Day 2011, Casimir Pulaski Day 2013
date = models.DateField()
class GovernmentManager(models.Manager):
def get_query_set(self):
return super(GovernmentManager, self).get_query_set().filter(deprecated__isnull=True)
class Government(BaseData):
GOV_LEVELS = (
('I', 'International'),
('S', 'Supernational'),
('0', 'Admin 0 (National)'),
('1', 'Admin 1 (State/Province)'),
('2', 'Admin 2 (County or similar)'),
('3', 'Admin 3 (City or municipality)'),
)
name = models.CharField(max_length=255)
slug = AutoSlugField(populate_from=('name', ), overwrite=False)
level = models.CharField(max_length=1, choices=GOV_LEVELS)
nation = models.ForeignKey(Nation, null=True, blank=True)
statutes = models.ManyToManyField(Statute, null=True, blank=True, related_name='related_statutes')
#deleted = models.BooleanField(default = False)
holidays = models.ManyToManyField(Holiday, null=True, blank=True)
objects = GovernmentManager()
class Meta:
verbose_name_plural = 'Governments'
def __unicode__(self):
return '%s (%s)' % (self.name, self.nation,)
@property
def get_holiday_dates(self):
""" Default to U.S. federal holidays """
utc = pytz.UTC
holidays = self.holidays.all()
if holidays:
return [holiday.date for holiday in holidays]
tz = timezone.get_current_timezone()
datetuples = [
(2013, 1, 1), # New Year's Day
(2013, 1, 21),# Martin Luther King Jr. Day
(2013, 2, 18),# Washington's Birthday
(2013, 5, 27),# Memorial Day
(2013, 7, 4), # Independence Day
(2013, 9, 2), # Labor Day
(2013, 10, 14), # Columbus Day
(2013, 11, 11), # Veterans Day
(2013, 11, 28), # Thanksgiving Day
(2013, 12, 25), # Christmas Day
(2014, 1, 1), # New Year's Day
(2014, 1, 20), # Martin Luther King Jr. Day
(2014, 2, 17), # Washington's Bday
(2014, 5, 26), # Memorial Day
(2014, 7, 4), # Independence Day
(2014, 9, 1), # Labor Day
(2014, 10, 13), # Columbus Day
(2014, 11, 11), # Veterans' Day
(2014, 11, 27), # Thanksgiving Day
(2014, 12, 25) # Christmas Day
]
return map(lambda datetuple: tz.localize(datetime.datetime(*datetuple)), datetuples)
@property
def get_statutes(self):
if self.statutes.all().count() <= 0:
return None
return self.statutes.all().order_by('-days_till_due')
@staticmethod
def get_us_gov_levels():
return {
'state': 1,
'county': 2,
'city': 3
}
| 33.066372 | 114 | 0.656631 | from django.db import models
from django_extensions.db.fields import AutoSlugField
from apps.core.models import BaseData
from django.contrib.auth.models import User, Group
from django.utils.html import escape
import pytz
import datetime
import bleach
from django.utils import timezone
class Language(BaseData):
name = models.CharField(max_length=255)
slug = AutoSlugField(populate_from=('name', ), overwrite=False)
class Meta:
verbose_name_plural = 'Languages'
def __unicode__(self):
return self.name
class AdminName(BaseData):
name = models.CharField(max_length=255)
name_plural = models.CharField(max_length=255, blank=True, null=True)
def __unicode__(self):
return self.name
class Update(BaseData):
author = models.ForeignKey(User)
pubbed = models.DateTimeField(null=True)
headline = models.CharField(max_length=1024, default="The latest")
text = models.TextField()
class FeeExemptionOtherManager(models.Manager):
def all_them(self):
return super(FeeExemptionOtherManager, self).get_query_set()
def get_query_set(self):
return super(FeeExemptionOtherManager, self).get_query_set().filter(deprecated__isnull=True)
class FeeExemptionOther(BaseData):
statute_relation_types = (
('E', 'Exemption'),
('F', 'Fee'),
('O', 'Other'),
)
name = models.CharField(max_length=512)
slug = AutoSlugField(populate_from=('name', ), overwrite=False)
description = models.TextField(blank=True, null=True)
source = models.URLField(blank=True, null=True)
typee = models.CharField(max_length=1, choices=statute_relation_types,)
objects = FeeExemptionOtherManager()
def __unicode__(self):
return self.name
@property
def deleted(self):
return self.deprecated is not None
@property
def get_name(self):
return bleach.clean(self.name, strip=True)
@property
def get_description(self):
return bleach.clean(self.description, strip=True)
@property
def get_source(self):
return bleach.clean(self.source, strip=True)
class Nation(BaseData):
name = models.CharField(max_length=255)
slug = AutoSlugField(populate_from=('name', ), overwrite=False)
primary_language = models.ForeignKey(Language, related_name='primary_language_nations', blank=True, null=True)
foi_languages = models.ManyToManyField(Language, blank=True, null=True)
admin_0_name = models.ForeignKey(AdminName, null=True, blank=True, related_name='admin_0_nations')
admin_1_name = models.ForeignKey(AdminName, null=True, blank=True, related_name='admin_1_nations')
admin_2_name = models.ForeignKey(AdminName, null=True, blank=True, related_name='admin_2_nations')
admin_3_name = models.ForeignKey(AdminName, null=True, blank=True, related_name='admin_3_nations')
class Meta:
verbose_name_plural = 'Nations'
def __unicode__(self):
return self.name
class StatuteManager(models.Manager):
def all_them(self):
return super(StatuteManager, self).get_query_set()
def get_query_set(self):
return super(StatuteManager, self).get_query_set().filter(deprecated__isnull=True)
class Statute(BaseData):
short_title = models.CharField(max_length=255)
designator = models.CharField(max_length=255, blank=True, null=True)
text = models.TextField(blank=True,null=True)
days_till_due = models.IntegerField(default=-1)
slug = AutoSlugField(populate_from=('short_title', ), overwrite=False)
fees_exemptions = models.ManyToManyField(FeeExemptionOther, null=True, blank=True)
updates = models.ManyToManyField(Update, null=True, blank=True)
objects = StatuteManager()
class Meta:
verbose_name_plural = 'Statutes'
def __unicode__(self):
return self.short_title
@property
def deleted(self):
return self.deprecated is not None
@property
def get_short_title(self):
return bleach.clean(self.short_title, strip=True)
@property
def get_designator(self):
return bleach.clean(self.designator, strip=True)
@property
def get_governments(self):
return self.related_statutes.all()
@property
def get_text(self):
return escape(self.text.trim())
@property
def get_days_till_due(self):
if self.days_till_due < 0:
return None
return self.days_till_due
class Holiday(BaseData):
name = models.CharField(max_length = 255)
date = models.DateField()
class GovernmentManager(models.Manager):
def get_query_set(self):
return super(GovernmentManager, self).get_query_set().filter(deprecated__isnull=True)
class Government(BaseData):
GOV_LEVELS = (
('I', 'International'),
('S', 'Supernational'),
('0', 'Admin 0 (National)'),
('1', 'Admin 1 (State/Province)'),
('2', 'Admin 2 (County or similar)'),
('3', 'Admin 3 (City or municipality)'),
)
name = models.CharField(max_length=255)
slug = AutoSlugField(populate_from=('name', ), overwrite=False)
level = models.CharField(max_length=1, choices=GOV_LEVELS)
nation = models.ForeignKey(Nation, null=True, blank=True)
statutes = models.ManyToManyField(Statute, null=True, blank=True, related_name='related_statutes')
holidays = models.ManyToManyField(Holiday, null=True, blank=True)
objects = GovernmentManager()
class Meta:
verbose_name_plural = 'Governments'
def __unicode__(self):
return '%s (%s)' % (self.name, self.nation,)
@property
def get_holiday_dates(self):
utc = pytz.UTC
holidays = self.holidays.all()
if holidays:
return [holiday.date for holiday in holidays]
tz = timezone.get_current_timezone()
datetuples = [
(2013, 1, 1),
(2013, 1, 21),# Martin Luther King Jr. Day
(2013, 2, 18),# Washington's Birthday
(2013, 5, 27),
(2013, 7, 4),
(2013, 9, 2),
(2013, 10, 14),
(2013, 11, 11),
(2013, 11, 28),
(2013, 12, 25),
(2014, 1, 1),
(2014, 1, 20), # Martin Luther King Jr. Day
(2014, 2, 17), # Washington's Bday
(2014, 5, 26),
(2014, 7, 4),
(2014, 9, 1),
(2014, 10, 13),
(2014, 11, 11),
(2014, 11, 27), # Thanksgiving Day
(2014, 12, 25) # Christmas Day
]
return map(lambda datetuple: tz.localize(datetime.datetime(*datetuple)), datetuples)
@property
def get_statutes(self):
if self.statutes.all().count() <= 0:
return None
return self.statutes.all().order_by('-days_till_due')
@staticmethod
def get_us_gov_levels():
return {
'state': 1,
'county': 2,
'city': 3
}
| true | true |
f737a0d88bd9fc37a128dea3b7b35bf6ff70fe73 | 660 | py | Python | flambda_app/aws/__init__.py | madeiramadeirabr/template-serverless-service-python | 17a57e19906f44978dad0c09cff3a16c299ff0c0 | [
"MIT"
] | null | null | null | flambda_app/aws/__init__.py | madeiramadeirabr/template-serverless-service-python | 17a57e19906f44978dad0c09cff3a16c299ff0c0 | [
"MIT"
] | 1 | 2022-03-31T17:55:16.000Z | 2022-03-31T17:59:00.000Z | flambda_app/aws/__init__.py | madeiramadeirabr/template-serverless-service-python | 17a57e19906f44978dad0c09cff3a16c299ff0c0 | [
"MIT"
] | null | null | null | """
AWS module - Keep the aws services adpaters
Version: 1.0.0
"""
def change_endpoint(cls):
endpoint_url = cls.config.get('LOCALSTACK_ENDPOINT', None)
# Fix para tratar diff entre docker/local
if endpoint_url == 'http://0.0.0.0:4566' or \
endpoint_url == 'http://localstack:4566':
old_value = endpoint_url
cls.config.set('LOCALSTACK_ENDPOINT', 'http://localhost:4566')
endpoint_url = cls.config.get('LOCALSTACK_ENDPOINT', None)
cls.logger.debug(
'Changing the endpoint from {} to {}'.format(old_value, endpoint_url))
# override the property
cls.endpoint_url = endpoint_url
| 34.736842 | 82 | 0.654545 |
def change_endpoint(cls):
endpoint_url = cls.config.get('LOCALSTACK_ENDPOINT', None)
if endpoint_url == 'http://0.0.0.0:4566' or \
endpoint_url == 'http://localstack:4566':
old_value = endpoint_url
cls.config.set('LOCALSTACK_ENDPOINT', 'http://localhost:4566')
endpoint_url = cls.config.get('LOCALSTACK_ENDPOINT', None)
cls.logger.debug(
'Changing the endpoint from {} to {}'.format(old_value, endpoint_url))
cls.endpoint_url = endpoint_url
| true | true |
f737a1e53afe9bb36a13cac2dfc94f261f176efd | 5,080 | py | Python | src/subscriber.py | gbroccolo/kafka-schema-registry-python | 65b0b7bd00295f337eda7bb02c57d99bdf5641c0 | [
"MIT"
] | null | null | null | src/subscriber.py | gbroccolo/kafka-schema-registry-python | 65b0b7bd00295f337eda7bb02c57d99bdf5641c0 | [
"MIT"
] | null | null | null | src/subscriber.py | gbroccolo/kafka-schema-registry-python | 65b0b7bd00295f337eda7bb02c57d99bdf5641c0 | [
"MIT"
] | null | null | null | #!/usr/bin/env python
#
# =============================================================================
#
# Consume Avro messages to Kafka getting the schema from the Confluent registry
# Using Confluent Python Client
#
# =============================================================================
from confluent_kafka import avro
from confluent_kafka.avro import AvroConsumer
from confluent_kafka.avro.serializer import SerializerError
import os
from requests import get
from messages import Message
class Subscriber:
def __init__(
self,
topic,
schema_registry_url,
bootstrap_servers,
group_id
):
self.topic = topic
self.bootstrap_servers = bootstrap_servers
self.schema_registry_url = schema_registry_url
self.group_id = group_id
self.schema = self.get_last_schema_version()
# also the message key is serialised in Avro
self.schema_key = avro.loads(
"{\"namespace\": \"io.confluent.schema.keys\", \"name\": \"Key\", \"type\": \"string\"}"
)
self.consumer = AvroConsumer(
{
"bootstrap.servers": self.bootstrap_servers,
"schema.registry.url": self.schema_registry_url,
"group.id": self.group_id,
"auto.offset.reset": "latest"
},
reader_key_schema=self.schema_key,
reader_value_schema=self.schema
)
self.consumer.subscribe([self.topic])
def get_last_schema_version(self):
"""
get the schema from the schema registry
"""
versions = get(
url=f"{schema_registry_url}/subjects/{self.topic}-value/versions/"
).json()
schema = get(
url=(
f"{schema_registry_url}/subjects/{self.topic}-value/"
f"versions/{max(versions)}"
)
).json()["schema"]
return avro.loads(schema)
def to_message(self, dict_obj):
"""
messages are serialised in Avro using a dict representation of the
messages (which need to be defined as data classes). Messages are
then deserialised passing from dictionaries to the actual Message
object.
Args:
dict_obj (dict): the message as a Python dictionary
Returns:
(Message): the user-defined message object
"""
if not dict_obj:
return None
return Message(
string_key=dict_obj["stringKey"],
int_key=dict_obj["intKey"]
)
def poll(self, timeout=1.0, max_num_records_per_poll=5):
"""
wrapper generator of the `poll` method
NOTE: Confluent client doesn't have the batching
system yet implemented, we need to implement
it here
Args:
timeout (float): timeout in the poll operation
max_num_records_per_poll (int): number of polled records
required to commit
"""
num_records = 1
while True:
try:
# SIGINT can't be handled when polling, set a proper timeout
msg = self.consumer.poll(timeout)
if msg is None:
continue
yield msg.key(), self.to_message(msg.value())
num_records += 1
if num_records == max_num_records_per_poll:
self.consumer.commit()
num_records = 1
except SerializerError as e:
# try to reload the consumer with the latest schema uploaded
# in the schema registry
self.consumer = AvroConsumer(
{
"bootstrap.servers": self.bootstrap_servers,
"schema.registry.url": self.schema_registry_url,
"group.id": self.group_id,
"auto.offset.reset": "latest"
},
reader_key_schema=self.schema_key,
reader_value_schema=self.schema
)
self.consumer.subscribe([self.topic])
continue
except KeyboardInterrupt:
self.consumer.commit()
break
if __name__ == "__main__":
bootstrap_servers = os.environ.get("BOOTSTRAP_SERVERS")
topic = os.environ.get("TOPIC")
schema_registry_url = os.environ.get("SCHEMA_REGISTRY_URL")
group_id = os.environ.get("KAFKA_GROUP_ID")
timeout = float(os.environ.get("KAFKA_POLL_TIMEOUT"))
max_num_records_per_poll = int(os.environ.get("KAFKA_POLL_MAX_RECS_PER_POLL"))
subscriber = Subscriber(
topic,
schema_registry_url,
bootstrap_servers,
group_id
)
for message in subscriber.poll(timeout, max_num_records_per_poll):
print(
f"Consumed record {message[0]}:\tstring: {message[1].string_key}"
f", number: {message[1].int_key}"
)
| 30.97561 | 100 | 0.551575 |
from confluent_kafka import avro
from confluent_kafka.avro import AvroConsumer
from confluent_kafka.avro.serializer import SerializerError
import os
from requests import get
from messages import Message
class Subscriber:
def __init__(
self,
topic,
schema_registry_url,
bootstrap_servers,
group_id
):
self.topic = topic
self.bootstrap_servers = bootstrap_servers
self.schema_registry_url = schema_registry_url
self.group_id = group_id
self.schema = self.get_last_schema_version()
self.schema_key = avro.loads(
"{\"namespace\": \"io.confluent.schema.keys\", \"name\": \"Key\", \"type\": \"string\"}"
)
self.consumer = AvroConsumer(
{
"bootstrap.servers": self.bootstrap_servers,
"schema.registry.url": self.schema_registry_url,
"group.id": self.group_id,
"auto.offset.reset": "latest"
},
reader_key_schema=self.schema_key,
reader_value_schema=self.schema
)
self.consumer.subscribe([self.topic])
def get_last_schema_version(self):
versions = get(
url=f"{schema_registry_url}/subjects/{self.topic}-value/versions/"
).json()
schema = get(
url=(
f"{schema_registry_url}/subjects/{self.topic}-value/"
f"versions/{max(versions)}"
)
).json()["schema"]
return avro.loads(schema)
def to_message(self, dict_obj):
if not dict_obj:
return None
return Message(
string_key=dict_obj["stringKey"],
int_key=dict_obj["intKey"]
)
def poll(self, timeout=1.0, max_num_records_per_poll=5):
num_records = 1
while True:
try:
msg = self.consumer.poll(timeout)
if msg is None:
continue
yield msg.key(), self.to_message(msg.value())
num_records += 1
if num_records == max_num_records_per_poll:
self.consumer.commit()
num_records = 1
except SerializerError as e:
# try to reload the consumer with the latest schema uploaded
# in the schema registry
self.consumer = AvroConsumer(
{
"bootstrap.servers": self.bootstrap_servers,
"schema.registry.url": self.schema_registry_url,
"group.id": self.group_id,
"auto.offset.reset": "latest"
},
reader_key_schema=self.schema_key,
reader_value_schema=self.schema
)
self.consumer.subscribe([self.topic])
continue
except KeyboardInterrupt:
self.consumer.commit()
break
if __name__ == "__main__":
bootstrap_servers = os.environ.get("BOOTSTRAP_SERVERS")
topic = os.environ.get("TOPIC")
schema_registry_url = os.environ.get("SCHEMA_REGISTRY_URL")
group_id = os.environ.get("KAFKA_GROUP_ID")
timeout = float(os.environ.get("KAFKA_POLL_TIMEOUT"))
max_num_records_per_poll = int(os.environ.get("KAFKA_POLL_MAX_RECS_PER_POLL"))
subscriber = Subscriber(
topic,
schema_registry_url,
bootstrap_servers,
group_id
)
for message in subscriber.poll(timeout, max_num_records_per_poll):
print(
f"Consumed record {message[0]}:\tstring: {message[1].string_key}"
f", number: {message[1].int_key}"
)
| true | true |
f737a257be0d0f280b5946675a4371bfe4c4addb | 683 | py | Python | scripts/test_DDPG.py | PierreExeter/gym-reacher | d58edeb93b4b703101dc0505232c883fd012dbad | [
"MIT"
] | null | null | null | scripts/test_DDPG.py | PierreExeter/gym-reacher | d58edeb93b4b703101dc0505232c883fd012dbad | [
"MIT"
] | null | null | null | scripts/test_DDPG.py | PierreExeter/gym-reacher | d58edeb93b4b703101dc0505232c883fd012dbad | [
"MIT"
] | null | null | null | import gym
import numpy as np
import gym_reacher
from stable_baselines.ddpg.policies import MlpPolicy
from stable_baselines.common.noise import NormalActionNoise, OrnsteinUhlenbeckActionNoise, AdaptiveParamNoiseSpec
from stable_baselines import DDPG
# env = gym.make('MountainCarContinuous-v0')
env = gym.make('Reacher1Dof-v0')
# the noise objects for DDPG
n_actions = env.action_space.shape[-1]
param_noise = None
action_noise = OrnsteinUhlenbeckActionNoise(mean=np.zeros(n_actions), sigma=float(0.5) * np.ones(n_actions))
model = DDPG(MlpPolicy, env, verbose=1, param_noise=param_noise, action_noise=action_noise)
model.learn(total_timesteps=10000)
model.save("ddpg_mountain")
| 34.15 | 113 | 0.818448 | import gym
import numpy as np
import gym_reacher
from stable_baselines.ddpg.policies import MlpPolicy
from stable_baselines.common.noise import NormalActionNoise, OrnsteinUhlenbeckActionNoise, AdaptiveParamNoiseSpec
from stable_baselines import DDPG
env = gym.make('Reacher1Dof-v0')
n_actions = env.action_space.shape[-1]
param_noise = None
action_noise = OrnsteinUhlenbeckActionNoise(mean=np.zeros(n_actions), sigma=float(0.5) * np.ones(n_actions))
model = DDPG(MlpPolicy, env, verbose=1, param_noise=param_noise, action_noise=action_noise)
model.learn(total_timesteps=10000)
model.save("ddpg_mountain")
| true | true |
f737a2f63fcf5adfe979994b320fbd4e88d77b6b | 7,169 | py | Python | pnnl/AirsideRCxAgent/airside/diagnostics/common.py | jklarson/volttron-applications | 159e7ca12e3a7935c7882a29b4d9c720e1c8b689 | [
"BSD-3-Clause"
] | null | null | null | pnnl/AirsideRCxAgent/airside/diagnostics/common.py | jklarson/volttron-applications | 159e7ca12e3a7935c7882a29b4d9c720e1c8b689 | [
"BSD-3-Clause"
] | null | null | null | pnnl/AirsideRCxAgent/airside/diagnostics/common.py | jklarson/volttron-applications | 159e7ca12e3a7935c7882a29b4d9c720e1c8b689 | [
"BSD-3-Clause"
] | null | null | null | """
Copyright (c) 2017, Battelle Memorial Institute
All rights reserved.
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions are met:
1. Redistributions of source code must retain the above copyright notice, this
list of conditions and the following disclaimer.
2. Redistributions in binary form must reproduce the above copyright notice,
this list of conditions and the following disclaimer in the documentation
and/or other materials provided with the distribution.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR
ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
(INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
The views and conclusions contained in the software and documentation are those
of the authors and should not be interpreted as representing official policies,
either expressed or implied, of the FreeBSD Project.
This material was prepared as an account of work sponsored by an agency of the
United States Government. Neither the United States Government nor the United
States Department of Energy, nor Battelle, nor any of their employees, nor any
jurisdiction or organization that has cooperated in the development of these
materials, makes any warranty, express or implied, or assumes any legal
liability or responsibility for the accuracy, completeness, or usefulness or
any information, apparatus, product, software, or process disclosed, or
represents that its use would not infringe privately owned rights.
Reference herein to any specific commercial product, process, or service by
trade name, trademark, manufacturer, or otherwise does not necessarily
constitute or imply its endorsement, recommendation, or favoring by the
United States Government or any agency thereof, or Battelle Memorial Institute.
The views and opinions of authors expressed herein do not necessarily state or
reflect those of the United States Government or any agency thereof.
PACIFIC NORTHWEST NATIONAL LABORATORY
operated by
BATTELLE
for the
UNITED STATES DEPARTMENT OF ENERGY
under Contract DE-AC05-76RL01830
"""
from datetime import timedelta as td
from volttron.platform.agent.math_utils import mean
DX = '/diagnostic message'
"""Common functions used across multiple algorithms."""
def create_table_key(table_name, timestamp):
return "&".join([table_name, timestamp.isoformat()])
def check_date(current_time, timestamp_array):
"""
Check current timestamp with previous timestamp to verify that there are no large missing data gaps.
:param current_time:
:param timestamp_array:
:return:
"""
if not timestamp_array:
return False
if current_time.date() != timestamp_array[-1].date():
if (timestamp_array[-1].date() + td(days=1) != current_time.date() or
(timestamp_array[-1].hour != 23 and current_time.hour == 0)):
return True
return False
def check_run_status(timestamp_array, current_time, no_required_data, minimum_diagnostic_time=None,
run_schedule="hourly", minimum_point_array=None):
"""
The diagnostics run at a regular interval (some minimum elapsed amount of time) and have a
minimum data count requirement (each time series of data must contain some minimum number of points).
:param timestamp_array:
:param current_time:
:param no_required_data:
:param minimum_diagnostic_time:
:param run_schedule:
:param minimum_point_array:
:return:
"""
def minimum_data():
min_data_array = timestamp_array if minimum_point_array is None else minimum_point_array
if len(min_data_array) < no_required_data:
return None
return True
if minimum_diagnostic_time is not None and timestamp_array:
sampling_interval = td(minutes=
round(((timestamp_array[-1] - timestamp_array[0]) / len(timestamp_array)).total_seconds() / 60))
required_time = (timestamp_array[-1] - timestamp_array[0]) + sampling_interval
if required_time >= minimum_diagnostic_time:
return minimum_data()
return False
if run_schedule == "hourly":
if timestamp_array and timestamp_array[-1].hour != current_time.hour:
return minimum_data()
elif run_schedule == "daily":
if timestamp_array and timestamp_array[-1].date() != current_time.date():
return minimum_data()
return False
def setpoint_control_check(set_point_array, point_array, setpoint_deviation_threshold, dx_name, dx_offset, dx_result):
"""
Verify that point if tracking with set point - identify potential control or sensor problems.
:param set_point_array:
:param point_array:
:param allowable_deviation:
:param dx_name:
:param dx_offset:
:param dx_result:
:return:
"""
avg_set_point = None
diagnostic_msg = {}
for key, threshold in list(setpoint_deviation_threshold.items()):
if set_point_array:
avg_set_point = sum(set_point_array)/len(set_point_array)
zipper = (set_point_array, point_array)
set_point_tracking = [abs(x - y) for x, y in zip(*zipper)]
set_point_tracking = mean(set_point_tracking)/avg_set_point*100.
if set_point_tracking > threshold:
# color_code = 'red'
msg = '{} - {}: point deviating significantly from set point.'.format(key, dx_name)
result = 1.1 + dx_offset
else:
# color_code = 'green'
msg = " {} - No problem detected for {} set".format(key, dx_name)
result = 0.0 + dx_offset
else:
# color_code = 'grey'
msg = "{} - {} set point data is not available.".format(key, dx_name)
result = 2.2 + dx_offset
dx_result.log(msg)
diagnostic_msg.update({key: result})
dx_table = {dx_name + DX: diagnostic_msg}
return avg_set_point, dx_table, dx_result
def pre_conditions(message, dx_list, analysis, cur_time, dx_result):
"""
Check for persistence of failure to meet pre-conditions for diagnostics.
:param message:
:param dx_list:
:param analysis:
:param cur_time:
:param dx_result:
:return:
"""
dx_msg = {'low': message, 'normal': message, 'high': message}
for diagnostic in dx_list:
dx_table = {diagnostic + DX: dx_msg}
table_key = create_table_key(analysis, cur_time)
dx_result.insert_table_row(table_key, dx_table)
return dx_result
| 42.420118 | 118 | 0.719487 | from datetime import timedelta as td
from volttron.platform.agent.math_utils import mean
DX = '/diagnostic message'
def create_table_key(table_name, timestamp):
return "&".join([table_name, timestamp.isoformat()])
def check_date(current_time, timestamp_array):
if not timestamp_array:
return False
if current_time.date() != timestamp_array[-1].date():
if (timestamp_array[-1].date() + td(days=1) != current_time.date() or
(timestamp_array[-1].hour != 23 and current_time.hour == 0)):
return True
return False
def check_run_status(timestamp_array, current_time, no_required_data, minimum_diagnostic_time=None,
run_schedule="hourly", minimum_point_array=None):
def minimum_data():
min_data_array = timestamp_array if minimum_point_array is None else minimum_point_array
if len(min_data_array) < no_required_data:
return None
return True
if minimum_diagnostic_time is not None and timestamp_array:
sampling_interval = td(minutes=
round(((timestamp_array[-1] - timestamp_array[0]) / len(timestamp_array)).total_seconds() / 60))
required_time = (timestamp_array[-1] - timestamp_array[0]) + sampling_interval
if required_time >= minimum_diagnostic_time:
return minimum_data()
return False
if run_schedule == "hourly":
if timestamp_array and timestamp_array[-1].hour != current_time.hour:
return minimum_data()
elif run_schedule == "daily":
if timestamp_array and timestamp_array[-1].date() != current_time.date():
return minimum_data()
return False
def setpoint_control_check(set_point_array, point_array, setpoint_deviation_threshold, dx_name, dx_offset, dx_result):
avg_set_point = None
diagnostic_msg = {}
for key, threshold in list(setpoint_deviation_threshold.items()):
if set_point_array:
avg_set_point = sum(set_point_array)/len(set_point_array)
zipper = (set_point_array, point_array)
set_point_tracking = [abs(x - y) for x, y in zip(*zipper)]
set_point_tracking = mean(set_point_tracking)/avg_set_point*100.
if set_point_tracking > threshold:
msg = '{} - {}: point deviating significantly from set point.'.format(key, dx_name)
result = 1.1 + dx_offset
else:
msg = " {} - No problem detected for {} set".format(key, dx_name)
result = 0.0 + dx_offset
else:
msg = "{} - {} set point data is not available.".format(key, dx_name)
result = 2.2 + dx_offset
dx_result.log(msg)
diagnostic_msg.update({key: result})
dx_table = {dx_name + DX: diagnostic_msg}
return avg_set_point, dx_table, dx_result
def pre_conditions(message, dx_list, analysis, cur_time, dx_result):
dx_msg = {'low': message, 'normal': message, 'high': message}
for diagnostic in dx_list:
dx_table = {diagnostic + DX: dx_msg}
table_key = create_table_key(analysis, cur_time)
dx_result.insert_table_row(table_key, dx_table)
return dx_result
| true | true |
f737a33e0dbf48ba1f1250e17fcee8894546d8d5 | 9,743 | py | Python | lib/spack/spack/test/cmd_extensions.py | VladimirUspenskii/spack | 18b83c3833c0e138a7153bbb14e68b1147b3f3d1 | [
"ECL-2.0",
"Apache-2.0",
"MIT-0",
"MIT"
] | null | null | null | lib/spack/spack/test/cmd_extensions.py | VladimirUspenskii/spack | 18b83c3833c0e138a7153bbb14e68b1147b3f3d1 | [
"ECL-2.0",
"Apache-2.0",
"MIT-0",
"MIT"
] | 6 | 2022-01-08T08:41:11.000Z | 2022-03-14T19:28:07.000Z | lib/spack/spack/test/cmd_extensions.py | foeroyingur/spack | 5300cbbb2e569190015c72d0970d25425ea38647 | [
"ECL-2.0",
"Apache-2.0",
"MIT-0",
"MIT"
] | null | null | null | # Copyright 2013-2022 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
import contextlib
import os
import sys
import pytest
import spack.cmd
import spack.config
import spack.extensions
import spack.main
class Extension:
    """Helper for building a minimal Spack command-extension directory
    tree in the conventional layout expected by ``spack.extensions``.
    """
    def __init__(self, name, root):
        """Set up the extension's directory skeleton.

        Args:
            name (str): The name of the command extension.
            root (path object): The temporary root for the command extension
                (e.g. from tmpdir.mkdir()).
        """
        self.name = name
        self.root = root
        self.pname = spack.cmd.python_name(name)
        # Conventional layout: <root>/<python_name>/cmd/
        self.main = self.root.ensure(self.pname, dir=True)
        self.cmd = self.main.ensure('cmd', dir=True)

    def add_command(self, command_name, contents):
        """Create a command module inside this extension.

        Args:
            command_name (str): The name of the command.
            contents (str): the desired contents of the new command module
                file.
        """
        spack.cmd.require_cmd_name(command_name)
        module_name = spack.cmd.python_name(command_name)
        command_file = self.cmd.ensure(module_name + '.py')
        command_file.write(contents)
@pytest.fixture(scope='function')
def extension_creator(tmpdir, config):
    """Provide a context-manager factory that builds a basic command
    extension directory structure and registers it with Spack's
    ``config:extensions`` setting for the duration of the context.
    """
    @contextlib.contextmanager
    def _make_extension(extension_name='testcommand'):
        ext_root = tmpdir.mkdir('spack-' + extension_name)
        ext = Extension(extension_name, ext_root)
        # Make the extension discoverable while the context is active.
        with spack.config.override('config:extensions',
                                   [str(ext.root)]):
            yield ext

    # Snapshot the loaded modules so that anything imported by the
    # extension machinery during the test can be unloaded afterwards.
    preexisting_modules = list(sys.modules.keys())
    try:
        yield _make_extension
    finally:
        added = [m for m in sys.modules if m not in preexisting_modules]
        for module_name in added:
            del sys.modules[module_name]
@pytest.fixture(scope='function')
def hello_world_extension(extension_creator):
    """Create an extension with a hello-world command."""
    with extension_creator() as extension:
        # The module text below is written verbatim to
        # <extension>/cmd/hello_world.py; it follows the standard Spack
        # command layout: metadata globals plus a callable named after
        # the command (dashes become underscores).
        extension.add_command('hello-world', """
description = "hello world extension command"
section = "test command"
level = "long"


def setup_parser(subparser):
    pass


def hello_world(parser, args):
    print('Hello world!')
""")
        yield extension
@pytest.fixture(scope='function')
def hello_world_cmd(hello_world_extension):
    """Yield an invokable "hello-world" extension command."""
    # The hello_world_extension fixture has already registered the
    # extension, so looking the command up by name succeeds here.
    command = spack.main.SpackCommand('hello-world')
    yield command
@pytest.fixture(scope='function')
def hello_world_with_module_in_root(extension_creator):
    """Create a "hello-world" extension command with additional code in the
    root folder.
    """
    @contextlib.contextmanager
    def _hwwmir(extension_name=None):
        # Only forward the name when one was given, so the creator's
        # default extension name applies otherwise.
        with extension_creator(extension_name) \
            if extension_name else \
            extension_creator() as extension:
            # Note that the namespace of the extension is derived from the
            # fixture.
            extension.add_command('hello', """
# Test an absolute import
from spack.extensions.{ext_pname}.implementation import hello_world
# Test a relative import
from ..implementation import hello_folks
description = "hello world extension command"
section = "test command"
level = "long"
# Test setting a global variable in setup_parser and retrieving
# it in the command
global_message = 'foo'
def setup_parser(subparser):
    sp = subparser.add_subparsers(metavar='SUBCOMMAND', dest='subcommand')
    global global_message
    sp.add_parser('world', help='Print Hello world!')
    sp.add_parser('folks', help='Print Hello folks!')
    sp.add_parser('global', help='Print Hello folks!')
    global_message = 'bar'
def hello(parser, args):
    if args.subcommand == 'world':
        hello_world()
    elif args.subcommand == 'folks':
        hello_folks()
    elif args.subcommand == 'global':
        print(global_message)
""".format(ext_pname=extension.pname))
            # The package needs an __init__.py so relative imports resolve.
            extension.main.ensure('__init__.py')
            implementation \
                = extension.main.ensure('implementation.py')
            implementation.write("""
def hello_world():
    print('Hello world!')
def hello_folks():
    print('Hello folks!')
""")
            yield spack.main.SpackCommand('hello')
    yield _hwwmir
def test_simple_command_extension(hello_world_cmd):
    """Check that invoking a basic extension command succeeds."""
    assert 'Hello world!' in hello_world_cmd()
def test_multi_extension_search(hello_world_extension, extension_creator):
    """An extension command is found even when its extension is not the
    first one searched.
    """
    with extension_creator('testcommand2'):
        output = spack.main.SpackCommand('hello-world')()
        assert 'Hello world' in output
def test_duplicate_module_load(hello_world_cmd, capsys):
    """Ensure duplicate module load attempts are successful.
    The command module will already have been loaded once by the
    hello_world_cmd fixture.
    """
    parser = spack.main.make_argument_parser()
    args = []
    # Looking the command up again triggers a second load of its module.
    hw_cmd = spack.cmd.get_command(hello_world_cmd.command_name)
    hw_cmd(parser, args)
    captured = capsys.readouterr()
    # (stdout, stderr): the command prints its greeting; stderr is empty.
    assert captured == ('Hello world!\n', '')
@pytest.mark.parametrize('extension_name',
                         [None, 'hyphenated-extension'],
                         ids=['simple', 'hyphenated_extension_name'])
def test_command_with_import(extension_name, hello_world_with_module_in_root):
    """A command with multiple imported subcommands works, including when
    the extension name contains a hyphen.
    """
    # Each subcommand and the output it is expected to produce.
    expected = [('world', 'Hello world!'),
                ('folks', 'Hello folks!'),
                ('global', 'bar')]
    with hello_world_with_module_in_root(extension_name) as hello_world:
        for subcommand, message in expected:
            assert message in hello_world(subcommand)
def test_missing_command():
    """Ensure that we raise the expected exception if the desired command is
    not present.
    """
    # No extension provides this command, so lookup must fail loudly.
    with pytest.raises(spack.extensions.CommandNotFoundError):
        spack.cmd.get_module("no-such-command")
@pytest.mark.parametrize(
    'extension_path,expected_exception',
    [('/my/bad/extension', spack.extensions.ExtensionNamingError),
     ('', spack.extensions.ExtensionNamingError),
     ('/my/bad/spack--extra-hyphen', spack.extensions.ExtensionNamingError),
     ('/my/good/spack-extension', spack.extensions.CommandNotFoundError),
     ('/my/still/good/spack-extension/', spack.extensions.CommandNotFoundError),
     ('/my/spack-hyphenated-extension', spack.extensions.CommandNotFoundError)],
    ids=['no_stem', 'vacuous', 'leading_hyphen',
         'basic_good', 'trailing_slash', 'hyphenated'])
def test_extension_naming(extension_path, expected_exception, config):
    """Validate configured extension paths against the naming rules.
    The basename should match ``spack-<name>``; <name> may have embedded
    hyphens but not begin with one.  Well-named paths still fail command
    lookup (CommandNotFoundError); badly named ones fail earlier
    (ExtensionNamingError).
    """
    with spack.config.override('config:extensions', [extension_path]):
        with pytest.raises(expected_exception):
            spack.cmd.get_module("no-such-command")
def test_missing_command_function(extension_creator, capsys):
    """A command module lacking the expected command function aborts."""
    with extension_creator() as extension:
        # The module defines only metadata, not the required bad_cmd().
        contents = """\ndescription = "Empty command implementation"\n"""
        extension.add_command('bad-cmd', contents)
        with pytest.raises(SystemExit):
            spack.cmd.get_module('bad-cmd')
        _, stderr = capsys.readouterr()
        assert "must define function 'bad_cmd'." in stderr
def test_get_command_paths(config):
    """Exercise the construction of extension command search paths."""
    extensions = ('extension-1', 'extension-2')
    # Configured extension roots, named per the spack-<name> convention.
    ext_paths = [os.path.join('my', 'path', 'to', 'spack-' + ext)
                 for ext in extensions]
    # Each root should contribute <root>/<python_name>/cmd to the search.
    expected_cmd_paths = [
        os.path.join(path, spack.cmd.python_name(ext), 'cmd')
        for ext, path in zip(extensions, ext_paths)
    ]
    with spack.config.override('config:extensions', ext_paths):
        assert spack.extensions.get_command_paths() == expected_cmd_paths
@pytest.mark.parametrize('command_name,contents,exception',
                         [('bad-cmd', 'from oopsie.daisy import bad\n',
                           ImportError),
                          ('bad-cmd', """var = bad_function_call('blech')\n""",
                           NameError),
                          ('bad-cmd', ')\n', SyntaxError)],
                         ids=['ImportError', 'NameError', 'SyntaxError'])
def test_failing_command(command_name, contents, exception, extension_creator):
    """Ensure that the configured command fails to import with the specified
    error.
    """
    # Each parametrized `contents` is a broken command module that should
    # raise the paired exception at import time.
    with extension_creator() as extension:
        extension.add_command(command_name, contents)
        with pytest.raises(exception):
            spack.extensions.get_module(command_name)
| 35.429091 | 79 | 0.652366 |
import contextlib
import os
import sys
import pytest
import spack.cmd
import spack.config
import spack.extensions
import spack.main
class Extension:
    """Helper that builds a conventional command-extension directory tree."""
    def __init__(self, name, root):
        """Create the extension skeleton under ``root`` for ``name``."""
        self.name = name
        # Hyphenated extension names map to underscored Python names.
        self.pname = spack.cmd.python_name(name)
        self.root = root
        # Package dir and its 'cmd' subdir holding command modules.
        self.main = self.root.ensure(self.pname, dir=True)
        self.cmd = self.main.ensure('cmd', dir=True)
    def add_command(self, command_name, contents):
        """Write ``contents`` as the module for ``command_name``."""
        spack.cmd.require_cmd_name(command_name)
        python_name = spack.cmd.python_name(command_name)
        cmd = self.cmd.ensure(python_name + '.py')
        cmd.write(contents)
@pytest.fixture(scope='function')
def extension_creator(tmpdir, config):
    """Yield a context manager that builds and registers an extension."""
    @contextlib.contextmanager
    def _ce(extension_name='testcommand'):
        # Register the new directory as an extension search path for the
        # duration of the context.
        root = tmpdir.mkdir('spack-' + extension_name)
        extension = Extension(extension_name, root)
        with spack.config.override('config:extensions',
                                   [str(extension.root)]):
            yield extension
    # Snapshot loaded modules so extension modules imported during the
    # test can be unloaded afterwards, keeping tests independent.
    list_of_modules = list(sys.modules.keys())
    try:
        yield _ce
    finally:
        to_be_deleted = [x for x in sys.modules if x not in list_of_modules]
        for module_name in to_be_deleted:
            del sys.modules[module_name]
@pytest.fixture(scope='function')
def hello_world_extension(extension_creator):
    """Create an extension providing a hello-world command."""
    with extension_creator() as extension:
        # The string is the full source of the generated command module.
        extension.add_command('hello-world', """
description = "hello world extension command"
section = "test command"
level = "long"
def setup_parser(subparser):
    pass
def hello_world(parser, args):
    print('Hello world!')
""")
        yield extension
@pytest.fixture(scope='function')
def hello_world_cmd(hello_world_extension):
    """Return an invokable "hello-world" extension command."""
    yield spack.main.SpackCommand('hello-world')
@pytest.fixture(scope='function')
def hello_world_with_module_in_root(extension_creator):
    """Create a "hello" command whose implementation lives in a module in
    the extension's package root, exercising absolute and relative imports.
    """
    @contextlib.contextmanager
    def _hwwmir(extension_name=None):
        # Only forward the name when one was given, so the creator's
        # default extension name applies otherwise.
        with extension_creator(extension_name) \
            if extension_name else \
            extension_creator() as extension:
            extension.add_command('hello', """
# Test an absolute import
from spack.extensions.{ext_pname}.implementation import hello_world
# Test a relative import
from ..implementation import hello_folks
description = "hello world extension command"
section = "test command"
level = "long"
# Test setting a global variable in setup_parser and retrieving
# it in the command
global_message = 'foo'
def setup_parser(subparser):
    sp = subparser.add_subparsers(metavar='SUBCOMMAND', dest='subcommand')
    global global_message
    sp.add_parser('world', help='Print Hello world!')
    sp.add_parser('folks', help='Print Hello folks!')
    sp.add_parser('global', help='Print Hello folks!')
    global_message = 'bar'
def hello(parser, args):
    if args.subcommand == 'world':
        hello_world()
    elif args.subcommand == 'folks':
        hello_folks()
    elif args.subcommand == 'global':
        print(global_message)
""".format(ext_pname=extension.pname))
            # The package needs an __init__.py so relative imports resolve.
            extension.main.ensure('__init__.py')
            implementation \
                = extension.main.ensure('implementation.py')
            implementation.write("""
def hello_world():
    print('Hello world!')
def hello_folks():
    print('Hello folks!')
""")
            yield spack.main.SpackCommand('hello')
    yield _hwwmir
def test_simple_command_extension(hello_world_cmd):
    """Basic test of a functioning command."""
    output = hello_world_cmd()
    assert 'Hello world!' in output
def test_multi_extension_search(hello_world_extension, extension_creator):
    """A command is found even when its extension is not searched first."""
    with extension_creator('testcommand2'):
        assert ('Hello world') in spack.main.SpackCommand('hello-world')()
def test_duplicate_module_load(hello_world_cmd, capsys):
    """Duplicate module-load attempts succeed; the fixture already loaded
    the command module once.
    """
    parser = spack.main.make_argument_parser()
    args = []
    hw_cmd = spack.cmd.get_command(hello_world_cmd.command_name)
    hw_cmd(parser, args)
    captured = capsys.readouterr()
    # (stdout, stderr): the greeting goes to stdout, stderr is empty.
    assert captured == ('Hello world!\n', '')
@pytest.mark.parametrize('extension_name',
                         [None, 'hyphenated-extension'],
                         ids=['simple', 'hyphenated_extension_name'])
def test_command_with_import(extension_name, hello_world_with_module_in_root):
    """A command with multiple imported subcommands works, including when
    the extension name contains a hyphen.
    """
    with hello_world_with_module_in_root(extension_name) as hello_world:
        output = hello_world('world')
        assert 'Hello world!' in output
        output = hello_world('folks')
        assert 'Hello folks!' in output
        output = hello_world('global')
        assert 'bar' in output
def test_missing_command():
    """Looking up a nonexistent command raises CommandNotFoundError."""
    with pytest.raises(spack.extensions.CommandNotFoundError):
        spack.cmd.get_module("no-such-command")
@pytest.mark.\
    parametrize('extension_path,expected_exception',
                [('/my/bad/extension',
                  spack.extensions.ExtensionNamingError),
                 ('', spack.extensions.ExtensionNamingError),
                 ('/my/bad/spack--extra-hyphen',
                  spack.extensions.ExtensionNamingError),
                 ('/my/good/spack-extension',
                  spack.extensions.CommandNotFoundError),
                 ('/my/still/good/spack-extension/',
                  spack.extensions.CommandNotFoundError),
                 ('/my/spack-hyphenated-extension',
                  spack.extensions.CommandNotFoundError)],
                ids=['no_stem', 'vacuous', 'leading_hyphen',
                     'basic_good', 'trailing_slash', 'hyphenated'])
def test_extension_naming(extension_path, expected_exception, config):
    """Configured extension paths must follow the ``spack-<name>`` rule;
    <name> may contain hyphens but not begin with one.  Well-named paths
    still fail command lookup; badly named ones fail earlier.
    """
    with spack.config.override('config:extensions', [extension_path]):
        with pytest.raises(expected_exception):
            spack.cmd.get_module("no-such-command")
def test_missing_command_function(extension_creator, capsys):
    """A command module lacking the expected command function aborts."""
    with extension_creator() as extension:
        # Module defines metadata only, not the required bad_cmd().
        extension.\
            add_command('bad-cmd',
                        """\ndescription = "Empty command implementation"\n""")
        with pytest.raises(SystemExit):
            spack.cmd.get_module('bad-cmd')
        capture = capsys.readouterr()
        # The error message is printed on stderr (index 1).
        assert "must define function 'bad_cmd'." in capture[1]
def test_get_command_paths(config):
    """Exercise the construction of extension command search paths."""
    extensions = ('extension-1', 'extension-2')
    ext_paths = []
    expected_cmd_paths = []
    for ext in extensions:
        ext_path = os.path.join('my', 'path', 'to', 'spack-' + ext)
        ext_paths.append(ext_path)
        # Each root contributes <root>/<python_name>/cmd to the search.
        expected_cmd_paths.append(os.path.join(ext_path,
                                               spack.cmd.python_name(ext),
                                               'cmd'))
    with spack.config.override('config:extensions', ext_paths):
        assert spack.extensions.get_command_paths() == expected_cmd_paths
@pytest.mark.parametrize('command_name,contents,exception',
                         [('bad-cmd', 'from oopsie.daisy import bad\n',
                           ImportError),
                          ('bad-cmd', """var = bad_function_call('blech')\n""",
                           NameError),
                          ('bad-cmd', ')\n', SyntaxError)],
                         ids=['ImportError', 'NameError', 'SyntaxError'])
def test_failing_command(command_name, contents, exception, extension_creator):
    """Each broken command module raises its paired exception on import."""
    with extension_creator() as extension:
        extension.add_command(command_name, contents)
        with pytest.raises(exception):
            spack.extensions.get_module(command_name)
| true | true |
f737a379bf000ac7570c9ee10198f30b4dc9e12a | 6,924 | py | Python | tests/gold_tests/tls/tls_verify_ca_override.test.py | a-canary/trafficserver | df01ace2b0bdffd3ddcc5b2c7587b6d6fed5234c | [
"Apache-2.0"
] | 3 | 2019-10-11T06:19:16.000Z | 2020-07-24T05:46:38.000Z | tests/gold_tests/tls/tls_verify_ca_override.test.py | a-canary/trafficserver | df01ace2b0bdffd3ddcc5b2c7587b6d6fed5234c | [
"Apache-2.0"
] | 1 | 2021-02-23T12:43:22.000Z | 2021-02-23T12:57:13.000Z | tests/gold_tests/tls/tls_verify_ca_override.test.py | isabella232/trafficserver | a52bd121080dd94f757e54ed65fae2188472b004 | [
"Apache-2.0"
] | 2 | 2020-07-24T05:46:43.000Z | 2020-08-20T01:27:50.000Z | '''
'''
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
Test.Summary = '''
Test tls server certificate verification options. Exercise conf_remap for ca bundle
'''
# Define default ATS
ts = Test.MakeATSProcess("ts", select_ports=True)
# Two TLS origin servers presenting foo.com certificates; each cert was
# issued by a different CA (signed-foo.pem vs signed2-foo.pem), so each
# validates against a different CA bundle.
server1 = Test.MakeOriginServer("server1",
                                ssl=True,
                                options={"--key": "{0}/signed-foo.key".format(Test.RunDirectory),
                                         "--cert": "{0}/signed-foo.pem".format(Test.RunDirectory)})
server2 = Test.MakeOriginServer("server2",
                                ssl=True,
                                options={"--key": "{0}/signed-foo.key".format(Test.RunDirectory),
                                         "--cert": "{0}/signed2-foo.pem".format(Test.RunDirectory)})
# Canned origin transactions: any of these Host headers gets a 200.
request_foo_header = {"headers": "GET / HTTP/1.1\r\nHost: foo.com\r\n\r\n", "timestamp": "1469733493.993", "body": ""}
request_bad_foo_header = {"headers": "GET / HTTP/1.1\r\nHost: bad_foo.com\r\n\r\n", "timestamp": "1469733493.993", "body": ""}
request_bar_header = {"headers": "GET / HTTP/1.1\r\nHost: bar.com\r\n\r\n", "timestamp": "1469733493.993", "body": ""}
request_bad_bar_header = {"headers": "GET / HTTP/1.1\r\nHost: bad_bar.com\r\n\r\n", "timestamp": "1469733493.993", "body": ""}
response_header = {"headers": "HTTP/1.1 200 OK\r\nConnection: close\r\n\r\n", "timestamp": "1469733493.993", "body": ""}
server1.addResponse("sessionlog.json", request_foo_header, response_header)
server1.addResponse("sessionlog.json", request_bad_foo_header, response_header)
server2.addResponse("sessionlog.json", request_bar_header, response_header)
server2.addResponse("sessionlog.json", request_bad_bar_header, response_header)
# add ssl materials like key, certificates for the server
ts.addSSLfile("ssl/signed-foo.pem")
ts.addSSLfile("ssl/signed2-foo.pem")
ts.addSSLfile("ssl/signed-foo.key")
ts.addSSLfile("ssl/server.pem")
ts.addSSLfile("ssl/server.key")
ts.addSSLfile("ssl/signer.pem")
ts.addSSLfile("ssl/signer.key")
ts.addSSLfile("ssl/signer2.pem")
ts.addSSLfile("ssl/signer2.key")
# Each remap rule overrides the client CA bundle via conf_remap.  The
# "/case*" rules pair each origin with the bundle that (presumably) signed
# its certificate; the "/badcase*" rules pair it with the other CA, so
# verification against that origin should fail.
ts.Disk.remap_config.AddLine(
    'map /case1 https://127.0.0.1:{0}/ @plugin=conf_remap.so @pparam=proxy.config.ssl.client.CA.cert.filename={1}/{2}'.format(
        server1.Variables.SSL_Port, ts.Variables.SSLDir, "signer.pem")
)
ts.Disk.remap_config.AddLine(
    'map /badcase1 https://127.0.0.1:{0}/ @plugin=conf_remap.so @pparam=proxy.config.ssl.client.CA.cert.filename={1}/{2}'.format(
        server1.Variables.SSL_Port, ts.Variables.SSLDir, "signer2.pem")
)
ts.Disk.remap_config.AddLine(
    'map /case2 https://127.0.0.1:{0}/ @plugin=conf_remap.so @pparam=proxy.config.ssl.client.CA.cert.filename={1}/{2}'.format(
        server2.Variables.SSL_Port, ts.Variables.SSLDir, "signer2.pem")
)
ts.Disk.remap_config.AddLine(
    'map /badcase2 https://127.0.0.1:{0}/ @plugin=conf_remap.so @pparam=proxy.config.ssl.client.CA.cert.filename={1}/{2}'.format(
        server2.Variables.SSL_Port, ts.Variables.SSLDir, "signer.pem")
)
# Server certificate ATS presents to curl (curl uses -k, so untrusted is OK).
ts.Disk.ssl_multicert_config.AddLine(
    'dest_ip=* ssl_cert_name=server.pem ssl_key_name=server.key'
)
# Global origin-verification policy: ENFORCED, checking the certificate
# SIGNATURE only.  The per-remap conf_remap overrides above swap in the CA
# bundle used for that check.
ts.Disk.records_config.update({
    'proxy.config.ssl.server.cert.path': '{0}'.format(ts.Variables.SSLDir),
    'proxy.config.ssl.server.private_key.path': '{0}'.format(ts.Variables.SSLDir),
    'proxy.config.ssl.server.cipher_suite': 'ECDHE-RSA-AES128-GCM-SHA256:ECDHE-RSA-AES256-GCM-SHA384:ECDHE-RSA-AES128-SHA256:ECDHE-RSA-AES256-SHA384:AES128-GCM-SHA256:AES256-GCM-SHA384:ECDHE-RSA-RC4-SHA:ECDHE-RSA-AES128-SHA:ECDHE-RSA-AES256-SHA:RC4-SHA:RC4-MD5:AES128-SHA:AES256-SHA:DES-CBC3-SHA!SRP:!DSS:!PSK:!aNULL:!eNULL:!SSLv2',
    # set global policy
    'proxy.config.ssl.client.verify.server.policy': 'ENFORCED',
    'proxy.config.ssl.client.verify.server.properties': 'SIGNATURE',
    'proxy.config.ssl.client.CA.cert.path': '/tmp',
    'proxy.config.ssl.client.CA.cert.filename': '{0}/signer.pem'.format(ts.Variables.SSLDir),
    'proxy.config.exec_thread.autoconfig.scale': 1.0,
    'proxy.config.url_remap.pristine_host_hdr': 1
})
# Case 1a: /case1 verifies server1 against the CA bundle that signed its
# certificate, so the proxied request should succeed with no connect error.
tr = Test.AddTestRun("Use correct ca bundle for server 1")
tr.Processes.Default.Command = 'curl -k -H \"host: foo.com\" http://127.0.0.1:{0}/case1'.format(ts.Variables.port)
tr.ReturnCode = 0
tr.Setup.Copy("ssl/signed-foo.key")
tr.Setup.Copy("ssl/signed-foo.pem")
tr.Setup.Copy("ssl/signed2-foo.pem")
tr.Processes.Default.StartBefore(server1)
tr.Processes.Default.StartBefore(server2)
tr.Processes.Default.StartBefore(Test.Processes.ts)
tr.StillRunningAfter = server1
tr.StillRunningAfter = ts
tr.Processes.Default.Streams.stdout = Testers.ExcludesExpression("Could Not Connect", "Curl attempt should have succeeded")
# Case 1b: /badcase1 uses the wrong CA bundle for server1; with ENFORCED
# policy the origin connection is rejected, so ATS reports a connect failure.
tr2 = Test.AddTestRun("Use incorrect ca bundle for server 1")
tr2.Processes.Default.Command = "curl -k -H \"host: bar.com\" http://127.0.0.1:{0}/badcase1".format(ts.Variables.port)
tr2.ReturnCode = 0
tr2.StillRunningAfter = server1
tr2.StillRunningAfter = ts
tr2.Processes.Default.Streams.stdout = Testers.ContainsExpression("Could Not Connect", "Curl attempt should have failed")
# Case 2a: /case2 verifies server2 against its matching CA bundle; success.
tr3 = Test.AddTestRun("Use correct ca bundle for server 2")
tr3.Processes.Default.Command = "curl -k -H \"host: random.com\" http://127.0.0.1:{0}/case2".format(ts.Variables.port)
tr3.ReturnCode = 0
tr3.StillRunningAfter = server2
tr3.StillRunningAfter = ts
tr3.Processes.Default.Streams.stdout = Testers.ExcludesExpression("Could Not Connect", "Curl attempt should have succeeded")
# Case 2b: /badcase2 uses the wrong CA bundle for server2; the origin
# connection is rejected and ATS reports a connect failure.
tr4 = Test.AddTestRun("Use incorrect ca bundle for server 2")
tr4.Processes.Default.Command = "curl -k -H \"host: foo.com\" http://127.0.0.1:{0}/badcase2".format(ts.Variables.port)
tr4.ReturnCode = 0
tr4.StillRunningAfter = server2
tr4.StillRunningAfter = ts
tr4.Processes.Default.Streams.stdout = Testers.ContainsExpression("Could Not Connect", "Curl attempt should have failed")
| 53.674419 | 332 | 0.718515 |
Test.Summary = '''
Test tls server certificate verification options. Exercise conf_remap for ca bundle
'''
# Default ATS process plus two TLS origins presenting foo.com certificates
# issued by different CAs (signed-foo.pem vs signed2-foo.pem).
ts = Test.MakeATSProcess("ts", select_ports=True)
server1 = Test.MakeOriginServer("server1",
                                ssl=True,
                                options={"--key": "{0}/signed-foo.key".format(Test.RunDirectory),
                                         "--cert": "{0}/signed-foo.pem".format(Test.RunDirectory)})
server2 = Test.MakeOriginServer("server2",
                                ssl=True,
                                options={"--key": "{0}/signed-foo.key".format(Test.RunDirectory),
                                         "--cert": "{0}/signed2-foo.pem".format(Test.RunDirectory)})
# Canned origin transactions: each Host header below gets a 200 response.
request_foo_header = {"headers": "GET / HTTP/1.1\r\nHost: foo.com\r\n\r\n", "timestamp": "1469733493.993", "body": ""}
request_bad_foo_header = {"headers": "GET / HTTP/1.1\r\nHost: bad_foo.com\r\n\r\n", "timestamp": "1469733493.993", "body": ""}
request_bar_header = {"headers": "GET / HTTP/1.1\r\nHost: bar.com\r\n\r\n", "timestamp": "1469733493.993", "body": ""}
request_bad_bar_header = {"headers": "GET / HTTP/1.1\r\nHost: bad_bar.com\r\n\r\n", "timestamp": "1469733493.993", "body": ""}
response_header = {"headers": "HTTP/1.1 200 OK\r\nConnection: close\r\n\r\n", "timestamp": "1469733493.993", "body": ""}
server1.addResponse("sessionlog.json", request_foo_header, response_header)
server1.addResponse("sessionlog.json", request_bad_foo_header, response_header)
server2.addResponse("sessionlog.json", request_bar_header, response_header)
server2.addResponse("sessionlog.json", request_bad_bar_header, response_header)
# SSL materials (keys, certs, CA bundles) copied into the ATS SSL dir.
ts.addSSLfile("ssl/signed-foo.pem")
ts.addSSLfile("ssl/signed2-foo.pem")
ts.addSSLfile("ssl/signed-foo.key")
ts.addSSLfile("ssl/server.pem")
ts.addSSLfile("ssl/server.key")
ts.addSSLfile("ssl/signer.pem")
ts.addSSLfile("ssl/signer.key")
ts.addSSLfile("ssl/signer2.pem")
ts.addSSLfile("ssl/signer2.key")
# conf_remap overrides the client CA bundle per remap rule: "/case*" pairs
# each origin with its matching CA bundle, "/badcase*" with the other one
# so origin verification should fail there.
ts.Disk.remap_config.AddLine(
    'map /case1 https://127.0.0.1:{0}/ @plugin=conf_remap.so @pparam=proxy.config.ssl.client.CA.cert.filename={1}/{2}'.format(
        server1.Variables.SSL_Port, ts.Variables.SSLDir, "signer.pem")
)
ts.Disk.remap_config.AddLine(
    'map /badcase1 https://127.0.0.1:{0}/ @plugin=conf_remap.so @pparam=proxy.config.ssl.client.CA.cert.filename={1}/{2}'.format(
        server1.Variables.SSL_Port, ts.Variables.SSLDir, "signer2.pem")
)
ts.Disk.remap_config.AddLine(
    'map /case2 https://127.0.0.1:{0}/ @plugin=conf_remap.so @pparam=proxy.config.ssl.client.CA.cert.filename={1}/{2}'.format(
        server2.Variables.SSL_Port, ts.Variables.SSLDir, "signer2.pem")
)
ts.Disk.remap_config.AddLine(
    'map /badcase2 https://127.0.0.1:{0}/ @plugin=conf_remap.so @pparam=proxy.config.ssl.client.CA.cert.filename={1}/{2}'.format(
        server2.Variables.SSL_Port, ts.Variables.SSLDir, "signer.pem")
)
# Server certificate ATS presents to curl (curl uses -k, so untrusted is OK).
ts.Disk.ssl_multicert_config.AddLine(
    'dest_ip=* ssl_cert_name=server.pem ssl_key_name=server.key'
)
# Global origin-verification policy: ENFORCED, checking the certificate
# SIGNATURE only; per-remap conf_remap overrides supply the CA bundle.
ts.Disk.records_config.update({
    'proxy.config.ssl.server.cert.path': '{0}'.format(ts.Variables.SSLDir),
    'proxy.config.ssl.server.private_key.path': '{0}'.format(ts.Variables.SSLDir),
    'proxy.config.ssl.server.cipher_suite': 'ECDHE-RSA-AES128-GCM-SHA256:ECDHE-RSA-AES256-GCM-SHA384:ECDHE-RSA-AES128-SHA256:ECDHE-RSA-AES256-SHA384:AES128-GCM-SHA256:AES256-GCM-SHA384:ECDHE-RSA-RC4-SHA:ECDHE-RSA-AES128-SHA:ECDHE-RSA-AES256-SHA:RC4-SHA:RC4-MD5:AES128-SHA:AES256-SHA:DES-CBC3-SHA!SRP:!DSS:!PSK:!aNULL:!eNULL:!SSLv2',
    'proxy.config.ssl.client.verify.server.policy': 'ENFORCED',
    'proxy.config.ssl.client.verify.server.properties': 'SIGNATURE',
    'proxy.config.ssl.client.CA.cert.path': '/tmp',
    'proxy.config.ssl.client.CA.cert.filename': '{0}/signer.pem'.format(ts.Variables.SSLDir),
    'proxy.config.exec_thread.autoconfig.scale': 1.0,
    'proxy.config.url_remap.pristine_host_hdr': 1
})
# Case 1a: /case1 uses the matching CA bundle for server1 — expect success.
# NOTE(review): the failure-description strings on the two "badcase" runs
# below say "should have succeeded" even though those assertions expect the
# connection to fail ("Could Not Connect" present) — confirm and correct.
# Also note the local names tr2/tr3 are each reused for two different runs.
tr = Test.AddTestRun("Use corrcect ca bundle for server 1")
tr.Processes.Default.Command = 'curl -k -H \"host: foo.com\" http://127.0.0.1:{0}/case1'.format(ts.Variables.port)
tr.ReturnCode = 0
tr.Setup.Copy("ssl/signed-foo.key")
tr.Setup.Copy("ssl/signed-foo.pem")
tr.Setup.Copy("ssl/signed2-foo.pem")
tr.Processes.Default.StartBefore(server1)
tr.Processes.Default.StartBefore(server2)
tr.Processes.Default.StartBefore(Test.Processes.ts)
tr.StillRunningAfter = server1
tr.StillRunningAfter = ts
tr.Processes.Default.Streams.stdout = Testers.ExcludesExpression("Could Not Connect", "Curl attempt should have succeeded")
# Case 1b: wrong CA bundle for server1 — expect a connect failure report.
tr2 = Test.AddTestRun("Use incorrect ca bundle for server 1")
tr2.Processes.Default.Command = "curl -k -H \"host: bar.com\" http://127.0.0.1:{0}/badcase1".format(ts.Variables.port)
tr2.ReturnCode = 0
tr2.StillRunningAfter = server1
tr2.StillRunningAfter = ts
tr2.Processes.Default.Streams.stdout = Testers.ContainsExpression("Could Not Connect", "Curl attempt should have succeeded")
# Case 2a: matching CA bundle for server2 — expect success.
tr2 = Test.AddTestRun("Use currect ca bundle for server 2")
tr2.Processes.Default.Command = "curl -k -H \"host: random.com\" http://127.0.0.1:{0}/case2".format(ts.Variables.port)
tr2.ReturnCode = 0
tr2.StillRunningAfter = server2
tr2.StillRunningAfter = ts
tr2.Processes.Default.Streams.stdout = Testers.ExcludesExpression("Could Not Connect", "Curl attempt should have succeeded")
# Case 2b: wrong CA bundle for server2 — expect a connect failure report.
tr3 = Test.AddTestRun("User incorrect ca bundle for server 2")
tr3.Processes.Default.Command = "curl -k -H \"host: foo.com\" http://127.0.0.1:{0}/badcase2".format(ts.Variables.port)
tr3.ReturnCode = 0
tr3.StillRunningAfter = server2
tr3.StillRunningAfter = ts
tr3.Processes.Default.Streams.stdout = Testers.ContainsExpression("Could Not Connect", "Curl attempt should have succeeded")
| true | true |
f737a38abbb477dfeeaca4a0361f52ed8414956b | 10,985 | py | Python | python/ray/tune/tests/test_integration_wandb.py | jianoaix/ray | 1701b923bc83905f8961c06a6a173e3eba46a936 | [
"Apache-2.0"
] | null | null | null | python/ray/tune/tests/test_integration_wandb.py | jianoaix/ray | 1701b923bc83905f8961c06a6a173e3eba46a936 | [
"Apache-2.0"
] | 41 | 2021-09-21T01:13:48.000Z | 2022-03-19T07:12:22.000Z | python/ray/tune/tests/test_integration_wandb.py | jianoaix/ray | 1701b923bc83905f8961c06a6a173e3eba46a936 | [
"Apache-2.0"
] | null | null | null | import os
import tempfile
from collections import namedtuple
from multiprocessing import Queue
import unittest
import numpy as np
from ray.tune import Trainable
from ray.tune.function_runner import wrap_function
from ray.tune.integration.wandb import (
WandbLoggerCallback,
_WandbLoggingProcess,
WANDB_ENV_VAR,
WandbTrainableMixin,
wandb_mixin,
_QueueItem,
)
from ray.tune.result import TRIAL_INFO
from ray.tune.trial import _TrialInfo
from ray.tune.utils.placement_groups import PlacementGroupFactory
class Trial(
    namedtuple(
        "MockTrial",
        [
            "config",
            "trial_id",
            "trial_name",
            "trainable_name",
            "placement_group_factory",
            "logdir",
        ],
    )
):
    """Lightweight stand-in for a Tune trial used by the wandb tests."""
    def __hash__(self):
        # Hash by trial id so trials can key dicts like real Trial objects.
        return hash(self.trial_id)
    def __str__(self):
        return self.trial_name
class _MockWandbLoggingProcess(_WandbLoggingProcess):
    """Logging process that records results instead of calling wandb.

    Handled logs and config updates are pushed onto queues so the tests
    can inspect what would have been sent to wandb.
    """
    def __init__(self, logdir, queue, exclude, to_config, *args, **kwargs):
        super(_MockWandbLoggingProcess, self).__init__(
            logdir, queue, exclude, to_config, *args, **kwargs
        )
        self.logs = Queue()
        self.config_updates = Queue()
    def run(self):
        # Drain the result queue until the END sentinel arrives, capturing
        # each processed result rather than forwarding it to wandb.
        while True:
            result_type, result_content = self.queue.get()
            if result_type == _QueueItem.END:
                break
            log, config_update = self._handle_result(result_content)
            self.config_updates.put(config_update)
            self.logs.put(log)
class WandbTestExperimentLogger(WandbLoggerCallback):
    """WandbLoggerCallback variant that spawns the mock logging process."""
    _logger_process_cls = _MockWandbLoggingProcess
    @property
    def trial_processes(self):
        # Expose the private per-trial process map for test assertions.
        return self._trial_processes
class _MockWandbAPI(object):
def init(self, *args, **kwargs):
self.args = args
self.kwargs = kwargs
return self
class _MockWandbTrainableMixin(WandbTrainableMixin):
    # Swap the real wandb API for the recording mock so tests make no
    # network calls.
    _wandb = _MockWandbAPI()
class WandbTestTrainable(_MockWandbTrainableMixin, Trainable):
    """Trainable wired to the mocked wandb API for configuration tests."""
    pass
class WandbIntegrationTest(unittest.TestCase):
def setUp(self):
if WANDB_ENV_VAR in os.environ:
del os.environ[WANDB_ENV_VAR]
def tearDown(self):
if WANDB_ENV_VAR in os.environ:
del os.environ[WANDB_ENV_VAR]
def testWandbLoggerConfig(self):
trial_config = {"par1": 4, "par2": 9.12345678}
trial = Trial(
trial_config,
0,
"trial_0",
"trainable",
PlacementGroupFactory([{"CPU": 1}]),
"/tmp",
)
if WANDB_ENV_VAR in os.environ:
del os.environ[WANDB_ENV_VAR]
# No API key
with self.assertRaises(ValueError):
logger = WandbTestExperimentLogger(project="test_project")
logger.setup()
# API Key in config
logger = WandbTestExperimentLogger(project="test_project", api_key="1234")
logger.setup()
self.assertEqual(os.environ[WANDB_ENV_VAR], "1234")
del logger
del os.environ[WANDB_ENV_VAR]
# API Key file
with tempfile.NamedTemporaryFile("wt") as fp:
fp.write("5678")
fp.flush()
logger = WandbTestExperimentLogger(
project="test_project", api_key_file=fp.name
)
logger.setup()
self.assertEqual(os.environ[WANDB_ENV_VAR], "5678")
del logger
del os.environ[WANDB_ENV_VAR]
# API Key in env
os.environ[WANDB_ENV_VAR] = "9012"
logger = WandbTestExperimentLogger(project="test_project")
logger.setup()
del logger
# From now on, the API key is in the env variable.
logger = WandbTestExperimentLogger(project="test_project")
logger.log_trial_start(trial)
self.assertEqual(
logger.trial_processes[trial].kwargs["project"], "test_project"
)
self.assertEqual(logger.trial_processes[trial].kwargs["id"], trial.trial_id)
self.assertEqual(logger.trial_processes[trial].kwargs["name"], trial.trial_name)
self.assertEqual(
logger.trial_processes[trial].kwargs["group"], trial.trainable_name
)
self.assertIn("config", logger.trial_processes[trial]._exclude)
del logger
# log config.
logger = WandbTestExperimentLogger(project="test_project", log_config=True)
logger.log_trial_start(trial)
self.assertNotIn("config", logger.trial_processes[trial]._exclude)
self.assertNotIn("metric", logger.trial_processes[trial]._exclude)
del logger
# Exclude metric.
logger = WandbTestExperimentLogger(project="test_project", excludes=["metric"])
logger.log_trial_start(trial)
self.assertIn("config", logger.trial_processes[trial]._exclude)
self.assertIn("metric", logger.trial_processes[trial]._exclude)
del logger
def testWandbLoggerReporting(self):
trial_config = {"par1": 4, "par2": 9.12345678}
trial = Trial(
trial_config,
0,
"trial_0",
"trainable",
PlacementGroupFactory([{"CPU": 1}]),
"/tmp",
)
logger = WandbTestExperimentLogger(
project="test_project", api_key="1234", excludes=["metric2"]
)
logger.on_trial_start(0, [], trial)
r1 = {
"metric1": 0.8,
"metric2": 1.4,
"metric3": np.asarray(32.0),
"metric4": np.float32(32.0),
"const": "text",
"config": trial_config,
}
logger.on_trial_result(0, [], trial, r1)
logged = logger.trial_processes[trial].logs.get(timeout=10)
self.assertIn("metric1", logged)
self.assertNotIn("metric2", logged)
self.assertIn("metric3", logged)
self.assertIn("metric4", logged)
self.assertNotIn("const", logged)
self.assertNotIn("config", logged)
del logger
def testWandbMixinConfig(self):
config = {"par1": 4, "par2": 9.12345678}
trial = Trial(
config,
0,
"trial_0",
"trainable",
PlacementGroupFactory([{"CPU": 1}]),
"/tmp",
)
trial_info = _TrialInfo(trial)
config[TRIAL_INFO] = trial_info
if WANDB_ENV_VAR in os.environ:
del os.environ[WANDB_ENV_VAR]
# Needs at least a project
with self.assertRaises(ValueError):
trainable = WandbTestTrainable(config)
# No API key
config["wandb"] = {"project": "test_project"}
with self.assertRaises(ValueError):
trainable = WandbTestTrainable(config)
# API Key in config
config["wandb"] = {"project": "test_project", "api_key": "1234"}
trainable = WandbTestTrainable(config)
self.assertEqual(os.environ[WANDB_ENV_VAR], "1234")
del os.environ[WANDB_ENV_VAR]
# API Key file
with tempfile.NamedTemporaryFile("wt") as fp:
fp.write("5678")
fp.flush()
config["wandb"] = {"project": "test_project", "api_key_file": fp.name}
trainable = WandbTestTrainable(config)
self.assertEqual(os.environ[WANDB_ENV_VAR], "5678")
del os.environ[WANDB_ENV_VAR]
# API Key in env
os.environ[WANDB_ENV_VAR] = "9012"
config["wandb"] = {"project": "test_project"}
trainable = WandbTestTrainable(config)
# From now on, the API key is in the env variable.
# Default configuration
config["wandb"] = {"project": "test_project"}
config[TRIAL_INFO] = trial_info
trainable = WandbTestTrainable(config)
self.assertEqual(trainable.wandb.kwargs["project"], "test_project")
self.assertEqual(trainable.wandb.kwargs["id"], trial.trial_id)
self.assertEqual(trainable.wandb.kwargs["name"], trial.trial_name)
self.assertEqual(trainable.wandb.kwargs["group"], "WandbTestTrainable")
    def testWandbDecoratorConfig(self):
        """Validate API-key resolution for the ``@wandb_mixin`` decorator path.

        Mirrors the class-mixin checks, but for a function trainable
        wrapped via ``wrap_function``: key missing, key in config, key
        file, key already in the environment, and finally propagation of
        trial metadata into the ``wandb.init`` kwargs.
        """
        config = {"par1": 4, "par2": 9.12345678}
        trial = Trial(
            config,
            0,
            "trial_0",
            "trainable",
            PlacementGroupFactory([{"CPU": 1}]),
            "/tmp",
        )
        trial_info = _TrialInfo(trial)
        @wandb_mixin
        def train_fn(config):
            return 1
        # Swap in the mock mixin so no real wandb run is started.
        train_fn.__mixins__ = (_MockWandbTrainableMixin,)
        config[TRIAL_INFO] = trial_info
        # Start from a clean environment so earlier tests cannot leak a key.
        if WANDB_ENV_VAR in os.environ:
            del os.environ[WANDB_ENV_VAR]
        # Needs at least a project
        with self.assertRaises(ValueError):
            wrapped = wrap_function(train_fn)(config)
        # No API key
        config["wandb"] = {"project": "test_project"}
        with self.assertRaises(ValueError):
            wrapped = wrap_function(train_fn)(config)
        # API Key in config: must be exported to the env var.
        config["wandb"] = {"project": "test_project", "api_key": "1234"}
        wrapped = wrap_function(train_fn)(config)
        self.assertEqual(os.environ[WANDB_ENV_VAR], "1234")
        del os.environ[WANDB_ENV_VAR]
        # API Key file: key is read from disk, then exported.
        with tempfile.NamedTemporaryFile("wt") as fp:
            fp.write("5678")
            fp.flush()
            config["wandb"] = {"project": "test_project", "api_key_file": fp.name}
            wrapped = wrap_function(train_fn)(config)
            self.assertEqual(os.environ[WANDB_ENV_VAR], "5678")
        del os.environ[WANDB_ENV_VAR]
        # API Key in env: accepted without any key in the config.
        os.environ[WANDB_ENV_VAR] = "9012"
        config["wandb"] = {"project": "test_project"}
        wrapped = wrap_function(train_fn)(config)
        # From now on, the API key is in the env variable.
        # Default configuration: trial metadata must reach wandb.init kwargs.
        config["wandb"] = {"project": "test_project"}
        config[TRIAL_INFO] = trial_info
        wrapped = wrap_function(train_fn)(config)
        self.assertEqual(wrapped.wandb.kwargs["project"], "test_project")
        self.assertEqual(wrapped.wandb.kwargs["id"], trial.trial_id)
        self.assertEqual(wrapped.wandb.kwargs["name"], trial.trial_name)
    def testWandbMixinRLlib(self):
        """Test compatibility with RLlib configuration dicts.

        Skips when rllib is not installed; otherwise only checks that a
        PPO trainer combined with the wandb mixin can be constructed.
        """
        # Local import to avoid a hard tune dependency on rllib.
        try:
            from ray.rllib.algorithms.ppo import PPO
        except ImportError:
            self.skipTest("ray[rllib] not available")
            return
        class WandbPPOTrainer(_MockWandbTrainableMixin, PPO):
            pass
        config = {
            "env": "CartPole-v0",
            "wandb": {
                "project": "test_project",
                "api_key": "1234",
            },
        }
        # Constructing the trainer must not raise; that is the whole test.
        WandbPPOTrainer(config)
# Allow running this test module directly (``python <module>``) with
# verbose pytest output; the exit code is pytest's result.
if __name__ == "__main__":
    import pytest
    import sys
    sys.exit(pytest.main(["-v", __file__]))
| 29.689189 | 88 | 0.602185 | import os
import tempfile
from collections import namedtuple
from multiprocessing import Queue
import unittest
import numpy as np
from ray.tune import Trainable
from ray.tune.function_runner import wrap_function
from ray.tune.integration.wandb import (
WandbLoggerCallback,
_WandbLoggingProcess,
WANDB_ENV_VAR,
WandbTrainableMixin,
wandb_mixin,
_QueueItem,
)
from ray.tune.result import TRIAL_INFO
from ray.tune.trial import _TrialInfo
from ray.tune.utils.placement_groups import PlacementGroupFactory
class Trial(
namedtuple(
"MockTrial",
[
"config",
"trial_id",
"trial_name",
"trainable_name",
"placement_group_factory",
"logdir",
],
)
):
def __hash__(self):
return hash(self.trial_id)
def __str__(self):
return self.trial_name
class _MockWandbLoggingProcess(_WandbLoggingProcess):
def __init__(self, logdir, queue, exclude, to_config, *args, **kwargs):
super(_MockWandbLoggingProcess, self).__init__(
logdir, queue, exclude, to_config, *args, **kwargs
)
self.logs = Queue()
self.config_updates = Queue()
def run(self):
while True:
result_type, result_content = self.queue.get()
if result_type == _QueueItem.END:
break
log, config_update = self._handle_result(result_content)
self.config_updates.put(config_update)
self.logs.put(log)
class WandbTestExperimentLogger(WandbLoggerCallback):
_logger_process_cls = _MockWandbLoggingProcess
@property
def trial_processes(self):
return self._trial_processes
class _MockWandbAPI(object):
def init(self, *args, **kwargs):
self.args = args
self.kwargs = kwargs
return self
class _MockWandbTrainableMixin(WandbTrainableMixin):
_wandb = _MockWandbAPI()
class WandbTestTrainable(_MockWandbTrainableMixin, Trainable):
pass
class WandbIntegrationTest(unittest.TestCase):
def setUp(self):
if WANDB_ENV_VAR in os.environ:
del os.environ[WANDB_ENV_VAR]
def tearDown(self):
if WANDB_ENV_VAR in os.environ:
del os.environ[WANDB_ENV_VAR]
def testWandbLoggerConfig(self):
trial_config = {"par1": 4, "par2": 9.12345678}
trial = Trial(
trial_config,
0,
"trial_0",
"trainable",
PlacementGroupFactory([{"CPU": 1}]),
"/tmp",
)
if WANDB_ENV_VAR in os.environ:
del os.environ[WANDB_ENV_VAR]
with self.assertRaises(ValueError):
logger = WandbTestExperimentLogger(project="test_project")
logger.setup()
logger = WandbTestExperimentLogger(project="test_project", api_key="1234")
logger.setup()
self.assertEqual(os.environ[WANDB_ENV_VAR], "1234")
del logger
del os.environ[WANDB_ENV_VAR]
with tempfile.NamedTemporaryFile("wt") as fp:
fp.write("5678")
fp.flush()
logger = WandbTestExperimentLogger(
project="test_project", api_key_file=fp.name
)
logger.setup()
self.assertEqual(os.environ[WANDB_ENV_VAR], "5678")
del logger
del os.environ[WANDB_ENV_VAR]
os.environ[WANDB_ENV_VAR] = "9012"
logger = WandbTestExperimentLogger(project="test_project")
logger.setup()
del logger
logger = WandbTestExperimentLogger(project="test_project")
logger.log_trial_start(trial)
self.assertEqual(
logger.trial_processes[trial].kwargs["project"], "test_project"
)
self.assertEqual(logger.trial_processes[trial].kwargs["id"], trial.trial_id)
self.assertEqual(logger.trial_processes[trial].kwargs["name"], trial.trial_name)
self.assertEqual(
logger.trial_processes[trial].kwargs["group"], trial.trainable_name
)
self.assertIn("config", logger.trial_processes[trial]._exclude)
del logger
logger = WandbTestExperimentLogger(project="test_project", log_config=True)
logger.log_trial_start(trial)
self.assertNotIn("config", logger.trial_processes[trial]._exclude)
self.assertNotIn("metric", logger.trial_processes[trial]._exclude)
del logger
logger = WandbTestExperimentLogger(project="test_project", excludes=["metric"])
logger.log_trial_start(trial)
self.assertIn("config", logger.trial_processes[trial]._exclude)
self.assertIn("metric", logger.trial_processes[trial]._exclude)
del logger
def testWandbLoggerReporting(self):
trial_config = {"par1": 4, "par2": 9.12345678}
trial = Trial(
trial_config,
0,
"trial_0",
"trainable",
PlacementGroupFactory([{"CPU": 1}]),
"/tmp",
)
logger = WandbTestExperimentLogger(
project="test_project", api_key="1234", excludes=["metric2"]
)
logger.on_trial_start(0, [], trial)
r1 = {
"metric1": 0.8,
"metric2": 1.4,
"metric3": np.asarray(32.0),
"metric4": np.float32(32.0),
"const": "text",
"config": trial_config,
}
logger.on_trial_result(0, [], trial, r1)
logged = logger.trial_processes[trial].logs.get(timeout=10)
self.assertIn("metric1", logged)
self.assertNotIn("metric2", logged)
self.assertIn("metric3", logged)
self.assertIn("metric4", logged)
self.assertNotIn("const", logged)
self.assertNotIn("config", logged)
del logger
def testWandbMixinConfig(self):
config = {"par1": 4, "par2": 9.12345678}
trial = Trial(
config,
0,
"trial_0",
"trainable",
PlacementGroupFactory([{"CPU": 1}]),
"/tmp",
)
trial_info = _TrialInfo(trial)
config[TRIAL_INFO] = trial_info
if WANDB_ENV_VAR in os.environ:
del os.environ[WANDB_ENV_VAR]
with self.assertRaises(ValueError):
trainable = WandbTestTrainable(config)
config["wandb"] = {"project": "test_project"}
with self.assertRaises(ValueError):
trainable = WandbTestTrainable(config)
config["wandb"] = {"project": "test_project", "api_key": "1234"}
trainable = WandbTestTrainable(config)
self.assertEqual(os.environ[WANDB_ENV_VAR], "1234")
del os.environ[WANDB_ENV_VAR]
with tempfile.NamedTemporaryFile("wt") as fp:
fp.write("5678")
fp.flush()
config["wandb"] = {"project": "test_project", "api_key_file": fp.name}
trainable = WandbTestTrainable(config)
self.assertEqual(os.environ[WANDB_ENV_VAR], "5678")
del os.environ[WANDB_ENV_VAR]
os.environ[WANDB_ENV_VAR] = "9012"
config["wandb"] = {"project": "test_project"}
trainable = WandbTestTrainable(config)
config["wandb"] = {"project": "test_project"}
config[TRIAL_INFO] = trial_info
trainable = WandbTestTrainable(config)
self.assertEqual(trainable.wandb.kwargs["project"], "test_project")
self.assertEqual(trainable.wandb.kwargs["id"], trial.trial_id)
self.assertEqual(trainable.wandb.kwargs["name"], trial.trial_name)
self.assertEqual(trainable.wandb.kwargs["group"], "WandbTestTrainable")
def testWandbDecoratorConfig(self):
config = {"par1": 4, "par2": 9.12345678}
trial = Trial(
config,
0,
"trial_0",
"trainable",
PlacementGroupFactory([{"CPU": 1}]),
"/tmp",
)
trial_info = _TrialInfo(trial)
@wandb_mixin
def train_fn(config):
return 1
train_fn.__mixins__ = (_MockWandbTrainableMixin,)
config[TRIAL_INFO] = trial_info
if WANDB_ENV_VAR in os.environ:
del os.environ[WANDB_ENV_VAR]
with self.assertRaises(ValueError):
wrapped = wrap_function(train_fn)(config)
config["wandb"] = {"project": "test_project"}
with self.assertRaises(ValueError):
wrapped = wrap_function(train_fn)(config)
config["wandb"] = {"project": "test_project", "api_key": "1234"}
wrapped = wrap_function(train_fn)(config)
self.assertEqual(os.environ[WANDB_ENV_VAR], "1234")
del os.environ[WANDB_ENV_VAR]
with tempfile.NamedTemporaryFile("wt") as fp:
fp.write("5678")
fp.flush()
config["wandb"] = {"project": "test_project", "api_key_file": fp.name}
wrapped = wrap_function(train_fn)(config)
self.assertEqual(os.environ[WANDB_ENV_VAR], "5678")
del os.environ[WANDB_ENV_VAR]
os.environ[WANDB_ENV_VAR] = "9012"
config["wandb"] = {"project": "test_project"}
wrapped = wrap_function(train_fn)(config)
config["wandb"] = {"project": "test_project"}
config[TRIAL_INFO] = trial_info
wrapped = wrap_function(train_fn)(config)
self.assertEqual(wrapped.wandb.kwargs["project"], "test_project")
self.assertEqual(wrapped.wandb.kwargs["id"], trial.trial_id)
self.assertEqual(wrapped.wandb.kwargs["name"], trial.trial_name)
def testWandbMixinRLlib(self):
try:
from ray.rllib.algorithms.ppo import PPO
except ImportError:
self.skipTest("ray[rllib] not available")
return
class WandbPPOTrainer(_MockWandbTrainableMixin, PPO):
pass
config = {
"env": "CartPole-v0",
"wandb": {
"project": "test_project",
"api_key": "1234",
},
}
WandbPPOTrainer(config)
if __name__ == "__main__":
import pytest
import sys
sys.exit(pytest.main(["-v", __file__]))
| true | true |
f737a5565f48a1a891a9395c0e0a47cb226cce26 | 3,038 | py | Python | differential-privacy-library-main/tests/tools/test_histogramdd.py | gonzalo-munillag/Exponential_Randomised_Response | 1ae2c867d77c6e92f1df0bb7120862e4f9aa15e4 | [
"MIT"
] | 597 | 2019-06-19T11:26:50.000Z | 2022-03-30T13:23:42.000Z | differential-privacy-library-main/tests/tools/test_histogramdd.py | gonzalo-munillag/Exponential_Randomised_Response | 1ae2c867d77c6e92f1df0bb7120862e4f9aa15e4 | [
"MIT"
] | 45 | 2019-06-20T08:03:31.000Z | 2022-03-30T14:02:02.000Z | differential-privacy-library-main/tests/tools/test_histogramdd.py | gonzalo-munillag/Exponential_Randomised_Response | 1ae2c867d77c6e92f1df0bb7120862e4f9aa15e4 | [
"MIT"
] | 163 | 2019-06-19T23:56:19.000Z | 2022-03-26T23:59:24.000Z | import numpy as np
from unittest import TestCase
from diffprivlib.accountant import BudgetAccountant
from diffprivlib.tools.histograms import histogramdd
from diffprivlib.utils import global_seed, PrivacyLeakWarning, BudgetError
class TestHistogramdd(TestCase):
    """Unit tests for :func:`diffprivlib.tools.histograms.histogramdd`."""

    def test_no_params(self):
        """No epsilon/range: must warn about a privacy leak but still run."""
        a = np.array([1, 2, 3, 4, 5])
        with self.assertWarns(PrivacyLeakWarning):
            res = histogramdd(a)
        self.assertIsNotNone(res)

    def test_no_range(self):
        """Epsilon given but no range: bins derived from data still leak."""
        a = np.array([1, 2, 3, 4, 5])
        with self.assertWarns(PrivacyLeakWarning):
            res = histogramdd(a, epsilon=2)
        self.assertIsNotNone(res)

    def test_bins_instead_of_range(self):
        """Explicit bin edges are an acceptable substitute for ``range``."""
        a = np.array([1, 2, 3, 4, 5])
        res = histogramdd([a, a], epsilon=2, bins=([0, 2, 6], [0, 2, 6]))
        self.assertIsNotNone(res)

    def test_same_edges(self):
        """The DP histogram must use exactly NumPy's bin edges."""
        a = np.array([1, 2, 3, 4, 5])
        _, edges = np.histogramdd(a, bins=3, range=[(0, 10)])
        _, dp_edges = histogramdd(a, epsilon=1, bins=3, range=[(0, 10)])
        for i in range(len(edges)):
            self.assertTrue((edges[i] == dp_edges[i]).all())

    def test_different_result(self):
        """With small epsilon the noisy counts must differ from NumPy's."""
        global_seed(3141592653)  # fixed seed keeps the test deterministic
        a = np.array([1, 2, 3, 4, 5])
        hist, _ = np.histogramdd(a, bins=3, range=[(0, 10)])
        dp_hist, _ = histogramdd(a, epsilon=0.1, bins=3, range=[(0, 10)])
        self.assertTrue((hist != dp_hist).any())

    def test_density_1d(self):
        """1-D density sums to bins/range = 3/10 (as asserted below)."""
        global_seed(3141592653)
        a = np.array([1, 2, 3, 4, 5])
        dp_hist, _ = histogramdd(a, epsilon=10, bins=3, range=[(0, 10)], density=True)
        self.assertAlmostEqual(dp_hist.sum(), 1.0 * 3 / 10)

    def test_density_2d(self):
        """2-D density sums to (bins/range)**2 = (3/10)**2."""
        global_seed(3141592653)
        a = np.array([[1, 2, 3, 4, 5], [6, 7, 8, 9, 10]]).T
        dp_hist, _ = histogramdd(a, epsilon=10, bins=3, range=[(0, 10), (0, 10)], density=True)
        self.assertAlmostEqual(dp_hist.sum(), 1.0 * (3 / 10) ** 2)

    def test_accountant(self):
        """An explicit accountant must raise once the budget is exceeded."""
        acc = BudgetAccountant(1.5, 0)
        a = np.array([[1, 2, 3, 4, 5], [6, 7, 8, 9, 10]]).T
        histogramdd(a, epsilon=1, bins=3, range=[(0, 10), (0, 10)], density=True, accountant=acc)
        with self.assertRaises(BudgetError):
            histogramdd(a, epsilon=1, bins=3, range=[(0, 10), (0, 10)], density=True, accountant=acc)

    def test_default_accountant(self):
        """Without an explicit accountant, the default one records the spend."""
        BudgetAccountant.pop_default()
        a = np.array([[1, 2, 3, 4, 5], [6, 7, 8, 9, 10]]).T
        histogramdd(a, epsilon=1, bins=3, range=[(0, 10), (0, 10)], density=True)
        acc = BudgetAccountant.pop_default()
        self.assertEqual((1, 0), acc.total())
        self.assertEqual(acc.epsilon, float("inf"))
        self.assertEqual(acc.delta, 1.0)
        # ``acc`` was popped, so this second call must not add to its total.
        histogramdd(a, epsilon=1, bins=3, range=[(0, 10), (0, 10)])
        self.assertEqual((1, 0), acc.total())
| 35.741176 | 101 | 0.586241 | import numpy as np
from unittest import TestCase
from diffprivlib.accountant import BudgetAccountant
from diffprivlib.tools.histograms import histogramdd
from diffprivlib.utils import global_seed, PrivacyLeakWarning, BudgetError
class TestHistogramdd(TestCase):
def test_no_params(self):
a = np.array([1, 2, 3, 4, 5])
with self.assertWarns(PrivacyLeakWarning):
res = histogramdd(a)
self.assertIsNotNone(res)
def test_no_range(self):
a = np.array([1, 2, 3, 4, 5])
with self.assertWarns(PrivacyLeakWarning):
res = histogramdd(a, epsilon=2)
self.assertIsNotNone(res)
def test_bins_instead_of_range(self):
a = np.array([1, 2, 3, 4, 5])
res = histogramdd([a, a], epsilon=2, bins=([0, 2, 6], [0, 2, 6]))
self.assertIsNotNone(res)
def test_same_edges(self):
a = np.array([1, 2, 3, 4, 5])
_, edges = np.histogramdd(a, bins=3, range=[(0, 10)])
_, dp_edges = histogramdd(a, epsilon=1, bins=3, range=[(0, 10)])
for i in range(len(edges)):
self.assertTrue((edges[i] == dp_edges[i]).all())
def test_different_result(self):
global_seed(3141592653)
a = np.array([1, 2, 3, 4, 5])
hist, _ = np.histogramdd(a, bins=3, range=[(0, 10)])
dp_hist, _ = histogramdd(a, epsilon=0.1, bins=3, range=[(0, 10)])
self.assertTrue((hist != dp_hist).any())
def test_density_1d(self):
global_seed(3141592653)
a = np.array([1, 2, 3, 4, 5])
dp_hist, _ = histogramdd(a, epsilon=10, bins=3, range=[(0, 10)], density=True)
self.assertAlmostEqual(dp_hist.sum(), 1.0 * 3 / 10)
def test_density_2d(self):
global_seed(3141592653)
a = np.array([[1, 2, 3, 4, 5], [6, 7, 8, 9, 10]]).T
dp_hist, _ = histogramdd(a, epsilon=10, bins=3, range=[(0, 10), (0, 10)], density=True)
self.assertAlmostEqual(dp_hist.sum(), 1.0 * (3 / 10) ** 2)
def test_accountant(self):
acc = BudgetAccountant(1.5, 0)
a = np.array([[1, 2, 3, 4, 5], [6, 7, 8, 9, 10]]).T
histogramdd(a, epsilon=1, bins=3, range=[(0, 10), (0, 10)], density=True, accountant=acc)
with self.assertRaises(BudgetError):
histogramdd(a, epsilon=1, bins=3, range=[(0, 10), (0, 10)], density=True, accountant=acc)
def test_default_accountant(self):
BudgetAccountant.pop_default()
a = np.array([[1, 2, 3, 4, 5], [6, 7, 8, 9, 10]]).T
histogramdd(a, epsilon=1, bins=3, range=[(0, 10), (0, 10)], density=True)
acc = BudgetAccountant.pop_default()
self.assertEqual((1, 0), acc.total())
self.assertEqual(acc.epsilon, float("inf"))
self.assertEqual(acc.delta, 1.0)
histogramdd(a, epsilon=1, bins=3, range=[(0, 10), (0, 10)])
self.assertEqual((1, 0), acc.total())
| true | true |
f737a6e277c0aa1ee8dec40500f79cdf601eb1f2 | 3,439 | py | Python | dualbound/Lagrangian/spatialProjopt_Zops_numpy.py | PengningChao/emdb-sphere | d20ac81ab4fd744f87788bda46d3aa19598658ee | [
"MIT"
] | null | null | null | dualbound/Lagrangian/spatialProjopt_Zops_numpy.py | PengningChao/emdb-sphere | d20ac81ab4fd744f87788bda46d3aa19598658ee | [
"MIT"
] | null | null | null | dualbound/Lagrangian/spatialProjopt_Zops_numpy.py | PengningChao/emdb-sphere | d20ac81ab4fd744f87788bda46d3aa19598658ee | [
"MIT"
] | null | null | null | #!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
Created on Mon May 4 21:39:12 2020
@author: pengning
This is part of the grad/Hess engine for spatial projection versions of the
original global constraint <S|T>-<T|U|T>. The Lagrangian multipliers are distributed in
the order alphaP0_1, alphaP0_2, alphaP1_1, alphaP1_2 ... where P0 is just the identity
"""
import numpy as np
def Z_TT(Lags, O, UPlist):
    """Assemble ZTT = O + sum_i (a_i Sym(UP_i) + b_i Asym(UP_i)).

    Parameters
    ----------
    Lags : sequence of float
        Lagrange multipliers interleaved as
        ``[alphaP0_1, alphaP0_2, alphaP1_1, alphaP1_2, ...]``:
        ``Lags[2*i]`` scales the Hermitian part and ``Lags[2*i+1]`` the
        anti-Hermitian part of ``UPlist[i]`` (P0 is the identity, so
        ``UPlist[0]`` is the original U matrix).
    O : ndarray
        Base matrix; copied, never modified in place.
    UPlist : sequence of ndarray
        Spatially projected U matrices, one per projection constraint.

    Returns
    -------
    ndarray
        Complex matrix of the same shape as ``O``.
    """
    # Bug fix: ``np.complex`` was deprecated in NumPy 1.20 and removed in
    # 1.24; the builtin ``complex`` (-> complex128) is the correct dtype.
    ZTT = np.array(O, dtype=complex)
    for i, UP in enumerate(UPlist):
        SymUP = (UP + UP.conj().T) / 2      # Hermitian part
        AsymUP = (UP - UP.conj().T) / (2j)  # anti-Hermitian part
        ZTT += Lags[2 * i] * SymUP + Lags[2 * i + 1] * AsymUP
    return ZTT
def grad_Z_TT(Lags, UPlist):
    """Gradient of ZTT w.r.t. the multipliers.

    ZTT is linear in the multipliers, so the gradient is simply the
    interleaved list ``[Sym(UP_0), Asym(UP_0), Sym(UP_1), ...]``.
    ``Lags`` is accepted only for signature compatibility and is unused.
    """
    gradZ = []
    for UP in UPlist:
        UPH = UP.conj().T
        gradZ.append((UP + UPH) / 2)    # d ZTT / d alpha_i1
        gradZ.append((UP - UPH) / (2j)) # d ZTT / d alpha_i2
    return gradZ
def check_spatialProj_Lags_validity(Lags, Olist, UPlistlist):
    """Return the smallest ZTT eigenvalue over all modes.

    Bails out early (printing the value) as soon as any mode yields a
    negative minimum eigenvalue, i.e. as soon as the multipliers leave
    the region where every ZTT is positive semidefinite.
    """
    mineig = np.inf
    for O, UPlist in zip(Olist, UPlistlist):
        smallest = np.linalg.eigvalsh(Z_TT(Lags, O, UPlist))[0]
        if smallest < 0:
            print('mineig', smallest)
            return smallest
        mineig = min(mineig, smallest)
    return mineig
def find_singular_ZTT_eigv(Lags, Olist, UPlistlist):
    """Find the mode whose ZTT has the smallest eigenvalue.

    Returns ``(mode_index, eigenvector)`` for the first mode whose
    minimum eigenvalue is non-positive; otherwise for the mode attaining
    the global minimum over all modes. ``(-1, zeros)`` can only occur
    for empty input.
    """
    modemineig, mineigw = -1, np.inf
    mineigv = np.zeros(Olist[0].shape[0])
    for idx, (O, UPlist) in enumerate(zip(Olist, UPlistlist)):
        eigw, eigv = np.linalg.eigh(Z_TT(Lags, O, UPlist))
        if eigw[0] <= 0:
            # ZTT is singular/indefinite here -- this is the mode we want.
            return idx, eigv[:, 0]
        if eigw[0] < mineigw:
            modemineig, mineigw, mineigv = idx, eigw[0], eigv[:, 0]
    return modemineig, mineigv
def get_ZTT_mineig(Lags, Olist, UPlistlist, eigvals_only=False):
    """Smallest ZTT eigenvalue across modes, with early exit when non-positive.

    Returns ``(mode, eigenvalue)`` when ``eigvals_only`` is True and
    ``(mode, eigenvalue, eigenvector)`` otherwise -- note the tuple
    arity differs between the two branches.  ``mode`` is the index of
    the first mode with a non-positive minimum eigenvalue, or else the
    mode attaining the global minimum.
    """
    modenum = len(Olist)
    mineigw = np.inf
    modemineig = -1
    if eigvals_only:
        for mode in range(modenum):
            ZTT = Z_TT(Lags, Olist[mode], UPlistlist[mode])
            eigw = np.linalg.eigvalsh(ZTT)
            if eigw[0]<=0:
                # Multipliers are outside the PSD (dual-feasible) region.
                return mode, eigw[0]
            elif eigw[0]<mineigw:
                mineigw = eigw[0]
                modemineig = mode
        return modemineig, mineigw
    else:
        for mode in range(modenum):
            ZTT = Z_TT(Lags, Olist[mode], UPlistlist[mode])
            eigw, eigv = np.linalg.eigh(ZTT)
            if eigw[0]<=0:
                # Early exit: also hand back the offending eigenvector.
                return mode, eigw[0], eigv[:,0]
            elif eigw[0]<mineigw:
                mineigw = eigw[0]
                mineigv = eigv[:,0]
                modemineig = mode
        # NOTE(review): ``mineigv`` is unbound when ``Olist`` is empty --
        # callers are expected to pass at least one mode.
        return modemineig, mineigw, mineigv
def get_inc_ZTT_mineig(incLags, include, Olist, UPlistlist, eigvals_only=False):
    """Expand the reduced multiplier vector to the full set, then delegate.

    ``include`` selects which multipliers are active (assumed to be a
    boolean mask over the full vector, given the ``len(include)`` sizing
    -- TODO confirm); excluded entries are held at zero.
    """
    fullLags = np.zeros(len(include))
    fullLags[include] = incLags
    return get_ZTT_mineig(fullLags, Olist, UPlistlist, eigvals_only=eigvals_only)
def get_ZTT_mineig_grad(ZTT, gradZTT):
    """Derivative of ZTT's smallest eigenvalue w.r.t. each multiplier.

    For each gradient matrix dZ the derivative is the real quadratic
    form <v| dZ |v>, where v is the eigenvector of ZTT's smallest
    eigenvalue.  Used during phase I of the optimization (steering the
    multipliers into the domain of duality).
    """
    _, eigv = np.linalg.eigh(ZTT)
    v = eigv[:, 0]  # eigenvector of the smallest eigenvalue
    return np.array([np.real(np.vdot(v, dZ @ v)) for dZ in gradZTT])
| 30.705357 | 115 | 0.592033 |
import numpy as np
def Z_TT(Lags, O, UPlist):
ZTT = np.zeros_like(O, dtype=np.complex)
ZTT[:,:] = O[:,:]
for i in range(len(UPlist)):
SymUP = (UPlist[i]+UPlist[i].conj().T)/2
AsymUP = (UPlist[i]-UPlist[i].conj().T)/(2j)
ZTT += Lags[2*i]*SymUP + Lags[2*i+1]*AsymUP
return ZTT
def grad_Z_TT(Lags, UPlist):
gradZ = []
for i in range(len(UPlist)):
SymUP = (UPlist[i]+UPlist[i].conj().T)/2
AsymUP = (UPlist[i]-UPlist[i].conj().T)/(2j)
gradZ.append(SymUP)
gradZ.append(AsymUP)
return gradZ
def check_spatialProj_Lags_validity(Lags, Olist, UPlistlist):
modenum = len(Olist)
mineig = np.inf
for mode in range(modenum):
ZTT = Z_TT(Lags, Olist[mode], UPlistlist[mode])
eigZTT = np.linalg.eigvalsh(ZTT)
if eigZTT[0]<0:
print('mineig', eigZTT[0])
return eigZTT[0]
mineig = min(mineig,eigZTT[0])
return mineig
def find_singular_ZTT_eigv(Lags, Olist, UPlistlist):
modenum = len(Olist)
mineigw = np.inf
mineigv = np.zeros(Olist[0].shape[0])
modemineig = -1
for i in range(modenum):
ZTT = Z_TT(Lags, Olist[i], UPlistlist[i])
eigw, eigv = np.linalg.eigh(ZTT)
if eigw[0]<=0:
modemineig = i
mineigv = eigv[:,0]
return modemineig, mineigv
elif eigw[0]<mineigw:
mineigw = eigw[0]
mineigv = eigv[:,0]
modemineig = i
return modemineig, mineigv
def get_ZTT_mineig(Lags, Olist, UPlistlist, eigvals_only=False):
modenum = len(Olist)
mineigw = np.inf
modemineig = -1
if eigvals_only:
for mode in range(modenum):
ZTT = Z_TT(Lags, Olist[mode], UPlistlist[mode])
eigw = np.linalg.eigvalsh(ZTT)
if eigw[0]<=0:
return mode, eigw[0]
elif eigw[0]<mineigw:
mineigw = eigw[0]
modemineig = mode
return modemineig, mineigw
else:
for mode in range(modenum):
ZTT = Z_TT(Lags, Olist[mode], UPlistlist[mode])
eigw, eigv = np.linalg.eigh(ZTT)
if eigw[0]<=0:
return mode, eigw[0], eigv[:,0]
elif eigw[0]<mineigw:
mineigw = eigw[0]
mineigv = eigv[:,0]
modemineig = mode
return modemineig, mineigw, mineigv
def get_inc_ZTT_mineig(incLags, include, Olist, UPlistlist, eigvals_only=False):
Lags = np.zeros(len(include))
Lags[include] = incLags[:]
return get_ZTT_mineig(Lags, Olist, UPlistlist, eigvals_only=eigvals_only)
return eiggrad
| true | true |
f737a7e48890e5d48eacadf678b96723ca2c86ff | 729 | py | Python | sites/stacks/api.py | bluebirdio/improbable-sites | faeb00a37f3993c93ea9cf45d43258705d7afc22 | [
"MIT"
] | null | null | null | sites/stacks/api.py | bluebirdio/improbable-sites | faeb00a37f3993c93ea9cf45d43258705d7afc22 | [
"MIT"
] | 1 | 2020-07-29T19:07:25.000Z | 2020-07-29T19:07:25.000Z | sites/stacks/api.py | bluebirdio/samey-sites | faeb00a37f3993c93ea9cf45d43258705d7afc22 | [
"MIT"
] | 1 | 2020-03-25T22:36:48.000Z | 2020-03-25T22:36:48.000Z | from typing import List
from fastapi import APIRouter
from samey.table_crud import *
from .models import *
router = APIRouter()
@router.get("/", response_model=List[Stack])
def list_stacks():
return query(tables.Stack)
@router.post("/", response_model=Stack, status_code=201)
def create_stack(stack_in: Stack):
return create(tables.Stack, stack_in)
@router.put("/{id}", response_model=Stack)
def update_stack(id: str, stack_in: Stack):
return update(tables.Stack, id, stack_in)
@router.get("/{id}", response_model=Stack)
def get_stack(id: str):
return get_or_error(tables.Stack, id, "Stack not found")
@router.delete("/{id}", status_code=204)
def delete_stack(id: str):
delete(tables.Stack, id)
| 20.828571 | 60 | 0.721536 | from typing import List
from fastapi import APIRouter
from samey.table_crud import *
from .models import *
router = APIRouter()
@router.get("/", response_model=List[Stack])
def list_stacks():
return query(tables.Stack)
@router.post("/", response_model=Stack, status_code=201)
def create_stack(stack_in: Stack):
return create(tables.Stack, stack_in)
@router.put("/{id}", response_model=Stack)
def update_stack(id: str, stack_in: Stack):
return update(tables.Stack, id, stack_in)
@router.get("/{id}", response_model=Stack)
def get_stack(id: str):
return get_or_error(tables.Stack, id, "Stack not found")
@router.delete("/{id}", status_code=204)
def delete_stack(id: str):
delete(tables.Stack, id)
| true | true |
f737a8523724bab88d445e40877fd4727dede84d | 6,084 | py | Python | getin/management/commands/getin.py | confuzeus/django-getin | 1954a546990e5e163d4dc040e4b9a6f705a7777f | [
"MIT"
] | null | null | null | getin/management/commands/getin.py | confuzeus/django-getin | 1954a546990e5e163d4dc040e4b9a6f705a7777f | [
"MIT"
] | null | null | null | getin/management/commands/getin.py | confuzeus/django-getin | 1954a546990e5e163d4dc040e4b9a6f705a7777f | [
"MIT"
] | null | null | null | from typing import Optional
from django.core.exceptions import ValidationError
from django.core.management import BaseCommand, CommandError
from django.db import IntegrityError
from django.utils.translation import gettext_lazy as _
from django_fsm import TransitionNotAllowed
from getin.models import Invitation, InvitationState
from getin.utils import email_invitation
class Command(BaseCommand):
    """``manage.py getin`` -- create, expire and send invitations.

    One action flag (``--create``, ``--expire``, ``--force-expire`` or
    ``--send``) is combined with a selector (``--count``, ``--id`` or
    ``--all``) plus optional ``--state`` / ``--email`` arguments.
    """

    help = _("Manage invitations")

    def _create_invitations(self, count: Optional[int] = None):
        """Create ``count`` invitations, or a single one when count is falsy."""

        def _create():
            invitation = Invitation.create()
            self.stdout.write(
                self.style.SUCCESS(
                    _(f'Invitation with ID "{invitation.pk}" has been created.')
                )
            )

        if count:
            for __ in range(count):
                _create()
        else:
            _create()

    @staticmethod
    def _get_invitation(id_: int):
        """Fetch an invitation by primary key or abort with a CommandError."""
        try:
            invitation = Invitation.objects.get(pk=id_)
        except Invitation.DoesNotExist:
            raise CommandError(_(f'Invitation with ID "{id_}" doesn\'t exist.'))
        return invitation

    def _state_transition(
        self,
        obj: Invitation,
        target: InvitationState,
        force: Optional[bool] = False,
        **kwargs,
    ):
        """Drive ``obj`` into ``target`` state, validate and persist it.

        Any FSM, validation or database failure is re-raised as a
        CommandError so the management command exits cleanly.
        """
        try:
            if target == InvitationState.EXPIRED:
                # ``force_expire`` bypasses the normal transition guards.
                if force:
                    obj.force_expire()
                else:
                    obj.expire()
            elif target == InvitationState.SENT:
                func = kwargs.pop("send_func")
                email = kwargs.pop("email")
                obj.send_invitation(func, email=email)
            else:
                raise CommandError(_(f'"{target}" is an unknown target state'))
            obj.full_clean()
            obj.save()
        except (TransitionNotAllowed, ValidationError, IntegrityError) as e:
            raise CommandError(e)
        # Bug fix: the success message previously said "has expired" even
        # when the invitation had just been *sent*.
        if target == InvitationState.EXPIRED:
            self.stdout.write(_(f'Invitation with ID "{obj.pk}" has expired.'))
        else:
            self.stdout.write(_(f'Invitation with ID "{obj.pk}" has been sent.'))

    def _expire_invitations(
        self,
        id_: Optional[int] = None,
        all_: Optional[bool] = None,
        state: Optional[str] = None,
        force: Optional[bool] = False,
    ):
        """Expire one invitation (by id) or many (``--all``, optionally filtered)."""
        if id_:
            invitation = self._get_invitation(id_)
            self._state_transition(invitation, InvitationState.EXPIRED, force)
            return
        invitations = None
        if all_ and state:
            invitations = Invitation.objects.filter(state=state)
        elif all_ and not state:
            # Expiring everything is destructive -- require confirmation.
            cont = input(_("This will expire all invitations. Continue? (y/n)"))
            if cont.strip().lower() == "y":
                invitations = Invitation.objects.all()
            else:
                self.stdout.write("Aborted.")
                return
        if invitations:
            for invitation in invitations:
                self._state_transition(invitation, InvitationState.EXPIRED, force)
            return
        self.stdout.write(self.style.WARNING(_("No invitations found.")))

    def _send_invitation(self, id_, method: str = "email", **kwargs):
        """Send invitation ``id_`` using the given delivery method."""
        invitation = self._get_invitation(id_)
        if method == "email":
            self._state_transition(
                invitation,
                InvitationState.SENT,
                send_func=email_invitation,
                email=kwargs.pop("email"),
            )
        else:
            raise CommandError(
                _(f'I don\'t know how to send invitations using "{method}".')
            )

    def add_arguments(self, parser):
        """Declare the mutually exclusive action and selector flags."""
        action_group = parser.add_mutually_exclusive_group()
        action_group.add_argument(
            "--create",
            action="store_true",
            default=False,
            help=_("Create invitations."),
        )
        action_group.add_argument(
            "--expire", action="store_true", help=_("Expire invitations.")
        )
        action_group.add_argument(
            "--send", action="store_true", help=_("Send an invitation.")
        )
        action_group.add_argument(
            "--force-expire",
            action="store_true",
            help=_("Force invitations to expire."),
        )
        amount_group = parser.add_mutually_exclusive_group()
        amount_group.add_argument(
            "--count", default=0, type=int, help=_("Number of invitations to create")
        )
        amount_group.add_argument(
            "--id",
            type=int,
            help=_("The id of a specific invitation you want to act on."),
        )
        amount_group.add_argument(
            "--all", action="store_true", help=_("Act on all invitations.")
        )
        parser.add_argument(
            "--state",
            choices=(
                InvitationState.UNSENT.value,
                InvitationState.SENT.value,
                InvitationState.CONSUMED.value,
                InvitationState.EXPIRED.value,
            ),
            help=(_("Filter the state of the invitation(s).")),
        )
        parser.add_argument(
            "--email", type=str, help=_("The email address to send the invitation to.")
        )

    def handle(self, *args, **options):
        """Dispatch to the requested action based on the parsed flags."""
        create = options.get("create")
        expire = options.get("expire")
        force_expire = options.get("force_expire")
        send = options.get("send")
        count = options.get("count")
        id_ = options.get("id")
        all_ = options.get("all")
        state = options.get("state")
        email = options.get("email")
        if create:
            return self._create_invitations(count)
        if expire:
            return self._expire_invitations(id_, all_, state)
        if force_expire:
            return self._expire_invitations(id_, all_, state, force=True)
        if send:
            if not email:
                raise CommandError(_("Please provide an email address."))
            return self._send_invitation(id_, email=email)
| 31.360825 | 87 | 0.559993 | from typing import Optional
from django.core.exceptions import ValidationError
from django.core.management import BaseCommand, CommandError
from django.db import IntegrityError
from django.utils.translation import gettext_lazy as _
from django_fsm import TransitionNotAllowed
from getin.models import Invitation, InvitationState
from getin.utils import email_invitation
class Command(BaseCommand):
help = _("Manage invitations")
def _create_invitations(self, count: Optional[int] = None):
def _create():
invitation = Invitation.create()
self.stdout.write(
self.style.SUCCESS(
_(f'Invitation with ID "{invitation.pk}" has been created.')
)
)
if count:
for __ in range(count):
_create()
else:
_create()
@staticmethod
def _get_invitation(id_: int):
try:
invitation = Invitation.objects.get(pk=id_)
except Invitation.DoesNotExist:
raise CommandError(_(f'Invitation with ID "{id_}" doesn\'t exist.'))
return invitation
def _state_transition(
self,
obj: Invitation,
target: InvitationState,
force: Optional[bool] = False,
**kwargs,
):
try:
if target == InvitationState.EXPIRED:
if force:
obj.force_expire()
else:
obj.expire()
elif target == InvitationState.SENT:
func = kwargs.pop("send_func")
email = kwargs.pop("email")
obj.send_invitation(func, email=email)
else:
raise CommandError(_(f'"{target}" is an unknown target state'))
obj.full_clean()
obj.save()
except TransitionNotAllowed as e:
raise CommandError(e)
except ValidationError as e:
raise CommandError(e)
except IntegrityError as e:
raise CommandError(e)
self.stdout.write(_(f'Invitation with ID "{obj.pk}" has expired.'))
def _expire_invitations(
self,
id_: Optional[int] = None,
all_: Optional[bool] = None,
state: Optional[str] = None,
force: Optional[bool] = False,
):
if id_:
invitation = self._get_invitation(id_)
self._state_transition(invitation, InvitationState.EXPIRED, force)
return
invitations = None
if all_ and state:
invitations = Invitation.objects.filter(state=state)
elif all_ and not state:
cont = input(_("This will expire all invitations. Continue? (y/n)"))
if cont.strip().lower() == "y":
invitations = Invitation.objects.all()
else:
self.stdout.write("Aborted.")
return
if invitations:
for invitation in invitations:
self._state_transition(invitation, InvitationState.EXPIRED, force)
return
self.stdout.write(self.style.WARNING(_("No invitations found.")))
def _send_invitation(self, id_, method: str = "email", **kwargs):
invitation = self._get_invitation(id_)
if method == "email":
self._state_transition(
invitation,
InvitationState.SENT,
send_func=email_invitation,
email=kwargs.pop("email"),
)
else:
raise CommandError(
_(f'I don\'t know how to send invitations using "{method}".')
)
def add_arguments(self, parser):
action_group = parser.add_mutually_exclusive_group()
action_group.add_argument(
"--create",
action="store_true",
default=False,
help=_("Create invitations."),
)
action_group.add_argument(
"--expire", action="store_true", help=_("Expire invitations.")
)
action_group.add_argument(
"--send", action="store_true", help=_("Send an invitation.")
)
action_group.add_argument(
"--force-expire",
action="store_true",
help=_("Force invitations to expire."),
)
amount_group = parser.add_mutually_exclusive_group()
amount_group.add_argument(
"--count", default=0, type=int, help=_("Number of invitations to create")
)
amount_group.add_argument(
"--id",
type=int,
help=_("The id of a specific invitation you want to act on."),
)
amount_group.add_argument(
"--all", action="store_true", help=_("Act on all invitations.")
)
parser.add_argument(
"--state",
choices=(
InvitationState.UNSENT.value,
InvitationState.SENT.value,
InvitationState.CONSUMED.value,
InvitationState.EXPIRED.value,
),
help=(_("Filter the state of the invitation(s).")),
)
parser.add_argument(
"--email", type=str, help=_("The email address to send the invitation to.")
)
def handle(self, *args, **options):
create = options.get("create")
expire = options.get("expire")
force_expire = options.get("force_expire")
send = options.get("send")
count = options.get("count")
id_ = options.get("id")
all_ = options.get("all")
state = options.get("state")
email = options.get("email")
if create:
return self._create_invitations(count)
if expire:
return self._expire_invitations(id_, all_, state)
if force_expire:
return self._expire_invitations(id_, all_, state, force=True)
if send:
if not email:
raise CommandError(_("Please provide an email address."))
return self._send_invitation(id_, email=email)
| true | true |
f737a88d3c8e49174a66091151be27bb136528e1 | 3,592 | py | Python | auxiliary.py | juniorcl/virtual-lockin-prototype | 5f75897a65620f6180f37bcaa3b4291d605aaf9f | [
"MIT"
] | 4 | 2021-03-24T20:43:18.000Z | 2021-09-09T14:12:24.000Z | auxiliary.py | juniorcl/virtual-lockin-prototype | 5f75897a65620f6180f37bcaa3b4291d605aaf9f | [
"MIT"
] | null | null | null | auxiliary.py | juniorcl/virtual-lockin-prototype | 5f75897a65620f6180f37bcaa3b4291d605aaf9f | [
"MIT"
] | 1 | 2021-05-21T04:38:04.000Z | 2021-05-21T04:38:04.000Z | ############################# Helper #################################
## This file was created to support the lock-in program ##
######################################################################
## These functions can be imported using: import lockin-auxiliary as aux
## and put aux.<name of the function>
from scipy.signal import bessel, filtfilt, butter
import numpy as np
import wave
def lowPassFilter(sinal, REFSIG, RATE, ORDER, ROLL, CUTOFF):
    """Run the three PSD demodulators (raw FFT bin 0, Bessel, Butterworth)
    on the same signal/reference pair and return their (x, y) outputs."""
    fft_y, fft_x = freq0fftPSD(sinal, REFSIG, RATE, ROLL)
    bessel_y, bessel_x = lowBesselPSD(sinal, REFSIG, RATE, CUTOFF, ORDER, ROLL)
    butter_y, butter_x = lowButterPSD(sinal, REFSIG, RATE, CUTOFF, ORDER, ROLL)
    return fft_x, fft_y, bessel_x, bessel_y, butter_x, butter_y
def refSignal(file, chunk):
    """Read *chunk* frames from WAV *file* and return one channel in volts.

    The 16-bit frames are de-interleaved by taking every other sample
    ([::2]) before conversion with inVolt().
    NOTE(review): [::2] presumably selects the left channel of a stereo
    stream — confirm the recordings are indeed 2-channel.
    """
    # Context manager guarantees the wave handle is closed (the original
    # implementation leaked it).
    with wave.open(file, 'rb') as wf:
        sinalbit = np.frombuffer(wf.readframes(chunk), np.int16)
    return inVolt(sinalbit[::2])
def rmsFunction(signal):
    """Return the root-mean-square of *signal* (0.0 for an empty signal)."""
    values = np.asarray(signal, dtype=float)
    if values.size == 0:
        # Matches the original behaviour: the sum over an empty map is 0,
        # so the RMS of an empty signal is 0.0 (not an error).
        return 0.0
    # Vectorized sqrt(mean(x^2)) instead of a per-element lambda/map pass.
    return float(np.sqrt(np.mean(np.square(values))))
def sigMultiply(signal, signal_ref, roll):
    """Mix *signal* with the reference and with the reference circularly
    shifted by *roll* samples (in-phase and quadrature products)."""
    in_phase = np.multiply(signal, signal_ref)
    quadrature = np.multiply(signal, np.roll(signal_ref, roll))
    return in_phase, quadrature
def lowButter(data, fs, cutoff, order):
    """Zero-phase Butterworth low-pass filter of *data* sampled at *fs* Hz."""
    normalized = cutoff / (0.5 * fs)  # cutoff relative to the Nyquist frequency
    numer, denom = butter(order, normalized, btype='low', analog=False)
    # filtfilt applies the filter forward and backward (no phase shift).
    return filtfilt(numer, denom, data)
def lowBessel(data, fs, cutoff, order):
    """Zero-phase Bessel low-pass filter of *data* sampled at *fs* Hz."""
    normalized = cutoff / (0.5 * fs)  # cutoff relative to the Nyquist frequency
    numer, denom = bessel(order, normalized, btype='low', analog=False)
    # filtfilt applies the filter forward and backward (no phase shift).
    return filtfilt(numer, denom, data)
def inVolt(signal):
    """Convert raw ADC sample values (bits) to volts.

    Uses a linear calibration measured for this hardware; both coefficients
    were fitted with an error around 7.67e-9.  Each result is rounded to
    6 decimal places, and a list is returned.
    """
    slope = 1.4286015335045335e-4
    intercept = 20.975684328898847e-4
    return [round(slope * bit + intercept, 6) for bit in signal]
def fftFunction(signal, rate):
    """Single-sided amplitude spectrum of *signal* and its frequency axis."""
    n = len(signal)
    amplitudes = np.abs(np.fft.rfft(signal)) / n
    frequencies = np.fft.rfftfreq(n) * rate
    return amplitudes, frequencies
def freq0fftPSD(signal, signal_ref, rate, roll):
    """DC (0 Hz) amplitude of the in-phase and quadrature mixing products."""
    in_phase, quadrature = sigMultiply(signal, signal_ref, roll)
    y_component = fftFunction(in_phase, rate)[0][0]
    x_component = fftFunction(quadrature, rate)[0][0]
    return y_component, x_component
def fftPSD(signal, signal_ref, freq, rate, roll):
    """Amplitude at DC plus the 2*freq mixing component for both channels.

    The original implementation looked 2*freq up in a dict keyed by the FFT
    bin frequencies, which raises KeyError whenever 2*freq does not fall
    exactly on a bin.  The nearest bin is used instead: identical when the
    exact bin exists, and well-defined otherwise.
    """
    in_phase, quadrature = sigMultiply(signal, signal_ref, roll)
    sin_fft, sin_freqs = fftFunction(in_phase, rate)
    cos_fft, cos_freqs = fftFunction(quadrature, rate)
    sin_bin = int(np.argmin(np.abs(sin_freqs - 2 * freq)))
    cos_bin = int(np.argmin(np.abs(cos_freqs - 2 * freq)))
    y = sin_fft[0] + sin_fft[sin_bin]
    x = cos_fft[0] + cos_fft[cos_bin]
    return y, x
def lowButterPSD(signal, signal_ref, rate, cutoff, order, roll):
    """Lock-in output (y, x) using the Butterworth low-pass demodulator."""
    in_phase, quadrature = sigMultiply(signal, signal_ref, roll)
    return (rmsFunction(lowButter(in_phase, rate, cutoff, order)),
            rmsFunction(lowButter(quadrature, rate, cutoff, order)))
def lowBesselPSD(signal, signal_ref, rate, cutoff, order, roll):
    """Lock-in output (y, x) using the Bessel low-pass demodulator."""
    in_phase, quadrature = sigMultiply(signal, signal_ref, roll)
    return (rmsFunction(lowBessel(in_phase, rate, cutoff, order)),
            rmsFunction(lowBessel(quadrature, rate, cutoff, order)))
| 38.623656 | 86 | 0.673998 | true | true | |
f737a8dc6d561a8a791109dbdd40a63238b10f36 | 2,578 | py | Python | test_ntpx.py | devsysenv/tests | 6aa7635b281f2e186daa6375abb8050e44c02d6a | [
"MIT"
] | null | null | null | test_ntpx.py | devsysenv/tests | 6aa7635b281f2e186daa6375abb8050e44c02d6a | [
"MIT"
] | null | null | null | test_ntpx.py | devsysenv/tests | 6aa7635b281f2e186daa6375abb8050e44c02d6a | [
"MIT"
] | 1 | 2022-03-09T12:23:26.000Z | 2022-03-09T12:23:26.000Z | import pytest
import os
import logging
logging.basicConfig()
logger = logging.getLogger(__name__)
logger.setLevel(logging.DEBUG)
from pathlib import Path
from dselib.context import DSEContext
from dselib.ntpx import NTPX
def context(varfile=None):
    """Return the DSE context object for this script."""
    try:
        myself = __file__
    except NameError:
        # __file__ is undefined in some embedded/interactive runs; fall back
        # to argv[0].  ``sys`` is imported locally because this module never
        # imports it at the top level — the original code raised NameError
        # on this fallback path.
        import sys
        myself = sys.argv[0]
    return DSEContext(myself, varfile)
me = context('test_ntpx')
def test_1():
    """Check NTPX drive/path/name/ext decomposition against pathlib.Path
    for a handful of relative and absolute inputs."""
    a = NTPX('.')
    b = NTPX('./fu')
    c = NTPX('./fu.bar')
    d = NTPX('fu.bar')
    e = NTPX('/fu.bar')

    def check_it(input, ntobj, cwd):
        # ``cwd`` is the fully-resolved pathlib.Path the NTPX object should
        # agree with.  On Windows (os.name == 'nt') str(Path) starts with a
        # two-character drive prefix ("C:"), which is sliced off before the
        # path components are compared.
        # logger.info(f"{cwd=}::{str(cwd)=}::{os.sep=}::{str(cwd.parent)[2 if os.name == 'nt' else 0:]}")
        # ``suffix`` appends a trailing os.sep unless the parent already is
        # the filesystem root (whose string form ends with os.sep itself).
        suffix = lambda x: '' if str(x.parent)[2 if os.name == 'nt' else 0:] == os.sep else os.sep
        path = lambda x: str(x.parent)[2 if os.name == 'nt' else 0:] + suffix(x)
        # logger.info(f"{cwd=}::{suffix(cwd)=}::justpath={path(cwd)=}")
        # the path_suffix is os.sep unless we are already at the root directory
        # logger.info(f"{os.path.split(cwd)=}::{str(cwd.parent)=}")
        # NOTE(review): path_suffix is never used below — superseded by path(cwd).
        path_suffix = '' if str(cwd.parent)[2 if os.name == 'nt' else 0:] == os.sep else os.sep
        # format() codes: d=drive, p=path, n=name (stem), x=extension.
        assert ntobj.format('dpnx') == str(cwd)
        assert ntobj.format('d') == cwd.drive
        assert ntobj.format('p') == path(cwd) #str(cwd.parent)[2 if os.name == 'nt' else 0:] + path_suffix
        assert ntobj.format('n') == cwd.stem
        assert ntobj.format('x') == cwd.suffix
        # The attribute accessors must agree with the format() codes above.
        assert ntobj.drive == cwd.drive
        assert ntobj.path == path(cwd) #str(cwd.parent)[2 if os.name == 'nt' else 0:] + path_suffix
        assert ntobj.name == cwd.stem
        assert ntobj.ext == cwd.suffix
        # logger.info(f"ntobj.all::{ntobj.all()[:5]}")
        # logger.info(f"otherexpr::{(str(cwd), cwd.drive, path(cwd), cwd.stem, cwd.suffix)}")
        # assert ntobj.all()[:5] == (str(cwd), cwd.drive, path(cwd), cwd.stem, cwd.suffix)
        assert ntobj.all()[:5] == (str(cwd), cwd.drive, path(cwd), cwd.stem, cwd.suffix)
        assert ntobj.full == cwd
        # assert ntobj == str(cwd) # C:\Users\user\dse\test == C:\\Users\\user\\dse\\test
        logger.info(f"NTPX('{input}') has passed. fully qualified is {ntobj.full}. formatted is {ntobj.format('dpnx')}")

    check_it('.', a, Path('.').resolve())
    check_it('./fu', b, Path('./fu').resolve())
    check_it('./fu.bar', c, Path('fu.bar').resolve())
    check_it('fu.bar', d, Path('fu.bar').resolve())
    check_it('/fu.bar', e, Path('/fu.bar').resolve())
| 40.920635 | 120 | 0.590768 | import pytest
import os
import logging
logging.basicConfig()
logger = logging.getLogger(__name__)
logger.setLevel(logging.DEBUG)
from pathlib import Path
from dselib.context import DSEContext
from dselib.ntpx import NTPX
def context(varfile=None):
try:
myself = __file__
except NameError:
myself = sys.argv[0]
return DSEContext(myself, varfile)
me = context('test_ntpx')
def test_1():
a = NTPX('.')
b = NTPX('./fu')
c = NTPX('./fu.bar')
d = NTPX('fu.bar')
e = NTPX('/fu.bar')
def check_it(input, ntobj, cwd):
suffix = lambda x: '' if str(x.parent)[2 if os.name == 'nt' else 0:] == os.sep else os.sep
path = lambda x: str(x.parent)[2 if os.name == 'nt' else 0:] + suffix(x)
path_suffix = '' if str(cwd.parent)[2 if os.name == 'nt' else 0:] == os.sep else os.sep
assert ntobj.format('dpnx') == str(cwd)
assert ntobj.format('d') == cwd.drive
assert ntobj.format('p') == path(cwd)
assert ntobj.format('n') == cwd.stem
assert ntobj.format('x') == cwd.suffix
assert ntobj.drive == cwd.drive
assert ntobj.path == path(cwd)
assert ntobj.name == cwd.stem
assert ntobj.ext == cwd.suffix
assert ntobj.all()[:5] == (str(cwd), cwd.drive, path(cwd), cwd.stem, cwd.suffix)
assert ntobj.full == cwd
ly qualified is {ntobj.full}. formatted is {ntobj.format('dpnx')}")
check_it('.', a, Path('.').resolve())
check_it('./fu', b, Path('./fu').resolve())
check_it('./fu.bar', c, Path('fu.bar').resolve())
check_it('fu.bar', d, Path('fu.bar').resolve())
check_it('/fu.bar', e, Path('/fu.bar').resolve())
| true | true |
f737a99452fae6bff69ab801de12cfdae34aa357 | 388 | py | Python | tests/unit/conftest.py | keang/dredis | 520b3c10a1cee6de9d0f73cd2c43298ce3f9598a | [
"MIT"
] | 53 | 2018-09-19T15:19:09.000Z | 2022-03-06T17:05:32.000Z | tests/unit/conftest.py | keang/dredis | 520b3c10a1cee6de9d0f73cd2c43298ce3f9598a | [
"MIT"
] | 31 | 2018-09-19T16:45:46.000Z | 2021-05-05T15:12:20.000Z | tests/unit/conftest.py | keang/dredis | 520b3c10a1cee6de9d0f73cd2c43298ce3f9598a | [
"MIT"
] | 5 | 2018-09-19T16:42:25.000Z | 2022-03-07T11:36:57.000Z | import pytest
from dredis import config
from dredis.db import DB_MANAGER
from dredis.keyspace import Keyspace
@pytest.fixture
def keyspace():
    """Yield a fresh in-memory Keyspace and restore config options afterwards."""
    DB_MANAGER.setup_dbs('', backend='memory', backend_options={})
    # get_all('*') returns a flat [option, value, option, value, ...] list;
    # snapshot it so any option a test mutates can be restored on teardown.
    saved = config.get_all('*')
    yield Keyspace()
    flat = iter(saved)
    for option, value in zip(flat, flat):
        config.set(option, value)
| 25.866667 | 77 | 0.726804 | import pytest
from dredis import config
from dredis.db import DB_MANAGER
from dredis.keyspace import Keyspace
@pytest.fixture
def keyspace():
DB_MANAGER.setup_dbs('', backend='memory', backend_options={})
original_configs = config.get_all('*')
yield Keyspace()
for option, value in zip(original_configs[0::2], original_configs[1::2]):
config.set(option, value)
| true | true |
f737ac68734a06a4c23d97260750274f29045fd2 | 2,310 | py | Python | cosine_categorize.py | yuyunliuhen/automatic-text-categorization | 6f8ca4f26d2ac684439cc265a4ec468ad9d30d20 | [
"MIT"
] | null | null | null | cosine_categorize.py | yuyunliuhen/automatic-text-categorization | 6f8ca4f26d2ac684439cc265a4ec468ad9d30d20 | [
"MIT"
] | null | null | null | cosine_categorize.py | yuyunliuhen/automatic-text-categorization | 6f8ca4f26d2ac684439cc265a4ec468ad9d30d20 | [
"MIT"
] | null | null | null | # encoding=utf-8
import os
import sys
import math
from util_tool import *
import numpy
def categorization_files(path):
    """Categorize the first '.vec' file found under *path*.

    Only one file is processed: when a topic model is used the sample
    features are the same for every file, so computing once is enough
    (hence the immediate ``break``).
    """
    for vec_name in search_directory(path, 'vec'):
        categorization_file(vec_name)
        break
def categorization_file(vec_file):
    """Compare every pair of document vectors in *vec_file* by cosine
    similarity and write the ranked results plus an accuracy summary to a
    sibling '.final' file.

    A pair counts as a hit when both documents share the same category
    (characters [0:7] of the third '/'-separated segment of their names)
    and their cosine similarity is positive.
    """
    final_file = vec_file.replace('vec', 'final')
    result_list = []
    total = 0  # number of same-category pairs: the accuracy denominator
    with open(vec_file, 'r') as handle_froms:
        for from_line in handle_froms:
            from_data = from_line.split()
            # Re-open the file so the inner scan always starts from the top.
            # The context manager fixes the original's per-iteration handle
            # leak (one unclosed file object per outer line).
            with open(vec_file, 'r') as handle_tos:
                for to_line in handle_tos:
                    to_data = to_line.split()
                    if from_data[0] == to_data[0]:
                        continue
                    if from_data[0].split('/')[2][0:7] == to_data[0].split('/')[2][0:7]:
                        total += 1
                    # The first element is the file name, so skip it.
                    from_vec = transfer_vec(from_data[1:len(from_data) - 1])
                    to_vec = transfer_vec(to_data[1:len(to_data) - 1])
                    cosine_value = compute_cosine_value(from_vec, to_vec)
                    result_list.append([from_data[0], to_data[0], cosine_value])
    result_list.sort(key=lambda item: item[2], reverse=True)
    accuracy_count = sum(
        1 for name_a, name_b, cosine in result_list
        if name_a.split('/')[2][0:7] == name_b.split('/')[2][0:7] and cosine > 0
    )
    # Guard against ZeroDivisionError when no same-category pair exists.
    if total:
        accuracy_rate = round(round(float(accuracy_count) / float(total), 4) * 100, 4)
    else:
        accuracy_rate = 0.0
    with open(final_file, 'w') as handle_final:
        handle_final.write("total: " + str(total) + " accuracy_count: "
                           + str(accuracy_count) + " accuracy_rate: "
                           + str(accuracy_rate) + "%\n")
        for result in result_list:
            handle_final.write(result[0] + "\t" + result[1] + "\t" + str(result[2]) + "\n")
def transfer_vec(vec):
    """Convert a sequence of numeric strings to a numpy integer vector.

    Empty strings (produced by repeated whitespace in the source line) are
    skipped.  Returning an ndarray makes element-wise multiplication work
    in compute_cosine_value (list * list would raise TypeError).
    """
    return numpy.array([int(token) for token in vec if token])
def compute_cosine_value(vec_a, vec_b):
    """Cosine similarity cos(a, b) = a.b / (|a|*|b|), rounded to 4 decimals.

    Returns 0 when either vector has zero magnitude.
    """
    dot = float(numpy.dot(vec_a, vec_b))
    magnitude = float(numpy.linalg.norm(vec_a)) * float(numpy.linalg.norm(vec_b))
    if magnitude == 0:
        return 0
    return round(dot / magnitude, 4)
#categorization_file("./text/C00000810.vec")
# Script entry point: processes the first .vec file under ./text at import time.
categorization_files("./text")
| 32.083333 | 138 | 0.698701 |
import os
import sys
import math
from util_tool import *
import numpy
def categorization_files(path):
files = search_directory(path,'vec')
for input_name in files:
categorization_file(input_name)
break
def categorization_file(vec_file):
handle_froms = open(vec_file,'r')
final_file = vec_file.replace('vec','final')
handle_final = open(final_file,'w')
result_list = []
total = 0
for from_line in handle_froms:
from_data = from_line.split()
handle_tos = open(vec_file,'r')
for to_line in handle_tos:
to_data = to_line.split()
if from_data[0] == to_data[0]:
continue
if from_data[0].split('/')[2][0:7] == to_data[0].split('/')[2][0:7]:
total += 1
len_from_data = len(from_data) - 1
len_to_data = len(to_data) - 1
from_vec = transfer_vec(from_data[1:len_from_data])
to_vec = transfer_vec(to_data[1:len_to_data])
cosine_value = compute_cosine_value(from_vec,to_vec)
tmp = [from_data[0],to_data[0],cosine_value]
result_list.append(tmp)
accuracy_count = 0
result_list = sorted(result_list,key=lambda x:x[2],reverse=True)
for result in result_list:
if result[0].split('/')[2][0:7] == result[1].split('/')[2][0:7] and result[2] > 0:
accuracy_count += 1
accuracy_rate = round(round(float(accuracy_count) / float(total),4) * 100 ,4)
handle_final.write("total: " + str(total) + " accuracy_count: " + str(accuracy_count) + " accuracy_rate: " + str(accuracy_rate) + "%\n")
for result in result_list:
handle_final.write(result[0] + "\t" + result[1] + "\t" + str(result[2]) + "\n")
handle_final.close()
def transfer_vec(vec):
vec = [ int (vec) for vec in vec if vec ]
vec = numpy.array(vec)
return vec
def compute_cosine_value(vec_a,vec_b):
# cos(a,b)=a*b/(|a|+|b|)
numerator = numpy.sum(vec_a*vec_b)
denominator = float(numpy.sqrt(sum(numpy.square(vec_a))) * numpy.sqrt(sum(numpy.square(vec_b))))
if 0 == denominator:
return 0
theta = round(numerator / denominator,4)
return theta
#categorization_file("./text/C00000810.vec")
categorization_files("./text")
| true | true |
f737ac9bf0007cd0dd76c2426a0b677304646108 | 2,821 | py | Python | EP2/tests.py | LucasHaug/MAP3121 | 90b69c5db20e6d56c0c3e3dd969d9e41d804e9be | [
"MIT"
] | null | null | null | EP2/tests.py | LucasHaug/MAP3121 | 90b69c5db20e6d56c0c3e3dd969d9e41d804e9be | [
"MIT"
] | null | null | null | EP2/tests.py | LucasHaug/MAP3121 | 90b69c5db20e6d56c0c3e3dd969d9e41d804e9be | [
"MIT"
] | null | null | null | #!/usr/bin/env python3
import numpy as np
from random import random
import crank_nicolson
#################################################
### Functions Definitions
#################################################
def get_data(test_letter):
if test_letter == "a":
ut_array, uk_matrix, x_array, N = test_a()
elif test_letter == "b":
ut_array, uk_matrix, x_array, N = test_b()
elif test_letter == "c":
ut_array, uk_matrix, x_array, N = test_c()
else:
ut_array, uk_matrix, x_array, N = test_d()
return ut_array, uk_matrix, x_array, N
def test_a():
N = 128
# Create heat sources positions array
heat_sources_positions_array = [0.35]
# Calculate uk matrix
uk_matrix, scale_array = crank_nicolson.generate_uk(heat_sources_positions_array, N)
uk_matrix = np.delete(uk_matrix, [0, N], axis=1)
# Calculate ut array
ut_array = np.array(uk_matrix[0]) * 7
# Delete extremes from scale array
scale_array = np.delete(scale_array, [0, N])
return ut_array, uk_matrix, scale_array, N
def test_b():
N = 128
# Create heat sources positions array
heat_sources_positions_array = [0.15, 0.3, 0.7, 0.8]
# Calculate uk matrix
uk_matrix, scale_array = crank_nicolson.generate_uk(heat_sources_positions_array, N)
uk_matrix = np.delete(uk_matrix, [0, N], axis=1)
# Calculate ut array
ut_array = (np.array(uk_matrix[0]) * 2.3 + np.array(uk_matrix[1]) * 3.7 +
np.array(uk_matrix[2]) * 0.3 + np.array(uk_matrix[3]) * 4.2)
# Delete extremes from scale array
scale_array = np.delete(scale_array, [0, N])
return ut_array, uk_matrix, scale_array, N
def test_c():
# Configuration
N = int(input("Insira o valor de N: "))
mesh_size = 2048
mesh_relation = int(mesh_size / N)
test_file_name = "teste.txt"
test_file = open(test_file_name, "r")
file_lines = test_file.readlines()
test_file.close()
# Create heat sources positions array
heat_sources_positions_array = [float(item) for item in (file_lines.pop(0).split())]
# Calculate uk matrix
uk_matrix, scale_array = crank_nicolson.generate_uk(heat_sources_positions_array, N)
uk_matrix = np.delete(uk_matrix, [0, N], axis=1)
# Create ut array
ut_array = np.zeros(N - 1, dtype=float)
for i in range(0, N - 1):
ut_array[i] = file_lines[(i + 1) * mesh_relation]
# Delete extremes from scale array
scale_array = np.delete(scale_array, [0, N])
return ut_array, uk_matrix, scale_array, N
def test_d():
ut_array, uk_matrix, scale_array, N = test_c()
ε = 0.01
for i in range(0, N - 1):
random_num = (random() - 0.5) * 2
ut_array[i] *= (1 + random_num * ε)
return ut_array, uk_matrix, scale_array, N
| 24.318966 | 88 | 0.628501 |
import numpy as np
from random import random
import crank_nicolson
| true | true |
f737ad79cff0ce6f5e5975799c84ba766e23e3f3 | 845 | py | Python | C3_Decorator_Pattern/StarBuzzWithSize/Condiments/Mocha.py | sarada92/Design_Pattern | f817206a6f995bc6f534d7cabb3a290955f37d33 | [
"MIT"
] | 1 | 2022-02-06T15:42:09.000Z | 2022-02-06T15:42:09.000Z | C3_Decorator_Pattern/StarBuzzWithSize/Condiments/Mocha.py | sarada92/Design_Pattern | f817206a6f995bc6f534d7cabb3a290955f37d33 | [
"MIT"
] | null | null | null | C3_Decorator_Pattern/StarBuzzWithSize/Condiments/Mocha.py | sarada92/Design_Pattern | f817206a6f995bc6f534d7cabb3a290955f37d33 | [
"MIT"
] | null | null | null | from C3_Decorator_Pattern.StarBuzzWithSize.Beverages.Beverages import Beverages
from C3_Decorator_Pattern.StarBuzzWithSize.Beverages.Size import Size
from C3_Decorator_Pattern.StarBuzzWithSize.Condiments.Condiments import Condiments
class Mocha(Condiments):
    """Condiment decorator that adds mocha to a wrapped beverage.

    The surcharge depends on the wrapped beverage's size; for a known size
    the running total is printed when cost() is called.
    """

    # Size-dependent mocha surcharge (int/float values mirror the original
    # per-branch additions exactly).
    _SURCHARGE = {Size.TALL: 0.5, Size.GRANDE: 1, Size.VENTI: 2}

    def __init__(self, beverage: Beverages):
        self.beverage = beverage

    def get_description(self):
        return self.beverage.get_description() + ', Mocha'

    def cost(self):
        total = self.beverage.cost()
        extra = self._SURCHARGE.get(self.beverage.get_size())
        if extra is not None:
            # The original printed the total with a float delta in every
            # branch (0.5 / 1.0 / 2.0), so coerce for the printout only.
            print("Mocha cost", total + float(extra))
            total += extra
        return total
| 30.178571 | 82 | 0.631953 | from C3_Decorator_Pattern.StarBuzzWithSize.Beverages.Beverages import Beverages
from C3_Decorator_Pattern.StarBuzzWithSize.Beverages.Size import Size
from C3_Decorator_Pattern.StarBuzzWithSize.Condiments.Condiments import Condiments
class Mocha(Condiments):
def __init__(self, beverage: Beverages):
self.beverage = beverage
def get_description(self):
return self.beverage.get_description() + ', Mocha'
def cost(self):
cost = self.beverage.cost()
size = self.beverage.get_size()
if size == Size.TALL:
print("Mocha cost", cost + 0.5)
cost += 0.5
elif size == Size.GRANDE:
print("Mocha cost", cost + 1.0)
cost += 1
elif size == Size.VENTI:
print("Mocha cost", cost + 2.0)
cost += 2
return cost
| true | true |
f737ade0a9714933c4aad3a91a63848554ae09dd | 1,851 | py | Python | Fusion/modules/Fusion/KHR1/Brains/__init__.py | roadnarrows-robotics/rnr-sdk | aee20c65b49fb3eedf924c5c2ec9f19f4f1a1b29 | [
"MIT"
] | null | null | null | Fusion/modules/Fusion/KHR1/Brains/__init__.py | roadnarrows-robotics/rnr-sdk | aee20c65b49fb3eedf924c5c2ec9f19f4f1a1b29 | [
"MIT"
] | null | null | null | Fusion/modules/Fusion/KHR1/Brains/__init__.py | roadnarrows-robotics/rnr-sdk | aee20c65b49fb3eedf924c5c2ec9f19f4f1a1b29 | [
"MIT"
] | null | null | null | ################################################################################
#
# __init__.py
#
""" KHR-1 Brains Subpackage
Public Modules:
Author: Robin D. Knight
Email: robin.knight@roadnarrowsrobotics.com
URL: http://www.roadnarrowsrobotics.com
Date: 2007.11.28
Copyright (C) 2007. RoadNarrows LLC.
"""
#
# All Rights Reserved
#
# Permission is hereby granted, without written agreement and without
# license or royalty fees, to use, copy, modify, and distribute this
# software and its documentation for any purpose, provided that
# (1) The above copyright notice and the following two paragraphs
# appear in all copies of the source code and (2) redistributions
# including binaries reproduces these notices in the supporting
# documentation. Substantial modifications to this software may be
# copyrighted by their authors and need not follow the licensing terms
# described here, provided that the new terms are clearly indicated in
# all files where they apply.
#
# IN NO EVENT SHALL THE AUTHOR, ROADNARROWS LLC, OR ANY MEMBERS/EMPLOYEES
# OF ROADNARROW LLC OR DISTRIBUTORS OF THIS SOFTWARE BE LIABLE TO ANY
# PARTY FOR DIRECT, INDIRECT, SPECIAL, INCIDENTAL, OR CONSEQUENTIAL
# DAMAGES ARISING OUT OF THE USE OF THIS SOFTWARE AND ITS DOCUMENTATION,
# EVEN IF THE AUTHORS OR ANY OF THE ABOVE PARTIES HAVE BEEN ADVISED OF
# THE POSSIBILITY OF SUCH DAMAGE.
#
# THE AUTHOR AND ROADNARROWS LLC SPECIFICALLY DISCLAIM ANY WARRANTIES,
# INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND
# FITNESS FOR A PARTICULAR PURPOSE. THE SOFTWARE PROVIDED HEREUNDER IS ON AN
# "AS IS" BASIS, AND THE AUTHORS AND DISTRIBUTORS HAVE NO OBLIGATION TO
# PROVIDE MAINTENANCE, SUPPORT, UPDATES, ENHANCEMENTS, OR MODIFICATIONS.
#
################################################################################
__all__ = [
]
| 37.77551 | 80 | 0.709346 | true | true | |
f737b0421678ce0526ad89662eaedf78229a1d21 | 7,911 | py | Python | Submissions/Submission S3.py | tomeberle/Business-analytics | 5051d5a5f096fa451cc5e201f08b4681335c68ae | [
"MIT"
] | null | null | null | Submissions/Submission S3.py | tomeberle/Business-analytics | 5051d5a5f096fa451cc5e201f08b4681335c68ae | [
"MIT"
] | null | null | null | Submissions/Submission S3.py | tomeberle/Business-analytics | 5051d5a5f096fa451cc5e201f08b4681335c68ae | [
"MIT"
] | null | null | null | '''
Exercise 6: Scrape more information about TripAdvisor reviews
url = "https://www.tripadvisor.com/Restaurant_Review-g227613-d3531819-Reviews-Le_Jardin_Napolitain-Jouy_en_Josas_Versailles_Yvelines_Ile_de_France.html"
Please write a code that prints out
review content,
numeric rating,
title,
date,
reviewer's username
of ALL the 10 reviews on the FIRST page of a particular restaurant
'''
#this is Patrick's comment
import csv
import requests
from bs4 import BeautifulSoup
def scrapecontent(url):
    """GET *url* and return a BeautifulSoup document, or None on a non-200
    reply.  The HTTP status code is printed either way (callers rely on the
    console output).
    """
    scrape_response = requests.get(url)
    print(scrape_response.status_code)
    if scrape_response.status_code == 200:
        # Name the parser explicitly: the rest of this file uses
        # 'html.parser', and letting bs4 guess is platform-dependent
        # (and emits GuessedAtParserWarning).
        return BeautifulSoup(scrape_response.text, 'html.parser')
    print('Error accessing url: ', scrape_response.status_code)
    return None
def main():
    """Print content, numeric rating, title, date and author of every review
    on the first page of the restaurant's TripAdvisor listing."""
    url = 'https://www.tripadvisor.com/Restaurant_Review-g227613-d3531819-Reviews-Le_Jardin_Napolitain-Jouy_en_Josas_Versailles_Yvelines_Ile_de_France.html'
    page = scrapecontent(url)
    if not page:
        return
    review_blocks = page.find_all("div", class_="prw_rup prw_reviews_review_resp")
    for index, block in enumerate(review_blocks, start=1):
        print(block)
        print('review number: ', index)
        print('title: ', block.find('span', class_='noQuotes').text)
        print('review content: ', block.find('p', class_='partial_entry').text)
        bubble = block.find('span', class_='ui_bubble_rating')
        # The rating is encoded in the CSS class, e.g. 'bubble_40' -> 4.
        print('numeric rating: ', int(int(bubble['class'][1][7:]) / 10))
        print('date: ', block.find('span', class_='ratingDate')['title'])
        print("reviewer's username: ", block.find('div', class_='info_text pointer_cursor').text)
        print('\n')


main()
Excercise 7: Predict the sentiment (positive, negative, neutral) of review text
url = "https://www.tripadvisor.com/Restaurant_Review-g227613-d3531819-Reviews-Le_Jardin_Napolitain-Jouy_en_Josas_Versailles_Yvelines_Ile_de_France.html"
for ALL the 10 reviews on the FIRST page of a particular restaurant:
# Using the corpus of word sentiment in the word_sentiment.csv file,
# calculate the sentiment of review texts.*
If the sentiment score is positive, the sentiment is positive;
if the sentiment score is negative, the sentiment is negative;
if the sentiment score is zero, the sentiment is neutral.
'''
SENTIMENT_CSV = "/content/word_sentiment.csv"
NEGATIVE_WORDS = ["not", "don't", "doesn't"]
def word_sentiment(word):
    """Look up *word* (case-insensitive) in the sentiment lexicon CSV.

    Returns the stored sentiment value (a string) for a known word and the
    integer 0 for an unknown one; callers pass the result through int().
    """
    target = word.lower()
    with open(SENTIMENT_CSV, 'rt', encoding='utf-8') as senti_data:
        for row in csv.reader(senti_data):
            if row[0] == target:
                return row[1]
    return 0
def sentiment(sentence):
    """Sum the lexicon sentiment of every word in *sentence*.

    A word immediately preceded by a negation word ("not", "don't",
    "doesn't") contributes its negated value.

    Fixes two bugs in the original: ``words_list.index(word)`` located the
    first occurrence of a duplicated word (mis-negating later repeats), and
    the first word consulted index -1 — i.e. the *last* word — as its
    predecessor.
    """
    score = 0
    words_list = sentence.split()
    for position, word in enumerate(words_list):
        value = int(word_sentiment(word))
        if position > 0 and words_list[position - 1] in NEGATIVE_WORDS:
            score -= value
        else:
            score += value
    return score
# Module-level request: the page is fetched once at import time and shared
# by the functions below.
# NOTE(review): importing this module therefore requires network access —
# consider moving the fetch under a main() guard.
scrape_url = "https://www.tripadvisor.com/Restaurant_Review-g227613-d3531819-Reviews-Le_Jardin_Napolitain-Jouy_en_Josas_Versailles_Yvelines_Ile_de_France.html"
response = requests.get(scrape_url)
print(response.status_code)
def review_sentiment():
    """Classify each review on the page as positive/neutral/negative from
    its lexicon sentiment score and print one verdict per review."""
    if response.status_code != 200:
        return
    soup = BeautifulSoup(response.text, 'html.parser')
    print("These are the sentiments of each review: ")
    for count, review in enumerate(soup.find_all('p', class_='partial_entry'), start=1):
        score = sentiment(review.text.lower())
        if score > 0:
            verdict = "is positive"
        elif score == 0:
            verdict = "is neutral"
        else:
            verdict = "is negative"
        print("The sentiment of review ", count, verdict)


review_sentiment()
'''
Exercise 8: Predict the sentiment (positive, negative, neutral) of review text
and compare with the ground truth (the actual review rating)
url = "https://www.tripadvisor.com/Restaurant_Review-g227613-d3531819-Reviews-Le_Jardin_Napolitain-Jouy_en_Josas_Versailles_Yvelines_Ile_de_France.html"
for ALL the 10 reviews on the FIRST page of a particular restaurant:
Using the corpus of word sentiment in the word_sentiment.csv file,
calculate the sentiment of review texts as the predicted sentiment:
If the sentiment score is positive, the sentiment is positive;
if the sentiment score is negative, the sentiment is negative;
if the sentiment score is zero, the sentiment is neutral.
Scrap the review rating of the reviews, and get the ground truth
if the rating is greater than 3, the sentiment is positive;
if the rating is less than 3, the sentiment is negative;
if the rating is equal to 3, the sentiment is neutral.
Question: Compute the prediction accuracy (hit rate) for the 10 reviews, i.e.,
how many times the predictions are correct??
'''
SENTIMENT_CSV = "/content/word_sentiment.csv"
NEGATIVE_WORDS = ["not", "don't", "doesn't"]
def word_sentiment(word):
    """Look up *word* (case-insensitive) in the word_sentiment.csv lexicon.

    Returns the stored sentiment value (a string) for a known word and the
    integer 0 for an unknown one; callers pass the result through int().
    """
    target = word.lower()
    with open(SENTIMENT_CSV, 'rt', encoding='utf-8') as senti_data:
        for row in csv.reader(senti_data):
            if row[0] == target:
                return row[1]
    return 0
def sentiment(sentence):
    """Sum the lexicon sentiment of every word in *sentence*.

    A word immediately preceded by a negation word ("not", "don't",
    "doesn't") contributes its negated value.

    Fixes two bugs in the original: ``words_list.index(word)`` located the
    first occurrence of a duplicated word (mis-negating later repeats), and
    the first word consulted index -1 — i.e. the *last* word — as its
    predecessor.
    """
    score = 0
    words_list = sentence.split()
    for position, word in enumerate(words_list):
        value = int(word_sentiment(word))
        if position > 0 and words_list[position - 1] in NEGATIVE_WORDS:
            score -= value
        else:
            score += value
    return score
# Module-level request: the page is fetched once at import time and shared
# by accuracy() below.
# NOTE(review): importing this module therefore requires network access —
# consider moving the fetch under a main() guard.
scrape_url = "https://www.tripadvisor.com/Restaurant_Review-g227613-d3531819-Reviews-Le_Jardin_Napolitain-Jouy_en_Josas_Versailles_Yvelines_Ile_de_France.html"
response = requests.get(scrape_url)
print(response.status_code)
def accuracy():
    """Compare predicted sentiment (lexicon score of the text) with ground
    truth (the review rating) for every review on the page and print the
    hit rate.

    Prediction: positive/neutral/negative by the sign of the sentiment
    score.  Ground truth: rating > 3 positive, == 3 neutral, < 3 negative.
    """
    if response.status_code != 200:
        # The original fell through and crashed with NameError on
        # rating_list here; bail out instead.
        return
    # Parse the payload once (the original parsed the same text twice).
    soup = BeautifulSoup(response.text, 'html.parser')

    def classify(value):
        # Map a signed score (or rating offset from 3) to a label.
        if value > 0:
            return 'positive'
        if value == 0:
            return 'neutral'
        return 'negative'

    review_list = [
        classify(sentiment(review.text.lower()))
        for review in soup.find_all('p', class_='partial_entry')
    ]
    rating_list = [
        # The rating is encoded in the CSS class, e.g. 'bubble_40' -> 4.
        classify(int(int(rating['class'][1][7:]) / 10) - 3)
        for review in soup.find_all('div', class_='ui_column is-9')
        for rating in review.find_all('span', class_='ui_bubble_rating')
    ]
    if not rating_list:
        # Guard against ZeroDivisionError when no ratings were scraped.
        print('The prediction accuracy is', 0.0)
        return
    matches = sum(1 for truth, predicted in zip(rating_list, review_list)
                  if truth == predicted)
    print('The prediction accuracy is', matches / len(rating_list))


accuracy()
| 35.635135 | 163 | 0.667046 |
import csv
import requests
from bs4 import BeautifulSoup
def scrapecontent(url):
scrape_response = requests.get(url)
print(scrape_response.status_code)
if scrape_response.status_code == 200:
soup = BeautifulSoup(scrape_response.text)
return soup
else:
print('Error accessing url: ', scrape_response.status_code)
return None
def main():
scrape_url = 'https://www.tripadvisor.com/Restaurant_Review-g227613-d3531819-Reviews-Le_Jardin_Napolitain-Jouy_en_Josas_Versailles_Yvelines_Ile_de_France.html'
ret_soup = scrapecontent(scrape_url)
# print(ret_soup.find_all("div", class_="prw_rup prw_reviews_review_resp"))
if ret_soup:
count = 1
for rev_data in ret_soup.find_all("div", class_="prw_rup prw_reviews_review_resp"):
print(rev_data)
print('review number: ', count)
title = rev_data.find('span', class_='noQuotes')
print('title: ', title.text)
review = rev_data.find('p', class_='partial_entry')
print('review content: ', review.text)
rating = rev_data.find('span', class_='ui_bubble_rating')
print('numeric rating: ', int(int(rating['class'][1][7:])/10))
date = rev_data.find('span', class_='ratingDate')
print('date: ', date['title'])
username = rev_data.find('div', class_='info_text pointer_cursor')
print("reviewer's username: ", username.text)
count += 1
print('\n')
main()
SENTIMENT_CSV = "/content/word_sentiment.csv"
NEGATIVE_WORDS = ["not", "don't", "doesn't"]
def word_sentiment(word):
with open(SENTIMENT_CSV, 'rt', encoding='utf-8') as senti_data:
sentiment = csv.reader(senti_data)
for data_row in sentiment:
if data_row[0] == word.lower():
sentiment_val = data_row[1]
return sentiment_val
return 0
def sentiment(sentence):
sentiment = 0
words_list = sentence.split()
for word in words_list:
previous_index = words_list.index(word) - 1
if words_list[previous_index] in NEGATIVE_WORDS:
sentiment = sentiment + -1 * int(word_sentiment(word))
else:
sentiment = sentiment + int(word_sentiment(word))
return sentiment
scrape_url = "https://www.tripadvisor.com/Restaurant_Review-g227613-d3531819-Reviews-Le_Jardin_Napolitain-Jouy_en_Josas_Versailles_Yvelines_Ile_de_France.html"
response = requests.get(scrape_url)
print(response.status_code)
def review_sentiment():
if response.status_code == 200:
soup = BeautifulSoup(response.text, 'html.parser')
count = 1
print("These are the sentiments of each review: ")
for review in soup.find_all('p', class_='partial_entry'):
pure_review = review.text.lower()
review_sentiment = sentiment(pure_review)
if review_sentiment > 0:
print("The sentiment of review ", count, "is positive")
elif review_sentiment == 0:
print("The sentiment of review ", count, "is neutral")
else:
print("The sentiment of review ", count, "is negative")
count += 1
review_sentiment()
# CSV lookup table mapping each known word to its sentiment score.
SENTIMENT_CSV = "/content/word_sentiment.csv"
# Negation words: they invert the score of the word that follows.
NEGATIVE_WORDS = ["not", "don't", "doesn't"]


def word_sentiment(word):
    """Return the CSV-stored score (a string) for *word*, or 0 if unknown."""
    target = word.lower()
    with open(SENTIMENT_CSV, 'rt', encoding='utf-8') as senti_data:
        for row in csv.reader(senti_data):
            if row[0] == target:
                return row[1]
    return 0
def sentiment(sentence):
    """Sum per-word sentiment scores for *sentence*, negating any word
    that directly follows an entry of NEGATIVE_WORDS."""
    total = 0
    words_list = sentence.split()
    for i, word in enumerate(words_list):
        score = int(word_sentiment(word))
        # Bug fix: words_list.index(word) - 1 found the first occurrence
        # of a repeated word and returned -1 for the sentence's first word
        # (wrapping to the last word); use the loop position instead.
        if i > 0 and words_list[i - 1] in NEGATIVE_WORDS:
            total -= score
        else:
            total += score
    return total
# Page to analyse; `response` is fetched once at import time and reused below.
scrape_url = "https://www.tripadvisor.com/Restaurant_Review-g227613-d3531819-Reviews-Le_Jardin_Napolitain-Jouy_en_Josas_Versailles_Yvelines_Ile_de_France.html"
response = requests.get(scrape_url)
print(response.status_code)


def accuracy():
    """Compare predicted review sentiment against the reviewer's bubble
    rating and print the fraction of matching labels."""
    if response.status_code != 200:
        # Bug fix: the old code fell through to zip() with the lists
        # undefined (NameError) when the fetch failed.
        return

    # Parse the document once; the original parsed the same HTML twice.
    soup = BeautifulSoup(response.text, 'html.parser')

    review_list = []
    for review in soup.find_all('p', class_='partial_entry'):
        score = sentiment(review.text.lower())
        if score > 0:
            review_list.append('positive')
        elif score == 0:
            review_list.append('neutral')
        else:
            review_list.append('negative')

    rating_list = []
    for review in soup.find_all('div', class_='ui_column is-9'):
        for rating in review.find_all('span', class_='ui_bubble_rating'):
            # The star rating is encoded in the class name, e.g. bubble_40 -> 4.
            actual_rating = int(int(rating['class'][1][7:]) / 10)
            if actual_rating > 3:
                rating_list.append('positive')
            elif actual_rating == 3:
                rating_list.append('neutral')
            else:
                rating_list.append('negative')

    if not rating_list:
        # Bug fix: guard against ZeroDivisionError when no ratings scraped.
        return
    matches = len([i for i, j in zip(rating_list, review_list) if i == j])
    # Renamed from `accuracy`, which shadowed this function.
    ratio = matches / len(rating_list)
    print('The prediction accuracy is', ratio)


accuracy()
| true | true |
f737b1c85cd2bef4bff4f7967cf88e8033a6f031 | 541 | py | Python | src/models/workspace.py | Mtortolani/slack-backend | 11b1650c111eb163a8ef3bf75a33fb4aeeccf300 | [
"MIT"
] | null | null | null | src/models/workspace.py | Mtortolani/slack-backend | 11b1650c111eb163a8ef3bf75a33fb4aeeccf300 | [
"MIT"
] | null | null | null | src/models/workspace.py | Mtortolani/slack-backend | 11b1650c111eb163a8ef3bf75a33fb4aeeccf300 | [
"MIT"
] | null | null | null | class Workspace:
def __init__(self, name: str = None):
#PK
self.name = name
self.members = []
self.member_ids= []
self.roles = {} #{owners:[user1], administrators:[user5, user8], etc}
self.channels = []
#name
def getName(self):
return self.name
def setName(self, name: str):
self.name = name
#members
def getMembers(self):
return self.members
def addMembers(self, user_id: int):
self.members.add(user_id)
| 23.521739 | 77 | 0.539741 | class Workspace:
def __init__(self, name: str = None):
self.name = name
self.members = []
self.member_ids= []
self.roles = {}
self.channels = []
def getName(self):
return self.name
def setName(self, name: str):
self.name = name
def getMembers(self):
return self.members
def addMembers(self, user_id: int):
self.members.add(user_id)
| true | true |
f737b23216d8b6b6784f9aaad24bb8f9007c3315 | 11,315 | py | Python | tests/core/test_fileio.py | madhukarkm/NeMo | 648c97f076147684bee6aaada209f2f20adcaf5d | [
"Apache-2.0"
] | 4,145 | 2019-09-13T08:29:43.000Z | 2022-03-31T18:31:44.000Z | tests/core/test_fileio.py | madhukarkm/NeMo | 648c97f076147684bee6aaada209f2f20adcaf5d | [
"Apache-2.0"
] | 2,031 | 2019-09-17T16:51:39.000Z | 2022-03-31T23:52:41.000Z | tests/core/test_fileio.py | madhukarkm/NeMo | 648c97f076147684bee6aaada209f2f20adcaf5d | [
"Apache-2.0"
] | 1,041 | 2019-09-13T10:08:21.000Z | 2022-03-30T06:37:38.000Z | # Copyright (c) 2020, NVIDIA CORPORATION. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import os
import tempfile
import numpy as np
import pytest
import torch
from omegaconf import DictConfig, OmegaConf
from nemo.collections.asr.models import EncDecCTCModel
# Optional dependency probe: the Export File Format (EFF) cookbooks are only
# needed by the encrypted save/restore test below; everything else runs
# without them.
try:
    from eff.cookbooks import NeMoCookbook

    _EFF_PRESENT_ = True
except ImportError:
    _EFF_PRESENT_ = False

# A decorator marking the EFF requirement.
requires_eff = pytest.mark.skipif(not _EFF_PRESENT_, reason="Export File Format library required to run test")
@pytest.fixture()
def asr_model():
    """Build a minimal EncDecCTCModel: one-block separable-conv encoder and
    a 28-symbol character vocabulary (space, a-z, apostrophe)."""
    vocabulary = list(" abcdefghijklmnopqrstuvwxyz'")
    jasper_block = {
        'filters': 1024,
        'repeat': 1,
        'kernel': [1],
        'stride': [1],
        'dilation': [1],
        'dropout': 0.0,
        'residual': False,
        'separable': True,
        'se': True,
        'se_context_size': -1,
    }
    preprocessor = {
        'cls': 'nemo.collections.asr.modules.AudioToMelSpectrogramPreprocessor',
        'params': {},
    }
    encoder = {
        'cls': 'nemo.collections.asr.modules.ConvASREncoder',
        'params': {
            'feat_in': 64,
            'activation': 'relu',
            'conv_mask': True,
            'jasper': [jasper_block],
        },
    }
    decoder = {
        'cls': 'nemo.collections.asr.modules.ConvASRDecoder',
        'params': {
            'feat_in': 1024,
            'num_classes': 28,
            'vocabulary': vocabulary,
        },
    }
    model_cfg = DictConfig(
        {
            'preprocessor': DictConfig(preprocessor),
            'encoder': DictConfig(encoder),
            'decoder': DictConfig(decoder),
        }
    )
    return EncDecCTCModel(cfg=model_cfg)
class TestFileIO:
    """Save/restore round-trip tests for EncDecCTCModel: config files,
    .nemo archives (plain and encrypted) and extracted PT checkpoints."""

    @pytest.mark.unit
    def test_to_from_config_file(self, asr_model):
        """Test makes sure that the second instance created with the same
        configuration (BUT NOT checkpoint) has different weights.

        Note: fixed the malformed docstring delimiter (four quotes) of the
        original.
        """
        with tempfile.NamedTemporaryFile() as fp:
            yaml_filename = fp.name
            asr_model.to_config_file(path2yaml_file=yaml_filename)
            next_instance = EncDecCTCModel.from_config_file(path2yaml_file=yaml_filename)

            assert isinstance(next_instance, EncDecCTCModel)
            assert len(next_instance.decoder.vocabulary) == 28
            assert asr_model.num_weights == next_instance.num_weights

            w1 = asr_model.encoder.encoder[0].mconv[0].conv.weight.data.detach().cpu().numpy()
            w2 = next_instance.encoder.encoder[0].mconv[0].conv.weight.data.detach().cpu().numpy()
            # Same architecture, fresh random init -> weights must differ.
            assert not np.array_equal(w1, w2)

    @pytest.mark.unit
    def test_save_restore_from_nemo_file(self, asr_model):
        """Test makes sure that the second instance created from the same
        configuration AND checkpoint has the same weights."""
        with tempfile.NamedTemporaryFile() as fp:
            filename = fp.name

            # Save model (with random artifact).
            with tempfile.NamedTemporaryFile() as artifact:
                asr_model.register_artifact(config_path="abc", src=artifact.name)
                asr_model.save_to(save_path=filename)

            # Restore the model.
            asr_model2 = EncDecCTCModel.restore_from(restore_path=filename)

            assert len(asr_model.decoder.vocabulary) == len(asr_model2.decoder.vocabulary)
            assert asr_model.num_weights == asr_model2.num_weights

            w1 = asr_model.encoder.encoder[0].mconv[0].conv.weight.data.detach().cpu().numpy()
            w2 = asr_model2.encoder.encoder[0].mconv[0].conv.weight.data.detach().cpu().numpy()
            # Restored weights must match the saved ones exactly.
            assert np.array_equal(w1, w2)

    @requires_eff
    @pytest.mark.unit
    def test_eff_save_restore_from_nemo_file_encrypted(self, asr_model):
        """Test makes sure that after encrypted save-restore the model has
        the same weights."""
        with tempfile.NamedTemporaryFile() as fp:
            filename = fp.name

            # Set key - use checkpoint encryption.
            NeMoCookbook.set_encryption_key("test_key")

            # Save model (with random artifact).
            with tempfile.NamedTemporaryFile() as artifact:
                asr_model.register_artifact(config_path="abc", src=artifact.name)
                asr_model.save_to(save_path=filename)

            # Try to restore the encrypted archive (weights) without the encryption key.
            NeMoCookbook.set_encryption_key(None)
            with pytest.raises(PermissionError):
                # Restore the model.
                asr_model2 = EncDecCTCModel.restore_from(restore_path=filename)

            # Restore the model.
            NeMoCookbook.set_encryption_key("test_key")
            asr_model3 = EncDecCTCModel.restore_from(restore_path=filename)
            # Reset encryption so it won't mess up with other save/restore.
            NeMoCookbook.set_encryption_key(None)

            assert asr_model.num_weights == asr_model3.num_weights

    @pytest.mark.unit
    def test_save_restore_from_nemo_file_with_override(self, asr_model, tmpdir):
        """Test makes sure that the second instance created from the same
        configuration AND checkpoint has the same weights.

        Args:
            tmpdir: fixture providing a temporary directory unique to the test invocation.
        """
        # Name of the archive in tmp folder.
        filename = os.path.join(tmpdir, "eff.nemo")

        # NOTE: removed the unused `cwd = os.getcwd()` lookup of the original.
        with tempfile.NamedTemporaryFile(mode='a+') as conf_fp:
            # Create a "random artifact".
            with tempfile.NamedTemporaryFile(mode="w", delete=False) as artifact:
                artifact.write("magic content 42")
            # Remember the filename of the artifact.
            _, artifact_filename = os.path.split(artifact.name)
            # Add artifact to model.
            asr_model.register_artifact(config_path="abc", src=artifact.name)
            # Save model (with "random artifact").
            asr_model.save_to(save_path=filename)

            # Modify config slightly
            cfg = asr_model.cfg
            cfg.encoder.activation = 'swish'
            yaml_cfg = OmegaConf.to_yaml(cfg)
            conf_fp.write(yaml_cfg)
            conf_fp.seek(0)

            # Restore the model.
            asr_model2 = EncDecCTCModel.restore_from(restore_path=filename, override_config_path=conf_fp.name)

            assert len(asr_model.decoder.vocabulary) == len(asr_model2.decoder.vocabulary)
            assert asr_model.num_weights == asr_model2.num_weights

            w1 = asr_model.encoder.encoder[0].mconv[0].conv.weight.data.detach().cpu().numpy()
            w2 = asr_model2.encoder.encoder[0].mconv[0].conv.weight.data.detach().cpu().numpy()
            assert np.array_equal(w1, w2)

            # The overridden config value must survive the round trip.
            assert asr_model2.cfg.encoder.activation == 'swish'

    @pytest.mark.unit
    def test_save_model_level_pt_ckpt(self, asr_model):
        """Extracting a model-level PyTorch checkpoint from a .nemo archive
        must reproduce the saved weights via load_state_dict()."""
        with tempfile.TemporaryDirectory() as ckpt_dir:
            nemo_file = os.path.join(ckpt_dir, 'asr.nemo')
            asr_model.save_to(nemo_file)

            # Save model level PT checkpoint
            asr_model.extract_state_dict_from(nemo_file, ckpt_dir)
            ckpt_path = os.path.join(ckpt_dir, 'model_weights.ckpt')

            assert os.path.exists(ckpt_path)

            # Restore the model.
            asr_model2 = EncDecCTCModel.restore_from(restore_path=nemo_file)

            assert len(asr_model.decoder.vocabulary) == len(asr_model2.decoder.vocabulary)
            assert asr_model.num_weights == asr_model2.num_weights

            # Change weights values
            asr_model2.encoder.encoder[0].mconv[0].conv.weight.data += 1.0

            w1 = asr_model.encoder.encoder[0].mconv[0].conv.weight.data.detach().cpu().numpy()
            w2 = asr_model2.encoder.encoder[0].mconv[0].conv.weight.data.detach().cpu().numpy()
            assert not np.array_equal(w1, w2)

            # Restore from checkpoint
            asr_model2.load_state_dict(torch.load(ckpt_path))

            w1 = asr_model.encoder.encoder[0].mconv[0].conv.weight.data.detach().cpu().numpy()
            w2 = asr_model2.encoder.encoder[0].mconv[0].conv.weight.data.detach().cpu().numpy()
            assert np.array_equal(w1, w2)

    @pytest.mark.unit
    def test_save_module_level_pt_ckpt(self, asr_model):
        """Extracting per-module checkpoints (split_by_module=True) must
        produce one .ckpt per submodule, each loadable on its own."""
        with tempfile.TemporaryDirectory() as ckpt_dir:
            nemo_file = os.path.join(ckpt_dir, 'asr.nemo')
            asr_model.save_to(nemo_file)

            # Save model level PT checkpoint
            asr_model.extract_state_dict_from(nemo_file, ckpt_dir, split_by_module=True)
            encoder_path = os.path.join(ckpt_dir, 'encoder.ckpt')
            decoder_path = os.path.join(ckpt_dir, 'decoder.ckpt')
            preprocessor_path = os.path.join(ckpt_dir, 'preprocessor.ckpt')

            assert os.path.exists(encoder_path)
            assert os.path.exists(decoder_path)
            assert os.path.exists(preprocessor_path)

            # Restore the model.
            asr_model2 = EncDecCTCModel.restore_from(restore_path=nemo_file)

            assert len(asr_model.decoder.vocabulary) == len(asr_model2.decoder.vocabulary)
            assert asr_model.num_weights == asr_model2.num_weights

            # Change weights values
            asr_model2.encoder.encoder[0].mconv[0].conv.weight.data += 1.0

            w1 = asr_model.encoder.encoder[0].mconv[0].conv.weight.data.detach().cpu().numpy()
            w2 = asr_model2.encoder.encoder[0].mconv[0].conv.weight.data.detach().cpu().numpy()
            assert not np.array_equal(w1, w2)

            # Restore from checkpoint
            asr_model2.encoder.load_state_dict(torch.load(encoder_path))

            w1 = asr_model.encoder.encoder[0].mconv[0].conv.weight.data.detach().cpu().numpy()
            w2 = asr_model2.encoder.encoder[0].mconv[0].conv.weight.data.detach().cpu().numpy()
            assert np.array_equal(w1, w2)
| 37.716667 | 114 | 0.60601 |
import os
import tempfile
import numpy as np
import pytest
import torch
from omegaconf import DictConfig, OmegaConf
from nemo.collections.asr.models import EncDecCTCModel
try:
from eff.cookbooks import NeMoCookbook
_EFF_PRESENT_ = True
except ImportError:
_EFF_PRESENT_ = False
requires_eff = pytest.mark.skipif(not _EFF_PRESENT_, reason="Export File Format library required to run test")
@pytest.fixture()
def asr_model():
preprocessor = {'cls': 'nemo.collections.asr.modules.AudioToMelSpectrogramPreprocessor', 'params': dict({})}
encoder = {
'cls': 'nemo.collections.asr.modules.ConvASREncoder',
'params': {
'feat_in': 64,
'activation': 'relu',
'conv_mask': True,
'jasper': [
{
'filters': 1024,
'repeat': 1,
'kernel': [1],
'stride': [1],
'dilation': [1],
'dropout': 0.0,
'residual': False,
'separable': True,
'se': True,
'se_context_size': -1,
}
],
},
}
decoder = {
'cls': 'nemo.collections.asr.modules.ConvASRDecoder',
'params': {
'feat_in': 1024,
'num_classes': 28,
'vocabulary': [
' ',
'a',
'b',
'c',
'd',
'e',
'f',
'g',
'h',
'i',
'j',
'k',
'l',
'm',
'n',
'o',
'p',
'q',
'r',
's',
't',
'u',
'v',
'w',
'x',
'y',
'z',
"'",
],
},
}
modelConfig = DictConfig(
{'preprocessor': DictConfig(preprocessor), 'encoder': DictConfig(encoder), 'decoder': DictConfig(decoder)}
)
model_instance = EncDecCTCModel(cfg=modelConfig)
return model_instance
class TestFileIO:
@pytest.mark.unit
def test_to_from_config_file(self, asr_model):
with tempfile.NamedTemporaryFile() as fp:
yaml_filename = fp.name
asr_model.to_config_file(path2yaml_file=yaml_filename)
next_instance = EncDecCTCModel.from_config_file(path2yaml_file=yaml_filename)
assert isinstance(next_instance, EncDecCTCModel)
assert len(next_instance.decoder.vocabulary) == 28
assert asr_model.num_weights == next_instance.num_weights
w1 = asr_model.encoder.encoder[0].mconv[0].conv.weight.data.detach().cpu().numpy()
w2 = next_instance.encoder.encoder[0].mconv[0].conv.weight.data.detach().cpu().numpy()
assert not np.array_equal(w1, w2)
@pytest.mark.unit
def test_save_restore_from_nemo_file(self, asr_model):
with tempfile.NamedTemporaryFile() as fp:
filename = fp.name
# Save model (with random artifact).
with tempfile.NamedTemporaryFile() as artifact:
asr_model.register_artifact(config_path="abc", src=artifact.name)
asr_model.save_to(save_path=filename)
# Restore the model.
asr_model2 = EncDecCTCModel.restore_from(restore_path=filename)
assert len(asr_model.decoder.vocabulary) == len(asr_model2.decoder.vocabulary)
assert asr_model.num_weights == asr_model2.num_weights
w1 = asr_model.encoder.encoder[0].mconv[0].conv.weight.data.detach().cpu().numpy()
w2 = asr_model2.encoder.encoder[0].mconv[0].conv.weight.data.detach().cpu().numpy()
assert np.array_equal(w1, w2)
@requires_eff
@pytest.mark.unit
def test_eff_save_restore_from_nemo_file_encrypted(self, asr_model):
with tempfile.NamedTemporaryFile() as fp:
filename = fp.name
# Set key - use checkpoint encryption.
NeMoCookbook.set_encryption_key("test_key")
# Save model (with random artifact).
with tempfile.NamedTemporaryFile() as artifact:
asr_model.register_artifact(config_path="abc", src=artifact.name)
asr_model.save_to(save_path=filename)
# Try to restore the encrypted archive (weights) without the encryption key.
NeMoCookbook.set_encryption_key(None)
with pytest.raises(PermissionError):
# Restore the model.
asr_model2 = EncDecCTCModel.restore_from(restore_path=filename)
# Restore the model.
NeMoCookbook.set_encryption_key("test_key")
asr_model3 = EncDecCTCModel.restore_from(restore_path=filename)
# Reset encryption so it won't mess up with other save/restore.
NeMoCookbook.set_encryption_key(None)
assert asr_model.num_weights == asr_model3.num_weights
@pytest.mark.unit
def test_save_restore_from_nemo_file_with_override(self, asr_model, tmpdir):
filename = os.path.join(tmpdir, "eff.nemo")
cwd = os.getcwd()
with tempfile.NamedTemporaryFile(mode='a+') as conf_fp:
with tempfile.NamedTemporaryFile(mode="w", delete=False) as artifact:
artifact.write("magic content 42")
_, artifact_filename = os.path.split(artifact.name)
asr_model.register_artifact(config_path="abc", src=artifact.name)
asr_model.save_to(save_path=filename)
cfg = asr_model.cfg
cfg.encoder.activation = 'swish'
yaml_cfg = OmegaConf.to_yaml(cfg)
conf_fp.write(yaml_cfg)
conf_fp.seek(0)
asr_model2 = EncDecCTCModel.restore_from(restore_path=filename, override_config_path=conf_fp.name)
assert len(asr_model.decoder.vocabulary) == len(asr_model2.decoder.vocabulary)
assert asr_model.num_weights == asr_model2.num_weights
w1 = asr_model.encoder.encoder[0].mconv[0].conv.weight.data.detach().cpu().numpy()
w2 = asr_model2.encoder.encoder[0].mconv[0].conv.weight.data.detach().cpu().numpy()
assert np.array_equal(w1, w2)
assert asr_model2.cfg.encoder.activation == 'swish'
@pytest.mark.unit
def test_save_model_level_pt_ckpt(self, asr_model):
with tempfile.TemporaryDirectory() as ckpt_dir:
nemo_file = os.path.join(ckpt_dir, 'asr.nemo')
asr_model.save_to(nemo_file)
asr_model.extract_state_dict_from(nemo_file, ckpt_dir)
ckpt_path = os.path.join(ckpt_dir, 'model_weights.ckpt')
assert os.path.exists(ckpt_path)
asr_model2 = EncDecCTCModel.restore_from(restore_path=nemo_file)
assert len(asr_model.decoder.vocabulary) == len(asr_model2.decoder.vocabulary)
assert asr_model.num_weights == asr_model2.num_weights
asr_model2.encoder.encoder[0].mconv[0].conv.weight.data += 1.0
w1 = asr_model.encoder.encoder[0].mconv[0].conv.weight.data.detach().cpu().numpy()
w2 = asr_model2.encoder.encoder[0].mconv[0].conv.weight.data.detach().cpu().numpy()
assert not np.array_equal(w1, w2)
asr_model2.load_state_dict(torch.load(ckpt_path))
w1 = asr_model.encoder.encoder[0].mconv[0].conv.weight.data.detach().cpu().numpy()
w2 = asr_model2.encoder.encoder[0].mconv[0].conv.weight.data.detach().cpu().numpy()
assert np.array_equal(w1, w2)
@pytest.mark.unit
def test_save_module_level_pt_ckpt(self, asr_model):
with tempfile.TemporaryDirectory() as ckpt_dir:
nemo_file = os.path.join(ckpt_dir, 'asr.nemo')
asr_model.save_to(nemo_file)
asr_model.extract_state_dict_from(nemo_file, ckpt_dir, split_by_module=True)
encoder_path = os.path.join(ckpt_dir, 'encoder.ckpt')
decoder_path = os.path.join(ckpt_dir, 'decoder.ckpt')
preprocessor_path = os.path.join(ckpt_dir, 'preprocessor.ckpt')
assert os.path.exists(encoder_path)
assert os.path.exists(decoder_path)
assert os.path.exists(preprocessor_path)
asr_model2 = EncDecCTCModel.restore_from(restore_path=nemo_file)
assert len(asr_model.decoder.vocabulary) == len(asr_model2.decoder.vocabulary)
assert asr_model.num_weights == asr_model2.num_weights
asr_model2.encoder.encoder[0].mconv[0].conv.weight.data += 1.0
w1 = asr_model.encoder.encoder[0].mconv[0].conv.weight.data.detach().cpu().numpy()
w2 = asr_model2.encoder.encoder[0].mconv[0].conv.weight.data.detach().cpu().numpy()
assert not np.array_equal(w1, w2)
asr_model2.encoder.load_state_dict(torch.load(encoder_path))
w1 = asr_model.encoder.encoder[0].mconv[0].conv.weight.data.detach().cpu().numpy()
w2 = asr_model2.encoder.encoder[0].mconv[0].conv.weight.data.detach().cpu().numpy()
assert np.array_equal(w1, w2)
| true | true |
f737b3caeee0ca40e8c1f398ae9b0c41b9ee4c20 | 9,682 | py | Python | google/cloud/bigquery/reservation_v1/services/reservation_service/pagers.py | shollyman/python-bigquery-reservation | f65f8c2d3f1e7a259f2a474e7b6895a27a25c22a | [
"Apache-2.0"
] | null | null | null | google/cloud/bigquery/reservation_v1/services/reservation_service/pagers.py | shollyman/python-bigquery-reservation | f65f8c2d3f1e7a259f2a474e7b6895a27a25c22a | [
"Apache-2.0"
] | null | null | null | google/cloud/bigquery/reservation_v1/services/reservation_service/pagers.py | shollyman/python-bigquery-reservation | f65f8c2d3f1e7a259f2a474e7b6895a27a25c22a | [
"Apache-2.0"
] | null | null | null | # -*- coding: utf-8 -*-
# Copyright 2020 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
from typing import Any, Callable, Iterable
from google.cloud.bigquery.reservation_v1.types import reservation
class ListReservationsPager:
    """Pager for ``ListReservations`` requests.

    Wraps an initial :class:`~.reservation.ListReservationsResponse`; when
    iterated it transparently issues follow-up ``ListReservations`` calls
    and yields the individual ``reservations`` items.  Attribute access is
    proxied to the most recently fetched response.
    """

    def __init__(
        self,
        method: Callable[
            [reservation.ListReservationsRequest], reservation.ListReservationsResponse
        ],
        request: reservation.ListReservationsRequest,
        response: reservation.ListReservationsResponse,
    ):
        """Instantiate the pager.

        Args:
            method (Callable): The API call used to fetch further pages.
            request (:class:`~.reservation.ListReservationsRequest`):
                The initial request object.
            response (:class:`~.reservation.ListReservationsResponse`):
                The initial response object.
        """
        self._method = method
        self._request = reservation.ListReservationsRequest(request)
        self._response = response

    def __getattr__(self, name: str) -> Any:
        # Delegate unknown attributes to the latest response.
        return getattr(self._response, name)

    @property
    def pages(self) -> Iterable[reservation.ListReservationsResponse]:
        """Yield each response page, fetching more while a token remains."""
        page = self._response
        yield page
        while page.next_page_token:
            self._request.page_token = page.next_page_token
            page = self._method(self._request)
            self._response = page
            yield page

    def __iter__(self) -> Iterable[reservation.Reservation]:
        return (item for page in self.pages for item in page.reservations)

    def __repr__(self) -> str:
        return "{0}<{1!r}>".format(type(self).__name__, self._response)
class ListCapacityCommitmentsPager:
    """Pager for ``ListCapacityCommitments`` requests.

    Wraps an initial :class:`~.reservation.ListCapacityCommitmentsResponse`;
    when iterated it transparently issues follow-up calls and yields the
    individual ``capacity_commitments`` items.  Attribute access is proxied
    to the most recently fetched response.
    """

    def __init__(
        self,
        method: Callable[
            [reservation.ListCapacityCommitmentsRequest],
            reservation.ListCapacityCommitmentsResponse,
        ],
        request: reservation.ListCapacityCommitmentsRequest,
        response: reservation.ListCapacityCommitmentsResponse,
    ):
        """Instantiate the pager.

        Args:
            method (Callable): The API call used to fetch further pages.
            request (:class:`~.reservation.ListCapacityCommitmentsRequest`):
                The initial request object.
            response (:class:`~.reservation.ListCapacityCommitmentsResponse`):
                The initial response object.
        """
        self._method = method
        self._request = reservation.ListCapacityCommitmentsRequest(request)
        self._response = response

    def __getattr__(self, name: str) -> Any:
        # Delegate unknown attributes to the latest response.
        return getattr(self._response, name)

    @property
    def pages(self) -> Iterable[reservation.ListCapacityCommitmentsResponse]:
        """Yield each response page, fetching more while a token remains."""
        page = self._response
        yield page
        while page.next_page_token:
            self._request.page_token = page.next_page_token
            page = self._method(self._request)
            self._response = page
            yield page

    def __iter__(self) -> Iterable[reservation.CapacityCommitment]:
        return (item for page in self.pages for item in page.capacity_commitments)

    def __repr__(self) -> str:
        return "{0}<{1!r}>".format(type(self).__name__, self._response)
class ListAssignmentsPager:
    """Pager for ``ListAssignments`` requests.

    Wraps an initial :class:`~.reservation.ListAssignmentsResponse`; when
    iterated it transparently issues follow-up ``ListAssignments`` calls
    and yields the individual ``assignments`` items.  Attribute access is
    proxied to the most recently fetched response.
    """

    def __init__(
        self,
        method: Callable[
            [reservation.ListAssignmentsRequest], reservation.ListAssignmentsResponse
        ],
        request: reservation.ListAssignmentsRequest,
        response: reservation.ListAssignmentsResponse,
    ):
        """Instantiate the pager.

        Args:
            method (Callable): The API call used to fetch further pages.
            request (:class:`~.reservation.ListAssignmentsRequest`):
                The initial request object.
            response (:class:`~.reservation.ListAssignmentsResponse`):
                The initial response object.
        """
        self._method = method
        self._request = reservation.ListAssignmentsRequest(request)
        self._response = response

    def __getattr__(self, name: str) -> Any:
        # Delegate unknown attributes to the latest response.
        return getattr(self._response, name)

    @property
    def pages(self) -> Iterable[reservation.ListAssignmentsResponse]:
        """Yield each response page, fetching more while a token remains."""
        page = self._response
        yield page
        while page.next_page_token:
            self._request.page_token = page.next_page_token
            page = self._method(self._request)
            self._response = page
            yield page

    def __iter__(self) -> Iterable[reservation.Assignment]:
        return (item for page in self.pages for item in page.assignments)

    def __repr__(self) -> str:
        return "{0}<{1!r}>".format(type(self).__name__, self._response)
class SearchAssignmentsPager:
    """Pager for ``SearchAssignments`` requests.

    Wraps an initial :class:`~.reservation.SearchAssignmentsResponse`; when
    iterated it transparently issues follow-up ``SearchAssignments`` calls
    and yields the individual ``assignments`` items.  Attribute access is
    proxied to the most recently fetched response.
    """

    def __init__(
        self,
        method: Callable[
            [reservation.SearchAssignmentsRequest],
            reservation.SearchAssignmentsResponse,
        ],
        request: reservation.SearchAssignmentsRequest,
        response: reservation.SearchAssignmentsResponse,
    ):
        """Instantiate the pager.

        Args:
            method (Callable): The API call used to fetch further pages.
            request (:class:`~.reservation.SearchAssignmentsRequest`):
                The initial request object.
            response (:class:`~.reservation.SearchAssignmentsResponse`):
                The initial response object.
        """
        self._method = method
        self._request = reservation.SearchAssignmentsRequest(request)
        self._response = response

    def __getattr__(self, name: str) -> Any:
        # Delegate unknown attributes to the latest response.
        return getattr(self._response, name)

    @property
    def pages(self) -> Iterable[reservation.SearchAssignmentsResponse]:
        """Yield each response page, fetching more while a token remains."""
        page = self._response
        yield page
        while page.next_page_token:
            self._request.page_token = page.next_page_token
            page = self._method(self._request)
            self._response = page
            yield page

    def __iter__(self) -> Iterable[reservation.Assignment]:
        return (item for page in self.pages for item in page.assignments)

    def __repr__(self) -> str:
        return "{0}<{1!r}>".format(type(self).__name__, self._response)
| 37.382239 | 87 | 0.679715 |
from typing import Any, Callable, Iterable
from google.cloud.bigquery.reservation_v1.types import reservation
class ListReservationsPager:
def __init__(
self,
method: Callable[
[reservation.ListReservationsRequest], reservation.ListReservationsResponse
],
request: reservation.ListReservationsRequest,
response: reservation.ListReservationsResponse,
):
self._method = method
self._request = reservation.ListReservationsRequest(request)
self._response = response
def __getattr__(self, name: str) -> Any:
return getattr(self._response, name)
@property
def pages(self) -> Iterable[reservation.ListReservationsResponse]:
yield self._response
while self._response.next_page_token:
self._request.page_token = self._response.next_page_token
self._response = self._method(self._request)
yield self._response
def __iter__(self) -> Iterable[reservation.Reservation]:
for page in self.pages:
yield from page.reservations
def __repr__(self) -> str:
return "{0}<{1!r}>".format(self.__class__.__name__, self._response)
class ListCapacityCommitmentsPager:
def __init__(
self,
method: Callable[
[reservation.ListCapacityCommitmentsRequest],
reservation.ListCapacityCommitmentsResponse,
],
request: reservation.ListCapacityCommitmentsRequest,
response: reservation.ListCapacityCommitmentsResponse,
):
self._method = method
self._request = reservation.ListCapacityCommitmentsRequest(request)
self._response = response
def __getattr__(self, name: str) -> Any:
return getattr(self._response, name)
@property
def pages(self) -> Iterable[reservation.ListCapacityCommitmentsResponse]:
yield self._response
while self._response.next_page_token:
self._request.page_token = self._response.next_page_token
self._response = self._method(self._request)
yield self._response
def __iter__(self) -> Iterable[reservation.CapacityCommitment]:
for page in self.pages:
yield from page.capacity_commitments
def __repr__(self) -> str:
return "{0}<{1!r}>".format(self.__class__.__name__, self._response)
class ListAssignmentsPager:
def __init__(
self,
method: Callable[
[reservation.ListAssignmentsRequest], reservation.ListAssignmentsResponse
],
request: reservation.ListAssignmentsRequest,
response: reservation.ListAssignmentsResponse,
):
self._method = method
self._request = reservation.ListAssignmentsRequest(request)
self._response = response
def __getattr__(self, name: str) -> Any:
return getattr(self._response, name)
@property
def pages(self) -> Iterable[reservation.ListAssignmentsResponse]:
yield self._response
while self._response.next_page_token:
self._request.page_token = self._response.next_page_token
self._response = self._method(self._request)
yield self._response
def __iter__(self) -> Iterable[reservation.Assignment]:
for page in self.pages:
yield from page.assignments
def __repr__(self) -> str:
return "{0}<{1!r}>".format(self.__class__.__name__, self._response)
class SearchAssignmentsPager:
    """Pager for iterating through ``search_assignments`` results.

    Wraps an initial :class:`reservation.SearchAssignmentsResponse` and, via
    the ``pages`` property or plain iteration, transparently requests any
    follow-up pages by re-issuing the stored request with each
    ``next_page_token``.  Any other attribute access is proxied to the most
    recent response.
    """

    def __init__(
        self,
        method: Callable[
            [reservation.SearchAssignmentsRequest],
            reservation.SearchAssignmentsResponse,
        ],
        request: reservation.SearchAssignmentsRequest,
        response: reservation.SearchAssignmentsResponse,
    ):
        """Store the transport callable, a copy of the request, and the first response."""
        self._method = method
        self._request = reservation.SearchAssignmentsRequest(request)
        self._response = response

    def __getattr__(self, name: str) -> Any:
        # Unknown attributes fall through to the latest response.
        return getattr(self._response, name)

    @property
    def pages(self) -> Iterable[reservation.SearchAssignmentsResponse]:
        """Lazily yield each ``SearchAssignmentsResponse`` page."""
        page = self._response
        yield page
        while page.next_page_token:
            self._request.page_token = page.next_page_token
            page = self._method(self._request)
            self._response = page
            yield page

    def __iter__(self) -> Iterable[reservation.Assignment]:
        """Iterate over every ``Assignment`` across all pages."""
        for response in self.pages:
            for assignment in response.assignments:
                yield assignment

    def __repr__(self) -> str:
        return "{0}<{1!r}>".format(type(self).__name__, self._response)
| true | true |
f737b7c98d2bcbb0c3045657d24aa21a13df6ece | 2,194 | py | Python | huaweicloud-sdk-iam/huaweicloudsdkiam/v3/model/create_binding_device_response.py | githubmilesma/huaweicloud-sdk-python-v3 | 9d9449ed68a609ca65f0aa50b5b2a1c28445bf03 | [
"Apache-2.0"
] | 1 | 2021-04-16T07:59:28.000Z | 2021-04-16T07:59:28.000Z | huaweicloud-sdk-iam/huaweicloudsdkiam/v3/model/create_binding_device_response.py | Lencof/huaweicloud-sdk-python-v3 | d13dc4e2830a83e295be6e4de021999b3376e34e | [
"Apache-2.0"
] | null | null | null | huaweicloud-sdk-iam/huaweicloudsdkiam/v3/model/create_binding_device_response.py | Lencof/huaweicloud-sdk-python-v3 | d13dc4e2830a83e295be6e4de021999b3376e34e | [
"Apache-2.0"
] | 1 | 2022-01-17T02:24:18.000Z | 2022-01-17T02:24:18.000Z | # coding: utf-8
import pprint
import re
import six
from huaweicloudsdkcore.sdk_response import SdkResponse
class CreateBindingDeviceResponse(SdkResponse):
    """Response model for the CreateBindingDevice API call.

    Attributes:
        openapi_types (dict): maps attribute name -> attribute type.
        attribute_map (dict): maps attribute name -> JSON key in the definition.
    """

    # Attribute names whose values must be masked in to_dict() output.
    sensitive_list = []

    openapi_types = {
    }

    attribute_map = {
    }

    def __init__(self):
        """CreateBindingDeviceResponse - a model defined in huaweicloud sdk"""
        super().__init__()
        self.discriminator = None

    def to_dict(self):
        """Returns the model properties as a dict.

        Nested models (anything exposing ``to_dict``) are serialised
        recursively, both inside lists and dict values; sensitive scalar
        attributes are replaced by ``"****"``.
        """
        result = {}
        for name in six.iterkeys(self.openapi_types):
            value = getattr(self, name)
            if isinstance(value, list):
                result[name] = [
                    item.to_dict() if hasattr(item, "to_dict") else item
                    for item in value
                ]
            elif hasattr(value, "to_dict"):
                result[name] = value.to_dict()
            elif isinstance(value, dict):
                result[name] = {
                    key: val.to_dict() if hasattr(val, "to_dict") else val
                    for key, val in value.items()
                }
            elif name in self.sensitive_list:
                # Mask secrets so they never leak through repr/logging.
                result[name] = "****"
            else:
                result[name] = value
        return result

    def to_str(self):
        """Returns the string representation of the model."""
        return pprint.pformat(self.to_dict())

    def __repr__(self):
        """For `print` and `pprint`"""
        return self.to_str()

    def __eq__(self, other):
        """Returns true if both objects are equal"""
        return isinstance(other, CreateBindingDeviceResponse) and self.__dict__ == other.__dict__

    def __ne__(self, other):
        """Returns true if both objects are not equal"""
        return not self == other
| true | true |
f737b8065f4c2bf05f757290c8a3988ddb4eac32 | 3,071 | py | Python | sample-demo/venv/Lib/site-packages/rx/core/operators/merge_scan.py | rupc/bsp-protos | 58833e7ab9ff53f3633708fb5f95edfdd152c5ea | [
"Apache-2.0"
] | null | null | null | sample-demo/venv/Lib/site-packages/rx/core/operators/merge_scan.py | rupc/bsp-protos | 58833e7ab9ff53f3633708fb5f95edfdd152c5ea | [
"Apache-2.0"
] | null | null | null | sample-demo/venv/Lib/site-packages/rx/core/operators/merge_scan.py | rupc/bsp-protos | 58833e7ab9ff53f3633708fb5f95edfdd152c5ea | [
"Apache-2.0"
] | null | null | null | from typing import Any, Callable
from rx import defer, from_future, of
from rx.core import Observable
from rx.core.typing import Accumulator
from rx.disposable import CompositeDisposable, SingleAssignmentDisposable
from rx.internal.concurrency import synchronized
from rx.internal.utils import NotSet, is_future
def _merge_scan(accumulator: Accumulator, seed: Any = NotSet) -> Callable[[Observable], Observable]:
    """Return a partially applied merge_scan operator.

    The accumulator maps (state, value) to an *observable* of new states;
    each state it emits is both forwarded downstream and used as the input
    state for the next source value.  Accumulations run one at a time, in
    source order.
    """
    def merge_scan(source: Observable) -> Observable:
        """Partially applied merge_scan operator.
        Applies an accumulator function, which returns an observable sequence,
        over an observable sequence and returns each intermediate result.
        Examples:
            >>> scanned = merge_scan(source)
        Args:
            source: The observable source to scan.
        Returns:
            An observable sequence containing the accumulated values.
        """
        def subscribe(observer, scheduler=None):
            # Single-element lists act as mutable cells so the nested
            # closures below can rebind shared state.
            accumulator_value = [seed]
            # active: an accumulation observable is currently in flight.
            active = [False]
            group = CompositeDisposable()
            # is_stopped: the source itself has completed.
            is_stopped = [False]
            # Accumulations waiting for the active one to finish.
            queue = []
            def subscribe(xs):
                # Subscribe to one accumulation observable; its emissions
                # update the running state and go to the downstream observer.
                subscription = SingleAssignmentDisposable()
                group.add(subscription)
                @synchronized(source.lock)
                def on_next(next_accumulator_value):
                    accumulator_value[0] = next_accumulator_value
                    observer.on_next(next_accumulator_value)
                @synchronized(source.lock)
                def on_completed():
                    group.remove(subscription)
                    # Start the next queued accumulation, if any; otherwise go
                    # idle and complete downstream if the source already ended.
                    if queue:
                        s = queue.pop(0)
                        subscribe(s)
                    else:
                        active[0] = False
                        if is_stopped[0]:
                            observer.on_completed()
                on_error = synchronized(source.lock)(observer.on_error)
                subscription.disposable = xs.subscribe_(on_next, on_error, on_completed, scheduler)
            def on_next(value):
                def accumulate():
                    # Without a seed, the first value passes through unchanged.
                    has_accumulator_value = accumulator_value[0] is not NotSet
                    if has_accumulator_value:
                        acc_source = accumulator(accumulator_value[0], value)
                        # Accumulator may return an awaitable; wrap it.
                        return from_future(acc_source) if is_future(acc_source) else acc_source
                    else:
                        return of(value)
                # defer() postpones reading accumulator_value until this
                # accumulation is actually subscribed (it may sit in queue).
                accumulator_source = defer(lambda _: accumulate())
                # Serialize accumulations: only one runs at a time.
                if not active[0]:
                    active[0] = True
                    subscribe(accumulator_source)
                else:
                    queue.append(accumulator_source)
            def on_completed():
                is_stopped[0] = True
                # Complete immediately only when no accumulation is pending;
                # otherwise the last accumulation's on_completed will do it.
                if not active[0]:
                    observer.on_completed()
            group.add(source.subscribe_(on_next, observer.on_error, on_completed, scheduler))
            return group
        return Observable(subscribe)
    return merge_scan
| 36.559524 | 100 | 0.569196 | from typing import Any, Callable
from rx import defer, from_future, of
from rx.core import Observable
from rx.core.typing import Accumulator
from rx.disposable import CompositeDisposable, SingleAssignmentDisposable
from rx.internal.concurrency import synchronized
from rx.internal.utils import NotSet, is_future
def _merge_scan(accumulator: Accumulator, seed: Any = NotSet) -> Callable[[Observable], Observable]:
    """Return a partially applied merge_scan operator.

    The accumulator maps (state, value) to an *observable* of new states;
    each emitted state is forwarded downstream and feeds the next step.
    """
    def merge_scan(source: Observable) -> Observable:
        """Scan *source* with the observable-returning accumulator,
        emitting every intermediate accumulated value."""
        def subscribe(observer, scheduler=None):
            # Single-element lists are mutable cells for the closures below.
            accumulator_value = [seed]
            active = [False]
            group = CompositeDisposable()
            is_stopped = [False]
            queue = []
            def subscribe(xs):
                # Run one accumulation observable; forward its emissions.
                subscription = SingleAssignmentDisposable()
                group.add(subscription)
                @synchronized(source.lock)
                def on_next(next_accumulator_value):
                    accumulator_value[0] = next_accumulator_value
                    observer.on_next(next_accumulator_value)
                @synchronized(source.lock)
                def on_completed():
                    group.remove(subscription)
                    # Drain the queue; complete downstream once idle and the
                    # source has already stopped.
                    if queue:
                        s = queue.pop(0)
                        subscribe(s)
                    else:
                        active[0] = False
                        if is_stopped[0]:
                            observer.on_completed()
                on_error = synchronized(source.lock)(observer.on_error)
                subscription.disposable = xs.subscribe_(on_next, on_error, on_completed, scheduler)
            def on_next(value):
                def accumulate():
                    # Without a seed, the first value passes through as-is.
                    has_accumulator_value = accumulator_value[0] is not NotSet
                    if has_accumulator_value:
                        acc_source = accumulator(accumulator_value[0], value)
                        return from_future(acc_source) if is_future(acc_source) else acc_source
                    else:
                        return of(value)
                # defer() reads accumulator_value only at subscription time.
                accumulator_source = defer(lambda _: accumulate())
                # Serialize: one accumulation in flight, the rest queued.
                if not active[0]:
                    active[0] = True
                    subscribe(accumulator_source)
                else:
                    queue.append(accumulator_source)
            def on_completed():
                is_stopped[0] = True
                if not active[0]:
                    observer.on_completed()
            group.add(source.subscribe_(on_next, observer.on_error, on_completed, scheduler))
            return group
        return Observable(subscribe)
    return merge_scan
| true | true |
f737b98756fe61b6b9a43df10824c321be9da7d3 | 28,010 | py | Python | discord/channel.py | Werseter/discord.py | 00a659c6526b2445162b52eaf970adbd22c6d35d | [
"MIT"
] | null | null | null | discord/channel.py | Werseter/discord.py | 00a659c6526b2445162b52eaf970adbd22c6d35d | [
"MIT"
] | null | null | null | discord/channel.py | Werseter/discord.py | 00a659c6526b2445162b52eaf970adbd22c6d35d | [
"MIT"
] | null | null | null | # -*- coding: utf-8 -*-
"""
The MIT License (MIT)
Copyright (c) 2015-2017 Rapptz
Permission is hereby granted, free of charge, to any person obtaining a
copy of this software and associated documentation files (the "Software"),
to deal in the Software without restriction, including without limitation
the rights to use, copy, modify, merge, publish, distribute, sublicense,
and/or sell copies of the Software, and to permit persons to whom the
Software is furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
DEALINGS IN THE SOFTWARE.
"""
from .permissions import Permissions
from .enums import ChannelType, try_enum
from .mixins import Hashable
from . import utils
from .errors import ClientException, NoMoreItems
from .webhook import Webhook
import discord.abc
import time
import asyncio
__all__ = ['TextChannel', 'VoiceChannel', 'DMChannel', 'CategoryChannel', 'GroupChannel', '_channel_factory']
async def _single_delete_strategy(messages):
    """Delete *messages* one API call at a time (fallback when bulk delete is unavailable)."""
    for message in messages:
        await message.delete()
class TextChannel(discord.abc.Messageable, discord.abc.GuildChannel, Hashable):
    """Represents a Discord guild text channel.
    .. container:: operations
        .. describe:: x == y
            Checks if two channels are equal.
        .. describe:: x != y
            Checks if two channels are not equal.
        .. describe:: hash(x)
            Returns the channel's hash.
        .. describe:: str(x)
            Returns the channel's name.
    Attributes
    -----------
    name: :class:`str`
        The channel name.
    guild: :class:`Guild`
        The guild the channel belongs to.
    id: :class:`int`
        The channel ID.
    category_id: :class:`int`
        The category channel ID this channel belongs to.
    topic: Optional[:class:`str`]
        The channel's topic. None if it doesn't exist.
    position: :class:`int`
        The position in the channel list. This is a number that starts at 0. e.g. the
        top channel is position 0.
    """
    __slots__ = ('name', 'id', 'guild', 'topic', '_state', 'nsfw',
                 'category_id', 'position', '_overwrites')
    def __init__(self, *, state, guild, data):
        self._state = state
        self.id = int(data['id'])
        # Remaining attributes are filled by _update (also reused for edits).
        self._update(guild, data)
    def __repr__(self):
        return '<TextChannel id={0.id} name={0.name!r} position={0.position}>'.format(self)
    def _update(self, guild, data):
        # Refresh all mutable state from a raw gateway/HTTP channel payload.
        self.guild = guild
        self.name = data['name']
        self.category_id = utils._get_as_snowflake(data, 'parent_id')
        self.topic = data.get('topic')
        self.position = data['position']
        self.nsfw = data.get('nsfw', False)
        self._fill_overwrites(data)
    async def _get_channel(self):
        # Messageable destination: a text channel is its own message target.
        return self
    def permissions_for(self, member):
        base = super().permissions_for(member)
        # text channels do not have voice related permissions
        denied = Permissions.voice()
        base.value &= ~denied.value
        return base
    permissions_for.__doc__ = discord.abc.GuildChannel.permissions_for.__doc__
    @property
    def members(self):
        """Returns a :class:`list` of :class:`Member` that can see this channel."""
        return [m for m in self.guild.members if self.permissions_for(m).read_messages]
    def is_nsfw(self):
        """Checks if the channel is NSFW."""
        # Legacy convention: channels named 'nsfw' or prefixed 'nsfw-' count
        # as NSFW even without the explicit flag.
        n = self.name
        return self.nsfw or n == 'nsfw' or n[:5] == 'nsfw-'
    async def edit(self, *, reason=None, **options):
        """|coro|
        Edits the channel.
        You must have the :attr:`~Permissions.manage_channels` permission to
        use this.
        Parameters
        ----------
        name: str
            The new channel name.
        topic: str
            The new channel's topic.
        position: int
            The new channel's position.
        nsfw: bool
            To mark the channel as NSFW or not.
        sync_permissions: bool
            Whether to sync permissions with the channel's new or pre-existing
            category. Defaults to ``False``.
        category: Optional[:class:`CategoryChannel`]
            The new category for this channel. Can be ``None`` to remove the
            category.
        reason: Optional[str]
            The reason for editing this channel. Shows up on the audit log.
        Raises
        ------
        InvalidArgument
            If position is less than 0 or greater than the number of channels.
        Forbidden
            You do not have permissions to edit the channel.
        HTTPException
            Editing the channel failed.
        """
        await self._edit(options, reason=reason)
    async def delete_messages(self, messages):
        """|coro|
        Deletes a list of messages. This is similar to :meth:`Message.delete`
        except it bulk deletes multiple messages.
        As a special case, if the number of messages is 0, then nothing
        is done. If the number of messages is 1 then single message
        delete is done. If it's more than two, then bulk delete is used.
        You cannot bulk delete more than 100 messages or messages that
        are older than 14 days old.
        You must have the :attr:`~Permissions.manage_messages` permission to
        use this.
        Usable only by bot accounts.
        Parameters
        -----------
        messages: Iterable[:class:`abc.Snowflake`]
            An iterable of messages denoting which ones to bulk delete.
        Raises
        ------
        ClientException
            The number of messages to delete was more than 100.
        Forbidden
            You do not have proper permissions to delete the messages or
            you're not using a bot account.
        HTTPException
            Deleting the messages failed.
        """
        if not isinstance(messages, (list, tuple)):
            messages = list(messages)
        if len(messages) == 0:
            return # do nothing
        if len(messages) == 1:
            # Single deletion avoids the bulk-delete endpoint's restrictions.
            message_id = messages[0].id
            await self._state.http.delete_message(self.id, message_id)
            return
        if len(messages) > 100:
            raise ClientException('Can only bulk delete messages up to 100 messages')
        message_ids = [m.id for m in messages]
        await self._state.http.delete_messages(self.id, message_ids)
    async def purge(self, *, limit=100, check=None, before=None, after=None, around=None, reverse=False, bulk=True):
        """|coro|
        Purges a list of messages that meet the criteria given by the predicate
        ``check``. If a ``check`` is not provided then all messages are deleted
        without discrimination.
        You must have the :attr:`~Permissions.manage_messages` permission to
        delete messages even if they are your own (unless you are a user
        account). The :attr:`~Permissions.read_message_history` permission is
        also needed to retrieve message history.
        Internally, this employs a different number of strategies depending
        on the conditions met such as if a bulk delete is possible or if
        the account is a user bot or not.
        Parameters
        -----------
        limit: int
            The number of messages to search through. This is not the number
            of messages that will be deleted, though it can be.
        check: predicate
            The function used to check if a message should be deleted.
            It must take a :class:`Message` as its sole parameter.
        before
            Same as ``before`` in :meth:`history`.
        after
            Same as ``after`` in :meth:`history`.
        around
            Same as ``around`` in :meth:`history`.
        reverse
            Same as ``reverse`` in :meth:`history`.
        bulk: bool
            If True, use bulk delete. bulk=False is useful for mass-deleting
            a bot's own messages without manage_messages. When True, will fall
            back to single delete if current account is a user bot, or if
            messages are older than two weeks.
        Raises
        -------
        Forbidden
            You do not have proper permissions to do the actions required.
        HTTPException
            Purging the messages failed.
        Examples
        ---------
        Deleting bot's messages ::
            def is_me(m):
                return m.author == client.user
            deleted = await channel.purge(limit=100, check=is_me)
            await channel.send('Deleted {} message(s)'.format(len(deleted)))
        Returns
        --------
        list
            The list of messages that were deleted.
        """
        if check is None:
            check = lambda m: True
        iterator = self.history(limit=limit, before=before, after=after, reverse=reverse, around=around)
        ret = []
        count = 0
        # Snowflake cutoff for the 14-day bulk-delete limit: convert
        # (now - 14 days) to a Discord snowflake (epoch 1420070400000 ms).
        minimum_time = int((time.time() - 14 * 24 * 60 * 60) * 1000.0 - 1420070400000) << 22
        # User accounts cannot bulk delete, so fall back to one-by-one.
        strategy = self.delete_messages if self._state.is_bot and bulk else _single_delete_strategy
        while True:
            try:
                msg = await iterator.next()
            except NoMoreItems:
                # no more messages to poll
                if count >= 2:
                    # more than 2 messages -> bulk delete
                    to_delete = ret[-count:]
                    await strategy(to_delete)
                elif count == 1:
                    # delete a single message
                    await ret[-1].delete()
                return ret
            else:
                if count == 100:
                    # we've reached a full 'queue'
                    to_delete = ret[-100:]
                    await strategy(to_delete)
                    count = 0
                    await asyncio.sleep(1)
                if check(msg):
                    if msg.id < minimum_time:
                        # older than 14 days old
                        # Flush what we have, then permanently switch to
                        # single deletes (bulk delete rejects old messages).
                        if count == 1:
                            await ret[-1].delete()
                        elif count >= 2:
                            to_delete = ret[-count:]
                            await strategy(to_delete)
                        count = 0
                        strategy = _single_delete_strategy
                    count += 1
                    ret.append(msg)
    async def webhooks(self):
        """|coro|
        Gets the list of webhooks from this channel.
        Requires :attr:`~.Permissions.manage_webhooks` permissions.
        Raises
        -------
        Forbidden
            You don't have permissions to get the webhooks.
        Returns
        --------
        List[:class:`Webhook`]
            The webhooks for this channel.
        """
        data = await self._state.http.channel_webhooks(self.id)
        return [Webhook.from_state(d, state=self._state) for d in data]
    async def create_webhook(self, *, name, avatar=None):
        """|coro|
        Creates a webhook for this channel.
        Requires :attr:`~.Permissions.manage_webhooks` permissions.
        Parameters
        -------------
        name: str
            The webhook's name.
        avatar: Optional[bytes]
            A *bytes-like* object representing the webhook's default avatar.
            This operates similarly to :meth:`~ClientUser.edit`.
        Raises
        -------
        HTTPException
            Creating the webhook failed.
        Forbidden
            You do not have permissions to create a webhook.
        Returns
        --------
        :class:`Webhook`
            The created webhook.
        """
        if avatar is not None:
            # The HTTP API expects the avatar as a base64 data URI.
            avatar = utils._bytes_to_base64_data(avatar)
        data = await self._state.http.create_webhook(self.id, name=str(name), avatar=avatar)
        return Webhook.from_state(data, state=self._state)
class VoiceChannel(discord.abc.Connectable, discord.abc.GuildChannel, Hashable):
    """Represents a Discord guild voice channel.
    .. container:: operations
        .. describe:: x == y
            Checks if two channels are equal.
        .. describe:: x != y
            Checks if two channels are not equal.
        .. describe:: hash(x)
            Returns the channel's hash.
        .. describe:: str(x)
            Returns the channel's name.
    Attributes
    -----------
    name: :class:`str`
        The channel name.
    guild: :class:`Guild`
        The guild the channel belongs to.
    id: :class:`int`
        The channel ID.
    category_id: :class:`int`
        The category channel ID this channel belongs to.
    position: :class:`int`
        The position in the channel list. This is a number that starts at 0. e.g. the
        top channel is position 0.
    bitrate: :class:`int`
        The channel's preferred audio bitrate in bits per second.
    user_limit: :class:`int`
        The channel's limit for number of members that can be in a voice channel.
    """
    __slots__ = ('name', 'id', 'guild', 'bitrate', 'user_limit',
                 '_state', 'position', '_overwrites', 'category_id')
    def __init__(self, *, state, guild, data):
        self._state = state
        self.id = int(data['id'])
        # Remaining attributes are filled by _update (also reused for edits).
        self._update(guild, data)
    def __repr__(self):
        return '<VoiceChannel id={0.id} name={0.name!r} position={0.position}>'.format(self)
    def _get_voice_client_key(self):
        # Connectable hook: voice clients are keyed per-guild.
        return self.guild.id, 'guild_id'
    def _get_voice_state_pair(self):
        # Connectable hook: (guild id, channel id) identifies the voice state.
        return self.guild.id, self.id
    def _update(self, guild, data):
        # Refresh all mutable state from a raw gateway/HTTP channel payload.
        self.guild = guild
        self.name = data['name']
        self.category_id = utils._get_as_snowflake(data, 'parent_id')
        self.position = data['position']
        self.bitrate = data.get('bitrate')
        self.user_limit = data.get('user_limit')
        self._fill_overwrites(data)
    @property
    def members(self):
        """Returns a list of :class:`Member` that are currently inside this voice channel."""
        # Scan the guild's cached voice states for users in this channel.
        ret = []
        for user_id, state in self.guild._voice_states.items():
            if state.channel.id == self.id:
                member = self.guild.get_member(user_id)
                if member is not None:
                    ret.append(member)
        return ret
    async def edit(self, *, reason=None, **options):
        """|coro|
        Edits the channel.
        You must have the :attr:`~Permissions.manage_channels` permission to
        use this.
        Parameters
        ----------
        name: str
            The new channel's name.
        bitrate: int
            The new channel's bitrate.
        user_limit: int
            The new channel's user limit.
        position: int
            The new channel's position.
        sync_permissions: bool
            Whether to sync permissions with the channel's new or pre-existing
            category. Defaults to ``False``.
        category: Optional[:class:`CategoryChannel`]
            The new category for this channel. Can be ``None`` to remove the
            category.
        reason: Optional[str]
            The reason for editing this channel. Shows up on the audit log.
        Raises
        ------
        Forbidden
            You do not have permissions to edit the channel.
        HTTPException
            Editing the channel failed.
        """
        await self._edit(options, reason=reason)
class CategoryChannel(discord.abc.GuildChannel, Hashable):
    """Represents a Discord channel category.
    These are useful to group channels to logical compartments.
    .. container:: operations
        .. describe:: x == y
            Checks if two channels are equal.
        .. describe:: x != y
            Checks if two channels are not equal.
        .. describe:: hash(x)
            Returns the category's hash.
        .. describe:: str(x)
            Returns the category's name.
    Attributes
    -----------
    name: :class:`str`
        The category name.
    guild: :class:`Guild`
        The guild the category belongs to.
    id: :class:`int`
        The category channel ID.
    position: :class:`int`
        The position in the category list. This is a number that starts at 0. e.g. the
        top category is position 0.
    """
    __slots__ = ('name', 'id', 'guild', 'nsfw', '_state', 'position', '_overwrites', 'category_id')
    def __init__(self, *, state, guild, data):
        self._state = state
        self.id = int(data['id'])
        # Remaining attributes are filled by _update (also reused for edits).
        self._update(guild, data)
    def __repr__(self):
        return '<CategoryChannel id={0.id} name={0.name!r} position={0.position}>'.format(self)
    def _update(self, guild, data):
        # Refresh all mutable state from a raw gateway/HTTP channel payload.
        self.guild = guild
        self.name = data['name']
        self.category_id = utils._get_as_snowflake(data, 'parent_id')
        self.nsfw = data.get('nsfw', False)
        self.position = data['position']
        self._fill_overwrites(data)
    def is_nsfw(self):
        """Checks if the category is NSFW."""
        # Legacy convention: 'nsfw' name or 'nsfw-' prefix also counts.
        n = self.name
        return self.nsfw or n == 'nsfw' or n[:5] == 'nsfw-'
    async def edit(self, *, reason=None, **options):
        """|coro|
        Edits the channel.
        You must have the :attr:`~Permissions.manage_channels` permission to
        use this.
        Parameters
        ----------
        name: str
            The new category's name.
        position: int
            The new category's position.
        nsfw: bool
            To mark the category as NSFW or not.
        reason: Optional[str]
            The reason for editing this category. Shows up on the audit log.
        Raises
        ------
        InvalidArgument
            If position is less than 0 or greater than the number of categories.
        Forbidden
            You do not have permissions to edit the category.
        HTTPException
            Editing the category failed.
        """
        # Position changes go through the dedicated bulk-move endpoint
        # (_move); everything else is a plain channel edit.
        try:
            position = options.pop('position')
        except KeyError:
            pass
        else:
            await self._move(position, reason=reason)
            self.position = position
        if options:
            data = await self._state.http.edit_channel(self.id, reason=reason, **options)
            self._update(self.guild, data)
    @property
    def channels(self):
        """List[:class:`abc.GuildChannel`]: Returns the channels that are under this category.
        These are sorted by the official Discord UI, which places voice channels below the text channels.
        """
        def comparator(channel):
            # Sort text channels first (False < True), then by position.
            return (not isinstance(channel, TextChannel), channel.position)
        ret = [c for c in self.guild.channels if c.category_id == self.id]
        ret.sort(key=comparator)
        return ret
class DMChannel(discord.abc.Messageable, Hashable):
    """A private (direct message) channel between the client and one user.
    .. container:: operations
        .. describe:: x == y
            Checks if two channels are equal.
        .. describe:: x != y
            Checks if two channels are not equal.
        .. describe:: hash(x)
            Returns the channel's hash.
        .. describe:: str(x)
            Returns a string representation of the channel
    Attributes
    ----------
    recipient: :class:`User`
        The user you are participating with in the direct message channel.
    me: :class:`ClientUser`
        The user presenting yourself.
    id: :class:`int`
        The direct message channel ID.
    """
    __slots__ = ('id', 'recipient', 'me', '_state')
    def __init__(self, *, me, state, data):
        self._state = state
        self.id = int(data['id'])
        self.me = me
        # A DM always has exactly one other participant.
        self.recipient = state.store_user(data['recipients'][0])
    async def _get_channel(self):
        # Messageable destination: a DM channel is its own message target.
        return self
    def __str__(self):
        return 'Direct Message with %s' % self.recipient
    def __repr__(self):
        return '<DMChannel id={0.id} recipient={0.recipient!r}>'.format(self)
    @property
    def created_at(self):
        """Returns the direct message channel's creation time in UTC."""
        return utils.snowflake_time(self.id)
    def permissions_for(self, user=None):
        """Resolve :class:`Permissions` for a :class:`User` in this DM.
        Kept for API parity with guild channels; actual direct messages have
        no real permission system. The result is the full text permission
        set except:
        - send_tts_messages: You cannot send TTS messages in a DM.
        - manage_messages: You cannot delete others messages in a DM.
        Parameters
        -----------
        user: :class:`User`
            The user to check permissions for. This parameter is ignored
            but kept for compatibility.
        Returns
        --------
        :class:`Permissions`
            The resolved permissions.
        """
        perms = Permissions.text()
        perms.send_tts_messages = False
        perms.manage_messages = False
        return perms
class GroupChannel(discord.abc.Messageable, Hashable):
    """Represents a Discord group channel.
    .. container:: operations
        .. describe:: x == y
            Checks if two channels are equal.
        .. describe:: x != y
            Checks if two channels are not equal.
        .. describe:: hash(x)
            Returns the channel's hash.
        .. describe:: str(x)
            Returns a string representation of the channel
    Attributes
    ----------
    recipients: :class:`list` of :class:`User`
        The users you are participating with in the group channel.
    me: :class:`ClientUser`
        The user presenting yourself.
    id: :class:`int`
        The group channel ID.
    owner: :class:`User`
        The user that owns the group channel.
    icon: Optional[:class:`str`]
        The group channel's icon hash if provided.
    name: Optional[:class:`str`]
        The group channel's name if provided.
    """
    __slots__ = ('id', 'recipients', 'owner', 'icon', 'name', 'me', '_state')
    def __init__(self, *, me, state, data):
        self._state = state
        self.id = int(data['id'])
        self.me = me
        self._update_group(data)
    def _update_group(self, data):
        # Refresh group metadata from a raw payload.
        owner_id = utils._get_as_snowflake(data, 'owner_id')
        self.icon = data.get('icon')
        self.name = data.get('name')
        # Partial updates may omit recipients; keep the existing list then.
        try:
            self.recipients = [self._state.store_user(u) for u in data['recipients']]
        except KeyError:
            pass
        # The owner is either ourselves or one of the recipients.
        if owner_id == self.me.id:
            self.owner = self.me
        else:
            self.owner = utils.find(lambda u: u.id == owner_id, self.recipients)
    async def _get_channel(self):
        # Messageable destination: a group channel is its own message target.
        return self
    def __str__(self):
        if self.name:
            return self.name
        if len(self.recipients) == 0:
            return 'Unnamed'
        return ', '.join(map(lambda x: x.name, self.recipients))
    def __repr__(self):
        return '<GroupChannel id={0.id} name={0.name!r}>'.format(self)
    @property
    def icon_url(self):
        """Returns the channel's icon URL if available or an empty string otherwise."""
        if self.icon is None:
            return ''
        return 'https://cdn.discordapp.com/channel-icons/{0.id}/{0.icon}.jpg'.format(self)
    @property
    def created_at(self):
        """Returns the channel's creation time in UTC."""
        return utils.snowflake_time(self.id)
    def permissions_for(self, user):
        """Handles permission resolution for a :class:`User`.
        This function is there for compatibility with other channel types.
        Actual direct messages do not really have the concept of permissions.
        This returns all the Text related permissions set to true except:
        - send_tts_messages: You cannot send TTS messages in a DM.
        - manage_messages: You cannot delete others messages in a DM.
        This also checks the kick_members permission if the user is the owner.
        Parameters
        -----------
        user: :class:`User`
            The user to check permissions for.
        Returns
        --------
        :class:`Permissions`
            The resolved permissions for the user.
        """
        base = Permissions.text()
        base.send_tts_messages = False
        base.manage_messages = False
        base.mention_everyone = True
        # Only the group owner may remove (kick) other recipients.
        if user.id == self.owner.id:
            base.kick_members = True
        return base
    async def add_recipients(self, *recipients):
        """|coro|
        Adds recipients to this group.
        A group can only have a maximum of 10 members.
        Attempting to add more ends up in an exception. To
        add a recipient to the group, you must have a relationship
        with the user of type :attr:`RelationshipType.friend`.
        Parameters
        -----------
        \*recipients: :class:`User`
            An argument list of users to add to this group.
        Raises
        -------
        HTTPException
            Adding a recipient to this group failed.
        """
        # TODO: wait for the corresponding WS event
        req = self._state.http.add_group_recipient
        for recipient in recipients:
            await req(self.id, recipient.id)
    async def remove_recipients(self, *recipients):
        """|coro|
        Removes recipients from this group.
        Parameters
        -----------
        \*recipients: :class:`User`
            An argument list of users to remove from this group.
        Raises
        -------
        HTTPException
            Removing a recipient from this group failed.
        """
        # TODO: wait for the corresponding WS event
        req = self._state.http.remove_group_recipient
        for recipient in recipients:
            await req(self.id, recipient.id)
    async def edit(self, **fields):
        """|coro|
        Edits the group.
        Parameters
        -----------
        name: Optional[str]
            The new name to change the group to.
            Could be ``None`` to remove the name.
        icon: Optional[bytes]
            A bytes-like object representing the new icon.
            Could be ``None`` to remove the icon.
        Raises
        -------
        HTTPException
            Editing the group failed.
        """
        # Raw icon bytes must be converted to a base64 data URI for the API;
        # an explicit None passes through to remove the icon.
        try:
            icon_bytes = fields['icon']
        except KeyError:
            pass
        else:
            if icon_bytes is not None:
                fields['icon'] = utils._bytes_to_base64_data(icon_bytes)
        data = await self._state.http.edit_group(self.id, **fields)
        self._update_group(data)
    async def leave(self):
        """|coro|
        Leave the group.
        If you are the only one in the group, this deletes it as well.
        Raises
        -------
        HTTPException
            Leaving the group failed.
        """
        await self._state.http.leave_group(self.id)
def _channel_factory(channel_type):
    """Map a raw channel ``type`` code to ``(channel class, ChannelType value)``.

    The class is ``None`` when the type code is unrecognised; the second
    element is the resolved :class:`ChannelType` (or the raw value if it
    could not be converted).
    """
    value = try_enum(ChannelType, channel_type)
    factories = {
        ChannelType.text: TextChannel,
        ChannelType.voice: VoiceChannel,
        ChannelType.private: DMChannel,
        ChannelType.category: CategoryChannel,
        ChannelType.group: GroupChannel,
    }
    # An unconverted raw value never equals an enum member, so .get falls
    # back to None exactly like the original if/elif chain did.
    return factories.get(value), value
| 30.478781 | 116 | 0.58804 |
from .permissions import Permissions
from .enums import ChannelType, try_enum
from .mixins import Hashable
from . import utils
from .errors import ClientException, NoMoreItems
from .webhook import Webhook
import discord.abc
import time
import asyncio
__all__ = ['TextChannel', 'VoiceChannel', 'DMChannel', 'CategoryChannel', 'GroupChannel', '_channel_factory']
async def _single_delete_strategy(messages):
for m in messages:
await m.delete()
class TextChannel(discord.abc.Messageable, discord.abc.GuildChannel, Hashable):
__slots__ = ('name', 'id', 'guild', 'topic', '_state', 'nsfw',
'category_id', 'position', '_overwrites')
def __init__(self, *, state, guild, data):
self._state = state
self.id = int(data['id'])
self._update(guild, data)
def __repr__(self):
return '<TextChannel id={0.id} name={0.name!r} position={0.position}>'.format(self)
def _update(self, guild, data):
self.guild = guild
self.name = data['name']
self.category_id = utils._get_as_snowflake(data, 'parent_id')
self.topic = data.get('topic')
self.position = data['position']
self.nsfw = data.get('nsfw', False)
self._fill_overwrites(data)
async def _get_channel(self):
return self
def permissions_for(self, member):
base = super().permissions_for(member)
denied = Permissions.voice()
base.value &= ~denied.value
return base
permissions_for.__doc__ = discord.abc.GuildChannel.permissions_for.__doc__
@property
def members(self):
return [m for m in self.guild.members if self.permissions_for(m).read_messages]
def is_nsfw(self):
n = self.name
return self.nsfw or n == 'nsfw' or n[:5] == 'nsfw-'
async def edit(self, *, reason=None, **options):
await self._edit(options, reason=reason)
async def delete_messages(self, messages):
if not isinstance(messages, (list, tuple)):
messages = list(messages)
if len(messages) == 0:
return
if len(messages) == 1:
message_id = messages[0].id
await self._state.http.delete_message(self.id, message_id)
return
if len(messages) > 100:
raise ClientException('Can only bulk delete messages up to 100 messages')
message_ids = [m.id for m in messages]
await self._state.http.delete_messages(self.id, message_ids)
async def purge(self, *, limit=100, check=None, before=None, after=None, around=None, reverse=False, bulk=True):
if check is None:
check = lambda m: True
iterator = self.history(limit=limit, before=before, after=after, reverse=reverse, around=around)
ret = []
count = 0
minimum_time = int((time.time() - 14 * 24 * 60 * 60) * 1000.0 - 1420070400000) << 22
strategy = self.delete_messages if self._state.is_bot and bulk else _single_delete_strategy
while True:
try:
msg = await iterator.next()
except NoMoreItems:
if count >= 2:
to_delete = ret[-count:]
await strategy(to_delete)
elif count == 1:
await ret[-1].delete()
return ret
else:
if count == 100:
to_delete = ret[-100:]
await strategy(to_delete)
count = 0
await asyncio.sleep(1)
if check(msg):
if msg.id < minimum_time:
# older than 14 days old
if count == 1:
await ret[-1].delete()
elif count >= 2:
to_delete = ret[-count:]
await strategy(to_delete)
count = 0
strategy = _single_delete_strategy
count += 1
ret.append(msg)
async def webhooks(self):
data = await self._state.http.channel_webhooks(self.id)
return [Webhook.from_state(d, state=self._state) for d in data]
async def create_webhook(self, *, name, avatar=None):
if avatar is not None:
avatar = utils._bytes_to_base64_data(avatar)
data = await self._state.http.create_webhook(self.id, name=str(name), avatar=avatar)
return Webhook.from_state(data, state=self._state)
class VoiceChannel(discord.abc.Connectable, discord.abc.GuildChannel, Hashable):
__slots__ = ('name', 'id', 'guild', 'bitrate', 'user_limit',
'_state', 'position', '_overwrites', 'category_id')
def __init__(self, *, state, guild, data):
self._state = state
self.id = int(data['id'])
self._update(guild, data)
def __repr__(self):
return '<VoiceChannel id={0.id} name={0.name!r} position={0.position}>'.format(self)
def _get_voice_client_key(self):
return self.guild.id, 'guild_id'
def _get_voice_state_pair(self):
return self.guild.id, self.id
def _update(self, guild, data):
self.guild = guild
self.name = data['name']
self.category_id = utils._get_as_snowflake(data, 'parent_id')
self.position = data['position']
self.bitrate = data.get('bitrate')
self.user_limit = data.get('user_limit')
self._fill_overwrites(data)
@property
def members(self):
ret = []
for user_id, state in self.guild._voice_states.items():
if state.channel.id == self.id:
member = self.guild.get_member(user_id)
if member is not None:
ret.append(member)
return ret
async def edit(self, *, reason=None, **options):
await self._edit(options, reason=reason)
class CategoryChannel(discord.abc.GuildChannel, Hashable):
__slots__ = ('name', 'id', 'guild', 'nsfw', '_state', 'position', '_overwrites', 'category_id')
def __init__(self, *, state, guild, data):
self._state = state
self.id = int(data['id'])
self._update(guild, data)
def __repr__(self):
return '<CategoryChannel id={0.id} name={0.name!r} position={0.position}>'.format(self)
def _update(self, guild, data):
self.guild = guild
self.name = data['name']
self.category_id = utils._get_as_snowflake(data, 'parent_id')
self.nsfw = data.get('nsfw', False)
self.position = data['position']
self._fill_overwrites(data)
def is_nsfw(self):
n = self.name
return self.nsfw or n == 'nsfw' or n[:5] == 'nsfw-'
async def edit(self, *, reason=None, **options):
try:
position = options.pop('position')
except KeyError:
pass
else:
await self._move(position, reason=reason)
self.position = position
if options:
data = await self._state.http.edit_channel(self.id, reason=reason, **options)
self._update(self.guild, data)
@property
def channels(self):
def comparator(channel):
return (not isinstance(channel, TextChannel), channel.position)
ret = [c for c in self.guild.channels if c.category_id == self.id]
ret.sort(key=comparator)
return ret
class DMChannel(discord.abc.Messageable, Hashable):
__slots__ = ('id', 'recipient', 'me', '_state')
def __init__(self, *, me, state, data):
self._state = state
self.recipient = state.store_user(data['recipients'][0])
self.me = me
self.id = int(data['id'])
async def _get_channel(self):
return self
def __str__(self):
return 'Direct Message with %s' % self.recipient
def __repr__(self):
return '<DMChannel id={0.id} recipient={0.recipient!r}>'.format(self)
@property
def created_at(self):
return utils.snowflake_time(self.id)
def permissions_for(self, user=None):
base = Permissions.text()
base.send_tts_messages = False
base.manage_messages = False
return base
class GroupChannel(discord.abc.Messageable, Hashable):
__slots__ = ('id', 'recipients', 'owner', 'icon', 'name', 'me', '_state')
def __init__(self, *, me, state, data):
self._state = state
self.id = int(data['id'])
self.me = me
self._update_group(data)
def _update_group(self, data):
owner_id = utils._get_as_snowflake(data, 'owner_id')
self.icon = data.get('icon')
self.name = data.get('name')
try:
self.recipients = [self._state.store_user(u) for u in data['recipients']]
except KeyError:
pass
if owner_id == self.me.id:
self.owner = self.me
else:
self.owner = utils.find(lambda u: u.id == owner_id, self.recipients)
async def _get_channel(self):
return self
def __str__(self):
if self.name:
return self.name
if len(self.recipients) == 0:
return 'Unnamed'
return ', '.join(map(lambda x: x.name, self.recipients))
def __repr__(self):
return '<GroupChannel id={0.id} name={0.name!r}>'.format(self)
@property
def icon_url(self):
if self.icon is None:
return ''
return 'https://cdn.discordapp.com/channel-icons/{0.id}/{0.icon}.jpg'.format(self)
@property
def created_at(self):
return utils.snowflake_time(self.id)
def permissions_for(self, user):
base = Permissions.text()
base.send_tts_messages = False
base.manage_messages = False
base.mention_everyone = True
if user.id == self.owner.id:
base.kick_members = True
return base
async def add_recipients(self, *recipients):
# TODO: wait for the corresponding WS event
req = self._state.http.add_group_recipient
for recipient in recipients:
await req(self.id, recipient.id)
async def remove_recipients(self, *recipients):
# TODO: wait for the corresponding WS event
req = self._state.http.remove_group_recipient
for recipient in recipients:
await req(self.id, recipient.id)
async def edit(self, **fields):
try:
icon_bytes = fields['icon']
except KeyError:
pass
else:
if icon_bytes is not None:
fields['icon'] = utils._bytes_to_base64_data(icon_bytes)
data = await self._state.http.edit_group(self.id, **fields)
self._update_group(data)
async def leave(self):
await self._state.http.leave_group(self.id)
def _channel_factory(channel_type):
value = try_enum(ChannelType, channel_type)
if value is ChannelType.text:
return TextChannel, value
elif value is ChannelType.voice:
return VoiceChannel, value
elif value is ChannelType.private:
return DMChannel, value
elif value is ChannelType.category:
return CategoryChannel, value
elif value is ChannelType.group:
return GroupChannel, value
else:
return None, value
| true | true |
f737b9a7130a9573def136ba81888a2c4a8f66e0 | 1,133 | py | Python | main.py | adv1996/nba-historical-betting | a732cf5fcf5dcde7002dc40a88578935d96d8de8 | [
"MIT"
] | null | null | null | main.py | adv1996/nba-historical-betting | a732cf5fcf5dcde7002dc40a88578935d96d8de8 | [
"MIT"
] | null | null | null | main.py | adv1996/nba-historical-betting | a732cf5fcf5dcde7002dc40a88578935d96d8de8 | [
"MIT"
] | null | null | null | from scrape_utils import retrieveWebpage
from sportsreview_importer import retrieveLinks, bulkDownloadData, collateData
def saveSportReviewWebpage():
baseURL = 'https://www.sportsbookreviewsonline.com/scoresoddsarchives/nba/nbaoddsarchives.htm'
saveFile = 'data/sportsbookreview_nba_odds_archive.html'
element = '/html/body/table[2]/tbody/tr[1]/td[2]/table/tbody/tr[2]/td/ul/li[1]/a'
retrieveWebpage(baseURL, saveFile, element)
def retrieveSportsReviewLinks():
webpage = 'data/sportsbookreview_nba_odds_archive.html'
base = 'https://www.sportsbookreviewsonline.com/scoresoddsarchives/nba/'
saveFile = 'data/sportsbookreview_downloadable_archive_links.json'
retrieveLinks(webpage, base, saveFile)
def bulkDownloadAllDataFile():
saveFile = 'data/sportsbookreview_downloadable_archive_links.json'
bulkDownloadData(saveFile)
def collateDownloadedData():
saveFile = 'data/sportsbookreview_downloadable_archive_links.json'
collateData(saveFile)
def main():
# saveSportReviewWebpage()
# retrieveSportsReviewLinks()
# bulkDownloadAllDataFile()
collateDownloadedData()
if __name__ == "__main__":
main() | 36.548387 | 96 | 0.802295 | from scrape_utils import retrieveWebpage
from sportsreview_importer import retrieveLinks, bulkDownloadData, collateData
def saveSportReviewWebpage():
baseURL = 'https://www.sportsbookreviewsonline.com/scoresoddsarchives/nba/nbaoddsarchives.htm'
saveFile = 'data/sportsbookreview_nba_odds_archive.html'
element = '/html/body/table[2]/tbody/tr[1]/td[2]/table/tbody/tr[2]/td/ul/li[1]/a'
retrieveWebpage(baseURL, saveFile, element)
def retrieveSportsReviewLinks():
webpage = 'data/sportsbookreview_nba_odds_archive.html'
base = 'https://www.sportsbookreviewsonline.com/scoresoddsarchives/nba/'
saveFile = 'data/sportsbookreview_downloadable_archive_links.json'
retrieveLinks(webpage, base, saveFile)
def bulkDownloadAllDataFile():
saveFile = 'data/sportsbookreview_downloadable_archive_links.json'
bulkDownloadData(saveFile)
def collateDownloadedData():
saveFile = 'data/sportsbookreview_downloadable_archive_links.json'
collateData(saveFile)
def main():
collateDownloadedData()
if __name__ == "__main__":
main() | true | true |
f737ba2babbb7e43a3920947a653abf898d03b6b | 5,901 | py | Python | dodo.py | Johann150/forget | 88760deb6f897a2a4ec1772d6dc93f0ebe7a8ac5 | [
"ISC"
] | null | null | null | dodo.py | Johann150/forget | 88760deb6f897a2a4ec1772d6dc93f0ebe7a8ac5 | [
"ISC"
] | null | null | null | dodo.py | Johann150/forget | 88760deb6f897a2a4ec1772d6dc93f0ebe7a8ac5 | [
"ISC"
] | null | null | null | from doit import create_after
from glob import glob
from itertools import chain
def reltouch(source_filename, dest_filename):
from os import stat, utime
stat_res = stat(source_filename)
utime(dest_filename, ns=(stat_res.st_atime_ns, stat_res.st_mtime_ns))
def resize_image(basename, width, image_format):
from PIL import Image
with Image.open('assets/{}.png'.format(basename)) as im:
if 'A' in im.getbands() and image_format != 'jpeg':
im = im.convert('RGBA')
else:
im = im.convert('RGB')
height = im.height * width // im.width
new = im.resize((width, height), resample=Image.LANCZOS)
if image_format == 'jpeg':
kwargs = dict(
optimize=True,
progressive=True,
quality=80,
)
elif image_format == 'webp':
kwargs = dict(
quality=79,
)
elif image_format == 'png':
kwargs = dict(
optimize=True,
)
new.save('static/{}-{}.{}'.format(basename, width, image_format),
**kwargs)
reltouch('assets/{}.png'.format(basename),
'static/{}-{}.{}'.format(basename, width, image_format))
def task_logotype():
"""resize and convert logotype"""
widths = (200, 400, 600, 800)
image_formats = ('jpeg', 'webp')
for width in widths:
for image_format in image_formats:
yield dict(
name='{}.{}'.format(width, image_format),
actions=[(resize_image,
('logotype', width, image_format))],
targets=[f'static/logotype-{width}.{image_format}'],
file_dep=['assets/logotype.png'],
clean=True,
)
def task_service_icon():
"""resize and convert service icons"""
widths = (20, 40, 80)
formats = ('webp', 'png')
for width in widths:
for image_format in formats:
for basename in ('twitter', 'mastodon', 'misskey'):
yield dict(
name='{}-{}.{}'.format(basename, width, image_format),
actions=[(resize_image, (basename, width, image_format))],
targets=[
'static/{}-{}.{}'.format(basename, width,
image_format)],
file_dep=['assets/{}.png'.format(basename)],
clean=True,
)
def task_copy():
"copy assets verbatim"
assets = ('icon.png', 'logotype.png')
def do_the_thing(src, dst):
from shutil import copy
copy(src, dst)
reltouch(src, dst)
for asset in assets:
src = 'assets/{}'.format(asset)
dst = 'static/{}'.format(asset)
yield dict(
name=asset,
actions=[(do_the_thing, (src, dst))],
targets=[dst],
file_dep=[src],
clean=True,
)
def task_minify_css():
"""minify css file with csscompressor"""
from csscompressor import compress
def minify():
with open('assets/styles.css') as in_:
with open('static/styles.css', 'w') as out:
out.write(compress(in_.read()))
reltouch('assets/styles.css', 'static/styles.css')
return dict(
actions=[minify],
targets=['static/styles.css'],
file_dep=['assets/styles.css'],
clean=True,
)
def task_rollup():
"""rollup javascript bundle"""
filenames = ['settings.js', 'instance_buttons.js']
for filename in filenames:
src = 'assets/{}'.format(filename)
dst = 'static/{}'.format(filename)
name = filename.split('.')[0]
yield dict(
name=filename,
file_dep=list(chain(
# fuck it
glob('assets/*.js'),
glob('components/*.html'))) + ['rollup.config.js'],
targets=[dst],
clean=True,
actions=[
['node_modules/.bin/rollup', '-c',
'-i', src, '-o', dst, '-n', name, '-f', 'iife'],
],
)
@create_after('logotype')
@create_after('service_icon')
@create_after('copy')
@create_after('minify_css')
@create_after('rollup')
def task_compress():
"""
make gzip and brotli compressed versions of each
static file for the server to lazily serve
"""
files = chain(
glob('static/*.css'),
glob('static/*.js'),
glob('static/*.jpeg'),
glob('static/*.png'),
glob('static/*.webp'),
)
def compress_brotli(filename):
import brotli
with open(filename, 'rb') as in_:
with open(filename + '.br', 'wb') as out:
out.write(brotli.compress(in_.read()))
reltouch(filename, filename+'.br')
def compress_gzip(filename):
import gzip
with open(filename, 'rb') as in_:
with gzip.open(filename + '.gz', 'wb') as out:
out.write(in_.read())
reltouch(filename, filename+'.gz')
for filename in files:
yield dict(
file_dep=(filename,),
targets=(filename+'.br',),
name=filename+'.br',
actions=[(compress_brotli, (filename,))],
clean=True,
)
yield dict(
file_dep=(filename,),
targets=(filename+'.gz',),
name=filename+'.gz',
actions=[(compress_gzip, (filename,))],
clean=True,
)
if __name__ == '__main__':
import doit
doit.run(globals())
| 30.734375 | 78 | 0.497373 | from doit import create_after
from glob import glob
from itertools import chain
def reltouch(source_filename, dest_filename):
from os import stat, utime
stat_res = stat(source_filename)
utime(dest_filename, ns=(stat_res.st_atime_ns, stat_res.st_mtime_ns))
def resize_image(basename, width, image_format):
from PIL import Image
with Image.open('assets/{}.png'.format(basename)) as im:
if 'A' in im.getbands() and image_format != 'jpeg':
im = im.convert('RGBA')
else:
im = im.convert('RGB')
height = im.height * width // im.width
new = im.resize((width, height), resample=Image.LANCZOS)
if image_format == 'jpeg':
kwargs = dict(
optimize=True,
progressive=True,
quality=80,
)
elif image_format == 'webp':
kwargs = dict(
quality=79,
)
elif image_format == 'png':
kwargs = dict(
optimize=True,
)
new.save('static/{}-{}.{}'.format(basename, width, image_format),
**kwargs)
reltouch('assets/{}.png'.format(basename),
'static/{}-{}.{}'.format(basename, width, image_format))
def task_logotype():
widths = (200, 400, 600, 800)
image_formats = ('jpeg', 'webp')
for width in widths:
for image_format in image_formats:
yield dict(
name='{}.{}'.format(width, image_format),
actions=[(resize_image,
('logotype', width, image_format))],
targets=[f'static/logotype-{width}.{image_format}'],
file_dep=['assets/logotype.png'],
clean=True,
)
def task_service_icon():
widths = (20, 40, 80)
formats = ('webp', 'png')
for width in widths:
for image_format in formats:
for basename in ('twitter', 'mastodon', 'misskey'):
yield dict(
name='{}-{}.{}'.format(basename, width, image_format),
actions=[(resize_image, (basename, width, image_format))],
targets=[
'static/{}-{}.{}'.format(basename, width,
image_format)],
file_dep=['assets/{}.png'.format(basename)],
clean=True,
)
def task_copy():
assets = ('icon.png', 'logotype.png')
def do_the_thing(src, dst):
from shutil import copy
copy(src, dst)
reltouch(src, dst)
for asset in assets:
src = 'assets/{}'.format(asset)
dst = 'static/{}'.format(asset)
yield dict(
name=asset,
actions=[(do_the_thing, (src, dst))],
targets=[dst],
file_dep=[src],
clean=True,
)
def task_minify_css():
from csscompressor import compress
def minify():
with open('assets/styles.css') as in_:
with open('static/styles.css', 'w') as out:
out.write(compress(in_.read()))
reltouch('assets/styles.css', 'static/styles.css')
return dict(
actions=[minify],
targets=['static/styles.css'],
file_dep=['assets/styles.css'],
clean=True,
)
def task_rollup():
filenames = ['settings.js', 'instance_buttons.js']
for filename in filenames:
src = 'assets/{}'.format(filename)
dst = 'static/{}'.format(filename)
name = filename.split('.')[0]
yield dict(
name=filename,
file_dep=list(chain(
glob('assets/*.js'),
glob('components/*.html'))) + ['rollup.config.js'],
targets=[dst],
clean=True,
actions=[
['node_modules/.bin/rollup', '-c',
'-i', src, '-o', dst, '-n', name, '-f', 'iife'],
],
)
@create_after('logotype')
@create_after('service_icon')
@create_after('copy')
@create_after('minify_css')
@create_after('rollup')
def task_compress():
files = chain(
glob('static/*.css'),
glob('static/*.js'),
glob('static/*.jpeg'),
glob('static/*.png'),
glob('static/*.webp'),
)
def compress_brotli(filename):
import brotli
with open(filename, 'rb') as in_:
with open(filename + '.br', 'wb') as out:
out.write(brotli.compress(in_.read()))
reltouch(filename, filename+'.br')
def compress_gzip(filename):
import gzip
with open(filename, 'rb') as in_:
with gzip.open(filename + '.gz', 'wb') as out:
out.write(in_.read())
reltouch(filename, filename+'.gz')
for filename in files:
yield dict(
file_dep=(filename,),
targets=(filename+'.br',),
name=filename+'.br',
actions=[(compress_brotli, (filename,))],
clean=True,
)
yield dict(
file_dep=(filename,),
targets=(filename+'.gz',),
name=filename+'.gz',
actions=[(compress_gzip, (filename,))],
clean=True,
)
if __name__ == '__main__':
import doit
doit.run(globals())
| true | true |
f737ba2f0bccbc310ba711faf8a0f36a0ce418ea | 1,182 | py | Python | tests/test_model_artifact.py | jmonsalverodilla/house_prices_regression_model | 28fd24e777fcf838acffda6ea669e1339d92819d | [
"MIT"
] | null | null | null | tests/test_model_artifact.py | jmonsalverodilla/house_prices_regression_model | 28fd24e777fcf838acffda6ea669e1339d92819d | [
"MIT"
] | null | null | null | tests/test_model_artifact.py | jmonsalverodilla/house_prices_regression_model | 28fd24e777fcf838acffda6ea669e1339d92819d | [
"MIT"
] | null | null | null | #Imports
from house_prices_regression_model.train_pipeline import run_training
from house_prices_regression_model.processing.data_manager import load_pipeline
from house_prices_regression_model.config.core import ROOT, TRAINED_MODEL_DIR,load_config_file,SETTINGS_PATH
from house_prices_regression_model import __version__ as VERSION
from pathlib import Path
import sklearn
#Config files
config = load_config_file(SETTINGS_PATH)
PIPELINE_ARTIFACT_NAME = config["PIPELINE_ARTIFACT_NAME"]
def test_model_save_load():
"""
Tests for the model saving process
"""
run_training()
# =================================
# TEST SUITE
# =================================
# Check the model file is created/saved in the directory
PATH = ROOT / TRAINED_MODEL_DIR/ f"{PIPELINE_ARTIFACT_NAME}_v{VERSION}.pkl"
assert Path.exists(PATH)
# Check that the model file can be loaded properly
# (by type checking that it is a sklearn linear regression estimator)
pipeline_file_name = f"{PIPELINE_ARTIFACT_NAME}_v{VERSION}.pkl"
loaded_model = load_pipeline(file_name=pipeline_file_name)
assert isinstance(loaded_model, sklearn.pipeline.Pipeline) | 38.129032 | 108 | 0.747039 |
from house_prices_regression_model.train_pipeline import run_training
from house_prices_regression_model.processing.data_manager import load_pipeline
from house_prices_regression_model.config.core import ROOT, TRAINED_MODEL_DIR,load_config_file,SETTINGS_PATH
from house_prices_regression_model import __version__ as VERSION
from pathlib import Path
import sklearn
config = load_config_file(SETTINGS_PATH)
PIPELINE_ARTIFACT_NAME = config["PIPELINE_ARTIFACT_NAME"]
def test_model_save_load():
run_training()
PATH = ROOT / TRAINED_MODEL_DIR/ f"{PIPELINE_ARTIFACT_NAME}_v{VERSION}.pkl"
assert Path.exists(PATH)
pipeline_file_name = f"{PIPELINE_ARTIFACT_NAME}_v{VERSION}.pkl"
loaded_model = load_pipeline(file_name=pipeline_file_name)
assert isinstance(loaded_model, sklearn.pipeline.Pipeline) | true | true |
f737bb8aeb211e1b90f393d6e19101cec3f831a3 | 5,413 | py | Python | docs/conf.py | susmitpy/modin | c7d7b492e52fcc4aa36af2a210312101bbada06e | [
"ECL-2.0",
"Apache-2.0"
] | null | null | null | docs/conf.py | susmitpy/modin | c7d7b492e52fcc4aa36af2a210312101bbada06e | [
"ECL-2.0",
"Apache-2.0"
] | null | null | null | docs/conf.py | susmitpy/modin | c7d7b492e52fcc4aa36af2a210312101bbada06e | [
"ECL-2.0",
"Apache-2.0"
] | null | null | null | # -*- coding: utf-8 -*-
#
# Configuration file for the Sphinx documentation builder.
#
# This file does only contain a selection of the most common options. For a
# full list see the documentation:
# http://www.sphinx-doc.org/en/stable/config
# -- Project information -----------------------------------------------------
import sys
import os
import types
import ray
# stub ray.remote to be a no-op so it doesn't shadow docstrings
def noop_decorator(*args, **kwargs):
if len(args) == 1 and len(kwargs) == 0 and callable(args[0]):
# This is the case where the decorator is just @ray.remote without parameters.
return args[0]
return lambda cls_or_func: cls_or_func
ray.remote = noop_decorator
# fake modules if they're missing
for mod_name in ("cudf", "cupy", "pyarrow.gandiva", "omniscidbe"):
try:
__import__(mod_name)
except ImportError:
sys.modules[mod_name] = types.ModuleType(
mod_name, f"fake {mod_name} for building docs"
)
if not hasattr(sys.modules["cudf"], "DataFrame"):
sys.modules["cudf"].DataFrame = type("DataFrame", (object,), {})
if not hasattr(sys.modules["omniscidbe"], "PyDbEngine"):
sys.modules["omniscidbe"].PyDbEngine = type("PyDbEngine", (object,), {})
sys.path.insert(0, os.path.abspath(os.path.join(os.path.dirname(__file__), "..")))
import modin
from modin.config.__main__ import export_config_help
configs_file_path = os.path.abspath(
os.path.join(os.path.dirname(__file__), "flow/modin/configs_help.csv")
)
# Export configs help to create configs table in the docs/flow/modin/config.rst
export_config_help(configs_file_path)
project = "Modin"
copyright = "2018-2022, Modin"
author = "Modin contributors"
# The short X.Y version
version = "{}".format(modin.__version__)
# The full version, including alpha/beta/rc tags
release = version
# -- General configuration ---------------------------------------------------
# If your documentation needs a minimal Sphinx version, state it here.
#
# needs_sphinx = '1.0'
# Add any Sphinx extension module names here, as strings. They can be
# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom
# ones.
extensions = [
"sphinx.ext.autodoc",
"sphinx.ext.napoleon",
"sphinx.ext.intersphinx",
"sphinx.ext.todo",
"sphinx.ext.mathjax",
"sphinx.ext.githubpages",
"sphinx.ext.graphviz",
"sphinxcontrib.plantuml",
"sphinx_issues",
]
# Add any paths that contain templates here, relative to this directory.
templates_path = ["_templates"]
# The suffix(es) of source filenames.
# You can specify multiple suffix as a list of string:
#
# source_suffix = ['.rst', '.md']
source_suffix = ".rst"
# The master toctree document.
master_doc = "index"
# The language for content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages.
#
# This is also used if you do content translation via gettext catalogs.
# Usually you set "language" from the command line for these cases.
language = None
# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
# This pattern also affects html_static_path and html_extra_path .
exclude_patterns = ["_build", "Thumbs.db", ".DS_Store"]
# The name of the Pygments (syntax highlighting) style to use.
pygments_style = "sphinx"
# -- Options for HTML output -------------------------------------------------
# Maps git branches to Sphinx themes
default_html_theme = "pydata_sphinx_theme"
current_branch = "nature"
# The theme to use for HTML and HTML Help pages. See the documentation for
# a list of builtin themes.
#
html_theme = "pydata_sphinx_theme"
html_favicon = "img/MODIN_ver2.ico"
html_logo = "img/MODIN_ver2.png"
# Theme options are theme-specific and customize the look and feel of a theme
# further. For a list of options available for each theme, see the
# documentation.
#
html_theme_options = {
"sidebarwidth": 270,
"collapse_navigation": False,
"navigation_depth": 4,
"show_toc_level": 2,
"github_url": "https://github.com/modin-project/modin",
"icon_links": [
{
"name": "PyPI",
"url": "https://pypi.org/project/modin",
"icon": "fab fa-python",
},
{
"name": "conda-forge",
"url": "https://anaconda.org/conda-forge/modin",
"icon": "fas fa-circle-notch",
},
{
"name": "Join the Slack",
"url": "https://modin.org/slack.html",
"icon": "fab fa-slack",
},
{
"name": "Discourse",
"url": "https://discuss.modin.org/",
"icon": "fab fa-discourse",
},
{
"name": "Mailing List",
"url": "https://groups.google.com/forum/#!forum/modin-dev",
"icon": "fas fa-envelope-square",
},
],
}
# Custom sidebar templates, must be a dictionary that maps document names
# to template names.
#
# The default sidebars (for documents that don't match any pattern) are
# defined by theme itself. Builtin themes are using these templates by
# default: ``['localtoc.html', 'relations.html', 'sourcelink.html',
# 'searchbox.html']``.
#
# The default pydata_sphinx_theme sidebar templates are
# sidebar-nav-bs.html and search-field.html.
html_sidebars = {}
issues_github_path = "modin-project/modin"
| 30.581921 | 86 | 0.656752 |
import sys
import os
import types
import ray
def noop_decorator(*args, **kwargs):
if len(args) == 1 and len(kwargs) == 0 and callable(args[0]):
# This is the case where the decorator is just @ray.remote without parameters.
return args[0]
return lambda cls_or_func: cls_or_func
ray.remote = noop_decorator
# fake modules if they're missing
for mod_name in ("cudf", "cupy", "pyarrow.gandiva", "omniscidbe"):
try:
__import__(mod_name)
except ImportError:
sys.modules[mod_name] = types.ModuleType(
mod_name, f"fake {mod_name} for building docs"
)
if not hasattr(sys.modules["cudf"], "DataFrame"):
sys.modules["cudf"].DataFrame = type("DataFrame", (object,), {})
if not hasattr(sys.modules["omniscidbe"], "PyDbEngine"):
sys.modules["omniscidbe"].PyDbEngine = type("PyDbEngine", (object,), {})
sys.path.insert(0, os.path.abspath(os.path.join(os.path.dirname(__file__), "..")))
import modin
from modin.config.__main__ import export_config_help
configs_file_path = os.path.abspath(
os.path.join(os.path.dirname(__file__), "flow/modin/configs_help.csv")
)
export_config_help(configs_file_path)
project = "Modin"
copyright = "2018-2022, Modin"
author = "Modin contributors"
version = "{}".format(modin.__version__)
release = version
extensions = [
"sphinx.ext.autodoc",
"sphinx.ext.napoleon",
"sphinx.ext.intersphinx",
"sphinx.ext.todo",
"sphinx.ext.mathjax",
"sphinx.ext.githubpages",
"sphinx.ext.graphviz",
"sphinxcontrib.plantuml",
"sphinx_issues",
]
templates_path = ["_templates"]
source_suffix = ".rst"
master_doc = "index"
language = None
exclude_patterns = ["_build", "Thumbs.db", ".DS_Store"]
pygments_style = "sphinx"
default_html_theme = "pydata_sphinx_theme"
current_branch = "nature"
html_theme = "pydata_sphinx_theme"
html_favicon = "img/MODIN_ver2.ico"
html_logo = "img/MODIN_ver2.png"
html_theme_options = {
"sidebarwidth": 270,
"collapse_navigation": False,
"navigation_depth": 4,
"show_toc_level": 2,
"github_url": "https://github.com/modin-project/modin",
"icon_links": [
{
"name": "PyPI",
"url": "https://pypi.org/project/modin",
"icon": "fab fa-python",
},
{
"name": "conda-forge",
"url": "https://anaconda.org/conda-forge/modin",
"icon": "fas fa-circle-notch",
},
{
"name": "Join the Slack",
"url": "https://modin.org/slack.html",
"icon": "fab fa-slack",
},
{
"name": "Discourse",
"url": "https://discuss.modin.org/",
"icon": "fab fa-discourse",
},
{
"name": "Mailing List",
"url": "https://groups.google.com/forum/#!forum/modin-dev",
"icon": "fas fa-envelope-square",
},
],
}
# defined by theme itself. Builtin themes are using these templates by
# default: ``['localtoc.html', 'relations.html', 'sourcelink.html',
# 'searchbox.html']``.
#
# The default pydata_sphinx_theme sidebar templates are
# sidebar-nav-bs.html and search-field.html.
html_sidebars = {}
issues_github_path = "modin-project/modin"
| true | true |
f737bc1d64812e1d2042f738413b35599b3c150d | 321 | py | Python | python/pluralsight-python-fundamental/palindrome/setup.py | renweizhukov/learning-ml | 0047d09ee2924f010dd868a454aedb7e098d6d74 | [
"Apache-2.0"
] | null | null | null | python/pluralsight-python-fundamental/palindrome/setup.py | renweizhukov/learning-ml | 0047d09ee2924f010dd868a454aedb7e098d6d74 | [
"Apache-2.0"
] | null | null | null | python/pluralsight-python-fundamental/palindrome/setup.py | renweizhukov/learning-ml | 0047d09ee2924f010dd868a454aedb7e098d6d74 | [
"Apache-2.0"
] | null | null | null | from distutils.core import setup
setup(
name='palindrome',
version='1.0',
py_modules=['palindrome'],
# metadata
author='Austin Bingham',
author_email='austin@sixty-north.com',
description='A module for finding palindromic numbers',
license='Public domain',
keywords='example',
)
| 21.4 | 59 | 0.666667 | from distutils.core import setup
setup(
name='palindrome',
version='1.0',
py_modules=['palindrome'],
author='Austin Bingham',
author_email='austin@sixty-north.com',
description='A module for finding palindromic numbers',
license='Public domain',
keywords='example',
)
| true | true |
f737bc61cb4d804038027420d7e398ec2bd3b8f3 | 8,097 | py | Python | CloneHero/clone_hero_to_generic.py | Crazychicken563/RhythmGameCharterAI | d9647007010ecc9a7ecc93d998527e578d4b12c6 | [
"MIT"
] | null | null | null | CloneHero/clone_hero_to_generic.py | Crazychicken563/RhythmGameCharterAI | d9647007010ecc9a7ecc93d998527e578d4b12c6 | [
"MIT"
] | null | null | null | CloneHero/clone_hero_to_generic.py | Crazychicken563/RhythmGameCharterAI | d9647007010ecc9a7ecc93d998527e578d4b12c6 | [
"MIT"
] | null | null | null | import os
import re
import pickle as pkl
import soundfile as sf
import numpy as np
def safeAdd(src, key, val):
    """Merge the dict `val` into src[key], creating the entry if absent.

    If `key` already exists in `src`, its dict is updated in place with the
    pairs from `val`. Otherwise `val` itself is stored under `key` (the
    stored object is `val`, not a copy).
    """
    try:
        src[key].update(val)
    except KeyError:
        # First data for this key: store the caller's dict directly.
        src[key] = val
source_dir = "clone_hero_data/clonehero-win64/songs"
def main():
for (dirpath, dirnames, filenames) in os.walk(source_dir):
name = os.path.relpath(dirpath, source_dir)
audioFilePath = None
if not filenames:
continue
if "notes.mid" in filenames:
print("we aren't parsing midi files right now")
continue
if not "notes.chart" in filenames:
print("Chart data not found! " + name)
print(filenames)
continue
else:
print("Parsing " + name)
foundOGG = False
for filename in filenames:
if (filename.endswith(".ogg")):
foundOGG = True
audioFilePath = os.path.abspath(source_dir + "\\" + name + "\\" + os.path.join(filename))
if foundOGG == False:
print("NO AUDIO FILE FOUND")
continue
with open(os.path.join(dirpath, "notes.chart"), encoding="utf-8") as notes:
scanningHeader = False
currSong = None
currSongName = None
try:
currLine = notes.readline().strip()
except UnicodeDecodeError as e:
print(e)
continue
while currLine:
if scanningHeader:
if currLine == "}":
scanningHeader = False
samplerate = currSong['sr']
songlength = currSong['sd'].shape[0]/samplerate
# yeah not dealing with 48000 right now
if samplerate == 44100:
try:
os.mkdir("clone_hero_data/output/"+currSongName)
timestamps = list(currSong['ts'].keys())
for i in range(0, len(timestamps)) :
timestamps[i] = int(timestamps[i])
timestamps.sort()
print(name, samplerate)
beatrate = 441
mapping = np.zeros(int(np.ceil(songlength*beatrate)))
currBPM = 0
for timestamp in timestamps:
data = currSong['ts'][str(timestamp)]
#print("{}".format(data))
if "B" in data:
currBPM = data["B"]
print("currBPM {}".format(currBPM))
time = float(timestamp)/float(currBPM) * 60 #static "60" BPM to match up to music
if "N" in data:
#mapping[int(np.round(time*beatrate)), data["N"]["v"]] = 1
mapping[int(np.round(time*beatrate))] = data["N"]["v"] + 1
#print(int(np.round(time*beatrate)))
for time in range(int(np.floor(songlength))):
songwindow = currSong['sd'][time*samplerate:(time+1)*samplerate]
mapwindow = mapping[time*beatrate:(time+1)*beatrate]
with open("clone_hero_data/output/"+currSongName+"/"+str(time)+".pkl", 'wb+') as f:
pkl.dump({'name':name, 'time':time, 'window':songwindow, 'label':mapwindow}, f)
except:
print("We done Fucked up :(")
for timestamp in currSong['ts']:
currSong['ts'][timestamp].pop("N", None)
currSong['ts'][timestamp].pop("S", None)
for timestamp in list(currSong['ts'].keys()):
if len(currSong['ts'][timestamp].keys()) == 0:
currSong['ts'].pop(str(timestamp))
print("end of header for {}".format(currSongName))
else:
(timestamp, data) = currLine.split("=")
timestamp = timestamp.strip()
datums = data.strip().split(" ")
if datums[0] == "N":
#These are the only things we care about for now
value = int(datums[1].strip())
duration = datums[2].strip()
if value <= 4:
# mnd will always be defined by this point since scanningHeader
# can never be true without mnd being instantiated
safeAdd(currSong['ts'], str(timestamp), {
"N": {
'v': value,
'd': int(duration)
}
})
#else:
#print("Unknown value note {}".format(datums))
elif datums[0] == "S":
# augments over 4 denote a unique type of note / note modifier
# augment 7 means that the previous note has star power.
# other augments currently unknown...
#print("star power for duration: {}".format(duration))
safeAdd(currSong['ts'], str(timestamp), {
"S": {
'v': 2,
'd': int(duration)
}
})
else:
#if any(header in currLine for header in ["[Song]"]):
# print("Found Song header")
if any(header in currLine for header in ["[SyncTrack]"]):
notes.readline() #Skip the "{"
print(audioFilePath)
songdata, samplerate = sf.read(audioFilePath)
print("sample rate: {}".format(samplerate))
currSong = {
'ts': {},
'sd': np.asarray(songdata),
'sr': samplerate
}
currLine = notes.readline().strip()
while currLine != "}":
(timestamp, data) = currLine.split("=")
timestamp = timestamp.strip()
datums = data.strip().split(" ")
if datums[0] == "B":
#print("{}".format(datums))
#print(currSong)
safeAdd(currSong['ts'], str(timestamp), {
"B": int(datums[1].strip())
})
currLine = notes.readline().strip()
elif any(header in currLine for header in ["[ExpertSingle]", "[HardSingle]", "[MediumSingle]", "[EasySingle]"]):
print("Now scanning " + currLine)
notes.readline() #Skip the "{"
scanningHeader = True
mergedPathIntoName = name.replace("\\", "_")
currSongName = os.path.join(currLine + "_" + mergedPathIntoName)
print(currSongName)
currLine = notes.readline().strip()
main() | 49.981481 | 132 | 0.395579 | import os
import re
import pickle as pkl
import soundfile as sf
import numpy as np
def safeAdd(src, key, val):
if key in src:
src[key].update(val)
else:
src[key] = val
source_dir = "clone_hero_data/clonehero-win64/songs"
def main():
for (dirpath, dirnames, filenames) in os.walk(source_dir):
name = os.path.relpath(dirpath, source_dir)
audioFilePath = None
if not filenames:
continue
if "notes.mid" in filenames:
print("we aren't parsing midi files right now")
continue
if not "notes.chart" in filenames:
print("Chart data not found! " + name)
print(filenames)
continue
else:
print("Parsing " + name)
foundOGG = False
for filename in filenames:
if (filename.endswith(".ogg")):
foundOGG = True
audioFilePath = os.path.abspath(source_dir + "\\" + name + "\\" + os.path.join(filename))
if foundOGG == False:
print("NO AUDIO FILE FOUND")
continue
with open(os.path.join(dirpath, "notes.chart"), encoding="utf-8") as notes:
scanningHeader = False
currSong = None
currSongName = None
try:
currLine = notes.readline().strip()
except UnicodeDecodeError as e:
print(e)
continue
while currLine:
if scanningHeader:
if currLine == "}":
scanningHeader = False
samplerate = currSong['sr']
songlength = currSong['sd'].shape[0]/samplerate
# yeah not dealing with 48000 right now
if samplerate == 44100:
try:
os.mkdir("clone_hero_data/output/"+currSongName)
timestamps = list(currSong['ts'].keys())
for i in range(0, len(timestamps)) :
timestamps[i] = int(timestamps[i])
timestamps.sort()
print(name, samplerate)
beatrate = 441
mapping = np.zeros(int(np.ceil(songlength*beatrate)))
currBPM = 0
for timestamp in timestamps:
data = currSong['ts'][str(timestamp)]
#print("{}".format(data))
if "B" in data:
currBPM = data["B"]
print("currBPM {}".format(currBPM))
time = float(timestamp)/float(currBPM) * 60 #static "60" BPM to match up to music
if "N" in data:
#mapping[int(np.round(time*beatrate)), data["N"]["v"]] = 1
mapping[int(np.round(time*beatrate))] = data["N"]["v"] + 1
#print(int(np.round(time*beatrate)))
for time in range(int(np.floor(songlength))):
songwindow = currSong['sd'][time*samplerate:(time+1)*samplerate]
mapwindow = mapping[time*beatrate:(time+1)*beatrate]
with open("clone_hero_data/output/"+currSongName+"/"+str(time)+".pkl", 'wb+') as f:
pkl.dump({'name':name, 'time':time, 'window':songwindow, 'label':mapwindow}, f)
except:
print("We done Fucked up :(")
for timestamp in currSong['ts']:
currSong['ts'][timestamp].pop("N", None)
currSong['ts'][timestamp].pop("S", None)
for timestamp in list(currSong['ts'].keys()):
if len(currSong['ts'][timestamp].keys()) == 0:
currSong['ts'].pop(str(timestamp))
print("end of header for {}".format(currSongName))
else:
(timestamp, data) = currLine.split("=")
timestamp = timestamp.strip()
datums = data.strip().split(" ")
if datums[0] == "N":
#These are the only things we care about for now
value = int(datums[1].strip())
duration = datums[2].strip()
if value <= 4:
# mnd will always be defined by this point since scanningHeader
# can never be true without mnd being instantiated
safeAdd(currSong['ts'], str(timestamp), {
"N": {
'v': value,
'd': int(duration)
}
})
#else:
#print("Unknown value note {}".format(datums))
elif datums[0] == "S":
# augments over 4 denote a unique type of note / note modifier
# augment 7 means that the previous note has star power.
# other augments currently unknown...
#print("star power for duration: {}".format(duration))
safeAdd(currSong['ts'], str(timestamp), {
"S": {
'v': 2,
'd': int(duration)
}
})
else:
#if any(header in currLine for header in ["[Song]"]):
# print("Found Song header")
if any(header in currLine for header in ["[SyncTrack]"]):
notes.readline() #Skip the "{"
print(audioFilePath)
songdata, samplerate = sf.read(audioFilePath)
print("sample rate: {}".format(samplerate))
currSong = {
'ts': {},
'sd': np.asarray(songdata),
'sr': samplerate
}
currLine = notes.readline().strip()
while currLine != "}":
(timestamp, data) = currLine.split("=")
timestamp = timestamp.strip()
datums = data.strip().split(" ")
if datums[0] == "B":
#print("{}".format(datums))
#print(currSong)
safeAdd(currSong['ts'], str(timestamp), {
"B": int(datums[1].strip())
})
currLine = notes.readline().strip()
elif any(header in currLine for header in ["[ExpertSingle]", "[HardSingle]", "[MediumSingle]", "[EasySingle]"]):
print("Now scanning " + currLine)
notes.readline() #Skip the "{"
scanningHeader = True
mergedPathIntoName = name.replace("\\", "_")
currSongName = os.path.join(currLine + "_" + mergedPathIntoName)
print(currSongName)
currLine = notes.readline().strip()
main() | true | true |
f737bc631fb6a75ccc6157f533c72fd643ddf548 | 2,622 | py | Python | utils/siScore_utils.py | DonghyunAhn/sadvirus | cdcc98812d613962a7003ff0c6013d0805bde024 | [
"MIT"
] | null | null | null | utils/siScore_utils.py | DonghyunAhn/sadvirus | cdcc98812d613962a7003ff0c6013d0805bde024 | [
"MIT"
] | null | null | null | utils/siScore_utils.py | DonghyunAhn/sadvirus | cdcc98812d613962a7003ff0c6013d0805bde024 | [
"MIT"
] | null | null | null | import glob
import torch
import numpy as np
from skimage import io, transform
from torchvision import transforms
import torchvision.transforms.functional as F
from torch.utils.data import Dataset
from PIL import Image
import random
class ClusterDataset(Dataset):
def __init__(self, cluster_list, dir_name, transform=None):
self.file_list = []
self.transform = transform
for cluster_num in cluster_list:
self.file_list.extend(glob.glob('../data/{}/{}/*.png'.format(dir_name, cluster_num)))
def __len__(self):
return len(self.file_list)
def __getitem__(self, idx):
image = io.imread(self.file_list[idx]) / 255.0
if self.transform:
image = self.transform(np.stack([image])).squeeze()
return image
class RandomRotate(object):
def __call__(self, images):
rotated = np.stack([self.random_rotate(x) for x in images])
return rotated
def random_rotate(self, image):
rand_num = np.random.randint(0, 4)
if rand_num == 0:
return np.rot90(image, k=1, axes=(0, 1))
elif rand_num == 1:
return np.rot90(image, k=2, axes=(0, 1))
elif rand_num == 2:
return np.rot90(image, k=3, axes=(0, 1))
else:
return image
class Normalize(object):
def __init__(self, mean, std, inplace=False):
self.mean = mean
self.std = std
self.inplace = inplace
def __call__(self, images):
normalized = np.stack([F.normalize(x, self.mean, self.std, self.inplace) for x in images])
return normalized
class Grayscale(object):
def __init__(self, prob = 1):
self.prob = prob
def __call__(self, images):
random_num = np.random.randint(100, size=1)[0]
if random_num <= self.prob * 100:
gray_images = (images[:, 0, :, :] + images[:, 1, :, :] + images[:, 2, :, :]) / 3
gray_scaled = gray_images.unsqueeze(1).repeat(1, 3, 1, 1)
return gray_scaled
else:
return images
class ToTensor(object):
def __call__(self, images):
images = images.transpose((0, 3, 1, 2))
return torch.from_numpy(images).float()
class AverageMeter(object):
def __init__(self):
self.reset()
def reset(self):
self.val = 0
self.avg = 0
self.sum = 0
self.count = 0
def update(self, val, n=1):
self.val = val
self.sum += val * n
self.count += n
self.avg = self.sum / self.count | 28.5 | 99 | 0.580854 | import glob
import torch
import numpy as np
from skimage import io, transform
from torchvision import transforms
import torchvision.transforms.functional as F
from torch.utils.data import Dataset
from PIL import Image
import random
class ClusterDataset(Dataset):
def __init__(self, cluster_list, dir_name, transform=None):
self.file_list = []
self.transform = transform
for cluster_num in cluster_list:
self.file_list.extend(glob.glob('../data/{}/{}/*.png'.format(dir_name, cluster_num)))
def __len__(self):
return len(self.file_list)
def __getitem__(self, idx):
image = io.imread(self.file_list[idx]) / 255.0
if self.transform:
image = self.transform(np.stack([image])).squeeze()
return image
class RandomRotate(object):
def __call__(self, images):
rotated = np.stack([self.random_rotate(x) for x in images])
return rotated
def random_rotate(self, image):
rand_num = np.random.randint(0, 4)
if rand_num == 0:
return np.rot90(image, k=1, axes=(0, 1))
elif rand_num == 1:
return np.rot90(image, k=2, axes=(0, 1))
elif rand_num == 2:
return np.rot90(image, k=3, axes=(0, 1))
else:
return image
class Normalize(object):
def __init__(self, mean, std, inplace=False):
self.mean = mean
self.std = std
self.inplace = inplace
def __call__(self, images):
normalized = np.stack([F.normalize(x, self.mean, self.std, self.inplace) for x in images])
return normalized
class Grayscale(object):
def __init__(self, prob = 1):
self.prob = prob
def __call__(self, images):
random_num = np.random.randint(100, size=1)[0]
if random_num <= self.prob * 100:
gray_images = (images[:, 0, :, :] + images[:, 1, :, :] + images[:, 2, :, :]) / 3
gray_scaled = gray_images.unsqueeze(1).repeat(1, 3, 1, 1)
return gray_scaled
else:
return images
class ToTensor(object):
def __call__(self, images):
images = images.transpose((0, 3, 1, 2))
return torch.from_numpy(images).float()
class AverageMeter(object):
def __init__(self):
self.reset()
def reset(self):
self.val = 0
self.avg = 0
self.sum = 0
self.count = 0
def update(self, val, n=1):
self.val = val
self.sum += val * n
self.count += n
self.avg = self.sum / self.count | true | true |
f737bca3cf5991e819f9ca0c711d9f13118a3909 | 378 | py | Python | tests/admin_client/test_get_server.py | constructpm/pysyncgateway | 653db702b2f872e18fa15ab41920276ffc07aa45 | [
"Apache-2.0"
] | 2 | 2018-04-04T17:13:25.000Z | 2018-07-21T13:30:42.000Z | tests/admin_client/test_get_server.py | constructpm/pysyncgateway | 653db702b2f872e18fa15ab41920276ffc07aa45 | [
"Apache-2.0"
] | 14 | 2018-03-22T11:35:28.000Z | 2021-11-12T17:46:54.000Z | tests/admin_client/test_get_server.py | constructpm/pysyncgateway | 653db702b2f872e18fa15ab41920276ffc07aa45 | [
"Apache-2.0"
] | 1 | 2018-06-15T13:37:00.000Z | 2018-06-15T13:37:00.000Z | def test(admin_client, syncgateway_version_str):
result = admin_client.get_server()
assert isinstance(result, dict)
assert sorted(list(result)) == [
"ADMIN",
"couchdb",
"vendor",
"version",
]
assert result["ADMIN"] is True
assert result["version"].startswith("Couchbase Sync Gateway/{}(".format(syncgateway_version_str))
| 29.076923 | 101 | 0.645503 | def test(admin_client, syncgateway_version_str):
result = admin_client.get_server()
assert isinstance(result, dict)
assert sorted(list(result)) == [
"ADMIN",
"couchdb",
"vendor",
"version",
]
assert result["ADMIN"] is True
assert result["version"].startswith("Couchbase Sync Gateway/{}(".format(syncgateway_version_str))
| true | true |
f737bcb56b7ef2f3a6e20d6da0892e43a2c83a82 | 17,863 | py | Python | env/Lib/site-packages/IPython/core/shellapp.py | andresgreen-byte/Laboratorio-1--Inversion-de-Capital | 8a4707301d19c3826c31026c4077930bcd6a8182 | [
"MIT"
] | 2 | 2022-02-26T11:19:40.000Z | 2022-03-28T08:23:25.000Z | env/Lib/site-packages/IPython/core/shellapp.py | andresgreen-byte/Laboratorio-1--Inversion-de-Capital | 8a4707301d19c3826c31026c4077930bcd6a8182 | [
"MIT"
] | null | null | null | env/Lib/site-packages/IPython/core/shellapp.py | andresgreen-byte/Laboratorio-1--Inversion-de-Capital | 8a4707301d19c3826c31026c4077930bcd6a8182 | [
"MIT"
] | 1 | 2022-03-28T09:19:34.000Z | 2022-03-28T09:19:34.000Z | # encoding: utf-8
"""
A mixin for :class:`~IPython.core.application.Application` classes that
launch InteractiveShell instances, load extensions, etc.
"""
# Copyright (c) IPython Development Team.
# Distributed under the terms of the Modified BSD License.
import glob
from itertools import chain
import os
import sys
from traitlets.config.application import boolean_flag
from traitlets.config.configurable import Configurable
from traitlets.config.loader import Config
from IPython.core.application import SYSTEM_CONFIG_DIRS, ENV_CONFIG_DIRS
from IPython.core import pylabtools
from IPython.utils.contexts import preserve_keys
from IPython.utils.path import filefind
import traitlets
from traitlets import (
Unicode, Instance, List, Bool, CaselessStrEnum, observe,
DottedObjectName,
)
from IPython.terminal import pt_inputhooks
#-----------------------------------------------------------------------------
# Aliases and Flags
#-----------------------------------------------------------------------------
# Every GUI toolkit known to the prompt_toolkit input-hook machinery, plus
# its aliases; used below to validate the --gui option.
gui_keys = tuple(sorted(pt_inputhooks.backends) + sorted(pt_inputhooks.aliases))

# Valid values for --pylab/--matplotlib; 'auto' lets matplotlib pick a backend.
backend_keys = sorted(pylabtools.backends.keys())
backend_keys.insert(0, 'auto')

shell_flags = {}


def addflag(*args):
    """Register a ``--name``/``--no-name`` boolean flag pair in ``shell_flags``.

    Thin wrapper around :func:`traitlets.config.application.boolean_flag`;
    replaces the former ``lambda`` assignment (PEP 8 discourages binding a
    lambda to a name — E731).
    """
    shell_flags.update(boolean_flag(*args))


addflag('autoindent', 'InteractiveShell.autoindent',
        'Turn on autoindenting.', 'Turn off autoindenting.'
)
addflag('automagic', 'InteractiveShell.automagic',
        """Turn on the auto calling of magic commands. Type %%magic at the
    IPython prompt for more information.""",
        'Turn off the auto calling of magic commands.'
)
addflag('pdb', 'InteractiveShell.pdb',
    "Enable auto calling the pdb debugger after every exception.",
    "Disable auto calling the pdb debugger after every exception."
)
addflag('pprint', 'PlainTextFormatter.pprint',
    "Enable auto pretty printing of results.",
    "Disable auto pretty printing of results."
)
addflag('color-info', 'InteractiveShell.color_info',
    """IPython can display information about objects via a set of functions,
    and optionally can use colors for this, syntax highlighting
    source code and various other elements. This is on by default, but can cause
    problems with some pagers. If you see such problems, you can disable the
    colours.""",
    "Disable using colors for info related things."
)
addflag('ignore-cwd', 'InteractiveShellApp.ignore_cwd',
    "Exclude the current working directory from sys.path",
    "Include the current working directory in sys.path",
)

# --nosep is not a boolean flag: it sets three separator traits at once,
# so it is registered directly with a prepared Config object.
nosep_config = Config()
nosep_config.InteractiveShell.separate_in = ''
nosep_config.InteractiveShell.separate_out = ''
nosep_config.InteractiveShell.separate_out2 = ''

shell_flags['nosep'] = (nosep_config, "Eliminate all spacing between prompts.")
shell_flags['pylab'] = (
    {'InteractiveShellApp' : {'pylab' : 'auto'}},
    """Pre-load matplotlib and numpy for interactive use with
    the default matplotlib backend."""
)
shell_flags['matplotlib'] = (
    {'InteractiveShellApp' : {'matplotlib' : 'auto'}},
    """Configure matplotlib for interactive use with
    the default matplotlib backend."""
)

# Short command-line aliases mapping option names to trait names.
# it's possible we don't want short aliases for *all* of these:
shell_aliases = dict(
    autocall='InteractiveShell.autocall',
    colors='InteractiveShell.colors',
    logfile='InteractiveShell.logfile',
    logappend='InteractiveShell.logappend',
    c='InteractiveShellApp.code_to_run',
    m='InteractiveShellApp.module_to_run',
    ext="InteractiveShellApp.extra_extensions",
    gui='InteractiveShellApp.gui',
    pylab='InteractiveShellApp.pylab',
    matplotlib='InteractiveShellApp.matplotlib',
)
# 'cache-size' contains a hyphen, so it cannot be a dict() keyword above.
shell_aliases['cache-size'] = 'InteractiveShell.cache_size'
#-----------------------------------------------------------------------------
# Main classes and functions
#-----------------------------------------------------------------------------
class InteractiveShellApp(Configurable):
    """A Mixin for applications that start InteractiveShell instances.

    Provides configurables for loading extensions and executing files
    as part of configuring a Shell environment.

    The following methods should be called by the :meth:`initialize` method
    of the subclass:

      - :meth:`init_path`
      - :meth:`init_shell` (to be implemented by the subclass)
      - :meth:`init_gui_pylab`
      - :meth:`init_extensions`
      - :meth:`init_code`

    NOTE(review): this mixin also relies on attributes supplied by the host
    Application subclass (``log``, ``extra_args``, ``ipython_dir``,
    ``profile_dir``, ``exit``) — they are not defined here.
    """

    # --- configurable traits -------------------------------------------------
    extensions = List(Unicode(),
        help="A list of dotted module names of IPython extensions to load."
    ).tag(config=True)

    extra_extensions = List(
        DottedObjectName(),
        help="""
        Dotted module name(s) of one or more IPython extensions to load.

        For specifying extra extensions to load on the command-line.

        .. versionadded:: 7.10
        """,
    ).tag(config=True)

    reraise_ipython_extension_failures = Bool(False,
        help="Reraise exceptions encountered loading IPython extensions?",
    ).tag(config=True)

    # Extensions that are always loaded (not configurable)
    default_extensions = List(Unicode(), [u'storemagic']).tag(config=False)

    hide_initial_ns = Bool(True,
        help="""Should variables loaded at startup (by startup files, exec_lines, etc.)
        be hidden from tools like %who?"""
    ).tag(config=True)

    exec_files = List(Unicode(),
        help="""List of files to run at IPython startup."""
    ).tag(config=True)
    exec_PYTHONSTARTUP = Bool(True,
        help="""Run the file referenced by the PYTHONSTARTUP environment
        variable at IPython startup."""
    ).tag(config=True)
    file_to_run = Unicode('',
        help="""A file to be run""").tag(config=True)

    exec_lines = List(Unicode(),
        help="""lines of code to run at IPython startup."""
    ).tag(config=True)
    code_to_run = Unicode('',
        help="Execute the given command string."
    ).tag(config=True)
    module_to_run = Unicode('',
        help="Run the module as a script."
    ).tag(config=True)
    gui = CaselessStrEnum(gui_keys, allow_none=True,
        help="Enable GUI event loop integration with any of {0}.".format(gui_keys)
    ).tag(config=True)
    matplotlib = CaselessStrEnum(backend_keys, allow_none=True,
        help="""Configure matplotlib for interactive use with
        the default matplotlib backend."""
    ).tag(config=True)
    pylab = CaselessStrEnum(backend_keys, allow_none=True,
        help="""Pre-load matplotlib and numpy for interactive use,
        selecting a particular matplotlib backend and loop integration.
        """
    ).tag(config=True)
    pylab_import_all = Bool(True,
        help="""If true, IPython will populate the user namespace with numpy, pylab, etc.
        and an ``import *`` is done from numpy and pylab, when using pylab mode.

        When False, pylab mode should not import any names into the user namespace.
        """
    ).tag(config=True)
    ignore_cwd = Bool(
        False,
        help="""If True, IPython will not add the current working directory to sys.path.
        When False, the current working directory is added to sys.path, allowing imports
        of modules defined in the current directory."""
    ).tag(config=True)

    # --- non-configurable state ----------------------------------------------
    # The shell instance; created by the subclass's init_shell().
    shell = Instance('IPython.core.interactiveshell.InteractiveShellABC',
                     allow_none=True)
    # whether interact-loop should start
    interact = Bool(True)
    user_ns = Instance(dict, args=None, allow_none=True)

    @observe('user_ns')
    def _user_ns_changed(self, change):
        # Keep a running shell's namespace in sync when user_ns is reassigned.
        if self.shell is not None:
            self.shell.user_ns = change['new']
            self.shell.init_user_ns()

    def init_path(self):
        """Add current working directory, '', to sys.path

        Unlike Python's default, we insert before the first `site-packages`
        or `dist-packages` directory,
        so that it is after the standard library.

        .. versionchanged:: 7.2
            Try to insert after the standard library, instead of first.

        .. versionchanged:: 8.0
            Allow optionally not including the current directory in sys.path
        """
        if '' in sys.path or self.ignore_cwd:
            return
        for idx, path in enumerate(sys.path):
            parent, last_part = os.path.split(path)
            if last_part in {'site-packages', 'dist-packages'}:
                break
        else:
            # no site-packages or dist-packages found (?!)
            # back to original behavior of inserting at the front
            idx = 0
        sys.path.insert(idx, '')

    def init_shell(self):
        """Create ``self.shell`` — must be provided by the subclass."""
        raise NotImplementedError("Override in subclasses")

    def init_gui_pylab(self):
        """Enable GUI event loop integration, taking pylab into account."""
        # pylab takes precedence over matplotlib, which takes precedence
        # over a bare --gui setting.
        enable = False
        shell = self.shell
        if self.pylab:
            enable = lambda key: shell.enable_pylab(key, import_all=self.pylab_import_all)
            key = self.pylab
        elif self.matplotlib:
            enable = shell.enable_matplotlib
            key = self.matplotlib
        elif self.gui:
            enable = shell.enable_gui
            key = self.gui
        if not enable:
            return

        try:
            r = enable(key)
        except ImportError:
            self.log.warning("Eventloop or matplotlib integration failed. Is matplotlib installed?")
            self.shell.showtraceback()
            return
        except Exception:
            self.log.warning("GUI event loop or pylab initialization failed")
            self.shell.showtraceback()
            return

        # enable_pylab/enable_matplotlib return (gui, backend, ...);
        # enable_gui returns just the gui name.
        if isinstance(r, tuple):
            gui, backend = r[:2]
            self.log.info("Enabling GUI event loop integration, "
                          "eventloop=%s, matplotlib=%s", gui, backend)
            if key == "auto":
                print("Using matplotlib backend: %s" % backend)
        else:
            gui = r
            self.log.info("Enabling GUI event loop integration, "
                          "eventloop=%s", gui)

    def init_extensions(self):
        """Load all IPython extensions in IPythonApp.extensions.

        This uses the :meth:`ExtensionManager.load_extensions` to load all
        the extensions listed in ``self.extensions``.
        """
        try:
            self.log.debug("Loading IPython extensions...")
            extensions = (
                self.default_extensions + self.extensions + self.extra_extensions
            )
            for ext in extensions:
                try:
                    self.log.info("Loading IPython extension: %s" % ext)
                    self.shell.extension_manager.load_extension(ext)
                except:
                    # A broken extension must not abort startup; log (or
                    # re-raise when configured to) and continue with the rest.
                    if self.reraise_ipython_extension_failures:
                        raise
                    msg = ("Error in loading extension: {ext}\n"
                           "Check your config files in {location}".format(
                               ext=ext,
                               location=self.profile_dir.location
                           ))
                    self.log.warning(msg, exc_info=True)
        except:
            if self.reraise_ipython_extension_failures:
                raise
            self.log.warning("Unknown error in loading extensions:", exc_info=True)

    def init_code(self):
        """run the pre-flight code, specified via exec_lines"""
        # Order matters: startup files, then exec_lines, then exec_files.
        self._run_startup_files()
        self._run_exec_lines()
        self._run_exec_files()

        # Hide variables defined here from %who etc.
        if self.hide_initial_ns:
            self.shell.user_ns_hidden.update(self.shell.user_ns)

        # command-line execution (ipython -i script.py, ipython -m module)
        # should *not* be excluded from %whos
        self._run_cmd_line_code()
        self._run_module()

        # flush output, so it won't be attached to the first cell
        sys.stdout.flush()
        sys.stderr.flush()

        # Snapshot of loaded modules after startup; presumably consumed by
        # the shell elsewhere (e.g. for reset) — not used in this mixin.
        self.shell._sys_modules_keys = set(sys.modules.keys())

    def _run_exec_lines(self):
        """Run lines of code in IPythonApp.exec_lines in the user's namespace."""
        if not self.exec_lines:
            return
        try:
            self.log.debug("Running code from IPythonApp.exec_lines...")
            for line in self.exec_lines:
                try:
                    self.log.info("Running code in user namespace: %s" %
                                  line)
                    # store_history=False keeps startup code out of %history.
                    self.shell.run_cell(line, store_history=False)
                except:
                    self.log.warning("Error in executing line in user "
                                     "namespace: %s" % line)
                    self.shell.showtraceback()
        except:
            self.log.warning("Unknown error in handling IPythonApp.exec_lines:")
            self.shell.showtraceback()

    def _exec_file(self, fname, shell_futures=False):
        """Locate *fname* (cwd, then ipython_dir) and execute it.

        ``.ipy``/``.ipynb`` files go through ``safe_execfile_ipy``; anything
        else is treated as plain Python, regardless of extension.
        """
        try:
            full_filename = filefind(fname, [u'.', self.ipython_dir])
        except IOError:
            self.log.warning("File not found: %r"%fname)
            return
        # Make sure that the running script gets a proper sys.argv as if it
        # were run from a system shell.
        save_argv = sys.argv
        sys.argv = [full_filename] + self.extra_args[1:]
        try:
            if os.path.isfile(full_filename):
                self.log.info("Running file in user namespace: %s" %
                              full_filename)
                # Ensure that __file__ is always defined to match Python
                # behavior.
                with preserve_keys(self.shell.user_ns, '__file__'):
                    self.shell.user_ns['__file__'] = fname
                    if full_filename.endswith('.ipy') or full_filename.endswith('.ipynb'):
                        self.shell.safe_execfile_ipy(full_filename,
                                                     shell_futures=shell_futures)
                    else:
                        # default to python, even without extension
                        self.shell.safe_execfile(full_filename,
                                                 self.shell.user_ns,
                                                 shell_futures=shell_futures,
                                                 raise_exceptions=True)
        finally:
            # Always restore the original argv, even if execution raised.
            sys.argv = save_argv

    def _run_startup_files(self):
        """Run files from profile startup directory"""
        # Profile startup dir first in the list, but executed last (the loop
        # below iterates startup_dirs in reverse), so profile files win.
        startup_dirs = [self.profile_dir.startup_dir] + [
            os.path.join(p, 'startup') for p in chain(ENV_CONFIG_DIRS, SYSTEM_CONFIG_DIRS)
        ]
        startup_files = []

        # PYTHONSTARTUP is honored only for interactive-style startup: it is
        # skipped when a file/command/module was given on the command line.
        if self.exec_PYTHONSTARTUP and os.environ.get('PYTHONSTARTUP', False) and \
                not (self.file_to_run or self.code_to_run or self.module_to_run):
            python_startup = os.environ['PYTHONSTARTUP']
            self.log.debug("Running PYTHONSTARTUP file %s...", python_startup)
            try:
                self._exec_file(python_startup)
            except:
                self.log.warning("Unknown error in handling PYTHONSTARTUP file %s:", python_startup)
                self.shell.showtraceback()
        for startup_dir in startup_dirs[::-1]:
            startup_files += glob.glob(os.path.join(startup_dir, '*.py'))
            startup_files += glob.glob(os.path.join(startup_dir, '*.ipy'))
        if not startup_files:
            return

        # NOTE(review): startup_dir here is the last directory scanned, even
        # though startup_files spans all of them — the log line is approximate.
        self.log.debug("Running startup files from %s...", startup_dir)
        try:
            for fname in sorted(startup_files):
                self._exec_file(fname)
        except:
            self.log.warning("Unknown error in handling startup files:")
            self.shell.showtraceback()

    def _run_exec_files(self):
        """Run files from IPythonApp.exec_files"""
        if not self.exec_files:
            return

        self.log.debug("Running files in IPythonApp.exec_files...")
        try:
            for fname in self.exec_files:
                self._exec_file(fname)
        except:
            self.log.warning("Unknown error in handling IPythonApp.exec_files:")
            self.shell.showtraceback()

    def _run_cmd_line_code(self):
        """Run code or file specified at the command-line"""
        if self.code_to_run:
            line = self.code_to_run
            try:
                self.log.info("Running code given at command line (c=): %s" %
                              line)
                self.shell.run_cell(line, store_history=False)
            except:
                self.log.warning("Error in executing line in user namespace: %s" %
                                 line)
                self.shell.showtraceback()
                if not self.interact:
                    self.exit(1)

        # Like Python itself, ignore the second if the first of these is present
        elif self.file_to_run:
            fname = self.file_to_run
            # Running a directory means running its __main__.py, as Python does.
            if os.path.isdir(fname):
                fname = os.path.join(fname, "__main__.py")
            if not os.path.exists(fname):
                self.log.warning("File '%s' doesn't exist", fname)
                if not self.interact:
                    self.exit(2)
            try:
                self._exec_file(fname, shell_futures=True)
            except:
                self.shell.showtraceback(tb_offset=4)
                if not self.interact:
                    self.exit(1)

    def _run_module(self):
        """Run module specified at the command-line."""
        if self.module_to_run:
            # Make sure that the module gets a proper sys.argv as if it were
            # run using `python -m`.
            save_argv = sys.argv
            sys.argv = [sys.executable] + self.extra_args
            try:
                self.shell.safe_run_module(self.module_to_run,
                                           self.shell.user_ns)
            finally:
                sys.argv = save_argv
| 39.432671 | 100 | 0.600459 |
import glob
from itertools import chain
import os
import sys
from traitlets.config.application import boolean_flag
from traitlets.config.configurable import Configurable
from traitlets.config.loader import Config
from IPython.core.application import SYSTEM_CONFIG_DIRS, ENV_CONFIG_DIRS
from IPython.core import pylabtools
from IPython.utils.contexts import preserve_keys
from IPython.utils.path import filefind
import traitlets
from traitlets import (
Unicode, Instance, List, Bool, CaselessStrEnum, observe,
DottedObjectName,
)
from IPython.terminal import pt_inputhooks
gui_keys = tuple(sorted(pt_inputhooks.backends) + sorted(pt_inputhooks.aliases))
backend_keys = sorted(pylabtools.backends.keys())
backend_keys.insert(0, 'auto')
shell_flags = {}
addflag = lambda *args: shell_flags.update(boolean_flag(*args))
addflag('autoindent', 'InteractiveShell.autoindent',
'Turn on autoindenting.', 'Turn off autoindenting.'
)
addflag('automagic', 'InteractiveShell.automagic',
"""Turn on the auto calling of magic commands. Type %%magic at the
IPython prompt for more information.""",
'Turn off the auto calling of magic commands.'
)
addflag('pdb', 'InteractiveShell.pdb',
"Enable auto calling the pdb debugger after every exception.",
"Disable auto calling the pdb debugger after every exception."
)
addflag('pprint', 'PlainTextFormatter.pprint',
"Enable auto pretty printing of results.",
"Disable auto pretty printing of results."
)
addflag('color-info', 'InteractiveShell.color_info',
"""IPython can display information about objects via a set of functions,
and optionally can use colors for this, syntax highlighting
source code and various other elements. This is on by default, but can cause
problems with some pagers. If you see such problems, you can disable the
colours.""",
"Disable using colors for info related things."
)
addflag('ignore-cwd', 'InteractiveShellApp.ignore_cwd',
"Exclude the current working directory from sys.path",
"Include the current working directory in sys.path",
)
nosep_config = Config()
nosep_config.InteractiveShell.separate_in = ''
nosep_config.InteractiveShell.separate_out = ''
nosep_config.InteractiveShell.separate_out2 = ''
shell_flags['nosep']=(nosep_config, "Eliminate all spacing between prompts.")
shell_flags['pylab'] = (
{'InteractiveShellApp' : {'pylab' : 'auto'}},
"""Pre-load matplotlib and numpy for interactive use with
the default matplotlib backend."""
)
shell_flags['matplotlib'] = (
{'InteractiveShellApp' : {'matplotlib' : 'auto'}},
"""Configure matplotlib for interactive use with
the default matplotlib backend."""
)
shell_aliases = dict(
autocall='InteractiveShell.autocall',
colors='InteractiveShell.colors',
logfile='InteractiveShell.logfile',
logappend='InteractiveShell.logappend',
c='InteractiveShellApp.code_to_run',
m='InteractiveShellApp.module_to_run',
ext="InteractiveShellApp.extra_extensions",
gui='InteractiveShellApp.gui',
pylab='InteractiveShellApp.pylab',
matplotlib='InteractiveShellApp.matplotlib',
)
shell_aliases['cache-size'] = 'InteractiveShell.cache_size'
class InteractiveShellApp(Configurable):
    """Configurable traits and startup helpers for applications that embed
    an ``InteractiveShell``.

    Provides the ``init_*`` methods that run startup files, exec lines,
    extensions and GUI/matplotlib integration against ``self.shell``.
    ``init_shell`` must be implemented by subclasses.
    """

    # --- configurable traits -------------------------------------------
    extensions = List(Unicode(),
        help="A list of dotted module names of IPython extensions to load."
    ).tag(config=True)
    extra_extensions = List(
        DottedObjectName(),
        help="""
        Dotted module name(s) of one or more IPython extensions to load.
        For specifying extra extensions to load on the command-line.
        .. versionadded:: 7.10
        """,
    ).tag(config=True)
    reraise_ipython_extension_failures = Bool(False,
        help="Reraise exceptions encountered loading IPython extensions?",
    ).tag(config=True)
    # Extensions always loaded, before user-configured ones; not configurable.
    default_extensions = List(Unicode(), [u'storemagic']).tag(config=False)
    hide_initial_ns = Bool(True,
        help="""Should variables loaded at startup (by startup files, exec_lines, etc.)
        be hidden from tools like %who?"""
    ).tag(config=True)
    exec_files = List(Unicode(),
        help="""List of files to run at IPython startup."""
    ).tag(config=True)
    exec_PYTHONSTARTUP = Bool(True,
        help="""Run the file referenced by the PYTHONSTARTUP environment
        variable at IPython startup."""
    ).tag(config=True)
    file_to_run = Unicode('',
        help="""A file to be run""").tag(config=True)
    exec_lines = List(Unicode(),
        help="""lines of code to run at IPython startup."""
    ).tag(config=True)
    code_to_run = Unicode('',
        help="Execute the given command string."
    ).tag(config=True)
    module_to_run = Unicode('',
        help="Run the module as a script."
    ).tag(config=True)
    gui = CaselessStrEnum(gui_keys, allow_none=True,
        help="Enable GUI event loop integration with any of {0}.".format(gui_keys)
    ).tag(config=True)
    matplotlib = CaselessStrEnum(backend_keys, allow_none=True,
        help="""Configure matplotlib for interactive use with
        the default matplotlib backend."""
    ).tag(config=True)
    pylab = CaselessStrEnum(backend_keys, allow_none=True,
        help="""Pre-load matplotlib and numpy for interactive use,
        selecting a particular matplotlib backend and loop integration.
        """
    ).tag(config=True)
    pylab_import_all = Bool(True,
        help="""If true, IPython will populate the user namespace with numpy, pylab, etc.
        and an ``import *`` is done from numpy and pylab, when using pylab mode.
        When False, pylab mode should not import any names into the user namespace.
        """
    ).tag(config=True)
    ignore_cwd = Bool(
        False,
        help="""If True, IPython will not add the current working directory to sys.path.
        When False, the current working directory is added to sys.path, allowing imports
        of modules defined in the current directory."""
    ).tag(config=True)
    # The shell instance this app drives; set by subclasses in init_shell().
    shell = Instance('IPython.core.interactiveshell.InteractiveShellABC',
                     allow_none=True)
    # Whether to drop into an interactive prompt after running files/code.
    interact = Bool(True)
    user_ns = Instance(dict, args=None, allow_none=True)

    @observe('user_ns')
    def _user_ns_changed(self, change):
        """Propagate a replaced user namespace dict into the shell."""
        if self.shell is not None:
            self.shell.user_ns = change['new']
            self.shell.init_user_ns()

    def init_path(self):
        """Add '' (the current working directory) to sys.path.

        Inserted just before the first site-packages/dist-packages entry if
        one exists, otherwise at position 0; skipped entirely when '' is
        already present or ``ignore_cwd`` is set.
        """
        if '' in sys.path or self.ignore_cwd:
            return
        for idx, path in enumerate(sys.path):
            parent, last_part = os.path.split(path)
            if last_part in {'site-packages', 'dist-packages'}:
                break
        else:
            # for-else: no site/dist-packages entry found, insert at front
            idx = 0
        sys.path.insert(idx, '')

    def init_shell(self):
        """Create ``self.shell``; must be provided by concrete subclasses."""
        raise NotImplementedError("Override in subclasses")

    def init_gui_pylab(self):
        """Enable GUI event loop integration, taking pylab/matplotlib into account.

        Precedence: pylab > matplotlib > gui.  Failures are logged and shown
        as tracebacks but never propagate.
        """
        enable = False
        shell = self.shell
        if self.pylab:
            enable = lambda key: shell.enable_pylab(key, import_all=self.pylab_import_all)
            key = self.pylab
        elif self.matplotlib:
            enable = shell.enable_matplotlib
            key = self.matplotlib
        elif self.gui:
            enable = shell.enable_gui
            key = self.gui
        if not enable:
            return
        try:
            r = enable(key)
        except ImportError:
            self.log.warning("Eventloop or matplotlib integration failed. Is matplotlib installed?")
            self.shell.showtraceback()
            return
        except Exception:
            self.log.warning("GUI event loop or pylab initialization failed")
            self.shell.showtraceback()
            return
        if isinstance(r, tuple):
            # matplotlib-enabled paths return (gui, backend)
            gui, backend = r[:2]
            self.log.info("Enabling GUI event loop integration, "
                          "eventloop=%s, matplotlib=%s", gui, backend)
            if key == "auto":
                print("Using matplotlib backend: %s" % backend)
        else:
            # plain-gui path returns just the gui name
            gui = r
            self.log.info("Enabling GUI event loop integration, "
                          "eventloop=%s", gui)

    def init_extensions(self):
        """Load all IPython extensions (default + configured + extra).

        A failure to load one extension is logged and does not stop the
        others, unless ``reraise_ipython_extension_failures`` is set.
        """
        try:
            self.log.debug("Loading IPython extensions...")
            extensions = (
                self.default_extensions + self.extensions + self.extra_extensions
            )
            for ext in extensions:
                try:
                    self.log.info("Loading IPython extension: %s" % ext)
                    self.shell.extension_manager.load_extension(ext)
                except:
                    if self.reraise_ipython_extension_failures:
                        raise
                    msg = ("Error in loading extension: {ext}\n"
                           "Check your config files in {location}".format(
                               ext=ext,
                               location=self.profile_dir.location
                           ))
                    self.log.warning(msg, exc_info=True)
        except:
            if self.reraise_ipython_extension_failures:
                raise
            self.log.warning("Unknown error in loading extensions:", exc_info=True)

    def init_code(self):
        """Run all pre-flight code: startup files, exec_lines/files, then
        command-line code or module, flushing stdio afterwards."""
        self._run_startup_files()
        self._run_exec_lines()
        self._run_exec_files()
        # Hide startup-created names from tools like %who, if requested.
        if self.hide_initial_ns:
            self.shell.user_ns_hidden.update(self.shell.user_ns)
        self._run_cmd_line_code()
        self._run_module()
        # flush output, so itwon't be attached to the first cell
        sys.stdout.flush()
        sys.stderr.flush()
        # Snapshot sys.modules so later tooling can tell what startup loaded.
        self.shell._sys_modules_keys = set(sys.modules.keys())

    def _run_exec_lines(self):
        """Run lines of code in IPythonApp.exec_lines in the user's namespace."""
        if not self.exec_lines:
            return
        try:
            self.log.debug("Running code from IPythonApp.exec_lines...")
            for line in self.exec_lines:
                try:
                    self.log.info("Running code in user namespace: %s" %
                                  line)
                    self.shell.run_cell(line, store_history=False)
                except:
                    self.log.warning("Error in executing line in user "
                                     "namespace: %s" % line)
                    self.shell.showtraceback()
        except:
            self.log.warning("Unknown error in handling IPythonApp.exec_lines:")
            self.shell.showtraceback()

    def _exec_file(self, fname, shell_futures=False):
        """Execute a single file (.py, .ipy or .ipynb) in the user namespace,
        temporarily giving it a script-like sys.argv."""
        try:
            full_filename = filefind(fname, [u'.', self.ipython_dir])
        except IOError:
            self.log.warning("File not found: %r"%fname)
            return
        # Make sure that the running script gets a proper sys.argv as if it
        # were run from a system shell.
        save_argv = sys.argv
        sys.argv = [full_filename] + self.extra_args[1:]
        try:
            if os.path.isfile(full_filename):
                self.log.info("Running file in user namespace: %s" %
                              full_filename)
                # Ensure that __file__ is always defined to match Python
                # behavior.
                with preserve_keys(self.shell.user_ns, '__file__'):
                    self.shell.user_ns['__file__'] = fname
                    if full_filename.endswith('.ipy') or full_filename.endswith('.ipynb'):
                        self.shell.safe_execfile_ipy(full_filename,
                                                     shell_futures=shell_futures)
                    else:
                        # default to python, even without extension
                        self.shell.safe_execfile(full_filename,
                                                 self.shell.user_ns,
                                                 shell_futures=shell_futures,
                                                 raise_exceptions=True)
        finally:
            sys.argv = save_argv

    def _run_startup_files(self):
        """Run PYTHONSTARTUP (unless a file/code/module is being run) and all
        .py/.ipy files from the profile and system startup directories."""
        startup_dirs = [self.profile_dir.startup_dir] + [
            os.path.join(p, 'startup') for p in chain(ENV_CONFIG_DIRS, SYSTEM_CONFIG_DIRS)
        ]
        startup_files = []
        if self.exec_PYTHONSTARTUP and os.environ.get('PYTHONSTARTUP', False) and \
                not (self.file_to_run or self.code_to_run or self.module_to_run):
            python_startup = os.environ['PYTHONSTARTUP']
            self.log.debug("Running PYTHONSTARTUP file %s...", python_startup)
            try:
                self._exec_file(python_startup)
            except:
                self.log.warning("Unknown error in handling PYTHONSTARTUP file %s:", python_startup)
                self.shell.showtraceback()
        # Reversed so profile startup files (first in the list) win by
        # running last.
        for startup_dir in startup_dirs[::-1]:
            startup_files += glob.glob(os.path.join(startup_dir, '*.py'))
            startup_files += glob.glob(os.path.join(startup_dir, '*.ipy'))
        if not startup_files:
            return
        self.log.debug("Running startup files from %s...", startup_dir)
        try:
            for fname in sorted(startup_files):
                self._exec_file(fname)
        except:
            self.log.warning("Unknown error in handling startup files:")
            self.shell.showtraceback()

    def _run_exec_files(self):
        """Run files from IPythonApp.exec_files."""
        if not self.exec_files:
            return
        self.log.debug("Running files in IPythonApp.exec_files...")
        try:
            for fname in self.exec_files:
                self._exec_file(fname)
        except:
            self.log.warning("Unknown error in handling IPythonApp.exec_files:")
            self.shell.showtraceback()

    def _run_cmd_line_code(self):
        """Run code or a file given on the command line (c= wins over a file)."""
        if self.code_to_run:
            line = self.code_to_run
            try:
                self.log.info("Running code given at command line (c=): %s" %
                              line)
                self.shell.run_cell(line, store_history=False)
            except:
                self.log.warning("Error in executing line in user namespace: %s" %
                                 line)
                self.shell.showtraceback()
                if not self.interact:
                    self.exit(1)
        # Like Python itself, ignore the second if the first of these is present
        elif self.file_to_run:
            fname = self.file_to_run
            if os.path.isdir(fname):
                # a directory is run as its __main__.py, like `python dir/`
                fname = os.path.join(fname, "__main__.py")
            if not os.path.exists(fname):
                self.log.warning("File '%s' doesn't exist", fname)
                if not self.interact:
                    self.exit(2)
            try:
                self._exec_file(fname, shell_futures=True)
            except:
                self.shell.showtraceback(tb_offset=4)
                if not self.interact:
                    self.exit(1)

    def _run_module(self):
        """Run ``module_to_run`` as a script (like `python -m`), restoring
        sys.argv afterwards."""
        if self.module_to_run:
            save_argv = sys.argv
            sys.argv = [sys.executable] + self.extra_args
            try:
                self.shell.safe_run_module(self.module_to_run,
                                           self.shell.user_ns)
            finally:
                sys.argv = save_argv
| true | true |
f737be41539176acb9dc0dc3f1d6dc4462e252f6 | 738 | py | Python | setup.py | IgnacioGarrido/Gavl | 453fa0e1da5f43696cb4f943e749a8bfd16c6f87 | [
"MIT"
] | null | null | null | setup.py | IgnacioGarrido/Gavl | 453fa0e1da5f43696cb4f943e749a8bfd16c6f87 | [
"MIT"
] | null | null | null | setup.py | IgnacioGarrido/Gavl | 453fa0e1da5f43696cb4f943e749a8bfd16c6f87 | [
"MIT"
] | null | null | null | import setuptools
# Use the README as the package's long description on PyPI.
with open("README.md", "r") as fh:
    long_description = fh.read()
# Package metadata for the Gavl genetic-algorithm framework.
setuptools.setup(
    name="Gavl-Ignacio-Garrido-Botella", # Replace with your own username
    version="1.0.1",
    author="Ignacio Garrido Botella",
    author_email="ignaciogabo95@gmail.com",
    description="Framework to launch a genetic algorithm with chromosomes with variable length.",
    long_description=long_description,
    long_description_content_type="text/markdown",
    url="https://github.com/IgnacioGarrido/Gavl.git",
    packages=setuptools.find_packages(),
    classifiers=[
        "Programming Language :: Python :: 3",
        "License :: OSI Approved :: MIT License",
        "Operating System :: OS Independent",
    ]
) | 35.142857 | 97 | 0.693767 | import setuptools
with open("README.md", "r") as fh:
long_description = fh.read()
setuptools.setup(
name="Gavl-Ignacio-Garrido-Botella",
version="1.0.1",
author="Ignacio Garrido Botella",
author_email="ignaciogabo95@gmail.com",
description="Framework to launch a genetic algorithm with chromosomes with variable length.",
long_description=long_description,
long_description_content_type="text/markdown",
url="https://github.com/IgnacioGarrido/Gavl.git",
packages=setuptools.find_packages(),
classifiers=[
"Programming Language :: Python :: 3",
"License :: OSI Approved :: MIT License",
"Operating System :: OS Independent",
]
) | true | true |
f737bf2f8e1560b7e9d9155861d2bbfdb2f267f1 | 9,445 | py | Python | vmraid/parallel_test_runner.py | sowrisurya/vmraid | f833e00978019dad87af80b41279c0146c063ed5 | [
"MIT"
] | null | null | null | vmraid/parallel_test_runner.py | sowrisurya/vmraid | f833e00978019dad87af80b41279c0146c063ed5 | [
"MIT"
] | null | null | null | vmraid/parallel_test_runner.py | sowrisurya/vmraid | f833e00978019dad87af80b41279c0146c063ed5 | [
"MIT"
] | null | null | null | import json
import os
import re
import sys
import time
import unittest
import click
import vmraid
import requests
from .test_runner import (SLOW_TEST_THRESHOLD, make_test_records, set_test_email_config)
# If this module runs inside an active click command, force colored output
# (get_current_context(True) returns None instead of raising when absent).
click_ctx = click.get_current_context(True)
if click_ctx:
    click_ctx.color = True
class ParallelTestRunner():
    """Run one build's slice of an app's unittest files against a vmraid site.

    The full test-file list is split into ``total_builds`` chunks and this
    instance runs chunk number ``build_number`` (1-based).  Construction
    immediately sets up the site and runs the tests.
    """

    def __init__(self, app, site, build_number=1, total_builds=1, with_coverage=False):
        self.app = app
        self.site = site
        self.with_coverage = with_coverage
        # cint coerces strings from the CLI/env; fall back to build 1
        self.build_number = vmraid.utils.cint(build_number) or 1
        self.total_builds = vmraid.utils.cint(total_builds)
        self.setup_test_site()
        self.run_tests()

    def setup_test_site(self):
        """Initialize and connect the vmraid site, put it in test mode."""
        vmraid.init(site=self.site)
        if not vmraid.db:
            vmraid.connect()
        vmraid.flags.in_test = True
        vmraid.clear_cache()
        # keep the scheduler from firing jobs mid-test-run
        vmraid.utils.scheduler.disable_scheduler()
        set_test_email_config()
        self.before_test_setup()

    def before_test_setup(self):
        """Run the app's before_tests hooks and create global test records."""
        start_time = time.time()
        for fn in vmraid.get_hooks("before_tests", app_name=self.app):
            vmraid.get_attr(fn)()
        test_module = vmraid.get_module(f'{self.app}.tests')
        if hasattr(test_module, "global_test_dependencies"):
            for doctype in test_module.global_test_dependencies:
                make_test_records(doctype)
        elapsed = time.time() - start_time
        elapsed = click.style(f' ({elapsed:.03}s)', fg='red')
        click.echo(f'Before Test {elapsed}')

    def run_tests(self):
        """Run every test file assigned to this build, then report."""
        self.test_result = ParallelTestResult(stream=sys.stderr, descriptions=True, verbosity=2)
        self.start_coverage()
        for test_file_info in self.get_test_file_list():
            self.run_tests_for_file(test_file_info)
        self.save_coverage()
        self.print_result()

    def run_tests_for_file(self, file_info):
        """Load one (path, filename) test module and run it into test_result."""
        if not file_info: return
        vmraid.set_user('Administrator')
        path, filename = file_info
        module = self.get_module(path, filename)
        self.create_test_dependency_records(module, path, filename)
        test_suite = unittest.TestSuite()
        module_test_cases = unittest.TestLoader().loadTestsFromModule(module)
        test_suite.addTest(module_test_cases)
        # calling the suite runs it against our result collector
        test_suite(self.test_result)

    def create_test_dependency_records(self, module, path, filename):
        """Create test records for the module's declared dependencies and,
        for doctype tests, for the doctype being tested itself."""
        if hasattr(module, "test_dependencies"):
            for doctype in module.test_dependencies:
                make_test_records(doctype)
        if os.path.basename(os.path.dirname(path)) == "doctype":
            # test_data_migration_connector.py > data_migration_connector.json
            test_record_filename = re.sub('^test_', '', filename).replace(".py", ".json")
            test_record_file_path = os.path.join(path, test_record_filename)
            if os.path.exists(test_record_file_path):
                with open(test_record_file_path, 'r') as f:
                    doc = json.loads(f.read())
                    doctype = doc["name"]
                    make_test_records(doctype)

    def get_module(self, path, filename):
        """Translate a (path, filename) pair into an imported dotted module."""
        app_path = vmraid.get_pymodule_path(self.app)
        relative_path = os.path.relpath(path, app_path)
        if relative_path == '.':
            # file sits directly in the app package root
            module_name = self.app
        else:
            relative_path = relative_path.replace('/', '.')
            module_name = os.path.splitext(filename)[0]
            module_name = f'{self.app}.{relative_path}.{module_name}'
        return vmraid.get_module(module_name)

    def print_result(self):
        """Print errors/summary; exit non-zero on CI if anything failed."""
        self.test_result.printErrors()
        click.echo(self.test_result)
        if self.test_result.failures or self.test_result.errors:
            if os.environ.get('CI'):
                sys.exit(1)

    def start_coverage(self):
        """Begin coverage measurement for the app under test, if enabled."""
        if self.with_coverage:
            from coverage import Coverage
            from vmraid.utils import get_bench_path

            # Generate coverage report only for app that is being tested
            source_path = os.path.join(get_bench_path(), 'apps', self.app)
            omit = ['*.html', '*.js', '*.xml', '*.css', '*.less', '*.scss',
                    '*.vue', '*/doctype/*/*_dashboard.py', '*/patches/*']
            if self.app == 'vmraid':
                omit.append('*/commands/*')
            self.coverage = Coverage(source=[source_path], omit=omit)
            self.coverage.start()

    def save_coverage(self):
        """Stop and persist coverage data, if it was started."""
        if not self.with_coverage:
            return
        self.coverage.stop()
        self.coverage.save()

    def get_test_file_list(self):
        """Return this build's chunk of the app's test files."""
        test_list = get_all_tests(self.app)
        split_size = vmraid.utils.ceil(len(test_list) / self.total_builds)
        # [1,2,3,4,5,6] to [[1,2], [3,4], [4,6]] if split_size is 2
        test_chunks = [test_list[x:x+split_size] for x in range(0, len(test_list), split_size)]
        return test_chunks[self.build_number - 1]
class ParallelTestResult(unittest.TextTestResult):
    """Test result collector that prints one colored click status line per
    test, grouped under a heading for each test class."""

    def _echo_status(self, symbol, color, test, suffix=''):
        # Shared per-test status line: two-space indent, colored symbol,
        # method name, optional timing suffix.
        click.echo(f"  {click.style(symbol, fg=color)} {self.getTestMethodName(test)}{suffix}")

    def startTest(self, test):
        self._started_at = time.time()
        # Deliberately bypass TextTestResult.startTest (which would print the
        # description) and call TestResult.startTest directly.
        super(unittest.TextTestResult, self).startTest(test)
        test_class = unittest.util.strclass(test.__class__)
        if getattr(self, 'current_test_class', None) != test_class:
            # first test of a new class: print a heading
            click.echo(f"\n{unittest.util.strclass(test.__class__)}")
            self.current_test_class = test_class

    def getTestMethodName(self, test):
        """Return the test's method name, falling back to str(test)."""
        try:
            return test._testMethodName
        except AttributeError:
            return str(test)

    def addSuccess(self, test):
        super(unittest.TextTestResult, self).addSuccess(test)
        elapsed = time.time() - self._started_at
        suffix = ''
        if elapsed >= SLOW_TEST_THRESHOLD:
            # flag slow tests with their runtime in red
            suffix = click.style(f' ({elapsed:.03}s)', fg='red')
        self._echo_status(' ✔ ', 'green', test, suffix)

    def addError(self, test, err):
        super(unittest.TextTestResult, self).addError(test, err)
        self._echo_status(' ✖ ', 'red', test)

    def addFailure(self, test, err):
        super(unittest.TextTestResult, self).addFailure(test, err)
        self._echo_status(' ✖ ', 'red', test)

    def addSkip(self, test, reason):
        super(unittest.TextTestResult, self).addSkip(test, reason)
        self._echo_status(' = ', 'white', test)

    def addExpectedFailure(self, test, err):
        super(unittest.TextTestResult, self).addExpectedFailure(test, err)
        self._echo_status(' ✖ ', 'red', test)

    def addUnexpectedSuccess(self, test):
        super(unittest.TextTestResult, self).addUnexpectedSuccess(test)
        self._echo_status(' ✔ ', 'green', test)

    def printErrors(self):
        click.echo('\n')
        for flavour, error_list in ((' ERROR ', self.errors), (' FAIL ', self.failures)):
            self.printErrorList(flavour, error_list, 'red')

    def printErrorList(self, flavour, errors, color):
        for test, traceback_text in errors:
            click.echo(self.separator1)
            click.echo(f"{click.style(flavour, bg=color)} {self.getDescription(test)}")
            click.echo(self.separator2)
            click.echo(traceback_text)

    def __str__(self):
        return "Tests: {}, Failing: {}, Errors: {}".format(
            self.testsRun, len(self.failures), len(self.errors)
        )
def get_all_tests(app):
    """Collect every test file under *app*'s package directory.

    Returns ``[directory_path, filename]`` pairs for files matching
    ``test_*.py`` (excluding ``test_runner.py``), in a deterministic order.
    Version-control, cache and boilerplate directories are skipped.
    """
    skip_dirs = ('locals', '.git', 'public', '__pycache__')
    boilerplate_marker = os.path.sep.join(["doctype", "doctype", "boilerplate"])
    collected = []
    for path, folders, files in os.walk(vmraid.get_pymodule_path(app)):
        # Prune in place so os.walk never descends into skipped folders;
        # sorting keeps traversal order predictable across machines.
        folders[:] = sorted(folder for folder in folders if folder not in skip_dirs)
        files.sort()
        if boilerplate_marker in path:
            # /doctype/doctype/boilerplate/ holds templates, not real tests
            continue
        collected.extend(
            [path, filename]
            for filename in files
            if filename.startswith("test_")
            and filename.endswith(".py")
            and filename != 'test_runner.py'
        )
    return collected
class ParallelTestWithOrchestrator(ParallelTestRunner):
    '''
    This can be used to balance-out test time across multiple instances
    This is dependent on external orchestrator which returns next test to run

    orchestrator endpoints
    - register-instance (<build_id>, <instance_id>, test_spec_list)
    - get-next-test-spec (<build_id>, <instance_id>)
    - test-completed (<build_id>, <instance_id>)
    '''

    def __init__(self, app, site, with_coverage=False):
        # ORCHESTRATOR_URL and CI_BUILD_ID are mandatory; a missing value
        # aborts the process before any test setup happens.
        self.orchestrator_url = os.environ.get('ORCHESTRATOR_URL')
        if not self.orchestrator_url:
            click.echo('ORCHESTRATOR_URL environment variable not found!')
            click.echo('Pass public URL after hosting https://github.com/vmraid/test-orchestrator')
            sys.exit(1)
        self.ci_build_id = os.environ.get('CI_BUILD_ID')
        # instance id may be generated locally if the CI doesn't provide one
        self.ci_instance_id = os.environ.get('CI_INSTANCE_ID') or vmraid.generate_hash(length=10)
        if not self.ci_build_id:
            click.echo('CI_BUILD_ID environment variable not found!')
            sys.exit(1)
        ParallelTestRunner.__init__(self, app, site, with_coverage=with_coverage)

    def run_tests(self):
        """Register with the orchestrator, then run tests as they are assigned."""
        self.test_status = 'ongoing'
        self.register_instance()
        super().run_tests()

    def get_test_file_list(self):
        """Yield test specs from the orchestrator until it reports completion."""
        while self.test_status == 'ongoing':
            yield self.get_next_test()

    def register_instance(self):
        """Announce this instance and its available test specs to the orchestrator."""
        test_spec_list = get_all_tests(self.app)
        response_data = self.call_orchestrator('register-instance', data={
            'test_spec_list': test_spec_list
        })
        self.is_master = response_data.get('is_master')

    def get_next_test(self):
        """Ask the orchestrator for the next test; updates self.test_status."""
        response_data = self.call_orchestrator('get-next-test-spec')
        self.test_status = response_data.get('status')
        return response_data.get('next_test')

    def print_result(self):
        """Notify the orchestrator of completion, then print the summary."""
        self.call_orchestrator('test-completed')
        return super().print_result()

    def call_orchestrator(self, endpoint, data={}):
        """GET <orchestrator_url>/<endpoint> with build/instance headers and
        return the decoded JSON body (empty dict for non-JSON responses).

        Note: the mutable default ``data={}`` is safe here because it is
        only read, never mutated.
        """
        # add repo token header
        # build id in header
        headers = {
            'CI-BUILD-ID': self.ci_build_id,
            'CI-INSTANCE-ID': self.ci_instance_id,
            'REPO-TOKEN': '2948288382838DE'
        }
        url = f'{self.orchestrator_url}/{endpoint}'
        res = requests.get(url, json=data, headers=headers)
        res.raise_for_status()
        response_data = {}
        if 'application/json' in res.headers.get('content-type'):
            response_data = res.json()
        return response_data
| 33.374558 | 93 | 0.730334 | import json
import os
import re
import sys
import time
import unittest
import click
import vmraid
import requests
from .test_runner import (SLOW_TEST_THRESHOLD, make_test_records, set_test_email_config)
click_ctx = click.get_current_context(True)
if click_ctx:
click_ctx.color = True
class ParallelTestRunner():
def __init__(self, app, site, build_number=1, total_builds=1, with_coverage=False):
self.app = app
self.site = site
self.with_coverage = with_coverage
self.build_number = vmraid.utils.cint(build_number) or 1
self.total_builds = vmraid.utils.cint(total_builds)
self.setup_test_site()
self.run_tests()
def setup_test_site(self):
vmraid.init(site=self.site)
if not vmraid.db:
vmraid.connect()
vmraid.flags.in_test = True
vmraid.clear_cache()
vmraid.utils.scheduler.disable_scheduler()
set_test_email_config()
self.before_test_setup()
def before_test_setup(self):
start_time = time.time()
for fn in vmraid.get_hooks("before_tests", app_name=self.app):
vmraid.get_attr(fn)()
test_module = vmraid.get_module(f'{self.app}.tests')
if hasattr(test_module, "global_test_dependencies"):
for doctype in test_module.global_test_dependencies:
make_test_records(doctype)
elapsed = time.time() - start_time
elapsed = click.style(f' ({elapsed:.03}s)', fg='red')
click.echo(f'Before Test {elapsed}')
def run_tests(self):
self.test_result = ParallelTestResult(stream=sys.stderr, descriptions=True, verbosity=2)
self.start_coverage()
for test_file_info in self.get_test_file_list():
self.run_tests_for_file(test_file_info)
self.save_coverage()
self.print_result()
def run_tests_for_file(self, file_info):
if not file_info: return
vmraid.set_user('Administrator')
path, filename = file_info
module = self.get_module(path, filename)
self.create_test_dependency_records(module, path, filename)
test_suite = unittest.TestSuite()
module_test_cases = unittest.TestLoader().loadTestsFromModule(module)
test_suite.addTest(module_test_cases)
test_suite(self.test_result)
def create_test_dependency_records(self, module, path, filename):
if hasattr(module, "test_dependencies"):
for doctype in module.test_dependencies:
make_test_records(doctype)
if os.path.basename(os.path.dirname(path)) == "doctype":
test_record_filename = re.sub('^test_', '', filename).replace(".py", ".json")
test_record_file_path = os.path.join(path, test_record_filename)
if os.path.exists(test_record_file_path):
with open(test_record_file_path, 'r') as f:
doc = json.loads(f.read())
doctype = doc["name"]
make_test_records(doctype)
def get_module(self, path, filename):
app_path = vmraid.get_pymodule_path(self.app)
relative_path = os.path.relpath(path, app_path)
if relative_path == '.':
module_name = self.app
else:
relative_path = relative_path.replace('/', '.')
module_name = os.path.splitext(filename)[0]
module_name = f'{self.app}.{relative_path}.{module_name}'
return vmraid.get_module(module_name)
def print_result(self):
self.test_result.printErrors()
click.echo(self.test_result)
if self.test_result.failures or self.test_result.errors:
if os.environ.get('CI'):
sys.exit(1)
def start_coverage(self):
if self.with_coverage:
from coverage import Coverage
from vmraid.utils import get_bench_path
source_path = os.path.join(get_bench_path(), 'apps', self.app)
omit=['*.html', '*.js', '*.xml', '*.css', '*.less', '*.scss',
'*.vue', '*/doctype/*/*_dashboard.py', '*/patches/*']
if self.app == 'vmraid':
omit.append('*/commands/*')
self.coverage = Coverage(source=[source_path], omit=omit)
self.coverage.start()
def save_coverage(self):
if not self.with_coverage:
return
self.coverage.stop()
self.coverage.save()
def get_test_file_list(self):
test_list = get_all_tests(self.app)
split_size = vmraid.utils.ceil(len(test_list) / self.total_builds)
test_chunks = [test_list[x:x+split_size] for x in range(0, len(test_list), split_size)]
return test_chunks[self.build_number - 1]
class ParallelTestResult(unittest.TextTestResult):
def startTest(self, test):
self._started_at = time.time()
super(unittest.TextTestResult, self).startTest(test)
test_class = unittest.util.strclass(test.__class__)
if not hasattr(self, 'current_test_class') or self.current_test_class != test_class:
click.echo(f"\n{unittest.util.strclass(test.__class__)}")
self.current_test_class = test_class
def getTestMethodName(self, test):
return test._testMethodName if hasattr(test, '_testMethodName') else str(test)
def addSuccess(self, test):
super(unittest.TextTestResult, self).addSuccess(test)
elapsed = time.time() - self._started_at
threshold_passed = elapsed >= SLOW_TEST_THRESHOLD
elapsed = click.style(f' ({elapsed:.03}s)', fg='red') if threshold_passed else ''
click.echo(f" {click.style(' ✔ ', fg='green')} {self.getTestMethodName(test)}{elapsed}")
def addError(self, test, err):
super(unittest.TextTestResult, self).addError(test, err)
click.echo(f" {click.style(' ✖ ', fg='red')} {self.getTestMethodName(test)}")
def addFailure(self, test, err):
super(unittest.TextTestResult, self).addFailure(test, err)
click.echo(f" {click.style(' ✖ ', fg='red')} {self.getTestMethodName(test)}")
def addSkip(self, test, reason):
super(unittest.TextTestResult, self).addSkip(test, reason)
click.echo(f" {click.style(' = ', fg='white')} {self.getTestMethodName(test)}")
def addExpectedFailure(self, test, err):
super(unittest.TextTestResult, self).addExpectedFailure(test, err)
click.echo(f" {click.style(' ✖ ', fg='red')} {self.getTestMethodName(test)}")
def addUnexpectedSuccess(self, test):
super(unittest.TextTestResult, self).addUnexpectedSuccess(test)
click.echo(f" {click.style(' ✔ ', fg='green')} {self.getTestMethodName(test)}")
def printErrors(self):
click.echo('\n')
self.printErrorList(' ERROR ', self.errors, 'red')
self.printErrorList(' FAIL ', self.failures, 'red')
def printErrorList(self, flavour, errors, color):
for test, err in errors:
click.echo(self.separator1)
click.echo(f"{click.style(flavour, bg=color)} {self.getDescription(test)}")
click.echo(self.separator2)
click.echo(err)
def __str__(self):
return f"Tests: {self.testsRun}, Failing: {len(self.failures)}, Errors: {len(self.errors)}"
def get_all_tests(app):
test_file_list = []
for path, folders, files in os.walk(vmraid.get_pymodule_path(app)):
for dontwalk in ('locals', '.git', 'public', '__pycache__'):
if dontwalk in folders:
folders.remove(dontwalk)
folders.sort()
files.sort()
if os.path.sep.join(["doctype", "doctype", "boilerplate"]) in path:
continue
for filename in files:
if filename.startswith("test_") and filename.endswith(".py") \
and filename != 'test_runner.py':
test_file_list.append([path, filename])
return test_file_list
class ParallelTestWithOrchestrator(ParallelTestRunner):
def __init__(self, app, site, with_coverage=False):
self.orchestrator_url = os.environ.get('ORCHESTRATOR_URL')
if not self.orchestrator_url:
click.echo('ORCHESTRATOR_URL environment variable not found!')
click.echo('Pass public URL after hosting https://github.com/vmraid/test-orchestrator')
sys.exit(1)
self.ci_build_id = os.environ.get('CI_BUILD_ID')
self.ci_instance_id = os.environ.get('CI_INSTANCE_ID') or vmraid.generate_hash(length=10)
if not self.ci_build_id:
click.echo('CI_BUILD_ID environment variable not found!')
sys.exit(1)
ParallelTestRunner.__init__(self, app, site, with_coverage=with_coverage)
def run_tests(self):
self.test_status = 'ongoing'
self.register_instance()
super().run_tests()
def get_test_file_list(self):
while self.test_status == 'ongoing':
yield self.get_next_test()
def register_instance(self):
test_spec_list = get_all_tests(self.app)
response_data = self.call_orchestrator('register-instance', data={
'test_spec_list': test_spec_list
})
self.is_master = response_data.get('is_master')
def get_next_test(self):
response_data = self.call_orchestrator('get-next-test-spec')
self.test_status = response_data.get('status')
return response_data.get('next_test')
def print_result(self):
self.call_orchestrator('test-completed')
return super().print_result()
def call_orchestrator(self, endpoint, data={}):
headers = {
'CI-BUILD-ID': self.ci_build_id,
'CI-INSTANCE-ID': self.ci_instance_id,
'REPO-TOKEN': '2948288382838DE'
}
url = f'{self.orchestrator_url}/{endpoint}'
res = requests.get(url, json=data, headers=headers)
res.raise_for_status()
response_data = {}
if 'application/json' in res.headers.get('content-type'):
response_data = res.json()
return response_data
| true | true |
f737bfbed384e68bf72ef3708819341f6519d1f2 | 11,841 | py | Python | python/hsfs/connection.py | moritzmeister/feature-store-api-old | 8d4733bb35627d25b6f877dff451e71467171f6e | [
"Apache-2.0"
] | null | null | null | python/hsfs/connection.py | moritzmeister/feature-store-api-old | 8d4733bb35627d25b6f877dff451e71467171f6e | [
"Apache-2.0"
] | null | null | null | python/hsfs/connection.py | moritzmeister/feature-store-api-old | 8d4733bb35627d25b6f877dff451e71467171f6e | [
"Apache-2.0"
] | null | null | null | #
# Copyright 2020 Logical Clocks AB
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import os
from requests.exceptions import ConnectionError
from hsfs.decorators import connected, not_connected
from hsfs import engine, client
from hsfs.core import feature_store_api, project_api, hosts_api, services_api
class Connection:
    """Handle for connecting to a Hopsworks feature store.

    Constructing an instance connects immediately (see ``connect``); use
    ``get_feature_store`` to obtain a feature store reference and ``close``
    to terminate the connection.
    """
    # Defaults applied by __init__ when the corresponding argument is omitted.
    AWS_DEFAULT_REGION = "default"
    HOPSWORKS_PORT_DEFAULT = 443
    SECRETS_STORE_DEFAULT = "parameterstore"
    HOSTNAME_VERIFICATION_DEFAULT = True
    CERT_FOLDER_DEFAULT = "hops"
def __init__(
self,
host=None,
port=None,
project=None,
region_name=None,
secrets_store=None,
hostname_verification=None,
trust_store_path=None,
cert_folder=None,
api_key_file=None,
):
self._host = host
self._port = port or self.HOPSWORKS_PORT_DEFAULT
self._project = project
self._region_name = region_name or self.AWS_DEFAULT_REGION
self._secrets_store = secrets_store or self.SECRETS_STORE_DEFAULT
self._hostname_verification = (
hostname_verification or self.HOSTNAME_VERIFICATION_DEFAULT
)
self._trust_store_path = trust_store_path
self._cert_folder = cert_folder or self.CERT_FOLDER_DEFAULT
self._api_key_file = api_key_file
self._connected = False
self.connect()
@classmethod
def connection(
cls,
host=None,
port=None,
project=None,
region_name=None,
secrets_store=None,
hostname_verification=None,
trust_store_path=None,
cert_folder=None,
api_key_file=None,
):
return cls(
host,
port,
project,
region_name,
secrets_store,
hostname_verification,
trust_store_path,
cert_folder,
api_key_file,
)
    @classmethod
    def setup_databricks(
        cls,
        host,
        project,
        port=443,
        region_name="default",
        secrets_store="parameterstore",
        cert_folder="hops",
        hostname_verification=True,
        trust_store_path=None,
        api_key_file=None,
    ):
        """Connect and prepare a Databricks cluster for feature store use.

        Downloads the client libraries to DBFS, writes the cluster init
        script and prints setup instructions for the user.

        :return: the established Connection
        """
        connection = cls(
            host,
            port,
            project,
            region_name,
            secrets_store,
            hostname_verification,
            trust_store_path,
            cert_folder,
            api_key_file,
        )
        # base folder on DBFS where certificates/scripts for this client live
        dbfs_folder = client.get_instance()._cert_folder_base

        os.makedirs(os.path.join(dbfs_folder, "scripts"), exist_ok=True)
        connection._get_clients(dbfs_folder)
        hive_host = connection._get_hivemetastore_hostname()
        connection._write_init_script(dbfs_folder)
        connection._print_instructions(
            cert_folder, client.get_instance()._cert_folder, hive_host
        )

        return connection
    @not_connected
    def connect(self):
        """Initialize the client and engine and create the API handles.

        Three environments are distinguished:
        - inside Hopsworks (REST_ENDPOINT env var set): internal client + spark
        - external on Databricks (/dbfs/ exists): external client + spark,
          with all paths rooted under /dbfs
        - external elsewhere (e.g. AWS): external client + hive engine

        On TypeError/ConnectionError the connected flag is reset and the
        error re-raised.
        """
        self._connected = True
        try:
            if client.base.Client.REST_ENDPOINT not in os.environ:
                if os.path.exists("/dbfs/"):
                    # databricks
                    client.init(
                        "external",
                        self._host,
                        self._port,
                        self._project,
                        self._region_name,
                        self._secrets_store,
                        self._hostname_verification,
                        # trust store / api key files live on DBFS if given
                        os.path.join("/dbfs", self._trust_store_path)
                        if self._trust_store_path is not None
                        else None,
                        os.path.join("/dbfs", self._cert_folder),
                        os.path.join("/dbfs", self._api_key_file)
                        if self._api_key_file is not None
                        else None,
                    )
                    engine.init("spark")
                else:
                    # aws
                    client.init(
                        "external",
                        self._host,
                        self._port,
                        self._project,
                        self._region_name,
                        self._secrets_store,
                        self._hostname_verification,
                        self._trust_store_path,
                        self._cert_folder,
                        self._api_key_file,
                    )
                    engine.init(
                        "hive",
                        self._host,
                        self._cert_folder,
                        self._project,
                        client.get_instance()._cert_key,
                    )
            else:
                # running inside Hopsworks itself
                client.init("hopsworks")
                engine.init("spark")
            self._feature_store_api = feature_store_api.FeatureStoreApi()
            self._project_api = project_api.ProjectApi()
            self._hosts_api = hosts_api.HostsApi()
            self._services_api = services_api.ServicesApi()
        except (TypeError, ConnectionError):
            self._connected = False
            raise
        print("Connected. Call `.close()` to terminate connection gracefully.")
    def close(self):
        """Close the connection to the Hopsworks instance.

        Stops the client and engine singletons and marks this connection as
        disconnected so its settings can be changed and `connect()` re-run.
        """
        client.stop()
        # Drop the API handle so stale references cannot be used after close.
        self._feature_store_api = None
        engine.stop()
        self._connected = False
        print("Connection closed.")
@connected
def get_feature_store(self, name=None):
"""Get a reference to a feature store, to perform operations on.
Defaulting to the project's default feature store. Shared feature stores can be
retrieved by passing the `name`.
:param name: the name of the feature store, defaults to None
:type name: str, optional
:return: feature store object
:rtype: FeatureStore
"""
if not name:
name = client.get_instance()._project_name + "_featurestore"
return self._feature_store_api.get(name)
def _get_clients(self, dbfs_folder):
"""
Get the client libraries and save them in the dbfs folder.
:param dbfs_folder: the folder in which to save the libraries
:type dbfs_folder: str
"""
client_path = os.path.join(dbfs_folder, "client.tar.gz")
if not os.path.exists(client_path):
client_libs = self._project_api.get_client()
with open(client_path, "wb") as f:
for chunk in client_libs:
f.write(chunk)
def _get_hivemetastore_hostname(self):
"""
Get the internal hostname of the Hopsworks instance.
"""
hosts = self._hosts_api.get()
hivemetastore = self._services_api.get_service("hivemetastore")
hosts = [host for host in hosts if host["id"] == hivemetastore["hostId"]]
return hosts[0]["hostname"]
def _write_init_script(self, dbfs_folder):
"""
Write the init script for databricks clusters to dbfs.
:param dbfs_folder: the folder on dbfs in which to save the script
:type dbfs_foler: str
"""
initScript = """
#!/bin/sh
tar -xvf PATH/client.tar.gz -C /tmp
tar -xvf /tmp/client/apache-hive-*-bin.tar.gz -C /tmp
mv /tmp/apache-hive-*-bin /tmp/apache-hive-bin
chmod -R +xr /tmp/apache-hive-bin
cp /tmp/client/hopsfs-client*.jar /databricks/jars/
"""
script_path = os.path.join(dbfs_folder, "scripts/initScript.sh")
if not os.path.exists(script_path):
initScript = initScript.replace("PATH", dbfs_folder)
with open(script_path, "w") as f:
f.write(initScript)
    def _print_instructions(self, user_cert_folder, cert_folder, internal_host):
        """
        Print the instructions to set up the hopsfs hive connection on databricks.

        :param user_cert_folder: the original user specified cert_folder without `/dbfs/` prefix
        :type user_cert_folder: str
        :param cert_folder: the directory in which the credentials were saved, prefixed with `/dbfs/` and `[hostname]`
        :type cert_folder: str
        :param internal_host: the internal hostname of the hopsworks instance
        :type internal_host: str
        """
        # {0}=user_cert_folder, {1}=cert_folder, {2}=internal_host, filled below.
        instructions = """
        In the advanced options of your databricks cluster configuration
        add the following path to Init Scripts: dbfs:/{0}/scripts/initScript.sh
        add the following to the Spark Config:
        spark.hadoop.fs.hopsfs.impl io.hops.hopsfs.client.HopsFileSystem
        spark.hadoop.hops.ipc.server.ssl.enabled true
        spark.hadoop.hops.ssl.hostname.verifier ALLOW_ALL
        spark.hadoop.hops.rpc.socket.factory.class.default io.hops.hadoop.shaded.org.apache.hadoop.net.HopsSSLSocketFactory
        spark.hadoop.client.rpc.ssl.enabled.protocol TLSv1.2
        spark.hadoop.hops.ssl.keystores.passwd.name {1}/material_passwd
        spark.hadoop.hops.ssl.keystore.name {1}/keyStore.jks
        spark.hadoop.hops.ssl.trustore.name {1}/trustStore.jks
        spark.sql.hive.metastore.jars /tmp/apache-hive-bin/lib/*
        spark.hadoop.hive.metastore.uris thrift://{2}:9083
        Then save and restart the cluster.
        """.format(
            user_cert_folder, cert_folder, internal_host
        )
        print(instructions)
    # ------------------------------------------------------------------
    # Connection settings exposed as read/write properties. Every setter is
    # wrapped in @not_connected, so settings can only be changed while the
    # connection is closed; call `connect()` again for changes to take effect.
    # ------------------------------------------------------------------
    @property
    def host(self):
        # Hostname of the Hopsworks instance.
        return self._host
    @host.setter
    @not_connected
    def host(self, host):
        self._host = host
    @property
    def port(self):
        # REST port of the Hopsworks instance.
        return self._port
    @port.setter
    @not_connected
    def port(self, port):
        self._port = port
    @property
    def project(self):
        # Name of the project to connect to.
        return self._project
    @project.setter
    @not_connected
    def project(self, project):
        self._project = project
    @property
    def region_name(self):
        # Region of the deployment (used by the secrets store).
        return self._region_name
    @region_name.setter
    @not_connected
    def region_name(self, region_name):
        self._region_name = region_name
    @property
    def secrets_store(self):
        # Secrets backend holding the API key.
        return self._secrets_store
    @secrets_store.setter
    @not_connected
    def secrets_store(self, secrets_store):
        self._secrets_store = secrets_store
    @property
    def hostname_verification(self):
        # Whether TLS hostname verification is enabled.
        return self._hostname_verification
    @hostname_verification.setter
    @not_connected
    def hostname_verification(self, hostname_verification):
        self._hostname_verification = hostname_verification
    @property
    def trust_store_path(self):
        # Optional path to a custom trust store.
        return self._trust_store_path
    @trust_store_path.setter
    @not_connected
    def trust_store_path(self, trust_store_path):
        self._trust_store_path = trust_store_path
    @property
    def cert_folder(self):
        # Folder in which TLS credentials are stored.
        return self._cert_folder
    @cert_folder.setter
    @not_connected
    def cert_folder(self, cert_folder):
        self._cert_folder = cert_folder
    @property
    def api_key_file(self):
        # Optional file containing the API key.
        return self._api_key_file
    @api_key_file.setter
    @not_connected
    def api_key_file(self, api_key_file):
        self._api_key_file = api_key_file
    def __enter__(self):
        # Context-manager support: `with Connection(...) as conn:` connects.
        self.connect()
        return self
    def __exit__(self, type, value, traceback):
        # Always close the connection when leaving the `with` block,
        # regardless of whether an exception occurred.
        self.close()
| 32.089431 | 123 | 0.600034 |
import os
from requests.exceptions import ConnectionError
from hsfs.decorators import connected, not_connected
from hsfs import engine, client
from hsfs.core import feature_store_api, project_api, hosts_api, services_api
class Connection:
AWS_DEFAULT_REGION = "default"
HOPSWORKS_PORT_DEFAULT = 443
SECRETS_STORE_DEFAULT = "parameterstore"
HOSTNAME_VERIFICATION_DEFAULT = True
CERT_FOLDER_DEFAULT = "hops"
def __init__(
self,
host=None,
port=None,
project=None,
region_name=None,
secrets_store=None,
hostname_verification=None,
trust_store_path=None,
cert_folder=None,
api_key_file=None,
):
self._host = host
self._port = port or self.HOPSWORKS_PORT_DEFAULT
self._project = project
self._region_name = region_name or self.AWS_DEFAULT_REGION
self._secrets_store = secrets_store or self.SECRETS_STORE_DEFAULT
self._hostname_verification = (
hostname_verification or self.HOSTNAME_VERIFICATION_DEFAULT
)
self._trust_store_path = trust_store_path
self._cert_folder = cert_folder or self.CERT_FOLDER_DEFAULT
self._api_key_file = api_key_file
self._connected = False
self.connect()
@classmethod
def connection(
cls,
host=None,
port=None,
project=None,
region_name=None,
secrets_store=None,
hostname_verification=None,
trust_store_path=None,
cert_folder=None,
api_key_file=None,
):
return cls(
host,
port,
project,
region_name,
secrets_store,
hostname_verification,
trust_store_path,
cert_folder,
api_key_file,
)
@classmethod
def setup_databricks(
cls,
host,
project,
port=443,
region_name="default",
secrets_store="parameterstore",
cert_folder="hops",
hostname_verification=True,
trust_store_path=None,
api_key_file=None,
):
connection = cls(
host,
port,
project,
region_name,
secrets_store,
hostname_verification,
trust_store_path,
cert_folder,
api_key_file,
)
dbfs_folder = client.get_instance()._cert_folder_base
os.makedirs(os.path.join(dbfs_folder, "scripts"), exist_ok=True)
connection._get_clients(dbfs_folder)
hive_host = connection._get_hivemetastore_hostname()
connection._write_init_script(dbfs_folder)
connection._print_instructions(
cert_folder, client.get_instance()._cert_folder, hive_host
)
return connection
@not_connected
def connect(self):
self._connected = True
try:
if client.base.Client.REST_ENDPOINT not in os.environ:
if os.path.exists("/dbfs/"):
client.init(
"external",
self._host,
self._port,
self._project,
self._region_name,
self._secrets_store,
self._hostname_verification,
os.path.join("/dbfs", self._trust_store_path)
if self._trust_store_path is not None
else None,
os.path.join("/dbfs", self._cert_folder),
os.path.join("/dbfs", self._api_key_file)
if self._api_key_file is not None
else None,
)
engine.init("spark")
else:
client.init(
"external",
self._host,
self._port,
self._project,
self._region_name,
self._secrets_store,
self._hostname_verification,
self._trust_store_path,
self._cert_folder,
self._api_key_file,
)
engine.init(
"hive",
self._host,
self._cert_folder,
self._project,
client.get_instance()._cert_key,
)
else:
client.init("hopsworks")
engine.init("spark")
self._feature_store_api = feature_store_api.FeatureStoreApi()
self._project_api = project_api.ProjectApi()
self._hosts_api = hosts_api.HostsApi()
self._services_api = services_api.ServicesApi()
except (TypeError, ConnectionError):
self._connected = False
raise
print("Connected. Call `.close()` to terminate connection gracefully.")
def close(self):
client.stop()
self._feature_store_api = None
engine.stop()
self._connected = False
print("Connection closed.")
@connected
def get_feature_store(self, name=None):
if not name:
name = client.get_instance()._project_name + "_featurestore"
return self._feature_store_api.get(name)
def _get_clients(self, dbfs_folder):
client_path = os.path.join(dbfs_folder, "client.tar.gz")
if not os.path.exists(client_path):
client_libs = self._project_api.get_client()
with open(client_path, "wb") as f:
for chunk in client_libs:
f.write(chunk)
def _get_hivemetastore_hostname(self):
hosts = self._hosts_api.get()
hivemetastore = self._services_api.get_service("hivemetastore")
hosts = [host for host in hosts if host["id"] == hivemetastore["hostId"]]
return hosts[0]["hostname"]
def _write_init_script(self, dbfs_folder):
initScript = """
#!/bin/sh
tar -xvf PATH/client.tar.gz -C /tmp
tar -xvf /tmp/client/apache-hive-*-bin.tar.gz -C /tmp
mv /tmp/apache-hive-*-bin /tmp/apache-hive-bin
chmod -R +xr /tmp/apache-hive-bin
cp /tmp/client/hopsfs-client*.jar /databricks/jars/
"""
script_path = os.path.join(dbfs_folder, "scripts/initScript.sh")
if not os.path.exists(script_path):
initScript = initScript.replace("PATH", dbfs_folder)
with open(script_path, "w") as f:
f.write(initScript)
def _print_instructions(self, user_cert_folder, cert_folder, internal_host):
instructions = """
In the advanced options of your databricks cluster configuration
add the following path to Init Scripts: dbfs:/{0}/scripts/initScript.sh
add the following to the Spark Config:
spark.hadoop.fs.hopsfs.impl io.hops.hopsfs.client.HopsFileSystem
spark.hadoop.hops.ipc.server.ssl.enabled true
spark.hadoop.hops.ssl.hostname.verifier ALLOW_ALL
spark.hadoop.hops.rpc.socket.factory.class.default io.hops.hadoop.shaded.org.apache.hadoop.net.HopsSSLSocketFactory
spark.hadoop.client.rpc.ssl.enabled.protocol TLSv1.2
spark.hadoop.hops.ssl.keystores.passwd.name {1}/material_passwd
spark.hadoop.hops.ssl.keystore.name {1}/keyStore.jks
spark.hadoop.hops.ssl.trustore.name {1}/trustStore.jks
spark.sql.hive.metastore.jars /tmp/apache-hive-bin/lib/*
spark.hadoop.hive.metastore.uris thrift://{2}:9083
Then save and restart the cluster.
""".format(
user_cert_folder, cert_folder, internal_host
)
print(instructions)
@property
def host(self):
return self._host
@host.setter
@not_connected
def host(self, host):
self._host = host
@property
def port(self):
return self._port
@port.setter
@not_connected
def port(self, port):
self._port = port
@property
def project(self):
return self._project
@project.setter
@not_connected
def project(self, project):
self._project = project
@property
def region_name(self):
return self._region_name
@region_name.setter
@not_connected
def region_name(self, region_name):
self._region_name = region_name
@property
def secrets_store(self):
return self._secrets_store
@secrets_store.setter
@not_connected
def secrets_store(self, secrets_store):
self._secrets_store = secrets_store
@property
def hostname_verification(self):
return self._hostname_verification
@hostname_verification.setter
@not_connected
def hostname_verification(self, hostname_verification):
self._hostname_verification = hostname_verification
@property
def trust_store_path(self):
return self._trust_store_path
@trust_store_path.setter
@not_connected
def trust_store_path(self, trust_store_path):
self._trust_store_path = trust_store_path
@property
def cert_folder(self):
return self._cert_folder
@cert_folder.setter
@not_connected
def cert_folder(self, cert_folder):
self._cert_folder = cert_folder
@property
def api_key_file(self):
return self._api_key_file
@api_key_file.setter
@not_connected
def api_key_file(self, api_key_file):
self._api_key_file = api_key_file
def __enter__(self):
self.connect()
return self
def __exit__(self, type, value, traceback):
self.close()
| true | true |
f737bfe9fadc96bc835acfc826c69421f47a980c | 868 | py | Python | oop-for-loop.py | syedmurad1/OOP-Python | d09627269c12ce901677ec1053bdf565861030d7 | [
"MIT"
] | null | null | null | oop-for-loop.py | syedmurad1/OOP-Python | d09627269c12ce901677ec1053bdf565861030d7 | [
"MIT"
] | null | null | null | oop-for-loop.py | syedmurad1/OOP-Python | d09627269c12ce901677ec1053bdf565861030d7 | [
"MIT"
] | null | null | null | for letter in "Syed":
print(letter)
print("--------------------------------------------------------")
friends=["Tom","Ali"]
for fr in friends:
print(fr)
print("--------------------------------------------------------")
for num in range(4):
print(num)
print("--------------------------------------------------------")
for num in range(1,4):
print(num)
print("--------------------------------------------------------")
number=int(input("enter an integer: "))
for num in range(1,4):
cal=number* num
print(number, "X", num, "=", cal)
print("--------------------------------------------------------")
fr1=["Tom","Ali","M"]
for num in range(len(fr1)): # len(friends)
print(fr1[num])
print("--------------------------------------------------------")
for num in range(5):
if num ==0:
print ("first")
else:
print("not") | 26.30303 | 65 | 0.345622 | for letter in "Syed":
print(letter)
print("--------------------------------------------------------")
friends=["Tom","Ali"]
for fr in friends:
print(fr)
print("--------------------------------------------------------")
for num in range(4):
print(num)
print("--------------------------------------------------------")
for num in range(1,4):
print(num)
print("--------------------------------------------------------")
number=int(input("enter an integer: "))
for num in range(1,4):
cal=number* num
print(number, "X", num, "=", cal)
print("--------------------------------------------------------")
fr1=["Tom","Ali","M"]
for num in range(len(fr1)):
print(fr1[num])
print("--------------------------------------------------------")
for num in range(5):
if num ==0:
print ("first")
else:
print("not") | true | true |
f737c27741b148ed85db27868dcdd835a85afc21 | 2,437 | py | Python | sdk/python/pulumi_aws/waf/regex_pattern_set.py | Charliekenney23/pulumi-aws | 55bd0390160d27350b297834026fee52114a2d41 | [
"ECL-2.0",
"Apache-2.0"
] | null | null | null | sdk/python/pulumi_aws/waf/regex_pattern_set.py | Charliekenney23/pulumi-aws | 55bd0390160d27350b297834026fee52114a2d41 | [
"ECL-2.0",
"Apache-2.0"
] | null | null | null | sdk/python/pulumi_aws/waf/regex_pattern_set.py | Charliekenney23/pulumi-aws | 55bd0390160d27350b297834026fee52114a2d41 | [
"ECL-2.0",
"Apache-2.0"
] | 1 | 2021-03-08T15:05:29.000Z | 2021-03-08T15:05:29.000Z | # coding=utf-8
# *** WARNING: this file was generated by the Pulumi Terraform Bridge (tfgen) Tool. ***
# *** Do not edit by hand unless you're certain you know what you are doing! ***
import json
import warnings
import pulumi
import pulumi.runtime
from .. import utilities, tables
# Auto-generated by the Pulumi Terraform Bridge (tfgen); edits here are
# normally overwritten on regeneration.
class RegexPatternSet(pulumi.CustomResource):
    name: pulumi.Output[str]
    """
    The name or description of the Regex Pattern Set.
    """
    regex_pattern_strings: pulumi.Output[list]
    """
    A list of regular expression (regex) patterns that you want AWS WAF to search for, such as `B[a@]dB[o0]t`.
    """
    def __init__(__self__, resource_name, opts=None, name=None, regex_pattern_strings=None, __name__=None, __opts__=None):
        """
        Provides a WAF Regex Pattern Set Resource
        :param str resource_name: The name of the resource.
        :param pulumi.ResourceOptions opts: Options for the resource.
        :param pulumi.Input[str] name: The name or description of the Regex Pattern Set.
        :param pulumi.Input[list] regex_pattern_strings: A list of regular expression (regex) patterns that you want AWS WAF to search for, such as `B[a@]dB[o0]t`.
        """
        # Legacy `__name__`/`__opts__` keyword arguments are accepted for
        # backwards compatibility but emit a DeprecationWarning.
        if __name__ is not None:
            warnings.warn("explicit use of __name__ is deprecated", DeprecationWarning)
            resource_name = __name__
        if __opts__ is not None:
            warnings.warn("explicit use of __opts__ is deprecated, use 'opts' instead", DeprecationWarning)
            opts = __opts__
        if not resource_name:
            raise TypeError('Missing resource name argument (for URN creation)')
        if not isinstance(resource_name, str):
            raise TypeError('Expected resource name to be a string')
        if opts and not isinstance(opts, pulumi.ResourceOptions):
            raise TypeError('Expected resource options to be a ResourceOptions instance')
        # Resource inputs passed through to the underlying provider.
        __props__ = dict()
        __props__['name'] = name
        __props__['regex_pattern_strings'] = regex_pattern_strings
        super(RegexPatternSet, __self__).__init__(
            'aws:waf/regexPatternSet:RegexPatternSet',
            resource_name,
            __props__,
            opts)
    def translate_output_property(self, prop):
        # Map provider camelCase output names to Python snake_case.
        return tables._CAMEL_TO_SNAKE_CASE_TABLE.get(prop) or prop
    def translate_input_property(self, prop):
        # Map Python snake_case input names to provider camelCase.
        return tables._SNAKE_TO_CAMEL_CASE_TABLE.get(prop) or prop
| 39.95082 | 163 | 0.677472 |
import json
import warnings
import pulumi
import pulumi.runtime
from .. import utilities, tables
class RegexPatternSet(pulumi.CustomResource):
name: pulumi.Output[str]
regex_pattern_strings: pulumi.Output[list]
def __init__(__self__, resource_name, opts=None, name=None, regex_pattern_strings=None, __name__=None, __opts__=None):
if __name__ is not None:
warnings.warn("explicit use of __name__ is deprecated", DeprecationWarning)
resource_name = __name__
if __opts__ is not None:
warnings.warn("explicit use of __opts__ is deprecated, use 'opts' instead", DeprecationWarning)
opts = __opts__
if not resource_name:
raise TypeError('Missing resource name argument (for URN creation)')
if not isinstance(resource_name, str):
raise TypeError('Expected resource name to be a string')
if opts and not isinstance(opts, pulumi.ResourceOptions):
raise TypeError('Expected resource options to be a ResourceOptions instance')
__props__ = dict()
__props__['name'] = name
__props__['regex_pattern_strings'] = regex_pattern_strings
super(RegexPatternSet, __self__).__init__(
'aws:waf/regexPatternSet:RegexPatternSet',
resource_name,
__props__,
opts)
def translate_output_property(self, prop):
return tables._CAMEL_TO_SNAKE_CASE_TABLE.get(prop) or prop
def translate_input_property(self, prop):
return tables._SNAKE_TO_CAMEL_CASE_TABLE.get(prop) or prop
| true | true |
f737c2c340b88424a22af1b44c6a868e18dc3868 | 10,086 | py | Python | build/PureCloudPlatformClientV2/models/journey_outcome_events_notification_browser.py | MyPureCloud/platform-client-sdk-python | 51249f4c655a1c8a67561c9eaa852ef95e25e57d | [
"MIT"
] | 10 | 2019-02-22T00:27:08.000Z | 2021-09-12T23:23:44.000Z | build/PureCloudPlatformClientV2/models/journey_outcome_events_notification_browser.py | MyPureCloud/platform-client-sdk-python | 51249f4c655a1c8a67561c9eaa852ef95e25e57d | [
"MIT"
] | 5 | 2018-06-07T08:32:00.000Z | 2021-07-28T17:37:26.000Z | build/PureCloudPlatformClientV2/models/journey_outcome_events_notification_browser.py | MyPureCloud/platform-client-sdk-python | 51249f4c655a1c8a67561c9eaa852ef95e25e57d | [
"MIT"
] | 6 | 2020-04-09T17:43:07.000Z | 2022-02-17T08:48:05.000Z | # coding: utf-8
"""
Copyright 2016 SmartBear Software
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
Ref: https://github.com/swagger-api/swagger-codegen
"""
from pprint import pformat
from six import iteritems
import re
import json
from ..utils import sanitize_for_serialization
class JourneyOutcomeEventsNotificationBrowser(object):
    """
    NOTE: This class is auto generated by the swagger code generator program.
    Do not edit the class manually.
    """

    def __init__(self):
        """
        JourneyOutcomeEventsNotificationBrowser - a model defined in Swagger

        :param dict swaggerTypes: The key is attribute name
            and the value is attribute type.
        :param dict attributeMap: The key is attribute name
            and the value is json key in definition.
        """
        # Maps attribute name -> declared Swagger type.
        self.swagger_types = {
            'family': 'str',
            'version': 'str',
            'lang': 'str',
            'fingerprint': 'str',
            'view_height': 'int',
            'view_width': 'int',
            'features_flash': 'bool',
            'features_java': 'bool',
            'features_pdf': 'bool',
            'features_webrtc': 'bool'
        }
        # Maps attribute name -> JSON key in the API definition.
        self.attribute_map = {
            'family': 'family',
            'version': 'version',
            'lang': 'lang',
            'fingerprint': 'fingerprint',
            'view_height': 'viewHeight',
            'view_width': 'viewWidth',
            'features_flash': 'featuresFlash',
            'features_java': 'featuresJava',
            'features_pdf': 'featuresPdf',
            'features_webrtc': 'featuresWebrtc'
        }
        # Every attribute starts out unset (None) in its private slot.
        for attr in self.swagger_types:
            setattr(self, '_' + attr, None)

    def _simple_property(attr):
        # Factory building a plain get/set property over the private
        # slot '_<attr>'; no validation is performed on assignment.
        def _get(self):
            return getattr(self, '_' + attr)

        def _set(self, value):
            setattr(self, '_' + attr, value)

        return property(_get, _set)

    family = _simple_property('family')
    version = _simple_property('version')
    lang = _simple_property('lang')
    fingerprint = _simple_property('fingerprint')
    view_height = _simple_property('view_height')
    view_width = _simple_property('view_width')
    features_flash = _simple_property('features_flash')
    features_java = _simple_property('features_java')
    features_pdf = _simple_property('features_pdf')
    features_webrtc = _simple_property('features_webrtc')
    del _simple_property

    def to_dict(self):
        """
        Returns the model properties as a dict
        """
        result = {}
        for attr in self.swagger_types:
            value = getattr(self, attr)
            if isinstance(value, list):
                # Convert nested model elements, keep plain values as-is.
                result[attr] = [
                    item.to_dict() if hasattr(item, "to_dict") else item
                    for item in value
                ]
            elif hasattr(value, "to_dict"):
                result[attr] = value.to_dict()
            elif isinstance(value, dict):
                result[attr] = {
                    key: val.to_dict() if hasattr(val, "to_dict") else val
                    for key, val in value.items()
                }
            else:
                result[attr] = value
        return result

    def to_json(self):
        """
        Returns the model as raw JSON
        """
        return json.dumps(sanitize_for_serialization(self.to_dict()))

    def to_str(self):
        """
        Returns the string representation of the model
        """
        return pformat(self.to_dict())

    def __repr__(self):
        """
        For `print` and `pprint`
        """
        return self.to_str()

    def __eq__(self, other):
        """
        Returns true if both objects are equal
        """
        return self.__dict__ == other.__dict__

    def __ne__(self, other):
        """
        Returns true if both objects are not equal
        """
        return not self == other
| 27.632877 | 100 | 0.614614 |
from pprint import pformat
from six import iteritems
import re
import json
from ..utils import sanitize_for_serialization
class JourneyOutcomeEventsNotificationBrowser(object):
def __init__(self):
self.swagger_types = {
'family': 'str',
'version': 'str',
'lang': 'str',
'fingerprint': 'str',
'view_height': 'int',
'view_width': 'int',
'features_flash': 'bool',
'features_java': 'bool',
'features_pdf': 'bool',
'features_webrtc': 'bool'
}
self.attribute_map = {
'family': 'family',
'version': 'version',
'lang': 'lang',
'fingerprint': 'fingerprint',
'view_height': 'viewHeight',
'view_width': 'viewWidth',
'features_flash': 'featuresFlash',
'features_java': 'featuresJava',
'features_pdf': 'featuresPdf',
'features_webrtc': 'featuresWebrtc'
}
self._family = None
self._version = None
self._lang = None
self._fingerprint = None
self._view_height = None
self._view_width = None
self._features_flash = None
self._features_java = None
self._features_pdf = None
self._features_webrtc = None
@property
def family(self):
return self._family
@family.setter
def family(self, family):
self._family = family
@property
def version(self):
return self._version
@version.setter
def version(self, version):
self._version = version
@property
def lang(self):
return self._lang
@lang.setter
def lang(self, lang):
self._lang = lang
@property
def fingerprint(self):
return self._fingerprint
@fingerprint.setter
def fingerprint(self, fingerprint):
self._fingerprint = fingerprint
@property
def view_height(self):
return self._view_height
@view_height.setter
def view_height(self, view_height):
self._view_height = view_height
@property
def view_width(self):
return self._view_width
@view_width.setter
def view_width(self, view_width):
self._view_width = view_width
@property
def features_flash(self):
return self._features_flash
@features_flash.setter
def features_flash(self, features_flash):
self._features_flash = features_flash
@property
def features_java(self):
return self._features_java
@features_java.setter
def features_java(self, features_java):
self._features_java = features_java
@property
def features_pdf(self):
return self._features_pdf
@features_pdf.setter
def features_pdf(self, features_pdf):
self._features_pdf = features_pdf
@property
def features_webrtc(self):
return self._features_webrtc
@features_webrtc.setter
def features_webrtc(self, features_webrtc):
self._features_webrtc = features_webrtc
def to_dict(self):
result = {}
for attr, _ in iteritems(self.swagger_types):
value = getattr(self, attr)
if isinstance(value, list):
result[attr] = list(map(
lambda x: x.to_dict() if hasattr(x, "to_dict") else x,
value
))
elif hasattr(value, "to_dict"):
result[attr] = value.to_dict()
elif isinstance(value, dict):
result[attr] = dict(map(
lambda item: (item[0], item[1].to_dict())
if hasattr(item[1], "to_dict") else item,
value.items()
))
else:
result[attr] = value
return result
def to_json(self):
return json.dumps(sanitize_for_serialization(self.to_dict()))
def to_str(self):
return pformat(self.to_dict())
def __repr__(self):
return self.to_str()
def __eq__(self, other):
return self.__dict__ == other.__dict__
def __ne__(self, other):
return not self == other
| true | true |
f737c2d0235797af3042f44c846b9ab021f14721 | 3,142 | py | Python | qutip/tests/test_tensor.py | kiuthed/qutip | b6fb8e5bbd9ffeae117b54e56313e8617038deab | [
"BSD-3-Clause"
] | null | null | null | qutip/tests/test_tensor.py | kiuthed/qutip | b6fb8e5bbd9ffeae117b54e56313e8617038deab | [
"BSD-3-Clause"
] | null | null | null | qutip/tests/test_tensor.py | kiuthed/qutip | b6fb8e5bbd9ffeae117b54e56313e8617038deab | [
"BSD-3-Clause"
] | null | null | null | # This file is part of QuTiP: Quantum Toolbox in Python.
#
# Copyright (c) 2011 and later, Paul D. Nation and Robert J. Johansson.
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# 1. Redistributions of source code must retain the above copyright notice,
# this list of conditions and the following disclaimer.
#
# 2. Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
#
# 3. Neither the name of the QuTiP: Quantum Toolbox in Python nor the names
# of its contributors may be used to endorse or promote products derived
# from this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A
# PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
###############################################################################
from numpy.testing import assert_equal, assert_, run_module_suite
from qutip.operators import identity
from qutip.superop_reps import to_super
from qutip.tensor import (
tensor_contract, flatten, enumerate_flat, deep_remove, unflatten
)
def test_flatten():
    """Check that flatten() collapses an arbitrarily nested list."""
    nested = [[[0], 1], 2]
    assert_equal(flatten(nested), [0, 1, 2])
def test_enumerate_flat():
    """Check that enumerate_flat() labels leaves with flat indices in order."""
    nested = [[[10], [20, 30]], 40]
    expected_labels = [[[0], [1, 2]], 3]
    assert_equal(enumerate_flat(nested), expected_labels)
def test_deep_remove():
    """Check that deep_remove() deletes values from nested lists at any depth."""
    simple = deep_remove([[[0], 1], 2], 1)
    assert_equal(simple, [[[0]], 2])
    # Removing several values at once, including one that empties a sublist.
    harder = deep_remove([[[[0, 1, 2]], [3, 4], [5], [6, 7]]], 0, 5)
    assert harder == [[[[1, 2]], [3, 4], [], [6, 7]]]
def test_unflatten():
    """Check that unflatten() inverts flatten() given enumerate_flat() labels."""
    nested = [[[10, 20, 30], [40, 50, 60]], [[70, 80, 90], [100, 110, 120]]]
    round_trip = unflatten(flatten(nested), enumerate_flat(nested))
    assert round_trip == nested
def test_tensor_contract():
    """Check tensor_contract() on an operator and on a superoperator."""
    op = identity([2, 3, 4])
    # Contracting the dimension-3 subsystem of an identity leaves a trace
    # factor of 3 on the remaining identity.
    assert_(3 * identity([2, 4]) == tensor_contract(op, (1, 4)))
    # For superoperators, only the resulting dims are checked for now.
    contracted = tensor_contract(to_super(op), (1, 4), (7, 10))
    assert_equal([[[2, 4], [2, 4]], [[2, 4], [2, 4]]], contracted.dims)
if __name__ == "__main__":
    # Run this module's tests directly through numpy's test runner.
    run_module_suite()
f737c38e50e881ea56d893e423192259ae1551aa | 26,436 | py | Python | fpga/mqnic/fb2CG/fpga_25g/tb/fpga_core/test_fpga_core.py | Vic0428/corundum | aa0915d61dc0ca394183fa6e5e40b4883141f4e5 | [
"BSD-2-Clause-FreeBSD"
] | 1 | 2022-02-17T22:05:14.000Z | 2022-02-17T22:05:14.000Z | fpga/mqnic/fb2CG/fpga_25g/tb/fpga_core/test_fpga_core.py | Vic0428/corundum | aa0915d61dc0ca394183fa6e5e40b4883141f4e5 | [
"BSD-2-Clause-FreeBSD"
] | null | null | null | fpga/mqnic/fb2CG/fpga_25g/tb/fpga_core/test_fpga_core.py | Vic0428/corundum | aa0915d61dc0ca394183fa6e5e40b4883141f4e5 | [
"BSD-2-Clause-FreeBSD"
] | 1 | 2021-07-27T10:20:24.000Z | 2021-07-27T10:20:24.000Z | """
Copyright 2020, The Regents of the University of California.
All rights reserved.
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions are met:
1. Redistributions of source code must retain the above copyright notice,
this list of conditions and the following disclaimer.
2. Redistributions in binary form must reproduce the above copyright notice,
this list of conditions and the following disclaimer in the documentation
and/or other materials provided with the distribution.
THIS SOFTWARE IS PROVIDED BY THE REGENTS OF THE UNIVERSITY OF CALIFORNIA ''AS
IS'' AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
DISCLAIMED. IN NO EVENT SHALL THE REGENTS OF THE UNIVERSITY OF CALIFORNIA OR
CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT
OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING
IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY
OF SUCH DAMAGE.
The views and conclusions contained in the software and documentation are those
of the authors and should not be interpreted as representing official policies,
either expressed or implied, of The Regents of the University of California.
"""
import logging
import os
import sys
import scapy.utils
from scapy.layers.l2 import Ether
from scapy.layers.inet import IP, UDP
import cocotb_test.simulator
import cocotb
from cocotb.log import SimLog
from cocotb.clock import Clock
from cocotb.triggers import RisingEdge, FallingEdge, Timer
from cocotbext.axi import AxiStreamBus
from cocotbext.eth import XgmiiSource, XgmiiSink
from cocotbext.pcie.core import RootComplex
from cocotbext.pcie.xilinx.us import UltraScalePlusPcieDevice
try:
import mqnic
except ImportError:
# attempt import from current directory
sys.path.insert(0, os.path.join(os.path.dirname(__file__)))
try:
import mqnic
finally:
del sys.path[0]
class TB(object):
    """Cocotb testbench wrapper for the fpga_core DUT.

    Builds a PCIe root complex model attached to the DUT's UltraScale+ PCIe
    interface, instantiates the mqnic software driver model, creates XGMII
    sources/sinks for all eight QSFP lanes, ties off unused inputs, and runs
    an optional Ethernet loopback task used by the datapath tests.
    """

    def __init__(self, dut):
        """Wire up all bench models around *dut* and start the loopback task."""
        self.dut = dut
        # BAR0 aperture size is supplied by the test runner via the environment.
        self.BAR0_APERTURE = int(os.getenv("PARAM_BAR0_APERTURE"))
        self.log = SimLog("cocotb.tb")
        self.log.setLevel(logging.DEBUG)
        # PCIe
        self.rc = RootComplex()
        self.rc.max_payload_size = 0x1 # 256 bytes
        self.rc.max_read_request_size = 0x2 # 512 bytes
        # PCIe device model; commented-out names below are interface signals
        # that are intentionally left unconnected on this board.
        self.dev = UltraScalePlusPcieDevice(
            # configuration options
            pcie_generation=3,
            pcie_link_width=16,
            user_clk_frequency=250e6,
            alignment="dword",
            cq_cc_straddle=False,
            rq_rc_straddle=False,
            rc_4tlp_straddle=False,
            enable_pf1=False,
            enable_client_tag=True,
            enable_extended_tag=True,
            enable_parity=False,
            enable_rx_msg_interface=False,
            enable_sriov=False,
            enable_extended_configuration=False,
            enable_pf0_msi=True,
            enable_pf1_msi=False,
            # signals
            # Clock and Reset Interface
            user_clk=dut.clk_250mhz,
            user_reset=dut.rst_250mhz,
            # user_lnk_up
            # sys_clk
            # sys_clk_gt
            # sys_reset
            # phy_rdy_out
            # Requester reQuest Interface
            rq_bus=AxiStreamBus.from_prefix(dut, "m_axis_rq"),
            pcie_rq_seq_num0=dut.s_axis_rq_seq_num_0,
            pcie_rq_seq_num_vld0=dut.s_axis_rq_seq_num_valid_0,
            pcie_rq_seq_num1=dut.s_axis_rq_seq_num_1,
            pcie_rq_seq_num_vld1=dut.s_axis_rq_seq_num_valid_1,
            # pcie_rq_tag0
            # pcie_rq_tag1
            # pcie_rq_tag_av
            # pcie_rq_tag_vld0
            # pcie_rq_tag_vld1
            # Requester Completion Interface
            rc_bus=AxiStreamBus.from_prefix(dut, "s_axis_rc"),
            # Completer reQuest Interface
            cq_bus=AxiStreamBus.from_prefix(dut, "s_axis_cq"),
            # pcie_cq_np_req
            # pcie_cq_np_req_count
            # Completer Completion Interface
            cc_bus=AxiStreamBus.from_prefix(dut, "m_axis_cc"),
            # Transmit Flow Control Interface
            # pcie_tfc_nph_av=dut.pcie_tfc_nph_av,
            # pcie_tfc_npd_av=dut.pcie_tfc_npd_av,
            # Configuration Management Interface
            cfg_mgmt_addr=dut.cfg_mgmt_addr,
            cfg_mgmt_function_number=dut.cfg_mgmt_function_number,
            cfg_mgmt_write=dut.cfg_mgmt_write,
            cfg_mgmt_write_data=dut.cfg_mgmt_write_data,
            cfg_mgmt_byte_enable=dut.cfg_mgmt_byte_enable,
            cfg_mgmt_read=dut.cfg_mgmt_read,
            cfg_mgmt_read_data=dut.cfg_mgmt_read_data,
            cfg_mgmt_read_write_done=dut.cfg_mgmt_read_write_done,
            # cfg_mgmt_debug_access
            # Configuration Status Interface
            # cfg_phy_link_down
            # cfg_phy_link_status
            # cfg_negotiated_width
            # cfg_current_speed
            cfg_max_payload=dut.cfg_max_payload,
            cfg_max_read_req=dut.cfg_max_read_req,
            # cfg_function_status
            # cfg_vf_status
            # cfg_function_power_state
            # cfg_vf_power_state
            # cfg_link_power_state
            # cfg_err_cor_out
            # cfg_err_nonfatal_out
            # cfg_err_fatal_out
            # cfg_local_error_out
            # cfg_local_error_valid
            # cfg_rx_pm_state
            # cfg_tx_pm_state
            # cfg_ltssm_state
            # cfg_rcb_status
            # cfg_obff_enable
            # cfg_pl_status_change
            # cfg_tph_requester_enable
            # cfg_tph_st_mode
            # cfg_vf_tph_requester_enable
            # cfg_vf_tph_st_mode
            # Configuration Received Message Interface
            # cfg_msg_received
            # cfg_msg_received_data
            # cfg_msg_received_type
            # Configuration Transmit Message Interface
            # cfg_msg_transmit
            # cfg_msg_transmit_type
            # cfg_msg_transmit_data
            # cfg_msg_transmit_done
            # Configuration Flow Control Interface
            cfg_fc_ph=dut.cfg_fc_ph,
            cfg_fc_pd=dut.cfg_fc_pd,
            cfg_fc_nph=dut.cfg_fc_nph,
            cfg_fc_npd=dut.cfg_fc_npd,
            cfg_fc_cplh=dut.cfg_fc_cplh,
            cfg_fc_cpld=dut.cfg_fc_cpld,
            cfg_fc_sel=dut.cfg_fc_sel,
            # Configuration Control Interface
            # cfg_hot_reset_in
            # cfg_hot_reset_out
            # cfg_config_space_enable
            # cfg_dsn
            # cfg_bus_number
            # cfg_ds_port_number
            # cfg_ds_bus_number
            # cfg_ds_device_number
            # cfg_ds_function_number
            # cfg_power_state_change_ack
            # cfg_power_state_change_interrupt
            cfg_err_cor_in=dut.status_error_cor,
            cfg_err_uncor_in=dut.status_error_uncor,
            # cfg_flr_in_process
            # cfg_flr_done
            # cfg_vf_flr_in_process
            # cfg_vf_flr_func_num
            # cfg_vf_flr_done
            # cfg_pm_aspm_l1_entry_reject
            # cfg_pm_aspm_tx_l0s_entry_disable
            # cfg_req_pm_transition_l23_ready
            # cfg_link_training_enable
            # Configuration Interrupt Controller Interface
            # cfg_interrupt_int
            # cfg_interrupt_sent
            # cfg_interrupt_pending
            cfg_interrupt_msi_enable=dut.cfg_interrupt_msi_enable,
            cfg_interrupt_msi_mmenable=dut.cfg_interrupt_msi_mmenable,
            cfg_interrupt_msi_mask_update=dut.cfg_interrupt_msi_mask_update,
            cfg_interrupt_msi_data=dut.cfg_interrupt_msi_data,
            # cfg_interrupt_msi_select=dut.cfg_interrupt_msi_select,
            cfg_interrupt_msi_int=dut.cfg_interrupt_msi_int,
            cfg_interrupt_msi_pending_status=dut.cfg_interrupt_msi_pending_status,
            cfg_interrupt_msi_pending_status_data_enable=dut.cfg_interrupt_msi_pending_status_data_enable,
            # cfg_interrupt_msi_pending_status_function_num=dut.cfg_interrupt_msi_pending_status_function_num,
            cfg_interrupt_msi_sent=dut.cfg_interrupt_msi_sent,
            cfg_interrupt_msi_fail=dut.cfg_interrupt_msi_fail,
            # cfg_interrupt_msix_enable
            # cfg_interrupt_msix_mask
            # cfg_interrupt_msix_vf_enable
            # cfg_interrupt_msix_vf_mask
            # cfg_interrupt_msix_address
            # cfg_interrupt_msix_data
            # cfg_interrupt_msix_int
            # cfg_interrupt_msix_vec_pending
            # cfg_interrupt_msix_vec_pending_status
            cfg_interrupt_msi_attr=dut.cfg_interrupt_msi_attr,
            cfg_interrupt_msi_tph_present=dut.cfg_interrupt_msi_tph_present,
            cfg_interrupt_msi_tph_type=dut.cfg_interrupt_msi_tph_type,
            # cfg_interrupt_msi_tph_st_tag=dut.cfg_interrupt_msi_tph_st_tag,
            # cfg_interrupt_msi_function_number=dut.cfg_interrupt_msi_function_number,
            # Configuration Extend Interface
            # cfg_ext_read_received
            # cfg_ext_write_received
            # cfg_ext_register_number
            # cfg_ext_function_number
            # cfg_ext_write_data
            # cfg_ext_write_byte_enable
            # cfg_ext_read_data
            # cfg_ext_read_data_valid
        )
        # self.dev.log.setLevel(logging.DEBUG)
        self.rc.make_port().connect(self.dev)
        self.driver = mqnic.Driver(self.rc)
        # 2**5 = 32 MSI vectors advertised by PF0.
        self.dev.functions[0].msi_cap.msi_multiple_message_capable = 5
        self.dev.functions[0].configure_bar(0, 2**self.BAR0_APERTURE, ext=True, prefetch=True)
        # Ethernet
        # One clock + XGMII source (RX) and sink (TX) per QSFP lane.
        # 2.56 ns period corresponds to the 64-bit XGMII clock at 25G per lane.
        cocotb.fork(Clock(dut.qsfp_0_rx_clk_0, 2.56, units="ns").start())
        self.qsfp_0_0_source = XgmiiSource(dut.qsfp_0_rxd_0, dut.qsfp_0_rxc_0, dut.qsfp_0_rx_clk_0, dut.qsfp_0_rx_rst_0)
        cocotb.fork(Clock(dut.qsfp_0_tx_clk_0, 2.56, units="ns").start())
        self.qsfp_0_0_sink = XgmiiSink(dut.qsfp_0_txd_0, dut.qsfp_0_txc_0, dut.qsfp_0_tx_clk_0, dut.qsfp_0_tx_rst_0)
        cocotb.fork(Clock(dut.qsfp_0_rx_clk_1, 2.56, units="ns").start())
        self.qsfp_0_1_source = XgmiiSource(dut.qsfp_0_rxd_1, dut.qsfp_0_rxc_1, dut.qsfp_0_rx_clk_1, dut.qsfp_0_rx_rst_1)
        cocotb.fork(Clock(dut.qsfp_0_tx_clk_1, 2.56, units="ns").start())
        self.qsfp_0_1_sink = XgmiiSink(dut.qsfp_0_txd_1, dut.qsfp_0_txc_1, dut.qsfp_0_tx_clk_1, dut.qsfp_0_tx_rst_1)
        cocotb.fork(Clock(dut.qsfp_0_rx_clk_2, 2.56, units="ns").start())
        self.qsfp_0_2_source = XgmiiSource(dut.qsfp_0_rxd_2, dut.qsfp_0_rxc_2, dut.qsfp_0_rx_clk_2, dut.qsfp_0_rx_rst_2)
        cocotb.fork(Clock(dut.qsfp_0_tx_clk_2, 2.56, units="ns").start())
        self.qsfp_0_2_sink = XgmiiSink(dut.qsfp_0_txd_2, dut.qsfp_0_txc_2, dut.qsfp_0_tx_clk_2, dut.qsfp_0_tx_rst_2)
        cocotb.fork(Clock(dut.qsfp_0_rx_clk_3, 2.56, units="ns").start())
        self.qsfp_0_3_source = XgmiiSource(dut.qsfp_0_rxd_3, dut.qsfp_0_rxc_3, dut.qsfp_0_rx_clk_3, dut.qsfp_0_rx_rst_3)
        cocotb.fork(Clock(dut.qsfp_0_tx_clk_3, 2.56, units="ns").start())
        self.qsfp_0_3_sink = XgmiiSink(dut.qsfp_0_txd_3, dut.qsfp_0_txc_3, dut.qsfp_0_tx_clk_3, dut.qsfp_0_tx_rst_3)
        cocotb.fork(Clock(dut.qsfp_1_rx_clk_0, 2.56, units="ns").start())
        self.qsfp_1_0_source = XgmiiSource(dut.qsfp_1_rxd_0, dut.qsfp_1_rxc_0, dut.qsfp_1_rx_clk_0, dut.qsfp_1_rx_rst_0)
        cocotb.fork(Clock(dut.qsfp_1_tx_clk_0, 2.56, units="ns").start())
        self.qsfp_1_0_sink = XgmiiSink(dut.qsfp_1_txd_0, dut.qsfp_1_txc_0, dut.qsfp_1_tx_clk_0, dut.qsfp_1_tx_rst_0)
        cocotb.fork(Clock(dut.qsfp_1_rx_clk_1, 2.56, units="ns").start())
        self.qsfp_1_1_source = XgmiiSource(dut.qsfp_1_rxd_1, dut.qsfp_1_rxc_1, dut.qsfp_1_rx_clk_1, dut.qsfp_1_rx_rst_1)
        cocotb.fork(Clock(dut.qsfp_1_tx_clk_1, 2.56, units="ns").start())
        self.qsfp_1_1_sink = XgmiiSink(dut.qsfp_1_txd_1, dut.qsfp_1_txc_1, dut.qsfp_1_tx_clk_1, dut.qsfp_1_tx_rst_1)
        cocotb.fork(Clock(dut.qsfp_1_rx_clk_2, 2.56, units="ns").start())
        self.qsfp_1_2_source = XgmiiSource(dut.qsfp_1_rxd_2, dut.qsfp_1_rxc_2, dut.qsfp_1_rx_clk_2, dut.qsfp_1_rx_rst_2)
        cocotb.fork(Clock(dut.qsfp_1_tx_clk_2, 2.56, units="ns").start())
        self.qsfp_1_2_sink = XgmiiSink(dut.qsfp_1_txd_2, dut.qsfp_1_txc_2, dut.qsfp_1_tx_clk_2, dut.qsfp_1_tx_rst_2)
        cocotb.fork(Clock(dut.qsfp_1_rx_clk_3, 2.56, units="ns").start())
        self.qsfp_1_3_source = XgmiiSource(dut.qsfp_1_rxd_3, dut.qsfp_1_rxc_3, dut.qsfp_1_rx_clk_3, dut.qsfp_1_rx_rst_3)
        cocotb.fork(Clock(dut.qsfp_1_tx_clk_3, 2.56, units="ns").start())
        self.qsfp_1_3_sink = XgmiiSink(dut.qsfp_1_txd_3, dut.qsfp_1_txc_3, dut.qsfp_1_tx_clk_3, dut.qsfp_1_tx_rst_3)
        # Tie off miscellaneous board-level inputs (I2C idle high, modules
        # present, no errors, no flash/BMC/PPS activity).
        dut.qsfp_0_i2c_scl_i.setimmediatevalue(1)
        dut.qsfp_0_i2c_sda_i.setimmediatevalue(1)
        dut.qsfp_0_intr_n.setimmediatevalue(1)
        dut.qsfp_0_mod_prsnt_n.setimmediatevalue(0)
        dut.qsfp_0_rx_error_count_0.setimmediatevalue(0)
        dut.qsfp_0_rx_error_count_1.setimmediatevalue(0)
        dut.qsfp_0_rx_error_count_2.setimmediatevalue(0)
        dut.qsfp_0_rx_error_count_3.setimmediatevalue(0)
        dut.qsfp_1_i2c_scl_i.setimmediatevalue(1)
        dut.qsfp_1_i2c_sda_i.setimmediatevalue(1)
        dut.qsfp_1_intr_n.setimmediatevalue(1)
        dut.qsfp_1_mod_prsnt_n.setimmediatevalue(0)
        dut.qsfp_1_rx_error_count_0.setimmediatevalue(0)
        dut.qsfp_1_rx_error_count_1.setimmediatevalue(0)
        dut.qsfp_1_rx_error_count_2.setimmediatevalue(0)
        dut.qsfp_1_rx_error_count_3.setimmediatevalue(0)
        dut.qspi_dq_i.setimmediatevalue(0)
        dut.pps_in.setimmediatevalue(0)
        dut.bmc_miso.setimmediatevalue(0)
        dut.bmc_int.setimmediatevalue(0)
        # Background Ethernet loopback task, gated by loopback_enable.
        self.loopback_enable = False
        cocotb.fork(self._run_loopback())

    async def init(self):
        """Run the reset sequence, then enumerate the PCIe bus.

        Port resets are deasserted, asserted while the core reset is active,
        and released again shortly after the core reset deasserts.
        """
        self.dut.qsfp_0_rx_rst_0.setimmediatevalue(0)
        self.dut.qsfp_0_tx_rst_0.setimmediatevalue(0)
        self.dut.qsfp_0_rx_rst_1.setimmediatevalue(0)
        self.dut.qsfp_0_tx_rst_1.setimmediatevalue(0)
        self.dut.qsfp_0_rx_rst_2.setimmediatevalue(0)
        self.dut.qsfp_0_tx_rst_2.setimmediatevalue(0)
        self.dut.qsfp_0_rx_rst_3.setimmediatevalue(0)
        self.dut.qsfp_0_tx_rst_3.setimmediatevalue(0)
        self.dut.qsfp_1_rx_rst_0.setimmediatevalue(0)
        self.dut.qsfp_1_tx_rst_0.setimmediatevalue(0)
        self.dut.qsfp_1_rx_rst_1.setimmediatevalue(0)
        self.dut.qsfp_1_tx_rst_1.setimmediatevalue(0)
        self.dut.qsfp_1_rx_rst_2.setimmediatevalue(0)
        self.dut.qsfp_1_tx_rst_2.setimmediatevalue(0)
        self.dut.qsfp_1_rx_rst_3.setimmediatevalue(0)
        self.dut.qsfp_1_tx_rst_3.setimmediatevalue(0)
        await RisingEdge(self.dut.clk_250mhz)
        await RisingEdge(self.dut.clk_250mhz)
        # Assert all port resets while the core comes out of reset.
        self.dut.qsfp_0_rx_rst_0.setimmediatevalue(1)
        self.dut.qsfp_0_tx_rst_0.setimmediatevalue(1)
        self.dut.qsfp_0_rx_rst_1.setimmediatevalue(1)
        self.dut.qsfp_0_tx_rst_1.setimmediatevalue(1)
        self.dut.qsfp_0_rx_rst_2.setimmediatevalue(1)
        self.dut.qsfp_0_tx_rst_2.setimmediatevalue(1)
        self.dut.qsfp_0_rx_rst_3.setimmediatevalue(1)
        self.dut.qsfp_0_tx_rst_3.setimmediatevalue(1)
        self.dut.qsfp_1_rx_rst_0.setimmediatevalue(1)
        self.dut.qsfp_1_tx_rst_0.setimmediatevalue(1)
        self.dut.qsfp_1_rx_rst_1.setimmediatevalue(1)
        self.dut.qsfp_1_tx_rst_1.setimmediatevalue(1)
        self.dut.qsfp_1_rx_rst_2.setimmediatevalue(1)
        self.dut.qsfp_1_tx_rst_2.setimmediatevalue(1)
        self.dut.qsfp_1_rx_rst_3.setimmediatevalue(1)
        self.dut.qsfp_1_tx_rst_3.setimmediatevalue(1)
        # Wait for the core reset to deassert, then release port resets.
        await FallingEdge(self.dut.rst_250mhz)
        await Timer(100, 'ns')
        await RisingEdge(self.dut.clk_250mhz)
        await RisingEdge(self.dut.clk_250mhz)
        self.dut.qsfp_0_rx_rst_0.setimmediatevalue(0)
        self.dut.qsfp_0_tx_rst_0.setimmediatevalue(0)
        self.dut.qsfp_0_rx_rst_1.setimmediatevalue(0)
        self.dut.qsfp_0_tx_rst_1.setimmediatevalue(0)
        self.dut.qsfp_0_rx_rst_2.setimmediatevalue(0)
        self.dut.qsfp_0_tx_rst_2.setimmediatevalue(0)
        self.dut.qsfp_0_rx_rst_3.setimmediatevalue(0)
        self.dut.qsfp_0_tx_rst_3.setimmediatevalue(0)
        self.dut.qsfp_1_rx_rst_0.setimmediatevalue(0)
        self.dut.qsfp_1_tx_rst_0.setimmediatevalue(0)
        self.dut.qsfp_1_rx_rst_1.setimmediatevalue(0)
        self.dut.qsfp_1_tx_rst_1.setimmediatevalue(0)
        self.dut.qsfp_1_rx_rst_2.setimmediatevalue(0)
        self.dut.qsfp_1_tx_rst_2.setimmediatevalue(0)
        self.dut.qsfp_1_rx_rst_3.setimmediatevalue(0)
        self.dut.qsfp_1_tx_rst_3.setimmediatevalue(0)
        await self.rc.enumerate(enable_bus_mastering=True, configure_msi=True)

    async def _run_loopback(self):
        """Background task: while loopback_enable is set, feed each QSFP TX
        sink's output back into the matching RX source (per-lane loopback)."""
        while True:
            await RisingEdge(self.dut.clk_250mhz)
            if self.loopback_enable:
                if not self.qsfp_0_0_sink.empty():
                    await self.qsfp_0_0_source.send(await self.qsfp_0_0_sink.recv())
                if not self.qsfp_0_1_sink.empty():
                    await self.qsfp_0_1_source.send(await self.qsfp_0_1_sink.recv())
                if not self.qsfp_0_2_sink.empty():
                    await self.qsfp_0_2_source.send(await self.qsfp_0_2_sink.recv())
                if not self.qsfp_0_3_sink.empty():
                    await self.qsfp_0_3_source.send(await self.qsfp_0_3_sink.recv())
                if not self.qsfp_1_0_sink.empty():
                    await self.qsfp_1_0_source.send(await self.qsfp_1_0_sink.recv())
                if not self.qsfp_1_1_sink.empty():
                    await self.qsfp_1_1_source.send(await self.qsfp_1_1_sink.recv())
                if not self.qsfp_1_2_sink.empty():
                    await self.qsfp_1_2_source.send(await self.qsfp_1_2_sink.recv())
                if not self.qsfp_1_3_sink.empty():
                    await self.qsfp_1_3_source.send(await self.qsfp_1_3_sink.recv())
@cocotb.test()
async def run_test_nic(dut):
    """End-to-end NIC datapath test.

    Initializes the device and driver, enables the transmit schedulers, then
    exercises: single-packet TX/RX, RX/TX checksum offload against a scapy
    reference, and bursts of 64 small and 64 large packets through the
    Ethernet loopback path.
    """
    tb = TB(dut)
    await tb.init()
    tb.log.info("Init driver")
    await tb.driver.init_dev(tb.dev.functions[0].pcie_id)
    await tb.driver.interfaces[0].open()
    # await driver.interfaces[1].open()
    # enable queues
    tb.log.info("Enable queues")
    await tb.rc.mem_write_dword(tb.driver.interfaces[0].ports[0].hw_addr+mqnic.MQNIC_PORT_REG_SCHED_ENABLE, 0x00000001)
    for k in range(tb.driver.interfaces[0].tx_queue_count):
        await tb.rc.mem_write_dword(tb.driver.interfaces[0].ports[0].schedulers[0].hw_addr+4*k, 0x00000003)
    # wait for all writes to complete
    await tb.rc.mem_read(tb.driver.hw_addr, 4)
    tb.log.info("Init complete")
    tb.log.info("Send and receive single packet")
    data = bytearray([x % 256 for x in range(1024)])
    await tb.driver.interfaces[0].start_xmit(data, 0)
    pkt = await tb.qsfp_0_0_sink.recv()
    tb.log.info("Packet: %s", pkt)
    await tb.qsfp_0_0_source.send(pkt)
    pkt = await tb.driver.interfaces[0].recv()
    tb.log.info("Packet: %s", pkt)
    # Hardware RX checksum (over the frame past the 14-byte Ethernet header)
    # must match a software 16-bit ones'-complement checksum.
    assert pkt.rx_checksum == ~scapy.utils.checksum(bytes(pkt.data[14:])) & 0xffff
    # await tb.driver.interfaces[1].start_xmit(data, 0)
    # pkt = await tb.qsfp_1_0_sink.recv()
    # tb.log.info("Packet: %s", pkt)
    # await tb.qsfp_1_0_source.send(pkt)
    # pkt = await tb.driver.interfaces[1].recv()
    # tb.log.info("Packet: %s", pkt)
    # assert pkt.rx_checksum == ~scapy.utils.checksum(bytes(pkt.data[14:])) & 0xffff
    tb.log.info("RX and TX checksum tests")
    payload = bytes([x % 256 for x in range(256)])
    eth = Ether(src='5A:51:52:53:54:55', dst='DA:D1:D2:D3:D4:D5')
    ip = IP(src='192.168.1.100', dst='192.168.1.101')
    udp = UDP(sport=1, dport=2)
    test_pkt = eth / ip / udp / payload
    test_pkt2 = test_pkt.copy()
    # Pre-fill the UDP checksum field so TX offload (csum_start=34, offset=6)
    # produces the correct final checksum.
    test_pkt2[UDP].chksum = scapy.utils.checksum(bytes(test_pkt2[UDP]))
    await tb.driver.interfaces[0].start_xmit(test_pkt2.build(), 0, 34, 6)
    pkt = await tb.qsfp_0_0_sink.recv()
    tb.log.info("Packet: %s", pkt)
    await tb.qsfp_0_0_source.send(pkt)
    pkt = await tb.driver.interfaces[0].recv()
    tb.log.info("Packet: %s", pkt)
    assert pkt.rx_checksum == ~scapy.utils.checksum(bytes(pkt.data[14:])) & 0xffff
    assert Ether(pkt.data).build() == test_pkt.build()
    tb.log.info("Multiple small packets")
    count = 64
    pkts = [bytearray([(x+k) % 256 for x in range(60)]) for k in range(count)]
    tb.loopback_enable = True
    for p in pkts:
        await tb.driver.interfaces[0].start_xmit(p, 0)
    for k in range(count):
        pkt = await tb.driver.interfaces[0].recv()
        tb.log.info("Packet: %s", pkt)
        assert pkt.data == pkts[k]
        assert pkt.rx_checksum == ~scapy.utils.checksum(bytes(pkt.data[14:])) & 0xffff
    tb.loopback_enable = False
    tb.log.info("Multiple large packets")
    count = 64
    pkts = [bytearray([(x+k) % 256 for x in range(1514)]) for k in range(count)]
    tb.loopback_enable = True
    for p in pkts:
        await tb.driver.interfaces[0].start_xmit(p, 0)
    for k in range(count):
        pkt = await tb.driver.interfaces[0].recv()
        tb.log.info("Packet: %s", pkt)
        assert pkt.data == pkts[k]
        assert pkt.rx_checksum == ~scapy.utils.checksum(bytes(pkt.data[14:])) & 0xffff
    tb.loopback_enable = False
    await RisingEdge(dut.clk_250mhz)
    await RisingEdge(dut.clk_250mhz)
# cocotb-test
# Locations of the RTL source trees relative to this test directory.
tests_dir = os.path.dirname(__file__)
rtl_dir = os.path.abspath(os.path.join(tests_dir, '..', '..', 'rtl'))
lib_dir = os.path.abspath(os.path.join(rtl_dir, '..', 'lib'))
# Per-library RTL directories under lib/<name>/rtl.
axi_rtl_dir, axis_rtl_dir, eth_rtl_dir, pcie_rtl_dir = (
    os.path.abspath(os.path.join(lib_dir, name, 'rtl'))
    for name in ('axi', 'axis', 'eth', 'pcie'))
def test_fpga_core(request):
    """Collect the RTL source list and parameters, then run the cocotb sim."""
    dut = "fpga_core"
    module = os.path.splitext(os.path.basename(__file__))[0]
    toplevel = dut
    common_dir = os.path.join(rtl_dir, "common")
    # Top-level and board-specific sources.
    verilog_sources = [
        os.path.join(rtl_dir, f"{dut}.v"),
        os.path.join(rtl_dir, "bmc_spi.v"),
    ]
    # Shared mqnic core components.
    verilog_sources += [os.path.join(common_dir, name) for name in (
        "mqnic_interface.v",
        "mqnic_port.v",
        "cpl_write.v",
        "cpl_op_mux.v",
        "desc_fetch.v",
        "desc_op_mux.v",
        "queue_manager.v",
        "cpl_queue_manager.v",
        "tx_engine.v",
        "rx_engine.v",
        "tx_checksum.v",
        "rx_hash.v",
        "rx_checksum.v",
        "tx_scheduler_rr.v",
        "event_mux.v",
        "tdma_scheduler.v",
        "tdma_ber.v",
        "tdma_ber_ch.v",
    )]
    # Ethernet MAC and PTP components.
    verilog_sources += [os.path.join(eth_rtl_dir, name) for name in (
        "eth_mac_10g_fifo.v",
        "eth_mac_10g.v",
        "axis_xgmii_rx_64.v",
        "axis_xgmii_tx_64.v",
        "lfsr.v",
        "ptp_clock.v",
        "ptp_clock_cdc.v",
        "ptp_perout.v",
        "ptp_ts_extract.v",
    )]
    # AXI-lite infrastructure.
    verilog_sources += [os.path.join(axi_rtl_dir, name) for name in (
        "axil_interconnect.v",
        "arbiter.v",
        "priority_encoder.v",
    )]
    # AXI stream infrastructure.
    verilog_sources += [os.path.join(axis_rtl_dir, name) for name in (
        "axis_adapter.v",
        "axis_arb_mux.v",
        "axis_async_fifo.v",
        "axis_async_fifo_adapter.v",
        "axis_fifo.v",
        "axis_register.v",
    )]
    # PCIe DMA components.
    verilog_sources += [os.path.join(pcie_rtl_dir, name) for name in (
        "pcie_us_axil_master.v",
        "dma_if_pcie_us.v",
        "dma_if_pcie_us_rd.v",
        "dma_if_pcie_us_wr.v",
        "dma_if_mux.v",
        "dma_if_mux_rd.v",
        "dma_if_mux_wr.v",
        "dma_psdpram.v",
        "dma_client_axis_sink.v",
        "dma_client_axis_source.v",
        "pcie_us_cfg.v",
        "pcie_us_msi.v",
        "pulse_merge.v",
    )]
    # Module parameters; the sideband user widths follow the PCIe data width.
    pcie_data_width = 512
    parameters = {
        'AXIS_PCIE_DATA_WIDTH': pcie_data_width,
        'AXIS_PCIE_KEEP_WIDTH': pcie_data_width // 32,
        'AXIS_PCIE_RQ_USER_WIDTH': 62 if pcie_data_width < 512 else 137,
        'AXIS_PCIE_RC_USER_WIDTH': 75 if pcie_data_width < 512 else 161,
        'AXIS_PCIE_CQ_USER_WIDTH': 88 if pcie_data_width < 512 else 183,
        'AXIS_PCIE_CC_USER_WIDTH': 33 if pcie_data_width < 512 else 81,
        'RQ_SEQ_NUM_WIDTH': 6,
        'BAR0_APERTURE': 24,
    }
    # Expose the parameters to the testbench process via the environment.
    extra_env = {f'PARAM_{k}': str(v) for k, v in parameters.items()}
    # Per-test build directory, with pytest's parametrize brackets sanitized.
    safe_name = request.node.name.replace('[', '-').replace(']', '')
    sim_build = os.path.join(tests_dir, "sim_build", safe_name)
    cocotb_test.simulator.run(
        python_search=[tests_dir],
        verilog_sources=verilog_sources,
        toplevel=toplevel,
        module=module,
        parameters=parameters,
        sim_build=sim_build,
        extra_env=extra_env,
    )
| 41.17757 | 120 | 0.676388 |
import logging
import os
import sys
import scapy.utils
from scapy.layers.l2 import Ether
from scapy.layers.inet import IP, UDP
import cocotb_test.simulator
import cocotb
from cocotb.log import SimLog
from cocotb.clock import Clock
from cocotb.triggers import RisingEdge, FallingEdge, Timer
from cocotbext.axi import AxiStreamBus
from cocotbext.eth import XgmiiSource, XgmiiSink
from cocotbext.pcie.core import RootComplex
from cocotbext.pcie.xilinx.us import UltraScalePlusPcieDevice
try:
import mqnic
except ImportError:
sys.path.insert(0, os.path.join(os.path.dirname(__file__)))
try:
import mqnic
finally:
del sys.path[0]
class TB(object):
def __init__(self, dut):
self.dut = dut
self.BAR0_APERTURE = int(os.getenv("PARAM_BAR0_APERTURE"))
self.log = SimLog("cocotb.tb")
self.log.setLevel(logging.DEBUG)
self.rc = RootComplex()
self.rc.max_payload_size = 0x1
self.rc.max_read_request_size = 0x2
self.dev = UltraScalePlusPcieDevice(
pcie_generation=3,
pcie_link_width=16,
user_clk_frequency=250e6,
alignment="dword",
cq_cc_straddle=False,
rq_rc_straddle=False,
rc_4tlp_straddle=False,
enable_pf1=False,
enable_client_tag=True,
enable_extended_tag=True,
enable_parity=False,
enable_rx_msg_interface=False,
enable_sriov=False,
enable_extended_configuration=False,
enable_pf0_msi=True,
enable_pf1_msi=False,
user_clk=dut.clk_250mhz,
user_reset=dut.rst_250mhz,
rq_bus=AxiStreamBus.from_prefix(dut, "m_axis_rq"),
pcie_rq_seq_num0=dut.s_axis_rq_seq_num_0,
pcie_rq_seq_num_vld0=dut.s_axis_rq_seq_num_valid_0,
pcie_rq_seq_num1=dut.s_axis_rq_seq_num_1,
pcie_rq_seq_num_vld1=dut.s_axis_rq_seq_num_valid_1,
rc_bus=AxiStreamBus.from_prefix(dut, "s_axis_rc"),
cq_bus=AxiStreamBus.from_prefix(dut, "s_axis_cq"),
cc_bus=AxiStreamBus.from_prefix(dut, "m_axis_cc"),
cfg_mgmt_addr=dut.cfg_mgmt_addr,
cfg_mgmt_function_number=dut.cfg_mgmt_function_number,
cfg_mgmt_write=dut.cfg_mgmt_write,
cfg_mgmt_write_data=dut.cfg_mgmt_write_data,
cfg_mgmt_byte_enable=dut.cfg_mgmt_byte_enable,
cfg_mgmt_read=dut.cfg_mgmt_read,
cfg_mgmt_read_data=dut.cfg_mgmt_read_data,
cfg_mgmt_read_write_done=dut.cfg_mgmt_read_write_done,
cfg_max_payload=dut.cfg_max_payload,
cfg_max_read_req=dut.cfg_max_read_req,
cfg_fc_ph=dut.cfg_fc_ph,
cfg_fc_pd=dut.cfg_fc_pd,
cfg_fc_nph=dut.cfg_fc_nph,
cfg_fc_npd=dut.cfg_fc_npd,
cfg_fc_cplh=dut.cfg_fc_cplh,
cfg_fc_cpld=dut.cfg_fc_cpld,
cfg_fc_sel=dut.cfg_fc_sel,
cfg_err_cor_in=dut.status_error_cor,
cfg_err_uncor_in=dut.status_error_uncor,
cfg_interrupt_msi_enable=dut.cfg_interrupt_msi_enable,
cfg_interrupt_msi_mmenable=dut.cfg_interrupt_msi_mmenable,
cfg_interrupt_msi_mask_update=dut.cfg_interrupt_msi_mask_update,
cfg_interrupt_msi_data=dut.cfg_interrupt_msi_data,
cfg_interrupt_msi_int=dut.cfg_interrupt_msi_int,
cfg_interrupt_msi_pending_status=dut.cfg_interrupt_msi_pending_status,
cfg_interrupt_msi_pending_status_data_enable=dut.cfg_interrupt_msi_pending_status_data_enable,
cfg_interrupt_msi_sent=dut.cfg_interrupt_msi_sent,
cfg_interrupt_msi_fail=dut.cfg_interrupt_msi_fail,
cfg_interrupt_msi_attr=dut.cfg_interrupt_msi_attr,
cfg_interrupt_msi_tph_present=dut.cfg_interrupt_msi_tph_present,
cfg_interrupt_msi_tph_type=dut.cfg_interrupt_msi_tph_type,
)
self.rc.make_port().connect(self.dev)
self.driver = mqnic.Driver(self.rc)
self.dev.functions[0].msi_cap.msi_multiple_message_capable = 5
self.dev.functions[0].configure_bar(0, 2**self.BAR0_APERTURE, ext=True, prefetch=True)
cocotb.fork(Clock(dut.qsfp_0_rx_clk_0, 2.56, units="ns").start())
self.qsfp_0_0_source = XgmiiSource(dut.qsfp_0_rxd_0, dut.qsfp_0_rxc_0, dut.qsfp_0_rx_clk_0, dut.qsfp_0_rx_rst_0)
cocotb.fork(Clock(dut.qsfp_0_tx_clk_0, 2.56, units="ns").start())
self.qsfp_0_0_sink = XgmiiSink(dut.qsfp_0_txd_0, dut.qsfp_0_txc_0, dut.qsfp_0_tx_clk_0, dut.qsfp_0_tx_rst_0)
cocotb.fork(Clock(dut.qsfp_0_rx_clk_1, 2.56, units="ns").start())
self.qsfp_0_1_source = XgmiiSource(dut.qsfp_0_rxd_1, dut.qsfp_0_rxc_1, dut.qsfp_0_rx_clk_1, dut.qsfp_0_rx_rst_1)
cocotb.fork(Clock(dut.qsfp_0_tx_clk_1, 2.56, units="ns").start())
self.qsfp_0_1_sink = XgmiiSink(dut.qsfp_0_txd_1, dut.qsfp_0_txc_1, dut.qsfp_0_tx_clk_1, dut.qsfp_0_tx_rst_1)
cocotb.fork(Clock(dut.qsfp_0_rx_clk_2, 2.56, units="ns").start())
self.qsfp_0_2_source = XgmiiSource(dut.qsfp_0_rxd_2, dut.qsfp_0_rxc_2, dut.qsfp_0_rx_clk_2, dut.qsfp_0_rx_rst_2)
cocotb.fork(Clock(dut.qsfp_0_tx_clk_2, 2.56, units="ns").start())
self.qsfp_0_2_sink = XgmiiSink(dut.qsfp_0_txd_2, dut.qsfp_0_txc_2, dut.qsfp_0_tx_clk_2, dut.qsfp_0_tx_rst_2)
cocotb.fork(Clock(dut.qsfp_0_rx_clk_3, 2.56, units="ns").start())
self.qsfp_0_3_source = XgmiiSource(dut.qsfp_0_rxd_3, dut.qsfp_0_rxc_3, dut.qsfp_0_rx_clk_3, dut.qsfp_0_rx_rst_3)
cocotb.fork(Clock(dut.qsfp_0_tx_clk_3, 2.56, units="ns").start())
self.qsfp_0_3_sink = XgmiiSink(dut.qsfp_0_txd_3, dut.qsfp_0_txc_3, dut.qsfp_0_tx_clk_3, dut.qsfp_0_tx_rst_3)
cocotb.fork(Clock(dut.qsfp_1_rx_clk_0, 2.56, units="ns").start())
self.qsfp_1_0_source = XgmiiSource(dut.qsfp_1_rxd_0, dut.qsfp_1_rxc_0, dut.qsfp_1_rx_clk_0, dut.qsfp_1_rx_rst_0)
cocotb.fork(Clock(dut.qsfp_1_tx_clk_0, 2.56, units="ns").start())
self.qsfp_1_0_sink = XgmiiSink(dut.qsfp_1_txd_0, dut.qsfp_1_txc_0, dut.qsfp_1_tx_clk_0, dut.qsfp_1_tx_rst_0)
cocotb.fork(Clock(dut.qsfp_1_rx_clk_1, 2.56, units="ns").start())
self.qsfp_1_1_source = XgmiiSource(dut.qsfp_1_rxd_1, dut.qsfp_1_rxc_1, dut.qsfp_1_rx_clk_1, dut.qsfp_1_rx_rst_1)
cocotb.fork(Clock(dut.qsfp_1_tx_clk_1, 2.56, units="ns").start())
self.qsfp_1_1_sink = XgmiiSink(dut.qsfp_1_txd_1, dut.qsfp_1_txc_1, dut.qsfp_1_tx_clk_1, dut.qsfp_1_tx_rst_1)
cocotb.fork(Clock(dut.qsfp_1_rx_clk_2, 2.56, units="ns").start())
self.qsfp_1_2_source = XgmiiSource(dut.qsfp_1_rxd_2, dut.qsfp_1_rxc_2, dut.qsfp_1_rx_clk_2, dut.qsfp_1_rx_rst_2)
cocotb.fork(Clock(dut.qsfp_1_tx_clk_2, 2.56, units="ns").start())
self.qsfp_1_2_sink = XgmiiSink(dut.qsfp_1_txd_2, dut.qsfp_1_txc_2, dut.qsfp_1_tx_clk_2, dut.qsfp_1_tx_rst_2)
cocotb.fork(Clock(dut.qsfp_1_rx_clk_3, 2.56, units="ns").start())
self.qsfp_1_3_source = XgmiiSource(dut.qsfp_1_rxd_3, dut.qsfp_1_rxc_3, dut.qsfp_1_rx_clk_3, dut.qsfp_1_rx_rst_3)
cocotb.fork(Clock(dut.qsfp_1_tx_clk_3, 2.56, units="ns").start())
self.qsfp_1_3_sink = XgmiiSink(dut.qsfp_1_txd_3, dut.qsfp_1_txc_3, dut.qsfp_1_tx_clk_3, dut.qsfp_1_tx_rst_3)
dut.qsfp_0_i2c_scl_i.setimmediatevalue(1)
dut.qsfp_0_i2c_sda_i.setimmediatevalue(1)
dut.qsfp_0_intr_n.setimmediatevalue(1)
dut.qsfp_0_mod_prsnt_n.setimmediatevalue(0)
dut.qsfp_0_rx_error_count_0.setimmediatevalue(0)
dut.qsfp_0_rx_error_count_1.setimmediatevalue(0)
dut.qsfp_0_rx_error_count_2.setimmediatevalue(0)
dut.qsfp_0_rx_error_count_3.setimmediatevalue(0)
dut.qsfp_1_i2c_scl_i.setimmediatevalue(1)
dut.qsfp_1_i2c_sda_i.setimmediatevalue(1)
dut.qsfp_1_intr_n.setimmediatevalue(1)
dut.qsfp_1_mod_prsnt_n.setimmediatevalue(0)
dut.qsfp_1_rx_error_count_0.setimmediatevalue(0)
dut.qsfp_1_rx_error_count_1.setimmediatevalue(0)
dut.qsfp_1_rx_error_count_2.setimmediatevalue(0)
dut.qsfp_1_rx_error_count_3.setimmediatevalue(0)
dut.qspi_dq_i.setimmediatevalue(0)
dut.pps_in.setimmediatevalue(0)
dut.bmc_miso.setimmediatevalue(0)
dut.bmc_int.setimmediatevalue(0)
self.loopback_enable = False
cocotb.fork(self._run_loopback())
async def init(self):
self.dut.qsfp_0_rx_rst_0.setimmediatevalue(0)
self.dut.qsfp_0_tx_rst_0.setimmediatevalue(0)
self.dut.qsfp_0_rx_rst_1.setimmediatevalue(0)
self.dut.qsfp_0_tx_rst_1.setimmediatevalue(0)
self.dut.qsfp_0_rx_rst_2.setimmediatevalue(0)
self.dut.qsfp_0_tx_rst_2.setimmediatevalue(0)
self.dut.qsfp_0_rx_rst_3.setimmediatevalue(0)
self.dut.qsfp_0_tx_rst_3.setimmediatevalue(0)
self.dut.qsfp_1_rx_rst_0.setimmediatevalue(0)
self.dut.qsfp_1_tx_rst_0.setimmediatevalue(0)
self.dut.qsfp_1_rx_rst_1.setimmediatevalue(0)
self.dut.qsfp_1_tx_rst_1.setimmediatevalue(0)
self.dut.qsfp_1_rx_rst_2.setimmediatevalue(0)
self.dut.qsfp_1_tx_rst_2.setimmediatevalue(0)
self.dut.qsfp_1_rx_rst_3.setimmediatevalue(0)
self.dut.qsfp_1_tx_rst_3.setimmediatevalue(0)
await RisingEdge(self.dut.clk_250mhz)
await RisingEdge(self.dut.clk_250mhz)
self.dut.qsfp_0_rx_rst_0.setimmediatevalue(1)
self.dut.qsfp_0_tx_rst_0.setimmediatevalue(1)
self.dut.qsfp_0_rx_rst_1.setimmediatevalue(1)
self.dut.qsfp_0_tx_rst_1.setimmediatevalue(1)
self.dut.qsfp_0_rx_rst_2.setimmediatevalue(1)
self.dut.qsfp_0_tx_rst_2.setimmediatevalue(1)
self.dut.qsfp_0_rx_rst_3.setimmediatevalue(1)
self.dut.qsfp_0_tx_rst_3.setimmediatevalue(1)
self.dut.qsfp_1_rx_rst_0.setimmediatevalue(1)
self.dut.qsfp_1_tx_rst_0.setimmediatevalue(1)
self.dut.qsfp_1_rx_rst_1.setimmediatevalue(1)
self.dut.qsfp_1_tx_rst_1.setimmediatevalue(1)
self.dut.qsfp_1_rx_rst_2.setimmediatevalue(1)
self.dut.qsfp_1_tx_rst_2.setimmediatevalue(1)
self.dut.qsfp_1_rx_rst_3.setimmediatevalue(1)
self.dut.qsfp_1_tx_rst_3.setimmediatevalue(1)
await FallingEdge(self.dut.rst_250mhz)
await Timer(100, 'ns')
await RisingEdge(self.dut.clk_250mhz)
await RisingEdge(self.dut.clk_250mhz)
self.dut.qsfp_0_rx_rst_0.setimmediatevalue(0)
self.dut.qsfp_0_tx_rst_0.setimmediatevalue(0)
self.dut.qsfp_0_rx_rst_1.setimmediatevalue(0)
self.dut.qsfp_0_tx_rst_1.setimmediatevalue(0)
self.dut.qsfp_0_rx_rst_2.setimmediatevalue(0)
self.dut.qsfp_0_tx_rst_2.setimmediatevalue(0)
self.dut.qsfp_0_rx_rst_3.setimmediatevalue(0)
self.dut.qsfp_0_tx_rst_3.setimmediatevalue(0)
self.dut.qsfp_1_rx_rst_0.setimmediatevalue(0)
self.dut.qsfp_1_tx_rst_0.setimmediatevalue(0)
self.dut.qsfp_1_rx_rst_1.setimmediatevalue(0)
self.dut.qsfp_1_tx_rst_1.setimmediatevalue(0)
self.dut.qsfp_1_rx_rst_2.setimmediatevalue(0)
self.dut.qsfp_1_tx_rst_2.setimmediatevalue(0)
self.dut.qsfp_1_rx_rst_3.setimmediatevalue(0)
self.dut.qsfp_1_tx_rst_3.setimmediatevalue(0)
await self.rc.enumerate(enable_bus_mastering=True, configure_msi=True)
async def _run_loopback(self):
while True:
await RisingEdge(self.dut.clk_250mhz)
if self.loopback_enable:
if not self.qsfp_0_0_sink.empty():
await self.qsfp_0_0_source.send(await self.qsfp_0_0_sink.recv())
if not self.qsfp_0_1_sink.empty():
await self.qsfp_0_1_source.send(await self.qsfp_0_1_sink.recv())
if not self.qsfp_0_2_sink.empty():
await self.qsfp_0_2_source.send(await self.qsfp_0_2_sink.recv())
if not self.qsfp_0_3_sink.empty():
await self.qsfp_0_3_source.send(await self.qsfp_0_3_sink.recv())
if not self.qsfp_1_0_sink.empty():
await self.qsfp_1_0_source.send(await self.qsfp_1_0_sink.recv())
if not self.qsfp_1_1_sink.empty():
await self.qsfp_1_1_source.send(await self.qsfp_1_1_sink.recv())
if not self.qsfp_1_2_sink.empty():
await self.qsfp_1_2_source.send(await self.qsfp_1_2_sink.recv())
if not self.qsfp_1_3_sink.empty():
await self.qsfp_1_3_source.send(await self.qsfp_1_3_sink.recv())
@cocotb.test()
async def run_test_nic(dut):
    """End-to-end NIC smoke test.

    Brings up the core and driver, then exercises: single-packet manual
    loopback, RX/TX checksum offload, and bursts of 64 small (60 B) and
    64 large (1514 B) frames through the automatic loopback path.
    """
    tb = TB(dut)
    await tb.init()
    tb.log.info("Init driver")
    await tb.driver.init_dev(tb.dev.functions[0].pcie_id)
    await tb.driver.interfaces[0].open()
    tb.log.info("Enable queues")
    # Enable the port scheduler via its control register.
    await tb.rc.mem_write_dword(tb.driver.interfaces[0].ports[0].hw_addr+mqnic.MQNIC_PORT_REG_SCHED_ENABLE, 0x00000001)
    # Write a control word for every TX queue slot in scheduler 0
    # (0x3 presumably enables the queue — TODO confirm against the driver).
    for k in range(tb.driver.interfaces[0].tx_queue_count):
        await tb.rc.mem_write_dword(tb.driver.interfaces[0].ports[0].schedulers[0].hw_addr+4*k, 0x00000003)
    # Read-back — likely acts as a barrier for the posted writes above.
    await tb.rc.mem_read(tb.driver.hw_addr, 4)
    tb.log.info("Init complete")
    tb.log.info("Send and receive single packet")
    data = bytearray([x % 256 for x in range(1024)])
    await tb.driver.interfaces[0].start_xmit(data, 0)
    # Capture the frame on the MAC side and loop it back manually.
    pkt = await tb.qsfp_0_0_sink.recv()
    tb.log.info("Packet: %s", pkt)
    await tb.qsfp_0_0_source.send(pkt)
    pkt = await tb.driver.interfaces[0].recv()
    tb.log.info("Packet: %s", pkt)
    # RX checksum offload: ones-complement sum of everything after the
    # 14-byte Ethernet header.
    assert pkt.rx_checksum == ~scapy.utils.checksum(bytes(pkt.data[14:])) & 0xffff
    tb.log.info("RX and TX checksum tests")
    payload = bytes([x % 256 for x in range(256)])
    eth = Ether(src='5A:51:52:53:54:55', dst='DA:D1:D2:D3:D4:D5')
    ip = IP(src='192.168.1.100', dst='192.168.1.101')
    udp = UDP(sport=1, dport=2)
    test_pkt = eth / ip / udp / payload
    # Pre-fill the UDP checksum field with the partial (pseudo-header) sum.
    test_pkt2 = test_pkt.copy()
    test_pkt2[UDP].chksum = scapy.utils.checksum(bytes(test_pkt2[UDP]))
    # Extra args 34, 6: presumably checksum start offset and checksum field
    # offset for TX offload — TODO confirm against start_xmit's signature.
    await tb.driver.interfaces[0].start_xmit(test_pkt2.build(), 0, 34, 6)
    pkt = await tb.qsfp_0_0_sink.recv()
    tb.log.info("Packet: %s", pkt)
    await tb.qsfp_0_0_source.send(pkt)
    pkt = await tb.driver.interfaces[0].recv()
    tb.log.info("Packet: %s", pkt)
    assert pkt.rx_checksum == ~scapy.utils.checksum(bytes(pkt.data[14:])) & 0xffff
    # The looped-back frame must round-trip to the original bytes.
    assert Ether(pkt.data).build() == test_pkt.build()
    tb.log.info("Multiple small packets")
    count = 64
    pkts = [bytearray([(x+k) % 256 for x in range(60)]) for k in range(count)]
    tb.loopback_enable = True
    for p in pkts:
        await tb.driver.interfaces[0].start_xmit(p, 0)
    for k in range(count):
        pkt = await tb.driver.interfaces[0].recv()
        tb.log.info("Packet: %s", pkt)
        assert pkt.data == pkts[k]
        assert pkt.rx_checksum == ~scapy.utils.checksum(bytes(pkt.data[14:])) & 0xffff
    tb.loopback_enable = False
    tb.log.info("Multiple large packets")
    count = 64
    pkts = [bytearray([(x+k) % 256 for x in range(1514)]) for k in range(count)]
    tb.loopback_enable = True
    for p in pkts:
        await tb.driver.interfaces[0].start_xmit(p, 0)
    for k in range(count):
        pkt = await tb.driver.interfaces[0].recv()
        tb.log.info("Packet: %s", pkt)
        assert pkt.data == pkts[k]
        assert pkt.rx_checksum == ~scapy.utils.checksum(bytes(pkt.data[14:])) & 0xffff
    tb.loopback_enable = False
    await RisingEdge(dut.clk_250mhz)
    await RisingEdge(dut.clk_250mhz)
# Directory containing this test module.
tests_dir = os.path.dirname(__file__)
# Design RTL lives two levels up from the tests directory.
rtl_dir = os.path.abspath(os.path.join(tests_dir, '..', '..', 'rtl'))
# Per-protocol RTL subdirectories under the shared library tree.
lib_dir = os.path.abspath(os.path.join(rtl_dir, '..', 'lib'))
axi_rtl_dir = os.path.abspath(os.path.join(lib_dir, 'axi', 'rtl'))
axis_rtl_dir = os.path.abspath(os.path.join(lib_dir, 'axis', 'rtl'))
eth_rtl_dir = os.path.abspath(os.path.join(lib_dir, 'eth', 'rtl'))
pcie_rtl_dir = os.path.abspath(os.path.join(lib_dir, 'pcie', 'rtl'))
def test_fpga_core(request):
    """Collect the RTL source list and toplevel parameters, then launch
    the cocotb simulation of the ``fpga_core`` toplevel via cocotb-test.
    """
    dut = "fpga_core"
    module = os.path.splitext(os.path.basename(__file__))[0]
    toplevel = dut

    # File names per source directory, in the original compile order.
    common_rtl = [
        "mqnic_interface.v",
        "mqnic_port.v",
        "cpl_write.v",
        "cpl_op_mux.v",
        "desc_fetch.v",
        "desc_op_mux.v",
        "queue_manager.v",
        "cpl_queue_manager.v",
        "tx_engine.v",
        "rx_engine.v",
        "tx_checksum.v",
        "rx_hash.v",
        "rx_checksum.v",
        "tx_scheduler_rr.v",
        "event_mux.v",
        "tdma_scheduler.v",
        "tdma_ber.v",
        "tdma_ber_ch.v",
    ]
    eth_rtl = [
        "eth_mac_10g_fifo.v",
        "eth_mac_10g.v",
        "axis_xgmii_rx_64.v",
        "axis_xgmii_tx_64.v",
        "lfsr.v",
        "ptp_clock.v",
        "ptp_clock_cdc.v",
        "ptp_perout.v",
        "ptp_ts_extract.v",
    ]
    axi_rtl = ["axil_interconnect.v", "arbiter.v", "priority_encoder.v"]
    axis_rtl = [
        "axis_adapter.v",
        "axis_arb_mux.v",
        "axis_async_fifo.v",
        "axis_async_fifo_adapter.v",
        "axis_fifo.v",
        "axis_register.v",
    ]
    pcie_rtl = [
        "pcie_us_axil_master.v",
        "dma_if_pcie_us.v",
        "dma_if_pcie_us_rd.v",
        "dma_if_pcie_us_wr.v",
        "dma_if_mux.v",
        "dma_if_mux_rd.v",
        "dma_if_mux_wr.v",
        "dma_psdpram.v",
        "dma_client_axis_sink.v",
        "dma_client_axis_source.v",
        "pcie_us_cfg.v",
        "pcie_us_msi.v",
        "pulse_merge.v",
    ]

    verilog_sources = [
        os.path.join(rtl_dir, f"{dut}.v"),
        os.path.join(rtl_dir, "bmc_spi.v"),
    ]
    verilog_sources += [os.path.join(rtl_dir, "common", f) for f in common_rtl]
    verilog_sources += [os.path.join(eth_rtl_dir, f) for f in eth_rtl]
    verilog_sources += [os.path.join(axi_rtl_dir, f) for f in axi_rtl]
    verilog_sources += [os.path.join(axis_rtl_dir, f) for f in axis_rtl]
    verilog_sources += [os.path.join(pcie_rtl_dir, f) for f in pcie_rtl]

    # PCIe streaming interface widths all derive from the data width.
    pcie_data_width = 512
    parameters = {
        'AXIS_PCIE_DATA_WIDTH': pcie_data_width,
        'AXIS_PCIE_KEEP_WIDTH': pcie_data_width // 32,
        'AXIS_PCIE_RQ_USER_WIDTH': 62 if pcie_data_width < 512 else 137,
        'AXIS_PCIE_RC_USER_WIDTH': 75 if pcie_data_width < 512 else 161,
        'AXIS_PCIE_CQ_USER_WIDTH': 88 if pcie_data_width < 512 else 183,
        'AXIS_PCIE_CC_USER_WIDTH': 33 if pcie_data_width < 512 else 81,
        'RQ_SEQ_NUM_WIDTH': 6,
        'BAR0_APERTURE': 24,
    }

    # Expose parameters to the simulation process as PARAM_* variables.
    extra_env = {f'PARAM_{k}': str(v) for k, v in parameters.items()}

    # Keep one build directory per parametrized test invocation.
    sim_build = os.path.join(tests_dir, "sim_build",
        request.node.name.replace('[', '-').replace(']', ''))

    cocotb_test.simulator.run(
        python_search=[tests_dir],
        verilog_sources=verilog_sources,
        toplevel=toplevel,
        module=module,
        parameters=parameters,
        sim_build=sim_build,
        extra_env=extra_env,
    )
| true | true |
f737c4729852b474fe5ede0528e9a5dfdb750a4c | 2,331 | py | Python | code/test/test.py | selinozdas/ObsCo | 296c0fbac97612b50bca03ccb24b8851781952ac | [
"MIT"
] | 1 | 2018-10-22T18:44:31.000Z | 2018-10-22T18:44:31.000Z | code/test/test.py | selinozdas/epsikon | 296c0fbac97612b50bca03ccb24b8851781952ac | [
"MIT"
] | null | null | null | code/test/test.py | selinozdas/epsikon | 296c0fbac97612b50bca03ccb24b8851781952ac | [
"MIT"
] | null | null | null | from flask import Flask, jsonify
from flask_pymongo import PyMongo
app = Flask(__name__)
app.config["MONGO_URI"] = "mongodb://localhost:27017/obsco"
mongo = PyMongo(app)
@app.route('/users/<int:userId>', methods=['GET'])
def get_user(name = '', userId = -1):
users = mongo.db.users
results = []
#No query for invalid calls
if (name == "" and userId == -1):
raise Exception("You need to enter the name or the ID of the user.")
#function call with userId
elif (userId != -1) :
for entry in users.find({},{'_id':0}):
if (int(entry["id"]) == int(userId)):
results.append(entry)
#function call with only name
elif (str(name) != "") :
split_name = "".join(name.split())
split_name = split_name.lower()
for entry in users.find({},{'_id':0}):
temp_entry = entry["name"].lower()
temp_entry = "".join(temp_entry.split())
if (split_name in temp_entry):
results.append(entry)
if (len(results)==0):
raise Exception("No user has been found with the given credentials.")
return jsonify({'users': results})
@app.route("/skills/<int:userId>", methods=['GET'])
def get_skill(userId, skill = -1)->list:
"""
Finds the specified skill information of a user, if it is not entered returns all skills of the user.
Keyword Arguments:
userId -- unique id of the user (non-optional)
skill -- unique id of the skill (default -1)
Return Value:
skill_temp -- skill information if skill id is given else all skills of the given user
"""
#fetch user
try:
curs_user = mongo.db.users.find({'id':userId},{'_id':0})
user = [i for i in curs_user]
except:
user = []
if (len(user) != 0):
skills = user[0]['skills']
skill_temp = -1
for entry in skills:
if(skill == entry["id"]):
skill_temp = entry
if (skill_temp == -1):
raise Exception("No such skill exist for the given user")
else:
jsonify({'skills': skill_temp})
else:
return jsonify({'skills': skills})
if __name__ == '__main__':
app.run()
| 33.782609 | 109 | 0.55384 | from flask import Flask, jsonify
from flask_pymongo import PyMongo
app = Flask(__name__)
app.config["MONGO_URI"] = "mongodb://localhost:27017/obsco"
mongo = PyMongo(app)
@app.route('/users/<int:userId>', methods=['GET'])
def get_user(name = '', userId = -1):
    """Look up users by id, or by a case- and whitespace-insensitive
    partial name match. Raises if no credential is given or nothing
    matches."""
    collection = mongo.db.users

    if name == "" and userId == -1:
        raise Exception("You need to enter the name or the ID of the user.")

    matches = []
    if userId != -1:
        # Id lookup: compare ids numerically across all documents.
        for doc in collection.find({}, {'_id': 0}):
            if int(doc["id"]) == int(userId):
                matches.append(doc)
    elif str(name) != "":
        # Name lookup: normalize both sides (strip whitespace, lowercase)
        # and accept substring matches.
        needle = "".join(name.split()).lower()
        for doc in collection.find({}, {'_id': 0}):
            haystack = "".join(doc["name"].lower().split())
            if needle in haystack:
                matches.append(doc)

    if not matches:
        raise Exception("No user has been found with the given credentials.")
    return jsonify({'users': matches})
@app.route("/skills/<int:userId>", methods=['GET'])
def get_skill(userId, skill = -1)->list:
try:
curs_user = mongo.db.users.find({'id':userId},{'_id':0})
user = [i for i in curs_user]
except:
user = []
if (len(user) != 0):
skills = user[0]['skills']
skill_temp = -1
for entry in skills:
if(skill == entry["id"]):
skill_temp = entry
if (skill_temp == -1):
raise Exception("No such skill exist for the given user")
else:
jsonify({'skills': skill_temp})
else:
return jsonify({'skills': skills})
if __name__ == '__main__':
app.run()
| true | true |
f737c50464d41648a3cb6c909e3a5fa4f084adea | 2,351 | py | Python | scripts/ipu/inference_gen_galaxy.py | BastienArcelin/IPU-GPU | dde946686478ce77a06821a1517b5b8206ab8de9 | [
"BSD-3-Clause"
] | null | null | null | scripts/ipu/inference_gen_galaxy.py | BastienArcelin/IPU-GPU | dde946686478ce77a06821a1517b5b8206ab8de9 | [
"BSD-3-Clause"
] | null | null | null | scripts/ipu/inference_gen_galaxy.py | BastienArcelin/IPU-GPU | dde946686478ce77a06821a1517b5b8206ab8de9 | [
"BSD-3-Clause"
] | null | null | null | ## Load necessary librairies
import numpy as np
import tensorflow as tf
import tensorflow_probability as tfp
tfk = tf.keras
tfkl = tfk.layers
tfd = tfp.distributions
tfb = tfp.bijectors
import time
import sys
sys.path.insert(0,'')
from flow import *
import utils_vae
# IPU
from tensorflow.compiler.plugin.poplar.ops import gen_ipu_ops
from tensorflow.python import ipu
from tensorflow.python.ipu.scopes import ipu_scope
# Configure the IPU system: default config, auto-select a single IPU.
cfg = ipu.utils.create_ipu_config()#profiling=True,
#profile_execution=True,
#report_directory='fixed_fullModel'
cfg = ipu.utils.auto_select_ipus(cfg, 1)
ipu.utils.configure_ipu_system(cfg)
## Define the normalizing flow: 8 stacked MAF layers over a 32-d latent space
hidden_dim = [256,256]  # hidden layer sizes of each MADE network
layers =8  # number of MAF layers in the chain
bijectors = []
# IPU
# Create an IPU distribution strategy; the flow is built inside its scope
# so the graph is placed on the IPU.
strategy = ipu.ipu_strategy.IPUStrategy()
#with ipu_scope("/device:IPU:0"):  # alternative low-level placement (unused)
with strategy.scope():
    for i in range(0, layers):
        made = make_network(32, hidden_dim,2)
        bijectors.append(MAF(made))
        # Reverse the 32 feature dimensions between successive MAF layers.
        bijectors.append(tfb.Permute(permutation=[31,30,29,28,27,26,25,24,23,22,21,20,19,18,17,16,15,14,13,12,11,10,9,8,7,6,5,4,3,2,1,0]))
    # Drop the trailing permutation ([:-1]) and reverse the list, as
    # tfb.Chain applies bijectors in reverse order.
    bijectors = tfb.Chain(bijectors=list(reversed(bijectors[:-1])))
    # Base distribution: standard normal, transformed by the flow.
    distribution = tfd.TransformedDistribution(
        distribution=tfd.Normal(loc=0., scale=1.),
        bijector=bijectors,
        event_shape=[32]
    )
    # Keras wrapper whose loss is the negative log-likelihood of the flow.
    x_ = tfkl.Input(shape=(32,), dtype=tf.float32)
    log_prob_ = distribution.log_prob(x_)
    model = tfk.Model(x_, log_prob_)
    model.compile(optimizer=tf.optimizers.Adam(), loss=lambda _, log_prob: -log_prob)
    print('flow defined')
## Load pretrained flow weights from the latest checkpoint
loading_path = '../../nflow_weights/'
latest = tf.train.latest_checkpoint(loading_path)
model.load_weights(latest)
## Load the VAE; only its decoder is used below to map latents to images
vae_lsst_conv,vae_lsst_utils, encoder_LSST, decoder_LSST, Dkl = utils_vae.load_vae_full('../../vae_weights/weights_mse_noisy_v4.513-0.00.ckpt',6, folder= False)
### Do inference
## Warm-up run, excluded from timing (first execution triggers compilation)
samples = distribution.sample(100)
out = decoder_LSST(samples)
print('warm-up over')
n_gal = 1000
print(n_gal)
## Timed inference: sample latents from the flow and decode them to galaxies
t0 = time.time()
samples = distribution.sample(n_gal)
out = decoder_LSST(samples)
t1 = time.time()
print('time for inference:' + str(t1-t0))
| 29.3875 | 164 | 0.68524 | tensorflow as tf
import tensorflow_probability as tfp
tfk = tf.keras
tfkl = tfk.layers
tfd = tfp.distributions
tfb = tfp.bijectors
import time
import sys
sys.path.insert(0,'')
from flow import *
import utils_vae
from tensorflow.compiler.plugin.poplar.ops import gen_ipu_ops
from tensorflow.python import ipu
from tensorflow.python.ipu.scopes import ipu_scope
cfg = ipu.utils.create_ipu_config()
cfg = ipu.utils.auto_select_ipus(cfg, 1)
ipu.utils.configure_ipu_system(cfg)
s =8
bijectors = []
strategy = ipu.ipu_strategy.IPUStrategy()
with strategy.scope():
for i in range(0, layers):
made = make_network(32, hidden_dim,2)
bijectors.append(MAF(made))
bijectors.append(tfb.Permute(permutation=[31,30,29,28,27,26,25,24,23,22,21,20,19,18,17,16,15,14,13,12,11,10,9,8,7,6,5,4,3,2,1,0]))
bijectors = tfb.Chain(bijectors=list(reversed(bijectors[:-1])))
distribution = tfd.TransformedDistribution(
distribution=tfd.Normal(loc=0., scale=1.),
bijector=bijectors,
event_shape=[32]
)
x_ = tfkl.Input(shape=(32,), dtype=tf.float32)
log_prob_ = distribution.log_prob(x_)
model = tfk.Model(x_, log_prob_)
model.compile(optimizer=tf.optimizers.Adam(), loss=lambda _, log_prob: -log_prob)
print('flow defined')
ath = '../../nflow_weights/'
latest = tf.train.latest_checkpoint(loading_path)
model.load_weights(latest)
r_LSST, decoder_LSST, Dkl = utils_vae.load_vae_full('../../vae_weights/weights_mse_noisy_v4.513-0.00.ckpt',6, folder= False)
(100)
out = decoder_LSST(samples)
print('warm-up over')
n_gal = 1000
print(n_gal)
e()
samples = distribution.sample(n_gal)
out = decoder_LSST(samples)
t1 = time.time()
print('time for inference:' + str(t1-t0))
| true | true |
f737c5deb7e3adca1ac7a29b0ec85a81b591e9d0 | 11,520 | py | Python | osbrain/tests/test_nameserver.py | RezaBehzadpour/osbrain | 1b7061bfa6bcfa2176685081fd39c5c971107d51 | [
"Apache-2.0"
] | 176 | 2016-07-12T20:05:32.000Z | 2022-01-18T10:12:07.000Z | osbrain/tests/test_nameserver.py | RezaBehzadpour/osbrain | 1b7061bfa6bcfa2176685081fd39c5c971107d51 | [
"Apache-2.0"
] | 358 | 2016-08-04T09:21:35.000Z | 2021-10-15T07:20:07.000Z | osbrain/tests/test_nameserver.py | RezaBehzadpour/osbrain | 1b7061bfa6bcfa2176685081fd39c5c971107d51 | [
"Apache-2.0"
] | 50 | 2016-07-17T11:52:36.000Z | 2021-05-10T14:48:45.000Z | """
Test file for nameserver.
"""
import multiprocessing
import os
import random
import time
from threading import Timer
import pytest
from osbrain import Agent
from osbrain import AgentProcess
from osbrain import NameServer
from osbrain import NSProxy
from osbrain import Proxy
from osbrain import SocketAddress
from osbrain import run_agent
from osbrain import run_nameserver
from osbrain.helper import wait_agent_attr
from osbrain.nameserver import NameServerProcess
from osbrain.nameserver import random_nameserver_process
from .common import skip_windows_any_port
from .common import skip_windows_port_reuse
def test_nameserver_ping(nsproxy):
    """The name server must answer a ping with 'pong'."""
    response = nsproxy.ping()
    assert response == 'pong'
def test_nameserver_list(nsproxy):
    """
    A freshly created name server lists exactly one registered entry:
    the Pyro name server itself, stored under its PYRO URI.
    """
    expected = 'Pyro.NameServer'
    registered = nsproxy.list()
    assert len(registered) == 1
    only_entry = list(registered.keys())[0]
    assert only_entry == expected
    assert registered[expected] == 'PYRO:%s@%s' % (expected, nsproxy.addr())
def test_nameserver_proxy_list(nsproxy):
    """
    Agents started against a name server must show up in its listing.
    """
    for alias in ('a0', 'a1'):
        run_agent(alias, nsproxy.addr())
    registered = nsproxy.list()
    assert 'a0' in registered
    assert 'a1' in registered
def test_run_agents_same_name(nsproxy):
    """
    Registering a second agent under an already-used alias must raise a
    RuntimeError that mentions the duplicate registration.
    """
    alias = 'name'
    run_agent(alias)
    with pytest.raises(RuntimeError) as error:
        run_agent(alias)
    assert 'name already registered' in str(error.value)
def test_nameserver_proxy_shutdown_no_agents():
    """
    A name server with no registered agents shuts down cleanly through
    its proxy.
    """
    nameserver = run_nameserver()
    nameserver.shutdown()
def test_nameserver_proxy_shutdown_agents(nsproxy):
    """
    Shutting down agents through the name server proxy leaves the
    registry empty.
    """
    for alias in ('Agent0', 'Agent1'):
        run_agent(alias, nsaddr=nsproxy.addr())
    nsproxy.shutdown_agents()
    assert len(nsproxy.agents()) == 0
def test_nameserver_proxy_shutdown_with_agents():
    """
    A name server with registered agents shuts down cleanly through its
    proxy.
    """
    nameserver = run_nameserver()
    for alias in ('Agent0', 'Agent1'):
        run_agent(alias, nsaddr=nameserver.addr())
    nameserver.shutdown()
def test_nameserver_proxy_shutdown_with_many_agents():
"""
Shutdown a name server from a name server proxy when there are many agents
registered in the name server (make sure proxies do not saturate the name
server on shutdown).
The shutdown process is given a long timeout to avoid raising exceptions.
"""
import Pyro4
Pyro4.config.THREADPOOL_SIZE = 4
ns = run_nameserver()
for i in range(20):
run_agent('Agent%s' % i)
ns.shutdown(timeout=60)
def test_nameserver_proxy_shutdown_with_many_agents_timeout():
"""
Shutdown a name server from a name server proxy when there are many agents
registered in the name server (make sure proxies do not saturate the name
server on shutdown).
The shutdown process is given the shortest timeout to ensure an exception
is raised.
"""
import Pyro4
Pyro4.config.THREADPOOL_SIZE = 4
ns = run_nameserver()
for i in range(20):
run_agent('Agent%s' % i)
with pytest.raises(TimeoutError):
ns.shutdown(timeout=0.0)
ns.shutdown()
@pytest.mark.parametrize('delay', [1, 3, 5])
@pytest.mark.parametrize('timeout', [True, False])
def test_nameserver_proxy_shutdown_lazy_agents(delay, timeout):
"""
Shutdown a name server proxy with agents that wait some time before
shutting down.
The name server shutdown should always succeed. If the agents do not
shutdown cleanly soon they should be hard-killed.
"""
class Lazy(Agent):
def shutdown(self):
time.sleep(delay)
super().shutdown()
ns = run_nameserver()
run_agent('a0', base=Lazy)
run_agent('a1', base=Lazy)
t0 = time.time()
if timeout:
ns.shutdown(timeout=10)
else:
ns.shutdown()
assert time.time() - t0 > delay / 2.0
assert time.time() - t0 < delay + 2
def test_nameserver_proxy_shutdown_raise_timeout():
"""
A name server proxy should raise a TimeoutError if agents were not shutdown
or killed before the set timeout.
"""
ns = run_nameserver()
run_agent('a0')
with pytest.raises(TimeoutError) as error:
ns.shutdown(timeout=0.0)
assert 'not shutdown after' in str(error.value)
ns.shutdown()
def test_nameserver_proxy_shutdown_with_pyroerror():
"""
Check that `PyroError`s raised during `async_nameserver_shutdown` are
handled correctly.
"""
nameserver = run_nameserver()
ap = AgentProcess()
name = ap.start()
proxy = Proxy(name)
proxy.run()
ap.kill()
nameserver.async_shutdown_agents(nameserver.addr())
nameserver.shutdown()
def test_oneway_kill_non_running_agent_on_name_server_shutdown():
"""
The agent's `shutdown` method is only executed for running agents. When
agents are not running (i.e.: they raised an exception while running or
their `keep_alive` attribute was simply set to `False`, the `kill` method
is called instead.
When killing a non-running agent (i.e.: when shutting down the
architecture from the name server), this call is expected to be executed
one-way, as otherwise the Pyro daemon will shut down before returning
from the method, resulting in a `ConnectionClosedError`.
"""
class WilliamWallace(Agent):
def kill(self):
super().kill()
time.sleep(2)
ns = run_nameserver()
william = run_agent('william', base=WilliamWallace)
# Stop the agent
william.set_attr(_keep_alive=False)
assert wait_agent_attr(william, name='_running', value=False)
# Shut down should work just fine
ns.shutdown()
def test_nameserverprocess_shutdown():
"""
Name server shutdown can be called directly from the name server process.
"""
nameserver = random_nameserver_process()
run_agent('a0')
run_agent('a1')
while not len(nameserver.agents()) == 2:
continue
assert 'a0' in nameserver.agents()
assert 'a1' in nameserver.agents()
nameserver.shutdown()
assert not nameserver.is_alive()
def test_nameserverprocess_shutdown_lazy_agents():
"""
Shutdown a name server process with agents that wait some time before
shutting down.
"""
class Lazy(Agent):
def shutdown(self):
time.sleep(1)
super().shutdown()
nsprocess = random_nameserver_process()
run_agent('a0', base=Lazy)
run_agent('a1', base=Lazy)
t0 = time.time()
nsprocess.shutdown()
assert time.time() - t0 > 1
def test_nameserver_proxy_timeout():
    """
    When creating a proxy to the name server, there should be a timeout
    before raising an error if the name server cannot be located.

    The proxy is created *before* the server is started (the server start
    is delayed by 1 s), so a successful lookup proves the proxy kept
    retrying within its 3 s timeout.
    """
    while True:
        try:
            # Pick a random port in an unprivileged range.
            host = '127.0.0.1'
            port = random.randrange(10000, 20000)
            addr = SocketAddress(host, port)
            nameserver = NameServerProcess(addr)
            # Start the name server only after a 1 s delay.
            Timer(1, nameserver.start).start()
            # Try to locate the (not-yet-running) name server now.
            pyro_address = NSProxy(addr, timeout=3.0).addr()
        except PermissionError:
            # Chosen port could not be bound; retry with another one.
            continue
        break
    assert pyro_address.host == host
    assert pyro_address.port == port
    nameserver.shutdown()
def test_nameserver_process_default_host():
    """
    When constructed with only a port, a name server process must default
    its host to localhost (127.0.0.1).
    """
    nsprocess = NameServerProcess(1234)
    assert nsprocess.host == '127.0.0.1'
    assert nsprocess.port == 1234
def test_nameserver_environ(nsproxy):
    """
    Starting a name server exports its address through an environment
    variable so that new agents can find it without an explicit address.
    """
    assert os.environ.get('OSBRAIN_NAMESERVER_ADDRESS') == str(nsproxy.addr())
    # Agents started without an explicit address must still register.
    for alias in ('a0', 'a1'):
        run_agent(alias)
    registered = nsproxy.list()
    assert 'a0' in registered
    assert 'a1' in registered
def test_nameserver_agents(nsproxy):
    """
    agents() must return the aliases of all registered agents, growing
    as new agents are started.
    """
    # Nothing registered yet.
    assert len(nsproxy.agents()) == 0
    # One registered agent.
    run_agent('Agent0')
    assert len(nsproxy.agents()) == 1
    # Two registered agents.
    run_agent('Agent1')
    registered = nsproxy.agents()
    assert len(registered) == 2
    assert 'Agent0' in registered
    assert 'Agent1' in registered
def test_nameserver_agent_address(nsproxy):
    """
    The name server proxy can resolve an agent's socket address from the
    agent alias and the socket alias.
    """
    publisher = run_agent('a0')
    pusher = run_agent('a1')
    pub_addr = publisher.bind('PUB', alias='foo')
    push_addr = pusher.bind('PUSH', alias='bar')
    assert nsproxy.addr('a0', 'foo') == pub_addr
    assert nsproxy.addr('a1', 'bar') == push_addr
@skip_windows_any_port
def test_random_nameserver_process():
"""
Basic random_nameserver_process function tests: port range and exceptions.
"""
# Port range
port_start = 11000
port_stop = port_start + 100
nsprocess = random_nameserver_process(
port_start=port_start, port_stop=port_stop
)
address = nsprocess.addr
assert port_start <= address.port <= port_stop
ns = NSProxy(address)
ns.shutdown()
# Raising exceptions
with pytest.raises(ValueError):
random_nameserver_process(port_start=-1, port_stop=-2)
with pytest.raises(RuntimeError):
random_nameserver_process(port_start=22, port_stop=22, timeout=0.5)
@skip_windows_port_reuse
def test_nameserver_oserror(nsproxy):
"""
Name server start() should raise an error if address is already in use.
"""
with pytest.raises(RuntimeError) as error:
run_nameserver(nsproxy.addr())
assert 'OSError' in str(error.value)
assert 'Address already in use' in str(error.value)
@skip_windows_any_port
def test_nameserver_permissionerror():
"""
Name server start() should raise an error if it has not sufficient
permissions.
"""
with pytest.raises(RuntimeError) as error:
run_nameserver('127.0.0.1:22')
assert 'PermissionError' in str(error.value)
assert 'Permission denied' in str(error.value)
def test_run_nameserver_base():
"""
The `run_nameserver` function should accept a `base` parameter to specify
the base NameServer class.
"""
class BobMarley(NameServer):
def get_up(self):
return 'stand up!'
ns = run_nameserver(base=BobMarley)
assert ns.get_up() == 'stand up!'
ns.shutdown()
def test_nameserver_spawn_process(nsproxy):
"""
A name server should be able to spawn child processes.
It is a way to make sure name servers are run as non-daemonic processes,
which are not allowed to have children.
"""
class Spawner(NameServer):
def spawn_process(self):
p = multiprocessing.Process()
p.start()
return True
ns = run_nameserver(base=Spawner)
assert ns.spawn_process()
ns.shutdown()
| 27.961165 | 79 | 0.67526 | import multiprocessing
import os
import random
import time
from threading import Timer
import pytest
from osbrain import Agent
from osbrain import AgentProcess
from osbrain import NameServer
from osbrain import NSProxy
from osbrain import Proxy
from osbrain import SocketAddress
from osbrain import run_agent
from osbrain import run_nameserver
from osbrain.helper import wait_agent_attr
from osbrain.nameserver import NameServerProcess
from osbrain.nameserver import random_nameserver_process
from .common import skip_windows_any_port
from .common import skip_windows_port_reuse
def test_nameserver_ping(nsproxy):
    """Simple name server liveness check: ping must answer 'pong'."""
    assert nsproxy.ping() == 'pong'
def test_nameserver_list(nsproxy):
    """A fresh name server lists exactly one entry: the Pyro name server
    itself, registered under its PYRO URI."""
    agents = nsproxy.list()
    name = 'Pyro.NameServer'
    assert len(agents) == 1
    assert list(agents.keys())[0] == name
    assert agents[name] == 'PYRO:%s@%s' % (name, nsproxy.addr())
def test_nameserver_proxy_list(nsproxy):
run_agent('a0', nsproxy.addr())
run_agent('a1', nsproxy.addr())
agent_list = nsproxy.list()
assert 'a0' in agent_list
assert 'a1' in agent_list
def test_run_agents_same_name(nsproxy):
run_agent('name')
with pytest.raises(RuntimeError) as error:
run_agent('name')
assert 'name already registered' in str(error.value)
def test_nameserver_proxy_shutdown_no_agents():
ns = run_nameserver()
ns.shutdown()
def test_nameserver_proxy_shutdown_agents(nsproxy):
run_agent('Agent0', nsaddr=nsproxy.addr())
run_agent('Agent1', nsaddr=nsproxy.addr())
nsproxy.shutdown_agents()
assert len(nsproxy.agents()) == 0
def test_nameserver_proxy_shutdown_with_agents():
ns = run_nameserver()
run_agent('Agent0', nsaddr=ns.addr())
run_agent('Agent1', nsaddr=ns.addr())
ns.shutdown()
def test_nameserver_proxy_shutdown_with_many_agents():
import Pyro4
Pyro4.config.THREADPOOL_SIZE = 4
ns = run_nameserver()
for i in range(20):
run_agent('Agent%s' % i)
ns.shutdown(timeout=60)
def test_nameserver_proxy_shutdown_with_many_agents_timeout():
import Pyro4
Pyro4.config.THREADPOOL_SIZE = 4
ns = run_nameserver()
for i in range(20):
run_agent('Agent%s' % i)
with pytest.raises(TimeoutError):
ns.shutdown(timeout=0.0)
ns.shutdown()
@pytest.mark.parametrize('delay', [1, 3, 5])
@pytest.mark.parametrize('timeout', [True, False])
def test_nameserver_proxy_shutdown_lazy_agents(delay, timeout):
class Lazy(Agent):
def shutdown(self):
time.sleep(delay)
super().shutdown()
ns = run_nameserver()
run_agent('a0', base=Lazy)
run_agent('a1', base=Lazy)
t0 = time.time()
if timeout:
ns.shutdown(timeout=10)
else:
ns.shutdown()
assert time.time() - t0 > delay / 2.0
assert time.time() - t0 < delay + 2
def test_nameserver_proxy_shutdown_raise_timeout():
ns = run_nameserver()
run_agent('a0')
with pytest.raises(TimeoutError) as error:
ns.shutdown(timeout=0.0)
assert 'not shutdown after' in str(error.value)
ns.shutdown()
def test_nameserver_proxy_shutdown_with_pyroerror():
nameserver = run_nameserver()
ap = AgentProcess()
name = ap.start()
proxy = Proxy(name)
proxy.run()
ap.kill()
nameserver.async_shutdown_agents(nameserver.addr())
nameserver.shutdown()
def test_oneway_kill_non_running_agent_on_name_server_shutdown():
    """Name server shutdown must handle an agent that already stopped
    running but takes time to die.
    """
    class WilliamWallace(Agent):
        # agent whose kill is deliberately slow
        def kill(self):
            super().kill()
            time.sleep(2)
    ns = run_nameserver()
    william = run_agent('william', base=WilliamWallace)
    # stop the agent's main loop without killing the process
    william.set_attr(_keep_alive=False)
    assert wait_agent_attr(william, name='_running', value=False)
    ns.shutdown()
def test_nameserverprocess_shutdown():
    """A ``NameServerProcess`` shuts down cleanly with agents registered."""
    nameserver = random_nameserver_process()
    run_agent('a0')
    run_agent('a1')
    # busy-wait until both agents appear in the registry
    # NOTE(review): no timeout here; relies on the test runner's timeout
    while not len(nameserver.agents()) == 2:
        continue
    assert 'a0' in nameserver.agents()
    assert 'a1' in nameserver.agents()
    nameserver.shutdown()
    assert not nameserver.is_alive()
def test_nameserverprocess_shutdown_lazy_agents():
    """``NameServerProcess.shutdown`` must wait for slow agent shutdowns."""
    class Lazy(Agent):
        # agent that takes one second to finish its shutdown
        def shutdown(self):
            time.sleep(1)
            super().shutdown()
    nsprocess = random_nameserver_process()
    run_agent('a0', base=Lazy)
    run_agent('a1', base=Lazy)
    t0 = time.time()
    nsprocess.shutdown()
    # the call must have blocked for at least the agents' delay
    assert time.time() - t0 > 1
def test_nameserver_proxy_timeout():
    """An ``NSProxy`` with a timeout must keep retrying until the name
    server (started with a one-second delay) is actually up.
    """
    while True:
        try:
            host = '127.0.0.1'
            port = random.randrange(10000, 20000)
            addr = SocketAddress(host, port)
            nameserver = NameServerProcess(addr)
            # start the server only after one second; the proxy below must
            # retry within its 3-second timeout until the server is up
            Timer(1, nameserver.start).start()
            pyro_address = NSProxy(addr, timeout=3.0).addr()
        except PermissionError:
            # randomly chosen port was not permitted; try another one
            continue
        break
    assert pyro_address.host == host
    assert pyro_address.port == port
    nameserver.shutdown()
def test_nameserver_process_default_host():
    """Constructing a ``NameServerProcess`` from a bare port defaults the
    host to localhost.
    """
    ns = NameServerProcess(1234)
    assert ns.port == 1234
    assert ns.host == '127.0.0.1'
def test_nameserver_environ(nsproxy):
    """The fixture's name server address is exported through the
    ``OSBRAIN_NAMESERVER_ADDRESS`` environment variable and is used by
    ``run_agent`` when no address is given.
    """
    assert str(nsproxy.addr()) == os.environ.get('OSBRAIN_NAMESERVER_ADDRESS')
    run_agent('a0')
    run_agent('a1')
    agent_list = nsproxy.list()
    assert 'a0' in agent_list
    assert 'a1' in agent_list
def test_nameserver_agents(nsproxy):
    """``agents()`` reflects registrations as agents are spawned."""
    agents = nsproxy.agents()
    assert len(agents) == 0
    run_agent('Agent0')
    agents = nsproxy.agents()
    assert len(agents) == 1
    run_agent('Agent1')
    agents = nsproxy.agents()
    assert len(agents) == 2
    assert 'Agent0' in agents
    assert 'Agent1' in agents
def test_nameserver_agent_address(nsproxy):
    """``NSProxy.addr(agent, alias)`` resolves the socket address an
    agent bound under that alias.
    """
    a0 = run_agent('a0')
    a1 = run_agent('a1')
    addr0 = a0.bind('PUB', alias='foo')
    addr1 = a1.bind('PUSH', alias='bar')
    assert nsproxy.addr('a0', 'foo') == addr0
    assert nsproxy.addr('a1', 'bar') == addr1
@skip_windows_any_port
def test_random_nameserver_process():
    """``random_nameserver_process`` binds inside the requested port
    range, rejects invalid ranges, and times out on an unusable range.
    """
    port_start = 11000
    port_stop = port_start + 100
    nsprocess = random_nameserver_process(
        port_start=port_start, port_stop=port_stop
    )
    address = nsprocess.addr
    assert port_start <= address.port <= port_stop
    ns = NSProxy(address)
    ns.shutdown()
    # inverted range is a usage error
    with pytest.raises(ValueError):
        random_nameserver_process(port_start=-1, port_stop=-2)
    # port 22 is privileged/in use, so the search must time out
    with pytest.raises(RuntimeError):
        random_nameserver_process(port_start=22, port_stop=22, timeout=0.5)
@skip_windows_port_reuse
def test_nameserver_oserror(nsproxy):
    """Starting a second name server on an occupied address surfaces the
    underlying ``OSError`` in the raised ``RuntimeError``.
    """
    with pytest.raises(RuntimeError) as error:
        run_nameserver(nsproxy.addr())
    assert 'OSError' in str(error.value)
    assert 'Address already in use' in str(error.value)
@skip_windows_any_port
def test_nameserver_permissionerror():
    """Binding a name server to a privileged port surfaces the underlying
    ``PermissionError`` in the raised ``RuntimeError``.
    """
    with pytest.raises(RuntimeError) as error:
        run_nameserver('127.0.0.1:22')
    assert 'PermissionError' in str(error.value)
    assert 'Permission denied' in str(error.value)
def test_run_nameserver_base():
    """``run_nameserver`` accepts a custom ``NameServer`` subclass whose
    extra methods are reachable through the returned proxy.
    """
    class BobMarley(NameServer):
        def get_up(self):
            return 'stand up!'
    ns = run_nameserver(base=BobMarley)
    assert ns.get_up() == 'stand up!'
    ns.shutdown()
def test_nameserver_spawn_process(nsproxy):
    """A name server must be able to spawn child processes (i.e. it must
    not itself be a daemonic process).
    """
    class Spawner(NameServer):
        def spawn_process(self):
            # would raise if the name server process were daemonic
            p = multiprocessing.Process()
            p.start()
            return True
    ns = run_nameserver(base=Spawner)
    assert ns.spawn_process()
    ns.shutdown()
| true | true |
f737c664b454fe361ef537964ca226eec7e50c7a | 12,998 | py | Python | open/NVIDIA/scripts/update_results.py | wom-ai/inference_results_v1.0 | af4bfffd5b6c4815f305a272cb42ae6de09f44e1 | [
"Apache-2.0"
] | null | null | null | open/NVIDIA/scripts/update_results.py | wom-ai/inference_results_v1.0 | af4bfffd5b6c4815f305a272cb42ae6de09f44e1 | [
"Apache-2.0"
] | 24 | 2021-07-19T01:09:35.000Z | 2022-03-17T11:44:02.000Z | open/NVIDIA/scripts/update_results.py | wom-ai/inference_results_v1.0 | af4bfffd5b6c4815f305a272cb42ae6de09f44e1 | [
"Apache-2.0"
] | null | null | null | #! /usr/bin/env python3
# Copyright (c) 2021, NVIDIA CORPORATION. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import os
import sys
sys.path.insert(0, os.getcwd())
import re
import sys
import shutil
import glob
import argparse
import datetime
import json
from scripts.utils import Tree, SortingCriteria, get_system_type
# scenario -> (result regex, SortingCriteria)
# The regex extracts the headline metric from mlperf_log_summary.txt; the
# criteria states whether a higher or a lower value of that metric is better.
SCENARIO_PERF_RES_METADATA = {
    # scenario: (result regex, SortingCriteria)
    "Offline": (r"Samples per second: (\d+\.?\d*e?[-+]?\d*)", SortingCriteria.Higher),
    "Server": (r"99\.00 percentile latency \(ns\) : (\d+\.?\d*e?[-+]?\d*)", SortingCriteria.Lower),
    "SingleStream": (r"90th percentile latency \(ns\) : (\d+\.?\d*e?[-+]?\d*)", SortingCriteria.Lower),
    "MultiStream": (r"99\.00 percentile latency \(ns\) : (\d+\.?\d*e?[-+]?\d*)", SortingCriteria.Lower),
}
def sort_perf_list(perf_file_list, scenario):
    """Sort performance log files from worst to best measured result.

    For each accuracy-log path the sibling ``*_summary.txt`` file is
    scanned for the scenario's headline metric (regex taken from
    ``SCENARIO_PERF_RES_METADATA``); the paths are returned ordered so
    that the best-performing run comes last.

    Raises:
        Exception: if a summary file contains no metric line.
    """
    pattern, criteria = SCENARIO_PERF_RES_METADATA[scenario]
    scored = []
    for log_path in perf_file_list:
        summary_file = log_path.replace("_accuracy.json", "_summary.txt")
        with open(summary_file) as fh:
            summary_lines = fh.read().split("\n")
        metric = None
        for line in summary_lines:
            hit = re.match(pattern, line)
            if hit is not None:
                metric = float(hit.group(1))
                break
        if metric is None:
            raise Exception("Could not find perf value in file: " + summary_file)
        scored.append((log_path, metric))
    # lower-is-better metrics (latencies) sort descending so that the
    # best (lowest) value ends up last, mirroring higher-is-better metrics
    scored.sort(key=lambda pair: pair[1],
                reverse=(criteria == SortingCriteria.Lower))
    return [path for path, _ in scored]
def find_valid_runs(input_list, scenario):
    """Split accuracy-log paths into valid performance and accuracy runs.

    A path counts as an accuracy run when the accuracy JSON is non-trivial
    (larger than 4 bytes, i.e. more than an empty list).  It counts as a
    performance run when its sibling summary file reports a VALID result
    and satisfies the per-scenario minimum query constraint.

    Returns:
        (perf_list, accu_list) tuple of path lists.
    """
    # Query constraints documented in https://github.com/mlperf/inference_policies/blob/master/inference_rules.adoc#scenarios
    QUERY_METRIC_CONSTRAINTS = {
        "Offline": (r"samples_per_query : (\d+\.?\d*e?[-+]?\d*)", 24576),
        "Server": (r"min_query_count : (\d+\.?\d*e?[-+]?\d*)", 270336),
        "MultiStream": (r"min_query_count : (\d+\.?\d*e?[-+]?\d*)", 270336),
        "SingleStream": (r"min_query_count : (\d+\.?\d*e?[-+]?\d*)", 1024),
    }
    constraint_pattern, constraint_min = QUERY_METRIC_CONSTRAINTS[scenario]
    perf_list = []
    accu_list = []
    for log_path in input_list:
        # non-empty accuracy JSON (an empty "[]" file is 2-4 bytes)
        if os.path.getsize(log_path) > 4:
            accu_list.append(log_path)
        result_valid = False
        query_constraint_met = False
        summary_path = log_path.replace("_accuracy.json", "_summary.txt")
        with open(summary_path) as summary_fh:
            for line in summary_fh:
                validity = re.match(r"Result is : (VALID|INVALID)", line)
                if validity is not None and validity.group(1) == "VALID":
                    result_valid = True
                constraint = re.match(constraint_pattern, line)
                if constraint is not None and \
                        float(constraint.group(1)) >= constraint_min:
                    query_constraint_met = True
        if result_valid and query_constraint_met:
            perf_list.append(log_path)
    return perf_list, accu_list
def process_results(args, system_ids, metadata):
    """Copy the best valid logs into the results/ directory layout.

    For every (system, benchmark, scenario) triple found under the input
    directory this selects one valid accuracy run and the best valid
    performance run, copies their log files into
    ``<output_dir>/<system>/<benchmark>/<scenario>/...`` and records
    counts/timestamps in ``metadata`` (a Tree).  Combinations that are
    not applicable to the system type (edge vs. datacenter) are skipped.
    Honors ``args.dry_run``, ``args.abort_missing_accuracy`` and
    ``args.abort_insufficient_runs``.
    """
    time_now = str(datetime.datetime.utcnow())
    result_id = args.result_id if args.result_id is not None else "manual-{:}".format(time_now)
    for system_id in system_ids:
        system_type = get_system_type(system_id)
        for benchmark in system_ids[system_id]:
            # Skip DLRM and BERT-99.9 for Edge
            if system_type == "edge" and (benchmark.startswith("dlrm") or benchmark == "bert-99.9"):
                print("{:} is an edge system. Skipping {:}".format(system_id, benchmark))
                continue
            # Skip SSD MobileNet for datacenter
            if system_type == "datacenter" and benchmark == "ssd-mobilenet":
                print("{:} is a datacenter system. Skipping {:}".format(system_id, benchmark))
                continue
            for scenario in system_ids[system_id][benchmark]:
                # Skip Server for Edge systems
                if system_type == "edge" and scenario in {"Server"}:
                    print("{:} is an edge system. Skipping Server scenario".format(system_id))
                    continue
                # Skip SingleStream and MultiStream for Datacenter systems
                if system_type == "datacenter" and scenario in {"SingleStream", "MultiStream"}:
                    print("{:} is a datacenter system. Skipping {:} scenario".format(system_id, scenario))
                    continue
                print(">>>>>>>> Processing {:}-{:}-{:} <<<<<<<<".format(system_id, benchmark, scenario))
                input_list = system_ids[system_id][benchmark][scenario]
                print("Found {:} log files".format(len(input_list)))
                perf_list, accu_list = find_valid_runs(input_list, scenario)
                # For DLRM and 3d-UNET, the 99.9% and 99% accuracy targets use the same engines. We use the same
                # logs here to make it more prominent that they are the same
                if benchmark in {"dlrm-99", "3d-unet-99"}:
                    perf_list, accu_list = find_valid_runs(system_ids[system_id][benchmark + ".9"][scenario], scenario)
                print("\t{:} perf logs".format(len(perf_list)))
                print("\t{:} acc logs".format(len(accu_list)))
                metadata.insert([system_id, benchmark, scenario, "accuracy", "count"], len(accu_list))
                metadata.insert([system_id, benchmark, scenario, "performance", "count"], len(perf_list))
                # Update accuracy run
                if len(accu_list) == 0:
                    print("WARNING: Cannot find valid accuracy run.")
                    if args.abort_missing_accuracy:
                        return
                else:
                    if len(accu_list) > 1:
                        print("WARNING: Found {:d} accuracy runs, which is more than needed. Empirically choose the last one.".format(len(accu_list)))
                        print(accu_list)
                    output_dir = os.path.join(args.output_dir, system_id, benchmark, scenario, "accuracy")
                    if not args.dry_run:
                        os.makedirs(output_dir, exist_ok=True)
                    for suffix in ["_accuracy.json", "_detail.txt", "_summary.txt"]:
                        input_file = accu_list[-1].replace("_accuracy.json", suffix)
                        output_file = os.path.join(output_dir, "mlperf_log{:}".format(suffix))
                        print("Copy {:} -> {:}".format(input_file, output_file))
                        if not args.dry_run:
                            shutil.copy(input_file, output_file)
                    # accuracy.txt lives next to the last copied log file
                    input_file = os.path.join(os.path.dirname(input_file), "accuracy.txt")
                    output_file = os.path.join(output_dir, "accuracy.txt")
                    print("Copy {:} -> {:}".format(input_file, output_file))
                    if not args.dry_run:
                        shutil.copy(input_file, output_file)
                # Update perf run
                perf_count = 1
                if len(perf_list) < perf_count:
                    print("WARNING: Cannot find enough passing perf runs. Only found {:d} runs.".format(len(perf_list)))
                    if args.abort_insufficient_runs:
                        return
                elif len(perf_list) > perf_count:
                    print("WARNING: Found {:d} passing perf runs, which is more than needed. Choosing the highest perf one(s).".format(len(perf_list)))
                    perf_list = sort_perf_list(perf_list, scenario)[-perf_count:]
                starting_idx = metadata.get([system_id, benchmark, scenario, "performance", "last_updated"])
                if starting_idx is None:
                    starting_idx = 0
                else:
                    # Starting idx is in range 1..perf_count, whereas actual indices are 0..perf_count-1. We wish the
                    # first index we modify to be the one after Starting idx, so taking (N mod perf_count) works.
                    starting_idx = starting_idx % perf_count
                for run_idx in range(0, len(perf_list)):
                    # rotate through run_1..run_<perf_count> directories
                    run_num = ((run_idx + starting_idx) % perf_count) + 1
                    output_dir = os.path.join(args.output_dir, system_id, benchmark, scenario, "performance", "run_{:d}".format(run_num))
                    if not args.dry_run:
                        os.makedirs(output_dir, exist_ok=True)
                    for suffix in ["_accuracy.json", "_detail.txt", "_summary.txt"]:
                        input_file = perf_list[run_idx].replace("_accuracy.json", suffix)
                        output_file = os.path.join(output_dir, "mlperf_log{:}".format(suffix))
                        print("Copy {:} -> {:}".format(input_file, output_file))
                        if not args.dry_run:
                            shutil.copy(input_file, output_file)
                    metadata.insert([system_id, benchmark, scenario, "performance", "last_updated"], run_num)
                metadata.insert([system_id, benchmark, scenario, "results_export_timestamp"], time_now)
                metadata.insert([system_id, benchmark, scenario, "result_id"], result_id)
def get_args():
    """Build the command line parser and return the parsed arguments."""
    parser = argparse.ArgumentParser()
    # (flags, keyword arguments) pairs, registered in order below
    option_specs = [
        (("--input_dir", "-d"),
         dict(help="Specifies the directory containing the logs.",
              default="build/logs")),
        (("--output_dir", "-o"),
         dict(help="Specifies the directory to output the results/ entries to",
              default="results")),
        (("--result_id",),
         dict(help="Specifies a unique ID to use for this result",
              default=None)),
        (("--abort_insufficient_runs",),
         dict(help="Abort instead if there are not enough perf runs to be considered valid",
              action="store_true")),
        (("--abort_missing_accuracy",),
         dict(help="Abort instead if there isn't a valid accuracy run",
              action="store_true")),
        (("--dry_run",),
         dict(help="Don't actually copy files, just log the actions taken.",
              action="store_true")),
        (("--metadata_file",),
         dict(help="File that stores metadata about these results",
              default="results_metadata.json")),
        (("--add_metadata",),
         dict(help="Save a field as part of metadata to the results directory. Format period.separated.key:value",
              action="append")),
    ]
    for flags, kwargs in option_specs:
        parser.add_argument(*flags, **kwargs)
    return parser.parse_args()
def main():
    """Collect mlperf accuracy logs, export results and save metadata.

    Globs for ``mlperf_log_accuracy.json`` under ``--input_dir``, groups
    them by (system, benchmark, scenario), delegates to
    ``process_results`` and finally writes the metadata JSON (or prints
    it when ``--dry_run`` is given).
    """
    args = get_args()
    glob_to_logs = os.path.join(args.input_dir, "**", "mlperf_log_accuracy.json")
    print("Looking for logs in {:}".format(glob_to_logs))
    all_logs = glob.glob(glob_to_logs, recursive=True)
    print("Found {:} mlperf_log entries".format(len(all_logs)))
    # Loop through input_list to find all the system_ids
    system_ids = Tree()
    for entry in all_logs:
        parts = entry.split("/")
        system_id = parts[-4] # [input_dir]/<timestamp>/system_id/benchmark/scenario/*.json
        benchmark = parts[-3]
        scenario = parts[-2]
        system_ids.insert([system_id, benchmark, scenario], entry, append=True)
    metadata = None
    if os.path.exists(args.metadata_file):
        with open(args.metadata_file) as f:
            metadata = json.load(f)
    metadata = Tree(starting_val=metadata)
    process_results(args, system_ids, metadata)
    # Write out custom metadata
    if args.add_metadata:
        for md in args.add_metadata:
            tmp = md.split(":")
            if len(tmp) != 2:
                print("WARNING: Invalid metadata \"{:}\"".format(md))
                continue
            keyspace = tmp[0].split(".")
            value = tmp[1]
            metadata.insert(keyspace, value)
    if not args.dry_run:
        with open(args.metadata_file, 'w') as f:
            json.dump(metadata.tree, f, indent=4, sort_keys=True)
    else:
        print(json.dumps(metadata.tree, indent=4, sort_keys=True))
    print("Done!")
if __name__ == '__main__':
    main()
| 43.039735 | 151 | 0.590783 |
import os
import sys
sys.path.insert(0, os.getcwd())
import re
import sys
import shutil
import glob
import argparse
import datetime
import json
from scripts.utils import Tree, SortingCriteria, get_system_type
# scenario -> (result regex, SortingCriteria)
# The regex extracts the headline metric from mlperf_log_summary.txt; the
# criteria states whether a higher or a lower value of that metric is better.
SCENARIO_PERF_RES_METADATA = {
    "Offline": (r"Samples per second: (\d+\.?\d*e?[-+]?\d*)", SortingCriteria.Higher),
    "Server": (r"99\.00 percentile latency \(ns\) : (\d+\.?\d*e?[-+]?\d*)", SortingCriteria.Lower),
    "SingleStream": (r"90th percentile latency \(ns\) : (\d+\.?\d*e?[-+]?\d*)", SortingCriteria.Lower),
    "MultiStream": (r"99\.00 percentile latency \(ns\) : (\d+\.?\d*e?[-+]?\d*)", SortingCriteria.Lower),
}
def sort_perf_list(perf_file_list, scenario):
    """Sort performance log files from worst to best measured result.

    For each accuracy-log path the sibling ``*_summary.txt`` file is
    scanned for the scenario's headline metric (regex from
    ``SCENARIO_PERF_RES_METADATA``); paths are returned ordered so the
    best-performing run comes last.  Raises if a summary has no metric.
    """
    perf_vals = []
    for perf_file in perf_file_list:
        summary_file = perf_file.replace("_accuracy.json", "_summary.txt")
        found_perf = False
        with open(summary_file) as f:
            log = f.read().split("\n")
            for line in log:
                matches = re.match(SCENARIO_PERF_RES_METADATA[scenario][0], line)
                if matches is None:
                    continue
                perf_vals.append((perf_file, float(matches.group(1))))
                found_perf = True
                break
        if not found_perf:
            raise Exception("Could not find perf value in file: " + summary_file)
    # lower-is-better metrics sort descending so the best value ends up last
    sorted_perf_vals = sorted(perf_vals, key=lambda k: k[1],
                              reverse=(SCENARIO_PERF_RES_METADATA[scenario][1] == SortingCriteria.Lower))
    return [k[0] for k in sorted_perf_vals]
def find_valid_runs(input_list, scenario):
    """Split accuracy-log paths into valid performance and accuracy runs.

    A path counts as an accuracy run when the accuracy JSON is non-trivial
    (larger than 4 bytes, i.e. more than an empty list).  It counts as a
    performance run when its sibling summary file reports a VALID result
    and satisfies the per-scenario minimum query constraint.

    Returns:
        (perf_list, accu_list) tuple of path lists.
    """
    # Fix: this dict was referenced below as QUERY_METRIC_CONSTRAINTS but
    # defined as _METRIC_CONSTRAINTS, which raised NameError on every call.
    QUERY_METRIC_CONSTRAINTS = {
        "Offline": (r"samples_per_query : (\d+\.?\d*e?[-+]?\d*)", 24576),
        "Server": (r"min_query_count : (\d+\.?\d*e?[-+]?\d*)", 270336),
        "MultiStream": (r"min_query_count : (\d+\.?\d*e?[-+]?\d*)", 270336),
        "SingleStream": (r"min_query_count : (\d+\.?\d*e?[-+]?\d*)", 1024),
    }
    perf_list = []
    accu_list = []
    for input_file in input_list:
        # non-empty accuracy JSON (an empty "[]" file is 2-4 bytes)
        if os.path.getsize(input_file) > 4:
            accu_list.append(input_file)
        is_valid = False
        satisfies_query_constraint = False
        summary = input_file.replace("_accuracy.json", "_summary.txt")
        with open(summary) as f:
            for line in f:
                # result validity check
                match = re.match(r"Result is : (VALID|INVALID)", line)
                if match is not None and match.group(1) == "VALID":
                    is_valid = True
                # minimum query count / samples-per-query check
                match = re.match(QUERY_METRIC_CONSTRAINTS[scenario][0], line)
                if match is not None and float(match.group(1)) >= QUERY_METRIC_CONSTRAINTS[scenario][1]:
                    satisfies_query_constraint = True
        if is_valid and satisfies_query_constraint:
            perf_list.append(input_file)
    return perf_list, accu_list
def process_results(args, system_ids, metadata):
    """Copy the best valid logs into the results/ directory layout.

    For every (system, benchmark, scenario) triple this selects one valid
    accuracy run and the best valid performance run, copies their logs to
    ``<output_dir>/<system>/<benchmark>/<scenario>/...`` and records
    counts/timestamps in ``metadata`` (a Tree).  Non-applicable
    combinations (edge vs. datacenter) are skipped.  Honors
    ``args.dry_run``, ``args.abort_missing_accuracy`` and
    ``args.abort_insufficient_runs``.
    """
    time_now = str(datetime.datetime.utcnow())
    result_id = args.result_id if args.result_id is not None else "manual-{:}".format(time_now)
    for system_id in system_ids:
        system_type = get_system_type(system_id)
        for benchmark in system_ids[system_id]:
            # skip DLRM and BERT-99.9 for edge systems
            if system_type == "edge" and (benchmark.startswith("dlrm") or benchmark == "bert-99.9"):
                print("{:} is an edge system. Skipping {:}".format(system_id, benchmark))
                continue
            # skip SSD MobileNet for datacenter systems
            if system_type == "datacenter" and benchmark == "ssd-mobilenet":
                print("{:} is a datacenter system. Skipping {:}".format(system_id, benchmark))
                continue
            for scenario in system_ids[system_id][benchmark]:
                # skip Server scenario for edge systems
                if system_type == "edge" and scenario in {"Server"}:
                    print("{:} is an edge system. Skipping Server scenario".format(system_id))
                    continue
                # skip SingleStream/MultiStream for datacenter systems
                if system_type == "datacenter" and scenario in {"SingleStream", "MultiStream"}:
                    print("{:} is a datacenter system. Skipping {:} scenario".format(system_id, scenario))
                    continue
                print(">>>>>>>> Processing {:}-{:}-{:} <<<<<<<<".format(system_id, benchmark, scenario))
                input_list = system_ids[system_id][benchmark][scenario]
                print("Found {:} log files".format(len(input_list)))
                perf_list, accu_list = find_valid_runs(input_list, scenario)
                # the 99% targets reuse the 99.9% engines' logs
                if benchmark in {"dlrm-99", "3d-unet-99"}:
                    perf_list, accu_list = find_valid_runs(system_ids[system_id][benchmark + ".9"][scenario], scenario)
                print("\t{:} perf logs".format(len(perf_list)))
                print("\t{:} acc logs".format(len(accu_list)))
                metadata.insert([system_id, benchmark, scenario, "accuracy", "count"], len(accu_list))
                metadata.insert([system_id, benchmark, scenario, "performance", "count"], len(perf_list))
                # export the accuracy run (last one if several were found)
                if len(accu_list) == 0:
                    print("WARNING: Cannot find valid accuracy run.")
                    if args.abort_missing_accuracy:
                        return
                else:
                    if len(accu_list) > 1:
                        print("WARNING: Found {:d} accuracy runs, which is more than needed. Empirically choose the last one.".format(len(accu_list)))
                        print(accu_list)
                    output_dir = os.path.join(args.output_dir, system_id, benchmark, scenario, "accuracy")
                    if not args.dry_run:
                        os.makedirs(output_dir, exist_ok=True)
                    for suffix in ["_accuracy.json", "_detail.txt", "_summary.txt"]:
                        input_file = accu_list[-1].replace("_accuracy.json", suffix)
                        output_file = os.path.join(output_dir, "mlperf_log{:}".format(suffix))
                        print("Copy {:} -> {:}".format(input_file, output_file))
                        if not args.dry_run:
                            shutil.copy(input_file, output_file)
                    # accuracy.txt lives next to the last copied log file
                    input_file = os.path.join(os.path.dirname(input_file), "accuracy.txt")
                    output_file = os.path.join(output_dir, "accuracy.txt")
                    print("Copy {:} -> {:}".format(input_file, output_file))
                    if not args.dry_run:
                        shutil.copy(input_file, output_file)
                # export the performance run(s)
                perf_count = 1
                if len(perf_list) < perf_count:
                    print("WARNING: Cannot find enough passing perf runs. Only found {:d} runs.".format(len(perf_list)))
                    if args.abort_insufficient_runs:
                        return
                elif len(perf_list) > perf_count:
                    print("WARNING: Found {:d} passing perf runs, which is more than needed. Choosing the highest perf one(s).".format(len(perf_list)))
                    perf_list = sort_perf_list(perf_list, scenario)[-perf_count:]
                starting_idx = metadata.get([system_id, benchmark, scenario, "performance", "last_updated"])
                if starting_idx is None:
                    starting_idx = 0
                else:
                    # last_updated is 1..perf_count while indices are
                    # 0..perf_count-1; (N mod perf_count) starts after it
                    starting_idx = starting_idx % perf_count
                for run_idx in range(0, len(perf_list)):
                    # rotate through run_1..run_<perf_count> directories
                    run_num = ((run_idx + starting_idx) % perf_count) + 1
                    output_dir = os.path.join(args.output_dir, system_id, benchmark, scenario, "performance", "run_{:d}".format(run_num))
                    if not args.dry_run:
                        os.makedirs(output_dir, exist_ok=True)
                    for suffix in ["_accuracy.json", "_detail.txt", "_summary.txt"]:
                        input_file = perf_list[run_idx].replace("_accuracy.json", suffix)
                        output_file = os.path.join(output_dir, "mlperf_log{:}".format(suffix))
                        print("Copy {:} -> {:}".format(input_file, output_file))
                        if not args.dry_run:
                            shutil.copy(input_file, output_file)
                    metadata.insert([system_id, benchmark, scenario, "performance", "last_updated"], run_num)
                metadata.insert([system_id, benchmark, scenario, "results_export_timestamp"], time_now)
                metadata.insert([system_id, benchmark, scenario, "result_id"], result_id)
def get_args():
    """Build the command line parser and return the parsed arguments."""
    parser = argparse.ArgumentParser()
    parser.add_argument(
        "--input_dir", "-d",
        help="Specifies the directory containing the logs.",
        default="build/logs"
    )
    parser.add_argument(
        "--output_dir", "-o",
        help="Specifies the directory to output the results/ entries to",
        default="results"
    )
    parser.add_argument(
        "--result_id",
        help="Specifies a unique ID to use for this result",
        default=None
    )
    parser.add_argument(
        "--abort_insufficient_runs",
        help="Abort instead if there are not enough perf runs to be considered valid",
        action="store_true"
    )
    parser.add_argument(
        "--abort_missing_accuracy",
        help="Abort instead if there isn't a valid accuracy run",
        action="store_true"
    )
    parser.add_argument(
        "--dry_run",
        help="Don't actually copy files, just log the actions taken.",
        action="store_true"
    )
    parser.add_argument(
        "--metadata_file",
        help="File that stores metadata about these results",
        default="results_metadata.json"
    )
    parser.add_argument(
        "--add_metadata",
        help="Save a field as part of metadata to the results directory. Format period.separated.key:value",
        action="append"
    )
    return parser.parse_args()
def main():
    """Collect mlperf accuracy logs, export results and save metadata.

    Globs for ``mlperf_log_accuracy.json`` under ``--input_dir``, groups
    them by (system, benchmark, scenario), delegates to
    ``process_results`` and finally writes the metadata JSON (or prints
    it when ``--dry_run`` is given).
    """
    args = get_args()
    glob_to_logs = os.path.join(args.input_dir, "**", "mlperf_log_accuracy.json")
    print("Looking for logs in {:}".format(glob_to_logs))
    all_logs = glob.glob(glob_to_logs, recursive=True)
    print("Found {:} mlperf_log entries".format(len(all_logs)))
    # group log paths by system id / benchmark / scenario
    system_ids = Tree()
    for entry in all_logs:
        parts = entry.split("/")
        # layout: [input_dir]/<timestamp>/system_id/benchmark/scenario/*.json
        system_id = parts[-4]
        benchmark = parts[-3]
        scenario = parts[-2]
        system_ids.insert([system_id, benchmark, scenario], entry, append=True)
    metadata = None
    if os.path.exists(args.metadata_file):
        with open(args.metadata_file) as f:
            metadata = json.load(f)
    metadata = Tree(starting_val=metadata)
    process_results(args, system_ids, metadata)
    # write out any user-supplied metadata fields ("a.b.c:value")
    if args.add_metadata:
        for md in args.add_metadata:
            tmp = md.split(":")
            if len(tmp) != 2:
                print("WARNING: Invalid metadata \"{:}\"".format(md))
                continue
            keyspace = tmp[0].split(".")
            value = tmp[1]
            metadata.insert(keyspace, value)
    if not args.dry_run:
        with open(args.metadata_file, 'w') as f:
            json.dump(metadata.tree, f, indent=4, sort_keys=True)
    else:
        print(json.dumps(metadata.tree, indent=4, sort_keys=True))
    print("Done!")
if __name__ == '__main__':
    main()
| true | true |
f737c7504b046270f8b55cc91eebf2c59dfcdb24 | 496 | py | Python | aspc/coursesearch/management/commands/smart_update.py | DDKZ/mainsite | 425a8e147a17b6bdd1af420460a9ea83462252fc | [
"MIT"
] | null | null | null | aspc/coursesearch/management/commands/smart_update.py | DDKZ/mainsite | 425a8e147a17b6bdd1af420460a9ea83462252fc | [
"MIT"
] | null | null | null | aspc/coursesearch/management/commands/smart_update.py | DDKZ/mainsite | 425a8e147a17b6bdd1af420460a9ea83462252fc | [
"MIT"
] | null | null | null | import logging
import pyodbc
from django.conf import settings
from django.core.management.base import BaseCommand, CommandError
from aspc.coursesearch.tasks import smart_update as update_task
logger = logging.getLogger(__name__)
class Command(BaseCommand):
    """Django management command that conditionally refreshes courses.

    Fix: the final statement of handle() had dataset-metadata residue
    fused onto the line; restored to a clean ``update_task()`` call.
    """
    args = ''
    help = """Performs a conditional update of the course schedule
    depending on the refresh history and whether new data are
    available"""

    def handle(self, *args, **options):
        # the task decides on its own whether an update is actually needed
        update_task()
import pyodbc
from django.conf import settings
from django.core.management.base import BaseCommand, CommandError
from aspc.coursesearch.tasks import smart_update as update_task
logger = logging.getLogger(__name__)
class Command(BaseCommand):
    """Django management command that conditionally refreshes courses.

    Fix: the final statement of handle() had dataset-metadata residue
    fused onto the line; restored to a clean ``update_task()`` call.
    """
    args = ''
    help = """Performs a conditional update of the course schedule
    depending on the refresh history and whether new data are
    available"""

    def handle(self, *args, **options):
        # the task decides on its own whether an update is actually needed
        update_task()
f737c809f04dd6e50834dc16dae05718533e95cb | 1,532 | py | Python | src/robot/version.py | sipke/robotframework | 81e8251978d421227e784037c18d53777e6c248a | [
"ECL-2.0",
"Apache-2.0"
] | null | null | null | src/robot/version.py | sipke/robotframework | 81e8251978d421227e784037c18d53777e6c248a | [
"ECL-2.0",
"Apache-2.0"
] | null | null | null | src/robot/version.py | sipke/robotframework | 81e8251978d421227e784037c18d53777e6c248a | [
"ECL-2.0",
"Apache-2.0"
] | null | null | null | # Copyright 2008-2015 Nokia Networks
# Copyright 2016- Robot Framework Foundation
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import re
import sys
# Version number typically updated by running `invoke set-version <version>`.
# Run `invoke --help set-version` or see tasks.py for details.
VERSION = '3.2.3.dev1'


def get_version(naked=False):
    """Return the version string; ``naked`` strips pre-release suffixes."""
    if not naked:
        return VERSION
    # cut at the first pre-release/dev marker (a/b/rc/.dev)
    return re.split('(a|b|rc|.dev)', VERSION)[0]


def get_full_version(program=None, naked=False):
    """Return '<program> <version> (<interpreter> <py version> on <platform>)'."""
    full = (f"{program or ''} {get_version(naked)} "
            f"({get_interpreter()} {sys.version.split()[0]} on {sys.platform})")
    return full.strip()


def get_interpreter():
    """Return the name of the running Python interpreter implementation."""
    if sys.platform.startswith('java'):
        return 'Jython'
    if sys.platform == 'cli':
        return 'IronPython'
    return 'PyPy' if 'PyPy' in sys.version else 'Python'
| 32.595745 | 77 | 0.626632 |
import re
import sys
# Version number typically updated by running `invoke set-version <version>`.
VERSION = '3.2.3.dev1'
def get_version(naked=False):
    """Return the version string; ``naked`` strips pre-release suffixes."""
    if naked:
        # cut at the first pre-release/dev marker (a/b/rc/.dev)
        return re.split('(a|b|rc|.dev)', VERSION)[0]
    return VERSION
def get_full_version(program=None, naked=False):
    """Return '<program> <version> (<interpreter> <py version> on <platform>)'."""
    version = '%s %s (%s %s on %s)' % (program or '',
                                       get_version(naked),
                                       get_interpreter(),
                                       sys.version.split()[0],
                                       sys.platform)
    return version.strip()
def get_interpreter():
    """Return the name of the running Python interpreter implementation."""
    if sys.platform.startswith('java'):
        return 'Jython'
    if sys.platform == 'cli':
        return 'IronPython'
    if 'PyPy' in sys.version:
        return 'PyPy'
    return 'Python'
| true | true |
f737ca83e3ca6c7f50e9c4149437ed42554fd0da | 1,612 | py | Python | aliyun-python-sdk-ccc/aliyunsdkccc/request/v20170705/GetUserByExtensionRequest.py | ankitdobhal/aliyun-openapi-python-sdk | 991b1c2d91adc468480defc23ba790d4369cce7b | [
"Apache-2.0"
] | null | null | null | aliyun-python-sdk-ccc/aliyunsdkccc/request/v20170705/GetUserByExtensionRequest.py | ankitdobhal/aliyun-openapi-python-sdk | 991b1c2d91adc468480defc23ba790d4369cce7b | [
"Apache-2.0"
] | null | null | null | aliyun-python-sdk-ccc/aliyunsdkccc/request/v20170705/GetUserByExtensionRequest.py | ankitdobhal/aliyun-openapi-python-sdk | 991b1c2d91adc468480defc23ba790d4369cce7b | [
"Apache-2.0"
] | null | null | null | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
#
# http://www.apache.org/licenses/LICENSE-2.0
#
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from aliyunsdkcore.request import RpcRequest
from aliyunsdkccc.endpoint import endpoint_data
class GetUserByExtensionRequest(RpcRequest):
	"""RPC request that looks up a CCC user by phone extension.

	Product 'CCC', API version 2017-07-05, action 'GetUserByExtension'.
	Fix: the final statement had dataset-metadata residue fused onto the
	line; restored to a clean ``add_query_param`` call.
	"""

	def __init__(self):
		RpcRequest.__init__(self, 'CCC', '2017-07-05', 'GetUserByExtension','CCC')
		self.set_method('POST')
		if hasattr(self, "endpoint_map"):
			setattr(self, "endpoint_map", endpoint_data.getEndpointMap())
		if hasattr(self, "endpoint_regional"):
			setattr(self, "endpoint_regional", endpoint_data.getEndpointRegional())

	def get_Extension(self):
		# phone extension query parameter
		return self.get_query_params().get('Extension')

	def set_Extension(self,Extension):
		self.add_query_param('Extension',Extension)

	def get_InstanceId(self):
		# CCC instance ID query parameter
		return self.get_query_params().get('InstanceId')

	def set_InstanceId(self,InstanceId):
		self.add_query_param('InstanceId',InstanceId)
from aliyunsdkcore.request import RpcRequest
from aliyunsdkccc.endpoint import endpoint_data
class GetUserByExtensionRequest(RpcRequest):
	"""RPC request that looks up a CCC user by phone extension.

	Product 'CCC', API version 2017-07-05, action 'GetUserByExtension'.
	Fix: the final statement had dataset-metadata residue fused onto the
	line; restored to a clean ``add_query_param`` call.
	"""

	def __init__(self):
		RpcRequest.__init__(self, 'CCC', '2017-07-05', 'GetUserByExtension','CCC')
		self.set_method('POST')
		if hasattr(self, "endpoint_map"):
			setattr(self, "endpoint_map", endpoint_data.getEndpointMap())
		if hasattr(self, "endpoint_regional"):
			setattr(self, "endpoint_regional", endpoint_data.getEndpointRegional())

	def get_Extension(self):
		# phone extension query parameter
		return self.get_query_params().get('Extension')

	def set_Extension(self,Extension):
		self.add_query_param('Extension',Extension)

	def get_InstanceId(self):
		# CCC instance ID query parameter
		return self.get_query_params().get('InstanceId')

	def set_InstanceId(self,InstanceId):
		self.add_query_param('InstanceId',InstanceId)
f737cc1f8fc969edb5787cb99f4efb813d708da0 | 5,194 | py | Python | tests/test_genome_dataset.py | xduan7/dl-bio-seqs | 72287509508ed50e16c5b26d6db0cf687bbfe94b | [
"MIT"
] | 4 | 2020-10-03T08:57:01.000Z | 2021-03-13T19:21:06.000Z | tests/test_genome_dataset.py | xduan7/dl-bio-seqs | 72287509508ed50e16c5b26d6db0cf687bbfe94b | [
"MIT"
] | null | null | null | tests/test_genome_dataset.py | xduan7/dl-bio-seqs | 72287509508ed50e16c5b26d6db0cf687bbfe94b | [
"MIT"
] | 1 | 2020-10-03T08:57:03.000Z | 2020-10-03T08:57:03.000Z | """
File Name: test_masked_genome_dataset.py
Project: bioseq-learning
File Description:
"""
import os
import time
import random
import logging
import unittest
from typing import List
from torch.utils.data import DataLoader
from src import E_COLI_GENOME_PARENT_DIR_PATH
from src.datasets.genome_dataset import GenomeDataset, GenomeIterDataset
from src.datasets.genome_dataset import \
PADDING_CHAR, NUCLEOTIDE_CHAR_INDEX_DICT
# index used to mark padding positions in an encoded sequence
_PADDING_INDEX: int = NUCLEOTIDE_CHAR_INDEX_DICT[PADDING_CHAR]
# directory that holds the bundled genome-processing example outputs
_TEST_EXAMPLES_DIR_PATH: str = \
    os.path.join(os.path.dirname(os.path.realpath(__file__)), 'examples')
_TEST_GENOME_DIR_PATHS: List[str] = [
    os.path.join(
        _TEST_EXAMPLES_DIR_PATH,
        'genome_processing_results_reference',
        '562.2283'
    )]
# dataset/test hyper-parameters shared by the test cases below
_TEST_SEQ_LEN: int = 1000
_TEST_NUM_MASKS: int = 5
_TEST_MAX_NUM_PADDINGS: int = 500
_TEST_BATCH_SIZE: int = 16
_TEST_NUM_SAMPLES: int = 10000
# common constructor kwargs for GenomeDataset/GenomeIterDataset
_TEST_GENOME_DATASET_KWARGS = {
    'genome_dir_paths': _TEST_GENOME_DIR_PATHS,
    'seq_len': _TEST_SEQ_LEN,
    'max_num_paddings': _TEST_MAX_NUM_PADDINGS,
}
class TestGenomeDataset(unittest.TestCase):
    """Unit tests for the GenomeDataset and GenomeIterDataset classes."""

    def test_genome_dataset(self):
        """Check indexing behaviour of the map-style GenomeDataset."""
        dataset = GenomeDataset(**_TEST_GENOME_DATASET_KWARGS)
        valid_indices = set(NUCLEOTIDE_CHAR_INDEX_DICT.values())
        for sample_index in range(len(dataset)):
            indexed_seq, padding_mask = dataset[sample_index]
            # every value in the indexed sequence must come from the
            # nucleotide character-to-index dictionary
            assert {_v.item() for _v in indexed_seq.unique()}.issubset(
                valid_indices)
            # the number of paddings may never exceed the configured maximum
            assert padding_mask.sum().item() <= _TEST_MAX_NUM_PADDINGS
        # indexing one past the end must raise an out-of-bound error
        with self.assertRaises(IndexError):
            dataset[len(dataset)]

    def test_genome_iter_dataset(self):
        """Check strict and random iteration of GenomeIterDataset."""
        expected_shape = (_TEST_BATCH_SIZE, _TEST_SEQ_LEN)

        # Strict iteration traverses all samples exactly once, so the number
        # of batches produced is bounded by the dataloader's length.
        strict_loader = DataLoader(
            GenomeIterDataset(
                **_TEST_GENOME_DATASET_KWARGS,
                strict_iteration=True,
            ),
            batch_size=_TEST_BATCH_SIZE,
            drop_last=True,
        )
        num_strict_batches = 0
        for batch in strict_loader:
            assert batch[0].shape == expected_shape
            assert batch[1].shape == expected_shape
            num_strict_batches += 1
        assert num_strict_batches <= len(strict_loader)

        # Random sampling (with replacement) keeps yielding batches beyond a
        # strict pass; succeed as soon as that excess is observed.
        random_loader = DataLoader(
            GenomeIterDataset(**_TEST_GENOME_DATASET_KWARGS),
            batch_size=_TEST_BATCH_SIZE,
            drop_last=True,
        )
        num_random_batches = 0
        for batch in random_loader:
            assert batch[0].shape == expected_shape
            assert batch[1].shape == expected_shape
            num_random_batches += 1
            if num_random_batches > num_strict_batches:
                return
        assert False

    def test_genome_dataset_indexing_time(self):
        """Time dataset construction and random indexing over real genomes."""
        # gather every genome directory under the E. coli parent directory
        parent_dir = E_COLI_GENOME_PARENT_DIR_PATH
        genome_dirs = [
            os.path.join(parent_dir, genome_id)
            for genome_id in os.listdir(parent_dir)
            if os.path.isdir(os.path.join(parent_dir, genome_id))
        ]
        # logging.getLogger('src.datasets').setLevel(logging.ERROR)
        construction_start = time.time()
        dataset = GenomeDataset(
            genome_dirs,
            seq_len=_TEST_SEQ_LEN,
            max_num_paddings=_TEST_MAX_NUM_PADDINGS,
        )
        print(f'Creating a genome dataset with {len(genome_dirs)} '
              f'genomes takes {time.time() - construction_start:.2f} seconds.')
        # time random access of _TEST_NUM_SAMPLES distinct sequences
        sample_indices = random.sample(
            range(len(dataset)), _TEST_NUM_SAMPLES)
        indexing_start = time.time()
        for sample_index in sample_indices:
            assert dataset[sample_index]
        print(f'Indexing {_TEST_NUM_SAMPLES} samples from the dataset '
              f'takes {time.time() - indexing_start:.2f} seconds.')
if __name__ == '__main__':
    # run the full test suite when this file is executed as a script
    unittest.main()
| 37.366906 | 79 | 0.669619 | import os
import time
import random
import logging
import unittest
from typing import List
from torch.utils.data import DataLoader
from src import E_COLI_GENOME_PARENT_DIR_PATH
from src.datasets.genome_dataset import GenomeDataset, GenomeIterDataset
from src.datasets.genome_dataset import \
PADDING_CHAR, NUCLEOTIDE_CHAR_INDEX_DICT
_PADDING_INDEX: int = NUCLEOTIDE_CHAR_INDEX_DICT[PADDING_CHAR]
_TEST_EXAMPLES_DIR_PATH: str = \
os.path.join(os.path.dirname(os.path.realpath(__file__)), 'examples')
_TEST_GENOME_DIR_PATHS: List[str] = [
os.path.join(
_TEST_EXAMPLES_DIR_PATH,
'genome_processing_results_reference',
'562.2283'
)]
_TEST_SEQ_LEN: int = 1000
_TEST_NUM_MASKS: int = 5
_TEST_MAX_NUM_PADDINGS: int = 500
_TEST_BATCH_SIZE: int = 16
_TEST_NUM_SAMPLES: int = 10000
_TEST_GENOME_DATASET_KWARGS = {
'genome_dir_paths': _TEST_GENOME_DIR_PATHS,
'seq_len': _TEST_SEQ_LEN,
'max_num_paddings': _TEST_MAX_NUM_PADDINGS,
}
class TestGenomeDataset(unittest.TestCase):
def test_genome_dataset(self):
genome_dataset = GenomeDataset(**_TEST_GENOME_DATASET_KWARGS)
for _i in range(len(genome_dataset)):
_indexed_seq, _padding_mask = genome_dataset[_i]
assert set([_v.item() for _v in _indexed_seq.unique()]).issubset(
set(NUCLEOTIDE_CHAR_INDEX_DICT.values()))
assert _padding_mask.sum().item() <= _TEST_MAX_NUM_PADDINGS
try:
_indexed_seq, _padding_mask = genome_dataset[len(genome_dataset)]
except IndexError:
assert True
else:
assert False
def test_genome_iter_dataset(self):
genome_strict_iter_dataloader = DataLoader(
GenomeIterDataset(
**_TEST_GENOME_DATASET_KWARGS,
strict_iteration=True,
),
batch_size=_TEST_BATCH_SIZE,
drop_last=True,
)
_num_batches: int = 0
for _batch_data in genome_strict_iter_dataloader:
assert _batch_data[0].shape == (_TEST_BATCH_SIZE, _TEST_SEQ_LEN)
assert _batch_data[1].shape == (_TEST_BATCH_SIZE, _TEST_SEQ_LEN)
_num_batches += 1
assert _num_batches <= len(genome_strict_iter_dataloader)
genome_random_iter_dataloader = DataLoader(
GenomeIterDataset(**_TEST_GENOME_DATASET_KWARGS),
batch_size=_TEST_BATCH_SIZE,
drop_last=True,
)
_num_rand_batches: int = 0
for _batch_data in genome_random_iter_dataloader:
assert _batch_data[0].shape == (_TEST_BATCH_SIZE, _TEST_SEQ_LEN)
assert _batch_data[1].shape == (_TEST_BATCH_SIZE, _TEST_SEQ_LEN)
_num_rand_batches += 1
if _num_rand_batches > _num_batches:
return
assert False
def test_genome_dataset_indexing_time(self):
_genome_parent_dir_path: str = E_COLI_GENOME_PARENT_DIR_PATH
_genome_dir_paths: List[str] = [
os.path.join(_genome_parent_dir_path, _genome_id)
for _genome_id in os.listdir(_genome_parent_dir_path)
if os.path.isdir(os.path.join(_genome_parent_dir_path, _genome_id))
]
_start_time = time.time()
_genome_dataset = GenomeDataset(
_genome_dir_paths,
seq_len=_TEST_SEQ_LEN,
max_num_paddings=_TEST_MAX_NUM_PADDINGS,
)
print(f'Creating a genome dataset with {len(_genome_dir_paths)} '
f'genomes takes {time.time() - _start_time:.2f} seconds.')
_test_indices: List[int] = \
random.sample(range(len(_genome_dataset)), _TEST_NUM_SAMPLES)
_start_time = time.time()
for _i in _test_indices:
assert _genome_dataset[_i]
print(f'Indexing {_TEST_NUM_SAMPLES} samples from the dataset '
f'takes {time.time() - _start_time:.2f} seconds.')
if __name__ == '__main__':
unittest.main()
| true | true |
f737cccc194654a1c722187f2a80a8228f3d53cc | 5,229 | py | Python | ECore_Copier_MM/transformation-Large/HeannotationOUTdetailsSolveRefEAnnotationEStringToStringMapEntryEAnnotationEStringToStringMapEntry.py | levilucio/SyVOLT | 7526ec794d21565e3efcc925a7b08ae8db27d46a | [
"MIT"
] | 3 | 2017-06-02T19:26:27.000Z | 2021-06-14T04:25:45.000Z | ECore_Copier_MM/transformation-Large/HeannotationOUTdetailsSolveRefEAnnotationEStringToStringMapEntryEAnnotationEStringToStringMapEntry.py | levilucio/SyVOLT | 7526ec794d21565e3efcc925a7b08ae8db27d46a | [
"MIT"
] | 8 | 2016-08-24T07:04:07.000Z | 2017-05-26T16:22:47.000Z | ECore_Copier_MM/transformation-Large/HeannotationOUTdetailsSolveRefEAnnotationEStringToStringMapEntryEAnnotationEStringToStringMapEntry.py | levilucio/SyVOLT | 7526ec794d21565e3efcc925a7b08ae8db27d46a | [
"MIT"
] | 1 | 2019-10-31T06:00:23.000Z | 2019-10-31T06:00:23.000Z |
from core.himesis import Himesis
class HeannotationOUTdetailsSolveRefEAnnotationEStringToStringMapEntryEAnnotationEStringToStringMapEntry(Himesis):
    def __init__(self):
        """
        Creates the himesis graph representing the AToM3 model
        HeannotationOUTdetailsSolveRefEAnnotationEStringToStringMapEntryEAnnotationEStringToStringMapEntry.
        """
        # Flag this instance as compiled now
        self.is_compiled = True

        super(HeannotationOUTdetailsSolveRefEAnnotationEStringToStringMapEntryEAnnotationEStringToStringMapEntry, self).__init__(
            name='HeannotationOUTdetailsSolveRefEAnnotationEStringToStringMapEntryEAnnotationEStringToStringMapEntry',
            num_nodes=27, edges=[])

        # Add the edges
        self.add_edges([
            [0, 6], [6, 5], [0, 8], [8, 7], [1, 10], [10, 9], [1, 12],
            [12, 11], [5, 3], [3, 7], [9, 4], [4, 11], [9, 13], [13, 5],
            [11, 14], [14, 7], [9, 15], [15, 16], [17, 18], [18, 16],
            [17, 19], [19, 20], [11, 21], [21, 22], [23, 24], [24, 22],
            [23, 25], [25, 26], [0, 2], [2, 1]])

        # Set the graph attributes
        self["mm__"] = ['HimesisMM']
        self["name"] = "eannotationOUTdetailsSolveRefEAnnotationEStringToStringMapEntryEAnnotationEStringToStringMapEntry"
        self["GUID__"] = 6128048148929558276

        # Per-vertex attribute table: vertex index -> attribute mapping.
        node_attributes = {
            0: {"mm__": "MatchModel", "GUID__": 3241548613573966355},
            1: {"mm__": "ApplyModel", "GUID__": 7588304692645087636},
            2: {"mm__": "paired_with", "GUID__": 2842412945556472011},
            3: {"associationType": "details", "mm__": "directLink_S",
                "GUID__": 4504396410669904024},
            4: {"associationType": "details", "mm__": "directLink_T",
                "GUID__": 278924163179204411},
            5: {"name": "", "classtype": "EAnnotation", "mm__": "EAnnotation",
                "cardinality": "+", "GUID__": 8921102541237060280},
            6: {"mm__": "match_contains", "GUID__": 5548880848816893316},
            7: {"name": "", "classtype": "EStringToStringMapEntry",
                "mm__": "EStringToStringMapEntry", "cardinality": "+",
                "GUID__": 3620815062825569950},
            8: {"mm__": "match_contains", "GUID__": 4633829444907935660},
            9: {"name": "", "classtype": "EAnnotation", "mm__": "EAnnotation",
                "cardinality": "1", "GUID__": 5286614269433226811},
            10: {"mm__": "apply_contains", "GUID__": 4260923047893991880},
            11: {"name": "", "classtype": "EStringToStringMapEntry",
                 "mm__": "EStringToStringMapEntry", "cardinality": "1",
                 "GUID__": 9089520517795853435},
            12: {"mm__": "apply_contains", "GUID__": 7992456823388861929},
            13: {"mm__": "backward_link", "type": "ruleDef",
                 "GUID__": 5846802733544168611},
            14: {"mm__": "backward_link", "type": "ruleDef",
                 "GUID__": 8076248770867941562},
            15: {"mm__": "hasAttribute_T", "GUID__": 5555931425631113564},
            16: {"name": "ApplyAttribute", "Type": "'String'",
                 "mm__": "Attribute", "GUID__": 458427999981145130},
            17: {"name": "eq_", "mm__": "Equation",
                 "GUID__": 7239587957748939154},
            18: {"mm__": "leftExpr", "GUID__": 4582391555812468568},
            19: {"mm__": "rightExpr", "GUID__": 3632186366108425725},
            20: {"name": "solveRef", "Type": "'String'", "mm__": "Constant",
                 "GUID__": 2856418966115592764},
            21: {"mm__": "hasAttribute_T", "GUID__": 6569304790853800773},
            22: {"name": "ApplyAttribute", "Type": "'String'",
                 "mm__": "Attribute", "GUID__": 5571281642373343531},
            23: {"name": "eq_", "mm__": "Equation",
                 "GUID__": 7415746926020923022},
            24: {"mm__": "leftExpr", "GUID__": 8365403988732939094},
            25: {"mm__": "rightExpr", "GUID__": 5175019940122105361},
            26: {"name": "solveRef", "Type": "'String'", "mm__": "Constant",
                 "GUID__": 2895318596396602101},
        }
        # Set the node attributes
        for vertex_index, attributes in node_attributes.items():
            for attribute_name, attribute_value in attributes.items():
                self.vs[vertex_index][attribute_name] = attribute_value
| 50.278846 | 298 | 0.562631 |
from core.himesis import Himesis
class HeannotationOUTdetailsSolveRefEAnnotationEStringToStringMapEntryEAnnotationEStringToStringMapEntry(Himesis):
def __init__(self):
self.is_compiled = True
super(HeannotationOUTdetailsSolveRefEAnnotationEStringToStringMapEntryEAnnotationEStringToStringMapEntry, self).__init__(name='HeannotationOUTdetailsSolveRefEAnnotationEStringToStringMapEntryEAnnotationEStringToStringMapEntry', num_nodes=27, edges=[])
self.add_edges([[0, 6], [6, 5], [0, 8], [8, 7], [1, 10], [10, 9], [1, 12], [12, 11], [5, 3], [3, 7], [9, 4], [4, 11], [9, 13], [13, 5], [11, 14], [14, 7], [9, 15], [15, 16], [17, 18], [18, 16], [17, 19], [19, 20], [11, 21], [21, 22], [23, 24], [24, 22], [23, 25], [25, 26], [0, 2], [2, 1]])
self["mm__"] = ['HimesisMM']
self["name"] = """eannotationOUTdetailsSolveRefEAnnotationEStringToStringMapEntryEAnnotationEStringToStringMapEntry"""
self["GUID__"] = 6128048148929558276
self.vs[0]["mm__"] = """MatchModel"""
self.vs[0]["GUID__"] = 3241548613573966355
self.vs[1]["mm__"] = """ApplyModel"""
self.vs[1]["GUID__"] = 7588304692645087636
self.vs[2]["mm__"] = """paired_with"""
self.vs[2]["GUID__"] = 2842412945556472011
self.vs[3]["associationType"] = """details"""
self.vs[3]["mm__"] = """directLink_S"""
self.vs[3]["GUID__"] = 4504396410669904024
self.vs[4]["associationType"] = """details"""
self.vs[4]["mm__"] = """directLink_T"""
self.vs[4]["GUID__"] = 278924163179204411
self.vs[5]["name"] = """"""
self.vs[5]["classtype"] = """EAnnotation"""
self.vs[5]["mm__"] = """EAnnotation"""
self.vs[5]["cardinality"] = """+"""
self.vs[5]["GUID__"] = 8921102541237060280
self.vs[6]["mm__"] = """match_contains"""
self.vs[6]["GUID__"] = 5548880848816893316
self.vs[7]["name"] = """"""
self.vs[7]["classtype"] = """EStringToStringMapEntry"""
self.vs[7]["mm__"] = """EStringToStringMapEntry"""
self.vs[7]["cardinality"] = """+"""
self.vs[7]["GUID__"] = 3620815062825569950
self.vs[8]["mm__"] = """match_contains"""
self.vs[8]["GUID__"] = 4633829444907935660
self.vs[9]["name"] = """"""
self.vs[9]["classtype"] = """EAnnotation"""
self.vs[9]["mm__"] = """EAnnotation"""
self.vs[9]["cardinality"] = """1"""
self.vs[9]["GUID__"] = 5286614269433226811
self.vs[10]["mm__"] = """apply_contains"""
self.vs[10]["GUID__"] = 4260923047893991880
self.vs[11]["name"] = """"""
self.vs[11]["classtype"] = """EStringToStringMapEntry"""
self.vs[11]["mm__"] = """EStringToStringMapEntry"""
self.vs[11]["cardinality"] = """1"""
self.vs[11]["GUID__"] = 9089520517795853435
self.vs[12]["mm__"] = """apply_contains"""
self.vs[12]["GUID__"] = 7992456823388861929
self.vs[13]["mm__"] = """backward_link"""
self.vs[13]["type"] = """ruleDef"""
self.vs[13]["GUID__"] = 5846802733544168611
self.vs[14]["mm__"] = """backward_link"""
self.vs[14]["type"] = """ruleDef"""
self.vs[14]["GUID__"] = 8076248770867941562
self.vs[15]["mm__"] = """hasAttribute_T"""
self.vs[15]["GUID__"] = 5555931425631113564
self.vs[16]["name"] = """ApplyAttribute"""
self.vs[16]["Type"] = """'String'"""
self.vs[16]["mm__"] = """Attribute"""
self.vs[16]["GUID__"] = 458427999981145130
self.vs[17]["name"] = """eq_"""
self.vs[17]["mm__"] = """Equation"""
self.vs[17]["GUID__"] = 7239587957748939154
self.vs[18]["mm__"] = """leftExpr"""
self.vs[18]["GUID__"] = 4582391555812468568
self.vs[19]["mm__"] = """rightExpr"""
self.vs[19]["GUID__"] = 3632186366108425725
self.vs[20]["name"] = """solveRef"""
self.vs[20]["Type"] = """'String'"""
self.vs[20]["mm__"] = """Constant"""
self.vs[20]["GUID__"] = 2856418966115592764
self.vs[21]["mm__"] = """hasAttribute_T"""
self.vs[21]["GUID__"] = 6569304790853800773
self.vs[22]["name"] = """ApplyAttribute"""
self.vs[22]["Type"] = """'String'"""
self.vs[22]["mm__"] = """Attribute"""
self.vs[22]["GUID__"] = 5571281642373343531
self.vs[23]["name"] = """eq_"""
self.vs[23]["mm__"] = """Equation"""
self.vs[23]["GUID__"] = 7415746926020923022
self.vs[24]["mm__"] = """leftExpr"""
self.vs[24]["GUID__"] = 8365403988732939094
self.vs[25]["mm__"] = """rightExpr"""
self.vs[25]["GUID__"] = 5175019940122105361
self.vs[26]["name"] = """solveRef"""
self.vs[26]["Type"] = """'String'"""
self.vs[26]["mm__"] = """Constant"""
self.vs[26]["GUID__"] = 2895318596396602101
| true | true |
f737cdc6b276f325296e721ec6026d32ab8eaed7 | 2,974 | py | Python | server/authentication_service/app/tests/test_auth.py | vartanbeno/SOEN487-Project | dce582cc1c78bca6400411aa93de746f3c950061 | [
"MIT"
] | null | null | null | server/authentication_service/app/tests/test_auth.py | vartanbeno/SOEN487-Project | dce582cc1c78bca6400411aa93de746f3c950061 | [
"MIT"
] | null | null | null | server/authentication_service/app/tests/test_auth.py | vartanbeno/SOEN487-Project | dce582cc1c78bca6400411aa93de746f3c950061 | [
"MIT"
] | null | null | null | import json
from unittest import TestCase
from app import create_app, db as test_db
from app.config import TestConfig
from app.models import User, Verification
test_app = create_app(TestConfig)
class TestAuth(TestCase):
    """Integration tests for the register/verify/login auth endpoints."""

    def setUp(self):
        """Push an app context, build the schema, and create a test client."""
        test_app.app_context().push()
        self.db = test_db
        self.db.create_all()
        self.app = test_app.test_client()

    def tearDown(self):
        """Drop every table so each test starts from a clean database."""
        self.db.drop_all()

    def _post_json(self, url, payload):
        """POST *payload* as JSON to *url* and return the response."""
        return self.app.post(url, data=json.dumps(payload),
                             content_type='application/json')

    @staticmethod
    def _body(response):
        """Decode a response body into a Python object."""
        return json.loads(str(response.data, "utf8"))

    def test_register(self):
        """Registering adds one user row and one verification row."""
        users_before = User.query.count()
        verifications_before = Verification.query.count()

        response = self._post_json("/api/auth/register", dict(
            email='test@test.com', username='test', password='test123'))
        self.assertEqual(response.status_code, 200)
        self.assertDictEqual(
            self._body(response),
            {"message": "Successfully registered. Please verify your account."})
        self.assertEqual(User.query.count(), users_before + 1)
        self.assertEqual(Verification.query.count(), verifications_before + 1)

    def test_verify(self):
        """A bad key is rejected; the real key verifies and is consumed."""
        # registering first gives us a pending verification to work with
        self.test_register()
        verifications_before = Verification.query.count()
        user = User.query.filter_by(username='test').first()
        verification = Verification.query.filter_by(user_id=user.id).first()

        response = self.app.post("/api/auth/verify?key=badkey")
        self.assertEqual(response.status_code, 400)
        self.assertDictEqual(self._body(response),
                             {"message": "Invalid verification."})

        response = self.app.post(f"/api/auth/verify?key={verification.key}")
        self.assertEqual(response.status_code, 200)
        self.assertDictEqual(
            self._body(response),
            {"message": "You've successfully verified your account."})
        # a successful verification deletes its row from the verification table
        self.assertEqual(Verification.query.count(), verifications_before - 1)

    def test_login(self):
        """A wrong password is rejected; correct credentials yield a token."""
        # a registered *and verified* user is required before logging in
        self.test_verify()

        response = self._post_json("/api/auth/login", dict(
            username='test', password='incorrectpassword'))
        self.assertEqual(response.status_code, 400)
        self.assertEqual(self._body(response),
                         {"message": "Incorrect username and/or password."})

        response = self._post_json("/api/auth/login", dict(
            username='test', password='test123'))
        self.assertEqual(response.status_code, 200)
        self.assertTrue('token' in self._body(response))
| 35.831325 | 103 | 0.665098 | import json
from unittest import TestCase
from app import create_app, db as test_db
from app.config import TestConfig
from app.models import User, Verification
test_app = create_app(TestConfig)
class TestAuth(TestCase):
def setUp(self):
context = test_app.app_context()
context.push()
self.db = test_db
self.db.create_all()
self.app = test_app.test_client()
def tearDown(self):
self.db.drop_all()
def test_register(self):
user_count = User.query.count()
verification_count = Verification.query.count()
response = self.app.post("/api/auth/register", data=json.dumps(dict(
email='test@test.com', username='test', password='test123'
)), content_type='application/json')
self.assertEqual(response.status_code, 200)
body = json.loads(str(response.data, "utf8"))
self.assertDictEqual(body, {"message": "Successfully registered. Please verify your account."})
self.assertEqual(User.query.count(), user_count + 1)
self.assertEqual(Verification.query.count(), verification_count + 1)
def test_verify(self):
self.test_register()
verification_count = Verification.query.count()
user = User.query.filter_by(username='test').first()
verification = Verification.query.filter_by(user_id=user.id).first()
response = self.app.post("/api/auth/verify?key=badkey")
self.assertEqual(response.status_code, 400)
body = json.loads(str(response.data, "utf8"))
self.assertDictEqual(body, {"message": "Invalid verification."})
response = self.app.post(f"/api/auth/verify?key={verification.key}")
self.assertEqual(response.status_code, 200)
body = json.loads(str(response.data, "utf8"))
self.assertDictEqual(body, {"message": "You've successfully verified your account."})
# a successful verification deletes its row from the verification table
self.assertEqual(Verification.query.count(), verification_count - 1)
def test_login(self):
# first register a user and verify them, we can just use the verification test above
self.test_verify()
response = self.app.post("/api/auth/login", data=json.dumps(dict(
username='test', password='incorrectpassword'
)), content_type='application/json')
self.assertEqual(response.status_code, 400)
body = json.loads(str(response.data, "utf8"))
self.assertEqual(body, {"message": "Incorrect username and/or password."})
response = self.app.post("/api/auth/login", data=json.dumps(dict(
username='test', password='test123'
)), content_type='application/json')
self.assertEqual(response.status_code, 200)
body = json.loads(str(response.data, "utf8"))
self.assertTrue('token' in body)
| true | true |
f737cfc63e3de1d5c3969e01c6843e5da2a0cd68 | 1,676 | py | Python | usb_test.py | SicariusNoctis/eagle-eye-tracker | 31e160057f1d2fa2c5fbd94ba4f5e9d064481c77 | [
"MIT"
] | 5 | 2018-02-10T00:59:29.000Z | 2018-08-18T06:38:45.000Z | usb_test.py | SicariusNoctis/eagle-eye-tracker | 31e160057f1d2fa2c5fbd94ba4f5e9d064481c77 | [
"MIT"
] | 7 | 2018-05-11T21:48:00.000Z | 2018-08-07T11:31:51.000Z | usb_test.py | SicariusNoctis/eagle-eye-tracker | 31e160057f1d2fa2c5fbd94ba4f5e9d064481c77 | [
"MIT"
] | 1 | 2019-09-10T01:03:25.000Z | 2019-09-10T01:03:25.000Z | #!/usr/bin/env python2
import time
import nxt.usbsock
import nxt.locator
class CommunicatorNXT(object):
    """Wrapper around an NXT brick for mailbox-based message passing."""

    def __init__(self):
        """Locate a brick over USB and remember the mailbox numbers."""
        # mailbox the brick listens on / mailbox we read replies from
        self.remote_inbox = 1
        self.local_inbox = 2
        self.mac_address = '00:16:53:01:B8:C3'
        # self.brick = nxt.usbsock.USBSock(self.mac_address).connect()
        self.brick = nxt.locator.find_one_brick()

    def send_test(self):
        """Write a probe message into every mailbox 0-19."""
        for inbox in range(20):
            self.brick.message_write(inbox, "{} test\n".format(inbox))

    def recv_test(self):
        """Drain and print whatever is waiting in mailboxes 0-19."""
        for inbox in range(20):
            try:
                print(inbox, self.brick.message_read(inbox, 0, remove=True))
            except nxt.error.DirProtError:
                # an empty mailbox raises -- nothing to read, move on
                pass

    def send_msg(self, msg):
        """Send *msg* to the brick's remote inbox."""
        self.brick.message_write(self.remote_inbox, msg)

    def recv_msg(self):
        """Read one message from the local inbox, dropping the trailing byte."""
        msg = self.brick.message_read(self.local_inbox, 0, remove=True)[1][:-1]
        return msg

    # Averages ~3ms ping from USB test
    def ping_test(self):
        """Send a timestamped ping and poll until the brick echoes it back.

        Prints the round-trip time (seconds) once the echo is received.
        """
        start_time = time.clock()
        msg = "ping {}".format(start_time)
        # BUG FIX: previously called through the module-level ``communicator``
        # global instead of ``self``, which broke any other instance of this
        # class (and failed entirely if the global was not yet defined).
        self.send_msg(msg)
        while True:
            reply = ""
            try:
                reply = self.recv_msg()
                print(reply)
            except nxt.error.DirProtError:
                # nothing waiting yet -- keep polling
                pass
            if reply == msg:
                print(time.clock() - start_time)
                return
# Script entry: connect to the first brick found and ping it forever.
communicator = CommunicatorNXT()
while True:
    communicator.ping_test()
    print('Ping test success!')

# communicator.send_msg("PC says hi")
# try:
#     print(communicator.recv_msg())
# except nxt.error.DirProtError as ex:
#     print(ex)
#     pass
| 26.603175 | 79 | 0.576969 |
import time
import nxt.usbsock
import nxt.locator
class CommunicatorNXT(object):
def __init__(self):
self.remote_inbox = 1
self.local_inbox = 2
self.mac_address = '00:16:53:01:B8:C3'
self.brick = nxt.locator.find_one_brick()
def send_test(self):
for inbox in range(20):
self.brick.message_write(inbox, "{} test\n".format(inbox))
def recv_test(self):
for inbox in range(20):
try:
print(inbox, self.brick.message_read(inbox, 0, remove=True))
except nxt.error.DirProtError:
pass
def send_msg(self, msg):
self.brick.message_write(self.remote_inbox, msg)
def recv_msg(self):
msg = self.brick.message_read(self.local_inbox, 0, remove=True)[1][:-1]
return msg
def ping_test(self):
start_time = time.clock()
msg = "ping {}".format(start_time)
communicator.send_msg(msg)
while True:
reply = ""
try:
reply = communicator.recv_msg()
print(reply)
except nxt.error.DirProtError:
pass
if reply == msg:
print(time.clock() - start_time)
return
communicator = CommunicatorNXT()
while True:
communicator.ping_test()
print('Ping test success!')
| true | true |
f737d1710e6966d7d6163431e2a298e0f833f286 | 9,309 | py | Python | zigbee/packet_parser.py | chawasit/BackyardZigbee | 522e10a73f7b5add6870de8e1e907f486de60cbd | [
"Unlicense"
] | null | null | null | zigbee/packet_parser.py | chawasit/BackyardZigbee | 522e10a73f7b5add6870de8e1e907f486de60cbd | [
"Unlicense"
] | null | null | null | zigbee/packet_parser.py | chawasit/BackyardZigbee | 522e10a73f7b5add6870de8e1e907f486de60cbd | [
"Unlicense"
] | null | null | null | # Copyright (c) 2015 Supakorn Yukonthong
import logging
import struct
class Parser:
    """Parser for 0x54 0xFE framed serial packets from the Zigbee bridge.

    ``parse`` turns a sequence of single-character strings (one byte each)
    into a dictionary keyed by ``'CMD'`` plus command-specific fields.
    """

    def __init__(self, log_level=None):
        """Configure module-level logging at *log_level* (None = defaults)."""
        self.logger = logging
        self.logger.basicConfig(level=log_level)
        self.logger.debug("DEBUG MODE ON")

    def parse(self, bc):
        """Parse byte code *bc* and return a packet dictionary.

        Returns an empty dict when the 0x54 0xFE header does not match or the
        command word is unknown.
        """
        self.logger.debug("Input Byte code : " + str(bc))
        packet_temp = {}
        if ord(bc[0]) == 0x54 and ord(bc[1]) == 0xfe:
            # 16-bit command word, big-endian, padded into a 32-bit unpack
            cmd = bytearray([0, 0, ord(bc[2]), ord(bc[3])])
            cmd_pack = struct.unpack('>I', cmd)[0]
            # CMD 1: device announce
            # layout: 0x54 0xfe [CMD:2] [byte count:1] [IEEE:8] [SRT:2] [CAP:1]
            if cmd_pack == 1:
                IEEE_ADDR = ''
                for i in range(5, 13):
                    IEEE_ADDR += '%02X' % (ord(bc[i]))
                SRT_ADDR = bytearray([0, 0, ord(bc[13]), ord(bc[14])])
                CAP = bytearray([0, 0, 0, ord(bc[15])])
                packet_temp['CMD'] = 1
                packet_temp['IEEE_ADDR'] = IEEE_ADDR
                packet_temp['SHORT_ADDR'] = struct.unpack('>I', SRT_ADDR)[0]
                packet_temp['CAP'] = struct.unpack('>I', CAP)[0]
            # CMD 2: attribute report with a ZCL-typed payload
            elif cmd_pack == 2:
                packet_temp['CMD'] = 2
                packet_temp['SHORT_ADDR'] = struct.unpack('>I', bytearray([0, 0, ord(bc[5]), ord(bc[6])]))[0]
                packet_temp['EP'] = ord(bc[7])
                packet_temp['CLUSTER_ID'] = struct.unpack('>I', bytearray([0, 0, ord(bc[8]), ord(bc[9])]))[0]
                packet_temp['ATTR_ID'] = struct.unpack('>I', bytearray([0, 0, ord(bc[10]), ord(bc[11])]))[0]
                packet_temp['DATA_TYPE'] = ord(bc[12])
                # check Data type
                if packet_temp['DATA_TYPE'] == 0x10:
                    # ZCL_DATATYPE_BOOLEAN -- single byte, no length field
                    packet_temp['DATA'] = ord(bc[13])
                else:
                    packet_temp['DATA_LENGTH'] = struct.unpack('>I', bytearray([0, 0, ord(bc[13]), ord(bc[14])]))[0]
                    # uint8
                    if packet_temp['DATA_TYPE'] == 0x20:
                        packet_temp['DATA'] = ord(bc[15])
                    # 32-bit BitMap
                    elif packet_temp['DATA_TYPE'] == 0x1b:
                        packet_temp['DATA'] = bin(
                            struct.unpack('>I', bytearray([ord(bc[15]), ord(bc[16]), ord(bc[17]), ord(bc[18])]))[0])
                    # Signed 16-bit integer
                    elif packet_temp['DATA_TYPE'] == 0x29:
                        packet_temp['DATA'] = struct.unpack('<h', bytearray([ord(bc[15]), ord(bc[16])]))[0]
                    # Unsigned 16-bit integer
                    elif packet_temp['DATA_TYPE'] == 0x21:
                        packet_temp['DATA'] = struct.unpack('<H', bytearray([ord(bc[15]), ord(bc[16])]))[0]
                    # 8-bit enumeration
                    elif packet_temp['DATA_TYPE'] == 0x30:
                        packet_temp['DATA'] = ord(bc[15])
                    # character string, first byte is size of string
                    elif packet_temp['DATA_TYPE'] == 0x42:
                        packet_temp['DATA'] = ''
                        for i in range(0, packet_temp['DATA_LENGTH'] - 1):
                            packet_temp['DATA'] += bc[16 + i]
                    else:
                        # BUG FIX: this debug line previously sat *after* the
                        # elif chain and fired unconditionally; it now only
                        # logs for genuinely unknown data types.
                        self.logger.debug("NO DATA MATCHING")
            # CMD 3: device timeout report
            elif cmd_pack == 3:
                packet_temp['CMD'] = 3
                packet_temp['SHORT_ADDR'] = struct.unpack('>I', bytearray([0, 0, ord(bc[5]), ord(bc[6])]))[0]
                packet_temp['TIMEOUT'] = struct.unpack('>I', bytearray([0, 0, ord(bc[7]), ord(bc[8])]))[0]
            # CMD 4: cached-device table chunk
            elif cmd_pack == 4:
                # BUG FIX: use floor division so the result stays an int under
                # Python 3 (plain `/` would yield a float and break range()).
                CacheDeviceAmount = (ord(bc[4]) - 4) // 2
                packet_temp['CMD'] = 4
                packet_temp['CacheDeviceInPacket'] = CacheDeviceAmount
                packet_temp['StartIndex'] = struct.unpack('>I', bytearray([0, 0, ord(bc[7]), ord(bc[8])]))[0]
                packet_temp['CacheDeviceAmount'] = struct.unpack('>I', bytearray([0, 0, ord(bc[5]), ord(bc[6])]))[0]
                CacheDeviceTbList = []
                for i in range(0, packet_temp['CacheDeviceInPacket']):
                    CacheDeviceTbList.append(
                        struct.unpack('>I', bytearray([0, 0, ord(bc[9 + (i * 2)]), ord(bc[10 + (i * 2)])]))[0])
                packet_temp['CacheDeviceTable'] = CacheDeviceTbList
            # CMD 5: simple status reply
            elif cmd_pack == 5:
                packet_temp['CMD'] = 5
                packet_temp['STATUS'] = ord(bc[5])
            # CMD 6: active-endpoint list for a device
            elif cmd_pack == 6:
                packet_temp['CMD'] = 6
                ActiveEPCount = ord(bc[7])
                ActiveEPList = []
                for i in range(0, ActiveEPCount):
                    ActiveEPList.append(ord(bc[8 + i]))
                packet_temp['ACTIVEEPLIST'] = ActiveEPList
                packet_temp['ACTIVEEPLISTCOUNT'] = ActiveEPCount
                packet_temp['SHORT_ADDR'] = struct.unpack('>I', bytearray([0, 0, ord(bc[5]), ord(bc[6])]))[0]
            # CMD 7: simple descriptor (profile/device id + cluster lists)
            elif cmd_pack == 7:
                packet_temp['CMD'] = 7
                packet_temp['SHORT_ADDR'] = struct.unpack('>I', bytearray([0, 0, ord(bc[5]), ord(bc[6])]))[0]
                packet_temp['EP'] = ord(bc[7])
                packet_temp['APPLICATION_PROFILE_ID'] = struct.unpack('>I', bytearray([0, 0, ord(bc[8]), ord(bc[9])]))[
                    0]
                packet_temp['APPLICATION_DEVICE_ID'] = struct.unpack('>I', bytearray([0, 0, ord(bc[10]), ord(bc[11])]))[
                    0]
                packet_temp['APPLICATION_DEVICE_VERSION'] = ord(bc[12])
                packet_temp['RESERVED'] = ord(bc[13])
                packet_temp['APPLICATION_NUM_IN_CLUSTERS'] = ord(bc[14])
                APPLICATION_IN_CLUSTERS_LIST = []
                APPLICATION_OUT_CLUSTERS_LIST = []
                index_count = 15
                for i in range(0, packet_temp['APPLICATION_NUM_IN_CLUSTERS']):
                    APPLICATION_IN_CLUSTERS_LIST.append(
                        struct.unpack('>I', bytearray([0, 0, ord(bc[15 + (i * 2)]), ord(bc[16 + (i * 2)])]))[0])
                    index_count = index_count + 2
                packet_temp['APPLICATION_IN_CLUSTERS'] = APPLICATION_IN_CLUSTERS_LIST
                packet_temp['APPLICATION_NUM_OUT_CLUSTERS'] = ord(bc[index_count])
                index_count = index_count + 1
                for i in range(0, packet_temp['APPLICATION_NUM_OUT_CLUSTERS']):
                    APPLICATION_OUT_CLUSTERS_LIST.append(struct.unpack('>I', bytearray(
                        [0, 0, ord(bc[index_count + (i * 2)]), ord(bc[index_count + 1 + (i * 2)])]))[0])
                packet_temp['APPLICATION_OUT_CLUSTERS'] = APPLICATION_OUT_CLUSTERS_LIST
            # CMD 8: simple status reply
            elif cmd_pack == 8:
                packet_temp['CMD'] = 8
                packet_temp['STATUS'] = ord(bc[5])
            # CMD 9: cluster-specific report (64513 is the custom GEKKO cluster)
            elif cmd_pack == 9:
                packet_temp['CMD'] = 9
                packet_temp['SHORT_ADDR'] = struct.unpack('>I', bytearray([0, 0, ord(bc[5]), ord(bc[6])]))[0]
                # packet_temp['EP'] = ord(bc[7])
                packet_temp['CLUSTER_ID'] = struct.unpack('>I', bytearray([0, 0, ord(bc[8]), ord(bc[9])]))[0]
                # check cluster id (64513 is customize cluster for GEKKO)
                if packet_temp['CLUSTER_ID'] == 64513:
                    packet_temp['REGISTER_COUNT'] = ord(bc[10])
                    packet_temp['REGISTERS'] = []
                    for i in range(0, packet_temp['REGISTER_COUNT']):
                        packet_temp['REGISTERS'].append(ord(bc[11 + i]))
                    packet_temp['LOGO_PACKET_TYPE'] = ord(bc[11])
                    # robotic sensor readings, two big-endian 16-bit values
                    packet_temp['SENSOR1'] = \
                        struct.unpack('>I', bytearray([0, 0, ord(bc[12]), ord(bc[13])]))[0]
                    packet_temp['SENSOR2'] = \
                        struct.unpack('>I', bytearray([0, 0, ord(bc[14]), ord(bc[15])]))[0]
            # CMD 10 / CMD 11: IEEE + short address pair reports
            elif cmd_pack == 10:
                packet_temp['CMD'] = 10
                packet_temp['SHORT_ADDR'] = struct.unpack('>I', bytearray([0, 0, ord(bc[13]), ord(bc[14])]))[0]
                IEEE_ADDR = ''
                for i in range(5, 13):
                    IEEE_ADDR += '%02X' % (ord(bc[i]))
                packet_temp['IEEE_ADDR'] = IEEE_ADDR
            elif cmd_pack == 11:
                packet_temp['CMD'] = 11
                packet_temp['SHORT_ADDR'] = struct.unpack('>I', bytearray([0, 0, ord(bc[13]), ord(bc[14])]))[0]
                IEEE_ADDR = ''
                for i in range(5, 13):
                    IEEE_ADDR += '%02X' % (ord(bc[i]))
                packet_temp['IEEE_ADDR'] = IEEE_ADDR
        else:
            self.logger.debug("BAD HEADER")
        self.logger.debug("Packet : " + str(packet_temp))
        return packet_temp
if __name__ == "__main__":
test = Parser(log_level=logging.DEBUG)
aa = [chr(0x54), chr(0xfe), chr(0), chr(1), chr(11), chr(0x0), chr(0x12), chr(0x4b), chr(0x0), chr(0x7), chr(0x1a),
chr(0x6e), chr(0x8b), chr(0x35), chr(0xf6), chr(0x8e)]
test.parse(aa)
| 53.5 | 120 | 0.486196 |
import logging
import struct
class Parser:
    """Decode raw serial frames (sequences of 1-char strings) from a
    ZigBee-style coordinator into dicts keyed by upper-case field names.

    Every frame starts with the magic bytes 0x54 0xfe, followed by a
    big-endian 16-bit command id at offsets 2-3; the rest of the layout
    depends on that command. Field meanings below are inferred from the
    code -- confirm against the device's protocol specification.
    """
    def __init__(self, log_level=None):
        # Uses the logging module itself (global basicConfig), not a named logger.
        self.logger = logging
        self.logger.basicConfig(level=log_level)
        self.logger.debug("DEBUG MODE ON")
    def parse(self, bc):
        """Decode one frame.

        :param bc: indexable sequence of single-character strings (e.g. a
            serial read); each byte is recovered with ord().
        :return: dict of decoded fields; empty dict when the header is invalid.
        """
        self.logger.debug("Input Byte code : " + str(bc))
        packet_temp = {}
        # Magic header 0x54 0xfe marks a valid frame.
        if ord(bc[0]) == 0x54 and ord(bc[1]) == 0xfe:
            # Command id: big-endian 16-bit value at offsets 2-3, zero-padded to 32 bits.
            cmd = bytearray([0, 0, ord(bc[2]), ord(bc[3])])
            cmd_pack = struct.unpack('>I', cmd)[0]
            if cmd_pack == 1:
                # Device announce: 8-byte IEEE address (hex string), short address, capability byte.
                IEEE_ADDR = ''
                for i in range(5, 13):
                    IEEE_ADDR += '%02X' % (ord(bc[i]))
                SRT_ADDR = bytearray([0, 0, ord(bc[13]), ord(bc[14])])
                CAP = bytearray([0, 0, 0, ord(bc[15])])
                packet_temp['CMD'] = 1
                packet_temp['IEEE_ADDR'] = IEEE_ADDR
                packet_temp['SHORT_ADDR'] = struct.unpack('>I', SRT_ADDR)[0]
                packet_temp['CAP'] = struct.unpack('>I', CAP)[0]
            elif cmd_pack == 2:
                # Attribute report: cluster/attribute ids plus a type-tagged payload.
                packet_temp['CMD'] = 2
                packet_temp['SHORT_ADDR'] = struct.unpack('>I', bytearray([0, 0, ord(bc[5]), ord(bc[6])]))[0]
                packet_temp['EP'] = ord(bc[7])
                packet_temp['CLUSTER_ID'] = struct.unpack('>I', bytearray([0, 0, ord(bc[8]), ord(bc[9])]))[0]
                packet_temp['ATTR_ID'] = struct.unpack('>I', bytearray([0, 0, ord(bc[10]), ord(bc[11])]))[0]
                packet_temp['DATA_TYPE'] = ord(bc[12])
                # 0x10 (boolean) carries a single byte with no length prefix.
                if packet_temp['DATA_TYPE'] == 0x10:
                    packet_temp['DATA'] = ord(bc[13])
                else:
                    # All other types carry a 16-bit length before the payload.
                    packet_temp['DATA_LENGTH'] = struct.unpack('>I', bytearray([0, 0, ord(bc[13]), ord(bc[14])]))[0]
                    if packet_temp['DATA_TYPE'] == 0x20:
                        # uint8
                        packet_temp['DATA'] = ord(bc[15])
                    elif packet_temp['DATA_TYPE'] == 0x1b:
                        # 32-bit bitmap, rendered as a binary string.
                        packet_temp['DATA'] = bin(
                            struct.unpack('>I', bytearray([ord(bc[15]), ord(bc[16]), ord(bc[17]), ord(bc[18])]))[0])
                    elif packet_temp['DATA_TYPE'] == 0x29:
                        # int16, little-endian.
                        packet_temp['DATA'] = struct.unpack('<h', bytearray([ord(bc[15]), ord(bc[16])]))[0]
                    elif packet_temp['DATA_TYPE'] == 0x21:
                        # uint16, little-endian.
                        packet_temp['DATA'] = struct.unpack('<H', bytearray([ord(bc[15]), ord(bc[16])]))[0]
                    elif packet_temp['DATA_TYPE'] == 0x30:
                        # 8-bit enumeration.
                        packet_temp['DATA'] = ord(bc[15])
                    elif packet_temp['DATA_TYPE'] == 0x42:
                        # Character string: length byte counted in DATA_LENGTH, payload at 16+.
                        packet_temp['DATA'] = ''
                        for i in range(0, packet_temp['DATA_LENGTH'] - 1):
                            packet_temp['DATA'] += bc[16 + i]
                    # NOTE(review): this debug line runs after every non-0x10 payload,
                    # even when one of the branches above matched -- it looks like it
                    # was meant to be an `else` of the type chain; confirm intent.
                    self.logger.debug("NO DATA MATCHING")
            elif cmd_pack == 3:
                # Short address plus a timeout value (units unknown -- verify).
                packet_temp['CMD'] = 3
                packet_temp['SHORT_ADDR'] = struct.unpack('>I', bytearray([0, 0, ord(bc[5]), ord(bc[6])]))[0]
                packet_temp['TIMEOUT'] = struct.unpack('>I', bytearray([0, 0, ord(bc[7]), ord(bc[8])]))[0]
            elif cmd_pack == 4:
                # One page of the coordinator's cached-device table.
                # Payload length byte minus the 4 fixed bytes, 2 bytes per entry.
                CacheDeviceAmount = (ord(bc[4]) - 4) / 2
                packet_temp['CMD'] = 4
                packet_temp['CacheDeviceInPacket'] = CacheDeviceAmount
                packet_temp['StartIndex'] = struct.unpack('>I', bytearray([0, 0, ord(bc[7]), ord(bc[8])]))[0]
                packet_temp['CacheDeviceAmount'] = struct.unpack('>I', bytearray([0, 0, ord(bc[5]), ord(bc[6])]))[0]
                CacheDeviceTbList = []
                for i in range(0, packet_temp['CacheDeviceInPacket']):
                    CacheDeviceTbList.append(
                        struct.unpack('>I', bytearray([0, 0, ord(bc[9 + (i * 2)]), ord(bc[10 + (i * 2)])]))[0])
                packet_temp['CacheDeviceTable'] = CacheDeviceTbList
            elif cmd_pack == 5:
                # Simple status reply.
                packet_temp['CMD'] = 5
                packet_temp['STATUS'] = ord(bc[5])
            elif cmd_pack == 6:
                # Active-endpoint list for a device.
                packet_temp['CMD'] = 6
                ActiveEPCount = ord(bc[7])
                ActiveEPList = []
                for i in range(0, ActiveEPCount):
                    ActiveEPList.append(ord(bc[8 + i]))
                packet_temp['ACTIVEEPLIST'] = ActiveEPList
                packet_temp['ACTIVEEPLISTCOUNT'] = ActiveEPCount
                packet_temp['SHORT_ADDR'] = struct.unpack('>I', bytearray([0, 0, ord(bc[5]), ord(bc[6])]))[0]
            elif cmd_pack == 7:
                # Simple descriptor: profile/device ids plus in/out cluster lists.
                packet_temp['CMD'] = 7
                packet_temp['SHORT_ADDR'] = struct.unpack('>I', bytearray([0, 0, ord(bc[5]), ord(bc[6])]))[0]
                packet_temp['EP'] = ord(bc[7])
                packet_temp['APPLICATION_PROFILE_ID'] = struct.unpack('>I', bytearray([0, 0, ord(bc[8]), ord(bc[9])]))[
                    0]
                packet_temp['APPLICATION_DEVICE_ID'] = struct.unpack('>I', bytearray([0, 0, ord(bc[10]), ord(bc[11])]))[
                    0]
                packet_temp['APPLICATION_DEVICE_VERSION'] = ord(bc[12])
                packet_temp['RESERVED'] = ord(bc[13])
                packet_temp['APPLICATION_NUM_IN_CLUSTERS'] = ord(bc[14])
                APPLICATION_IN_CLUSTERS_LIST = []
                APPLICATION_OUT_CLUSTERS_LIST = []
                # index_count walks past the variable-length in-cluster list to
                # find where the out-cluster count begins.
                index_count = 15
                for i in range(0, packet_temp['APPLICATION_NUM_IN_CLUSTERS']):
                    APPLICATION_IN_CLUSTERS_LIST.append(
                        struct.unpack('>I', bytearray([0, 0, ord(bc[15 + (i * 2)]), ord(bc[16 + (i * 2)])]))[0])
                    index_count = index_count + 2
                packet_temp['APPLICATION_IN_CLUSTERS'] = APPLICATION_IN_CLUSTERS_LIST
                packet_temp['APPLICATION_NUM_OUT_CLUSTERS'] = ord(bc[index_count])
                index_count = index_count + 1
                for i in range(0, packet_temp['APPLICATION_NUM_OUT_CLUSTERS']):
                    APPLICATION_OUT_CLUSTERS_LIST.append(struct.unpack('>I', bytearray(
                        [0, 0, ord(bc[index_count + (i * 2)]), ord(bc[index_count + 1 + (i * 2)])]))[0])
                packet_temp['APPLICATION_OUT_CLUSTERS'] = APPLICATION_OUT_CLUSTERS_LIST
            elif cmd_pack == 8:
                # Simple status reply.
                packet_temp['CMD'] = 8
                packet_temp['STATUS'] = ord(bc[5])
            elif cmd_pack == 9:
                # Cluster message; 64513 is a custom (GEKKO) cluster carrying
                # register values plus two 16-bit sensor readings.
                packet_temp['CMD'] = 9
                packet_temp['SHORT_ADDR'] = struct.unpack('>I', bytearray([0, 0, ord(bc[5]), ord(bc[6])]))[0]
                packet_temp['CLUSTER_ID'] = struct.unpack('>I', bytearray([0, 0, ord(bc[8]), ord(bc[9])]))[0]
                if packet_temp['CLUSTER_ID'] == 64513:
                    packet_temp['REGISTER_COUNT'] = ord(bc[10])
                    packet_temp['REGISTERS'] = []
                    for i in range(0, packet_temp['REGISTER_COUNT']):
                        packet_temp['REGISTERS'].append(ord(bc[11 + i]))
                    # NOTE(review): byte 11 is read both as the first register
                    # (above) and as the packet type here -- confirm the layout.
                    packet_temp['LOGO_PACKET_TYPE'] = ord(bc[11])
                    packet_temp['SENSOR1'] = \
                        struct.unpack('>I', bytearray([0, 0, ord(bc[12]), ord(bc[13])]))[0]
                    packet_temp['SENSOR2'] = \
                        struct.unpack('>I', bytearray([0, 0, ord(bc[14]), ord(bc[15])]))[0]
            elif cmd_pack == 10:
                # IEEE address <-> short address mapping.
                packet_temp['CMD'] = 10
                packet_temp['SHORT_ADDR'] = struct.unpack('>I', bytearray([0, 0, ord(bc[13]), ord(bc[14])]))[0]
                IEEE_ADDR = ''
                for i in range(5, 13):
                    IEEE_ADDR += '%02X' % (ord(bc[i]))
                packet_temp['IEEE_ADDR'] = IEEE_ADDR
            elif cmd_pack == 11:
                # Same layout as cmd 10.
                packet_temp['CMD'] = 11
                packet_temp['SHORT_ADDR'] = struct.unpack('>I', bytearray([0, 0, ord(bc[13]), ord(bc[14])]))[0]
                IEEE_ADDR = ''
                for i in range(5, 13):
                    IEEE_ADDR += '%02X' % (ord(bc[i]))
                packet_temp['IEEE_ADDR'] = IEEE_ADDR
        else:
            self.logger.debug("BAD HEADER")
        self.logger.debug("Packet : " + str(packet_temp))
        return packet_temp
if __name__ == "__main__":
    # Smoke test: run one hand-built frame (cmd == 1, device announce) through the parser.
    raw_bytes = [0x54, 0xfe, 0, 1, 11, 0x0, 0x12, 0x4b, 0x0, 0x7, 0x1a,
                 0x6e, 0x8b, 0x35, 0xf6, 0x8e]
    parser = Parser(log_level=logging.DEBUG)
    parser.parse([chr(b) for b in raw_bytes])
| true | true |
f737d18b4f854215789d4068e868db9ac33e324e | 1,001 | py | Python | Algorithms/Medium/34. Find First and Last Position of Element in Sorted Array/answer.py | KenWoo/Algorithm | 4012a2f0a099a502df1e5df2e39faa75fe6463e8 | [
"Apache-2.0"
] | null | null | null | Algorithms/Medium/34. Find First and Last Position of Element in Sorted Array/answer.py | KenWoo/Algorithm | 4012a2f0a099a502df1e5df2e39faa75fe6463e8 | [
"Apache-2.0"
] | null | null | null | Algorithms/Medium/34. Find First and Last Position of Element in Sorted Array/answer.py | KenWoo/Algorithm | 4012a2f0a099a502df1e5df2e39faa75fe6463e8 | [
"Apache-2.0"
] | null | null | null | from typing import List
class Solution:
    def searchRange(self, nums: List[int], target: int) -> List[int]:
        """Return [first, last] indices of target in the sorted list nums.

        Uses two O(log n) binary searches (one biased low, one biased high).
        Returns [-1, -1] when target is absent or nums is empty.
        """
        if not nums:
            return [-1, -1]

        def lowest_candidate() -> int:
            # Converges on the leftmost index that could hold target.
            lo, hi = 0, len(nums) - 1
            while lo < hi:
                mid = (lo + hi) // 2
                if nums[mid] < target:
                    lo = mid + 1
                elif nums[mid] > target:
                    hi = mid - 1
                else:
                    hi = mid
            return lo

        def highest_candidate() -> int:
            # Converges on the rightmost index that could hold target.
            lo, hi = 0, len(nums) - 1
            while lo < hi:
                mid = (lo + hi + 1) // 2  # bias upward so the loop terminates
                if nums[mid] < target:
                    lo = mid + 1
                elif nums[mid] > target:
                    hi = mid - 1
                else:
                    lo = mid
            return hi

        lo = lowest_candidate()
        first = lo if nums[lo] == target else -1
        hi = highest_candidate()
        last = hi if nums[hi] == target else -1
        return [first, last]
s = Solution()
result = s.searchRange([2, 2, 3], 3)
print(result)
| 26.342105 | 69 | 0.436563 | from typing import List
class Solution:
    def searchRange(self, nums: List[int], target: int) -> List[int]:
        """Return [first, last] indices of target in the sorted list nums,
        or [-1, -1] when target is absent or nums is empty. O(log n) via
        two binary searches."""
        if not nums:
            return [-1, -1]
        N = len(nums)
        # First search: bias toward the left; on a match keep shrinking right.
        left = 0
        right = N - 1
        while left < right:
            mid = (left + right) // 2
            if nums[mid] == target:
                right = mid
            elif nums[mid] < target:
                left = mid + 1
            else:
                right = mid - 1
        start = -1 if nums[left] != target else left
        # Second search: bias toward the right (mid rounds up so the loop
        # terminates); on a match keep growing left.
        left = 0
        right = N - 1
        while left < right:
            mid = (left + right + 1) // 2
            if nums[mid] == target:
                left = mid
            elif nums[mid] < target:
                left = mid + 1
            else:
                right = mid - 1
        end = -1 if nums[right] != target else right
        return [start, end]
if __name__ == "__main__":
    # Quick manual check: target 3 occupies only index 2 -> prints [2, 2].
    solver = Solution()
    print(solver.searchRange([2, 2, 3], 3))
| true | true |
f737d1a7e71abc82e32858b924b4b9ad9c4dedd6 | 6,981 | py | Python | src/toil/provisioners/abstractProvisioner.py | david4096/toil | 491e3cceafc2462395bb83ce759da7b008bb27f3 | [
"Apache-2.0"
] | null | null | null | src/toil/provisioners/abstractProvisioner.py | david4096/toil | 491e3cceafc2462395bb83ce759da7b008bb27f3 | [
"Apache-2.0"
] | 1 | 2017-10-28T00:39:00.000Z | 2017-10-28T00:39:00.000Z | src/toil/provisioners/abstractProvisioner.py | david4096/toil | 491e3cceafc2462395bb83ce759da7b008bb27f3 | [
"Apache-2.0"
] | null | null | null | # Copyright (C) 2015-2016 Regents of the University of California
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from builtins import object
import logging
from abc import ABCMeta, abstractmethod
from collections import namedtuple
from bd2k.util.retry import never
from future.utils import with_metaclass
# Module-level logger for provisioner events.
log = logging.getLogger(__name__)
Shape = namedtuple("_Shape", "wallTime memory cores disk preemptable")
"""
Represents a job or a node's "shape", in terms of the dimensions of memory, cores, disk and
wall-time allocation.
The wallTime attribute stores the number of seconds of a node allocation, e.g. 3600 for AWS,
or 60 for Azure. FIXME: and for jobs?
The memory and disk attributes store the number of bytes required by a job (or provided by a
node) in RAM or on disk (SSD or HDD), respectively.
"""
class AbstractProvisioner(with_metaclass(ABCMeta, object)):
    """
    An abstract base class to represent the interface for provisioning worker nodes to use in a
    Toil cluster.
    """
    def __init__(self, config=None):
        """
        Initialize provisioner. If config and batchSystem are not specified, the
        provisioner is being used to manage nodes without a workflow
        :param config: Config from common.py
        :param batchSystem: The batchSystem used during run
        """
        self.config = config
        self.stop = False
        self.staticNodesDict = {}  # dict with keys of nodes private IPs, val is nodeInfo
        self.static = {}
    def getStaticNodes(self, preemptable):
        """
        Return the statically provisioned nodes of the given preemptability.
        :param preemptable: whether to return the preemptable or non-preemptable nodes
        :return: dict mapping private IP -> Node, as recorded by setStaticNodes
        """
        return self.static[preemptable]
    @staticmethod
    def retryPredicate(e):
        """
        Return true if the exception e should be retried by the cluster scaler.
        For example, should return true if the exception was due to exceeding an API rate limit.
        The error will be retried with exponential backoff.
        :param e: exception raised during execution of setNodeCount
        :return: boolean indicating whether the exception e should be retried
        """
        return never(e)
    def setStaticNodes(self, nodes, preemptable):
        """
        Used to track statically provisioned nodes. These nodes are
        treated differently than autoscaled nodes in that they should not
        be automatically terminated.
        :param nodes: list of Node objects
        """
        prefix = 'non-' if not preemptable else ''
        log.debug("Adding %s to %spreemptable static nodes", nodes, prefix)
        if nodes is not None:
            self.static[preemptable] = {node.privateIP : node for node in nodes}
    @abstractmethod
    def addNodes(self, nodeType, numNodes, preemptable):
        """
        Used to add worker nodes to the cluster
        :param numNodes: The number of nodes to add
        :param preemptable: whether or not the nodes will be preemptable
        :return: number of nodes successfully added
        """
        raise NotImplementedError
    @abstractmethod
    def terminateNodes(self, nodes):
        """
        Terminate the nodes represented by given Node objects
        :param nodes: list of Node objects
        """
        raise NotImplementedError
    @abstractmethod
    def getProvisionedWorkers(self, nodeType, preemptable):
        """
        Gets all nodes of the given preemptability from the provisioner.
        Includes both static and autoscaled nodes.
        :param preemptable: Boolean value indicating whether to return preemptable nodes or
           non-preemptable nodes
        :return: list of Node objects
        """
        raise NotImplementedError
    @abstractmethod
    def remainingBillingInterval(self, node):
        """
        Calculate how much of a node's allocated billing interval is
        left in this cycle.
        :param node: Node object
        :return: float from 0 -> 1.0 representing percentage of pre-paid time left in cycle
        """
        raise NotImplementedError
    @abstractmethod
    def getNodeShape(self, nodeType=None, preemptable=False):
        """
        The shape of a preemptable or non-preemptable node managed by this provisioner. The node
        shape defines key properties of a machine, such as its number of cores or the time
        between billing intervals.
        :param str nodeType: Node type name to return the shape of.
        :rtype: Shape
        """
        raise NotImplementedError
    @classmethod
    @abstractmethod
    def rsyncLeader(cls, clusterName, args, **kwargs):
        """
        Rsyncs to the leader of the cluster with the specified name. The arguments are passed directly to
        Rsync.
        :param clusterName: name of the cluster to target
        :param args: list of string arguments to rsync. Identical to the normal arguments to rsync, but the
           host name of the remote host can be omitted. ex) ['/localfile', ':/remotedest']
        :param \**kwargs:
            See below
        :Keyword Arguments:
            * *strict*: if False, strict host key checking is disabled. (Enabled by default.)
        """
        raise NotImplementedError
    @classmethod
    @abstractmethod
    def launchCluster(cls, instanceType, keyName, clusterName, spotBid=None):
        """
        Launches a cluster with the specified instance type for the leader with the specified name.
        :param instanceType: desired type of the leader instance
        :param keyName: name of the ssh key pair to launch the instance with
        :param clusterName: desired identifier of the cluster
        :param spotBid: how much to bid for the leader instance. If none, use on demand pricing.
        :return:
        """
        raise NotImplementedError
    @classmethod
    @abstractmethod
    def sshLeader(cls, clusterName, args, **kwargs):
        """
        SSH into the leader instance of the specified cluster with the specified arguments to SSH.
        :param clusterName: name of the cluster to target
        :param args: list of string arguments to ssh.
        :param strict: If False, strict host key checking is disabled. (Enabled by default.)
        """
        raise NotImplementedError
    @classmethod
    @abstractmethod
    def destroyCluster(cls, clusterName):
        """
        Terminates all nodes in the specified cluster and cleans up all resources associated with the
        cluster.
        :param clusterName: identifier of the cluster to terminate.
        """
        raise NotImplementedError
| 35.257576 | 107 | 0.67741 |
from builtins import object
import logging
from abc import ABCMeta, abstractmethod
from collections import namedtuple
from bd2k.util.retry import never
from future.utils import with_metaclass
# Module-level logger for provisioner events.
log = logging.getLogger(__name__)
# A job's or node's resource "shape": seconds of wall time, bytes of memory,
# CPU cores, bytes of disk, and whether the allocation is preemptable.
Shape = namedtuple("_Shape", "wallTime memory cores disk preemptable")
class AbstractProvisioner(with_metaclass(ABCMeta, object)):
    """Abstract interface for provisioning worker nodes in a Toil cluster."""
    def __init__(self, config=None):
        """
        :param config: workflow configuration; None when the provisioner is
            managing nodes outside of a running workflow.
        """
        self.config = config
        self.stop = False
        # Maps a node's private IP to its node info (populated elsewhere).
        self.staticNodesDict = {}
        # Maps preemptability (bool) -> {private IP: Node} for static nodes.
        self.static = {}
    def getStaticNodes(self, preemptable):
        """Return the {private IP: Node} dict of static nodes for the given preemptability."""
        return self.static[preemptable]
    @staticmethod
    def retryPredicate(e):
        """
        Return True if exception e should be retried (with exponential backoff)
        by the cluster scaler, e.g. an API rate-limit error. This default never
        retries.
        """
        return never(e)
    def setStaticNodes(self, nodes, preemptable):
        """
        Record statically provisioned nodes so they are never auto-terminated.
        :param nodes: list of Node objects (or None to leave state unchanged)
        """
        prefix = 'non-' if not preemptable else ''
        log.debug("Adding %s to %spreemptable static nodes", nodes, prefix)
        if nodes is not None:
            self.static[preemptable] = {node.privateIP : node for node in nodes}
    @abstractmethod
    def addNodes(self, nodeType, numNodes, preemptable):
        """Add numNodes worker nodes of nodeType; return the number successfully added."""
        raise NotImplementedError
    @abstractmethod
    def terminateNodes(self, nodes):
        """Terminate the given list of Node objects."""
        raise NotImplementedError
    @abstractmethod
    def getProvisionedWorkers(self, nodeType, preemptable):
        """Return all nodes (static and autoscaled) of the given type and preemptability."""
        raise NotImplementedError
    @abstractmethod
    def remainingBillingInterval(self, node):
        """Return the fraction (0.0-1.0) of the node's billing interval still remaining."""
        raise NotImplementedError
    @abstractmethod
    def getNodeShape(self, nodeType=None, preemptable=False):
        """Return the Shape (cores, memory, disk, billing interval) for the given node type."""
        raise NotImplementedError
    @classmethod
    @abstractmethod
    def rsyncLeader(cls, clusterName, args, **kwargs):
        """Rsync to the cluster leader; args are passed through to rsync
        (remote host name may be omitted). Keyword 'strict'=False disables
        strict host key checking."""
        raise NotImplementedError
    @classmethod
    @abstractmethod
    def launchCluster(cls, instanceType, keyName, clusterName, spotBid=None):
        """Launch a cluster leader of instanceType under clusterName using ssh
        key keyName; spotBid (if given) requests spot pricing."""
        raise NotImplementedError
    @classmethod
    @abstractmethod
    def sshLeader(cls, clusterName, args, **kwargs):
        """SSH into the leader of clusterName with the given ssh arguments."""
        raise NotImplementedError
    @classmethod
    @abstractmethod
    def destroyCluster(cls, clusterName):
        """Terminate all nodes of clusterName and clean up its resources."""
        raise NotImplementedError
| true | true |
f737d293946a880afe9fe48eb38305bc94ad3c9a | 1,590 | py | Python | recommender/movierecommender/management/commands/load_movies.py | ibm-developer-skills-network/oroir-Build-a-Personal-Movie-Recommender-with-Django | fbc681cdea067c0cee91c158c632f83cff9db936 | [
"Apache-2.0"
] | null | null | null | recommender/movierecommender/management/commands/load_movies.py | ibm-developer-skills-network/oroir-Build-a-Personal-Movie-Recommender-with-Django | fbc681cdea067c0cee91c158c632f83cff9db936 | [
"Apache-2.0"
] | null | null | null | recommender/movierecommender/management/commands/load_movies.py | ibm-developer-skills-network/oroir-Build-a-Personal-Movie-Recommender-with-Django | fbc681cdea067c0cee91c158c632f83cff9db936 | [
"Apache-2.0"
] | null | null | null | import csv
import pandas as pd
from django.core.management import BaseCommand
from ...models import Movie
class Command(BaseCommand):
    """Management command: replace all Movie rows with the contents of a CSV file."""

    help = 'Load a movie csv file into the database'

    # CSV columns copied verbatim into the Movie model's fields.
    _FIELDS = (
        "imdb_id",
        "genres",
        "original_title",
        "original_language",
        "release_date",
        "overview",
        "vote_average",
        "vote_count",
        "poster_path",
    )

    def add_arguments(self, parser):
        """Register the --path option pointing at the CSV file to load."""
        parser.add_argument('--path', type=str)

    def handle(self, *args, **kwargs):
        """Wipe existing movies, then insert and report one row per CSV record."""
        print("Clean old movie data")
        Movie.objects.all().delete()
        frame = pd.read_csv(kwargs['path'])
        for _, record in frame.iterrows():
            Movie(**{name: record[name] for name in self._FIELDS}).save()
            print(f"{record['imdb_id']} saved...")
# python manage.py load_movies --path movies.csv | 38.780488 | 98 | 0.557233 | import csv
import pandas as pd
from django.core.management import BaseCommand
from ...models import Movie
class Command(BaseCommand):
help = 'Load a movie csv file into the database'
def add_arguments(self, parser):
parser.add_argument('--path', type=str)
def handle(self, *args, **kwargs):
print("Clean old movie data")
Movie.objects.all().delete()
path = kwargs['path']
movie_df = pd.read_csv(path)
for index, row in movie_df.iterrows():
imdb_id = row["imdb_id"]
genres = row["genres"]
release_date = row["release_date"]
original_language = row["original_language"]
original_title = row["original_title"]
overview = row["overview"]
vote_average = row["vote_average"]
vote_count = row["vote_count"]
poster_path = row["poster_path"]
movie = Movie(imdb_id=imdb_id,
genres=genres,
original_title=original_title,
original_language=original_language,
release_date=release_date,
overview=overview,
vote_average=vote_average,
vote_count=vote_count,
poster_path=poster_path)
movie.save()
print(f"{imdb_id} saved...")
| true | true |
f737d2c6fe7baa4f6ad13420d61f7654a55cb13b | 587 | py | Python | exercises/solution_01_11.py | tuomastik/spacy-course | bb7cba6aea221289cf078f36233813794c32f84c | [
"MIT"
] | 4 | 2019-12-31T05:45:44.000Z | 2021-04-20T23:20:03.000Z | exercises/solution_01_11.py | tuomastik/spacy-course | bb7cba6aea221289cf078f36233813794c32f84c | [
"MIT"
] | null | null | null | exercises/solution_01_11.py | tuomastik/spacy-course | bb7cba6aea221289cf078f36233813794c32f84c | [
"MIT"
] | 2 | 2019-10-05T15:13:14.000Z | 2021-06-23T18:36:39.000Z | import spacy
# Import the Matcher
from spacy.matcher import Matcher
nlp = spacy.load("en_core_web_sm")
doc = nlp("New iPhone X release date leaked as Apple reveals pre-orders by mistake")
# Initialize the Matcher with the shared vocabulary
matcher = Matcher(nlp.vocab)
# Create a pattern matching two tokens: "iPhone" and "X"
pattern = [{"TEXT": "iPhone"}, {"TEXT": "X"}]
# Add the pattern to the matcher
matcher.add("IPHONE_X_PATTERN", None, pattern)
# Use the matcher on the doc
matches = matcher(doc)
print("Matches:", [doc[start:end].text for match_id, start, end in matches])
| 27.952381 | 84 | 0.732538 | import spacy
from spacy.matcher import Matcher
nlp = spacy.load("en_core_web_sm")
doc = nlp("New iPhone X release date leaked as Apple reveals pre-orders by mistake")
matcher = Matcher(nlp.vocab)
pattern = [{"TEXT": "iPhone"}, {"TEXT": "X"}]
matcher.add("IPHONE_X_PATTERN", None, pattern)
matches = matcher(doc)
print("Matches:", [doc[start:end].text for match_id, start, end in matches])
| true | true |
f737d32efd3f2b5cdb50bf398eea7aa01fa1fd1d | 39 | py | Python | camera/__init__.py | projectweekend/Pi-Camera-Time-Lapse | bd91c682dbb1280c2f14c6f5a454d1dc10fc0fda | [
"MIT"
] | 7 | 2015-02-25T08:23:57.000Z | 2017-12-21T17:07:49.000Z | camera/__init__.py | projectweekend/Pi-Camera-Time-Lapse | bd91c682dbb1280c2f14c6f5a454d1dc10fc0fda | [
"MIT"
] | 2 | 2015-06-04T15:14:11.000Z | 2015-09-17T18:27:38.000Z | camera/__init__.py | projectweekend/Pi-Camera-Time-Lapse | bd91c682dbb1280c2f14c6f5a454d1dc10fc0fda | [
"MIT"
] | 4 | 2015-04-04T08:14:54.000Z | 2018-03-10T04:28:17.000Z | from .camera import ConfigurableCamera
| 19.5 | 38 | 0.871795 | from .camera import ConfigurableCamera
| true | true |
f737d3c627db4142c5f5067a37535a346f3a9ec6 | 5,202 | py | Python | src/sentry/integrations/slack/event_endpoint.py | uandco/sentry | 5b8d45cb71c6617dac8e64265848623fbfce9c99 | [
"BSD-3-Clause"
] | 2 | 2019-03-04T12:45:54.000Z | 2019-03-04T12:45:55.000Z | src/sentry/integrations/slack/event_endpoint.py | uandco/sentry | 5b8d45cb71c6617dac8e64265848623fbfce9c99 | [
"BSD-3-Clause"
] | 1 | 2020-11-05T14:54:44.000Z | 2020-11-19T21:54:19.000Z | src/sentry/integrations/slack/event_endpoint.py | uandco/sentry | 5b8d45cb71c6617dac8e64265848623fbfce9c99 | [
"BSD-3-Clause"
] | 1 | 2017-02-09T06:36:57.000Z | 2017-02-09T06:36:57.000Z | from __future__ import absolute_import
import json
import re
import six
from collections import defaultdict
from django.conf import settings
from django.db.models import Q
from sentry import http
from sentry.api.base import Endpoint
from sentry.incidents.models import Incident
from sentry.models import Group, Project
from .requests import SlackEventRequest, SlackRequestError
from .utils import (
build_group_attachment,
build_incident_attachment,
logger,
)
# XXX(dcramer): this could be more tightly bound to our configured domain,
# but slack limits what we can unfurl anyways so its probably safe
# Captures (event type, numeric id) from issue/incident URLs.
_link_regexp = re.compile(r'^https?\://[^/]+/[^/]+/[^/]+/(issues|incidents)/(\d+)')
# Captures the organization slug from /organizations/<slug>/ URLs.
_org_slug_regexp = re.compile(r'^https?\://[^/]+/organizations/([^/]+)/')
def unfurl_issues(integration, issue_map):
    """Build Slack unfurl attachments for issue links.

    :param integration: Slack integration whose organizations scope access.
    :param issue_map: mapping of Group id -> original link URL.
    :return: dict of URL -> attachment for each issue we may access.
    """
    accessible_projects = Project.objects.filter(
        organization__in=integration.organizations.all(),
    )
    groups_by_id = {
        group.id: group
        for group in Group.objects.filter(
            id__in=set(issue_map.keys()),
            project__in=accessible_projects,
        )
    }
    if not groups_by_id:
        return {}
    return {
        url: build_group_attachment(groups_by_id[issue_id])
        for issue_id, url in six.iteritems(issue_map)
        if issue_id in groups_by_id
    }
def unfurl_incidents(integration, incident_map):
    """Build Slack unfurl attachments for incident links.

    :param integration: Slack integration whose organizations scope access.
    :param incident_map: mapping of incident identifier -> original link URL.
    :return: dict of URL -> attachment for each incident we may access.
    """
    if not incident_map:
        # An empty Q() below would match *every* incident in the orgs; bail
        # out early instead of issuing a pointless, over-broad query.
        return {}
    # Since we don't have real ids here, we have to also extract the org slug
    # from the url so that we can make sure the identifiers correspond to the
    # correct organization.
    filter_query = Q()
    for identifier, url in six.iteritems(incident_map):
        match = _org_slug_regexp.match(url)
        if match is None:
            # URL has an unexpected shape (no /organizations/<slug>/ part);
            # skip it rather than crash on None.group(1).
            continue
        filter_query |= Q(identifier=identifier, organization__slug=match.group(1))
    if not filter_query:
        # Nothing matched the org-slug pattern.
        return {}
    results = {
        i.identifier: i for i in Incident.objects.filter(
            filter_query,
            # Filter by integration organization here as well to make sure that
            # we have permission to access these incidents.
            organization__in=integration.organizations.all(),
        )
    }
    if not results:
        return {}
    return {
        v: build_incident_attachment(results[k]) for k, v in six.iteritems(incident_map)
        if k in results
    }
# XXX(dcramer): a lot of this is copied from sentry-plugins right now, and will
# need refactored
class SlackEventEndpoint(Endpoint):
    """Receives Slack Events API callbacks (URL verification and link_shared)
    and responds with unfurl attachments for Sentry issue/incident links."""
    # Dispatch table: URL path segment ("issues"/"incidents") -> unfurl builder.
    event_handlers = {
        'issues': unfurl_issues,
        'incidents': unfurl_incidents,
    }
    # Slack signs its own requests; DRF auth/permissions are bypassed and
    # verification happens in SlackEventRequest.validate().
    authentication_classes = ()
    permission_classes = ()
    def _parse_url(self, link):
        """
        Extracts event type and id from a url.
        :param link: Url to parse to information from
        :return: If successful, a tuple containing the event_type and id. If we
            were unsuccessful at matching, a tuple containing two None values
        """
        match = _link_regexp.match(link)
        if not match:
            return None, None
        try:
            return match.group(1), int(match.group(2))
        except (TypeError, ValueError):
            return None, None
    def on_url_verification(self, request, data):
        """Answer Slack's URL-verification handshake by echoing the challenge."""
        return self.respond({
            'challenge': data['challenge'],
        })
    def on_link_shared(self, request, integration, token, data):
        """Build unfurls for any recognized issue/incident links and post them
        back to Slack via chat.unfurl. Returns None (caller falls through to a
        generic 200) on success or when there is nothing to unfurl."""
        # Bucket shared links by event type -> {id: url}.
        parsed_events = defaultdict(dict)
        for item in data['links']:
            event_type, instance_id = self._parse_url(item['url'])
            if not instance_id:
                continue
            parsed_events[event_type][instance_id] = item['url']
        if not parsed_events:
            return
        # Resolve each bucket through its handler into {url: attachment}.
        results = {}
        for event_type, instance_map in parsed_events.items():
            results.update(self.event_handlers[event_type](integration, instance_map))
        if not results:
            return
        # Workspace-token installs store the token under a different key.
        if settings.SLACK_INTEGRATION_USE_WST:
            access_token = integration.metadata['access_token']
        else:
            access_token = integration.metadata['user_access_token']
        payload = {
            'token': access_token,
            'channel': data['channel'],
            'ts': data['message_ts'],
            'unfurls': json.dumps(results),
        }
        session = http.build_session()
        req = session.post('https://slack.com/api/chat.unfurl', data=payload)
        req.raise_for_status()
        resp = req.json()
        # Slack returns HTTP 200 with ok=False on API-level failures.
        if not resp.get('ok'):
            logger.error('slack.event.unfurl-error', extra={'response': resp})
        return self.respond()
    # TODO(dcramer): implement app_uninstalled and tokens_revoked
    def post(self, request):
        """Entry point for all Slack event callbacks: validate the request,
        then dispatch on the event type."""
        try:
            slack_request = SlackEventRequest(request)
            slack_request.validate()
        except SlackRequestError as e:
            return self.respond(status=e.status)
        if slack_request.is_challenge():
            return self.on_url_verification(request, slack_request.data)
        if slack_request.type == 'link_shared':
            resp = self.on_link_shared(
                request,
                slack_request.integration,
                slack_request.data.get('token'),
                slack_request.data.get('event'),
            )
            if resp:
                return resp
        return self.respond()
| 31.337349 | 88 | 0.626682 | from __future__ import absolute_import
import json
import re
import six
from collections import defaultdict
from django.conf import settings
from django.db.models import Q
from sentry import http
from sentry.api.base import Endpoint
from sentry.incidents.models import Incident
from sentry.models import Group, Project
from .requests import SlackEventRequest, SlackRequestError
from .utils import (
build_group_attachment,
build_incident_attachment,
logger,
)
# Captures (event type, numeric id) from issue/incident URLs.
_link_regexp = re.compile(r'^https?\://[^/]+/[^/]+/[^/]+/(issues|incidents)/(\d+)')
# Captures the organization slug from /organizations/<slug>/ URLs.
_org_slug_regexp = re.compile(r'^https?\://[^/]+/organizations/([^/]+)/')
def unfurl_issues(integration, issue_map):
    """Build Slack unfurl attachments for issue links.

    :param integration: Slack integration whose organizations scope access.
    :param issue_map: mapping of Group id -> original link URL.
    :return: dict of URL -> attachment for each issue we may access.
    """
    # Only groups belonging to the integration's organizations are visible.
    results = {
        g.id: g for g in Group.objects.filter(
            id__in=set(issue_map.keys()),
            project__in=Project.objects.filter(
                organization__in=integration.organizations.all(),
            )
        )
    }
    if not results:
        return {}
    return {
        v: build_group_attachment(results[k]) for k, v in six.iteritems(issue_map)
        if k in results
    }
def unfurl_incidents(integration, incident_map):
    """Build Slack unfurl attachments for incident links.

    :param integration: Slack integration whose organizations scope access.
    :param incident_map: mapping of incident identifier -> original link URL.
    :return: dict of URL -> attachment for each incident we may access.
    """
    if not incident_map:
        # An empty Q() below would match *every* incident in the orgs; bail
        # out early instead of issuing a pointless, over-broad query.
        return {}
    # Incident identifiers are only unique per organization, so pair each
    # identifier with the org slug extracted from its URL.
    filter_query = Q()
    for identifier, url in six.iteritems(incident_map):
        match = _org_slug_regexp.match(url)
        if match is None:
            # URL has an unexpected shape (no /organizations/<slug>/ part);
            # skip it rather than crash on None.group(1).
            continue
        filter_query |= Q(identifier=identifier, organization__slug=match.group(1))
    if not filter_query:
        # Nothing matched the org-slug pattern.
        return {}
    results = {
        i.identifier: i for i in Incident.objects.filter(
            filter_query,
            # Filter by integration organization here as well to make sure that
            # we have permission to access these incidents.
            organization__in=integration.organizations.all(),
        )
    }
    if not results:
        return {}
    return {
        v: build_incident_attachment(results[k]) for k, v in six.iteritems(incident_map)
        if k in results
    }
# XXX(dcramer): a lot of this is copied from sentry-plugins right now, and will
# need refactored
class SlackEventEndpoint(Endpoint):
    """Receives Slack Events API callbacks (URL verification and link_shared)
    and responds with unfurl attachments for Sentry issue/incident links."""
    # Dispatch table: URL path segment ("issues"/"incidents") -> unfurl builder.
    event_handlers = {
        'issues': unfurl_issues,
        'incidents': unfurl_incidents,
    }
    # Slack signs its own requests; DRF auth/permissions are bypassed and
    # verification happens in SlackEventRequest.validate().
    authentication_classes = ()
    permission_classes = ()
    def _parse_url(self, link):
        """Extract (event_type, id) from a link URL, or (None, None) when the
        URL does not match the expected issue/incident pattern."""
        match = _link_regexp.match(link)
        if not match:
            return None, None
        try:
            return match.group(1), int(match.group(2))
        except (TypeError, ValueError):
            return None, None
    def on_url_verification(self, request, data):
        """Answer Slack's URL-verification handshake by echoing the challenge."""
        return self.respond({
            'challenge': data['challenge'],
        })
    def on_link_shared(self, request, integration, token, data):
        """Build unfurls for any recognized issue/incident links and post them
        back to Slack via chat.unfurl. Returns None (caller falls through to a
        generic 200) on success or when there is nothing to unfurl."""
        # Bucket shared links by event type -> {id: url}.
        parsed_events = defaultdict(dict)
        for item in data['links']:
            event_type, instance_id = self._parse_url(item['url'])
            if not instance_id:
                continue
            parsed_events[event_type][instance_id] = item['url']
        if not parsed_events:
            return
        # Resolve each bucket through its handler into {url: attachment}.
        results = {}
        for event_type, instance_map in parsed_events.items():
            results.update(self.event_handlers[event_type](integration, instance_map))
        if not results:
            return
        # Workspace-token installs store the token under a different key.
        if settings.SLACK_INTEGRATION_USE_WST:
            access_token = integration.metadata['access_token']
        else:
            access_token = integration.metadata['user_access_token']
        payload = {
            'token': access_token,
            'channel': data['channel'],
            'ts': data['message_ts'],
            'unfurls': json.dumps(results),
        }
        session = http.build_session()
        req = session.post('https://slack.com/api/chat.unfurl', data=payload)
        req.raise_for_status()
        resp = req.json()
        # Slack returns HTTP 200 with ok=False on API-level failures.
        if not resp.get('ok'):
            logger.error('slack.event.unfurl-error', extra={'response': resp})
        return self.respond()
    # TODO(dcramer): implement app_uninstalled and tokens_revoked
    def post(self, request):
        """Entry point for all Slack event callbacks: validate the request,
        then dispatch on the event type."""
        try:
            slack_request = SlackEventRequest(request)
            slack_request.validate()
        except SlackRequestError as e:
            return self.respond(status=e.status)
        if slack_request.is_challenge():
            return self.on_url_verification(request, slack_request.data)
        if slack_request.type == 'link_shared':
            resp = self.on_link_shared(
                request,
                slack_request.integration,
                slack_request.data.get('token'),
                slack_request.data.get('event'),
            )
            if resp:
                return resp
        return self.respond()
| true | true |
f737d416ec73c95a39aa1b7f244b521f02e71b7b | 2,890 | py | Python | apps/network/src/app/workers/worker.py | next-fernandocerezal/PyGrid | b82793b0beecd26338c102573a9891c2e86707c8 | [
"Apache-2.0"
] | 1 | 2021-10-05T18:57:02.000Z | 2021-10-05T18:57:02.000Z | apps/network/src/app/workers/worker.py | next-fernandocerezal/PyGrid | b82793b0beecd26338c102573a9891c2e86707c8 | [
"Apache-2.0"
] | null | null | null | apps/network/src/app/workers/worker.py | next-fernandocerezal/PyGrid | b82793b0beecd26338c102573a9891c2e86707c8 | [
"Apache-2.0"
] | null | null | null | import json
import re
import time
import requests
from ..codes import MSG_FIELD, NODE_EVENTS, WORKER_PROPERTIES
from ..utils.wrappers import threaded
class Worker(object):
    """Worker class for running PySyft models for training and inference.

    Tracks the worker's socket, health-check latency, and the node/model/
    dataset metadata it periodically reports back.
    """

    # Seconds to wait for the external geo-location lookup before giving up.
    LOCATION_REQUEST_TIMEOUT = 5

    def __init__(self, id: str, socket):
        """
        Args:
            id: ID of the worker.
            socket: Socket descriptor used to send/receive messages.
        """
        self._id = id
        self._socket = socket
        self._ping = 0
        self._status = WORKER_PROPERTIES.ONLINE
        # Timestamp of the most recent MONITOR ping. Initialized here so
        # update_node_infos() cannot raise AttributeError if a monitor reply
        # arrives before monitor() has sent its first ping.
        self.__begin = None
        self.connected_nodes = {}
        self.hosted_models = {}
        self.hosted_datasets = {}
        self.cpu_percent = 0
        self.mem_usage = 0

    @property
    def status(self):
        """str: Return the status of the Worker instance."""
        if not self._socket:
            return WORKER_PROPERTIES.OFFLINE
        elif self._ping < WORKER_PROPERTIES.PING_THRESHOLD:
            return WORKER_PROPERTIES.ONLINE
        else:
            return WORKER_PROPERTIES.BUSY

    @property
    def address(self):
        """str: Return the address of the Worker instance (None when offline)."""
        if self._socket:
            addr = self._socket.environ["REMOTE_ADDR"]
            return re.sub("[:f]", "", addr)

    @property
    def location(self):
        """dict of str: Return the worker's geo-location, or {} when unknown."""
        if not self.address:
            return {}
        url = "http://ip-api.com/json/{}".format(self.address)
        try:
            # Bound the external lookup so a slow geo service cannot hang us.
            r = requests.get(url, timeout=self.LOCATION_REQUEST_TIMEOUT)
            result = json.loads(r.text)
        except (requests.RequestException, ValueError):
            # Network failure or malformed payload -- location is unknown.
            return {}
        if result["status"] == "success":
            return {
                "region": result["regionName"],
                "country": result["country"],
                "city": result["city"],
            }
        return {}

    def send(self, message):
        """Send a message from the Worker instance."""
        self._socket.send(message)

    # Run it in a different thread
    @threaded
    def monitor(self):
        """Periodically ping the worker with a MONITOR message while connected."""
        while self._socket:
            self.__begin = time.time()
            self._socket.send(json.dumps({MSG_FIELD.TYPE: NODE_EVENTS.MONITOR}))
            time.sleep(WORKER_PROPERTIES.HEALTH_CHECK_INTERVAL)

    def update_node_infos(self, message):
        """Record the worker's monitor reply: ping latency, connected nodes,
        hosted models/datasets, and CPU/memory usage."""
        if self.__begin:
            end = time.time()
            # Round-trip latency of the last monitor ping, in milliseconds.
            self._ping = (end - self.__begin) * 1000
        self.connected_nodes = message[MSG_FIELD.NODES]
        self.hosted_models = message[MSG_FIELD.MODELS]
        self.hosted_datasets = message[MSG_FIELD.DATASETS]
        self.cpu_percent = message[MSG_FIELD.CPU]
        self.mem_usage = message[MSG_FIELD.MEM_USAGE]
| 33.218391 | 84 | 0.589965 | import json
import re
import time
import requests
from ..codes import MSG_FIELD, NODE_EVENTS, WORKER_PROPERTIES
from ..utils.wrappers import threaded
class Worker(object):
    """Worker class for running PySyft models for training and inference."""
    def __init__(self, id: str, socket):
        """
        Args:
            id: ID of the worker.
            socket: Socket descriptor used to send/receive messages.
        """
        self._id = id
        self._socket = socket
        self._ping = 0
        self._status = WORKER_PROPERTIES.ONLINE
        self.connected_nodes = {}
        self.hosted_models = {}
        self.hosted_datasets = {}
        self.cpu_percent = 0
        self.mem_usage = 0
    @property
    def status(self):
        """str: Return the status of the Worker instance.

        OFFLINE when there is no socket, ONLINE while the last measured ping
        is under the threshold, BUSY otherwise.
        """
        if not self._socket:
            return WORKER_PROPERTIES.OFFLINE
        elif self._ping < WORKER_PROPERTIES.PING_THRESHOLD:
            return WORKER_PROPERTIES.ONLINE
        else:
            return WORKER_PROPERTIES.BUSY
    @property
    def address(self):
        """str: Return the address of the Worker instance (None if no socket)."""
        if self._socket:
            addr = self._socket.environ["REMOTE_ADDR"]
            # Strips ':' and 'f' characters — presumably to reduce an
            # IPv6-mapped IPv4 address such as "::ffff:1.2.3.4" to "1.2.3.4".
            return re.sub("[:f]", "", addr)
    @property
    def location(self):
        """:obj:`dict` of :obj:`str`: Return the location of the Worker instance.

        Queries the external ip-api.com service; returns {} when the lookup
        is not successful.
        """
        if self.address:
            url = "http://ip-api.com/json/{}".format(self.address)
            r = requests.get(url)
            result = json.loads(r.text)
            if result["status"] == "success":
                return {
                    "region": result["regionName"],
                    "country": result["country"],
                    "city": result["city"],
                }
            else:
                return {}
    def send(self, message):
        """Send a message from the Worker instance."""
        self._socket.send(message)
    # Runs in a separate thread so the health-check loop does not block.
    @threaded
    def monitor(self):
        """Monitor the worker and send JSON message across the network."""
        while self._socket:
            self.__begin = time.time()
            self._socket.send(json.dumps({MSG_FIELD.TYPE: NODE_EVENTS.MONITOR}))
            time.sleep(WORKER_PROPERTIES.HEALTH_CHECK_INTERVAL)
    def update_node_infos(self, message):
        """Update information for the connected nodes, hosted models and
        datasets as well as information on CPU and memory usage.

        NOTE(review): ``self.__begin`` only exists after ``monitor()`` has
        run at least once; calling this method earlier raises AttributeError.
        """
        if self.__begin:
            end = time.time()
            # Round-trip time in milliseconds since the last monitor ping.
            self._ping = (end - self.__begin) * 1000
        self.connected_nodes = message[MSG_FIELD.NODES]
        self.hosted_models = message[MSG_FIELD.MODELS]
        self.hosted_datasets = message[MSG_FIELD.DATASETS]
        self.cpu_percent = message[MSG_FIELD.CPU]
        self.mem_usage = message[MSG_FIELD.MEM_USAGE]
| true | true |
f737d559e8be3e1a63af2c0350c54df4358f479d | 2,718 | py | Python | helpers/Testing_augmented_Lagrangian.py | ContactEngineering/Adhesion | acc46ad9bfe49fec667cb9a116ebde426faa38c4 | [
"MIT"
] | null | null | null | helpers/Testing_augmented_Lagrangian.py | ContactEngineering/Adhesion | acc46ad9bfe49fec667cb9a116ebde426faa38c4 | [
"MIT"
] | 4 | 2021-08-18T07:30:57.000Z | 2022-03-05T11:05:09.000Z | helpers/Testing_augmented_Lagrangian.py | ContactEngineering/Adhesion | acc46ad9bfe49fec667cb9a116ebde426faa38c4 | [
"MIT"
] | null | null | null | #
# Copyright 2020 Antoine Sanner
# 2020 Lars Pastewka
# 2015-2016 Till Junge
#
# ### MIT license
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
#
# coding: utf-8
## Testing the Augmented Lagrangian of Adhesion
# The implementation of the augmented Lagrangian in Tools follows closely the description of the `LANCELOT` algorithm described in Bierlaire (2006)
# The function `augmented_lagrangian` has the form of custom minimizer for [scipy.optimize.minimize](http://docs.scipy.org/doc/scipy-0.14.0/reference/generated/scipy.optimize.minimize.html)
# In[4]:
import sys
import os
import numpy as np
import scipy.optimize
sys.path.append(os.path.join(os.getcwd(), "../PyCo/Tools/"))
from AugmentedLagrangian import augmented_lagrangian
### Book example
# Example 20.5: Minimise the function $f(x)$
# $$\min_{x\in\mathbb{R}^2} 2(x_1^2+x_2^2 -1)-x_1$$
# (NOTE(review): the `fun` implemented below computes $(x_1^2+x_2^2-1)-x_1$,
# i.e. without the factor 2 — verify against the book example.)
# under the constraint
# $$ x_1^2 + x_2^2 = 1$$
# ugly workaround to get a fresh AugmentedLagrangian without module loads
# In[9]:
# fname = "../PyCo/Tools/AugmentedLagrangian.py"
# with open(fname) as filehandle:
# content = ''.join((line for line in filehandle))
# exec(content)
# In[11]:
def fun(x):
return (x[0]**2 + x[1]**2 - 1) - x[0]
def constraint(x):
    """Equality constraint g(x) = x1^2 + x2^2 - 1 (zero on the unit circle)."""
    x1, x2 = x[0], x[1]
    return x1 * x1 + x2 * x2 - 1
# Solve the constrained problem with the custom augmented-Lagrangian
# minimizer, passed to scipy.optimize.minimize as a callable `method`.
tol = 1.e-2
result = scipy.optimize.minimize(fun, x0=np.array((-1, .1)),
                                 constraints={'type':'eq','fun':constraint},
                                 method=augmented_lagrangian, tol=tol,
                                 # multiplier0: initial Lagrange-multiplier estimate;
                                 # store_iterates keeps each iterate for inspection.
                                 options={'multiplier0': np.array((0.)),
                                          'disp': True,
                                          'store_iterates': 'iterate'})
print(result)
| 34.405063 | 189 | 0.682855 |
imize
sys.path.append(os.path.join(os.getcwd(), "../PyCo/Tools/"))
from AugmentedLagrangian import augmented_lagrangian
:
return (x[0]**2 + x[1]**2 - 1) - x[0]
def constraint(x):
return x[0]**2 + x[1]**2 - 1
tol = 1.e-2
result = scipy.optimize.minimize(fun, x0=np.array((-1, .1)),
constraints={'type':'eq','fun':constraint},
method=augmented_lagrangian, tol=tol,
options={'multiplier0': np.array((0.)),
'disp': True,
'store_iterates': 'iterate'})
print(result)
| true | true |
f737d6d30c063155e7d6bce682455c215f1ea72d | 45,415 | py | Python | eth/abc.py | davesque/py-evm | d2ccc1e8a9be1cb92cb15a8bb117bb6a1972636e | [
"MIT"
] | null | null | null | eth/abc.py | davesque/py-evm | d2ccc1e8a9be1cb92cb15a8bb117bb6a1972636e | [
"MIT"
] | null | null | null | eth/abc.py | davesque/py-evm | d2ccc1e8a9be1cb92cb15a8bb117bb6a1972636e | [
"MIT"
] | null | null | null | from abc import (
ABC,
abstractmethod
)
from typing import (
Any,
Callable,
ContextManager,
Dict,
Iterable,
Iterator,
MutableMapping,
Optional,
Sequence,
Tuple,
Type,
TypeVar,
Union,
)
from uuid import UUID
import rlp
from eth_bloom import BloomFilter
from eth_typing import (
Address,
BlockNumber,
Hash32,
)
from eth_keys.datatypes import PrivateKey
from eth.constants import (
BLANK_ROOT_HASH,
)
from eth.exceptions import VMError
from eth.typing import (
BytesOrView,
JournalDBCheckpoint,
AccountState,
HeaderParams,
)
from eth.tools.logging import ExtendedDebugLogger
T = TypeVar('T')
class MiningHeaderAPI(rlp.Serializable, ABC):
    """
    A block header without the ``mix_hash`` and ``nonce`` fields
    (see :class:`BlockHeaderAPI`, which adds them).
    """
    parent_hash: Hash32
    uncles_hash: Hash32
    coinbase: Address
    state_root: Hash32
    transaction_root: Hash32
    receipt_root: Hash32
    bloom: int
    difficulty: int
    block_number: BlockNumber
    gas_limit: int
    gas_used: int
    timestamp: int
    extra_data: bytes
class BlockHeaderAPI(MiningHeaderAPI):
    """
    A full block header: :class:`MiningHeaderAPI` plus the ``mix_hash`` and
    ``nonce`` fields.
    """
    mix_hash: Hash32
    nonce: bytes
class LogAPI(rlp.Serializable, ABC):
    """
    A log entry emitted during computation.
    """
    address: Address
    topics: Sequence[int]
    data: bytes
    @property
    @abstractmethod
    def bloomables(self) -> Tuple[bytes, ...]:
        """Return the values of this log that feed the bloom filter."""
        ...
class ReceiptAPI(rlp.Serializable, ABC):
    """
    The outcome of a transaction: post-transaction state root, gas used,
    bloom and emitted logs.
    """
    state_root: bytes
    gas_used: int
    bloom: int
    logs: Sequence[LogAPI]
    @property
    @abstractmethod
    def bloom_filter(self) -> BloomFilter:
        """Return the ``bloom`` value wrapped as a :class:`BloomFilter`."""
        ...
class BaseTransactionAPI(ABC):
    """
    Methods common to signed and unsigned transactions.
    """
    @abstractmethod
    def validate(self) -> None:
        """Validate the transaction fields; raise on invalid values."""
        ...
    @property
    @abstractmethod
    def intrinsic_gas(self) -> int:
        """Convenience property for :meth:`get_intrinsic_gas`."""
        ...
    @abstractmethod
    def get_intrinsic_gas(self) -> int:
        """Return the gas charged before any code runs."""
        ...
    @abstractmethod
    def gas_used_by(self, computation: 'ComputationAPI') -> int:
        """Return the gas used by the given computation (incl. intrinsic cost)."""
        ...
    @abstractmethod
    def copy(self: T, **overrides: Any) -> T:
        """Return a copy of the transaction with the given field overrides."""
        ...
class TransactionFieldsAPI(ABC):
    """
    The common fields of a transaction, including the ``v``/``r``/``s``
    signature components.
    """
    nonce: int
    gas_price: int
    gas: int
    to: Address
    value: int
    data: bytes
    v: int
    r: int
    s: int
    @property
    @abstractmethod
    def hash(self) -> bytes:
        """Return the transaction hash."""
        ...
class UnsignedTransactionAPI(rlp.Serializable, BaseTransactionAPI):
    """
    A transaction that has not yet been signed.
    """
    nonce: int
    gas_price: int
    gas: int
    to: Address
    value: int
    data: bytes
    #
    # API that must be implemented by all Transaction subclasses.
    #
    @abstractmethod
    def as_signed_transaction(self, private_key: PrivateKey) -> 'SignedTransactionAPI':
        """
        Return a version of this transaction which has been signed using the
        provided `private_key`
        """
        ...
class SignedTransactionAPI(rlp.Serializable, BaseTransactionAPI, TransactionFieldsAPI):
    """
    A signed transaction, carrying all transaction fields plus the signature.
    """
    @classmethod
    @abstractmethod
    def from_base_transaction(cls, transaction: 'SignedTransactionAPI') -> 'SignedTransactionAPI':
        """Build an instance of this class from another signed transaction."""
        ...
    @property
    @abstractmethod
    def sender(self) -> Address:
        """Convenience property for :meth:`get_sender`."""
        ...
    # +-------------------------------------------------------------+
    # | API that must be implemented by all Transaction subclasses. |
    # +-------------------------------------------------------------+
    #
    # Validation
    #
    @abstractmethod
    def validate(self) -> None:
        """Validate the transaction fields and signature; raise on failure."""
        ...
    #
    # Signature and Sender
    #
    @property
    @abstractmethod
    def is_signature_valid(self) -> bool:
        """Return whether the ``v``/``r``/``s`` signature is valid."""
        ...
    @abstractmethod
    def check_signature_validity(self) -> None:
        """
        Checks signature validity, raising a ValidationError if the signature
        is invalid.
        """
        ...
    @abstractmethod
    def get_sender(self) -> Address:
        """
        Get the 20-byte address which sent this transaction.
        This can be a slow operation. ``transaction.sender`` is always preferred.
        """
        ...
    #
    # Conversion to and creation of unsigned transactions.
    #
    @abstractmethod
    def get_message_for_signing(self) -> bytes:
        """
        Return the bytestring that should be signed in order to create a signed transactions
        """
        ...
    @classmethod
    @abstractmethod
    def create_unsigned_transaction(cls,
                                    *,
                                    nonce: int,
                                    gas_price: int,
                                    gas: int,
                                    to: Address,
                                    value: int,
                                    data: bytes) -> UnsignedTransactionAPI:
        """
        Create an unsigned transaction.
        """
        ...
class BlockAPI(rlp.Serializable, ABC):
    """
    A block: header, transactions and uncles.
    """
    # Set by concrete subclasses; ``None`` here as a placeholder.
    transaction_class: Type[SignedTransactionAPI] = None
    @classmethod
    @abstractmethod
    def get_transaction_class(cls) -> Type[SignedTransactionAPI]:
        """Return the transaction class used by this block."""
        ...
    @classmethod
    @abstractmethod
    def from_header(cls, header: BlockHeaderAPI, chaindb: 'ChainDatabaseAPI') -> 'BlockAPI':
        """Instantiate a block from the given header, loading the rest from ``chaindb``."""
        ...
    @property
    @abstractmethod
    def hash(self) -> Hash32:
        """Return the block hash."""
        ...
    @property
    @abstractmethod
    def number(self) -> int:
        """Return the block number."""
        ...
    @property
    @abstractmethod
    def is_genesis(self) -> bool:
        """Return whether this is the genesis block."""
        ...
class DatabaseAPI(MutableMapping[bytes, bytes], ABC):
    """
    A key-value database with a bytes-to-bytes mapping interface.
    """
    @abstractmethod
    def set(self, key: bytes, value: bytes) -> None:
        """Assign ``value`` to ``key``."""
        ...
    @abstractmethod
    def exists(self, key: bytes) -> bool:
        """Return ``True`` if ``key`` exists in the database."""
        ...
    @abstractmethod
    def delete(self, key: bytes) -> None:
        """Delete ``key`` from the database."""
        ...
class AtomicDatabaseAPI(DatabaseAPI):
    """
    A :class:`DatabaseAPI` that can apply a group of writes atomically.
    """
    @abstractmethod
    def atomic_batch(self) -> ContextManager[DatabaseAPI]:
        """Return a context manager whose writes commit atomically on exit."""
        ...
class HeaderDatabaseAPI(ABC):
    """
    A database of block headers, tracking the canonical chain.
    """
    db: AtomicDatabaseAPI
    @abstractmethod
    def __init__(self, db: AtomicDatabaseAPI) -> None:
        """Instantiate the database backed by ``db``."""
        ...
    #
    # Canonical Chain API
    #
    @abstractmethod
    def get_canonical_block_hash(self, block_number: BlockNumber) -> Hash32:
        """Return the canonical block hash at ``block_number``."""
        ...
    @abstractmethod
    def get_canonical_block_header_by_number(self, block_number: BlockNumber) -> BlockHeaderAPI:
        """Return the canonical header at ``block_number``."""
        ...
    @abstractmethod
    def get_canonical_head(self) -> BlockHeaderAPI:
        """Return the header at the head of the canonical chain."""
        ...
    #
    # Header API
    #
    @abstractmethod
    def get_block_header_by_hash(self, block_hash: Hash32) -> BlockHeaderAPI:
        """Return the header with the given ``block_hash``."""
        ...
    @abstractmethod
    def get_score(self, block_hash: Hash32) -> int:
        """Return the score (total difficulty) of the given block."""
        ...
    @abstractmethod
    def header_exists(self, block_hash: Hash32) -> bool:
        """Return whether a header with ``block_hash`` is stored."""
        ...
    @abstractmethod
    def persist_header(self,
                       header: BlockHeaderAPI
                       ) -> Tuple[Tuple[BlockHeaderAPI, ...], Tuple[BlockHeaderAPI, ...]]:
        """Persist ``header``; return (new canonical headers, old canonical headers)."""
        ...
    @abstractmethod
    def persist_header_chain(self,
                             headers: Sequence[BlockHeaderAPI]
                             ) -> Tuple[Tuple[BlockHeaderAPI, ...], Tuple[BlockHeaderAPI, ...]]:
        """Persist a chain of headers; return (new canonical headers, old canonical headers)."""
        ...
class ChainDatabaseAPI(HeaderDatabaseAPI):
    """
    A :class:`HeaderDatabaseAPI` extended with blocks, uncles, transactions,
    receipts and raw trie data.
    """
    #
    # Header API
    #
    @abstractmethod
    def get_block_uncles(self, uncles_hash: Hash32) -> Tuple[BlockHeaderAPI, ...]:
        """Return the uncle headers stored under ``uncles_hash``."""
        ...
    #
    # Block API
    #
    @abstractmethod
    def persist_block(self,
                      block: BlockAPI
                      ) -> Tuple[Tuple[Hash32, ...], Tuple[Hash32, ...]]:
        """Persist ``block``; return (new canonical hashes, old canonical hashes)."""
        ...
    @abstractmethod
    def persist_uncles(self, uncles: Tuple[BlockHeaderAPI]) -> Hash32:
        """Persist the uncle headers; return the hash they are stored under."""
        ...
    #
    # Transaction API
    #
    @abstractmethod
    def add_receipt(self,
                    block_header: BlockHeaderAPI,
                    index_key: int, receipt: ReceiptAPI) -> Hash32:
        """Store ``receipt`` at ``index_key``; return the updated receipt root."""
        ...
    @abstractmethod
    def add_transaction(self,
                        block_header: BlockHeaderAPI,
                        index_key: int, transaction: SignedTransactionAPI) -> Hash32:
        """Store ``transaction`` at ``index_key``; return the updated transaction root."""
        ...
    @abstractmethod
    def get_block_transactions(
            self,
            block_header: BlockHeaderAPI,
            transaction_class: Type[SignedTransactionAPI]) -> Sequence[SignedTransactionAPI]:
        """Return the transactions of the given block, decoded as ``transaction_class``."""
        ...
    @abstractmethod
    def get_block_transaction_hashes(self, block_header: BlockHeaderAPI) -> Tuple[Hash32, ...]:
        """Return the hashes of the given block's transactions."""
        ...
    @abstractmethod
    def get_receipt_by_index(self,
                             block_number: BlockNumber,
                             receipt_index: int) -> ReceiptAPI:
        """Return the receipt at ``receipt_index`` of the canonical block ``block_number``."""
        ...
    @abstractmethod
    def get_receipts(self,
                     header: BlockHeaderAPI,
                     receipt_class: Type[ReceiptAPI]) -> Tuple[ReceiptAPI, ...]:
        """Return the receipts of the given block, decoded as ``receipt_class``."""
        ...
    @abstractmethod
    def get_transaction_by_index(
            self,
            block_number: BlockNumber,
            transaction_index: int,
            transaction_class: Type[SignedTransactionAPI]) -> SignedTransactionAPI:
        """Return the transaction at ``transaction_index`` of canonical block ``block_number``."""
        ...
    @abstractmethod
    def get_transaction_index(self, transaction_hash: Hash32) -> Tuple[BlockNumber, int]:
        """Return (block number, index) where the transaction is included."""
        ...
    #
    # Raw Database API
    #
    @abstractmethod
    def exists(self, key: bytes) -> bool:
        """Return whether ``key`` exists in the underlying database."""
        ...
    @abstractmethod
    def get(self, key: bytes) -> bytes:
        """Return the raw value stored under ``key``."""
        ...
    @abstractmethod
    def persist_trie_data_dict(self, trie_data_dict: Dict[Hash32, bytes]) -> None:
        """Store each (hash, value) pair of trie data in the database."""
        ...
class GasMeterAPI(ABC):
    """
    A gas meter tracking remaining and refunded gas during computation.
    """
    gas_refunded: int
    gas_remaining: int
    #
    # Write API
    #
    @abstractmethod
    def consume_gas(self, amount: int, reason: str) -> None:
        """Consume ``amount`` of gas, recording ``reason`` for reporting."""
        ...
    @abstractmethod
    def return_gas(self, amount: int) -> None:
        """Return ``amount`` of gas to the remaining pool."""
        ...
    @abstractmethod
    def refund_gas(self, amount: int) -> None:
        """Add ``amount`` of gas to the refund counter."""
        ...
class MessageAPI(ABC):
    """
    A message for VM computation.
    """
    code: bytes
    _code_address: Address
    create_address: Address
    data: BytesOrView
    depth: int
    gas: int
    is_static: bool
    sender: Address
    should_transfer_value: bool
    _storage_address: Address
    to: Address
    value: int
    __slots__ = [
        'code',
        '_code_address',
        'create_address',
        'data',
        'depth',
        'gas',
        'is_static',
        'sender',
        'should_transfer_value',
        # BUG FIX: a missing comma after '_storage_address' previously caused
        # implicit string concatenation, producing a single bogus slot named
        # '_storage_addressto' and omitting the 'to' slot entirely.
        '_storage_address',
        'to',
        'value',
    ]
    @property
    @abstractmethod
    def code_address(self) -> Address:
        """Return the address of the code being executed."""
        ...
    @property
    @abstractmethod
    def storage_address(self) -> Address:
        """Return the address whose storage the computation acts on."""
        ...
    @property
    @abstractmethod
    def is_create(self) -> bool:
        """Return whether this message creates a new contract."""
        ...
    @property
    @abstractmethod
    def data_as_bytes(self) -> bytes:
        """Return the message data coerced to ``bytes``."""
        ...
class OpcodeAPI(ABC):
    """
    A callable opcode with a human-readable ``mnemonic``.
    """
    mnemonic: str
    @abstractmethod
    def __call__(self, computation: 'ComputationAPI') -> None:
        """Execute the opcode against ``computation``."""
        ...
    @classmethod
    @abstractmethod
    def as_opcode(cls: Type[T],
                  logic_fn: Callable[['ComputationAPI'], None],
                  mnemonic: str,
                  gas_cost: int) -> Type[T]:
        """Build an opcode class from a logic function, mnemonic and gas cost."""
        ...
    @abstractmethod
    def __copy__(self) -> 'OpcodeAPI':
        """Return a shallow copy of the opcode."""
        ...
    @abstractmethod
    def __deepcopy__(self, memo: Any) -> 'OpcodeAPI':
        """Return a deep copy of the opcode."""
        ...
class TransactionContextAPI(ABC):
    """
    Per-transaction context: gas price, origin address and a log counter.
    """
    @abstractmethod
    def __init__(self, gas_price: int, origin: Address) -> None:
        """Instantiate the context with the given gas price and origin."""
        ...
    @abstractmethod
    def get_next_log_counter(self) -> int:
        """Return the next value of the monotonically increasing log counter."""
        ...
    @property
    @abstractmethod
    def gas_price(self) -> int:
        """Return the gas price of the transaction."""
        ...
    @property
    @abstractmethod
    def origin(self) -> Address:
        """Return the origin address of the transaction."""
        ...
class MemoryAPI(ABC):
    """
    Byte-addressed memory used during computation.
    """
    @abstractmethod
    def extend(self, start_position: int, size: int) -> None:
        """Extend memory to cover ``size`` bytes starting at ``start_position``."""
        ...
    @abstractmethod
    def __len__(self) -> int:
        """Return the current memory size in bytes."""
        ...
    @abstractmethod
    def write(self, start_position: int, size: int, value: bytes) -> None:
        """Write ``value`` into memory at ``start_position``."""
        ...
    @abstractmethod
    def read(self, start_position: int, size: int) -> memoryview:
        """Return a zero-copy view of ``size`` bytes at ``start_position``."""
        ...
    @abstractmethod
    def read_bytes(self, start_position: int, size: int) -> bytes:
        """Return a ``bytes`` copy of ``size`` bytes at ``start_position``."""
        ...
class StackAPI(ABC):
    """
    The computation stack, holding items as ints or bytes.
    """
    @abstractmethod
    def push_int(self, value: int) -> None:
        """Push an integer onto the stack."""
        ...
    @abstractmethod
    def push_bytes(self, value: bytes) -> None:
        """Push a bytes value onto the stack."""
        ...
    @abstractmethod
    def pop1_bytes(self) -> bytes:
        """Pop one item, returned as bytes."""
        ...
    @abstractmethod
    def pop1_int(self) -> int:
        """Pop one item, returned as an int."""
        ...
    @abstractmethod
    def pop1_any(self) -> Union[int, bytes]:
        """Pop one item in whichever representation it is stored."""
        ...
    @abstractmethod
    def pop_any(self, num_items: int) -> Tuple[Union[int, bytes], ...]:
        """Pop ``num_items`` items in their stored representation."""
        ...
    @abstractmethod
    def pop_ints(self, num_items: int) -> Tuple[int, ...]:
        """Pop ``num_items`` items, each returned as an int."""
        ...
    @abstractmethod
    def pop_bytes(self, num_items: int) -> Tuple[bytes, ...]:
        """Pop ``num_items`` items, each returned as bytes."""
        ...
    @abstractmethod
    def swap(self, position: int) -> None:
        """Swap the top of the stack with the item at ``position``."""
        ...
    @abstractmethod
    def dup(self, position: int) -> None:
        """Duplicate the item at ``position`` onto the top of the stack."""
        ...
class CodeStreamAPI(ABC):
    """
    A stream of bytecode with a program counter ``pc``.
    """
    pc: int
    @abstractmethod
    def read(self, size: int) -> bytes:
        """Read ``size`` bytes from the stream, advancing ``pc``."""
        ...
    @abstractmethod
    def __len__(self) -> int:
        """Return the length of the code."""
        ...
    @abstractmethod
    def __getitem__(self, i: int) -> int:
        """Return the byte at position ``i``."""
        ...
    @abstractmethod
    def __iter__(self) -> Iterator[int]:
        """Iterate over the bytes of the code."""
        ...
    @abstractmethod
    def peek(self) -> int:
        """Return the next byte without advancing ``pc``."""
        ...
    @abstractmethod
    def seek(self, pc: int) -> ContextManager['CodeStreamAPI']:
        """Temporarily move ``pc`` to the given position within a context."""
        ...
    @abstractmethod
    def is_valid_opcode(self, position: int) -> bool:
        """Return whether ``position`` holds a valid opcode (not push data)."""
        ...
class StackManipulationAPI(ABC):
    """
    Convenience wrappers around the stack push/pop operations.
    """
    @abstractmethod
    def stack_pop_ints(self, num_items: int) -> Tuple[int, ...]:
        """Pop and return ``num_items`` integers from the stack."""
        ...
    @abstractmethod
    def stack_pop_bytes(self, num_items: int) -> Tuple[bytes, ...]:
        """Pop and return ``num_items`` bytes values from the stack."""
        ...
    @abstractmethod
    def stack_pop_any(self, num_items: int) -> Tuple[Union[int, bytes], ...]:
        """Pop ``num_items`` items in their stored representation."""
        ...
    @abstractmethod
    def stack_pop1_int(self) -> int:
        """Pop one item as an int."""
        ...
    @abstractmethod
    def stack_pop1_bytes(self) -> bytes:
        """Pop one item as bytes."""
        ...
    @abstractmethod
    def stack_pop1_any(self) -> Union[int, bytes]:
        """Pop one item in its stored representation."""
        ...
    @abstractmethod
    def stack_push_int(self, value: int) -> None:
        """Push an integer onto the stack."""
        ...
    @abstractmethod
    def stack_push_bytes(self, value: bytes) -> None:
        """Push a bytes value onto the stack."""
        ...
class ExecutionContextAPI(ABC):
    """
    Block-level context available during execution: coinbase, timestamp,
    block number, difficulty, gas limit and previous block hashes.
    """
    coinbase: Address
    timestamp: int
    block_number: int
    difficulty: int
    gas_limit: int
    prev_hashes: Sequence[Hash32]
class ComputationAPI(ContextManager['ComputationAPI'], StackManipulationAPI):
    """
    The result and runtime environment of applying a message to the VM,
    combining stack/memory/gas management, child computations, logging and
    the final output or error.
    """
    msg: MessageAPI
    logger: ExtendedDebugLogger
    code: CodeStreamAPI
    # Populated by concrete subclasses; ``None`` here as a placeholder.
    opcodes: Dict[int, OpcodeAPI] = None
    state: 'StateAPI'
    return_data: bytes
    @abstractmethod
    def __init__(self,
                 state: 'StateAPI',
                 message: MessageAPI,
                 transaction_context: TransactionContextAPI) -> None:
        """Instantiate the computation for ``message`` against ``state``."""
        ...
    #
    # Convenience
    #
    @property
    @abstractmethod
    def is_origin_computation(self) -> bool:
        """Return whether this is the outermost (origin) computation."""
        ...
    #
    # Error handling
    #
    @property
    @abstractmethod
    def is_success(self) -> bool:
        """Return whether the computation completed without error."""
        ...
    @property
    @abstractmethod
    def is_error(self) -> bool:
        """Return whether the computation ended in an error."""
        ...
    @property
    @abstractmethod
    def error(self) -> VMError:
        """Return the error raised during computation, if any."""
        ...
    @error.setter
    def error(self, value: VMError) -> None:
        # See: https://github.com/python/mypy/issues/4165
        # Since we can't also decorate this with abstract method we want to be
        # sure that the setter doesn't actually get used as a noop.
        raise NotImplementedError
    @abstractmethod
    def raise_if_error(self) -> None:
        """Re-raise the computation error if one occurred."""
        ...
    @property
    @abstractmethod
    def should_burn_gas(self) -> bool:
        """Return whether remaining gas is burned due to the error type."""
        ...
    @property
    @abstractmethod
    def should_return_gas(self) -> bool:
        """Return whether remaining gas is returned to the caller."""
        ...
    @property
    @abstractmethod
    def should_erase_return_data(self) -> bool:
        """Return whether the return data must be discarded."""
        ...
    #
    # Memory Management
    #
    @abstractmethod
    def extend_memory(self, start_position: int, size: int) -> None:
        ...
    @abstractmethod
    def memory_write(self, start_position: int, size: int, value: bytes) -> None:
        ...
    @abstractmethod
    def memory_read(self, start_position: int, size: int) -> memoryview:
        ...
    @abstractmethod
    def memory_read_bytes(self, start_position: int, size: int) -> bytes:
        ...
    #
    # Gas Consumption
    #
    @abstractmethod
    def get_gas_meter(self) -> GasMeterAPI:
        ...
    @abstractmethod
    def consume_gas(self, amount: int, reason: str) -> None:
        ...
    @abstractmethod
    def return_gas(self, amount: int) -> None:
        ...
    @abstractmethod
    def refund_gas(self, amount: int) -> None:
        ...
    @abstractmethod
    def get_gas_refund(self) -> int:
        ...
    @abstractmethod
    def get_gas_used(self) -> int:
        ...
    @abstractmethod
    def get_gas_remaining(self) -> int:
        ...
    #
    # Stack management
    #
    @abstractmethod
    def stack_swap(self, position: int) -> None:
        ...
    @abstractmethod
    def stack_dup(self, position: int) -> None:
        ...
    #
    # Computation result
    #
    @property
    @abstractmethod
    def output(self) -> bytes:
        """Return the output bytes of the computation."""
        ...
    @output.setter
    def output(self, value: bytes) -> None:
        # See: https://github.com/python/mypy/issues/4165
        # Since we can't also decorate this with abstract method we want to be
        # sure that the setter doesn't actually get used as a noop.
        raise NotImplementedError
    #
    # Runtime operations
    #
    @abstractmethod
    def prepare_child_message(self,
                              gas: int,
                              to: Address,
                              value: int,
                              data: BytesOrView,
                              code: bytes,
                              **kwargs: Any) -> MessageAPI:
        """Build a message for a child computation."""
        ...
    @abstractmethod
    def apply_child_computation(self, child_msg: MessageAPI) -> 'ComputationAPI':
        """Generate, run and register the child computation for ``child_msg``."""
        ...
    @abstractmethod
    def generate_child_computation(self, child_msg: MessageAPI) -> 'ComputationAPI':
        ...
    @abstractmethod
    def add_child_computation(self, child_computation: 'ComputationAPI') -> None:
        ...
    #
    # Account management
    #
    @abstractmethod
    def register_account_for_deletion(self, beneficiary: Address) -> None:
        ...
    @abstractmethod
    def get_accounts_for_deletion(self) -> Tuple[Tuple[Address, Address], ...]:
        ...
    #
    # EVM logging
    #
    @abstractmethod
    def add_log_entry(self, account: Address, topics: Tuple[int, ...], data: bytes) -> None:
        ...
    @abstractmethod
    def get_raw_log_entries(self) -> Tuple[Tuple[int, bytes, Tuple[int, ...], bytes], ...]:
        ...
    @abstractmethod
    def get_log_entries(self) -> Tuple[Tuple[bytes, Tuple[int, ...], bytes], ...]:
        ...
    #
    # State Transition
    #
    @abstractmethod
    def apply_message(self) -> 'ComputationAPI':
        ...
    @abstractmethod
    def apply_create_message(self) -> 'ComputationAPI':
        ...
    @classmethod
    @abstractmethod
    def apply_computation(cls,
                          state: 'StateAPI',
                          message: MessageAPI,
                          transaction_context: TransactionContextAPI) -> 'ComputationAPI':
        """Run the full computation for ``message`` and return the result."""
        ...
    #
    # Opcode API
    #
    @property
    @abstractmethod
    def precompiles(self) -> Dict[Address, Callable[['ComputationAPI'], None]]:
        """Return the mapping of precompile addresses to their handlers."""
        ...
    @abstractmethod
    def get_opcode_fn(self, opcode: int) -> OpcodeAPI:
        """Return the handler for the given ``opcode`` value."""
        ...
class AccountStorageDatabaseAPI(ABC):
    """
    Storage for a single account, with journaling (record/discard/commit)
    and explicit storage-root management.
    """
    @abstractmethod
    def get(self, slot: int, from_journal: bool=True) -> int:
        """Return the value at ``slot``, optionally bypassing the journal."""
        ...
    @abstractmethod
    def set(self, slot: int, value: int) -> None:
        """Write ``value`` to ``slot``."""
        ...
    @abstractmethod
    def delete(self) -> None:
        """Delete all storage of the account."""
        ...
    @abstractmethod
    def record(self, checkpoint: JournalDBCheckpoint) -> None:
        """Record a journal checkpoint."""
        ...
    @abstractmethod
    def discard(self, checkpoint: JournalDBCheckpoint) -> None:
        """Roll back to the given checkpoint."""
        ...
    @abstractmethod
    def commit(self, checkpoint: JournalDBCheckpoint) -> None:
        """Collapse changes into the given checkpoint."""
        ...
    @abstractmethod
    def make_storage_root(self) -> None:
        """Force calculation of the storage root for this account."""
        ...
    @property
    @abstractmethod
    def has_changed_root(self) -> bool:
        """Return whether the storage root has changed."""
        ...
    @abstractmethod
    def get_changed_root(self) -> Hash32:
        """Return the changed storage root."""
        ...
    @abstractmethod
    def persist(self, db: DatabaseAPI) -> None:
        """Persist all changes to ``db``."""
        ...
class AccountDatabaseAPI(ABC):
    """
    A database of accounts: balances, nonces, code and storage, with
    journaling support and state-root management.
    """
    @abstractmethod
    def __init__(self, db: AtomicDatabaseAPI, state_root: Hash32 = BLANK_ROOT_HASH) -> None:
        """Instantiate the database against ``db`` at the given ``state_root``."""
        ...
    @property
    @abstractmethod
    def state_root(self) -> Hash32:
        """Return the current state root."""
        ...
    @abstractmethod
    def has_root(self, state_root: bytes) -> bool:
        """Return whether the given ``state_root`` is present."""
        ...
    #
    # Storage
    #
    @abstractmethod
    def get_storage(self, address: Address, slot: int, from_journal: bool=True) -> int:
        ...
    @abstractmethod
    def set_storage(self, address: Address, slot: int, value: int) -> None:
        ...
    @abstractmethod
    def delete_storage(self, address: Address) -> None:
        ...
    #
    # Balance
    #
    @abstractmethod
    def get_balance(self, address: Address) -> int:
        ...
    @abstractmethod
    def set_balance(self, address: Address, balance: int) -> None:
        ...
    #
    # Nonce
    #
    @abstractmethod
    def get_nonce(self, address: Address) -> int:
        ...
    @abstractmethod
    def set_nonce(self, address: Address, nonce: int) -> None:
        ...
    @abstractmethod
    def increment_nonce(self, address: Address) -> None:
        ...
    #
    # Code
    #
    @abstractmethod
    def set_code(self, address: Address, code: bytes) -> None:
        ...
    @abstractmethod
    def get_code(self, address: Address) -> bytes:
        ...
    @abstractmethod
    def get_code_hash(self, address: Address) -> Hash32:
        ...
    @abstractmethod
    def delete_code(self, address: Address) -> None:
        ...
    #
    # Account Methods
    #
    @abstractmethod
    def account_has_code_or_nonce(self, address: Address) -> bool:
        ...
    @abstractmethod
    def delete_account(self, address: Address) -> None:
        ...
    @abstractmethod
    def account_exists(self, address: Address) -> bool:
        ...
    @abstractmethod
    def touch_account(self, address: Address) -> None:
        ...
    @abstractmethod
    def account_is_empty(self, address: Address) -> bool:
        ...
    #
    # Record and discard API
    #
    @abstractmethod
    def record(self) -> JournalDBCheckpoint:
        """Record and return a new journal checkpoint."""
        ...
    @abstractmethod
    def discard(self, checkpoint: JournalDBCheckpoint) -> None:
        """Roll back to the given checkpoint."""
        ...
    @abstractmethod
    def commit(self, checkpoint: JournalDBCheckpoint) -> None:
        """Collapse changes into the given checkpoint."""
        ...
    @abstractmethod
    def make_state_root(self) -> Hash32:
        """
        Generate the state root with all the current changes in AccountDB
        Current changes include every pending change to storage, as well as all account changes.
        After generating all the required tries, the final account state root is returned.
        This is an expensive operation, so should be called as little as possible. For example,
        pre-Byzantium, this is called after every transaction, because we need the state root
        in each receipt. Byzantium+, we only need state roots at the end of the block,
        so we *only* call it right before persistance.
        :return: the new state root
        """
        ...
    @abstractmethod
    def persist(self) -> None:
        """
        Send changes to underlying database, including the trie state
        so that it will forever be possible to read the trie from this checkpoint.
        :meth:`make_state_root` must be explicitly called before this method.
        Otherwise persist will raise a ValidationError.
        """
        ...
class TransactionExecutorAPI(ABC):
    """
    The pipeline for executing a transaction against a state:
    validate -> build message -> build computation -> finalize.
    """
    @abstractmethod
    def __init__(self, vm_state: 'StateAPI') -> None:
        """Instantiate the executor against ``vm_state``."""
        ...
    @abstractmethod
    def __call__(self, transaction: SignedTransactionAPI) -> 'ComputationAPI':
        """Run the full pipeline for ``transaction`` and return the computation."""
        ...
    @abstractmethod
    def validate_transaction(self, transaction: SignedTransactionAPI) -> None:
        """Validate the transaction against the current state; raise on failure."""
        ...
    @abstractmethod
    def build_evm_message(self, transaction: SignedTransactionAPI) -> MessageAPI:
        """Build the VM message for the transaction."""
        ...
    @abstractmethod
    def build_computation(self,
                          message: MessageAPI,
                          transaction: SignedTransactionAPI) -> 'ComputationAPI':
        """Apply ``message`` to the VM and return the resulting computation."""
        ...
    @abstractmethod
    def finalize_computation(self,
                             transaction: SignedTransactionAPI,
                             computation: 'ComputationAPI') -> 'ComputationAPI':
        """Apply post-run rules (refunds, fees, deletions) and return the computation."""
        ...
class ConfigurableAPI(ABC):
    """
    A class providing inline subclassing via :meth:`configure`.
    """
    # NOTE(review): ``configure`` is intentionally not abstract here and has
    # an empty body; implementations are expected to provide it — confirm.
    @classmethod
    def configure(cls: Type[T],
                  __name__: str=None,
                  **overrides: Any) -> Type[T]:
        ...
class StateAPI(ConfigurableAPI):
    """
    The state of the virtual machine: account data access, journaling via
    snapshot/revert/commit, and transaction execution.
    """
    #
    # Set from __init__
    #
    execution_context: ExecutionContextAPI
    computation_class: Type[ComputationAPI]
    transaction_context_class: Type[TransactionContextAPI]
    account_db_class: Type[AccountDatabaseAPI]
    # Populated by concrete subclasses; ``None`` here as a placeholder.
    transaction_executor_class: Type[TransactionExecutorAPI] = None
    @abstractmethod
    def __init__(
            self,
            db: AtomicDatabaseAPI,
            execution_context: ExecutionContextAPI,
            state_root: bytes) -> None:
        """Instantiate the state over ``db`` at ``state_root``."""
        ...
    @property
    @abstractmethod
    def logger(self) -> ExtendedDebugLogger:
        """Return the logger for this state."""
        ...
    #
    # Block Object Properties (in opcodes)
    #
    @property
    @abstractmethod
    def coinbase(self) -> Address:
        """Return the coinbase address from the execution context."""
        ...
    @property
    @abstractmethod
    def timestamp(self) -> int:
        """Return the block timestamp from the execution context."""
        ...
    @property
    @abstractmethod
    def block_number(self) -> int:
        """Return the block number from the execution context."""
        ...
    @property
    @abstractmethod
    def difficulty(self) -> int:
        """Return the block difficulty from the execution context."""
        ...
    @property
    @abstractmethod
    def gas_limit(self) -> int:
        """Return the block gas limit from the execution context."""
        ...
    #
    # Access to account db
    #
    @classmethod
    @abstractmethod
    def get_account_db_class(cls) -> Type[AccountDatabaseAPI]:
        """Return the :class:`AccountDatabaseAPI` implementation in use."""
        ...
    @property
    @abstractmethod
    def state_root(self) -> Hash32:
        """Return the current state root."""
        ...
    @abstractmethod
    def make_state_root(self) -> Hash32:
        """Force calculation and return of the state root."""
        ...
    @abstractmethod
    def get_storage(self, address: Address, slot: int, from_journal: bool=True) -> int:
        ...
    @abstractmethod
    def set_storage(self, address: Address, slot: int, value: int) -> None:
        ...
    @abstractmethod
    def delete_storage(self, address: Address) -> None:
        ...
    @abstractmethod
    def delete_account(self, address: Address) -> None:
        ...
    @abstractmethod
    def get_balance(self, address: Address) -> int:
        ...
    @abstractmethod
    def set_balance(self, address: Address, balance: int) -> None:
        ...
    @abstractmethod
    def delta_balance(self, address: Address, delta: int) -> None:
        ...
    @abstractmethod
    def get_nonce(self, address: Address) -> int:
        ...
    @abstractmethod
    def set_nonce(self, address: Address, nonce: int) -> None:
        ...
    @abstractmethod
    def increment_nonce(self, address: Address) -> None:
        ...
    @abstractmethod
    def get_code(self, address: Address) -> bytes:
        ...
    @abstractmethod
    def set_code(self, address: Address, code: bytes) -> None:
        ...
    @abstractmethod
    def get_code_hash(self, address: Address) -> Hash32:
        ...
    @abstractmethod
    def delete_code(self, address: Address) -> None:
        ...
    @abstractmethod
    def has_code_or_nonce(self, address: Address) -> bool:
        ...
    @abstractmethod
    def account_exists(self, address: Address) -> bool:
        ...
    @abstractmethod
    def touch_account(self, address: Address) -> None:
        ...
    @abstractmethod
    def account_is_empty(self, address: Address) -> bool:
        ...
    #
    # Access self._chaindb
    #
    @abstractmethod
    def snapshot(self) -> Tuple[Hash32, UUID]:
        """Take and return a snapshot of the current state."""
        ...
    @abstractmethod
    def revert(self, snapshot: Tuple[Hash32, UUID]) -> None:
        """Revert the state to the given snapshot."""
        ...
    @abstractmethod
    def commit(self, snapshot: Tuple[Hash32, UUID]) -> None:
        """Commit changes accumulated since the given snapshot."""
        ...
    @abstractmethod
    def persist(self) -> None:
        """Persist the current state to the underlying database."""
        ...
    #
    # Access self.prev_hashes (Read-only)
    #
    @abstractmethod
    def get_ancestor_hash(self, block_number: int) -> Hash32:
        """Return the hash of the ancestor block at ``block_number``."""
        ...
    #
    # Computation
    #
    @abstractmethod
    def get_computation(self,
                        message: MessageAPI,
                        transaction_context: TransactionContextAPI) -> ComputationAPI:
        """Return a computation instance for the given message and context."""
        ...
    #
    # Transaction context
    #
    @classmethod
    @abstractmethod
    def get_transaction_context_class(cls) -> Type[TransactionContextAPI]:
        """Return the :class:`TransactionContextAPI` implementation in use."""
        ...
    #
    # Execution
    #
    @abstractmethod
    def apply_transaction(self, transaction: SignedTransactionAPI) -> ComputationAPI:
        """
        Apply transaction to the vm state
        :param transaction: the transaction to apply
        :return: the computation
        """
        ...
    @abstractmethod
    def get_transaction_executor(self) -> TransactionExecutorAPI:
        """Return the transaction executor for this state."""
        ...
    @abstractmethod
    def costless_execute_transaction(self,
                                     transaction: SignedTransactionAPI) -> ComputationAPI:
        """Execute the transaction with a zero gas price."""
        ...
    @abstractmethod
    def override_transaction_context(self, gas_price: int) -> ContextManager[None]:
        """Return a context manager that overrides the transaction gas price."""
        ...
    @abstractmethod
    def validate_transaction(self, transaction: SignedTransactionAPI) -> None:
        """Validate the transaction against this state; raise on failure."""
        ...
    @classmethod
    @abstractmethod
    def get_transaction_context(cls,
                                transaction: SignedTransactionAPI) -> TransactionContextAPI:
        """Return the transaction context for the given transaction."""
        ...
class VirtualMachineAPI(ConfigurableAPI):
fork: str # noqa: E701 # flake8 bug that's fixed in 3.6.0+
chaindb: ChainDatabaseAPI
@abstractmethod
def __init__(self, header: BlockHeaderAPI, chaindb: ChainDatabaseAPI) -> None:
...
@property
@abstractmethod
def state(self) -> StateAPI:
...
@classmethod
@abstractmethod
def build_state(cls,
db: AtomicDatabaseAPI,
header: BlockHeaderAPI,
previous_hashes: Iterable[Hash32] = ()
) -> StateAPI:
...
@abstractmethod
def get_header(self) -> BlockHeaderAPI:
...
@abstractmethod
def get_block(self) -> BlockAPI:
...
#
# Execution
#
@abstractmethod
def apply_transaction(self,
header: BlockHeaderAPI,
transaction: SignedTransactionAPI
) -> Tuple[ReceiptAPI, ComputationAPI]:
...
@abstractmethod
def execute_bytecode(self,
origin: Address,
gas_price: int,
gas: int,
to: Address,
sender: Address,
value: int,
data: bytes,
code: bytes,
code_address: Address = None) -> ComputationAPI:
...
@abstractmethod
def apply_all_transactions(
self,
transactions: Sequence[SignedTransactionAPI],
base_header: BlockHeaderAPI
) -> Tuple[BlockHeaderAPI, Tuple[ReceiptAPI, ...], Tuple[ComputationAPI, ...]]:
...
@abstractmethod
def make_receipt(self,
base_header: BlockHeaderAPI,
transaction: SignedTransactionAPI,
computation: ComputationAPI,
state: StateAPI) -> ReceiptAPI:
"""
Generate the receipt resulting from applying the transaction.
:param base_header: the header of the block before the transaction was applied.
:param transaction: the transaction used to generate the receipt
:param computation: the result of running the transaction computation
:param state: the resulting state, after executing the computation
:return: receipt
"""
...
#
# Mining
#
@abstractmethod
def import_block(self, block: BlockAPI) -> BlockAPI:
...
@abstractmethod
def mine_block(self, *args: Any, **kwargs: Any) -> BlockAPI:
...
@abstractmethod
def set_block_transactions(self,
base_block: BlockAPI,
new_header: BlockHeaderAPI,
transactions: Sequence[SignedTransactionAPI],
receipts: Sequence[ReceiptAPI]) -> BlockAPI:
...
#
# Finalization
#
@abstractmethod
def finalize_block(self, block: BlockAPI) -> BlockAPI:
...
@abstractmethod
def pack_block(self, block: BlockAPI, *args: Any, **kwargs: Any) -> BlockAPI:
...
#
# Headers
#
@abstractmethod
def add_receipt_to_header(self,
old_header: BlockHeaderAPI,
receipt: ReceiptAPI) -> BlockHeaderAPI:
"""
Apply the receipt to the old header, and return the resulting header. This may have
storage-related side-effects. For example, pre-Byzantium, the state root hash
is included in the receipt, and so must be stored into the database.
"""
...
@classmethod
@abstractmethod
def compute_difficulty(cls, parent_header: BlockHeaderAPI, timestamp: int) -> int:
"""
Compute the difficulty for a block header.
:param parent_header: the parent header
:param timestamp: the timestamp of the child header
"""
...
@abstractmethod
def configure_header(self, **header_params: Any) -> BlockHeaderAPI:
"""
Setup the current header with the provided parameters. This can be
used to set fields like the gas limit or timestamp to value different
than their computed defaults.
"""
...
@classmethod
@abstractmethod
def create_header_from_parent(cls,
parent_header: BlockHeaderAPI,
**header_params: Any) -> BlockHeaderAPI:
"""
Creates and initializes a new block header from the provided
`parent_header`.
"""
...
#
# Blocks
#
@classmethod
@abstractmethod
def generate_block_from_parent_header_and_coinbase(cls,
parent_header: BlockHeaderAPI,
coinbase: Address) -> BlockAPI:
...
@classmethod
@abstractmethod
def get_block_class(cls) -> Type[BlockAPI]:
...
@staticmethod
@abstractmethod
def get_block_reward() -> int:
"""
Return the amount in **wei** that should be given to a miner as a reward
for this block.
.. note::
This is an abstract method that must be implemented in subclasses
"""
...
@classmethod
@abstractmethod
def get_nephew_reward(cls) -> int:
"""
Return the reward which should be given to the miner of the given `nephew`.
.. note::
This is an abstract method that must be implemented in subclasses
"""
...
@classmethod
@abstractmethod
def get_prev_hashes(cls,
last_block_hash: Hash32,
chaindb: ChainDatabaseAPI) -> Optional[Iterable[Hash32]]:
...
@staticmethod
@abstractmethod
def get_uncle_reward(block_number: int, uncle: BlockAPI) -> int:
"""
Return the reward which should be given to the miner of the given `uncle`.
.. note::
This is an abstract method that must be implemented in subclasses
"""
...
#
# Transactions
#
@abstractmethod
def create_transaction(self, *args: Any, **kwargs: Any) -> SignedTransactionAPI:
...
@classmethod
@abstractmethod
def create_unsigned_transaction(cls,
*,
nonce: int,
gas_price: int,
gas: int,
to: Address,
value: int,
data: bytes) -> UnsignedTransactionAPI:
...
@classmethod
@abstractmethod
def get_transaction_class(cls) -> Type[SignedTransactionAPI]:
...
#
# Validate
#
@classmethod
@abstractmethod
def validate_receipt(self, receipt: ReceiptAPI) -> None:
...
@abstractmethod
def validate_block(self, block: BlockAPI) -> None:
...
@classmethod
@abstractmethod
def validate_header(cls,
header: BlockHeaderAPI,
parent_header: BlockHeaderAPI,
check_seal: bool = True
) -> None:
...
@abstractmethod
def validate_transaction_against_header(self,
base_header: BlockHeaderAPI,
transaction: SignedTransactionAPI) -> None:
"""
Validate that the given transaction is valid to apply to the given header.
:param base_header: header before applying the transaction
:param transaction: the transaction to validate
:raises: ValidationError if the transaction is not valid to apply
"""
...
@classmethod
@abstractmethod
def validate_seal(cls, header: BlockHeaderAPI) -> None:
...
@classmethod
@abstractmethod
def validate_uncle(cls,
block: BlockAPI,
uncle: BlockHeaderAPI,
uncle_parent: BlockHeaderAPI
) -> None:
...
#
# State
#
@classmethod
@abstractmethod
def get_state_class(cls) -> Type[StateAPI]:
...
@abstractmethod
def state_in_temp_block(self) -> ContextManager[StateAPI]:
...
class HeaderChainAPI(ABC):
    """A chain of block headers only — no block bodies, no state execution."""
    # Header the chain is currently positioned at.
    header: BlockHeaderAPI
    # Chain identifier (EIP-155 style chain id).
    chain_id: int
    # Fork schedule: ``(activation_block_number, VM class)`` pairs.
    vm_configuration: Tuple[Tuple[int, Type[VirtualMachineAPI]], ...]
    @abstractmethod
    def __init__(self, base_db: AtomicDatabaseAPI, header: BlockHeaderAPI = None) -> None:
        """Initialize the header chain over ``base_db``, optionally at ``header``."""
        ...
    #
    # Chain Initialization API
    #
    @classmethod
    @abstractmethod
    def from_genesis_header(cls,
                            base_db: AtomicDatabaseAPI,
                            genesis_header: BlockHeaderAPI) -> 'HeaderChainAPI':
        """Initialize the chain in ``base_db`` from the given genesis header."""
        ...
    #
    # Helpers
    #
    @classmethod
    @abstractmethod
    def get_headerdb_class(cls) -> Type[HeaderDatabaseAPI]:
        """Return the :class:`HeaderDatabaseAPI` subclass this chain uses."""
        ...
    #
    # Canonical Chain API
    #
    @abstractmethod
    def get_canonical_block_header_by_number(self, block_number: BlockNumber) -> BlockHeaderAPI:
        """Return the canonical header at ``block_number``."""
        ...
    @abstractmethod
    def get_canonical_head(self) -> BlockHeaderAPI:
        """Return the header at the head of the canonical chain."""
        ...
    #
    # Header API
    #
    @abstractmethod
    def get_block_header_by_hash(self, block_hash: Hash32) -> BlockHeaderAPI:
        """Return the header with the given hash, canonical or not."""
        ...
    @abstractmethod
    def header_exists(self, block_hash: Hash32) -> bool:
        """Return whether a header with ``block_hash`` is present."""
        ...
    @abstractmethod
    def import_header(self,
                      header: BlockHeaderAPI,
                      ) -> Tuple[Tuple[BlockHeaderAPI, ...], Tuple[BlockHeaderAPI, ...]]:
        """Import ``header``.

        Returns a pair of tuples: headers that became canonical and headers
        that were evicted from the canonical chain as a result.
        """
        ...
class ChainAPI(ConfigurableAPI):
    """Full chain API: header/block access, transaction execution and validation."""
    # Fork schedule: ``(activation_block_number, VM class)`` pairs.
    vm_configuration: Tuple[Tuple[int, Type[VirtualMachineAPI]], ...]
    chain_id: int
    chaindb: ChainDatabaseAPI
    #
    # Helpers
    #
    @classmethod
    @abstractmethod
    def get_chaindb_class(cls) -> Type[ChainDatabaseAPI]:
        """Return the :class:`ChainDatabaseAPI` subclass this chain uses."""
        ...
    #
    # Chain API
    #
    @classmethod
    @abstractmethod
    def from_genesis(cls,
                     base_db: AtomicDatabaseAPI,
                     genesis_params: Dict[str, HeaderParams],
                     genesis_state: AccountState=None) -> 'ChainAPI':
        """Initialize a chain from genesis header parameters and optional genesis state."""
        ...
    @classmethod
    @abstractmethod
    def from_genesis_header(cls,
                            base_db: AtomicDatabaseAPI,
                            genesis_header: BlockHeaderAPI) -> 'ChainAPI':
        """Initialize the chain from the given genesis header."""
        ...
    #
    # VM API
    #
    @classmethod
    # NOTE(review): unlike the sibling classmethods this one is not marked
    # ``@abstractmethod`` — confirm whether that is intentional.
    def get_vm_class(cls, header: BlockHeaderAPI) -> Type[VirtualMachineAPI]:
        """
        Returns the VM instance for the given block number.
        """
        ...
    @abstractmethod
    def get_vm(self, header: BlockHeaderAPI = None) -> VirtualMachineAPI:
        """Return a VM instance for ``header`` (or a default header when omitted)."""
        ...
    @classmethod
    # NOTE(review): also missing ``@abstractmethod`` — confirm intent.
    def get_vm_class_for_block_number(cls, block_number: BlockNumber) -> Type[VirtualMachineAPI]:
        """Return the VM class active at ``block_number`` per ``vm_configuration``."""
        ...
    #
    # Header API
    #
    @abstractmethod
    def create_header_from_parent(self,
                                  parent_header: BlockHeaderAPI,
                                  **header_params: HeaderParams) -> BlockHeaderAPI:
        """Create a new header as a child of ``parent_header`` with ``header_params``."""
        ...
    @abstractmethod
    def get_block_header_by_hash(self, block_hash: Hash32) -> BlockHeaderAPI:
        """Return the header with the given hash, canonical or not."""
        ...
    @abstractmethod
    def get_canonical_head(self) -> BlockHeaderAPI:
        """Return the header at the head of the canonical chain."""
        ...
    @abstractmethod
    def get_score(self, block_hash: Hash32) -> int:
        """Return the score recorded for the block with ``block_hash``."""
        ...
    #
    # Block API
    #
    @abstractmethod
    def get_ancestors(self, limit: int, header: BlockHeaderAPI) -> Tuple[BlockAPI, ...]:
        """Return up to ``limit`` ancestor blocks of ``header``."""
        ...
    @abstractmethod
    def get_block(self) -> BlockAPI:
        """Return the current block."""
        ...
    @abstractmethod
    def get_block_by_hash(self, block_hash: Hash32) -> BlockAPI:
        """Return the block with the given hash."""
        ...
    @abstractmethod
    def get_block_by_header(self, block_header: BlockHeaderAPI) -> BlockAPI:
        """Return the block for ``block_header``."""
        ...
    @abstractmethod
    def get_canonical_block_by_number(self, block_number: BlockNumber) -> BlockAPI:
        """Return the canonical block at ``block_number``."""
        ...
    @abstractmethod
    def get_canonical_block_hash(self, block_number: BlockNumber) -> Hash32:
        """Return the canonical block hash at ``block_number``."""
        ...
    @abstractmethod
    def build_block_with_transactions(
            self,
            transactions: Tuple[SignedTransactionAPI, ...],
            parent_header: BlockHeaderAPI = None
    ) -> Tuple[BlockAPI, Tuple[ReceiptAPI, ...], Tuple[ComputationAPI, ...]]:
        """Apply ``transactions`` on top of ``parent_header``.

        Returns the built block together with the per-transaction receipts
        and computations.
        """
        ...
    #
    # Transaction API
    #
    @abstractmethod
    def create_transaction(self, *args: Any, **kwargs: Any) -> SignedTransactionAPI:
        """Create a signed transaction using the current VM's transaction class."""
        ...
    @abstractmethod
    # NOTE(review): first parameter is named ``cls`` but the method is not
    # decorated ``@classmethod`` (the VirtualMachineAPI counterpart is) —
    # confirm whether the decorator was dropped by mistake.
    def create_unsigned_transaction(cls,
                                    *,
                                    nonce: int,
                                    gas_price: int,
                                    gas: int,
                                    to: Address,
                                    value: int,
                                    data: bytes) -> UnsignedTransactionAPI:
        """Create an unsigned transaction from the given keyword-only fields."""
        ...
    @abstractmethod
    def get_canonical_transaction(self, transaction_hash: Hash32) -> SignedTransactionAPI:
        """Return the canonical-chain transaction with ``transaction_hash``."""
        ...
    @abstractmethod
    def get_transaction_receipt(self, transaction_hash: Hash32) -> ReceiptAPI:
        """Return the receipt for the transaction with ``transaction_hash``."""
        ...
    #
    # Execution API
    #
    @abstractmethod
    def get_transaction_result(
            self,
            transaction: SignedTransactionAPI,
            at_header: BlockHeaderAPI) -> bytes:
        """Execute ``transaction`` at ``at_header`` and return its output bytes."""
        ...
    @abstractmethod
    def estimate_gas(
            self,
            transaction: SignedTransactionAPI,
            at_header: BlockHeaderAPI = None) -> int:
        """Estimate the gas needed by ``transaction`` (at ``at_header`` when given)."""
        ...
    @abstractmethod
    def import_block(self,
                     block: BlockAPI,
                     perform_validation: bool=True,
                     ) -> Tuple[BlockAPI, Tuple[BlockAPI, ...], Tuple[BlockAPI, ...]]:
        """Import ``block``.

        Returns the imported block plus the blocks that became canonical and
        those evicted from the canonical chain.
        """
        ...
    #
    # Validation API
    #
    @abstractmethod
    def validate_receipt(self, receipt: ReceiptAPI, at_header: BlockHeaderAPI) -> None:
        """Validate ``receipt`` against ``at_header``; raise on failure."""
        ...
    @abstractmethod
    def validate_block(self, block: BlockAPI) -> None:
        """Validate ``block``; raise on failure."""
        ...
    @abstractmethod
    def validate_seal(self, header: BlockHeaderAPI) -> None:
        """Validate the seal of ``header``; raise on failure."""
        ...
    @abstractmethod
    def validate_gaslimit(self, header: BlockHeaderAPI) -> None:
        """Validate the gas limit of ``header``; raise on failure."""
        ...
    @abstractmethod
    def validate_uncles(self, block: BlockAPI) -> None:
        """Validate the uncles of ``block``; raise on failure."""
        ...
    @classmethod
    @abstractmethod
    def validate_chain(
            cls,
            root: BlockHeaderAPI,
            descendants: Tuple[BlockHeaderAPI, ...],
            seal_check_random_sample_rate: int = 1) -> None:
        """Validate that ``descendants`` form a valid chain from ``root``.

        Seal checks may be applied to only a random sample of headers, at the
        given sampling rate.
        """
        ...
class MiningChainAPI(ChainAPI):
    """Chain that can also build and mine new blocks at its tip."""
    # Header of the block currently being built.
    header: BlockHeaderAPI
    @abstractmethod
    def __init__(self, base_db: AtomicDatabaseAPI, header: BlockHeaderAPI = None) -> None:
        """Initialize the mining chain over ``base_db``, optionally at ``header``."""
        ...
    @abstractmethod
    def apply_transaction(self,
                          transaction: SignedTransactionAPI
                          ) -> Tuple[BlockAPI, ReceiptAPI, ComputationAPI]:
        """Apply ``transaction`` to the open block; return block, receipt, computation."""
        ...
    @abstractmethod
    def import_block(self,
                     block: BlockAPI,
                     perform_validation: bool=True
                     ) -> Tuple[BlockAPI, Tuple[BlockAPI, ...], Tuple[BlockAPI, ...]]:
        """Import ``block``; return it plus new-canonical and evicted blocks."""
        ...
    @abstractmethod
    def mine_block(self, *args: Any, **kwargs: Any) -> BlockAPI:
        """Mine the current block and return the mined result."""
        ...
    # NOTE(review): overrides ChainAPI.get_vm with a different parameter name
    # (``at_header`` vs ``header``) and without ``@abstractmethod`` — confirm.
    def get_vm(self, at_header: BlockHeaderAPI = None) -> VirtualMachineAPI:
        """Return a VM instance for ``at_header`` (or a default when omitted)."""
        ...
| 23.839895 | 98 | 0.568777 | from abc import (
ABC,
abstractmethod
)
from typing import (
Any,
Callable,
ContextManager,
Dict,
Iterable,
Iterator,
MutableMapping,
Optional,
Sequence,
Tuple,
Type,
TypeVar,
Union,
)
from uuid import UUID
import rlp
from eth_bloom import BloomFilter
from eth_typing import (
Address,
BlockNumber,
Hash32,
)
from eth_keys.datatypes import PrivateKey
from eth.constants import (
BLANK_ROOT_HASH,
)
from eth.exceptions import VMError
from eth.typing import (
BytesOrView,
JournalDBCheckpoint,
AccountState,
HeaderParams,
)
from eth.tools.logging import ExtendedDebugLogger
T = TypeVar('T')
class MiningHeaderAPI(rlp.Serializable, ABC):
    """Block header fields available while mining (before ``mix_hash``/``nonce``)."""
    parent_hash: Hash32
    uncles_hash: Hash32
    coinbase: Address
    state_root: Hash32
    transaction_root: Hash32
    receipt_root: Hash32
    bloom: int
    difficulty: int
    block_number: BlockNumber
    gas_limit: int
    gas_used: int
    timestamp: int
    extra_data: bytes
class BlockHeaderAPI(MiningHeaderAPI):
    """A complete block header, adding the ``mix_hash`` and ``nonce`` seal fields."""
    mix_hash: Hash32
    nonce: bytes
class LogAPI(rlp.Serializable, ABC):
    """A single log entry: emitting address, topics and payload data."""
    address: Address
    topics: Sequence[int]
    data: bytes
    @property
    @abstractmethod
    def bloomables(self) -> Tuple[bytes, ...]:
        """Byte strings from this log that feed the bloom filter."""
        ...
class ReceiptAPI(rlp.Serializable, ABC):
    """Receipt of a transaction's execution: state root, gas used, bloom, logs."""
    state_root: bytes
    gas_used: int
    bloom: int
    logs: Sequence[LogAPI]
    @property
    @abstractmethod
    def bloom_filter(self) -> BloomFilter:
        """The ``bloom`` field wrapped as a :class:`BloomFilter`."""
        ...
class BaseTransactionAPI(ABC):
    """Behavior shared by signed and unsigned transactions."""
    @abstractmethod
    def validate(self) -> None:
        """Validate this transaction's fields; expected to raise on invalid data."""
        ...
    @property
    @abstractmethod
    def intrinsic_gas(self) -> int:
        """Intrinsic gas cost of this transaction."""
        ...
    @abstractmethod
    def get_intrinsic_gas(self) -> int:
        """Return the intrinsic gas cost of this transaction."""
        ...
    @abstractmethod
    def gas_used_by(self, computation: 'ComputationAPI') -> int:
        """Return the gas used by ``computation`` for this transaction."""
        ...
    @abstractmethod
    def copy(self: T, **overrides: Any) -> T:
        """Return a copy of this transaction with ``overrides`` applied."""
        ...
class TransactionFieldsAPI(ABC):
    """Field access common to transaction objects."""
    nonce: int
    gas_price: int
    gas: int
    to: Address
    value: int
    data: bytes
    # Signature components.
    v: int
    r: int
    s: int
    @property
    @abstractmethod
    def hash(self) -> bytes:
        """Hash of this transaction."""
        ...
class UnsignedTransactionAPI(rlp.Serializable, BaseTransactionAPI):
    """A transaction that has not yet been signed."""
    nonce: int
    gas_price: int
    gas: int
    to: Address
    value: int
    data: bytes
    @abstractmethod
    def as_signed_transaction(self, private_key: PrivateKey) -> 'SignedTransactionAPI':
        """Sign this transaction with ``private_key`` and return the signed form."""
        ...
class SignedTransactionAPI(rlp.Serializable, BaseTransactionAPI, TransactionFieldsAPI):
    """A signed transaction: fields plus signature validation and sender recovery."""
    @classmethod
    @abstractmethod
    def from_base_transaction(cls, transaction: 'SignedTransactionAPI') -> 'SignedTransactionAPI':
        """Convert ``transaction`` into an instance of this transaction class."""
        ...
    @property
    @abstractmethod
    def sender(self) -> Address:
        """Address of the account that signed this transaction."""
        ...
    @abstractmethod
    def validate(self) -> None:
        """Validate fields and signature; expected to raise on invalid data."""
        ...
    @property
    @abstractmethod
    def is_signature_valid(self) -> bool:
        """Whether the signature is valid."""
        ...
    @abstractmethod
    def check_signature_validity(self) -> None:
        """Check the signature; expected to raise when invalid."""
        ...
    @abstractmethod
    def get_sender(self) -> Address:
        """Return the address recovered from the signature."""
        ...
    @abstractmethod
    def get_message_for_signing(self) -> bytes:
        """Return the byte string that is signed to produce the signature."""
        ...
    @classmethod
    @abstractmethod
    def create_unsigned_transaction(cls,
                                    *,
                                    nonce: int,
                                    gas_price: int,
                                    gas: int,
                                    to: Address,
                                    value: int,
                                    data: bytes) -> UnsignedTransactionAPI:
        """Create an unsigned transaction from the given keyword-only fields."""
        ...
class BlockAPI(rlp.Serializable, ABC):
    """A block in the chain."""
    # Concrete subclasses are expected to replace this ``None`` placeholder
    # with their transaction class.
    transaction_class: Type[SignedTransactionAPI] = None
    @classmethod
    @abstractmethod
    def get_transaction_class(cls) -> Type[SignedTransactionAPI]:
        """Return the transaction class used by this block."""
        ...
    @classmethod
    @abstractmethod
    def from_header(cls, header: BlockHeaderAPI, chaindb: 'ChainDatabaseAPI') -> 'BlockAPI':
        """Instantiate a block for ``header``, loading its body from ``chaindb``."""
        ...
    @property
    @abstractmethod
    def hash(self) -> Hash32:
        """Hash of this block."""
        ...
    @property
    @abstractmethod
    def number(self) -> int:
        """Number of this block."""
        ...
    @property
    @abstractmethod
    def is_genesis(self) -> bool:
        """Whether this is the genesis block."""
        ...
class DatabaseAPI(MutableMapping[bytes, bytes], ABC):
    """Key/value store with ``bytes`` keys and values."""
    @abstractmethod
    def set(self, key: bytes, value: bytes) -> None:
        """Store ``value`` under ``key``."""
        ...
    @abstractmethod
    def exists(self, key: bytes) -> bool:
        """Return whether ``key`` is present."""
        ...
    @abstractmethod
    def delete(self, key: bytes) -> None:
        """Remove ``key`` from the store."""
        ...
class AtomicDatabaseAPI(DatabaseAPI):
    """Database whose writes can be grouped into an atomic batch."""
    @abstractmethod
    def atomic_batch(self) -> ContextManager[DatabaseAPI]:
        """Return a context manager over a database used for batched atomic writes."""
        ...
class HeaderDatabaseAPI(ABC):
    """Database access for block headers and the canonical header chain."""
    db: AtomicDatabaseAPI
    @abstractmethod
    def __init__(self, db: AtomicDatabaseAPI) -> None:
        """Wrap ``db`` for header storage."""
        ...
    @abstractmethod
    def get_canonical_block_hash(self, block_number: BlockNumber) -> Hash32:
        """Return the canonical block hash at ``block_number``."""
        ...
    @abstractmethod
    def get_canonical_block_header_by_number(self, block_number: BlockNumber) -> BlockHeaderAPI:
        """Return the canonical header at ``block_number``."""
        ...
    @abstractmethod
    def get_canonical_head(self) -> BlockHeaderAPI:
        """Return the header at the head of the canonical chain."""
        ...
    @abstractmethod
    def get_block_header_by_hash(self, block_hash: Hash32) -> BlockHeaderAPI:
        """Return the header with ``block_hash``."""
        ...
    @abstractmethod
    def get_score(self, block_hash: Hash32) -> int:
        """Return the score recorded for ``block_hash``."""
        ...
    @abstractmethod
    def header_exists(self, block_hash: Hash32) -> bool:
        """Return whether a header with ``block_hash`` exists."""
        ...
    @abstractmethod
    def persist_header(self,
                       header: BlockHeaderAPI
                       ) -> Tuple[Tuple[BlockHeaderAPI, ...], Tuple[BlockHeaderAPI, ...]]:
        """Persist ``header``; return (new canonical headers, evicted headers)."""
        ...
    @abstractmethod
    def persist_header_chain(self,
                             headers: Sequence[BlockHeaderAPI]
                             ) -> Tuple[Tuple[BlockHeaderAPI, ...], Tuple[BlockHeaderAPI, ...]]:
        """Persist ``headers``; return (new canonical headers, evicted headers)."""
        ...
class ChainDatabaseAPI(HeaderDatabaseAPI):
    """Header database extended with block bodies, transactions, receipts and uncles."""
    @abstractmethod
    def get_block_uncles(self, uncles_hash: Hash32) -> Tuple[BlockHeaderAPI, ...]:
        """Return the uncle headers stored under ``uncles_hash``."""
        ...
    @abstractmethod
    def persist_block(self,
                      block: BlockAPI
                      ) -> Tuple[Tuple[Hash32, ...], Tuple[Hash32, ...]]:
        """Persist ``block``; return (new canonical hashes, evicted hashes)."""
        ...
    @abstractmethod
    def persist_uncles(self, uncles: Tuple[BlockHeaderAPI]) -> Hash32:
        """Persist ``uncles``; return the hash they are stored under."""
        ...
    @abstractmethod
    def add_receipt(self,
                    block_header: BlockHeaderAPI,
                    index_key: int, receipt: ReceiptAPI) -> Hash32:
        """Store ``receipt`` at ``index_key`` for the block; return the resulting root hash."""
        ...
    @abstractmethod
    def add_transaction(self,
                        block_header: BlockHeaderAPI,
                        index_key: int, transaction: SignedTransactionAPI) -> Hash32:
        """Store ``transaction`` at ``index_key`` for the block; return the resulting root hash."""
        ...
    @abstractmethod
    def get_block_transactions(
            self,
            block_header: BlockHeaderAPI,
            transaction_class: Type[SignedTransactionAPI]) -> Sequence[SignedTransactionAPI]:
        """Return the block's transactions, deserialized as ``transaction_class``."""
        ...
    @abstractmethod
    def get_block_transaction_hashes(self, block_header: BlockHeaderAPI) -> Tuple[Hash32, ...]:
        """Return the transaction hashes of the block for ``block_header``."""
        ...
    @abstractmethod
    def get_receipt_by_index(self,
                             block_number: BlockNumber,
                             receipt_index: int) -> ReceiptAPI:
        """Return the receipt at ``receipt_index`` in the block at ``block_number``."""
        ...
    @abstractmethod
    def get_receipts(self,
                     header: BlockHeaderAPI,
                     receipt_class: Type[ReceiptAPI]) -> Tuple[ReceiptAPI, ...]:
        """Return the block's receipts, deserialized as ``receipt_class``."""
        ...
    @abstractmethod
    def get_transaction_by_index(
            self,
            block_number: BlockNumber,
            transaction_index: int,
            transaction_class: Type[SignedTransactionAPI]) -> SignedTransactionAPI:
        """Return the transaction at ``transaction_index`` in block ``block_number``."""
        ...
    @abstractmethod
    def get_transaction_index(self, transaction_hash: Hash32) -> Tuple[BlockNumber, int]:
        """Return (block number, index) for the transaction with ``transaction_hash``."""
        ...
    @abstractmethod
    def exists(self, key: bytes) -> bool:
        """Return whether ``key`` exists in the underlying store."""
        ...
    @abstractmethod
    def get(self, key: bytes) -> bytes:
        """Return the raw value stored under ``key``."""
        ...
    @abstractmethod
    def persist_trie_data_dict(self, trie_data_dict: Dict[Hash32, bytes]) -> None:
        """Persist the given hash-to-bytes trie node mapping."""
        ...
class GasMeterAPI(ABC):
    """Tracks gas consumption, refunds and remaining gas during execution."""
    gas_refunded: int
    gas_remaining: int
    @abstractmethod
    def consume_gas(self, amount: int, reason: str) -> None:
        """Consume ``amount`` of gas, recording ``reason``."""
        ...
    @abstractmethod
    def return_gas(self, amount: int) -> None:
        """Return ``amount`` of unused gas to the meter."""
        ...
    @abstractmethod
    def refund_gas(self, amount: int) -> None:
        """Add ``amount`` to the refund counter."""
        ...
class MessageAPI(ABC):
    """A message for VM computation: call/create payload plus its execution context."""
    code: bytes
    _code_address: Address
    create_address: Address
    data: BytesOrView
    depth: int
    gas: int
    is_static: bool
    sender: Address
    should_transfer_value: bool
    _storage_address: Address
    to: Address
    value: int
    __slots__ = [
        'code',
        '_code_address',
        'create_address',
        'data',
        'depth',
        'gas',
        'is_static',
        'sender',
        'should_transfer_value',
        # BUG FIX: the original omitted the comma after '_storage_address',
        # so implicit string-literal concatenation produced one bogus slot
        # '_storage_addressto' and silently dropped the 'to' slot.
        '_storage_address',
        'to',
        'value',
    ]
    @property
    @abstractmethod
    def code_address(self) -> Address:
        """Address whose code is executed."""
        ...
    @property
    @abstractmethod
    def storage_address(self) -> Address:
        """Address whose storage is accessed during execution."""
        ...
    @property
    @abstractmethod
    def is_create(self) -> bool:
        """Whether this message creates a new contract."""
        ...
    @property
    @abstractmethod
    def data_as_bytes(self) -> bytes:
        """The ``data`` payload as plain ``bytes``."""
        ...
class OpcodeAPI(ABC):
    """A single VM opcode: callable logic identified by a mnemonic."""
    mnemonic: str
    @abstractmethod
    def __call__(self, computation: 'ComputationAPI') -> None:
        """Execute this opcode against ``computation``."""
        ...
    @classmethod
    @abstractmethod
    def as_opcode(cls: Type[T],
                  logic_fn: Callable[['ComputationAPI'], None],
                  mnemonic: str,
                  gas_cost: int) -> Type[T]:
        """Build an opcode class from ``logic_fn`` with ``mnemonic`` and ``gas_cost``."""
        ...
    @abstractmethod
    def __copy__(self) -> 'OpcodeAPI':
        """Support :func:`copy.copy`."""
        ...
    @abstractmethod
    def __deepcopy__(self, memo: Any) -> 'OpcodeAPI':
        """Support :func:`copy.deepcopy`."""
        ...
class TransactionContextAPI(ABC):
    """Per-transaction execution context: gas price, origin, log counter."""
    @abstractmethod
    def __init__(self, gas_price: int, origin: Address) -> None:
        """Initialize the context with the transaction's ``gas_price`` and ``origin``."""
        ...
    @abstractmethod
    def get_next_log_counter(self) -> int:
        """Return the next log counter value for this transaction."""
        ...
    @property
    @abstractmethod
    def gas_price(self) -> int:
        """Gas price of the transaction."""
        ...
    @property
    @abstractmethod
    def origin(self) -> Address:
        """Origin address of the transaction."""
        ...
class MemoryAPI(ABC):
    """Byte-addressed VM memory."""
    @abstractmethod
    def extend(self, start_position: int, size: int) -> None:
        """Grow memory to cover ``start_position + size`` bytes."""
        ...
    @abstractmethod
    def __len__(self) -> int:
        """Return the current memory size in bytes."""
        ...
    @abstractmethod
    def write(self, start_position: int, size: int, value: bytes) -> None:
        """Write ``value`` (``size`` bytes) starting at ``start_position``."""
        ...
    @abstractmethod
    def read(self, start_position: int, size: int) -> memoryview:
        """Return a zero-copy view of ``size`` bytes at ``start_position``."""
        ...
    @abstractmethod
    def read_bytes(self, start_position: int, size: int) -> bytes:
        """Return a copy of ``size`` bytes at ``start_position``."""
        ...
class StackAPI(ABC):
    """The VM stack; values are held as ``int`` or ``bytes``."""
    @abstractmethod
    def push_int(self, value: int) -> None:
        """Push an integer onto the stack."""
        ...
    @abstractmethod
    def push_bytes(self, value: bytes) -> None:
        """Push a byte string onto the stack."""
        ...
    @abstractmethod
    def pop1_bytes(self) -> bytes:
        """Pop one item, returned as ``bytes``."""
        ...
    @abstractmethod
    def pop1_int(self) -> int:
        """Pop one item, returned as ``int``."""
        ...
    @abstractmethod
    def pop1_any(self) -> Union[int, bytes]:
        """Pop one item in whichever representation it is stored."""
        ...
    @abstractmethod
    def pop_any(self, num_items: int) -> Tuple[Union[int, bytes], ...]:
        """Pop ``num_items`` items in their stored representations."""
        ...
    @abstractmethod
    def pop_ints(self, num_items: int) -> Tuple[int, ...]:
        """Pop ``num_items`` items, each returned as ``int``."""
        ...
    @abstractmethod
    def pop_bytes(self, num_items: int) -> Tuple[bytes, ...]:
        """Pop ``num_items`` items, each returned as ``bytes``."""
        ...
    @abstractmethod
    def swap(self, position: int) -> None:
        """Swap the top of the stack with the item at ``position``."""
        ...
    @abstractmethod
    def dup(self, position: int) -> None:
        """Duplicate the item at ``position`` onto the top of the stack."""
        ...
class CodeStreamAPI(ABC):
    """Iterable stream over bytecode with a program counter."""
    # Current program counter.
    pc: int
    @abstractmethod
    def read(self, size: int) -> bytes:
        """Read ``size`` bytes from the current position."""
        ...
    @abstractmethod
    def __len__(self) -> int:
        """Return the length of the underlying code."""
        ...
    @abstractmethod
    def __getitem__(self, i: int) -> int:
        """Return the byte value at index ``i``."""
        ...
    @abstractmethod
    def __iter__(self) -> Iterator[int]:
        """Iterate over the byte values of the code."""
        ...
    @abstractmethod
    def peek(self) -> int:
        """Return the next byte without advancing the position."""
        ...
    @abstractmethod
    def seek(self, pc: int) -> ContextManager['CodeStreamAPI']:
        """Context manager that temporarily moves the program counter to ``pc``."""
        ...
    @abstractmethod
    def is_valid_opcode(self, position: int) -> bool:
        """Return whether ``position`` holds a valid opcode."""
        ...
class StackManipulationAPI(ABC):
    """Stack-manipulation operations mirroring :class:`StackAPI`."""
    @abstractmethod
    def stack_pop_ints(self, num_items: int) -> Tuple[int, ...]:
        """Pop ``num_items`` items from the stack, each as ``int``."""
        ...
    @abstractmethod
    def stack_pop_bytes(self, num_items: int) -> Tuple[bytes, ...]:
        """Pop ``num_items`` items from the stack, each as ``bytes``."""
        ...
    @abstractmethod
    def stack_pop_any(self, num_items: int) -> Tuple[Union[int, bytes], ...]:
        """Pop ``num_items`` items in their stored representations."""
        ...
    @abstractmethod
    def stack_pop1_int(self) -> int:
        """Pop one item from the stack as ``int``."""
        ...
    @abstractmethod
    def stack_pop1_bytes(self) -> bytes:
        """Pop one item from the stack as ``bytes``."""
        ...
    @abstractmethod
    def stack_pop1_any(self) -> Union[int, bytes]:
        """Pop one item in its stored representation."""
        ...
    @abstractmethod
    def stack_push_int(self, value: int) -> None:
        """Push an integer onto the stack."""
        ...
    @abstractmethod
    def stack_push_bytes(self, value: bytes) -> None:
        """Push a byte string onto the stack."""
        ...
class ExecutionContextAPI(ABC):
    """Block-level context values available during transaction execution."""
    coinbase: Address
    timestamp: int
    block_number: int
    difficulty: int
    gas_limit: int
    prev_hashes: Sequence[Hash32]
class ComputationAPI(ContextManager['ComputationAPI'], StackManipulationAPI):
msg: MessageAPI
logger: ExtendedDebugLogger
code: CodeStreamAPI
opcodes: Dict[int, OpcodeAPI] = None
state: 'StateAPI'
return_data: bytes
@abstractmethod
def __init__(self,
state: 'StateAPI',
message: MessageAPI,
transaction_context: TransactionContextAPI) -> None:
...
@property
@abstractmethod
def is_origin_computation(self) -> bool:
...
@property
@abstractmethod
def is_success(self) -> bool:
...
@property
@abstractmethod
def is_error(self) -> bool:
...
@property
@abstractmethod
def error(self) -> VMError:
...
@error.setter
def error(self, value: VMError) -> None:
# sure that the setter doesn't actually get used as a noop.
raise NotImplementedError
@abstractmethod
def raise_if_error(self) -> None:
...
@property
@abstractmethod
def should_burn_gas(self) -> bool:
...
@property
@abstractmethod
def should_return_gas(self) -> bool:
...
@property
@abstractmethod
def should_erase_return_data(self) -> bool:
...
@abstractmethod
def extend_memory(self, start_position: int, size: int) -> None:
...
@abstractmethod
def memory_write(self, start_position: int, size: int, value: bytes) -> None:
...
@abstractmethod
def memory_read(self, start_position: int, size: int) -> memoryview:
...
@abstractmethod
def memory_read_bytes(self, start_position: int, size: int) -> bytes:
...
@abstractmethod
def get_gas_meter(self) -> GasMeterAPI:
...
@abstractmethod
def consume_gas(self, amount: int, reason: str) -> None:
...
@abstractmethod
def return_gas(self, amount: int) -> None:
...
@abstractmethod
def refund_gas(self, amount: int) -> None:
...
@abstractmethod
def get_gas_refund(self) -> int:
...
@abstractmethod
def get_gas_used(self) -> int:
...
@abstractmethod
def get_gas_remaining(self) -> int:
...
@abstractmethod
def stack_swap(self, position: int) -> None:
...
@abstractmethod
def stack_dup(self, position: int) -> None:
...
@property
@abstractmethod
def output(self) -> bytes:
...
@output.setter
def output(self, value: bytes) -> None:
# sure that the setter doesn't actually get used as a noop.
raise NotImplementedError
@abstractmethod
def prepare_child_message(self,
gas: int,
to: Address,
value: int,
data: BytesOrView,
code: bytes,
**kwargs: Any) -> MessageAPI:
...
@abstractmethod
def apply_child_computation(self, child_msg: MessageAPI) -> 'ComputationAPI':
...
@abstractmethod
def generate_child_computation(self, child_msg: MessageAPI) -> 'ComputationAPI':
...
@abstractmethod
def add_child_computation(self, child_computation: 'ComputationAPI') -> None:
...
@abstractmethod
def register_account_for_deletion(self, beneficiary: Address) -> None:
...
@abstractmethod
def get_accounts_for_deletion(self) -> Tuple[Tuple[Address, Address], ...]:
...
@abstractmethod
def add_log_entry(self, account: Address, topics: Tuple[int, ...], data: bytes) -> None:
...
@abstractmethod
def get_raw_log_entries(self) -> Tuple[Tuple[int, bytes, Tuple[int, ...], bytes], ...]:
...
@abstractmethod
def get_log_entries(self) -> Tuple[Tuple[bytes, Tuple[int, ...], bytes], ...]:
...
@abstractmethod
def apply_message(self) -> 'ComputationAPI':
...
@abstractmethod
def apply_create_message(self) -> 'ComputationAPI':
...
@classmethod
@abstractmethod
def apply_computation(cls,
state: 'StateAPI',
message: MessageAPI,
transaction_context: TransactionContextAPI) -> 'ComputationAPI':
...
@property
@abstractmethod
def precompiles(self) -> Dict[Address, Callable[['ComputationAPI'], None]]:
...
@abstractmethod
def get_opcode_fn(self, opcode: int) -> OpcodeAPI:
...
class AccountStorageDatabaseAPI(ABC):
    """Storage slots for a single account, with journaled checkpoints."""
    @abstractmethod
    def get(self, slot: int, from_journal: bool=True) -> int:
        """Return the value at ``slot`` (including journaled writes by default)."""
        ...
    @abstractmethod
    def set(self, slot: int, value: int) -> None:
        """Write ``value`` into ``slot``."""
        ...
    @abstractmethod
    def delete(self) -> None:
        """Delete all storage for this account."""
        ...
    @abstractmethod
    def record(self, checkpoint: JournalDBCheckpoint) -> None:
        """Record ``checkpoint`` in the journal."""
        ...
    @abstractmethod
    def discard(self, checkpoint: JournalDBCheckpoint) -> None:
        """Discard all changes made since ``checkpoint``."""
        ...
    @abstractmethod
    def commit(self, checkpoint: JournalDBCheckpoint) -> None:
        """Commit all changes made since ``checkpoint``."""
        ...
    @abstractmethod
    def make_storage_root(self) -> None:
        """Compute the storage root for the current contents."""
        ...
    @property
    @abstractmethod
    def has_changed_root(self) -> bool:
        """Whether the storage root has changed."""
        ...
    @abstractmethod
    def get_changed_root(self) -> Hash32:
        """Return the changed storage root."""
        ...
    @abstractmethod
    def persist(self, db: DatabaseAPI) -> None:
        """Persist pending changes into ``db``."""
        ...
class AccountDatabaseAPI(ABC):
    """Account state access (balances, nonces, code, storage) with journaling."""
    @abstractmethod
    def __init__(self, db: AtomicDatabaseAPI, state_root: Hash32 = BLANK_ROOT_HASH) -> None:
        """Open the account database over ``db`` at ``state_root``."""
        ...
    @property
    @abstractmethod
    def state_root(self) -> Hash32:
        """Current state root hash."""
        ...
    @abstractmethod
    def has_root(self, state_root: bytes) -> bool:
        """Return whether ``state_root`` exists."""
        ...
    @abstractmethod
    def get_storage(self, address: Address, slot: int, from_journal: bool=True) -> int:
        """Return the storage value at ``slot`` for ``address``."""
        ...
    @abstractmethod
    def set_storage(self, address: Address, slot: int, value: int) -> None:
        """Write ``value`` into storage ``slot`` for ``address``."""
        ...
    @abstractmethod
    def delete_storage(self, address: Address) -> None:
        """Delete all storage for ``address``."""
        ...
    @abstractmethod
    def get_balance(self, address: Address) -> int:
        """Return the balance of ``address``."""
        ...
    @abstractmethod
    def set_balance(self, address: Address, balance: int) -> None:
        """Set the balance of ``address``."""
        ...
    @abstractmethod
    def get_nonce(self, address: Address) -> int:
        """Return the nonce of ``address``."""
        ...
    @abstractmethod
    def set_nonce(self, address: Address, nonce: int) -> None:
        """Set the nonce of ``address``."""
        ...
    @abstractmethod
    def increment_nonce(self, address: Address) -> None:
        """Increment the nonce of ``address``."""
        ...
    @abstractmethod
    def set_code(self, address: Address, code: bytes) -> None:
        """Set the code of ``address``."""
        ...
    @abstractmethod
    def get_code(self, address: Address) -> bytes:
        """Return the code of ``address``."""
        ...
    @abstractmethod
    def get_code_hash(self, address: Address) -> Hash32:
        """Return the hash of the code of ``address``."""
        ...
    @abstractmethod
    def delete_code(self, address: Address) -> None:
        """Delete the code of ``address``."""
        ...
    @abstractmethod
    def account_has_code_or_nonce(self, address: Address) -> bool:
        """Return whether ``address`` has code or a nonce set."""
        ...
    @abstractmethod
    def delete_account(self, address: Address) -> None:
        """Delete the account at ``address``."""
        ...
    @abstractmethod
    def account_exists(self, address: Address) -> bool:
        """Return whether an account exists at ``address``."""
        ...
    @abstractmethod
    def touch_account(self, address: Address) -> None:
        """Mark the account at ``address`` as touched."""
        ...
    @abstractmethod
    def account_is_empty(self, address: Address) -> bool:
        """Return whether the account at ``address`` is empty."""
        ...
    @abstractmethod
    def record(self) -> JournalDBCheckpoint:
        """Record and return a new journal checkpoint."""
        ...
    @abstractmethod
    def discard(self, checkpoint: JournalDBCheckpoint) -> None:
        """Discard all changes made since ``checkpoint``."""
        ...
    @abstractmethod
    def commit(self, checkpoint: JournalDBCheckpoint) -> None:
        """Commit all changes made since ``checkpoint``."""
        ...
    @abstractmethod
    def make_state_root(self) -> Hash32:
        """Compute and return the state root for the current account state."""
        ...
    @abstractmethod
    def persist(self) -> None:
        """Persist pending changes to the underlying database."""
        ...
class TransactionExecutorAPI(ABC):
    """Runs transactions against a VM state: validate, build message, execute, finalize."""
    @abstractmethod
    def __init__(self, vm_state: 'StateAPI') -> None:
        """Bind this executor to ``vm_state``."""
        ...
    @abstractmethod
    def __call__(self, transaction: SignedTransactionAPI) -> 'ComputationAPI':
        """Execute ``transaction`` and return the resulting computation."""
        ...
    @abstractmethod
    def validate_transaction(self, transaction: SignedTransactionAPI) -> None:
        """Validate ``transaction`` against the bound state; raise on failure."""
        ...
    @abstractmethod
    def build_evm_message(self, transaction: SignedTransactionAPI) -> MessageAPI:
        """Build the execution message for ``transaction``."""
        ...
    @abstractmethod
    def build_computation(self,
                          message: MessageAPI,
                          transaction: SignedTransactionAPI) -> 'ComputationAPI':
        """Run ``message`` for ``transaction`` and return the computation."""
        ...
    @abstractmethod
    def finalize_computation(self,
                             transaction: SignedTransactionAPI,
                             computation: 'ComputationAPI') -> 'ComputationAPI':
        """Apply post-execution bookkeeping to ``computation`` and return it."""
        ...
class ConfigurableAPI(ABC):
    """Mixin for deriving configured subclasses via :meth:`configure`."""
    @classmethod
    def configure(cls: Type[T],
                  __name__: str=None,
                  **overrides: Any) -> Type[T]:
        # NOTE(review): body is a stub here; semantics (presumably: return a
        # subclass named ``__name__`` with ``overrides`` applied as class
        # attributes) are defined by the implementation — confirm there.
        ...
class StateAPI(ConfigurableAPI):
execution_context: ExecutionContextAPI
computation_class: Type[ComputationAPI]
transaction_context_class: Type[TransactionContextAPI]
account_db_class: Type[AccountDatabaseAPI]
transaction_executor_class: Type[TransactionExecutorAPI] = None
@abstractmethod
def __init__(
self,
db: AtomicDatabaseAPI,
execution_context: ExecutionContextAPI,
state_root: bytes) -> None:
...
@property
@abstractmethod
def logger(self) -> ExtendedDebugLogger:
...
@property
@abstractmethod
def coinbase(self) -> Address:
...
@property
@abstractmethod
def timestamp(self) -> int:
...
@property
@abstractmethod
def block_number(self) -> int:
...
@property
@abstractmethod
def difficulty(self) -> int:
...
@property
@abstractmethod
def gas_limit(self) -> int:
...
@classmethod
@abstractmethod
def get_account_db_class(cls) -> Type[AccountDatabaseAPI]:
...
@property
@abstractmethod
def state_root(self) -> Hash32:
...
@abstractmethod
def make_state_root(self) -> Hash32:
...
@abstractmethod
def get_storage(self, address: Address, slot: int, from_journal: bool=True) -> int:
...
@abstractmethod
def set_storage(self, address: Address, slot: int, value: int) -> None:
...
@abstractmethod
def delete_storage(self, address: Address) -> None:
...
@abstractmethod
def delete_account(self, address: Address) -> None:
...
@abstractmethod
def get_balance(self, address: Address) -> int:
...
@abstractmethod
def set_balance(self, address: Address, balance: int) -> None:
...
@abstractmethod
def delta_balance(self, address: Address, delta: int) -> None:
...
@abstractmethod
def get_nonce(self, address: Address) -> int:
...
@abstractmethod
def set_nonce(self, address: Address, nonce: int) -> None:
...
@abstractmethod
def increment_nonce(self, address: Address) -> None:
...
@abstractmethod
def get_code(self, address: Address) -> bytes:
...
@abstractmethod
def set_code(self, address: Address, code: bytes) -> None:
...
@abstractmethod
def get_code_hash(self, address: Address) -> Hash32:
...
@abstractmethod
def delete_code(self, address: Address) -> None:
...
@abstractmethod
def has_code_or_nonce(self, address: Address) -> bool:
...
@abstractmethod
def account_exists(self, address: Address) -> bool:
...
@abstractmethod
def touch_account(self, address: Address) -> None:
...
@abstractmethod
def account_is_empty(self, address: Address) -> bool:
...
@abstractmethod
def snapshot(self) -> Tuple[Hash32, UUID]:
...
@abstractmethod
def revert(self, snapshot: Tuple[Hash32, UUID]) -> None:
...
@abstractmethod
def commit(self, snapshot: Tuple[Hash32, UUID]) -> None:
...
@abstractmethod
def persist(self) -> None:
...
@abstractmethod
def get_ancestor_hash(self, block_number: int) -> Hash32:
...
@abstractmethod
def get_computation(self,
message: MessageAPI,
transaction_context: TransactionContextAPI) -> ComputationAPI:
...
@classmethod
@abstractmethod
def get_transaction_context_class(cls) -> Type[TransactionContextAPI]:
...
@abstractmethod
def apply_transaction(self, transaction: SignedTransactionAPI) -> ComputationAPI:
...
@abstractmethod
def get_transaction_executor(self) -> TransactionExecutorAPI:
...
@abstractmethod
def costless_execute_transaction(self,
transaction: SignedTransactionAPI) -> ComputationAPI:
...
@abstractmethod
def override_transaction_context(self, gas_price: int) -> ContextManager[None]:
...
@abstractmethod
def validate_transaction(self, transaction: SignedTransactionAPI) -> None:
...
@classmethod
@abstractmethod
def get_transaction_context(cls,
transaction: SignedTransactionAPI) -> TransactionContextAPI:
...
class VirtualMachineAPI(ConfigurableAPI):
fork: str @abstractmethod
def __init__(self, header: BlockHeaderAPI, chaindb: ChainDatabaseAPI) -> None:
...
@property
@abstractmethod
def state(self) -> StateAPI:
...
@classmethod
@abstractmethod
def build_state(cls,
db: AtomicDatabaseAPI,
header: BlockHeaderAPI,
previous_hashes: Iterable[Hash32] = ()
) -> StateAPI:
...
@abstractmethod
def get_header(self) -> BlockHeaderAPI:
...
@abstractmethod
def get_block(self) -> BlockAPI:
...
#
# Execution
#
@abstractmethod
def apply_transaction(self,
header: BlockHeaderAPI,
transaction: SignedTransactionAPI
) -> Tuple[ReceiptAPI, ComputationAPI]:
...
@abstractmethod
def execute_bytecode(self,
origin: Address,
gas_price: int,
gas: int,
to: Address,
sender: Address,
value: int,
data: bytes,
code: bytes,
code_address: Address = None) -> ComputationAPI:
...
@abstractmethod
def apply_all_transactions(
self,
transactions: Sequence[SignedTransactionAPI],
base_header: BlockHeaderAPI
) -> Tuple[BlockHeaderAPI, Tuple[ReceiptAPI, ...], Tuple[ComputationAPI, ...]]:
...
@abstractmethod
def make_receipt(self,
base_header: BlockHeaderAPI,
transaction: SignedTransactionAPI,
computation: ComputationAPI,
state: StateAPI) -> ReceiptAPI:
...
#
# Mining
#
@abstractmethod
def import_block(self, block: BlockAPI) -> BlockAPI:
...
@abstractmethod
def mine_block(self, *args: Any, **kwargs: Any) -> BlockAPI:
...
@abstractmethod
def set_block_transactions(self,
base_block: BlockAPI,
new_header: BlockHeaderAPI,
transactions: Sequence[SignedTransactionAPI],
receipts: Sequence[ReceiptAPI]) -> BlockAPI:
...
#
# Finalization
#
@abstractmethod
def finalize_block(self, block: BlockAPI) -> BlockAPI:
...
@abstractmethod
def pack_block(self, block: BlockAPI, *args: Any, **kwargs: Any) -> BlockAPI:
...
#
# Headers
#
@abstractmethod
def add_receipt_to_header(self,
old_header: BlockHeaderAPI,
receipt: ReceiptAPI) -> BlockHeaderAPI:
...
@classmethod
@abstractmethod
def compute_difficulty(cls, parent_header: BlockHeaderAPI, timestamp: int) -> int:
...
@abstractmethod
def configure_header(self, **header_params: Any) -> BlockHeaderAPI:
...
@classmethod
@abstractmethod
def create_header_from_parent(cls,
parent_header: BlockHeaderAPI,
**header_params: Any) -> BlockHeaderAPI:
...
#
# Blocks
#
@classmethod
@abstractmethod
def generate_block_from_parent_header_and_coinbase(cls,
parent_header: BlockHeaderAPI,
coinbase: Address) -> BlockAPI:
...
@classmethod
@abstractmethod
def get_block_class(cls) -> Type[BlockAPI]:
...
@staticmethod
@abstractmethod
def get_block_reward() -> int:
...
@classmethod
@abstractmethod
def get_nephew_reward(cls) -> int:
...
@classmethod
@abstractmethod
def get_prev_hashes(cls,
last_block_hash: Hash32,
chaindb: ChainDatabaseAPI) -> Optional[Iterable[Hash32]]:
...
@staticmethod
@abstractmethod
def get_uncle_reward(block_number: int, uncle: BlockAPI) -> int:
...
#
# Transactions
#
@abstractmethod
def create_transaction(self, *args: Any, **kwargs: Any) -> SignedTransactionAPI:
...
@classmethod
@abstractmethod
def create_unsigned_transaction(cls,
*,
nonce: int,
gas_price: int,
gas: int,
to: Address,
value: int,
data: bytes) -> UnsignedTransactionAPI:
...
@classmethod
@abstractmethod
def get_transaction_class(cls) -> Type[SignedTransactionAPI]:
...
#
# Validate
#
@classmethod
@abstractmethod
def validate_receipt(self, receipt: ReceiptAPI) -> None:
...
@abstractmethod
def validate_block(self, block: BlockAPI) -> None:
...
@classmethod
@abstractmethod
def validate_header(cls,
header: BlockHeaderAPI,
parent_header: BlockHeaderAPI,
check_seal: bool = True
) -> None:
...
@abstractmethod
def validate_transaction_against_header(self,
base_header: BlockHeaderAPI,
transaction: SignedTransactionAPI) -> None:
...
@classmethod
@abstractmethod
def validate_seal(cls, header: BlockHeaderAPI) -> None:
...
@classmethod
@abstractmethod
def validate_uncle(cls,
block: BlockAPI,
uncle: BlockHeaderAPI,
uncle_parent: BlockHeaderAPI
) -> None:
...
#
# State
#
@classmethod
@abstractmethod
def get_state_class(cls) -> Type[StateAPI]:
...
@abstractmethod
def state_in_temp_block(self) -> ContextManager[StateAPI]:
...
class HeaderChainAPI(ABC):
header: BlockHeaderAPI
chain_id: int
vm_configuration: Tuple[Tuple[int, Type[VirtualMachineAPI]], ...]
@abstractmethod
def __init__(self, base_db: AtomicDatabaseAPI, header: BlockHeaderAPI = None) -> None:
...
#
# Chain Initialization API
#
@classmethod
@abstractmethod
def from_genesis_header(cls,
base_db: AtomicDatabaseAPI,
genesis_header: BlockHeaderAPI) -> 'HeaderChainAPI':
...
#
# Helpers
#
@classmethod
@abstractmethod
def get_headerdb_class(cls) -> Type[HeaderDatabaseAPI]:
...
#
# Canonical Chain API
#
@abstractmethod
def get_canonical_block_header_by_number(self, block_number: BlockNumber) -> BlockHeaderAPI:
...
@abstractmethod
def get_canonical_head(self) -> BlockHeaderAPI:
...
#
# Header API
#
@abstractmethod
def get_block_header_by_hash(self, block_hash: Hash32) -> BlockHeaderAPI:
...
@abstractmethod
def header_exists(self, block_hash: Hash32) -> bool:
...
@abstractmethod
def import_header(self,
header: BlockHeaderAPI,
) -> Tuple[Tuple[BlockHeaderAPI, ...], Tuple[BlockHeaderAPI, ...]]:
...
class ChainAPI(ConfigurableAPI):
vm_configuration: Tuple[Tuple[int, Type[VirtualMachineAPI]], ...]
chain_id: int
chaindb: ChainDatabaseAPI
#
# Helpers
#
@classmethod
@abstractmethod
def get_chaindb_class(cls) -> Type[ChainDatabaseAPI]:
...
#
# Chain API
#
@classmethod
@abstractmethod
def from_genesis(cls,
base_db: AtomicDatabaseAPI,
genesis_params: Dict[str, HeaderParams],
genesis_state: AccountState=None) -> 'ChainAPI':
...
@classmethod
@abstractmethod
def from_genesis_header(cls,
base_db: AtomicDatabaseAPI,
genesis_header: BlockHeaderAPI) -> 'ChainAPI':
...
#
# VM API
#
@classmethod
def get_vm_class(cls, header: BlockHeaderAPI) -> Type[VirtualMachineAPI]:
...
@abstractmethod
def get_vm(self, header: BlockHeaderAPI = None) -> VirtualMachineAPI:
...
@classmethod
def get_vm_class_for_block_number(cls, block_number: BlockNumber) -> Type[VirtualMachineAPI]:
...
#
# Header API
#
@abstractmethod
def create_header_from_parent(self,
parent_header: BlockHeaderAPI,
**header_params: HeaderParams) -> BlockHeaderAPI:
...
@abstractmethod
def get_block_header_by_hash(self, block_hash: Hash32) -> BlockHeaderAPI:
...
@abstractmethod
def get_canonical_head(self) -> BlockHeaderAPI:
...
@abstractmethod
def get_score(self, block_hash: Hash32) -> int:
...
#
# Block API
#
@abstractmethod
def get_ancestors(self, limit: int, header: BlockHeaderAPI) -> Tuple[BlockAPI, ...]:
...
@abstractmethod
def get_block(self) -> BlockAPI:
...
@abstractmethod
def get_block_by_hash(self, block_hash: Hash32) -> BlockAPI:
...
@abstractmethod
def get_block_by_header(self, block_header: BlockHeaderAPI) -> BlockAPI:
...
@abstractmethod
def get_canonical_block_by_number(self, block_number: BlockNumber) -> BlockAPI:
...
@abstractmethod
def get_canonical_block_hash(self, block_number: BlockNumber) -> Hash32:
...
@abstractmethod
def build_block_with_transactions(
self,
transactions: Tuple[SignedTransactionAPI, ...],
parent_header: BlockHeaderAPI = None
) -> Tuple[BlockAPI, Tuple[ReceiptAPI, ...], Tuple[ComputationAPI, ...]]:
...
#
# Transaction API
#
@abstractmethod
def create_transaction(self, *args: Any, **kwargs: Any) -> SignedTransactionAPI:
...
@abstractmethod
def create_unsigned_transaction(cls,
*,
nonce: int,
gas_price: int,
gas: int,
to: Address,
value: int,
data: bytes) -> UnsignedTransactionAPI:
...
@abstractmethod
def get_canonical_transaction(self, transaction_hash: Hash32) -> SignedTransactionAPI:
...
@abstractmethod
def get_transaction_receipt(self, transaction_hash: Hash32) -> ReceiptAPI:
...
#
# Execution API
#
@abstractmethod
def get_transaction_result(
self,
transaction: SignedTransactionAPI,
at_header: BlockHeaderAPI) -> bytes:
...
@abstractmethod
def estimate_gas(
self,
transaction: SignedTransactionAPI,
at_header: BlockHeaderAPI = None) -> int:
...
@abstractmethod
def import_block(self,
block: BlockAPI,
perform_validation: bool=True,
) -> Tuple[BlockAPI, Tuple[BlockAPI, ...], Tuple[BlockAPI, ...]]:
...
#
# Validation API
#
@abstractmethod
def validate_receipt(self, receipt: ReceiptAPI, at_header: BlockHeaderAPI) -> None:
...
@abstractmethod
def validate_block(self, block: BlockAPI) -> None:
...
@abstractmethod
def validate_seal(self, header: BlockHeaderAPI) -> None:
...
@abstractmethod
def validate_gaslimit(self, header: BlockHeaderAPI) -> None:
...
@abstractmethod
def validate_uncles(self, block: BlockAPI) -> None:
...
@classmethod
@abstractmethod
def validate_chain(
cls,
root: BlockHeaderAPI,
descendants: Tuple[BlockHeaderAPI, ...],
seal_check_random_sample_rate: int = 1) -> None:
...
class MiningChainAPI(ChainAPI):
header: BlockHeaderAPI
@abstractmethod
def __init__(self, base_db: AtomicDatabaseAPI, header: BlockHeaderAPI = None) -> None:
...
@abstractmethod
def apply_transaction(self,
transaction: SignedTransactionAPI
) -> Tuple[BlockAPI, ReceiptAPI, ComputationAPI]:
...
@abstractmethod
def import_block(self,
block: BlockAPI,
perform_validation: bool=True
) -> Tuple[BlockAPI, Tuple[BlockAPI, ...], Tuple[BlockAPI, ...]]:
...
@abstractmethod
def mine_block(self, *args: Any, **kwargs: Any) -> BlockAPI:
...
def get_vm(self, at_header: BlockHeaderAPI = None) -> VirtualMachineAPI:
...
| true | true |
f737d72abbe91157bf29e15c1466404568a312f2 | 1,415 | py | Python | discord/__init__.py | multiii/nextcord | f59737da889c8e15f829f66eba9d6203f18036c2 | [
"MIT"
] | 1 | 2022-03-25T14:10:51.000Z | 2022-03-25T14:10:51.000Z | discord/__init__.py | multiii/nextcord | f59737da889c8e15f829f66eba9d6203f18036c2 | [
"MIT"
] | null | null | null | discord/__init__.py | multiii/nextcord | f59737da889c8e15f829f66eba9d6203f18036c2 | [
"MIT"
] | null | null | null | """
The MIT License (MIT)
Copyright (c) 2021-present tag-epic
Permission is hereby granted, free of charge, to any person obtaining a
copy of this software and associated documentation files (the "Software"),
to deal in the Software without restriction, including without limitation
the rights to use, copy, modify, merge, publish, distribute, sublicense,
and/or sell copies of the Software, and to permit persons to whom the
Software is furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
DEALINGS IN THE SOFTWARE.
Module to allow for backwards compatibility for existing code and extensions
"""
# Re-export the entire nextcord API under the legacy `discord` module name so
# existing code and extensions written against discord.py keep working.
from nextcord import *
# Package metadata, mirrored from nextcord for backwards compatibility.
__title__ = 'nextcord'
__author__ = 'tag-epic & Rapptz'
__license__ = 'MIT'
__copyright__ = 'Copyright 2015-present Rapptz & tag-epic'
__version__ = '2.0.0a3'
# Extend the search path so this shim behaves as a namespace package.
__path__ = __import__('pkgutil').extend_path(__path__, __name__)
| 44.21875 | 76 | 0.792933 |
from nextcord import *
__title__ = 'nextcord'
__author__ = 'tag-epic & Rapptz'
__license__ = 'MIT'
__copyright__ = 'Copyright 2015-present Rapptz & tag-epic'
__version__ = '2.0.0a3'
__path__ = __import__('pkgutil').extend_path(__path__, __name__)
| true | true |
f737d7de8ac8142109ef6afc23040de2431836b6 | 3,038 | py | Python | netdispatch/test/test_agraph.py | GiulioRossetti/netdispatch | abba2c8ac0cf11ab0e0c001ab36d0a065554167c | [
"BSD-2-Clause"
] | null | null | null | netdispatch/test/test_agraph.py | GiulioRossetti/netdispatch | abba2c8ac0cf11ab0e0c001ab36d0a065554167c | [
"BSD-2-Clause"
] | 1 | 2020-05-12T10:17:32.000Z | 2020-05-12T10:29:03.000Z | netdispatch/test/test_agraph.py | GiulioRossetti/netdispatch | abba2c8ac0cf11ab0e0c001ab36d0a065554167c | [
"BSD-2-Clause"
] | 1 | 2021-05-05T13:44:09.000Z | 2021-05-05T13:44:09.000Z | __author__ = 'Giulio Rossetti'
__license__ = "BSD-Clause-2"
__email__ = "giulio.rossetti@gmail.com"
import unittest
import networkx as nx
try:
import igraph as ig
except ModuleNotFoundError:
ig = None
from netdispatch import AGraph
def from_nx_to_igraph(g, directed=False):
    """Convert a networkx graph into an igraph graph.

    :param g: networkx graph to convert (its node ids become igraph vertices)
    :param directed: whether the resulting igraph graph is directed
    :return: the equivalent ``igraph.Graph``, or ``None`` when python-igraph
        is not installed (module-level ``ig`` is ``None``)
    """
    if ig is None:
        # python-igraph is an optional dependency; callers must handle None.
        return None
    gi = ig.Graph(directed=directed)
    gi.add_vertices(list(g.nodes()))
    gi.add_edges(list(g.edges()))
    return gi
class AGTest(unittest.TestCase):
    """Check that AGraph exposes identical behaviour over networkx and igraph.

    Each test builds the karate-club graph, wraps it in AGraph twice (once as
    a networkx graph, once converted to igraph) and asserts both backends
    agree on the queried/mutated result.
    """
    # Edge count must be identical across backends.
    def test_number_of_edges(self):
        g = nx.karate_club_graph()
        ag = AGraph(g)
        n1 = ag.number_of_edges()
        g = from_nx_to_igraph(g)
        ag = AGraph(g)
        n2 = ag.number_of_edges()
        self.assertEqual(n1, n2)
    # Neighbour lists of node 1 must match.
    def test_neighbors(self):
        g = nx.karate_club_graph()
        ag = AGraph(g)
        n1 = ag.neighbors(1)
        g = from_nx_to_igraph(g)
        ag = AGraph(g)
        n2 = ag.neighbors(1)
        self.assertListEqual(n1, n2)
    # Edge-existence queries must match.
    def test_has_edge(self):
        g = nx.karate_club_graph()
        ag = AGraph(g)
        n1 = ag.has_edge(0, 1)
        g = from_nx_to_igraph(g)
        ag = AGraph(g)
        n2 = ag.has_edge(0, 1)
        self.assertEqual(n1, n2)
    # Predecessors on the directed version of the graph must match.
    def test_predecessors(self):
        g = nx.karate_club_graph()
        g1 = nx.to_directed(g)
        ag = AGraph(g1)
        n1 = ag.predecessors(1)
        g = from_nx_to_igraph(g1, directed=True)
        ag = AGraph(g)
        n2 = ag.predecessors(1)
        self.assertListEqual(n1, n2)
    # Successors on the directed version of the graph must match.
    def test_successors(self):
        g = nx.karate_club_graph()
        g1 = nx.to_directed(g)
        ag = AGraph(g1)
        n1 = ag.successors(1)
        g = from_nx_to_igraph(g1, directed=True)
        ag = AGraph(g)
        n2 = ag.successors(1)
        self.assertListEqual(n1, n2)
    # Adding edges (1,5) and (1,6) must extend node 1's neighbourhood
    # identically on both backends.
    def test_add_edges(self):
        g = nx.karate_club_graph()
        ag = AGraph(g)
        n1 = ag.neighbors(1)
        ag.add_edges(1, [5, 6])
        n2 = ag.neighbors(1)
        n1.extend([5, 6])
        self.assertListEqual(n2, n1)
        g = nx.karate_club_graph()
        g = from_nx_to_igraph(g)
        ag = AGraph(g)
        n3 = ag.neighbors(1)
        ag.add_edges(1, [5, 6])
        n4 = ag.neighbors(1)
        n3.extend([5, 6])
        self.assertListEqual(sorted(n3), sorted(n4))
        self.assertListEqual(sorted(n2), sorted(n4))
    # Removing edges (1,0) and (1,2) must shrink node 1's neighbourhood
    # identically on both backends.
    def test_remove_edges(self):
        g = nx.karate_club_graph()
        ag = AGraph(g)
        n1 = ag.neighbors(1)
        ag.remove_edges(1, [0, 2])
        n2 = ag.neighbors(1)
        n1 = [n for n in n1 if n not in [0, 2]]
        self.assertListEqual(n2, n1)
        g = nx.karate_club_graph()
        g = from_nx_to_igraph(g)
        ag = AGraph(g)
        n3 = ag.neighbors(1)
        ag.remove_edges(1, [0, 2])
        n4 = ag.neighbors(1)
        n3 = [n for n in n3 if n not in [0, 2]]
        self.assertListEqual(sorted(n3), sorted(n4))
        self.assertListEqual(sorted(n2), sorted(n4))
__license__ = "BSD-Clause-2"
__email__ = "giulio.rossetti@gmail.com"
import unittest
import networkx as nx
try:
import igraph as ig
except ModuleNotFoundError:
ig = None
from netdispatch import AGraph
def from_nx_to_igraph(g, directed=False):
if ig is not None:
gi = ig.Graph(directed=directed)
gi.add_vertices(list(g.nodes()))
gi.add_edges(list(g.edges()))
return gi
class AGTest(unittest.TestCase):
def test_number_of_edges(self):
g = nx.karate_club_graph()
ag = AGraph(g)
n1 = ag.number_of_edges()
g = from_nx_to_igraph(g)
ag = AGraph(g)
n2 = ag.number_of_edges()
self.assertEqual(n1, n2)
def test_neighbors(self):
g = nx.karate_club_graph()
ag = AGraph(g)
n1 = ag.neighbors(1)
g = from_nx_to_igraph(g)
ag = AGraph(g)
n2 = ag.neighbors(1)
self.assertListEqual(n1, n2)
def test_has_edge(self):
g = nx.karate_club_graph()
ag = AGraph(g)
n1 = ag.has_edge(0, 1)
g = from_nx_to_igraph(g)
ag = AGraph(g)
n2 = ag.has_edge(0, 1)
self.assertEqual(n1, n2)
def test_predecessors(self):
g = nx.karate_club_graph()
g1 = nx.to_directed(g)
ag = AGraph(g1)
n1 = ag.predecessors(1)
g = from_nx_to_igraph(g1, directed=True)
ag = AGraph(g)
n2 = ag.predecessors(1)
self.assertListEqual(n1, n2)
def test_successors(self):
g = nx.karate_club_graph()
g1 = nx.to_directed(g)
ag = AGraph(g1)
n1 = ag.successors(1)
g = from_nx_to_igraph(g1, directed=True)
ag = AGraph(g)
n2 = ag.successors(1)
self.assertListEqual(n1, n2)
def test_add_edges(self):
g = nx.karate_club_graph()
ag = AGraph(g)
n1 = ag.neighbors(1)
ag.add_edges(1, [5, 6])
n2 = ag.neighbors(1)
n1.extend([5, 6])
self.assertListEqual(n2, n1)
g = nx.karate_club_graph()
g = from_nx_to_igraph(g)
ag = AGraph(g)
n3 = ag.neighbors(1)
ag.add_edges(1, [5, 6])
n4 = ag.neighbors(1)
n3.extend([5, 6])
self.assertListEqual(sorted(n3), sorted(n4))
self.assertListEqual(sorted(n2), sorted(n4))
def test_remove_edges(self):
g = nx.karate_club_graph()
ag = AGraph(g)
n1 = ag.neighbors(1)
ag.remove_edges(1, [0, 2])
n2 = ag.neighbors(1)
n1 = [n for n in n1 if n not in [0, 2]]
self.assertListEqual(n2, n1)
g = nx.karate_club_graph()
g = from_nx_to_igraph(g)
ag = AGraph(g)
n3 = ag.neighbors(1)
ag.remove_edges(1, [0, 2])
n4 = ag.neighbors(1)
n3 = [n for n in n3 if n not in [0, 2]]
self.assertListEqual(sorted(n3), sorted(n4))
self.assertListEqual(sorted(n2), sorted(n4)) | true | true |
f737d870bb634c0df0c37ad6afb21ef085933639 | 1,439 | py | Python | src/python/pants/core/util_rules/distdir.py | rcuza/pants | 0429258b181986eed856ae45af93b776727774a0 | [
"Apache-2.0"
] | 1,806 | 2015-01-05T07:31:00.000Z | 2022-03-31T11:35:41.000Z | src/python/pants/core/util_rules/distdir.py | rcuza/pants | 0429258b181986eed856ae45af93b776727774a0 | [
"Apache-2.0"
] | 9,565 | 2015-01-02T19:01:59.000Z | 2022-03-31T23:25:16.000Z | src/python/pants/core/util_rules/distdir.py | ryanking/pants | e45b00d2eb467b599966bca262405a5d74d27bdd | [
"Apache-2.0"
] | 443 | 2015-01-06T20:17:57.000Z | 2022-03-31T05:28:17.000Z | # Copyright 2019 Pants project contributors (see CONTRIBUTORS.md).
# Licensed under the Apache License, Version 2.0 (see LICENSE).
from dataclasses import dataclass
from pathlib import Path
from pants.base.build_root import BuildRoot
from pants.engine.rules import collect_rules, rule
from pants.option.global_options import GlobalOptions
def is_child_of(path: Path, directory: Path) -> bool:
    """Return True if `path` is `directory` itself or lies underneath it.

    A relative `path` is interpreted as being relative to `directory` and is
    resolved before the containment check.
    """
    if path.is_absolute():
        candidate = path
    else:
        candidate = (directory / path).resolve()
    return candidate == directory or directory in candidate.parents
class InvalidDistDir(Exception):
    """Raised when `--pants-distdir` points outside of the build root."""
    pass
@dataclass(frozen=True)
class DistDir:
    """The directory to which we write distributable files."""
    # Path of the dist dir, always expressed relative to the build root.
    relpath: Path
@rule
async def get_distdir(global_options: GlobalOptions, buildroot: BuildRoot) -> DistDir:
    """Engine rule: resolve the configured `--pants-distdir` into a validated DistDir."""
    return validate_distdir(Path(global_options.options.pants_distdir), buildroot.pathlib_path)
def validate_distdir(distdir: Path, buildroot: Path) -> DistDir:
    """Check that `distdir` lives inside `buildroot` and normalize it.

    :param distdir: the configured dist dir; may be absolute or relative.
    :param buildroot: absolute path of the build root.
    :return: a DistDir whose relpath is relative to the build root.
    :raises InvalidDistDir: if an absolute `distdir` falls outside the build root.
    """
    if not is_child_of(distdir, buildroot):
        # Bug fix: the original adjacent f-string fragments joined without a
        # space, rendering "...build root.You set it to ...".
        raise InvalidDistDir(
            f"When set to an absolute path, `--pants-distdir` must be relative to the build "
            f"root. You set it to {distdir}. Instead, use a relative path or an absolute "
            f"path relative to the build root."
        )
    relpath = distdir.relative_to(buildroot) if distdir.is_absolute() else distdir
    return DistDir(relpath)
def rules():
    """Return this module's rules for registration with the Pants engine."""
    return collect_rules()
| 31.282609 | 98 | 0.739402 |
from dataclasses import dataclass
from pathlib import Path
from pants.base.build_root import BuildRoot
from pants.engine.rules import collect_rules, rule
from pants.option.global_options import GlobalOptions
def is_child_of(path: Path, directory: Path) -> bool:
abs_path = path if path.is_absolute() else directory.joinpath(path).resolve()
return directory == abs_path or directory in abs_path.parents
class InvalidDistDir(Exception):
pass
@dataclass(frozen=True)
class DistDir:
relpath: Path
@rule
async def get_distdir(global_options: GlobalOptions, buildroot: BuildRoot) -> DistDir:
return validate_distdir(Path(global_options.options.pants_distdir), buildroot.pathlib_path)
def validate_distdir(distdir: Path, buildroot: Path) -> DistDir:
if not is_child_of(distdir, buildroot):
raise InvalidDistDir(
f"When set to an absolute path, `--pants-distdir` must be relative to the build root."
f"You set it to {distdir}. Instead, use a relative path or an absolute path relative "
f"to the build root."
)
relpath = distdir.relative_to(buildroot) if distdir.is_absolute() else distdir
return DistDir(relpath)
def rules():
return collect_rules()
| true | true |
f737d8c91164f68db6e1e5eec9cb8c98f4a7a75b | 5,091 | py | Python | src/ipaddress.py | Stephixus/my-weather-indicator | 1e1f1984e08ba8d710d1af420c2e68aa9011dcb0 | [
"MIT"
] | 72 | 2016-02-12T18:18:50.000Z | 2022-02-11T12:37:53.000Z | src/ipaddress.py | Stephixus/my-weather-indicator | 1e1f1984e08ba8d710d1af420c2e68aa9011dcb0 | [
"MIT"
] | 82 | 2016-03-28T13:26:02.000Z | 2022-02-28T13:40:22.000Z | src/ipaddress.py | Stephixus/my-weather-indicator | 1e1f1984e08ba8d710d1af420c2e68aa9011dcb0 | [
"MIT"
] | 21 | 2016-02-18T05:31:26.000Z | 2022-03-03T03:15:31.000Z | #!/usr/bin/env python3
# -*- coding: utf-8 -*-
#
# This file is part of my-weather-indicator
#
# Copyright (c) 2012 Lorenzo Carbonell Cerezo <a.k.a. atareao>
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
import dbus
import comun
import re
import json
from functools import partial
from collections import namedtuple
from geocodeapi import get_inv_direction
def convert(dbus_obj):
    """Recursively convert a dbus-typed value into the matching Python type.

    :param dbus_obj: any value returned over D-Bus (ints, bools, floats,
        strings, arrays, structs, dictionaries, ...).
    :returns: the equivalent plain Python object; values whose type is not
        recognised are returned unchanged.
    """
    _isinstance = partial(isinstance, dbus_obj)
    # Each entry pairs a Python constructor with the dbus types it handles.
    ConvertType = namedtuple('ConvertType', 'pytype dbustypes')
    pyint = ConvertType(int, (dbus.Byte, dbus.Int16, dbus.Int32, dbus.Int64,
                              dbus.UInt16, dbus.UInt32, dbus.UInt64))
    pybool = ConvertType(bool, (dbus.Boolean, ))
    pyfloat = ConvertType(float, (dbus.Double, ))
    # Containers are converted element-by-element by recursing into convert().
    pylist = ConvertType(lambda _obj: list(map(convert, dbus_obj)),
                         (dbus.Array, ))
    pytuple = ConvertType(lambda _obj: tuple(map(convert, dbus_obj)),
                          (dbus.Struct, ))
    types_str = (dbus.ObjectPath, dbus.Signature, dbus.String)
    pystr = ConvertType(str, types_str)
    pydict = ConvertType(
        lambda _obj: dict(list(zip(list(map(convert, dbus_obj.keys())),
                                   list(map(convert, dbus_obj.values()))
                                   ))
                          ),
        (dbus.Dictionary, )
    )
    for conv in (pyint, pybool, pyfloat, pylist, pytuple, pystr, pydict):
        if any(map(_isinstance, conv.dbustypes)):
            return conv.pytype(dbus_obj)
    else:
        # for/else: only reached when no converter matched (the returns above
        # exit early), so unknown types pass through untouched.
        return dbus_obj
def get_current_location():
    """Best-effort lookup of the current (latitude, longitude).

    Tries the Geoclue D-Bus providers first and falls back to an
    IP-geolocation web service when the first attempt yields no fix (0, 0).
    """
    lat, lon = get_current_location_option1()
    if lat == 0 and lon == 0:
        lat, lon = get_current_location_option2()
    return lat, lon
def get_current_location_option1():
    '''Query the current (latitude, longitude) from Geoclue over D-Bus.

    Returns (0, 0) when neither provider answers; callers treat that pair
    as "no fix".
    '''
    latitude = 0
    longitude = 0
    bus = dbus.SessionBus()
    # For now we default to the UbuntuGeoIP provider and we fall back to
    # Hostip. We should probably be cleverer about provider detection, but
    # this solution works for now and does not rely solely on UbuntuGeoIP,
    # which means qreator can run on other distros
    try:
        geoclue = bus.get_object(
            'org.freedesktop.Geoclue.Providers.UbuntuGeoIP',
            '/org/freedesktop/Geoclue/Providers/UbuntuGeoIP')
        position_info = geoclue.GetPosition(
            dbus_interface='org.freedesktop.Geoclue.Position')
        # GetPosition returns (fields, timestamp, lat, lon, alt, accuracy);
        # indexes 2 and 3 are the coordinates.
        latitude = convert(position_info[2])
        longitude = convert(position_info[3])
    except dbus.exceptions.DBusException as e:
        print('Error 1', e)
        # Fallback provider: Hostip, same Geoclue interface.
        try:
            geoclue = bus.get_object(
                'org.freedesktop.Geoclue.Providers.Hostip',
                '/org/freedesktop/Geoclue/Providers/Hostip')
            position_info = geoclue.GetPosition(
                dbus_interface='org.freedesktop.Geoclue.Position')
            latitude = convert(position_info[2])
            longitude = convert(position_info[3])
        except dbus.exceptions.DBusException as e:
            print('Error 2', e)
    return latitude, longitude
def get_ip():
    """Return this machine's public IPv4 address as reported by whatismyip.org.

    NOTE(review): scrapes an HTTP page for the first dotted-quad pattern;
    presumably fragile if the site changes its markup -- verify before
    relying on it.
    """
    url = 'http://whatismyip.org'
    ans = comun.read_from_url(url)
    return re.compile(r'(\d+\.\d+\.\d+\.\d+)').search(ans).group(1)
def get_current_location_option2():
    """Fallback geolocation: ask ip-api.com for this IP's coordinates.

    :returns: (latitude, longitude), or (0, 0) on any failure. The broad
        except is deliberate best-effort handling: any network/JSON error
        is logged to stdout and swallowed.
    """
    try:
        url = 'http://ip-api.com/json'
        ans = json.loads(comun.read_from_url(url))
        return ans['lat'], ans['lon']
    except Exception as e:
        print(e)
        return 0, 0
def get_address_from_ip():
    """Reverse-geocode the current IP-derived location into an address."""
    latitude, longitude = get_current_location()
    return get_inv_direction(latitude, longitude)
if __name__ == "__main__":
# import requests
# r = requests.get("https://stackoverflow.com")
print(get_current_location_option2())
print('======')
print(get_current_location())
# print(get_address_from_ip())
| 35.852113 | 79 | 0.66431 |
import dbus
import comun
import re
import json
from functools import partial
from collections import namedtuple
from geocodeapi import get_inv_direction
def convert(dbus_obj):
_isinstance = partial(isinstance, dbus_obj)
ConvertType = namedtuple('ConvertType', 'pytype dbustypes')
pyint = ConvertType(int, (dbus.Byte, dbus.Int16, dbus.Int32, dbus.Int64,
dbus.UInt16, dbus.UInt32, dbus.UInt64))
pybool = ConvertType(bool, (dbus.Boolean, ))
pyfloat = ConvertType(float, (dbus.Double, ))
pylist = ConvertType(lambda _obj: list(map(convert, dbus_obj)),
(dbus.Array, ))
pytuple = ConvertType(lambda _obj: tuple(map(convert, dbus_obj)),
(dbus.Struct, ))
types_str = (dbus.ObjectPath, dbus.Signature, dbus.String)
pystr = ConvertType(str, types_str)
pydict = ConvertType(
lambda _obj: dict(list(zip(list(map(convert, dbus_obj.keys())),
list(map(convert, dbus_obj.values()))
))
),
(dbus.Dictionary, )
)
for conv in (pyint, pybool, pyfloat, pylist, pytuple, pystr, pydict):
if any(map(_isinstance, conv.dbustypes)):
return conv.pytype(dbus_obj)
else:
return dbus_obj
def get_current_location():
latitude, longitude = get_current_location_option1()
if latitude == 0 and longitude == 0:
latitude, longitude = get_current_location_option2()
return latitude, longitude
def get_current_location_option1():
latitude = 0
longitude = 0
bus = dbus.SessionBus()
try:
geoclue = bus.get_object(
'org.freedesktop.Geoclue.Providers.UbuntuGeoIP',
'/org/freedesktop/Geoclue/Providers/UbuntuGeoIP')
position_info = geoclue.GetPosition(
dbus_interface='org.freedesktop.Geoclue.Position')
latitude = convert(position_info[2])
longitude = convert(position_info[3])
except dbus.exceptions.DBusException as e:
print('Error 1', e)
try:
geoclue = bus.get_object(
'org.freedesktop.Geoclue.Providers.Hostip',
'/org/freedesktop/Geoclue/Providers/Hostip')
position_info = geoclue.GetPosition(
dbus_interface='org.freedesktop.Geoclue.Position')
latitude = convert(position_info[2])
longitude = convert(position_info[3])
except dbus.exceptions.DBusException as e:
print('Error 2', e)
return latitude, longitude
def get_ip():
url = 'http://whatismyip.org'
ans = comun.read_from_url(url)
return re.compile(r'(\d+\.\d+\.\d+\.\d+)').search(ans).group(1)
def get_current_location_option2():
try:
url = 'http://ip-api.com/json'
ans = json.loads(comun.read_from_url(url))
return ans['lat'], ans['lon']
except Exception as e:
print(e)
return 0, 0
def get_address_from_ip():
lat, lon = get_current_location()
ans = get_inv_direction(lat, lon)
return ans
if __name__ == "__main__":
print(get_current_location_option2())
print('======')
print(get_current_location())
| true | true |
f737dbad17fedcae0adcb0cc22750261edbf69cb | 411 | py | Python | gobi/urls.py | gowanpeter/gobi | 1a237f64ba79ac5878a178baf6179953d4e7a9c9 | [
"BSD-3-Clause"
] | null | null | null | gobi/urls.py | gowanpeter/gobi | 1a237f64ba79ac5878a178baf6179953d4e7a9c9 | [
"BSD-3-Clause"
] | null | null | null | gobi/urls.py | gowanpeter/gobi | 1a237f64ba79ac5878a178baf6179953d4e7a9c9 | [
"BSD-3-Clause"
] | null | null | null | from django.conf import settings
from django.conf.urls import patterns, include, url
from django.conf.urls.static import static
from django.contrib import admin
# Route table: only the Django admin is wired up. static() appends routes
# that serve uploaded media from MEDIA_ROOT (active in DEBUG mode only).
urlpatterns = patterns('',
    url(r'^admin/', include(admin.site.urls)),
) + static(settings.MEDIA_URL, document_root=settings.MEDIA_ROOT)
| 31.615385 | 65 | 0.705596 | from django.conf import settings
from django.conf.urls import patterns, include, url
from django.conf.urls.static import static
from django.contrib import admin
urlpatterns = patterns('',
url(r'^admin/', include(admin.site.urls)),
) + static(settings.MEDIA_URL, document_root=settings.MEDIA_ROOT)
| true | true |
f737dc677b981aac049756698710b380c1714b45 | 880 | py | Python | config.py | retroxz/bilidm | aa3e2f55e3a283afa1d470901d360b637e6cc2a0 | [
"MIT"
] | 3 | 2021-01-11T10:00:08.000Z | 2022-02-18T00:37:10.000Z | config.py | retroxz/bilidm | aa3e2f55e3a283afa1d470901d360b637e6cc2a0 | [
"MIT"
] | null | null | null | config.py | retroxz/bilidm | aa3e2f55e3a283afa1d470901d360b637e6cc2a0 | [
"MIT"
] | null | null | null | import yaml
import os
# 获取当前文件路径
filePath = os.path.dirname(__file__)
# 获取当前文件的Realpath
fileNamePath = os.path.split(os.path.realpath(__file__))[0]
# 获取配置文件的路径
yamlPath = os.path.join(fileNamePath, 'config.yml')
# 加上 ,encoding='utf-8',处理配置文件中含中文出现乱码的情况
yml_read = yaml.load(open(yamlPath, 'r', encoding='utf-8').read(), Loader=yaml.FullLoader)
"""使用装饰器创建类"""
def Blive(cls):
    """Class decorator: copy every key of the `BLive` config section onto cls."""
    Dict = yml_read['BLive']
    for name, value in Dict.items():
        setattr(cls, name, value)
    return cls
def database(cls):
    """Class decorator: copy every key of the `database` config section onto cls."""
    Dict = yml_read['database']
    for name, value in Dict.items():
        setattr(cls, name, value)
    return cls
def api(cls):
    """Class decorator: copy every key of the `api` config section onto cls."""
    Dict = yml_read['api']
    for name, value in Dict.items():
        setattr(cls, name, value)
    return cls
@Blive
class BLive(object):
    """Bilibili live settings, populated from the `BLive` section of config.yml."""
    pass
@database
class database(object):
    """Database settings, populated from the `database` section of config.yml."""
    pass
@api
class api(object):
pass | 17.959184 | 90 | 0.663636 | import yaml
import os
filePath = os.path.dirname(__file__)
fileNamePath = os.path.split(os.path.realpath(__file__))[0]
yamlPath = os.path.join(fileNamePath, 'config.yml')
yml_read = yaml.load(open(yamlPath, 'r', encoding='utf-8').read(), Loader=yaml.FullLoader)
def Blive(cls):
Dict = yml_read['BLive']
for name, value in Dict.items():
setattr(cls, name, value)
return cls
def database(cls):
Dict = yml_read['database']
for name, value in Dict.items():
setattr(cls, name, value)
return cls
def api(cls):
Dict = yml_read['api']
for name, value in Dict.items():
setattr(cls, name, value)
return cls
@Blive
class BLive(object):
pass
@database
class database(object):
pass
@api
class api(object):
pass | true | true |
f737dcbee86581be3c24f9846eb581aeddc5166d | 1,002 | py | Python | cirq/value/type_alias.py | lilies/Cirq | 519b8b70ba4d2d92d1c034c398161ebdbd23e2e7 | [
"Apache-2.0"
] | 1 | 2020-04-06T17:06:10.000Z | 2020-04-06T17:06:10.000Z | cirq/value/type_alias.py | lilies/Cirq | 519b8b70ba4d2d92d1c034c398161ebdbd23e2e7 | [
"Apache-2.0"
] | null | null | null | cirq/value/type_alias.py | lilies/Cirq | 519b8b70ba4d2d92d1c034c398161ebdbd23e2e7 | [
"Apache-2.0"
] | 1 | 2020-04-14T15:29:29.000Z | 2020-04-14T15:29:29.000Z | # Copyright 2019 The Cirq Developers
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from typing import Union
import sympy
from cirq._doc import document
"""Supply aliases for commonly used types.
"""
# Alias for anything usable as a key by a parameter resolver.
TParamKey = Union[str, sympy.Basic]
document(
    TParamKey,  # type: ignore
    """A parameter that a parameter resolver may map to a value.""")
# Alias for anything a parameter resolver may return as a resolved value.
TParamVal = Union[float, sympy.Basic]
document(
    TParamVal,  # type: ignore
    """A value that a parameter resolver may return for a parameter.""")
| 32.322581 | 74 | 0.742515 |
from typing import Union
import sympy
from cirq._doc import document
TParamKey = Union[str, sympy.Basic]
document(
TParamKey,
"""A parameter that a parameter resolver may map to a value.""")
TParamVal = Union[float, sympy.Basic]
document(
TParamVal,
"""A value that a parameter resolver may return for a parameter.""")
| true | true |
f737dce0a1abddb2fb4b7f3a12d1d24afb756f12 | 16,223 | py | Python | venv/lib/python3.6/site-packages/ansible_collections/sensu/sensu_go/tests/unit/plugins/module_utils/test_utils.py | usegalaxy-no/usegalaxy | 75dad095769fe918eb39677f2c887e681a747f3a | [
"MIT"
] | 1 | 2020-01-22T13:11:23.000Z | 2020-01-22T13:11:23.000Z | venv/lib/python3.6/site-packages/ansible_collections/sensu/sensu_go/tests/unit/plugins/module_utils/test_utils.py | usegalaxy-no/usegalaxy | 75dad095769fe918eb39677f2c887e681a747f3a | [
"MIT"
] | 12 | 2020-02-21T07:24:52.000Z | 2020-04-14T09:54:32.000Z | venv/lib/python3.6/site-packages/ansible_collections/sensu/sensu_go/tests/unit/plugins/module_utils/test_utils.py | usegalaxy-no/usegalaxy | 75dad095769fe918eb39677f2c887e681a747f3a | [
"MIT"
] | null | null | null | # -*- coding: utf-8 -*-
# Copyright: (c) 2019, XLAB Steampunk <steampunk@xlab.si>
#
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import absolute_import, division, print_function
__metaclass__ = type
import sys
import pytest
from ansible_collections.sensu.sensu_go.plugins.module_utils import (
errors, http, utils,
)
pytestmark = pytest.mark.skipif(
sys.version_info < (2, 7), reason="requires python2.7 or higher"
)
class TestSync:
    def test_absent_no_current_object(self, mocker):
        # Deleting an object that does not exist (GET -> 404) is a no-op.
        client = mocker.Mock()
        client.get.return_value = http.Response(404, "")
        changed, object = utils.sync("absent", client, "/path", {}, False)
        assert changed is False
        assert object is None
def test_absent_no_current_object_check(self, mocker):
client = mocker.Mock()
client.get.return_value = http.Response(404, "")
changed, object = utils.sync("absent", client, "/path", {}, True)
assert changed is False
assert object is None
def test_absent_current_object_present(self, mocker):
client = mocker.Mock()
client.get.return_value = http.Response(200, '{}')
client.delete.return_value = http.Response(204, "")
changed, object = utils.sync("absent", client, "/path", {}, False)
assert changed is True
assert object is None
client.delete.assert_called_with("/path")
def test_absent_current_object_present_check(self, mocker):
client = mocker.Mock()
client.get.return_value = http.Response(200, '{}')
client.delete.return_value = http.Response(204, "")
changed, object = utils.sync("absent", client, "/path", {}, True)
assert changed is True
assert object is None
client.delete.assert_not_called()
def test_present_no_current_object(self, mocker):
client = mocker.Mock()
client.get.side_effect = (
http.Response(404, ""),
http.Response(200, '{"new": "data"}'),
)
client.put.return_value = http.Response(201, "")
changed, object = utils.sync(
"present", client, "/path", {"my": "data"}, False,
)
assert changed is True
assert {"new": "data"} == object
client.put.assert_called_once_with("/path", {"my": "data"})
def test_present_no_current_object_check(self, mocker):
client = mocker.Mock()
client.get.return_value = http.Response(404, "")
changed, object = utils.sync(
"present", client, "/path", {"my": "data"}, True,
)
assert changed is True
assert {"my": "data"} == object
client.put.assert_not_called()
def test_present_current_object_differ(self, mocker):
client = mocker.Mock()
client.get.side_effect = (
http.Response(200, '{"current": "data"}'),
http.Response(200, '{"new": "data"}'),
)
client.put.return_value = http.Response(201, "")
changed, object = utils.sync(
"present", client, "/path", {"my": "data"}, False,
)
assert changed is True
assert {"new": "data"} == object
client.put.assert_called_once_with("/path", {"my": "data"})
def test_present_current_object_differ_check(self, mocker):
client = mocker.Mock()
client.get.return_value = http.Response(200, '{"current": "data"}')
changed, object = utils.sync(
"present", client, "/path", {"my": "data"}, True,
)
assert changed is True
assert {"my": "data"} == object
client.put.assert_not_called()
def test_present_current_object_does_not_differ(self, mocker):
client = mocker.Mock()
client.get.return_value = http.Response(200, '{"my": "data"}')
changed, object = utils.sync(
"present", client, "/path", {"my": "data"}, False,
)
assert changed is False
assert {"my": "data"} == object
client.put.assert_not_called()
def test_present_current_object_does_not_differ_check(self, mocker):
client = mocker.Mock()
client.get.return_value = http.Response(200, '{"my": "data"}')
changed, object = utils.sync(
"present", client, "/path", {"my": "data"}, True,
)
assert changed is False
assert {"my": "data"} == object
client.put.assert_not_called()
class TestSyncV1:
def test_parameter_passthrough(self, mocker):
sync_mock = mocker.patch.object(utils, "sync")
sync_mock.return_value = (True, {
"metadata": {"name": "test", "namespace": "space"},
"spec": {"key": "value"},
})
changed, object = utils.sync_v1("absent", "c", "/path", {}, False)
assert changed is True
assert {
"metadata": {"name": "test", "namespace": "space"},
"key": "value",
}
class TestDoDiffer:
def test_extra_keys_in_current_do_not_matter(self):
assert utils.do_differ({"a": "b", "c": 3}, {"a": "b"}) is False
def test_detect_different_values(self):
assert utils.do_differ({"a": "b"}, {"a": "c"}) is True
def test_detect_missing_keys_in_current(self):
assert utils.do_differ({"a": "b"}, {"c": "d"}) is True
def test_desired_none_values_are_ignored(self):
assert utils.do_differ({"a": "b"}, {"c": None}) is False
def test_metadata_ignores_created_by(self):
assert utils.do_differ(
dict(metadata=dict(a=1, created_by=2)),
dict(metadata=dict(a=1)),
) is False
def test_metadata_detects_change(self):
assert utils.do_differ(
dict(metadata=dict(a=1)), dict(metadata=dict(a=2)),
) is True
def test_metadata_detects_change_in_presence_of_created_by(self):
assert utils.do_differ(
dict(metadata=dict(a=1, created_by=2)),
dict(metadata=dict(a=2)),
) is True
def test_ignore_keys_do_not_affect_the_outcome(self):
assert utils.do_differ(dict(a=1), dict(a=2), "a") is False
def test_ignore_keys_do_not_mask_other_differences(self):
assert utils.do_differ(dict(a=1, b=1), dict(a=2, b=2), "a") is True
class TestDoDifferV1:
def test_extra_keys_in_current_do_not_matter(self):
assert utils.do_differ_v1(
{"spec": {"a": "b", "c": 3}}, {"spec": {"a": "b"}},
) is False
def test_detect_different_values(self):
assert utils.do_differ_v1(
{"spec": {"a": "b"}}, {"spec": {"a": "c"}},
) is True
def test_detect_missing_keys_in_current(self):
assert utils.do_differ_v1(
{"spec": {"a": "b"}}, {"spec": {"c": "d"}},
) is True
def test_desired_none_values_are_ignored(self):
assert utils.do_differ_v1(
{"spec": {"a": "b"}}, {"spec": {"c": None}},
) is False
def test_metadata_ignores_created_by(self):
assert utils.do_differ_v1(
{"metadata": {"a": 1, "created_by": 2}},
{"metadata": {"a": 1}},
) is False
def test_metadata_detects_change(self):
assert utils.do_differ_v1(
{"metadata": {"a": 1}}, {"metadata": {"a": 2}},
) is True
def test_metadata_detects_change_in_presence_of_created_by(self):
assert utils.do_differ_v1(
{"metadata": {"a": 1, "created_by": 2}},
{"metadata": {"a": 2}},
) is True
def test_ignore_keys_do_not_affect_the_outcome(self):
assert utils.do_differ_v1(
{"spec": {"a": 1}}, {"spec": {"a": 2}}, "a",
) is False
def test_ignore_keys_do_not_mask_other_differences(self):
assert utils.do_differ_v1(
{"spec": {"a": 1, "b": 1}}, {"spec": {"a": 2, "b": 2}}, "a",
) is True
class TestGet:
@pytest.mark.parametrize(
"status", [100, 201, 202, 203, 204, 400, 401, 403, 500, 501],
)
def test_abort_on_invalid_status(self, mocker, status):
client = mocker.Mock()
client.get.return_value = http.Response(status, "")
with pytest.raises(errors.SyncError, match=str(status)):
utils.get(client, "/get")
client.get.assert_called_once_with("/get")
def test_abort_on_invalid_json(self, mocker):
client = mocker.Mock()
client.get.return_value = http.Response(200, "")
with pytest.raises(errors.SyncError, match="JSON"):
utils.get(client, "/get")
client.get.assert_called_once_with("/get")
def test_ignore_invalid_json_on_404(self, mocker):
client = mocker.Mock()
client.get.return_value = http.Response(404, "")
object = utils.get(client, "/get")
assert object is None
client.get.assert_called_once_with("/get")
def test_valid_json(self, mocker):
client = mocker.Mock()
client.get.return_value = http.Response(200, '{"get": "data"}')
object = utils.get(client, "/get")
assert {"get": "data"} == object
client.get.assert_called_once_with("/get")
class TestDelete:
@pytest.mark.parametrize(
"status", [100, 200, 201, 202, 203, 400, 401, 403, 500, 501],
)
def test_abort_on_invalid_status(self, mocker, status):
client = mocker.Mock()
client.delete.return_value = http.Response(status, "")
with pytest.raises(errors.SyncError, match=str(status)):
utils.delete(client, "/delete")
client.delete.assert_called_once_with("/delete")
def test_valid_delete(self, mocker):
client = mocker.Mock()
client.delete.return_value = http.Response(204, "{}")
object = utils.delete(client, "/delete")
assert object is None
client.delete.assert_called_once_with("/delete")
class TestPut:
@pytest.mark.parametrize(
"status", [100, 202, 203, 204, 400, 401, 403, 500, 501],
)
def test_abort_on_invalid_status(self, mocker, status):
client = mocker.Mock()
client.put.return_value = http.Response(status, "")
with pytest.raises(errors.SyncError, match=str(status)):
utils.put(client, "/put", {"payload": "data"})
client.put.assert_called_once_with("/put", {"payload": "data"})
@pytest.mark.parametrize("status", [200, 201])
def test_valid_put(self, mocker, status):
client = mocker.Mock()
client.put.return_value = http.Response(status, '{"put": "data"}')
object = utils.put(client, "/put", {"payload": "data"})
assert object is None
client.put.assert_called_once_with("/put", {"payload": "data"})
class TestDictToSingleItemDicts:
def test_conversion(self):
result = utils.dict_to_single_item_dicts({"a": 0, 1: "b"})
assert 2 == len(result)
for item in ({"a": 0}, {1: "b"}):
assert item in result
class TestSingleItemDictsToDict:
def test_conversion(self):
assert dict(a=3, b=4, c=5) == utils.single_item_dicts_to_dict(
[dict(a=3), dict(b=4), dict(c=5)]
)
class TestDictToKeyValueString:
def test_conversion(self):
result = utils.dict_to_key_value_strings({"a": 0, 1: "b"})
assert set(("a=0", "1=b")) == set(result)
class TestBuildUrlPath:
@pytest.mark.parametrize("parts,expectation", [
((), "/"),
((None, None), "/"),
((None, "a", "b", None, None, "c"), "/a/b/c"),
(("get/rid of+stuff",), "/get%2Frid%20of%2Bstuff"),
(("/", " ", "a"), "/%2F/%20/a"),
])
def test_build_url_path_no_namespace(self, parts, expectation):
path = "/api/enterprise/store/v1" + expectation
assert path == utils.build_url_path(
"enterprise/store", "v1", None, *parts
)
@pytest.mark.parametrize("parts,expectation", [
((), "/"),
((None, None), "/"),
((None, "a", "b", None, None, "c"), "/a/b/c"),
(("get/rid of+stuff",), "/get%2Frid%20of%2Bstuff"),
(("/", " ", "a"), "/%2F/%20/a"),
])
def test_build_url_path_with_namespace(self, parts, expectation):
path = "/api/core/v2/namespaces/default" + expectation
assert path == utils.build_url_path(
"core", "v2", "default", *parts
)
class TestBuildCoreV2Path:
def test_build_path_no_namespace(self):
assert utils.build_core_v2_path(None, "a").startswith(
"/api/core/v2/",
)
def test_build_url_with_namespace(self):
assert utils.build_core_v2_path("default", "a").startswith(
"/api/core/v2/namespaces/default/",
)
class TestPrepareResultList:
@pytest.mark.parametrize("input,output", [
(None, []), # this is mosti likely result of a 404 status
("a", ["a"]),
([], []),
([1, 2, 3], [1, 2, 3]),
([None], [None]), # we leave lists intact, even if they contain None
])
def test_list_construction(self, input, output):
assert output == utils.prepare_result_list(input)
class TestConvertV1ToV2Response:
def test_none_passes_through(self):
assert utils.convert_v1_to_v2_response(None) is None
def test_spec_only_if_metadata_is_missing(self):
assert utils.convert_v1_to_v2_response(dict(
spec=dict(a=1, b=2),
)) == dict(a=1, b=2)
def test_add_metadata_from_toplevel(self):
assert utils.convert_v1_to_v2_response(dict(
metadata=dict(name="sample"),
spec=dict(a=1, b=2),
)) == dict(metadata=dict(name="sample"), a=1, b=2)
class TestDoSecretsDiffer:
@pytest.mark.parametrize("current,desired", [
( # All empty
[], [],
),
( # All is equal
[dict(name="a", secret="1"), dict(name="b", secret="2")],
[dict(name="a", secret="1"), dict(name="b", secret="2")],
),
( # Different order
[dict(name="a", secret="1"), dict(name="b", secret="2")],
[dict(name="b", secret="2"), dict(name="a", secret="1")],
),
])
def test_no_difference(self, current, desired):
assert utils.do_secrets_differ(
dict(secrets=current), dict(secrets=desired),
) is False
@pytest.mark.parametrize("current,desired", [
( # Different source for variable b
[dict(name="b", secret="2")], [dict(name="b", secret="3")],
),
( # Different name
[dict(name="a", secret="1")], [dict(name="b", secret="1")],
),
( # Different number of secrets
[dict(name="a", secret="1"), dict(name="b", secret="2")],
[dict(name="a", secret="1")],
),
])
def test_difference(self, current, desired):
assert utils.do_secrets_differ(
dict(secrets=current), dict(secrets=desired),
) is True
@pytest.mark.parametrize("secrets,diff", [
# Missing secrets and empty list are the same
([], False),
# None secrets are treated as empy list of secrets
(None, False),
# If anything is set, we have difference
([dict(name="n", secret="s")], True),
])
def test_missing_secrets(self, secrets, diff):
assert utils.do_secrets_differ(dict(), dict(secrets=secrets)) is diff
assert utils.do_secrets_differ(dict(secrets=secrets), dict()) is diff
class TestDeprecate:
def test_ansible_lt_2_9_10(self, mocker):
module = mocker.MagicMock()
module.deprecate.side_effect = (
TypeError("Simulating Ansible 2.9.9 and older"),
None, # Success, since no exception is raised
)
utils.deprecate(module, "Test msg", "3.2.1")
assert module.deprecate.call_count == 2
assert module.deprecate.called_once_with("Test msg", version="3.2.1")
def test_ansible_ge_2_9_10(self, mocker):
module = mocker.MagicMock()
utils.deprecate(module, "Test msg", "3.2.1")
assert module.deprecate.called_once_with(
"Test msg", version="3.2.1", collection_name="sensu.sensu_go",
)
| 32.973577 | 92 | 0.588177 |
from __future__ import absolute_import, division, print_function
__metaclass__ = type
import sys
import pytest
from ansible_collections.sensu.sensu_go.plugins.module_utils import (
errors, http, utils,
)
pytestmark = pytest.mark.skipif(
sys.version_info < (2, 7), reason="requires python2.7 or higher"
)
class TestSync:
def test_absent_no_current_object(self, mocker):
client = mocker.Mock()
client.get.return_value = http.Response(404, "")
changed, object = utils.sync("absent", client, "/path", {}, False)
assert changed is False
assert object is None
def test_absent_no_current_object_check(self, mocker):
client = mocker.Mock()
client.get.return_value = http.Response(404, "")
changed, object = utils.sync("absent", client, "/path", {}, True)
assert changed is False
assert object is None
def test_absent_current_object_present(self, mocker):
client = mocker.Mock()
client.get.return_value = http.Response(200, '{}')
client.delete.return_value = http.Response(204, "")
changed, object = utils.sync("absent", client, "/path", {}, False)
assert changed is True
assert object is None
client.delete.assert_called_with("/path")
def test_absent_current_object_present_check(self, mocker):
client = mocker.Mock()
client.get.return_value = http.Response(200, '{}')
client.delete.return_value = http.Response(204, "")
changed, object = utils.sync("absent", client, "/path", {}, True)
assert changed is True
assert object is None
client.delete.assert_not_called()
def test_present_no_current_object(self, mocker):
client = mocker.Mock()
client.get.side_effect = (
http.Response(404, ""),
http.Response(200, '{"new": "data"}'),
)
client.put.return_value = http.Response(201, "")
changed, object = utils.sync(
"present", client, "/path", {"my": "data"}, False,
)
assert changed is True
assert {"new": "data"} == object
client.put.assert_called_once_with("/path", {"my": "data"})
def test_present_no_current_object_check(self, mocker):
client = mocker.Mock()
client.get.return_value = http.Response(404, "")
changed, object = utils.sync(
"present", client, "/path", {"my": "data"}, True,
)
assert changed is True
assert {"my": "data"} == object
client.put.assert_not_called()
def test_present_current_object_differ(self, mocker):
client = mocker.Mock()
client.get.side_effect = (
http.Response(200, '{"current": "data"}'),
http.Response(200, '{"new": "data"}'),
)
client.put.return_value = http.Response(201, "")
changed, object = utils.sync(
"present", client, "/path", {"my": "data"}, False,
)
assert changed is True
assert {"new": "data"} == object
client.put.assert_called_once_with("/path", {"my": "data"})
def test_present_current_object_differ_check(self, mocker):
client = mocker.Mock()
client.get.return_value = http.Response(200, '{"current": "data"}')
changed, object = utils.sync(
"present", client, "/path", {"my": "data"}, True,
)
assert changed is True
assert {"my": "data"} == object
client.put.assert_not_called()
def test_present_current_object_does_not_differ(self, mocker):
client = mocker.Mock()
client.get.return_value = http.Response(200, '{"my": "data"}')
changed, object = utils.sync(
"present", client, "/path", {"my": "data"}, False,
)
assert changed is False
assert {"my": "data"} == object
client.put.assert_not_called()
def test_present_current_object_does_not_differ_check(self, mocker):
client = mocker.Mock()
client.get.return_value = http.Response(200, '{"my": "data"}')
changed, object = utils.sync(
"present", client, "/path", {"my": "data"}, True,
)
assert changed is False
assert {"my": "data"} == object
client.put.assert_not_called()
class TestSyncV1:
def test_parameter_passthrough(self, mocker):
sync_mock = mocker.patch.object(utils, "sync")
sync_mock.return_value = (True, {
"metadata": {"name": "test", "namespace": "space"},
"spec": {"key": "value"},
})
changed, object = utils.sync_v1("absent", "c", "/path", {}, False)
assert changed is True
assert {
"metadata": {"name": "test", "namespace": "space"},
"key": "value",
}
class TestDoDiffer:
def test_extra_keys_in_current_do_not_matter(self):
assert utils.do_differ({"a": "b", "c": 3}, {"a": "b"}) is False
def test_detect_different_values(self):
assert utils.do_differ({"a": "b"}, {"a": "c"}) is True
def test_detect_missing_keys_in_current(self):
assert utils.do_differ({"a": "b"}, {"c": "d"}) is True
def test_desired_none_values_are_ignored(self):
assert utils.do_differ({"a": "b"}, {"c": None}) is False
def test_metadata_ignores_created_by(self):
assert utils.do_differ(
dict(metadata=dict(a=1, created_by=2)),
dict(metadata=dict(a=1)),
) is False
def test_metadata_detects_change(self):
assert utils.do_differ(
dict(metadata=dict(a=1)), dict(metadata=dict(a=2)),
) is True
def test_metadata_detects_change_in_presence_of_created_by(self):
assert utils.do_differ(
dict(metadata=dict(a=1, created_by=2)),
dict(metadata=dict(a=2)),
) is True
def test_ignore_keys_do_not_affect_the_outcome(self):
assert utils.do_differ(dict(a=1), dict(a=2), "a") is False
def test_ignore_keys_do_not_mask_other_differences(self):
assert utils.do_differ(dict(a=1, b=1), dict(a=2, b=2), "a") is True
class TestDoDifferV1:
def test_extra_keys_in_current_do_not_matter(self):
assert utils.do_differ_v1(
{"spec": {"a": "b", "c": 3}}, {"spec": {"a": "b"}},
) is False
def test_detect_different_values(self):
assert utils.do_differ_v1(
{"spec": {"a": "b"}}, {"spec": {"a": "c"}},
) is True
def test_detect_missing_keys_in_current(self):
assert utils.do_differ_v1(
{"spec": {"a": "b"}}, {"spec": {"c": "d"}},
) is True
def test_desired_none_values_are_ignored(self):
assert utils.do_differ_v1(
{"spec": {"a": "b"}}, {"spec": {"c": None}},
) is False
def test_metadata_ignores_created_by(self):
assert utils.do_differ_v1(
{"metadata": {"a": 1, "created_by": 2}},
{"metadata": {"a": 1}},
) is False
def test_metadata_detects_change(self):
assert utils.do_differ_v1(
{"metadata": {"a": 1}}, {"metadata": {"a": 2}},
) is True
def test_metadata_detects_change_in_presence_of_created_by(self):
assert utils.do_differ_v1(
{"metadata": {"a": 1, "created_by": 2}},
{"metadata": {"a": 2}},
) is True
def test_ignore_keys_do_not_affect_the_outcome(self):
assert utils.do_differ_v1(
{"spec": {"a": 1}}, {"spec": {"a": 2}}, "a",
) is False
def test_ignore_keys_do_not_mask_other_differences(self):
assert utils.do_differ_v1(
{"spec": {"a": 1, "b": 1}}, {"spec": {"a": 2, "b": 2}}, "a",
) is True
class TestGet:
@pytest.mark.parametrize(
"status", [100, 201, 202, 203, 204, 400, 401, 403, 500, 501],
)
def test_abort_on_invalid_status(self, mocker, status):
client = mocker.Mock()
client.get.return_value = http.Response(status, "")
with pytest.raises(errors.SyncError, match=str(status)):
utils.get(client, "/get")
client.get.assert_called_once_with("/get")
def test_abort_on_invalid_json(self, mocker):
client = mocker.Mock()
client.get.return_value = http.Response(200, "")
with pytest.raises(errors.SyncError, match="JSON"):
utils.get(client, "/get")
client.get.assert_called_once_with("/get")
def test_ignore_invalid_json_on_404(self, mocker):
client = mocker.Mock()
client.get.return_value = http.Response(404, "")
object = utils.get(client, "/get")
assert object is None
client.get.assert_called_once_with("/get")
def test_valid_json(self, mocker):
client = mocker.Mock()
client.get.return_value = http.Response(200, '{"get": "data"}')
object = utils.get(client, "/get")
assert {"get": "data"} == object
client.get.assert_called_once_with("/get")
class TestDelete:
@pytest.mark.parametrize(
"status", [100, 200, 201, 202, 203, 400, 401, 403, 500, 501],
)
def test_abort_on_invalid_status(self, mocker, status):
client = mocker.Mock()
client.delete.return_value = http.Response(status, "")
with pytest.raises(errors.SyncError, match=str(status)):
utils.delete(client, "/delete")
client.delete.assert_called_once_with("/delete")
def test_valid_delete(self, mocker):
client = mocker.Mock()
client.delete.return_value = http.Response(204, "{}")
object = utils.delete(client, "/delete")
assert object is None
client.delete.assert_called_once_with("/delete")
class TestPut:
@pytest.mark.parametrize(
"status", [100, 202, 203, 204, 400, 401, 403, 500, 501],
)
def test_abort_on_invalid_status(self, mocker, status):
client = mocker.Mock()
client.put.return_value = http.Response(status, "")
with pytest.raises(errors.SyncError, match=str(status)):
utils.put(client, "/put", {"payload": "data"})
client.put.assert_called_once_with("/put", {"payload": "data"})
@pytest.mark.parametrize("status", [200, 201])
def test_valid_put(self, mocker, status):
client = mocker.Mock()
client.put.return_value = http.Response(status, '{"put": "data"}')
object = utils.put(client, "/put", {"payload": "data"})
assert object is None
client.put.assert_called_once_with("/put", {"payload": "data"})
class TestDictToSingleItemDicts:
def test_conversion(self):
result = utils.dict_to_single_item_dicts({"a": 0, 1: "b"})
assert 2 == len(result)
for item in ({"a": 0}, {1: "b"}):
assert item in result
class TestSingleItemDictsToDict:
def test_conversion(self):
assert dict(a=3, b=4, c=5) == utils.single_item_dicts_to_dict(
[dict(a=3), dict(b=4), dict(c=5)]
)
class TestDictToKeyValueString:
def test_conversion(self):
result = utils.dict_to_key_value_strings({"a": 0, 1: "b"})
assert set(("a=0", "1=b")) == set(result)
class TestBuildUrlPath:
@pytest.mark.parametrize("parts,expectation", [
((), "/"),
((None, None), "/"),
((None, "a", "b", None, None, "c"), "/a/b/c"),
(("get/rid of+stuff",), "/get%2Frid%20of%2Bstuff"),
(("/", " ", "a"), "/%2F/%20/a"),
])
def test_build_url_path_no_namespace(self, parts, expectation):
path = "/api/enterprise/store/v1" + expectation
assert path == utils.build_url_path(
"enterprise/store", "v1", None, *parts
)
@pytest.mark.parametrize("parts,expectation", [
((), "/"),
((None, None), "/"),
((None, "a", "b", None, None, "c"), "/a/b/c"),
(("get/rid of+stuff",), "/get%2Frid%20of%2Bstuff"),
(("/", " ", "a"), "/%2F/%20/a"),
])
def test_build_url_path_with_namespace(self, parts, expectation):
path = "/api/core/v2/namespaces/default" + expectation
assert path == utils.build_url_path(
"core", "v2", "default", *parts
)
class TestBuildCoreV2Path:
def test_build_path_no_namespace(self):
assert utils.build_core_v2_path(None, "a").startswith(
"/api/core/v2/",
)
def test_build_url_with_namespace(self):
assert utils.build_core_v2_path("default", "a").startswith(
"/api/core/v2/namespaces/default/",
)
class TestPrepareResultList:
@pytest.mark.parametrize("input,output", [
(None, []),
("a", ["a"]),
([], []),
([1, 2, 3], [1, 2, 3]),
([None], [None]),
])
def test_list_construction(self, input, output):
assert output == utils.prepare_result_list(input)
class TestConvertV1ToV2Response:
def test_none_passes_through(self):
assert utils.convert_v1_to_v2_response(None) is None
def test_spec_only_if_metadata_is_missing(self):
assert utils.convert_v1_to_v2_response(dict(
spec=dict(a=1, b=2),
)) == dict(a=1, b=2)
def test_add_metadata_from_toplevel(self):
assert utils.convert_v1_to_v2_response(dict(
metadata=dict(name="sample"),
spec=dict(a=1, b=2),
)) == dict(metadata=dict(name="sample"), a=1, b=2)
class TestDoSecretsDiffer:
@pytest.mark.parametrize("current,desired", [
(
[], [],
),
(
[dict(name="a", secret="1"), dict(name="b", secret="2")],
[dict(name="a", secret="1"), dict(name="b", secret="2")],
),
(
[dict(name="a", secret="1"), dict(name="b", secret="2")],
[dict(name="b", secret="2"), dict(name="a", secret="1")],
),
])
def test_no_difference(self, current, desired):
assert utils.do_secrets_differ(
dict(secrets=current), dict(secrets=desired),
) is False
@pytest.mark.parametrize("current,desired", [
(
[dict(name="b", secret="2")], [dict(name="b", secret="3")],
),
(
[dict(name="a", secret="1")], [dict(name="b", secret="1")],
),
(
[dict(name="a", secret="1"), dict(name="b", secret="2")],
[dict(name="a", secret="1")],
),
])
def test_difference(self, current, desired):
assert utils.do_secrets_differ(
dict(secrets=current), dict(secrets=desired),
) is True
@pytest.mark.parametrize("secrets,diff", [
([], False),
(None, False),
([dict(name="n", secret="s")], True),
])
def test_missing_secrets(self, secrets, diff):
assert utils.do_secrets_differ(dict(), dict(secrets=secrets)) is diff
assert utils.do_secrets_differ(dict(secrets=secrets), dict()) is diff
class TestDeprecate:
def test_ansible_lt_2_9_10(self, mocker):
module = mocker.MagicMock()
module.deprecate.side_effect = (
TypeError("Simulating Ansible 2.9.9 and older"),
None,
)
utils.deprecate(module, "Test msg", "3.2.1")
assert module.deprecate.call_count == 2
assert module.deprecate.called_once_with("Test msg", version="3.2.1")
def test_ansible_ge_2_9_10(self, mocker):
module = mocker.MagicMock()
utils.deprecate(module, "Test msg", "3.2.1")
assert module.deprecate.called_once_with(
"Test msg", version="3.2.1", collection_name="sensu.sensu_go",
)
| true | true |
f737dd83b98f688d966eab9144049062fdc319a7 | 3,338 | py | Python | eth/_utils/rlp.py | dbfreem/py-evm | 02a1f6f38884b1f7a89640c2095ea5b0f20687c3 | [
"MIT"
] | 1,641 | 2017-11-24T04:24:22.000Z | 2022-03-31T14:59:30.000Z | eth/_utils/rlp.py | dbfreem/py-evm | 02a1f6f38884b1f7a89640c2095ea5b0f20687c3 | [
"MIT"
] | 1,347 | 2017-11-23T10:37:36.000Z | 2022-03-20T16:31:44.000Z | eth/_utils/rlp.py | dbfreem/py-evm | 02a1f6f38884b1f7a89640c2095ea5b0f20687c3 | [
"MIT"
] | 567 | 2017-11-22T18:03:27.000Z | 2022-03-28T17:49:08.000Z | import rlp
from typing import (
Iterable,
Optional,
Tuple,
)
from eth_utils.toolz import (
curry,
)
from eth_utils import (
to_tuple,
ValidationError,
)
from eth.rlp.blocks import (
BaseBlock,
)
@to_tuple
def diff_rlp_object(left: BaseBlock,
right: BaseBlock) -> Optional[Iterable[Tuple[str, str, str]]]:
if left != right:
rlp_type = type(left)
for field_name, field_type in rlp_type._meta.fields:
left_value = getattr(left, field_name)
right_value = getattr(right, field_name)
if isinstance(field_type, type) and issubclass(field_type, rlp.Serializable):
sub_diff = diff_rlp_object(left_value, right_value)
for sub_field_name, sub_left_value, sub_right_value in sub_diff:
yield (
f"{field_name}.{sub_field_name}",
sub_left_value,
sub_right_value,
)
elif isinstance(field_type, (rlp.sedes.List, rlp.sedes.CountableList)):
if tuple(left_value) != tuple(right_value):
yield (
field_name,
left_value,
right_value,
)
elif left_value != right_value:
yield (
field_name,
left_value,
right_value,
)
else:
continue
def _humanized_diff_elements(
diff: Iterable[Tuple[str, str, str]],
obj_a_name: str,
obj_b_name: str) -> Iterable[str]:
longest_obj_name = max(len(obj_a_name), len(obj_b_name))
for field_name, a_val, b_val in diff:
if isinstance(a_val, int) and isinstance(b_val, int):
element_diff = b_val - a_val
if element_diff > 0:
element_diff_display = f" (+{element_diff})"
else:
element_diff_display = f" ({element_diff})"
else:
element_diff_display = ""
yield (
f"{field_name}:\n"
f" ({obj_a_name.ljust(longest_obj_name, ' ')}) : {a_val}\n"
f" ({obj_b_name.ljust(longest_obj_name, ' ')}) : {b_val}{element_diff_display}"
)
@curry
def validate_rlp_equal(obj_a: BaseBlock,
obj_b: BaseBlock,
obj_a_name: str = None,
obj_b_name: str = None) -> None:
if obj_a == obj_b:
return
if obj_a_name is None:
obj_a_name = obj_a.__class__.__name__ + '_a'
if obj_b_name is None:
obj_b_name = obj_b.__class__.__name__ + '_b'
diff = diff_rlp_object(obj_a, obj_b)
if len(diff) == 0:
raise TypeError(
f"{obj_a_name} ({obj_a!r}) != "
f"{obj_b_name} ({obj_b!r}) but got an empty diff"
)
err_fields = "\n - ".join(_humanized_diff_elements(diff, obj_a_name, obj_b_name))
error_message = (
f"Mismatch between {obj_a_name} and {obj_b_name} "
f"on {len(diff)} fields:\n - {err_fields}"
)
raise ValidationError(error_message)
validate_imported_block_unchanged = validate_rlp_equal(
obj_a_name="locally executed block",
obj_b_name="proposed block",
)
| 29.803571 | 94 | 0.549131 | import rlp
from typing import (
Iterable,
Optional,
Tuple,
)
from eth_utils.toolz import (
curry,
)
from eth_utils import (
to_tuple,
ValidationError,
)
from eth.rlp.blocks import (
BaseBlock,
)
@to_tuple
def diff_rlp_object(left: BaseBlock,
right: BaseBlock) -> Optional[Iterable[Tuple[str, str, str]]]:
if left != right:
rlp_type = type(left)
for field_name, field_type in rlp_type._meta.fields:
left_value = getattr(left, field_name)
right_value = getattr(right, field_name)
if isinstance(field_type, type) and issubclass(field_type, rlp.Serializable):
sub_diff = diff_rlp_object(left_value, right_value)
for sub_field_name, sub_left_value, sub_right_value in sub_diff:
yield (
f"{field_name}.{sub_field_name}",
sub_left_value,
sub_right_value,
)
elif isinstance(field_type, (rlp.sedes.List, rlp.sedes.CountableList)):
if tuple(left_value) != tuple(right_value):
yield (
field_name,
left_value,
right_value,
)
elif left_value != right_value:
yield (
field_name,
left_value,
right_value,
)
else:
continue
def _humanized_diff_elements(
diff: Iterable[Tuple[str, str, str]],
obj_a_name: str,
obj_b_name: str) -> Iterable[str]:
longest_obj_name = max(len(obj_a_name), len(obj_b_name))
for field_name, a_val, b_val in diff:
if isinstance(a_val, int) and isinstance(b_val, int):
element_diff = b_val - a_val
if element_diff > 0:
element_diff_display = f" (+{element_diff})"
else:
element_diff_display = f" ({element_diff})"
else:
element_diff_display = ""
yield (
f"{field_name}:\n"
f" ({obj_a_name.ljust(longest_obj_name, ' ')}) : {a_val}\n"
f" ({obj_b_name.ljust(longest_obj_name, ' ')}) : {b_val}{element_diff_display}"
)
@curry
def validate_rlp_equal(obj_a: BaseBlock,
obj_b: BaseBlock,
obj_a_name: str = None,
obj_b_name: str = None) -> None:
if obj_a == obj_b:
return
if obj_a_name is None:
obj_a_name = obj_a.__class__.__name__ + '_a'
if obj_b_name is None:
obj_b_name = obj_b.__class__.__name__ + '_b'
diff = diff_rlp_object(obj_a, obj_b)
if len(diff) == 0:
raise TypeError(
f"{obj_a_name} ({obj_a!r}) != "
f"{obj_b_name} ({obj_b!r}) but got an empty diff"
)
err_fields = "\n - ".join(_humanized_diff_elements(diff, obj_a_name, obj_b_name))
error_message = (
f"Mismatch between {obj_a_name} and {obj_b_name} "
f"on {len(diff)} fields:\n - {err_fields}"
)
raise ValidationError(error_message)
validate_imported_block_unchanged = validate_rlp_equal(
obj_a_name="locally executed block",
obj_b_name="proposed block",
)
| true | true |
f737df981cd0555e5579b6a739f205721d094d43 | 7,916 | py | Python | bball_sim/app.py | ian-shepherd/bball_sim | 119696eda8d1c1c96da4113c3a41659e1472ebc2 | [
"MIT"
] | null | null | null | bball_sim/app.py | ian-shepherd/bball_sim | 119696eda8d1c1c96da4113c3a41659e1472ebc2 | [
"MIT"
] | null | null | null | bball_sim/app.py | ian-shepherd/bball_sim | 119696eda8d1c1c96da4113c3a41659e1472ebc2 | [
"MIT"
] | null | null | null | # Packages
import streamlit as st
import pandas as pd
import seaborn as sns
import matplotlib.pyplot as plt
import matplotlib.patches as mpatches
import util
# Configure page: title and icon shown in the browser tab; "centered"
# keeps Streamlit's default (non-wide) content column.
st.set_page_config(page_title='End of Game NBA Simulator',
                   page_icon='https://raw.githubusercontent.com/papagorgio23/Python101/master/newlogo.png',
                   layout="centered")
# Load the player lookup table (display name + basketball-reference id
# only) and expose the names as a plain list, presumably for a Streamlit
# select widget further down this script -- confirm against the UI code.
cols = ['Player', 'bbref_id']
players = pd.read_csv('./player_data.csv', usecols=cols)
playerList = players['Player'].tolist()
def baseSimulation(n, t, diff, fouls1, fouls2, ot_prob):
    """Run the end-of-game simulation for both strategies and plot results.

    Runs ``n`` simulations of the final ``t`` seconds for each strategy
    (going for 2-point vs. 3-point shots), renders a stacked win/loss bar
    chart to the Streamlit page and prints the win percentage per strategy.

    Parameters
    ----------
    n : int
        Number of simulations to run per strategy.
    t : int
        Seconds remaining in the game.
    diff : int
        Point differential from the trailing team's perspective (<= 0).
    fouls1, fouls2 : int
        Team fouls committed by the leading / trailing team.
    ot_prob : float
        Probability (0-1) that the trailing team wins in overtime.

    Returns
    -------
    pandas.DataFrame
        One row per strategy with the number of simulations won
        ('Result') and the total number of simulations ('Sims').

    Notes
    -----
    Relies on the module-level globals ``df1``, ``df2``, ``rbPct1`` and
    ``rbPct2`` created by ``util.prepSim`` before this function is called.
    """
    # Accumulators for per-simulation outcomes
    simTypeList = []
    resultList = []
    overtimeList = []
    pointDiffList = []
    # Each iteration runs one game per strategy; the previously duplicated
    # 2pt/3pt code blocks are folded into a single inner loop.
    for _ in range(n):
        for shot_value, strategy in ((2, '2pt'), (3, '3pt')):
            result, overtime, pointDiff = util.runSim(
                shot_value,
                df1,
                df2,
                rbPct1,
                rbPct2,
                timeLeftInitial=t,
                pointDiffInitial=diff,
                teamFouls1Initial=fouls1,
                teamFouls2Initial=fouls2,
                overtimeProb=ot_prob)
            simTypeList.append(strategy)
            resultList.append(result)
            overtimeList.append(overtime)
            pointDiffList.append(pointDiff)
    # Aggregate the number of wins per strategy
    df = pd.DataFrame(zip(simTypeList, resultList, overtimeList, pointDiffList),
                      columns=['Strategy', 'Result', 'Overtime', 'Point_diff'])
    df = df.groupby(['Strategy'])[['Result']].sum().reset_index()
    df['Sims'] = n
    # Stacked bar chart: total sims (losses, drawn first) behind wins
    sns.set(style="darkgrid")
    fig = plt.figure(figsize=(12, 8))
    sns.barplot(x='Strategy', y='Sims', data=df, estimator=sum, ci=None, color='lightcoral')
    sns.barplot(x='Strategy', y='Result', data=df, color='dodgerblue')
    # Legend
    top_bar = mpatches.Patch(color='lightcoral', label='Loss')
    bottom_bar = mpatches.Patch(color='dodgerblue', label='Win')
    plt.legend(bbox_to_anchor=(1, 1), borderaxespad=0, frameon=False, ncol=2,
               handles=[bottom_bar, top_bar])
    # Formatting
    plt.ylabel("# of Simulations")
    plt.title("Result of " + str(n) + " Simulations by Strategy")
    st.pyplot(fig)
    # Win percentages; groupby sorts alphabetically so row 0 is '2pt'.
    # (Typo "similations" fixed in the user-facing text.)
    st.write(str(round(df.loc[0, 'Result'] / n * 100, 1)) + '% of 2pt strategy simulations won')
    st.write(str(round(df.loc[1, 'Result'] / n * 100, 1)) + '% of 3pt strategy simulations won')
    return df
# Page header
st.title("End of NBA Game Simulator")
st.subheader(
    "_Adjust the inputs in the sidebar and click apply to view the results of the simulation_"
)
# Sidebar: run button
buton1 = st.sidebar.button("Run")
# Sidebar: game-state inputs
n = st.sidebar.number_input("number of simulations", min_value=100, max_value=1000000, value=1000)
t = st.sidebar.number_input("seconds remaining", min_value=1, max_value=60, value=30)
diff = st.sidebar.number_input("point differential", min_value=-10, max_value=0, value=-3)
fouls1 = st.sidebar.number_input("fouls committed by leading team", min_value=0, max_value=10, value=5)
fouls2 = st.sidebar.number_input("fouls committed by trailing team", min_value=0, max_value=10, value=5)
ot_prob = st.sidebar.number_input("overtime win probability (%)", min_value=0, max_value=100, value=50) / 100
# Sidebar: trailing-team roster; options are "Name\bbref_id" strings
st.sidebar.write("")
st.sidebar.write("Trailing Team")
player1 = st.sidebar.selectbox("player1", playerList, playerList.index("Kemba Walker\\walkeke02"))
player2 = st.sidebar.selectbox("player2", playerList, playerList.index("Marcus Smart\\smartma01"))
player3 = st.sidebar.selectbox("player3", playerList, playerList.index("Jaylen Brown\\brownja02"))
player4 = st.sidebar.selectbox("player4", playerList, playerList.index("Jayson Tatum\\tatumja01"))
player5 = st.sidebar.selectbox("player5", playerList, playerList.index("Grant Williams\\willigr01"))
# Sidebar: leading-team roster
st.sidebar.write("Leading Team")
player6 = st.sidebar.selectbox("player6", playerList, playerList.index("Ben Simmons\\simmobe01"))
player7 = st.sidebar.selectbox("player7", playerList, playerList.index("Seth Curry\\curryse01"))
player8 = st.sidebar.selectbox("player8", playerList, playerList.index("Danny Green\\greenda02"))
player9 = st.sidebar.selectbox("player9", playerList, playerList.index("Tobias Harris\\harrito02"))
player10 = st.sidebar.selectbox("player10", playerList, playerList.index("Joel Embiid\\embiijo01"))
# On click: strip display names down to bbref ids, prepare the two team
# datasets (module-level globals consumed by baseSimulation), then run.
if buton1:
    with st.spinner("Running simulations..."):
        team1 = [player1.rsplit('\\',1)[1], player2.rsplit('\\',1)[1], player3.rsplit('\\',1)[1], player4.rsplit('\\',1)[1], player5.rsplit('\\',1)[1]]
        team2 = [player6.rsplit('\\',1)[1], player7.rsplit('\\',1)[1], player8.rsplit('\\',1)[1], player9.rsplit('\\',1)[1], player10.rsplit('\\',1)[1]]
        df1, df2, rbPct1, rbPct2 = util.prepSim(team1, team2)
        baseSimulation(n, t, diff, fouls1, fouls2, ot_prob)
about = st.expander('Simulation Info')
with about:
"""
This is an end of NBA game simulator based on player statistics for the 2020-2021 NBA season. You can select the same
player to both teams but you cannot put a player on the same team twice. There are also dummy players that act as a
representative player of that position. The simulator assumes the outcome of every possession is a made shot, missed
shot with the potential of a rebound, or intentional foul. It will not account for turnovers or blocks. The time taken
by each possession is based on a normal distribution accounting for what is in the best interest of the team. For example,
the simulation assumes the trailing team will take an average of 4 seconds but if the game is tied, that team will try
and maximize the amount of time taken so that mean is changed to the time remaining - 1.5 seconds. The shooter is also
determined by a composite rating that ranks players by number of that specific shot (free throw, 2 pt, 3 pt) taken per
game and their success rate. Players are then assigned a probability of being the selected shooter. Rebounds on the other
hand are determined by a team liklihood that compares the rebounding of the two teams to determine each team's liklihood
of successfully getting a rebound.
""" | 47.119048 | 153 | 0.615589 |
import streamlit as st
import pandas as pd
import seaborn as sns
import matplotlib.pyplot as plt
import matplotlib.patches as mpatches
import util
st.set_page_config(page_title='End of Game NBA Simulator',
page_icon='https://raw.githubusercontent.com/papagorgio23/Python101/master/newlogo.png',
layout="centered")
cols = ['Player', 'bbref_id']
players = pd.read_csv('./player_data.csv', usecols=cols)
playerList = players['Player'].tolist()
def baseSimulation(n, t, diff, fouls1, fouls2, ot_prob):
simTypeList = []
resultList = []
overtimeList = []
pointDiffList = []
for i in range(0, n):
result, overtime, pointDiff = util.runSim(2,
df1,
df2,
rbPct1,
rbPct2,
timeLeftInitial=t,
pointDiffInitial=diff,
teamFouls1Initial=fouls1,
teamFouls2Initial=fouls2,
overtimeProb=ot_prob)
simTypeList.append('2pt')
resultList.append(result)
overtimeList.append(overtime)
pointDiffList.append(pointDiff)
result, overtime, pointDiff = util.runSim(3,
df1,
df2,
rbPct1,
rbPct2,
timeLeftInitial=t,
pointDiffInitial=diff,
teamFouls1Initial=fouls1,
teamFouls2Initial=fouls2,
overtimeProb=ot_prob)
simTypeList.append('3pt')
resultList.append(result)
overtimeList.append(overtime)
pointDiffList.append(pointDiff)
df = pd.DataFrame(zip(simTypeList, resultList, overtimeList, pointDiffList),
columns=['Strategy', 'Result', 'Overtime', 'Point_diff'])
df = df.groupby(['Strategy'])[['Result']].sum().reset_index()
df['Sims'] = n
sns.set(style="darkgrid")
fig = plt.figure(figsize=(12, 8))
bar1 = sns.barplot(x='Strategy', y='Sims', data=df, estimator=sum, ci=None, color='lightcoral')
bar2 = sns.barplot(x='Strategy', y='Result', data=df, color='dodgerblue')
top_bar = mpatches.Patch(color='lightcoral', label='Loss')
bottom_bar = mpatches.Patch(color='dodgerblue', label='Win')
plt.legend(bbox_to_anchor=(1,1), borderaxespad=0, frameon=False, ncol=2, handles=[bottom_bar, top_bar])
plt.ylabel("# of Simulations")
plt.title("Result of " + str(n) + " Simulations by Strategy")
st.pyplot(fig)
st.write(str(round(df.loc[0,'Result'] / n * 100, 1)) + '% of 2pt strategy similations won')
st.write(str(round(df.loc[1,'Result'] / n * 100, 1)) + '% of 3pt strategy similations won')
return df
st.title("End of NBA Game Simulator")
st.subheader(
"_Adjust the inputs in the sidebar and click apply to view the results of the simulation_"
)
buton1 = st.sidebar.button("Run")
n = st.sidebar.number_input("number of simulations", min_value=100, max_value=1000000, value=1000)
t = st.sidebar.number_input("seconds remaining", min_value=1, max_value=60, value=30)
diff = st.sidebar.number_input("point differential", min_value=-10, max_value=0, value=-3)
fouls1 = st.sidebar.number_input("fouls committed by leading team", min_value=0, max_value=10, value=5)
fouls2 = st.sidebar.number_input("fouls committed by trailing team", min_value=0, max_value=10, value=5)
ot_prob = st.sidebar.number_input("overtime win probability (%)", min_value=0, max_value=100, value=50) / 100
st.sidebar.write("")
st.sidebar.write("Trailing Team")
player1 = st.sidebar.selectbox("player1", playerList, playerList.index("Kemba Walker\\walkeke02"))
player2 = st.sidebar.selectbox("player2", playerList, playerList.index("Marcus Smart\\smartma01"))
player3 = st.sidebar.selectbox("player3", playerList, playerList.index("Jaylen Brown\\brownja02"))
player4 = st.sidebar.selectbox("player4", playerList, playerList.index("Jayson Tatum\\tatumja01"))
player5 = st.sidebar.selectbox("player5", playerList, playerList.index("Grant Williams\\willigr01"))
st.sidebar.write("Leading Team")
player6 = st.sidebar.selectbox("player6", playerList, playerList.index("Ben Simmons\\simmobe01"))
player7 = st.sidebar.selectbox("player7", playerList, playerList.index("Seth Curry\\curryse01"))
player8 = st.sidebar.selectbox("player8", playerList, playerList.index("Danny Green\\greenda02"))
player9 = st.sidebar.selectbox("player9", playerList, playerList.index("Tobias Harris\\harrito02"))
player10 = st.sidebar.selectbox("player10", playerList, playerList.index("Joel Embiid\\embiijo01"))
if buton1:
with st.spinner("Running simulations..."):
team1 = [player1.rsplit('\\',1)[1], player2.rsplit('\\',1)[1], player3.rsplit('\\',1)[1], player4.rsplit('\\',1)[1], player5.rsplit('\\',1)[1]]
team2 = [player6.rsplit('\\',1)[1], player7.rsplit('\\',1)[1], player8.rsplit('\\',1)[1], player9.rsplit('\\',1)[1], player10.rsplit('\\',1)[1]]
df1, df2, rbPct1, rbPct2 = util.prepSim(team1, team2)
baseSimulation(n, t, diff, fouls1, fouls2, ot_prob)
about = st.expander('Simulation Info')
with about:
| true | true |
f737df9a398aded3b4b036831bbc3efcec6afb8d | 2,827 | py | Python | docs/conf.py | jules-ch/wind-stats | e9b7cc1e5700ca092d6fd209bd75f83924ef2467 | [
"MIT"
] | 1 | 2022-03-18T01:48:31.000Z | 2022-03-18T01:48:31.000Z | docs/conf.py | jules-ch/wind-stats | e9b7cc1e5700ca092d6fd209bd75f83924ef2467 | [
"MIT"
] | 2 | 2021-04-05T18:52:50.000Z | 2021-11-12T11:36:41.000Z | docs/conf.py | jules-ch/wind-stats | e9b7cc1e5700ca092d6fd209bd75f83924ef2467 | [
"MIT"
] | 1 | 2022-03-18T02:30:09.000Z | 2022-03-18T02:30:09.000Z | # Configuration file for the Sphinx documentation builder.
#
# This file only contains a selection of the most common options. For a full
# list see the documentation:
# https://www.sphinx-doc.org/en/master/usage/configuration.html
# -- Path setup --------------------------------------------------------------
# If extensions (or modules to document with autodoc) are in another directory,
# add these directories to sys.path here. If the directory is relative to the
# documentation root, use os.path.abspath to make it absolute, like shown here.
#
import os
import sys
import wind_stats
sys.path.insert(0, os.path.abspath("."))
sys.path.insert(0, os.path.abspath(os.path.join("..", "..")))
# -- Project information -----------------------------------------------------
project = "Wind Stats"
copyright = "2020, Jules Chéron"
author = "Jules Chéron"
# The full version, including alpha/beta/rc tags
release = wind_stats.__version__
needs_sphinx = "3.0"
# -- General configuration ---------------------------------------------------
# Add any Sphinx extension module names here, as strings. They can be
# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom
# ones.
extensions = [
"nbsphinx",
"numpydoc",
"sphinx.ext.autodoc",
"sphinx.ext.autosummary",
"sphinx.ext.mathjax",
"sphinx.ext.todo",
"numpydoc",
"matplotlib.sphinxext.plot_directive",
"IPython.sphinxext.ipython_directive",
"IPython.sphinxext.ipython_console_highlighting",
]
autosummary_generate = True
autosummary_imported_members = True
autoclass_content = "both"
# Add any paths that contain templates here, relative to this directory.
templates_path = ["_templates"]
# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
# This pattern also affects html_static_path and html_extra_path.
exclude_patterns = ["build"]
# -- Options for HTML output -------------------------------------------------
# The theme to use for HTML and HTML Help pages. See the documentation for
# a list of builtin themes.
#
html_theme = "pydata_sphinx_theme"
html_theme_options = {
"github_url": "https://github.com/jules-ch/wind-stats",
}
html_logo = "_static/logo-wind-stats.png"
# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
html_static_path = ["_static"]
html_css_files = [
"css/wind-stats.css",
]
# numpydoc
numpydoc_attributes_as_param_list = False
# The name of the Pygments (syntax highlighting) style to use.
pygments_style = "sphinx"
numfig = True
math_numfig = True
math_eqref_format = "({number})"
todo_include_todos = True
| 27.990099 | 79 | 0.684117 |
import os
import sys
import wind_stats
sys.path.insert(0, os.path.abspath("."))
sys.path.insert(0, os.path.abspath(os.path.join("..", "..")))
project = "Wind Stats"
copyright = "2020, Jules Chéron"
author = "Jules Chéron"
release = wind_stats.__version__
needs_sphinx = "3.0"
extensions = [
"nbsphinx",
"numpydoc",
"sphinx.ext.autodoc",
"sphinx.ext.autosummary",
"sphinx.ext.mathjax",
"sphinx.ext.todo",
"numpydoc",
"matplotlib.sphinxext.plot_directive",
"IPython.sphinxext.ipython_directive",
"IPython.sphinxext.ipython_console_highlighting",
]
autosummary_generate = True
autosummary_imported_members = True
autoclass_content = "both"
templates_path = ["_templates"]
exclude_patterns = ["build"]
html_theme = "pydata_sphinx_theme"
html_theme_options = {
"github_url": "https://github.com/jules-ch/wind-stats",
}
html_logo = "_static/logo-wind-stats.png"
html_static_path = ["_static"]
html_css_files = [
"css/wind-stats.css",
]
numpydoc_attributes_as_param_list = False
pygments_style = "sphinx"
numfig = True
math_numfig = True
math_eqref_format = "({number})"
todo_include_todos = True
| true | true |
f737dfd5f803aa88aa6da44d301b6cfd14a79352 | 1,988 | py | Python | test/test_events.py | mempoolco/aiodiskdb | 8d162b637e7059d3d105716e1eba60851258101a | [
"MIT"
] | 4 | 2021-06-04T06:03:06.000Z | 2021-06-09T14:24:12.000Z | test/test_events.py | mempoolco/aiodiskdb | 8d162b637e7059d3d105716e1eba60851258101a | [
"MIT"
] | 9 | 2021-06-07T14:50:12.000Z | 2021-06-13T12:24:00.000Z | test/test_events.py | mempoolco/aiodiskdb | 8d162b637e7059d3d105716e1eba60851258101a | [
"MIT"
] | null | null | null | import asyncio
from unittest import IsolatedAsyncioTestCase
from aiodiskdb import exceptions
from aiodiskdb.aiodiskdb import AioDiskDB
from aiodiskdb.local_types import EventsHandlers
class TestEventsHandlerStrictTyping(IsolatedAsyncioTestCase):
    """Verify EventsHandlers only accepts async callables for its hooks."""
    def setUp(self) -> None:
        # System under test plus a counter tracking dummy-handler calls
        self.sut = EventsHandlers()
        self._dummy_ex = 0
    async def _dummy(self, *a, **kw):
        # Valid (async) handler; records that it was invoked
        self._dummy_ex += 1
    async def test(self):
        # A plain (non-async) callable must be rejected
        with self.assertRaises(TypeError):
            self.sut.on_start = lambda w: ''
        # An async callable is accepted and actually invoked
        self.sut.on_start = self._dummy
        await self.sut.on_start()
        # A handler may be cleared by assigning None
        self.sut.on_start = None
        self.assertEqual(1, self._dummy_ex)
class AioDiskDBTestCase(IsolatedAsyncioTestCase):
    """Base test case that provisions a throwaway AioDiskDB instance."""
    # Filesystem location of the scratch database
    _path = '/tmp/aiodiskdb_test'
    def setUp(self, max_file_size=128, max_buffer_size=16, overwrite=True):
        self.loop = asyncio.get_event_loop()
        # First build with overwrite=True so any leftovers from a previous
        # run can be wiped, then rebuild with the requested overwrite flag.
        self._overwrite = True
        self._max_file_size = max_file_size
        self._max_buffer_size = max_buffer_size
        self._setup_sut()
        self.sut.destroy_db()
        self._overwrite = overwrite
        self._setup_sut()
    def tearDown(self) -> None:
        # Remove on-disk artifacts after every test
        self.sut.destroy_db()
    def _setup_sut(self):
        # Instantiate the system under test with the stored settings
        self.sut = AioDiskDB(
            self._path,
            create_if_not_exists=True,
            read_timeout=5,
            max_file_size=self._max_file_size,
            max_buffer_size=self._max_buffer_size,
            overwrite=self._overwrite
        )
def run_test_db(f):
    """Decorator: run the wrapped test with the database main loop active.

    Starts ``self.sut.run()`` as a background task, waits until the DB
    reports itself running, executes the test, and always attempts a
    clean shutdown afterwards.
    """
    async def _decorator(self, *a, **kw):
        try:
            self.loop.create_task(self.sut.run(), name='aiodiskdb_main_loop')
            # Poll until the database signals readiness
            while not self.sut.running:
                await asyncio.sleep(0.01)
            return await f(self, *a, **kw)
        finally:
            try:
                await self.sut.stop()
            except exceptions.NotRunningException:
                # The test itself may already have stopped (or crashed) the DB
                print('run_test_db requested to shutdown a not running database')
    return _decorator
| 29.671642 | 81 | 0.635815 | import asyncio
from unittest import IsolatedAsyncioTestCase
from aiodiskdb import exceptions
from aiodiskdb.aiodiskdb import AioDiskDB
from aiodiskdb.local_types import EventsHandlers
class TestEventsHandlerStrictTyping(IsolatedAsyncioTestCase):
def setUp(self) -> None:
self.sut = EventsHandlers()
self._dummy_ex = 0
async def _dummy(self, *a, **kw):
self._dummy_ex += 1
async def test(self):
with self.assertRaises(TypeError):
self.sut.on_start = lambda w: ''
self.sut.on_start = self._dummy
await self.sut.on_start()
self.sut.on_start = None
self.assertEqual(1, self._dummy_ex)
class AioDiskDBTestCase(IsolatedAsyncioTestCase):
_path = '/tmp/aiodiskdb_test'
def setUp(self, max_file_size=128, max_buffer_size=16, overwrite=True):
self.loop = asyncio.get_event_loop()
self._overwrite = True
self._max_file_size = max_file_size
self._max_buffer_size = max_buffer_size
self._setup_sut()
self.sut.destroy_db()
self._overwrite = overwrite
self._setup_sut()
def tearDown(self) -> None:
self.sut.destroy_db()
def _setup_sut(self):
self.sut = AioDiskDB(
self._path,
create_if_not_exists=True,
read_timeout=5,
max_file_size=self._max_file_size,
max_buffer_size=self._max_buffer_size,
overwrite=self._overwrite
)
def run_test_db(f):
async def _decorator(self, *a, **kw):
try:
self.loop.create_task(self.sut.run(), name='aiodiskdb_main_loop')
while not self.sut.running:
await asyncio.sleep(0.01)
return await f(self, *a, **kw)
finally:
try:
await self.sut.stop()
except exceptions.NotRunningException:
print('run_test_db requested to shutdown a not running database')
return _decorator
| true | true |
f737e2ad7d2371e24301eb47f502a76cdad850a2 | 30,205 | py | Python | homeassistant/components/homematic/__init__.py | sara0871/laughing--barnacle- | 70412fc0ba42ccfe446c0c62e327eceeda56a2ab | [
"Apache-2.0"
] | 2 | 2020-12-06T23:15:21.000Z | 2021-03-20T20:21:03.000Z | homeassistant/components/homematic/__init__.py | sara0871/https-wakatime.com-android-studio | 5a15b2c036b332c17d5f6a06664378e9273d684f | [
"Apache-2.0"
] | 3 | 2021-09-08T03:06:43.000Z | 2022-03-12T00:56:04.000Z | homeassistant/components/homematic/__init__.py | sara0871/https-wakatime.com-android-studio | 5a15b2c036b332c17d5f6a06664378e9273d684f | [
"Apache-2.0"
] | 1 | 2019-04-26T12:59:54.000Z | 2019-04-26T12:59:54.000Z | """
Support for HomeMatic devices.
For more details about this component, please refer to the documentation at
https://home-assistant.io/components/homematic/
"""
import asyncio
from datetime import timedelta
from functools import partial
import logging
import socket
import voluptuous as vol
from homeassistant.const import (
ATTR_ENTITY_ID, ATTR_NAME, CONF_HOST, CONF_HOSTS, CONF_PASSWORD,
CONF_PLATFORM, CONF_USERNAME, EVENT_HOMEASSISTANT_STOP, STATE_UNKNOWN)
from homeassistant.helpers import discovery
import homeassistant.helpers.config_validation as cv
from homeassistant.helpers.entity import Entity
from homeassistant.loader import bind_hass
REQUIREMENTS = ['pyhomematic==0.1.46']
_LOGGER = logging.getLogger(__name__)
DOMAIN = 'homematic'
SCAN_INTERVAL_HUB = timedelta(seconds=300)
SCAN_INTERVAL_VARIABLES = timedelta(seconds=30)
DISCOVER_SWITCHES = 'homematic.switch'
DISCOVER_LIGHTS = 'homematic.light'
DISCOVER_SENSORS = 'homematic.sensor'
DISCOVER_BINARY_SENSORS = 'homematic.binary_sensor'
DISCOVER_COVER = 'homematic.cover'
DISCOVER_CLIMATE = 'homematic.climate'
DISCOVER_LOCKS = 'homematic.locks'
ATTR_DISCOVER_DEVICES = 'devices'
ATTR_PARAM = 'param'
ATTR_CHANNEL = 'channel'
ATTR_ADDRESS = 'address'
ATTR_VALUE = 'value'
ATTR_INTERFACE = 'interface'
ATTR_ERRORCODE = 'error'
ATTR_MESSAGE = 'message'
ATTR_MODE = 'mode'
ATTR_TIME = 'time'
EVENT_KEYPRESS = 'homematic.keypress'
EVENT_IMPULSE = 'homematic.impulse'
EVENT_ERROR = 'homematic.error'
SERVICE_VIRTUALKEY = 'virtualkey'
SERVICE_RECONNECT = 'reconnect'
SERVICE_SET_VARIABLE_VALUE = 'set_variable_value'
SERVICE_SET_DEVICE_VALUE = 'set_device_value'
SERVICE_SET_INSTALL_MODE = 'set_install_mode'
HM_DEVICE_TYPES = {
DISCOVER_SWITCHES: [
'Switch', 'SwitchPowermeter', 'IOSwitch', 'IPSwitch', 'RFSiren',
'IPSwitchPowermeter', 'HMWIOSwitch', 'Rain', 'EcoLogic',
'IPKeySwitchPowermeter'],
DISCOVER_LIGHTS: ['Dimmer', 'KeyDimmer', 'IPKeyDimmer'],
DISCOVER_SENSORS: [
'SwitchPowermeter', 'Motion', 'MotionV2', 'RemoteMotion', 'MotionIP',
'ThermostatWall', 'AreaThermostat', 'RotaryHandleSensor',
'WaterSensor', 'PowermeterGas', 'LuxSensor', 'WeatherSensor',
'WeatherStation', 'ThermostatWall2', 'TemperatureDiffSensor',
'TemperatureSensor', 'CO2Sensor', 'IPSwitchPowermeter', 'HMWIOSwitch',
'FillingLevel', 'ValveDrive', 'EcoLogic', 'IPThermostatWall',
'IPSmoke', 'RFSiren', 'PresenceIP', 'IPAreaThermostat',
'IPWeatherSensor', 'RotaryHandleSensorIP', 'IPPassageSensor',
'IPKeySwitchPowermeter'],
DISCOVER_CLIMATE: [
'Thermostat', 'ThermostatWall', 'MAXThermostat', 'ThermostatWall2',
'MAXWallThermostat', 'IPThermostat', 'IPThermostatWall',
'ThermostatGroup'],
DISCOVER_BINARY_SENSORS: [
'ShutterContact', 'Smoke', 'SmokeV2', 'Motion', 'MotionV2',
'MotionIP', 'RemoteMotion', 'WeatherSensor', 'TiltSensor',
'IPShutterContact', 'HMWIOSwitch', 'MaxShutterContact', 'Rain',
'WiredSensor', 'PresenceIP', 'IPWeatherSensor', 'IPPassageSensor',
'SmartwareMotion'],
DISCOVER_COVER: ['Blind', 'KeyBlind', 'IPKeyBlind', 'IPKeyBlindTilt'],
DISCOVER_LOCKS: ['KeyMatic']
}
HM_IGNORE_DISCOVERY_NODE = [
'ACTUAL_TEMPERATURE',
'ACTUAL_HUMIDITY'
]
HM_IGNORE_DISCOVERY_NODE_EXCEPTIONS = {
'ACTUAL_TEMPERATURE': ['IPAreaThermostat', 'IPWeatherSensor'],
}
HM_ATTRIBUTE_SUPPORT = {
'LOWBAT': ['battery', {0: 'High', 1: 'Low'}],
'LOW_BAT': ['battery', {0: 'High', 1: 'Low'}],
'ERROR': ['sabotage', {0: 'No', 1: 'Yes'}],
'SABOTAGE': ['sabotage', {0: 'No', 1: 'Yes'}],
'RSSI_DEVICE': ['rssi', {}],
'VALVE_STATE': ['valve', {}],
'BATTERY_STATE': ['battery', {}],
'CONTROL_MODE': ['mode', {
0: 'Auto',
1: 'Manual',
2: 'Away',
3: 'Boost',
4: 'Comfort',
5: 'Lowering'
}],
'POWER': ['power', {}],
'CURRENT': ['current', {}],
'VOLTAGE': ['voltage', {}],
'OPERATING_VOLTAGE': ['voltage', {}],
'WORKING': ['working', {0: 'No', 1: 'Yes'}]
}
HM_PRESS_EVENTS = [
'PRESS_SHORT',
'PRESS_LONG',
'PRESS_CONT',
'PRESS_LONG_RELEASE',
'PRESS',
]
HM_IMPULSE_EVENTS = [
'SEQUENCE_OK',
]
CONF_RESOLVENAMES_OPTIONS = [
'metadata',
'json',
'xml',
False
]
DATA_HOMEMATIC = 'homematic'
DATA_STORE = 'homematic_store'
DATA_CONF = 'homematic_conf'
CONF_INTERFACES = 'interfaces'
CONF_LOCAL_IP = 'local_ip'
CONF_LOCAL_PORT = 'local_port'
CONF_PORT = 'port'
CONF_PATH = 'path'
CONF_CALLBACK_IP = 'callback_ip'
CONF_CALLBACK_PORT = 'callback_port'
CONF_RESOLVENAMES = 'resolvenames'
CONF_JSONPORT = 'jsonport'
CONF_VARIABLES = 'variables'
CONF_DEVICES = 'devices'
CONF_PRIMARY = 'primary'
DEFAULT_LOCAL_IP = '0.0.0.0'
DEFAULT_LOCAL_PORT = 0
DEFAULT_RESOLVENAMES = False
DEFAULT_JSONPORT = 80
DEFAULT_PORT = 2001
DEFAULT_PATH = ''
DEFAULT_USERNAME = 'Admin'
DEFAULT_PASSWORD = ''
DEVICE_SCHEMA = vol.Schema({
vol.Required(CONF_PLATFORM): 'homematic',
vol.Required(ATTR_NAME): cv.string,
vol.Required(ATTR_ADDRESS): cv.string,
vol.Required(ATTR_INTERFACE): cv.string,
vol.Optional(ATTR_CHANNEL, default=1): vol.Coerce(int),
vol.Optional(ATTR_PARAM): cv.string,
})
CONFIG_SCHEMA = vol.Schema({
DOMAIN: vol.Schema({
vol.Optional(CONF_INTERFACES, default={}): {cv.match_all: {
vol.Required(CONF_HOST): cv.string,
vol.Optional(CONF_PORT, default=DEFAULT_PORT): cv.port,
vol.Optional(CONF_PATH, default=DEFAULT_PATH): cv.string,
vol.Optional(CONF_RESOLVENAMES, default=DEFAULT_RESOLVENAMES):
vol.In(CONF_RESOLVENAMES_OPTIONS),
vol.Optional(CONF_JSONPORT, default=DEFAULT_JSONPORT): cv.port,
vol.Optional(CONF_USERNAME, default=DEFAULT_USERNAME): cv.string,
vol.Optional(CONF_PASSWORD, default=DEFAULT_PASSWORD): cv.string,
vol.Optional(CONF_CALLBACK_IP): cv.string,
vol.Optional(CONF_CALLBACK_PORT): cv.port,
}},
vol.Optional(CONF_HOSTS, default={}): {cv.match_all: {
vol.Required(CONF_HOST): cv.string,
vol.Optional(CONF_USERNAME, default=DEFAULT_USERNAME): cv.string,
vol.Optional(CONF_PASSWORD, default=DEFAULT_PASSWORD): cv.string,
}},
vol.Optional(CONF_LOCAL_IP, default=DEFAULT_LOCAL_IP): cv.string,
vol.Optional(CONF_LOCAL_PORT): cv.port,
}),
}, extra=vol.ALLOW_EXTRA)
SCHEMA_SERVICE_VIRTUALKEY = vol.Schema({
vol.Required(ATTR_ADDRESS): vol.All(cv.string, vol.Upper),
vol.Required(ATTR_CHANNEL): vol.Coerce(int),
vol.Required(ATTR_PARAM): cv.string,
vol.Optional(ATTR_INTERFACE): cv.string,
})
SCHEMA_SERVICE_SET_VARIABLE_VALUE = vol.Schema({
vol.Required(ATTR_NAME): cv.string,
vol.Required(ATTR_VALUE): cv.match_all,
vol.Optional(ATTR_ENTITY_ID): cv.entity_ids,
})
SCHEMA_SERVICE_SET_DEVICE_VALUE = vol.Schema({
vol.Required(ATTR_ADDRESS): vol.All(cv.string, vol.Upper),
vol.Required(ATTR_CHANNEL): vol.Coerce(int),
vol.Required(ATTR_PARAM): vol.All(cv.string, vol.Upper),
vol.Required(ATTR_VALUE): cv.match_all,
vol.Optional(ATTR_INTERFACE): cv.string,
})
SCHEMA_SERVICE_RECONNECT = vol.Schema({})
SCHEMA_SERVICE_SET_INSTALL_MODE = vol.Schema({
vol.Required(ATTR_INTERFACE): cv.string,
vol.Optional(ATTR_TIME, default=60): cv.positive_int,
vol.Optional(ATTR_MODE, default=1):
vol.All(vol.Coerce(int), vol.In([1, 2])),
vol.Optional(ATTR_ADDRESS): vol.All(cv.string, vol.Upper),
})
@bind_hass
def virtualkey(hass, address, channel, param, interface=None):
    """Fire the virtualkey service to emulate a keypress on a device."""
    hass.services.call(DOMAIN, SERVICE_VIRTUALKEY, {
        ATTR_ADDRESS: address,
        ATTR_CHANNEL: channel,
        ATTR_PARAM: param,
        ATTR_INTERFACE: interface,
    })
@bind_hass
def set_variable_value(hass, entity_id, value):
    """Fire the service that updates a HomeMatic system variable."""
    hass.services.call(DOMAIN, SERVICE_SET_VARIABLE_VALUE, {
        ATTR_ENTITY_ID: entity_id,
        ATTR_VALUE: value,
    })
@bind_hass
def set_device_value(hass, address, channel, param, value, interface=None):
    """Call setValue XML-RPC method of supplied interface.

    Fires the homematic.set_device_value service; the actual XML-RPC
    call is performed by the handler registered in setup().
    """
    data = {
        ATTR_ADDRESS: address,
        ATTR_CHANNEL: channel,
        ATTR_PARAM: param,
        ATTR_VALUE: value,
        ATTR_INTERFACE: interface,
    }
    hass.services.call(DOMAIN, SERVICE_SET_DEVICE_VALUE, data)
@bind_hass
def set_install_mode(hass, interface, mode=None, time=None, address=None):
    """Fire the service that puts an interface into install (pairing) mode."""
    # Forward only the arguments that carry a truthy value, so the service
    # schema defaults apply for everything omitted.
    data = {}
    if interface:
        data[ATTR_INTERFACE] = interface
    if mode:
        data[ATTR_MODE] = mode
    if time:
        data[ATTR_TIME] = time
    if address:
        data[ATTR_ADDRESS] = address
    hass.services.call(DOMAIN, SERVICE_SET_INSTALL_MODE, data)
@bind_hass
def reconnect(hass):
    """Reconnect to CCU/Homegear.

    Fires the homematic.reconnect service, which re-establishes the
    connections for all configured hubs.
    """
    hass.services.call(DOMAIN, SERVICE_RECONNECT, {})
def setup(hass, config):
    """Set up the Homematic component.

    Builds the pyhomematic connection from the YAML config, registers a
    shutdown hook, creates hub entities and registers the component
    services (virtualkey, set_variable_value, reconnect,
    set_device_value, set_install_mode). Returns True on success.
    """
    from pyhomematic import HMConnection
    conf = config[DOMAIN]
    # Shared state: per-remote connection settings (DATA_CONF) and the
    # set of device addresses already discovered (DATA_STORE).
    hass.data[DATA_CONF] = remotes = {}
    hass.data[DATA_STORE] = set()
    # Create hosts-dictionary for pyhomematic: RPC interfaces with full
    # device/event support ('connect': True)
    for rname, rconfig in conf[CONF_INTERFACES].items():
        remotes[rname] = {
            'ip': socket.gethostbyname(rconfig.get(CONF_HOST)),
            'port': rconfig.get(CONF_PORT),
            'path': rconfig.get(CONF_PATH),
            'resolvenames': rconfig.get(CONF_RESOLVENAMES),
            'jsonport': rconfig.get(CONF_JSONPORT),
            'username': rconfig.get(CONF_USERNAME),
            'password': rconfig.get(CONF_PASSWORD),
            'callbackip': rconfig.get(CONF_CALLBACK_IP),
            'callbackport': rconfig.get(CONF_CALLBACK_PORT),
            'connect': True,
        }
    # Hub-only hosts (system variables): no device event connection
    for sname, sconfig in conf[CONF_HOSTS].items():
        remotes[sname] = {
            'ip': socket.gethostbyname(sconfig.get(CONF_HOST)),
            'port': DEFAULT_PORT,
            'username': sconfig.get(CONF_USERNAME),
            'password': sconfig.get(CONF_PASSWORD),
            'connect': False,
        }
    # Create server thread; _system_callback_handler receives the
    # newDevices/error system callbacks from pyhomematic.
    bound_system_callback = partial(_system_callback_handler, hass, config)
    hass.data[DATA_HOMEMATIC] = homematic = HMConnection(
        local=config[DOMAIN].get(CONF_LOCAL_IP),
        localport=config[DOMAIN].get(CONF_LOCAL_PORT, DEFAULT_LOCAL_PORT),
        remotes=remotes,
        systemcallback=bound_system_callback,
        interface_id='homeassistant'
    )
    # Start server thread, connect to hosts, initialize to receive events
    homematic.start()
    # Stops server when HASS is shutting down
    hass.bus.listen_once(
        EVENT_HOMEASSISTANT_STOP, hass.data[DATA_HOMEMATIC].stop)
    # Init homematic hubs: one HMHub entity per configured host
    # (HMHub is presumably defined later in this module — not visible here)
    entity_hubs = []
    for hub_name in conf[CONF_HOSTS].keys():
        entity_hubs.append(HMHub(hass, homematic, hub_name))
    def _hm_service_virtualkey(service):
        """Service to handle virtualkey servicecalls."""
        address = service.data.get(ATTR_ADDRESS)
        channel = service.data.get(ATTR_CHANNEL)
        param = service.data.get(ATTR_PARAM)
        # Device not found
        hmdevice = _device_from_servicecall(hass, service)
        if hmdevice is None:
            _LOGGER.error("%s not found for service virtualkey!", address)
            return
        # Parameter doesn't exist for device
        if param not in hmdevice.ACTIONNODE:
            _LOGGER.error("%s not datapoint in hm device %s", param, address)
            return
        # Channel doesn't exist for device
        if channel not in hmdevice.ACTIONNODE[param]:
            _LOGGER.error("%i is not a channel in hm device %s",
                          channel, address)
            return
        # Trigger the action datapoint (emulated keypress)
        hmdevice.actionNodeData(param, True, channel)
    hass.services.register(
        DOMAIN, SERVICE_VIRTUALKEY, _hm_service_virtualkey,
        schema=SCHEMA_SERVICE_VIRTUALKEY)
    def _service_handle_value(service):
        """Service to call setValue method for HomeMatic system variable."""
        entity_ids = service.data.get(ATTR_ENTITY_ID)
        name = service.data[ATTR_NAME]
        value = service.data[ATTR_VALUE]
        # Restrict to the requested hub entities; default to all hubs
        if entity_ids:
            entities = [entity for entity in entity_hubs if
                        entity.entity_id in entity_ids]
        else:
            entities = entity_hubs
        if not entities:
            _LOGGER.error("No HomeMatic hubs available")
            return
        for hub in entities:
            hub.hm_set_variable(name, value)
    hass.services.register(
        DOMAIN, SERVICE_SET_VARIABLE_VALUE, _service_handle_value,
        schema=SCHEMA_SERVICE_SET_VARIABLE_VALUE)
    def _service_handle_reconnect(service):
        """Service to reconnect all HomeMatic hubs."""
        homematic.reconnect()
    hass.services.register(
        DOMAIN, SERVICE_RECONNECT, _service_handle_reconnect,
        schema=SCHEMA_SERVICE_RECONNECT)
    def _service_handle_device(service):
        """Service to call setValue method for HomeMatic devices."""
        address = service.data.get(ATTR_ADDRESS)
        channel = service.data.get(ATTR_CHANNEL)
        param = service.data.get(ATTR_PARAM)
        value = service.data.get(ATTR_VALUE)
        # Device not found
        hmdevice = _device_from_servicecall(hass, service)
        if hmdevice is None:
            _LOGGER.error("%s not found!", address)
            return
        hmdevice.setValue(param, value, channel)
    hass.services.register(
        DOMAIN, SERVICE_SET_DEVICE_VALUE, _service_handle_device,
        schema=SCHEMA_SERVICE_SET_DEVICE_VALUE)
    def _service_handle_install_mode(service):
        """Service to set interface into install mode."""
        interface = service.data.get(ATTR_INTERFACE)
        mode = service.data.get(ATTR_MODE)
        time = service.data.get(ATTR_TIME)
        address = service.data.get(ATTR_ADDRESS)
        homematic.setInstallMode(interface, t=time, mode=mode, address=address)
    hass.services.register(
        DOMAIN, SERVICE_SET_INSTALL_MODE, _service_handle_install_mode,
        schema=SCHEMA_SERVICE_SET_INSTALL_MODE)
    return True
def _system_callback_handler(hass, config, src, *args):
    """Handle pyhomematic system callbacks (device discovery and errors)."""
    # New devices available at hub
    if src == 'newDevices':
        (interface_id, dev_descriptions) = args
        interface = interface_id.split('-')[-1]
        # Device support active? (hub-only hosts are set up with
        # 'connect': False and are skipped here)
        if not hass.data[DATA_CONF][interface]['connect']:
            return
        # Collect base addresses (channel suffix stripped) not seen before
        addresses = []
        for dev in dev_descriptions:
            address = dev['ADDRESS'].split(':')[0]
            if address not in hass.data[DATA_STORE]:
                hass.data[DATA_STORE].add(address)
                addresses.append(address)
        # Register EVENTS
        # Search all devices with an EVENTNODE that includes data
        bound_event_callback = partial(_hm_event_handler, hass, interface)
        for dev in addresses:
            hmdevice = hass.data[DATA_HOMEMATIC].devices[interface].get(dev)
            if hmdevice.EVENTNODE:
                hmdevice.setEventCallback(
                    callback=bound_event_callback, bequeath=True)
        # Create HASS entities, one discovery pass per platform
        if addresses:
            for component_name, discovery_type in (
                    ('switch', DISCOVER_SWITCHES),
                    ('light', DISCOVER_LIGHTS),
                    ('cover', DISCOVER_COVER),
                    ('binary_sensor', DISCOVER_BINARY_SENSORS),
                    ('sensor', DISCOVER_SENSORS),
                    ('climate', DISCOVER_CLIMATE),
                    ('lock', DISCOVER_LOCKS)):
                # Get all devices of a specific type
                found_devices = _get_devices(
                    hass, discovery_type, addresses, interface)
                # When devices of this type are found
                # they are setup in HASS and a discovery event is fired
                if found_devices:
                    discovery.load_platform(hass, component_name, DOMAIN, {
                        ATTR_DISCOVER_DEVICES: found_devices
                    }, config)
    # Homegear error message
    elif src == 'error':
        _LOGGER.error("Error: %s", args)
        (interface_id, errorcode, message) = args
        hass.bus.fire(EVENT_ERROR, {
            ATTR_ERRORCODE: errorcode,
            ATTR_MESSAGE: message
        })
def _get_devices(hass, discovery_type, keys, interface):
    """Get the HomeMatic devices for given discovery_type.

    Builds one entity config dict per (device, parameter, channel)
    combination; each dict is validated against DEVICE_SCHEMA before it
    is added, so invalid combinations are logged and skipped.
    """
    device_arr = []
    for key in keys:
        device = hass.data[DATA_HOMEMATIC].devices[interface][key]
        class_name = device.__class__.__name__
        metadata = {}
        # Class not supported by discovery type
        if class_name not in HM_DEVICE_TYPES[discovery_type]:
            continue
        # Load metadata needed to generate a parameter list
        if discovery_type == DISCOVER_SENSORS:
            metadata.update(device.SENSORNODE)
        elif discovery_type == DISCOVER_BINARY_SENSORS:
            metadata.update(device.BINARYNODE)
        else:
            # Non-sensor platforms get a single pseudo-parameter (None)
            # whose channel list is the device's element count.
            metadata.update({None: device.ELEMENT})
        # Generate options for 1...n elements with 1...n parameters
        for param, channels in metadata.items():
            # Skip nodes on the ignore list unless this class is an
            # explicit exception for that node.
            if param in HM_IGNORE_DISCOVERY_NODE and class_name not in \
                    HM_IGNORE_DISCOVERY_NODE_EXCEPTIONS.get(param, []):
                continue
            # Add devices
            _LOGGER.debug("%s: Handling %s: %s: %s",
                          discovery_type, key, param, channels)
            for channel in channels:
                name = _create_ha_name(
                    name=device.NAME, channel=channel, param=param,
                    count=len(channels)
                )
                device_dict = {
                    CONF_PLATFORM: "homematic",
                    ATTR_ADDRESS: key,
                    ATTR_INTERFACE: interface,
                    ATTR_NAME: name,
                    ATTR_CHANNEL: channel
                }
                if param is not None:
                    device_dict[ATTR_PARAM] = param
                # Add new device
                try:
                    DEVICE_SCHEMA(device_dict)
                    device_arr.append(device_dict)
                except vol.MultipleInvalid as err:
                    _LOGGER.error("Invalid device config: %s",
                                  str(err))
    return device_arr
def _create_ha_name(name, channel, param, count):
"""Generate a unique entity id."""
# HMDevice is a simple device
if count == 1 and param is None:
return name
# Has multiple elements/channels
if count > 1 and param is None:
return "{} {}".format(name, channel)
# With multiple parameters on first channel
if count == 1 and param is not None:
return "{} {}".format(name, param)
# Multiple parameters with multiple channels
if count > 1 and param is not None:
return "{} {} {}".format(name, channel, param)
def _hm_event_handler(hass, interface, device, caller, attribute, value):
    """Handle all pyhomematic device events.

    Translates pyhomematic callbacks into Home Assistant bus events:
    keypress parameters fire EVENT_KEYPRESS, impulse parameters fire
    EVENT_IMPULSE; anything else is logged and dropped.
    """
    try:
        # `device` is '<address>:<channel>'.
        channel = int(device.split(":")[1])
        address = device.split(":")[0]
        hmdevice = hass.data[DATA_HOMEMATIC].devices[interface].get(address)
    except (TypeError, ValueError):
        _LOGGER.error("Event handling channel convert error!")
        return
    # Bug fix: .get() returns None for an unknown address; without this
    # guard the EVENTNODE lookup below raised AttributeError.
    if hmdevice is None:
        _LOGGER.error("Event handling device not found: %s", device)
        return
    # Return if not an event supported by device
    if attribute not in hmdevice.EVENTNODE:
        return
    _LOGGER.debug("Event %s for %s channel %i", attribute,
                  hmdevice.NAME, channel)
    # Keypress event
    if attribute in HM_PRESS_EVENTS:
        hass.bus.fire(EVENT_KEYPRESS, {
            ATTR_NAME: hmdevice.NAME,
            ATTR_PARAM: attribute,
            ATTR_CHANNEL: channel
        })
        return
    # Impulse event
    if attribute in HM_IMPULSE_EVENTS:
        hass.bus.fire(EVENT_IMPULSE, {
            ATTR_NAME: hmdevice.NAME,
            ATTR_CHANNEL: channel
        })
        return
    _LOGGER.warning("Event is unknown and not forwarded")
def _device_from_servicecall(hass, service):
    """Extract HomeMatic device from service call."""
    address = service.data.get(ATTR_ADDRESS)
    interface = service.data.get(ATTR_INTERFACE)
    # pyhomematic stores the internal virtual remote with mixed case.
    if address == 'BIDCOS-RF':
        address = 'BidCoS-RF'
    all_devices = hass.data[DATA_HOMEMATIC].devices
    if interface:
        # A specific interface was given; look only there.
        return all_devices[interface].get(address)
    # No interface given: search every known interface for the address.
    for devices in all_devices.values():
        if address in devices:
            return devices[address]
    return None
class HMHub(Entity):
    """The HomeMatic hub. (CCU2/HomeGear)."""

    def __init__(self, hass, homematic, name):
        """Initialize HomeMatic hub."""
        self.hass = hass
        self.entity_id = "{}.{}".format(DOMAIN, name.lower())
        self._homematic = homematic
        # Cache of system variable name -> last known value.
        self._variables = {}
        self._name = name
        # State is the current number of service messages (or None).
        self._state = None
        # Load data
        self.hass.helpers.event.track_time_interval(
            self._update_hub, SCAN_INTERVAL_HUB)
        # Trigger one immediate refresh of each poller at startup.
        self.hass.add_job(self._update_hub, None)
        self.hass.helpers.event.track_time_interval(
            self._update_variables, SCAN_INTERVAL_VARIABLES)
        self.hass.add_job(self._update_variables, None)

    @property
    def name(self):
        """Return the name of the device."""
        return self._name

    @property
    def should_poll(self):
        """Return false. HomeMatic Hub object updates variables."""
        return False

    @property
    def state(self):
        """Return the state of the entity."""
        return self._state

    @property
    def state_attributes(self):
        """Return the state attributes."""
        attr = self._variables.copy()
        return attr

    @property
    def icon(self):
        """Return the icon to use in the frontend, if any."""
        return "mdi:gradient"

    def _update_hub(self, now):
        """Retrieve latest state."""
        service_message = self._homematic.getServiceMessages(self._name)
        state = None if service_message is None else len(service_message)
        # state have change?
        if self._state != state:
            self._state = state
            self.schedule_update_ha_state()

    def _update_variables(self, now):
        """Retrieve all variable data and update hmvariable states."""
        variables = self._homematic.getAllSystemVariables(self._name)
        if variables is None:
            return
        state_change = False
        for key, value in variables.items():
            # Only flag a change for new keys or changed values.
            if key in self._variables and value == self._variables[key]:
                continue
            state_change = True
            self._variables.update({key: value})
        if state_change:
            self.schedule_update_ha_state()

    def hm_set_variable(self, name, value):
        """Set variable value on CCU/Homegear."""
        if name not in self._variables:
            _LOGGER.error("Variable %s not found on %s", name, self.name)
            return
        old_value = self._variables.get(name)
        # Coerce to the type of the cached value: bool stays bool,
        # everything else is treated as float.
        # NOTE(review): a string-typed system variable would make
        # float(value) raise ValueError here — confirm against CCU types.
        if isinstance(old_value, bool):
            value = cv.boolean(value)
        else:
            value = float(value)
        self._homematic.setSystemVariable(self.name, name, value)
        # Update the local cache optimistically.
        self._variables.update({name: value})
        self.schedule_update_ha_state()
class HMDevice(Entity):
    """The HomeMatic device base object."""

    def __init__(self, config):
        """Initialize a generic HomeMatic device.

        `config` is one of the dicts produced by _get_devices and
        validated by DEVICE_SCHEMA.
        """
        self._name = config.get(ATTR_NAME)
        self._address = config.get(ATTR_ADDRESS)
        self._interface = config.get(ATTR_INTERFACE)
        self._channel = config.get(ATTR_CHANNEL)
        # _state holds the NAME of the main data point (the parameter),
        # not its value; the value lives in self._data.
        self._state = config.get(ATTR_PARAM)
        self._data = {}
        self._homematic = None
        self._hmdevice = None
        self._connected = False
        self._available = False
        # Set parameter to uppercase
        if self._state:
            self._state = self._state.upper()

    @asyncio.coroutine
    def async_added_to_hass(self):
        """Load data init callbacks."""
        # link_homematic is blocking, so run it in the executor pool.
        yield from self.hass.async_add_job(self.link_homematic)

    @property
    def should_poll(self):
        """Return false. HomeMatic states are pushed by the XML-RPC Server."""
        return False

    @property
    def name(self):
        """Return the name of the device."""
        return self._name

    @property
    def available(self):
        """Return true if device is available."""
        return self._available

    @property
    def device_state_attributes(self):
        """Return device specific state attributes."""
        attr = {}
        # Generate a dictionary with attributes
        for node, data in HM_ATTRIBUTE_SUPPORT.items():
            # Is an attribute and exists for this object
            if node in self._data:
                # data is [attribute_name, value_translation_map];
                # fall back to the raw value for untranslated entries.
                value = data[1].get(self._data[node], self._data[node])
                attr[data[0]] = value
        # Static attributes
        attr['id'] = self._hmdevice.ADDRESS
        attr['interface'] = self._interface
        return attr

    def link_homematic(self):
        """Connect to HomeMatic."""
        if self._connected:
            return True
        # Initialize
        self._homematic = self.hass.data[DATA_HOMEMATIC]
        self._hmdevice = \
            self._homematic.devices[self._interface][self._address]
        self._connected = True
        try:
            # Initialize datapoints of this object
            self._init_data()
            self._load_data_from_hm()
            # Link events from pyhomematic
            self._subscribe_homematic_events()
            self._available = not self._hmdevice.UNREACH
        # pylint: disable=broad-except
        except Exception as err:
            # Roll back the connected flag so a later call retries.
            self._connected = False
            _LOGGER.error("Exception while linking %s: %s",
                          self._address, str(err))

    def _hm_event_callback(self, device, caller, attribute, value):
        """Handle all pyhomematic device events."""
        _LOGGER.debug("%s received event '%s' value: %s", self._name,
                      attribute, value)
        has_changed = False
        # Is data needed for this instance?
        if attribute in self._data:
            # Did data change?
            if self._data[attribute] != value:
                self._data[attribute] = value
                has_changed = True
        # Availability has changed
        if attribute == 'UNREACH':
            self._available = bool(value)
            has_changed = True
        elif not self.available:
            # NOTE(review): any non-UNREACH event on an unavailable device
            # re-asserts unavailability and forces an update — confirm this
            # is intended (an incoming event could also imply reachable).
            self._available = False
            has_changed = True
        # If it has changed data point, update HASS
        if has_changed:
            self.schedule_update_ha_state()

    def _subscribe_homematic_events(self):
        """Subscribe all required events to handle job."""
        channels_to_sub = set()
        channels_to_sub.add(0)  # Add channel 0 for UNREACH
        # Push data to channels_to_sub from hmdevice metadata
        for metadata in (self._hmdevice.SENSORNODE, self._hmdevice.BINARYNODE,
                         self._hmdevice.ATTRIBUTENODE,
                         self._hmdevice.WRITENODE, self._hmdevice.EVENTNODE,
                         self._hmdevice.ACTIONNODE):
            for node, channels in metadata.items():
                # Data is needed for this instance
                if node in self._data:
                    # chan is current channel
                    if len(channels) == 1:
                        channel = channels[0]
                    else:
                        channel = self._channel
                    # Prepare for subscription
                    try:
                        channels_to_sub.add(int(channel))
                    except (ValueError, TypeError):
                        _LOGGER.error("Invalid channel in metadata from %s",
                                      self._name)
        # Set callbacks
        for channel in channels_to_sub:
            _LOGGER.debug(
                "Subscribe channel %d from %s", channel, self._name)
            self._hmdevice.setEventCallback(
                callback=self._hm_event_callback, bequeath=False,
                channel=channel)

    def _load_data_from_hm(self):
        """Load first value from pyhomematic."""
        if not self._connected:
            return False
        # Read data from pyhomematic
        for metadata, funct in (
                (self._hmdevice.ATTRIBUTENODE,
                 self._hmdevice.getAttributeData),
                (self._hmdevice.WRITENODE, self._hmdevice.getWriteData),
                (self._hmdevice.SENSORNODE, self._hmdevice.getSensorData),
                (self._hmdevice.BINARYNODE, self._hmdevice.getBinaryData)):
            for node in metadata:
                if metadata[node] and node in self._data:
                    self._data[node] = funct(name=node, channel=self._channel)
        return True

    def _hm_set_state(self, value):
        """Set data to main datapoint."""
        if self._state in self._data:
            self._data[self._state] = value

    def _hm_get_state(self):
        """Get data from main datapoint."""
        if self._state in self._data:
            return self._data[self._state]
        return None

    def _init_data(self):
        """Generate a data dict (self._data) from the HomeMatic metadata."""
        # Add all attributes to data dictionary
        for data_note in self._hmdevice.ATTRIBUTENODE:
            self._data.update({data_note: STATE_UNKNOWN})
        # Initialize device specific data
        self._init_data_struct()

    def _init_data_struct(self):
        """Generate a data dictionary from the HomeMatic device metadata."""
        # Subclasses (platform entities) must populate self._data here.
        raise NotImplementedError
| 33.523862 | 79 | 0.626916 | import asyncio
from datetime import timedelta
from functools import partial
import logging
import socket
import voluptuous as vol
from homeassistant.const import (
ATTR_ENTITY_ID, ATTR_NAME, CONF_HOST, CONF_HOSTS, CONF_PASSWORD,
CONF_PLATFORM, CONF_USERNAME, EVENT_HOMEASSISTANT_STOP, STATE_UNKNOWN)
from homeassistant.helpers import discovery
import homeassistant.helpers.config_validation as cv
from homeassistant.helpers.entity import Entity
from homeassistant.loader import bind_hass
REQUIREMENTS = ['pyhomematic==0.1.46']
_LOGGER = logging.getLogger(__name__)
DOMAIN = 'homematic'
SCAN_INTERVAL_HUB = timedelta(seconds=300)
SCAN_INTERVAL_VARIABLES = timedelta(seconds=30)
DISCOVER_SWITCHES = 'homematic.switch'
DISCOVER_LIGHTS = 'homematic.light'
DISCOVER_SENSORS = 'homematic.sensor'
DISCOVER_BINARY_SENSORS = 'homematic.binary_sensor'
DISCOVER_COVER = 'homematic.cover'
DISCOVER_CLIMATE = 'homematic.climate'
DISCOVER_LOCKS = 'homematic.locks'
ATTR_DISCOVER_DEVICES = 'devices'
ATTR_PARAM = 'param'
ATTR_CHANNEL = 'channel'
ATTR_ADDRESS = 'address'
ATTR_VALUE = 'value'
ATTR_INTERFACE = 'interface'
ATTR_ERRORCODE = 'error'
ATTR_MESSAGE = 'message'
ATTR_MODE = 'mode'
ATTR_TIME = 'time'
EVENT_KEYPRESS = 'homematic.keypress'
EVENT_IMPULSE = 'homematic.impulse'
EVENT_ERROR = 'homematic.error'
SERVICE_VIRTUALKEY = 'virtualkey'
SERVICE_RECONNECT = 'reconnect'
SERVICE_SET_VARIABLE_VALUE = 'set_variable_value'
SERVICE_SET_DEVICE_VALUE = 'set_device_value'
SERVICE_SET_INSTALL_MODE = 'set_install_mode'
HM_DEVICE_TYPES = {
DISCOVER_SWITCHES: [
'Switch', 'SwitchPowermeter', 'IOSwitch', 'IPSwitch', 'RFSiren',
'IPSwitchPowermeter', 'HMWIOSwitch', 'Rain', 'EcoLogic',
'IPKeySwitchPowermeter'],
DISCOVER_LIGHTS: ['Dimmer', 'KeyDimmer', 'IPKeyDimmer'],
DISCOVER_SENSORS: [
'SwitchPowermeter', 'Motion', 'MotionV2', 'RemoteMotion', 'MotionIP',
'ThermostatWall', 'AreaThermostat', 'RotaryHandleSensor',
'WaterSensor', 'PowermeterGas', 'LuxSensor', 'WeatherSensor',
'WeatherStation', 'ThermostatWall2', 'TemperatureDiffSensor',
'TemperatureSensor', 'CO2Sensor', 'IPSwitchPowermeter', 'HMWIOSwitch',
'FillingLevel', 'ValveDrive', 'EcoLogic', 'IPThermostatWall',
'IPSmoke', 'RFSiren', 'PresenceIP', 'IPAreaThermostat',
'IPWeatherSensor', 'RotaryHandleSensorIP', 'IPPassageSensor',
'IPKeySwitchPowermeter'],
DISCOVER_CLIMATE: [
'Thermostat', 'ThermostatWall', 'MAXThermostat', 'ThermostatWall2',
'MAXWallThermostat', 'IPThermostat', 'IPThermostatWall',
'ThermostatGroup'],
DISCOVER_BINARY_SENSORS: [
'ShutterContact', 'Smoke', 'SmokeV2', 'Motion', 'MotionV2',
'MotionIP', 'RemoteMotion', 'WeatherSensor', 'TiltSensor',
'IPShutterContact', 'HMWIOSwitch', 'MaxShutterContact', 'Rain',
'WiredSensor', 'PresenceIP', 'IPWeatherSensor', 'IPPassageSensor',
'SmartwareMotion'],
DISCOVER_COVER: ['Blind', 'KeyBlind', 'IPKeyBlind', 'IPKeyBlindTilt'],
DISCOVER_LOCKS: ['KeyMatic']
}
HM_IGNORE_DISCOVERY_NODE = [
'ACTUAL_TEMPERATURE',
'ACTUAL_HUMIDITY'
]
HM_IGNORE_DISCOVERY_NODE_EXCEPTIONS = {
'ACTUAL_TEMPERATURE': ['IPAreaThermostat', 'IPWeatherSensor'],
}
HM_ATTRIBUTE_SUPPORT = {
'LOWBAT': ['battery', {0: 'High', 1: 'Low'}],
'LOW_BAT': ['battery', {0: 'High', 1: 'Low'}],
'ERROR': ['sabotage', {0: 'No', 1: 'Yes'}],
'SABOTAGE': ['sabotage', {0: 'No', 1: 'Yes'}],
'RSSI_DEVICE': ['rssi', {}],
'VALVE_STATE': ['valve', {}],
'BATTERY_STATE': ['battery', {}],
'CONTROL_MODE': ['mode', {
0: 'Auto',
1: 'Manual',
2: 'Away',
3: 'Boost',
4: 'Comfort',
5: 'Lowering'
}],
'POWER': ['power', {}],
'CURRENT': ['current', {}],
'VOLTAGE': ['voltage', {}],
'OPERATING_VOLTAGE': ['voltage', {}],
'WORKING': ['working', {0: 'No', 1: 'Yes'}]
}
HM_PRESS_EVENTS = [
'PRESS_SHORT',
'PRESS_LONG',
'PRESS_CONT',
'PRESS_LONG_RELEASE',
'PRESS',
]
HM_IMPULSE_EVENTS = [
'SEQUENCE_OK',
]
CONF_RESOLVENAMES_OPTIONS = [
'metadata',
'json',
'xml',
False
]
DATA_HOMEMATIC = 'homematic'
DATA_STORE = 'homematic_store'
DATA_CONF = 'homematic_conf'
CONF_INTERFACES = 'interfaces'
CONF_LOCAL_IP = 'local_ip'
CONF_LOCAL_PORT = 'local_port'
CONF_PORT = 'port'
CONF_PATH = 'path'
CONF_CALLBACK_IP = 'callback_ip'
CONF_CALLBACK_PORT = 'callback_port'
CONF_RESOLVENAMES = 'resolvenames'
CONF_JSONPORT = 'jsonport'
CONF_VARIABLES = 'variables'
CONF_DEVICES = 'devices'
CONF_PRIMARY = 'primary'
DEFAULT_LOCAL_IP = '0.0.0.0'
DEFAULT_LOCAL_PORT = 0
DEFAULT_RESOLVENAMES = False
DEFAULT_JSONPORT = 80
DEFAULT_PORT = 2001
DEFAULT_PATH = ''
DEFAULT_USERNAME = 'Admin'
DEFAULT_PASSWORD = ''
DEVICE_SCHEMA = vol.Schema({
vol.Required(CONF_PLATFORM): 'homematic',
vol.Required(ATTR_NAME): cv.string,
vol.Required(ATTR_ADDRESS): cv.string,
vol.Required(ATTR_INTERFACE): cv.string,
vol.Optional(ATTR_CHANNEL, default=1): vol.Coerce(int),
vol.Optional(ATTR_PARAM): cv.string,
})
CONFIG_SCHEMA = vol.Schema({
DOMAIN: vol.Schema({
vol.Optional(CONF_INTERFACES, default={}): {cv.match_all: {
vol.Required(CONF_HOST): cv.string,
vol.Optional(CONF_PORT, default=DEFAULT_PORT): cv.port,
vol.Optional(CONF_PATH, default=DEFAULT_PATH): cv.string,
vol.Optional(CONF_RESOLVENAMES, default=DEFAULT_RESOLVENAMES):
vol.In(CONF_RESOLVENAMES_OPTIONS),
vol.Optional(CONF_JSONPORT, default=DEFAULT_JSONPORT): cv.port,
vol.Optional(CONF_USERNAME, default=DEFAULT_USERNAME): cv.string,
vol.Optional(CONF_PASSWORD, default=DEFAULT_PASSWORD): cv.string,
vol.Optional(CONF_CALLBACK_IP): cv.string,
vol.Optional(CONF_CALLBACK_PORT): cv.port,
}},
vol.Optional(CONF_HOSTS, default={}): {cv.match_all: {
vol.Required(CONF_HOST): cv.string,
vol.Optional(CONF_USERNAME, default=DEFAULT_USERNAME): cv.string,
vol.Optional(CONF_PASSWORD, default=DEFAULT_PASSWORD): cv.string,
}},
vol.Optional(CONF_LOCAL_IP, default=DEFAULT_LOCAL_IP): cv.string,
vol.Optional(CONF_LOCAL_PORT): cv.port,
}),
}, extra=vol.ALLOW_EXTRA)
SCHEMA_SERVICE_VIRTUALKEY = vol.Schema({
vol.Required(ATTR_ADDRESS): vol.All(cv.string, vol.Upper),
vol.Required(ATTR_CHANNEL): vol.Coerce(int),
vol.Required(ATTR_PARAM): cv.string,
vol.Optional(ATTR_INTERFACE): cv.string,
})
SCHEMA_SERVICE_SET_VARIABLE_VALUE = vol.Schema({
vol.Required(ATTR_NAME): cv.string,
vol.Required(ATTR_VALUE): cv.match_all,
vol.Optional(ATTR_ENTITY_ID): cv.entity_ids,
})
SCHEMA_SERVICE_SET_DEVICE_VALUE = vol.Schema({
vol.Required(ATTR_ADDRESS): vol.All(cv.string, vol.Upper),
vol.Required(ATTR_CHANNEL): vol.Coerce(int),
vol.Required(ATTR_PARAM): vol.All(cv.string, vol.Upper),
vol.Required(ATTR_VALUE): cv.match_all,
vol.Optional(ATTR_INTERFACE): cv.string,
})
SCHEMA_SERVICE_RECONNECT = vol.Schema({})
SCHEMA_SERVICE_SET_INSTALL_MODE = vol.Schema({
vol.Required(ATTR_INTERFACE): cv.string,
vol.Optional(ATTR_TIME, default=60): cv.positive_int,
vol.Optional(ATTR_MODE, default=1):
vol.All(vol.Coerce(int), vol.In([1, 2])),
vol.Optional(ATTR_ADDRESS): vol.All(cv.string, vol.Upper),
})
@bind_hass
def virtualkey(hass, address, channel, param, interface=None):
    """Send virtual keypress to homematic controller."""
    data = {
        ATTR_ADDRESS: address,
        ATTR_CHANNEL: channel,
        ATTR_PARAM: param,
        ATTR_INTERFACE: interface,
    }
    hass.services.call(DOMAIN, SERVICE_VIRTUALKEY, data)


@bind_hass
def set_variable_value(hass, entity_id, value):
    """Change value of a HomeMatic system variable."""
    data = {
        ATTR_ENTITY_ID: entity_id,
        ATTR_VALUE: value,
    }
    hass.services.call(DOMAIN, SERVICE_SET_VARIABLE_VALUE, data)


@bind_hass
def set_device_value(hass, address, channel, param, value, interface=None):
    """Call setValue XML-RPC method of supplied device."""
    data = {
        ATTR_ADDRESS: address,
        ATTR_CHANNEL: channel,
        ATTR_PARAM: param,
        ATTR_VALUE: value,
        ATTR_INTERFACE: interface,
    }
    hass.services.call(DOMAIN, SERVICE_SET_DEVICE_VALUE, data)


@bind_hass
def set_install_mode(hass, interface, mode=None, time=None, address=None):
    """Call setInstallMode XML-RPC method of supplied interface.

    Only truthy arguments are forwarded to the service call so the
    service schema defaults apply for omitted ones.
    """
    data = {
        key: value for key, value in (
            (ATTR_INTERFACE, interface),
            (ATTR_MODE, mode),
            (ATTR_TIME, time),
            (ATTR_ADDRESS, address)
        ) if value
    }
    hass.services.call(DOMAIN, SERVICE_SET_INSTALL_MODE, data)


@bind_hass
def reconnect(hass):
    """Reconnect to CCU/Homegear."""
    hass.services.call(DOMAIN, SERVICE_RECONNECT, {})
def setup(hass, config):
from pyhomematic import HMConnection
conf = config[DOMAIN]
hass.data[DATA_CONF] = remotes = {}
hass.data[DATA_STORE] = set()
for rname, rconfig in conf[CONF_INTERFACES].items():
remotes[rname] = {
'ip': socket.gethostbyname(rconfig.get(CONF_HOST)),
'port': rconfig.get(CONF_PORT),
'path': rconfig.get(CONF_PATH),
'resolvenames': rconfig.get(CONF_RESOLVENAMES),
'jsonport': rconfig.get(CONF_JSONPORT),
'username': rconfig.get(CONF_USERNAME),
'password': rconfig.get(CONF_PASSWORD),
'callbackip': rconfig.get(CONF_CALLBACK_IP),
'callbackport': rconfig.get(CONF_CALLBACK_PORT),
'connect': True,
}
for sname, sconfig in conf[CONF_HOSTS].items():
remotes[sname] = {
'ip': socket.gethostbyname(sconfig.get(CONF_HOST)),
'port': DEFAULT_PORT,
'username': sconfig.get(CONF_USERNAME),
'password': sconfig.get(CONF_PASSWORD),
'connect': False,
}
bound_system_callback = partial(_system_callback_handler, hass, config)
hass.data[DATA_HOMEMATIC] = homematic = HMConnection(
local=config[DOMAIN].get(CONF_LOCAL_IP),
localport=config[DOMAIN].get(CONF_LOCAL_PORT, DEFAULT_LOCAL_PORT),
remotes=remotes,
systemcallback=bound_system_callback,
interface_id='homeassistant'
)
homematic.start()
hass.bus.listen_once(
EVENT_HOMEASSISTANT_STOP, hass.data[DATA_HOMEMATIC].stop)
entity_hubs = []
for hub_name in conf[CONF_HOSTS].keys():
entity_hubs.append(HMHub(hass, homematic, hub_name))
def _hm_service_virtualkey(service):
address = service.data.get(ATTR_ADDRESS)
channel = service.data.get(ATTR_CHANNEL)
param = service.data.get(ATTR_PARAM)
hmdevice = _device_from_servicecall(hass, service)
if hmdevice is None:
_LOGGER.error("%s not found for service virtualkey!", address)
return
if param not in hmdevice.ACTIONNODE:
_LOGGER.error("%s not datapoint in hm device %s", param, address)
return
# Channel doesn't exist for device
if channel not in hmdevice.ACTIONNODE[param]:
_LOGGER.error("%i is not a channel in hm device %s",
channel, address)
return
hmdevice.actionNodeData(param, True, channel)
hass.services.register(
DOMAIN, SERVICE_VIRTUALKEY, _hm_service_virtualkey,
schema=SCHEMA_SERVICE_VIRTUALKEY)
def _service_handle_value(service):
entity_ids = service.data.get(ATTR_ENTITY_ID)
name = service.data[ATTR_NAME]
value = service.data[ATTR_VALUE]
if entity_ids:
entities = [entity for entity in entity_hubs if
entity.entity_id in entity_ids]
else:
entities = entity_hubs
if not entities:
_LOGGER.error("No HomeMatic hubs available")
return
for hub in entities:
hub.hm_set_variable(name, value)
hass.services.register(
DOMAIN, SERVICE_SET_VARIABLE_VALUE, _service_handle_value,
schema=SCHEMA_SERVICE_SET_VARIABLE_VALUE)
def _service_handle_reconnect(service):
homematic.reconnect()
hass.services.register(
DOMAIN, SERVICE_RECONNECT, _service_handle_reconnect,
schema=SCHEMA_SERVICE_RECONNECT)
def _service_handle_device(service):
address = service.data.get(ATTR_ADDRESS)
channel = service.data.get(ATTR_CHANNEL)
param = service.data.get(ATTR_PARAM)
value = service.data.get(ATTR_VALUE)
hmdevice = _device_from_servicecall(hass, service)
if hmdevice is None:
_LOGGER.error("%s not found!", address)
return
hmdevice.setValue(param, value, channel)
hass.services.register(
DOMAIN, SERVICE_SET_DEVICE_VALUE, _service_handle_device,
schema=SCHEMA_SERVICE_SET_DEVICE_VALUE)
def _service_handle_install_mode(service):
interface = service.data.get(ATTR_INTERFACE)
mode = service.data.get(ATTR_MODE)
time = service.data.get(ATTR_TIME)
address = service.data.get(ATTR_ADDRESS)
homematic.setInstallMode(interface, t=time, mode=mode, address=address)
hass.services.register(
DOMAIN, SERVICE_SET_INSTALL_MODE, _service_handle_install_mode,
schema=SCHEMA_SERVICE_SET_INSTALL_MODE)
return True
def _system_callback_handler(hass, config, src, *args):
if src == 'newDevices':
(interface_id, dev_descriptions) = args
interface = interface_id.split('-')[-1]
if not hass.data[DATA_CONF][interface]['connect']:
return
addresses = []
for dev in dev_descriptions:
address = dev['ADDRESS'].split(':')[0]
if address not in hass.data[DATA_STORE]:
hass.data[DATA_STORE].add(address)
addresses.append(address)
bound_event_callback = partial(_hm_event_handler, hass, interface)
for dev in addresses:
hmdevice = hass.data[DATA_HOMEMATIC].devices[interface].get(dev)
if hmdevice.EVENTNODE:
hmdevice.setEventCallback(
callback=bound_event_callback, bequeath=True)
if addresses:
for component_name, discovery_type in (
('switch', DISCOVER_SWITCHES),
('light', DISCOVER_LIGHTS),
('cover', DISCOVER_COVER),
('binary_sensor', DISCOVER_BINARY_SENSORS),
('sensor', DISCOVER_SENSORS),
('climate', DISCOVER_CLIMATE),
('lock', DISCOVER_LOCKS)):
found_devices = _get_devices(
hass, discovery_type, addresses, interface)
if found_devices:
discovery.load_platform(hass, component_name, DOMAIN, {
ATTR_DISCOVER_DEVICES: found_devices
}, config)
elif src == 'error':
_LOGGER.error("Error: %s", args)
(interface_id, errorcode, message) = args
hass.bus.fire(EVENT_ERROR, {
ATTR_ERRORCODE: errorcode,
ATTR_MESSAGE: message
})
def _get_devices(hass, discovery_type, keys, interface):
device_arr = []
for key in keys:
device = hass.data[DATA_HOMEMATIC].devices[interface][key]
class_name = device.__class__.__name__
metadata = {}
if class_name not in HM_DEVICE_TYPES[discovery_type]:
continue
if discovery_type == DISCOVER_SENSORS:
metadata.update(device.SENSORNODE)
elif discovery_type == DISCOVER_BINARY_SENSORS:
metadata.update(device.BINARYNODE)
else:
metadata.update({None: device.ELEMENT})
for param, channels in metadata.items():
if param in HM_IGNORE_DISCOVERY_NODE and class_name not in \
HM_IGNORE_DISCOVERY_NODE_EXCEPTIONS.get(param, []):
continue
_LOGGER.debug("%s: Handling %s: %s: %s",
discovery_type, key, param, channels)
for channel in channels:
name = _create_ha_name(
name=device.NAME, channel=channel, param=param,
count=len(channels)
)
device_dict = {
CONF_PLATFORM: "homematic",
ATTR_ADDRESS: key,
ATTR_INTERFACE: interface,
ATTR_NAME: name,
ATTR_CHANNEL: channel
}
if param is not None:
device_dict[ATTR_PARAM] = param
try:
DEVICE_SCHEMA(device_dict)
device_arr.append(device_dict)
except vol.MultipleInvalid as err:
_LOGGER.error("Invalid device config: %s",
str(err))
return device_arr
def _create_ha_name(name, channel, param, count):
if count == 1 and param is None:
return name
if count > 1 and param is None:
return "{} {}".format(name, channel)
if count == 1 and param is not None:
return "{} {}".format(name, param)
if count > 1 and param is not None:
return "{} {} {}".format(name, channel, param)
def _hm_event_handler(hass, interface, device, caller, attribute, value):
    """Handle all pyhomematic device events.

    Fires EVENT_KEYPRESS for keypress parameters and EVENT_IMPULSE for
    impulse parameters; other events are logged and dropped.
    """
    try:
        # `device` is '<address>:<channel>'.
        channel = int(device.split(":")[1])
        address = device.split(":")[0]
        hmdevice = hass.data[DATA_HOMEMATIC].devices[interface].get(address)
    except (TypeError, ValueError):
        _LOGGER.error("Event handling channel convert error!")
        return
    # Bug fix: .get() returns None for an unknown address; without this
    # guard the EVENTNODE lookup below raised AttributeError.
    if hmdevice is None:
        _LOGGER.error("Event handling device not found: %s", device)
        return
    if attribute not in hmdevice.EVENTNODE:
        return
    _LOGGER.debug("Event %s for %s channel %i", attribute,
                  hmdevice.NAME, channel)
    if attribute in HM_PRESS_EVENTS:
        hass.bus.fire(EVENT_KEYPRESS, {
            ATTR_NAME: hmdevice.NAME,
            ATTR_PARAM: attribute,
            ATTR_CHANNEL: channel
        })
        return
    if attribute in HM_IMPULSE_EVENTS:
        hass.bus.fire(EVENT_IMPULSE, {
            ATTR_NAME: hmdevice.NAME,
            ATTR_CHANNEL: channel
        })
        return
    _LOGGER.warning("Event is unknown and not forwarded")
def _device_from_servicecall(hass, service):
    """Extract HomeMatic device from service call.

    Returns None when the address is unknown on every interface.
    """
    address = service.data.get(ATTR_ADDRESS)
    interface = service.data.get(ATTR_INTERFACE)
    # pyhomematic stores the internal virtual remote with mixed case.
    if address == 'BIDCOS-RF':
        address = 'BidCoS-RF'
    if interface:
        # A specific interface was given; look only there.
        return hass.data[DATA_HOMEMATIC].devices[interface].get(address)
    # No interface given: search every known interface for the address.
    for devices in hass.data[DATA_HOMEMATIC].devices.values():
        if address in devices:
            return devices[address]
class HMHub(Entity):
def __init__(self, hass, homematic, name):
self.hass = hass
self.entity_id = "{}.{}".format(DOMAIN, name.lower())
self._homematic = homematic
self._variables = {}
self._name = name
self._state = None
self.hass.helpers.event.track_time_interval(
self._update_hub, SCAN_INTERVAL_HUB)
self.hass.add_job(self._update_hub, None)
self.hass.helpers.event.track_time_interval(
self._update_variables, SCAN_INTERVAL_VARIABLES)
self.hass.add_job(self._update_variables, None)
@property
def name(self):
return self._name
@property
def should_poll(self):
return False
@property
def state(self):
return self._state
@property
def state_attributes(self):
attr = self._variables.copy()
return attr
@property
def icon(self):
return "mdi:gradient"
def _update_hub(self, now):
service_message = self._homematic.getServiceMessages(self._name)
state = None if service_message is None else len(service_message)
if self._state != state:
self._state = state
self.schedule_update_ha_state()
def _update_variables(self, now):
variables = self._homematic.getAllSystemVariables(self._name)
if variables is None:
return
state_change = False
for key, value in variables.items():
if key in self._variables and value == self._variables[key]:
continue
state_change = True
self._variables.update({key: value})
if state_change:
self.schedule_update_ha_state()
def hm_set_variable(self, name, value):
if name not in self._variables:
_LOGGER.error("Variable %s not found on %s", name, self.name)
return
old_value = self._variables.get(name)
if isinstance(old_value, bool):
value = cv.boolean(value)
else:
value = float(value)
self._homematic.setSystemVariable(self.name, name, value)
self._variables.update({name: value})
self.schedule_update_ha_state()
class HMDevice(Entity):
def __init__(self, config):
self._name = config.get(ATTR_NAME)
self._address = config.get(ATTR_ADDRESS)
self._interface = config.get(ATTR_INTERFACE)
self._channel = config.get(ATTR_CHANNEL)
self._state = config.get(ATTR_PARAM)
self._data = {}
self._homematic = None
self._hmdevice = None
self._connected = False
self._available = False
if self._state:
self._state = self._state.upper()
@asyncio.coroutine
def async_added_to_hass(self):
yield from self.hass.async_add_job(self.link_homematic)
@property
def should_poll(self):
return False
@property
def name(self):
return self._name
@property
def available(self):
return self._available
@property
def device_state_attributes(self):
attr = {}
for node, data in HM_ATTRIBUTE_SUPPORT.items():
if node in self._data:
value = data[1].get(self._data[node], self._data[node])
attr[data[0]] = value
attr['id'] = self._hmdevice.ADDRESS
attr['interface'] = self._interface
return attr
def link_homematic(self):
if self._connected:
return True
self._homematic = self.hass.data[DATA_HOMEMATIC]
self._hmdevice = \
self._homematic.devices[self._interface][self._address]
self._connected = True
try:
self._init_data()
self._load_data_from_hm()
self._subscribe_homematic_events()
self._available = not self._hmdevice.UNREACH
except Exception as err:
self._connected = False
_LOGGER.error("Exception while linking %s: %s",
self._address, str(err))
def _hm_event_callback(self, device, caller, attribute, value):
_LOGGER.debug("%s received event '%s' value: %s", self._name,
attribute, value)
has_changed = False
if attribute in self._data:
if self._data[attribute] != value:
self._data[attribute] = value
has_changed = True
if attribute == 'UNREACH':
self._available = bool(value)
has_changed = True
elif not self.available:
self._available = False
has_changed = True
if has_changed:
self.schedule_update_ha_state()
def _subscribe_homematic_events(self):
channels_to_sub = set()
channels_to_sub.add(0)
for metadata in (self._hmdevice.SENSORNODE, self._hmdevice.BINARYNODE,
self._hmdevice.ATTRIBUTENODE,
self._hmdevice.WRITENODE, self._hmdevice.EVENTNODE,
self._hmdevice.ACTIONNODE):
for node, channels in metadata.items():
if node in self._data:
if len(channels) == 1:
channel = channels[0]
else:
channel = self._channel
try:
channels_to_sub.add(int(channel))
except (ValueError, TypeError):
_LOGGER.error("Invalid channel in metadata from %s",
self._name)
for channel in channels_to_sub:
_LOGGER.debug(
"Subscribe channel %d from %s", channel, self._name)
self._hmdevice.setEventCallback(
callback=self._hm_event_callback, bequeath=False,
channel=channel)
def _load_data_from_hm(self):
if not self._connected:
return False
for metadata, funct in (
(self._hmdevice.ATTRIBUTENODE,
self._hmdevice.getAttributeData),
(self._hmdevice.WRITENODE, self._hmdevice.getWriteData),
(self._hmdevice.SENSORNODE, self._hmdevice.getSensorData),
(self._hmdevice.BINARYNODE, self._hmdevice.getBinaryData)):
for node in metadata:
if metadata[node] and node in self._data:
self._data[node] = funct(name=node, channel=self._channel)
return True
def _hm_set_state(self, value):
if self._state in self._data:
self._data[self._state] = value
def _hm_get_state(self):
if self._state in self._data:
return self._data[self._state]
return None
def _init_data(self):
for data_note in self._hmdevice.ATTRIBUTENODE:
self._data.update({data_note: STATE_UNKNOWN})
self._init_data_struct()
    def _init_data_struct(self):
        """Populate ``self._data`` with device-specific nodes.

        Subclass hook; every concrete entity class must override this.
        """
        raise NotImplementedError
| true | true |
f737e3f2a7e6327a9c99529ed6fb04fdef544dbd | 943 | py | Python | molecool/molecule.py | mmim2904/molecool | 0c5c13cfec112e0d4af85044e6d8bec60f7d5b6a | [
"BSD-3-Clause"
] | null | null | null | molecool/molecule.py | mmim2904/molecool | 0c5c13cfec112e0d4af85044e6d8bec60f7d5b6a | [
"BSD-3-Clause"
] | null | null | null | molecool/molecule.py | mmim2904/molecool | 0c5c13cfec112e0d4af85044e6d8bec60f7d5b6a | [
"BSD-3-Clause"
] | null | null | null | """
Functions for analyzing molecules.
"""
from .measure import calculate_distance
from .atom_data import atomic_weights
def build_bond_list(coordinates, max_bond=1.5, min_bond=0):
    """Find the bonds in a molecule (set of coordinates) based on distance.

    Parameters
    ----------
    coordinates : list
        The coordinates of the atoms, one entry per atom.
    max_bond : float, optional
        Maximum distance for two atoms to be considered bonded.
    min_bond : float, optional
        Minimum distance for two atoms to be considered bonded.

    Returns
    -------
    bonds : dict
        Mapping of atom-index pairs ``(i, j)`` with ``i < j`` to their
        distance, for every pair whose distance lies strictly between
        ``min_bond`` and ``max_bond``.

    Raises
    ------
    ValueError
        If ``min_bond`` is negative (a distance can never be negative).
    """
    if min_bond < 0:
        raise ValueError("Minimum bond distance must not be negative.")
    bonds = {}
    num_atoms = len(coordinates)
    for atom1 in range(num_atoms):
        # Start at atom1 + 1: the pair (i, i) has distance 0 and can never
        # be a bond, so computing it is wasted work.
        for atom2 in range(atom1 + 1, num_atoms):
            distance = calculate_distance(coordinates[atom1], coordinates[atom2])
            if min_bond < distance < max_bond:
                bonds[(atom1, atom2)] = distance
    return bonds
def calculate_molecular_mass(symbols):
    """Calculate the mass of a molecule.

    Parameters
    ----------
    symbols : list
        A list of element symbols.

    Returns
    -------
    mass : float
        The total mass of the molecule.
    """
    # Sum the tabulated atomic weight of every element in the molecule.
    return sum(atomic_weights[element] for element in symbols)
| 23 | 83 | 0.639449 |
from .measure import calculate_distance
from .atom_data import atomic_weights
def build_bond_list(coordinates, max_bond=1.5, min_bond=0):
bonds = {}
num_atoms = len(coordinates)
for atom1 in range(num_atoms):
for atom2 in range(atom1, num_atoms):
distance = calculate_distance(coordinates[atom1], coordinates[atom2])
if distance > min_bond and distance < max_bond:
bonds[(atom1, atom2)] = distance
return bonds
def calculate_molecular_mass(symbols):
mass = 0
for atom in symbols:
mass+= atomic_weights[atom]
return mass
| true | true |
f737e6378ae9a76bfc6516197d3b9c08336a77f9 | 8,643 | py | Python | sympy/sets/handlers/functions.py | Michal-Gagala/sympy | 3cc756c2af73b5506102abaeefd1b654e286e2c8 | [
"MIT"
] | null | null | null | sympy/sets/handlers/functions.py | Michal-Gagala/sympy | 3cc756c2af73b5506102abaeefd1b654e286e2c8 | [
"MIT"
] | null | null | null | sympy/sets/handlers/functions.py | Michal-Gagala/sympy | 3cc756c2af73b5506102abaeefd1b654e286e2c8 | [
"MIT"
] | null | null | null | from sympy.core.singleton import S
from sympy.sets.sets import Set
from sympy.calculus.singularities import singularities
from sympy.core import Expr, Add
from sympy.core.function import Lambda, FunctionClass, diff, expand_mul
from sympy.core.numbers import Float, oo
from sympy.core.symbol import Dummy, symbols, Wild
from sympy.functions.elementary.exponential import exp, log
from sympy.functions.elementary.miscellaneous import Min, Max
from sympy.logic.boolalg import true
from sympy.multipledispatch import Dispatcher
from sympy.sets import (imageset, Interval, FiniteSet, Union, ImageSet,
Intersection, Range, Complement)
from sympy.sets.sets import EmptySet, is_function_invertible_in_set
from sympy.sets.fancysets import Integers, Naturals, Reals
from sympy.functions.elementary.exponential import match_real_imag
# Generic symbols used to build ``Lambda(_x, f(_x))`` wrappers below.
_x, _y = symbols("x y")
# Callables the handlers accept: plain SymPy function classes or lambdas.
FunctionUnion = (FunctionClass, Lambda)
# Multiple-dispatch table: (callable kind, set kind) -> image computation.
_set_function = Dispatcher('_set_function')
@_set_function.register(FunctionClass, Set)
def _(f, x):
    # Generic fallback: no rule to compute the image, signal "unknown".
    return None
@_set_function.register(FunctionUnion, FiniteSet)
def _(f, x):
    """Image of a finite set: apply ``f`` to each element."""
    return FiniteSet(*[f(elem) for elem in x])
@_set_function.register(Lambda, Interval)
def _(f, x):
    """Image of an ``Interval`` under a univariate ``Lambda``.

    Returns ``None`` (meaning "unknown") whenever the image cannot be
    computed symbolically, so the dispatcher falls back to an ``ImageSet``.
    """
    from sympy.solvers.solveset import solveset
    from sympy.series import limit
    # TODO: handle functions with infinitely many solutions (eg, sin, tan)
    # TODO: handle multivariate functions
    expr = f.expr
    # Only single-variable lambdas with at most that one free symbol.
    if len(expr.free_symbols) > 1 or len(f.variables) != 1:
        return
    var = f.variables[0]
    if not var.is_real:
        # A non-real variable is acceptable only if substituting a real
        # dummy does not make the expression provably non-real.
        if expr.subs(var, Dummy(real=True)).is_real is False:
            return
    if expr.is_Piecewise:
        # Map each piece over the part of ``x`` where its condition holds.
        result = S.EmptySet
        domain_set = x
        for (p_expr, p_cond) in expr.args:
            if p_cond is true:
                intrvl = domain_set
            else:
                intrvl = p_cond.as_set()
                intrvl = Intersection(domain_set, intrvl)
            if p_expr.is_Number:
                image = FiniteSet(p_expr)
            else:
                image = imageset(Lambda(var, p_expr), intrvl)
            result = Union(result, image)
            # remove the part which has been `imaged`
            domain_set = Complement(domain_set, intrvl)
            if domain_set is S.EmptySet:
                break
        return result
    if not x.start.is_comparable or not x.end.is_comparable:
        return
    try:
        # Interior singularities split the interval into monotone pieces.
        from sympy.polys.polyutils import _nsort
        sing = list(singularities(expr, var, x))
        if len(sing) > 1:
            sing = _nsort(sing)
    except NotImplementedError:
        return
    # Endpoint values: use one-sided limits at open ends, f itself at
    # closed, non-singular ends.
    if x.left_open:
        _start = limit(expr, var, x.start, dir="+")
    elif x.start not in sing:
        _start = f(x.start)
    if x.right_open:
        _end = limit(expr, var, x.end, dir="-")
    elif x.end not in sing:
        _end = f(x.end)
    if len(sing) == 0:
        # No singularities: extrema occur at endpoints or critical points.
        soln_expr = solveset(diff(expr, var), var)
        if not (isinstance(soln_expr, FiniteSet)
                or soln_expr is S.EmptySet):
            return
        solns = list(soln_expr)
        extr = [_start, _end] + [f(i) for i in solns
                                 if i.is_real and i in x]
        start, end = Min(*extr), Max(*extr)
        left_open, right_open = False, False
        if _start <= _end:
            # the minimum or maximum value can occur simultaneously
            # on both the edge of the interval and in some interior
            # point
            if start == _start and start not in solns:
                left_open = x.left_open
            if end == _end and end not in solns:
                right_open = x.right_open
        else:
            if start == _end and start not in solns:
                left_open = x.right_open
            if end == _start and end not in solns:
                right_open = x.left_open
        return Interval(start, end, left_open, right_open)
    else:
        # Split at the singularities and take the union of piece images.
        return imageset(f, Interval(x.start, sing[0],
                                    x.left_open, True)) + \
            Union(*[imageset(f, Interval(sing[i], sing[i + 1], True, True))
                    for i in range(0, len(sing) - 1)]) + \
            imageset(f, Interval(sing[-1], x.end, True, x.right_open))
@_set_function.register(FunctionClass, Interval)
def _(f, x):
    """Image of an interval under a bare function class.

    Only ``exp`` and ``log`` are mapped directly; anything else becomes
    a symbolic ``ImageSet``.
    """
    openness = (x.left_open, x.right_open)
    if f == exp:
        return Interval(exp(x.start), exp(x.end), *openness)
    if f == log:
        return Interval(log(x.start), log(x.end), *openness)
    return ImageSet(Lambda(_x, f(_x)), x)
@_set_function.register(FunctionUnion, Union)
def _(f, x):
    """Map ``f`` over each member of the union and re-unite the images."""
    images = [imageset(f, member) for member in x.args]
    return Union(*images)
@_set_function.register(FunctionUnion, Intersection)
def _(f, x):
    """Image of an intersection.

    For an ``f`` invertible on ``x`` the image of the intersection equals
    the intersection of the images; otherwise fall back to a symbolic
    ``ImageSet``.
    """
    if not is_function_invertible_in_set(f, x):
        return ImageSet(Lambda(_x, f(_x)), x)
    return Intersection(*[imageset(f, member) for member in x.args])
@_set_function.register(FunctionUnion, EmptySet)
def _(f, x):
    # The image of the empty set is the empty set itself.
    return x
@_set_function.register(FunctionUnion, Set)
def _(f, x):
    # Generic set: represent the image symbolically as an ImageSet.
    return ImageSet(Lambda(_x, f(_x)), x)
@_set_function.register(FunctionUnion, Range)
def _(f, self):
    """Image of a ``Range`` under ``f``.

    Only handles ``f`` linear in its variable; any other case returns
    ``None`` so the dispatcher falls back to an ``ImageSet``.
    """
    if not self:
        return S.EmptySet
    if not isinstance(f.expr, Expr):
        return
    if self.size == 1:
        # A single point maps to a single point.
        return FiniteSet(f(self[0]))
    if f is S.IdentityFunction:
        return self
    x = f.variables[0]
    expr = f.expr
    # handle f that is linear in f's variable
    if x not in expr.free_symbols or x in expr.diff(x).free_symbols:
        return
    # Re-parametrize over Range(self.size), anchored at a finite end.
    if self.start.is_finite:
        F = f(self.step*x + self.start)  # for i in range(len(self))
    else:
        F = f(-self.step*x + self[-1])  # for i in range(len(self))
    F = expand_mul(F)
    if F != expr:
        return imageset(x, F, Range(self.size))
@_set_function.register(FunctionUnion, Integers)
def _(f, self):
    """Image of the integers under ``f``.

    Recognizes ``abs(n)`` and canonicalizes linear expressions
    ``a*n + b``; returns ``None`` for anything it cannot simplify.
    """
    expr = f.expr
    if not isinstance(expr, Expr):
        return
    n = f.variables[0]
    if expr == abs(n):
        return S.Naturals0
    # f(x) + c and f(-x) + c cover the same integers
    # so choose the form that has the fewest negatives
    c = f(0)
    fx = f(n) - c
    f_x = f(-n) - c
    neg_count = lambda e: sum(_.could_extract_minus_sign()
                              for _ in Add.make_args(e))
    if neg_count(f_x) < neg_count(fx):
        expr = f_x + c
    a = Wild('a', exclude=[n])
    b = Wild('b', exclude=[n])
    match = expr.match(a*n + b)
    # Only canonicalize exact (non-Float) linear matches.
    if match and match[a] and (
            not match[a].atoms(Float) and
            not match[b].atoms(Float)):
        # canonical shift
        a, b = match[a], match[b]
        if a in [1, -1]:
            # drop integer addends in b
            nonint = []
            for bi in Add.make_args(b):
                if not bi.is_integer:
                    nonint.append(bi)
            b = Add(*nonint)
        if b.is_number and a.is_real:
            # avoid Mod for complex numbers, #11391
            br, bi = match_real_imag(b)
            if br and br.is_comparable and a.is_comparable:
                br %= a
                b = br + S.ImaginaryUnit*bi
        elif b.is_number and a.is_imaginary:
            br, bi = match_real_imag(b)
            ai = a/S.ImaginaryUnit
            if bi and bi.is_comparable and ai.is_comparable:
                bi %= ai
                b = br + S.ImaginaryUnit*bi
        expr = a*n + b
    # Only wrap in a new ImageSet if canonicalization changed something.
    if expr != f.expr:
        return ImageSet(Lambda(n, expr), S.Integers)
@_set_function.register(FunctionUnion, Naturals)
def _(f, self):
    """Image of the naturals (or naturals with zero) under ``f``.

    Handles ``abs(x)`` and integer-linear expressions ``step*x + c``;
    otherwise returns ``None`` (fall back to ``ImageSet``).
    """
    expr = f.expr
    if not isinstance(expr, Expr):
        return
    x = f.variables[0]
    if not expr.free_symbols - {x}:
        if expr == abs(x):
            if self is S.Naturals:
                return self
            return S.Naturals0
        step = expr.coeff(x)
        c = expr.subs(x, 0)
        if c.is_Integer and step.is_Integer and expr == step*x + c:
            if self is S.Naturals:
                # Domain starts at 1, so shift the offset by one step.
                c += step
            if step > 0:
                if step == 1:
                    if c == 0:
                        return S.Naturals0
                    elif c == 1:
                        return S.Naturals
                return Range(c, oo, step)
            return Range(c, -oo, step)
@_set_function.register(FunctionUnion, Reals)
def _(f, self):
    """Image of the reals: delegate to the Interval handler on (-oo, oo)."""
    if not isinstance(f.expr, Expr):
        return
    return _set_function(f, Interval(-oo, oo))
| 32.863118 | 77 | 0.569247 | from sympy.core.singleton import S
from sympy.sets.sets import Set
from sympy.calculus.singularities import singularities
from sympy.core import Expr, Add
from sympy.core.function import Lambda, FunctionClass, diff, expand_mul
from sympy.core.numbers import Float, oo
from sympy.core.symbol import Dummy, symbols, Wild
from sympy.functions.elementary.exponential import exp, log
from sympy.functions.elementary.miscellaneous import Min, Max
from sympy.logic.boolalg import true
from sympy.multipledispatch import Dispatcher
from sympy.sets import (imageset, Interval, FiniteSet, Union, ImageSet,
Intersection, Range, Complement)
from sympy.sets.sets import EmptySet, is_function_invertible_in_set
from sympy.sets.fancysets import Integers, Naturals, Reals
from sympy.functions.elementary.exponential import match_real_imag
_x, _y = symbols("x y")
FunctionUnion = (FunctionClass, Lambda)
_set_function = Dispatcher('_set_function')
@_set_function.register(FunctionClass, Set)
def _(f, x):
return None
@_set_function.register(FunctionUnion, FiniteSet)
def _(f, x):
return FiniteSet(*map(f, x))
@_set_function.register(Lambda, Interval)
def _(f, x):
from sympy.solvers.solveset import solveset
from sympy.series import limit
expr = f.expr
if len(expr.free_symbols) > 1 or len(f.variables) != 1:
return
var = f.variables[0]
if not var.is_real:
if expr.subs(var, Dummy(real=True)).is_real is False:
return
if expr.is_Piecewise:
result = S.EmptySet
domain_set = x
for (p_expr, p_cond) in expr.args:
if p_cond is true:
intrvl = domain_set
else:
intrvl = p_cond.as_set()
intrvl = Intersection(domain_set, intrvl)
if p_expr.is_Number:
image = FiniteSet(p_expr)
else:
image = imageset(Lambda(var, p_expr), intrvl)
result = Union(result, image)
domain_set = Complement(domain_set, intrvl)
if domain_set is S.EmptySet:
break
return result
if not x.start.is_comparable or not x.end.is_comparable:
return
try:
from sympy.polys.polyutils import _nsort
sing = list(singularities(expr, var, x))
if len(sing) > 1:
sing = _nsort(sing)
except NotImplementedError:
return
if x.left_open:
_start = limit(expr, var, x.start, dir="+")
elif x.start not in sing:
_start = f(x.start)
if x.right_open:
_end = limit(expr, var, x.end, dir="-")
elif x.end not in sing:
_end = f(x.end)
if len(sing) == 0:
soln_expr = solveset(diff(expr, var), var)
if not (isinstance(soln_expr, FiniteSet)
or soln_expr is S.EmptySet):
return
solns = list(soln_expr)
extr = [_start, _end] + [f(i) for i in solns
if i.is_real and i in x]
start, end = Min(*extr), Max(*extr)
left_open, right_open = False, False
if _start <= _end:
if start == _start and start not in solns:
left_open = x.left_open
if end == _end and end not in solns:
right_open = x.right_open
else:
if start == _end and start not in solns:
left_open = x.right_open
if end == _start and end not in solns:
right_open = x.left_open
return Interval(start, end, left_open, right_open)
else:
return imageset(f, Interval(x.start, sing[0],
x.left_open, True)) + \
Union(*[imageset(f, Interval(sing[i], sing[i + 1], True, True))
for i in range(0, len(sing) - 1)]) + \
imageset(f, Interval(sing[-1], x.end, True, x.right_open))
@_set_function.register(FunctionClass, Interval)
def _(f, x):
if f == exp:
return Interval(exp(x.start), exp(x.end), x.left_open, x.right_open)
elif f == log:
return Interval(log(x.start), log(x.end), x.left_open, x.right_open)
return ImageSet(Lambda(_x, f(_x)), x)
@_set_function.register(FunctionUnion, Union)
def _(f, x):
return Union(*(imageset(f, arg) for arg in x.args))
@_set_function.register(FunctionUnion, Intersection)
def _(f, x):
if is_function_invertible_in_set(f, x):
return Intersection(*(imageset(f, arg) for arg in x.args))
else:
return ImageSet(Lambda(_x, f(_x)), x)
@_set_function.register(FunctionUnion, EmptySet)
def _(f, x):
return x
@_set_function.register(FunctionUnion, Set)
def _(f, x):
return ImageSet(Lambda(_x, f(_x)), x)
@_set_function.register(FunctionUnion, Range)
def _(f, self):
if not self:
return S.EmptySet
if not isinstance(f.expr, Expr):
return
if self.size == 1:
return FiniteSet(f(self[0]))
if f is S.IdentityFunction:
return self
x = f.variables[0]
expr = f.expr
if x not in expr.free_symbols or x in expr.diff(x).free_symbols:
return
if self.start.is_finite:
F = f(self.step*x + self.start) # for i in range(len(self))
else:
F = f(-self.step*x + self[-1])
F = expand_mul(F)
if F != expr:
return imageset(x, F, Range(self.size))
@_set_function.register(FunctionUnion, Integers)
def _(f, self):
expr = f.expr
if not isinstance(expr, Expr):
return
n = f.variables[0]
if expr == abs(n):
return S.Naturals0
# f(x) + c and f(-x) + c cover the same integers
# so choose the form that has the fewest negatives
c = f(0)
fx = f(n) - c
f_x = f(-n) - c
neg_count = lambda e: sum(_.could_extract_minus_sign()
for _ in Add.make_args(e))
if neg_count(f_x) < neg_count(fx):
expr = f_x + c
a = Wild('a', exclude=[n])
b = Wild('b', exclude=[n])
match = expr.match(a*n + b)
if match and match[a] and (
not match[a].atoms(Float) and
not match[b].atoms(Float)):
# canonical shift
a, b = match[a], match[b]
if a in [1, -1]:
# drop integer addends in b
nonint = []
for bi in Add.make_args(b):
if not bi.is_integer:
nonint.append(bi)
b = Add(*nonint)
if b.is_number and a.is_real:
# avoid Mod for complex numbers, #11391
br, bi = match_real_imag(b)
if br and br.is_comparable and a.is_comparable:
br %= a
b = br + S.ImaginaryUnit*bi
elif b.is_number and a.is_imaginary:
br, bi = match_real_imag(b)
ai = a/S.ImaginaryUnit
if bi and bi.is_comparable and ai.is_comparable:
bi %= ai
b = br + S.ImaginaryUnit*bi
expr = a*n + b
if expr != f.expr:
return ImageSet(Lambda(n, expr), S.Integers)
@_set_function.register(FunctionUnion, Naturals)
def _(f, self):
expr = f.expr
if not isinstance(expr, Expr):
return
x = f.variables[0]
if not expr.free_symbols - {x}:
if expr == abs(x):
if self is S.Naturals:
return self
return S.Naturals0
step = expr.coeff(x)
c = expr.subs(x, 0)
if c.is_Integer and step.is_Integer and expr == step*x + c:
if self is S.Naturals:
c += step
if step > 0:
if step == 1:
if c == 0:
return S.Naturals0
elif c == 1:
return S.Naturals
return Range(c, oo, step)
return Range(c, -oo, step)
@_set_function.register(FunctionUnion, Reals)
def _(f, self):
expr = f.expr
if not isinstance(expr, Expr):
return
return _set_function(f, Interval(-oo, oo))
| true | true |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.