hexsha stringlengths 40 40 | size int64 2 1.02M | ext stringclasses 10
values | lang stringclasses 1
value | max_stars_repo_path stringlengths 4 245 | max_stars_repo_name stringlengths 6 130 | max_stars_repo_head_hexsha stringlengths 40 40 | max_stars_repo_licenses listlengths 1 10 | max_stars_count int64 1 191k ⌀ | max_stars_repo_stars_event_min_datetime stringlengths 24 24 ⌀ | max_stars_repo_stars_event_max_datetime stringlengths 24 24 ⌀ | max_issues_repo_path stringlengths 4 245 | max_issues_repo_name stringlengths 6 130 | max_issues_repo_head_hexsha stringlengths 40 40 | max_issues_repo_licenses listlengths 1 10 | max_issues_count int64 1 67k ⌀ | max_issues_repo_issues_event_min_datetime stringlengths 24 24 ⌀ | max_issues_repo_issues_event_max_datetime stringlengths 24 24 ⌀ | max_forks_repo_path stringlengths 4 245 | max_forks_repo_name stringlengths 6 130 | max_forks_repo_head_hexsha stringlengths 40 40 | max_forks_repo_licenses listlengths 1 10 | max_forks_count int64 1 105k ⌀ | max_forks_repo_forks_event_min_datetime stringlengths 24 24 ⌀ | max_forks_repo_forks_event_max_datetime stringlengths 24 24 ⌀ | content stringlengths 2 1.02M | avg_line_length float64 1 417k | max_line_length int64 1 987k | alphanum_fraction float64 0 1 | content_no_comment stringlengths 0 1.01M | is_comment_constant_removed bool 1
class | is_sharp_comment_removed bool 1
class |
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
f73889404b1f3c76175bd1549c1754c81875f813 | 2,190 | py | Python | tests/conftest.py | edge-minato/pypj | c928cf9ba29017ed6c0756b24f91d75ae16473e4 | [
"MIT"
] | 13 | 2021-09-29T03:16:42.000Z | 2022-02-28T19:23:28.000Z | tests/conftest.py | edge-minato/pypj | c928cf9ba29017ed6c0756b24f91d75ae16473e4 | [
"MIT"
] | 41 | 2021-09-03T09:49:49.000Z | 2022-03-20T20:46:02.000Z | tests/conftest.py | edge-minato/pypj | c928cf9ba29017ed6c0756b24f91d75ae16473e4 | [
"MIT"
] | null | null | null | import json
import re
from pathlib import Path
from shutil import rmtree
from subprocess import PIPE, run
from typing import Generator
import pytest
import toml # type: ignore
import yaml # type: ignore
from pypj.exception import Emsg, PypjError
def prepare_tmp_dir(tmp: Path) -> None:
if tmp.exists():
rmtree(tmp)
tmp.mkdir()
def remove_tmp_dir(tmp: Path) -> None:
rmtree(tmp)
@pytest.fixture(scope="session", autouse=True)
def scope_session() -> Generator:
print("setup before session")
tmp = Path("tmp").resolve()
prepare_tmp_dir(tmp)
yield
remove_tmp_dir(tmp)
print("teardown after session")
def prepare_dir(dir_name: str) -> Path:
package_dir = Path("tmp").joinpath(dir_name)
package_dir.mkdir() # mkdir ./tmp/dirname
return package_dir
def validate_jsonc(file_path: Path) -> bool:
try:
with file_path.open(mode="r") as f:
text = f.read()
# delete comments
re_text = re.sub(r"/\*[\s\S]*?\*/|//.*", "", text)
json.loads(re_text)
return True
except Exception:
return False
def validate_yaml(file_path: Path) -> bool:
try:
with file_path.open(mode="r") as f:
yaml.safe_load(f)
return True
except Exception:
return False
def validate_toml(file_path: Path) -> bool:
try:
with file_path.open(mode="r") as f:
toml.load(f)
return True
except Exception:
return False
class DummyReturn:
def __init__(self, returncode: int) -> None:
self.returncode: int = returncode
def dummy_command(cmd: str) -> DummyReturn:
if "poetry new" in cmd:
r = run(cmd, shell=True, stdout=PIPE, stderr=PIPE, text=True)
return DummyReturn(r.returncode)
else:
return DummyReturn(0)
def dummy_input(input_list: list) -> Generator:
for r in input_list:
print(r)
yield r
def pypj_error() -> None:
raise PypjError(Emsg.OS_NOT_SUPPORTED)
WORDS = ["pypj", "edge", "minato"]
def does_contain_specific_words(content: str) -> bool:
for w in WORDS:
if w in content:
return True
return False
| 21.470588 | 69 | 0.632877 | import json
import re
from pathlib import Path
from shutil import rmtree
from subprocess import PIPE, run
from typing import Generator
import pytest
import toml
import yaml
from pypj.exception import Emsg, PypjError
def prepare_tmp_dir(tmp: Path) -> None:
if tmp.exists():
rmtree(tmp)
tmp.mkdir()
def remove_tmp_dir(tmp: Path) -> None:
rmtree(tmp)
@pytest.fixture(scope="session", autouse=True)
def scope_session() -> Generator:
print("setup before session")
tmp = Path("tmp").resolve()
prepare_tmp_dir(tmp)
yield
remove_tmp_dir(tmp)
print("teardown after session")
def prepare_dir(dir_name: str) -> Path:
package_dir = Path("tmp").joinpath(dir_name)
package_dir.mkdir()
return package_dir
def validate_jsonc(file_path: Path) -> bool:
try:
with file_path.open(mode="r") as f:
text = f.read()
re_text = re.sub(r"/\*[\s\S]*?\*/|//.*", "", text)
json.loads(re_text)
return True
except Exception:
return False
def validate_yaml(file_path: Path) -> bool:
try:
with file_path.open(mode="r") as f:
yaml.safe_load(f)
return True
except Exception:
return False
def validate_toml(file_path: Path) -> bool:
try:
with file_path.open(mode="r") as f:
toml.load(f)
return True
except Exception:
return False
class DummyReturn:
def __init__(self, returncode: int) -> None:
self.returncode: int = returncode
def dummy_command(cmd: str) -> DummyReturn:
if "poetry new" in cmd:
r = run(cmd, shell=True, stdout=PIPE, stderr=PIPE, text=True)
return DummyReturn(r.returncode)
else:
return DummyReturn(0)
def dummy_input(input_list: list) -> Generator:
for r in input_list:
print(r)
yield r
def pypj_error() -> None:
raise PypjError(Emsg.OS_NOT_SUPPORTED)
WORDS = ["pypj", "edge", "minato"]
def does_contain_specific_words(content: str) -> bool:
for w in WORDS:
if w in content:
return True
return False
| true | true |
f738896be1d84123833439159d7e056cc67a9738 | 1,547 | py | Python | experimentator/wandb_experiment.py | gabriel-vanzandycke/experimentator | e733e03930fc45ad62dfbf3f85cb9babfd078585 | [
"BSD-3-Clause"
] | 1 | 2020-09-25T13:10:59.000Z | 2020-09-25T13:10:59.000Z | experimentator/wandb_experiment.py | gabriel-vanzandycke/experimentator | e733e03930fc45ad62dfbf3f85cb9babfd078585 | [
"BSD-3-Clause"
] | null | null | null | experimentator/wandb_experiment.py | gabriel-vanzandycke/experimentator | e733e03930fc45ad62dfbf3f85cb9babfd078585 | [
"BSD-3-Clause"
] | null | null | null | import os
import pandas
from functools import cached_property
from experimentator import StateLogger
import wandb
os.environ["WANDB_SILENT"] = "true"
os.environ["WANDB_START_METHOD"] = "thread"
class LogStateWandB(StateLogger):
best_report = {}
def __init__(self, criterion_metric=None, mode="online"):
self.criterion_metric = criterion_metric
self.mode = mode
self.initialized = False
@cached_property
def wandb_run(self):
run = wandb.init(
project=self.project_name,
reinit=True,
config=self.config,
settings=wandb.Settings(show_emoji=False, _save_requirements=False),
mode=self.mode,
)
run.name = self.run_name
self.initialized = True
return run
def __del__(self):
if self.initialized:
self.wandb_run.finish()
def on_epoch_end(self, state, **_):
report = {}
for key, data in state.items():
if key not in self.excluded_keys:
if isinstance(data, pandas.DataFrame):
report[key] = wandb.Table(dataframe=data)
else:
report[key] = data
self.wandb_run.log(report) # log *once* per epoch
if self.criterion_metric and self.criterion_metric in report:
if not self.best_report or report[self.criterion_metric] > self.best_report[self.criterion_metric]:
self.best_report = report
self.wandb_run.summary.update(self.best_report)
| 33.630435 | 111 | 0.624434 | import os
import pandas
from functools import cached_property
from experimentator import StateLogger
import wandb
os.environ["WANDB_SILENT"] = "true"
os.environ["WANDB_START_METHOD"] = "thread"
class LogStateWandB(StateLogger):
best_report = {}
def __init__(self, criterion_metric=None, mode="online"):
self.criterion_metric = criterion_metric
self.mode = mode
self.initialized = False
@cached_property
def wandb_run(self):
run = wandb.init(
project=self.project_name,
reinit=True,
config=self.config,
settings=wandb.Settings(show_emoji=False, _save_requirements=False),
mode=self.mode,
)
run.name = self.run_name
self.initialized = True
return run
def __del__(self):
if self.initialized:
self.wandb_run.finish()
def on_epoch_end(self, state, **_):
report = {}
for key, data in state.items():
if key not in self.excluded_keys:
if isinstance(data, pandas.DataFrame):
report[key] = wandb.Table(dataframe=data)
else:
report[key] = data
self.wandb_run.log(report)
if self.criterion_metric and self.criterion_metric in report:
if not self.best_report or report[self.criterion_metric] > self.best_report[self.criterion_metric]:
self.best_report = report
self.wandb_run.summary.update(self.best_report)
| true | true |
f73889eefdaf5142fa18e7f573ffd713f576ac95 | 6,243 | py | Python | tests/core/util/test_keychain.py | yuanliuus/thyme-blockchain | 9ea5cddc78f601fcbe77101d74147cf8190e423d | [
"Apache-2.0"
] | 6 | 2021-06-30T13:03:47.000Z | 2021-07-10T12:46:37.000Z | tests/core/util/test_keychain.py | yuanliuus/thyme-blockchain | 9ea5cddc78f601fcbe77101d74147cf8190e423d | [
"Apache-2.0"
] | 8 | 2021-07-01T15:45:09.000Z | 2021-09-08T04:30:46.000Z | tests/core/util/test_keychain.py | yuanliuus/thyme-blockchain | 9ea5cddc78f601fcbe77101d74147cf8190e423d | [
"Apache-2.0"
] | 11 | 2021-07-03T17:30:57.000Z | 2022-03-15T08:47:03.000Z | import json
import unittest
from secrets import token_bytes
from blspy import AugSchemeMPL, PrivateKey
from thyme.util.keychain import Keychain, bytes_from_mnemonic, bytes_to_mnemonic, generate_mnemonic, mnemonic_to_seed
class TesKeychain(unittest.TestCase):
def test_basic_add_delete(self):
kc: Keychain = Keychain(testing=True)
kc.delete_all_keys()
assert kc._get_free_private_key_index() == 0
assert len(kc.get_all_private_keys()) == 0
assert kc.get_first_private_key() is None
assert kc.get_first_public_key() is None
mnemonic = generate_mnemonic()
entropy = bytes_from_mnemonic(mnemonic)
assert bytes_to_mnemonic(entropy) == mnemonic
mnemonic_2 = generate_mnemonic()
kc.add_private_key(mnemonic, "")
assert kc._get_free_private_key_index() == 1
assert len(kc.get_all_private_keys()) == 1
kc.add_private_key(mnemonic_2, "")
kc.add_private_key(mnemonic_2, "") # checks to not add duplicates
assert kc._get_free_private_key_index() == 2
assert len(kc.get_all_private_keys()) == 2
assert kc._get_free_private_key_index() == 2
assert len(kc.get_all_private_keys()) == 2
assert len(kc.get_all_public_keys()) == 2
assert kc.get_all_private_keys()[0] == kc.get_first_private_key()
assert kc.get_all_public_keys()[0] == kc.get_first_public_key()
assert len(kc.get_all_private_keys()) == 2
seed_2 = mnemonic_to_seed(mnemonic, "")
seed_key_2 = AugSchemeMPL.key_gen(seed_2)
kc.delete_key_by_fingerprint(seed_key_2.get_g1().get_fingerprint())
assert kc._get_free_private_key_index() == 0
assert len(kc.get_all_private_keys()) == 1
kc.delete_all_keys()
assert kc._get_free_private_key_index() == 0
assert len(kc.get_all_private_keys()) == 0
kc.add_private_key(bytes_to_mnemonic(token_bytes(32)), "my passphrase")
kc.add_private_key(bytes_to_mnemonic(token_bytes(32)), "")
kc.add_private_key(bytes_to_mnemonic(token_bytes(32)), "third passphrase")
assert len(kc.get_all_public_keys()) == 3
assert len(kc.get_all_private_keys()) == 1
assert len(kc.get_all_private_keys(["my passphrase", ""])) == 2
assert len(kc.get_all_private_keys(["my passphrase", "", "third passphrase", "another"])) == 3
assert len(kc.get_all_private_keys(["my passhrase wrong"])) == 0
assert kc.get_first_private_key() is not None
assert kc.get_first_private_key(["bad passphrase"]) is None
assert kc.get_first_public_key() is not None
kc.delete_all_keys()
kc.add_private_key(bytes_to_mnemonic(token_bytes(32)), "my passphrase")
assert kc.get_first_public_key() is not None
def test_bip39_eip2333_test_vector(self):
kc: Keychain = Keychain(testing=True)
kc.delete_all_keys()
mnemonic = "abandon abandon abandon abandon abandon abandon abandon abandon abandon abandon abandon about"
passphrase = "TREZOR"
print("entropy to seed:", mnemonic_to_seed(mnemonic, passphrase).hex())
master_sk = kc.add_private_key(mnemonic, passphrase)
tv_master_int = 5399117110774477986698372024995405256382522670366369834617409486544348441851
tv_child_int = 11812940737387919040225825939013910852517748782307378293770044673328955938106
assert master_sk == PrivateKey.from_bytes(tv_master_int.to_bytes(32, "big"))
child_sk = AugSchemeMPL.derive_child_sk(master_sk, 0)
assert child_sk == PrivateKey.from_bytes(tv_child_int.to_bytes(32, "big"))
def test_bip39_test_vectors_trezor(self):
with open("tests/util/bip39_test_vectors.json") as f:
all_vectors = json.loads(f.read())
for vector_list in all_vectors["english"]:
entropy_bytes = bytes.fromhex(vector_list[0])
mnemonic = vector_list[1]
seed = bytes.fromhex(vector_list[2])
assert bytes_from_mnemonic(mnemonic) == entropy_bytes
assert bytes_to_mnemonic(entropy_bytes) == mnemonic
assert mnemonic_to_seed(mnemonic, "TREZOR") == seed
def test_utf8_nfkd(self):
# Test code from trezor:
# Copyright (c) 2013 Pavol Rusnak
# Copyright (c) 2017 mruddy
# https://github.com/trezor/python-mnemonic/blob/master/test_mnemonic.py
# The same sentence in various UTF-8 forms
words_nfkd = "Pr\u030ci\u0301s\u030cerne\u030c z\u030clut\u030couc\u030cky\u0301 ku\u030an\u030c u\u0301pe\u030cl d\u030ca\u0301belske\u0301 o\u0301dy za\u0301ker\u030cny\u0301 uc\u030cen\u030c be\u030cz\u030ci\u0301 pode\u0301l zo\u0301ny u\u0301lu\u030a" # noqa: E501
words_nfc = "P\u0159\xed\u0161ern\u011b \u017elu\u0165ou\u010dk\xfd k\u016f\u0148 \xfap\u011bl \u010f\xe1belsk\xe9 \xf3dy z\xe1ke\u0159n\xfd u\u010de\u0148 b\u011b\u017e\xed pod\xe9l z\xf3ny \xfal\u016f" # noqa: E501
words_nfkc = "P\u0159\xed\u0161ern\u011b \u017elu\u0165ou\u010dk\xfd k\u016f\u0148 \xfap\u011bl \u010f\xe1belsk\xe9 \xf3dy z\xe1ke\u0159n\xfd u\u010de\u0148 b\u011b\u017e\xed pod\xe9l z\xf3ny \xfal\u016f" # noqa: E501
words_nfd = "Pr\u030ci\u0301s\u030cerne\u030c z\u030clut\u030couc\u030cky\u0301 ku\u030an\u030c u\u0301pe\u030cl d\u030ca\u0301belske\u0301 o\u0301dy za\u0301ker\u030cny\u0301 uc\u030cen\u030c be\u030cz\u030ci\u0301 pode\u0301l zo\u0301ny u\u0301lu\u030a" # noqa: E501
passphrase_nfkd = "Neuve\u030cr\u030citelne\u030c bezpec\u030cne\u0301 hesli\u0301c\u030cko"
passphrase_nfc = "Neuv\u011b\u0159iteln\u011b bezpe\u010dn\xe9 hesl\xed\u010dko"
passphrase_nfkc = "Neuv\u011b\u0159iteln\u011b bezpe\u010dn\xe9 hesl\xed\u010dko"
passphrase_nfd = "Neuve\u030cr\u030citelne\u030c bezpec\u030cne\u0301 hesli\u0301c\u030cko"
seed_nfkd = mnemonic_to_seed(words_nfkd, passphrase_nfkd)
seed_nfc = mnemonic_to_seed(words_nfc, passphrase_nfc)
seed_nfkc = mnemonic_to_seed(words_nfkc, passphrase_nfkc)
seed_nfd = mnemonic_to_seed(words_nfd, passphrase_nfd)
assert seed_nfkd == seed_nfc
assert seed_nfkd == seed_nfkc
assert seed_nfkd == seed_nfd
| 51.595041 | 278 | 0.707833 | import json
import unittest
from secrets import token_bytes
from blspy import AugSchemeMPL, PrivateKey
from thyme.util.keychain import Keychain, bytes_from_mnemonic, bytes_to_mnemonic, generate_mnemonic, mnemonic_to_seed
class TesKeychain(unittest.TestCase):
def test_basic_add_delete(self):
kc: Keychain = Keychain(testing=True)
kc.delete_all_keys()
assert kc._get_free_private_key_index() == 0
assert len(kc.get_all_private_keys()) == 0
assert kc.get_first_private_key() is None
assert kc.get_first_public_key() is None
mnemonic = generate_mnemonic()
entropy = bytes_from_mnemonic(mnemonic)
assert bytes_to_mnemonic(entropy) == mnemonic
mnemonic_2 = generate_mnemonic()
kc.add_private_key(mnemonic, "")
assert kc._get_free_private_key_index() == 1
assert len(kc.get_all_private_keys()) == 1
kc.add_private_key(mnemonic_2, "")
kc.add_private_key(mnemonic_2, "")
assert kc._get_free_private_key_index() == 2
assert len(kc.get_all_private_keys()) == 2
assert kc._get_free_private_key_index() == 2
assert len(kc.get_all_private_keys()) == 2
assert len(kc.get_all_public_keys()) == 2
assert kc.get_all_private_keys()[0] == kc.get_first_private_key()
assert kc.get_all_public_keys()[0] == kc.get_first_public_key()
assert len(kc.get_all_private_keys()) == 2
seed_2 = mnemonic_to_seed(mnemonic, "")
seed_key_2 = AugSchemeMPL.key_gen(seed_2)
kc.delete_key_by_fingerprint(seed_key_2.get_g1().get_fingerprint())
assert kc._get_free_private_key_index() == 0
assert len(kc.get_all_private_keys()) == 1
kc.delete_all_keys()
assert kc._get_free_private_key_index() == 0
assert len(kc.get_all_private_keys()) == 0
kc.add_private_key(bytes_to_mnemonic(token_bytes(32)), "my passphrase")
kc.add_private_key(bytes_to_mnemonic(token_bytes(32)), "")
kc.add_private_key(bytes_to_mnemonic(token_bytes(32)), "third passphrase")
assert len(kc.get_all_public_keys()) == 3
assert len(kc.get_all_private_keys()) == 1
assert len(kc.get_all_private_keys(["my passphrase", ""])) == 2
assert len(kc.get_all_private_keys(["my passphrase", "", "third passphrase", "another"])) == 3
assert len(kc.get_all_private_keys(["my passhrase wrong"])) == 0
assert kc.get_first_private_key() is not None
assert kc.get_first_private_key(["bad passphrase"]) is None
assert kc.get_first_public_key() is not None
kc.delete_all_keys()
kc.add_private_key(bytes_to_mnemonic(token_bytes(32)), "my passphrase")
assert kc.get_first_public_key() is not None
def test_bip39_eip2333_test_vector(self):
kc: Keychain = Keychain(testing=True)
kc.delete_all_keys()
mnemonic = "abandon abandon abandon abandon abandon abandon abandon abandon abandon abandon abandon about"
passphrase = "TREZOR"
print("entropy to seed:", mnemonic_to_seed(mnemonic, passphrase).hex())
master_sk = kc.add_private_key(mnemonic, passphrase)
tv_master_int = 5399117110774477986698372024995405256382522670366369834617409486544348441851
tv_child_int = 11812940737387919040225825939013910852517748782307378293770044673328955938106
assert master_sk == PrivateKey.from_bytes(tv_master_int.to_bytes(32, "big"))
child_sk = AugSchemeMPL.derive_child_sk(master_sk, 0)
assert child_sk == PrivateKey.from_bytes(tv_child_int.to_bytes(32, "big"))
def test_bip39_test_vectors_trezor(self):
with open("tests/util/bip39_test_vectors.json") as f:
all_vectors = json.loads(f.read())
for vector_list in all_vectors["english"]:
entropy_bytes = bytes.fromhex(vector_list[0])
mnemonic = vector_list[1]
seed = bytes.fromhex(vector_list[2])
assert bytes_from_mnemonic(mnemonic) == entropy_bytes
assert bytes_to_mnemonic(entropy_bytes) == mnemonic
assert mnemonic_to_seed(mnemonic, "TREZOR") == seed
def test_utf8_nfkd(self):
words_nfkd = "Pr\u030ci\u0301s\u030cerne\u030c z\u030clut\u030couc\u030cky\u0301 ku\u030an\u030c u\u0301pe\u030cl d\u030ca\u0301belske\u0301 o\u0301dy za\u0301ker\u030cny\u0301 uc\u030cen\u030c be\u030cz\u030ci\u0301 pode\u0301l zo\u0301ny u\u0301lu\u030a"
words_nfc = "P\u0159\xed\u0161ern\u011b \u017elu\u0165ou\u010dk\xfd k\u016f\u0148 \xfap\u011bl \u010f\xe1belsk\xe9 \xf3dy z\xe1ke\u0159n\xfd u\u010de\u0148 b\u011b\u017e\xed pod\xe9l z\xf3ny \xfal\u016f"
words_nfkc = "P\u0159\xed\u0161ern\u011b \u017elu\u0165ou\u010dk\xfd k\u016f\u0148 \xfap\u011bl \u010f\xe1belsk\xe9 \xf3dy z\xe1ke\u0159n\xfd u\u010de\u0148 b\u011b\u017e\xed pod\xe9l z\xf3ny \xfal\u016f"
words_nfd = "Pr\u030ci\u0301s\u030cerne\u030c z\u030clut\u030couc\u030cky\u0301 ku\u030an\u030c u\u0301pe\u030cl d\u030ca\u0301belske\u0301 o\u0301dy za\u0301ker\u030cny\u0301 uc\u030cen\u030c be\u030cz\u030ci\u0301 pode\u0301l zo\u0301ny u\u0301lu\u030a"
passphrase_nfkd = "Neuve\u030cr\u030citelne\u030c bezpec\u030cne\u0301 hesli\u0301c\u030cko"
passphrase_nfc = "Neuv\u011b\u0159iteln\u011b bezpe\u010dn\xe9 hesl\xed\u010dko"
passphrase_nfkc = "Neuv\u011b\u0159iteln\u011b bezpe\u010dn\xe9 hesl\xed\u010dko"
passphrase_nfd = "Neuve\u030cr\u030citelne\u030c bezpec\u030cne\u0301 hesli\u0301c\u030cko"
seed_nfkd = mnemonic_to_seed(words_nfkd, passphrase_nfkd)
seed_nfc = mnemonic_to_seed(words_nfc, passphrase_nfc)
seed_nfkc = mnemonic_to_seed(words_nfkc, passphrase_nfkc)
seed_nfd = mnemonic_to_seed(words_nfd, passphrase_nfd)
assert seed_nfkd == seed_nfc
assert seed_nfkd == seed_nfkc
assert seed_nfkd == seed_nfd
| true | true |
f73889fd6a9ba5321c99288f50c62740c1329694 | 20,345 | py | Python | qiskit/compiler/assemble.py | wagnersj/qiskit-terra | 8636502a832ee5c63a26f66c9669c88a1149e432 | [
"Apache-2.0"
] | null | null | null | qiskit/compiler/assemble.py | wagnersj/qiskit-terra | 8636502a832ee5c63a26f66c9669c88a1149e432 | [
"Apache-2.0"
] | null | null | null | qiskit/compiler/assemble.py | wagnersj/qiskit-terra | 8636502a832ee5c63a26f66c9669c88a1149e432 | [
"Apache-2.0"
] | null | null | null | # -*- coding: utf-8 -*-
# This code is part of Qiskit.
#
# (C) Copyright IBM 2017, 2019.
#
# This code is licensed under the Apache License, Version 2.0. You may
# obtain a copy of this license in the LICENSE.txt file in the root directory
# of this source tree or at http://www.apache.org/licenses/LICENSE-2.0.
#
# Any modifications or derivative works of this code must retain this
# copyright notice, and modified files need to carry a notice indicating
# that they have been altered from the originals.
"""Assemble function for converting a list of circuits into a qobj"""
import uuid
import copy
import logging
import warnings
from time import time
from typing import Union, List, Dict, Optional
from qiskit.circuit import QuantumCircuit, Qubit, Parameter
from qiskit.exceptions import QiskitError
from qiskit.pulse import ScheduleComponent, LoConfig
from qiskit.assembler.run_config import RunConfig
from qiskit.assembler import assemble_circuits, assemble_schedules
from qiskit.qobj import QobjHeader, Qobj
from qiskit.qobj.utils import MeasLevel, MeasReturnType
from qiskit.validation.jsonschema import SchemaValidationError
from qiskit.providers import BaseBackend
from qiskit.pulse.channels import PulseChannel
from qiskit.pulse import Schedule
LOG = logging.getLogger(__name__)
def _log_assembly_time(start_time, end_time):
log_msg = "Total Assembly Time - %.5f (ms)" % ((end_time - start_time) * 1000)
LOG.info(log_msg)
# TODO: parallelize over the experiments (serialize each separately, then add global header/config)
def assemble(experiments: Union[QuantumCircuit, List[QuantumCircuit], Schedule, List[Schedule]],
backend: Optional[BaseBackend] = None,
qobj_id: Optional[str] = None,
qobj_header: Optional[Union[QobjHeader, Dict]] = None,
shots: Optional[int] = None, memory: Optional[bool] = False,
max_credits: Optional[int] = None,
seed_simulator: Optional[int] = None,
qubit_lo_freq: Optional[List[int]] = None,
meas_lo_freq: Optional[List[int]] = None,
qubit_lo_range: Optional[List[int]] = None,
meas_lo_range: Optional[List[int]] = None,
schedule_los: Optional[Union[List[Union[Dict[PulseChannel, float], LoConfig]],
Union[Dict[PulseChannel, float], LoConfig]]] = None,
meas_level: Union[int, MeasLevel] = MeasLevel.CLASSIFIED,
meas_return: Union[str, MeasReturnType] = MeasReturnType.AVERAGE,
meas_map: Optional[List[List[Qubit]]] = None,
memory_slot_size: int = 100,
rep_time: Optional[int] = None,
rep_delay: Optional[float] = None,
parameter_binds: Optional[List[Dict[Parameter, float]]] = None,
parametric_pulses: Optional[List[str]] = None,
init_qubits: bool = True,
**run_config: Dict) -> Qobj:
"""Assemble a list of circuits or pulse schedules into a ``Qobj``.
This function serializes the payloads, which could be either circuits or schedules,
to create ``Qobj`` "experiments". It further annotates the experiment payload with
header and configurations.
Args:
experiments: Circuit(s) or pulse schedule(s) to execute
backend: If set, some runtime options are automatically grabbed from
``backend.configuration()`` and ``backend.defaults()``.
If any other option is explicitly set (e.g., ``rep_time``), it
will override the backend's.
If any other options is set in the run_config, it will
also override the backend's.
qobj_id: String identifier to annotate the ``Qobj``
qobj_header: User input that will be inserted in ``Qobj`` header, and will also be
copied to the corresponding Result header. Headers do not affect the run.
shots: Number of repetitions of each circuit, for sampling. Default: 1024
or ``max_shots`` from the backend configuration, whichever is smaller
memory: If ``True``, per-shot measurement bitstrings are returned as well
(provided the backend supports it). For OpenPulse jobs, only
measurement level 2 supports this option.
max_credits: Maximum credits to spend on job. Default: 10
seed_simulator: Random seed to control sampling, for when backend is a simulator
qubit_lo_freq: List of default qubit LO frequencies in Hz. Will be overridden by
``schedule_los`` if set.
meas_lo_freq: List of default measurement LO frequencies in Hz. Will be overridden
by ``schedule_los`` if set.
qubit_lo_range: List of drive LO ranges each of form ``[range_min, range_max]`` in Hz.
Used to validate the supplied qubit frequencies.
meas_lo_range: List of measurement LO ranges each of form ``[range_min, range_max]`` in Hz.
Used to validate the supplied qubit frequencies.
schedule_los: Experiment LO configurations, frequencies are given in Hz.
meas_level: Set the appropriate level of the measurement output for pulse experiments.
meas_return: Level of measurement data for the backend to return.
For ``meas_level`` 0 and 1:
* ``single`` returns information from every shot.
* ``avg`` returns average measurement output (averaged over number of shots).
meas_map: List of lists, containing qubits that must be measured together.
memory_slot_size: Size of each memory slot if the output is Level 0.
rep_time (int): Time per program execution in seconds. Must be from the list provided
by the backend (``backend.configuration().rep_times``). Defaults to the first entry.
rep_delay (float): Delay between programs in seconds. Only supported on certain
backends (if ``backend.configuration().dynamic_reprate_enabled=True``). If supported,
``rep_delay`` will be used instead of ``rep_time`` and must be from the range supplied
by the backend (``backend.configuration().rep_delay_range``). Default is given by
``backend.configuration().default_rep_delay``.
parameter_binds: List of Parameter bindings over which the set of experiments will be
executed. Each list element (bind) should be of the form
{Parameter1: value1, Parameter2: value2, ...}. All binds will be
executed across all experiments; e.g., if parameter_binds is a
length-n list, and there are m experiments, a total of m x n
experiments will be run (one for each experiment/bind pair).
parametric_pulses: A list of pulse shapes which are supported internally on the backend.
Example::
['gaussian', 'constant']
init_qubits: Whether to reset the qubits to the ground state for each shot.
Default: ``True``.
**run_config: Extra arguments used to configure the run (e.g., for Aer configurable
backends). Refer to the backend documentation for details on these
arguments.
Returns:
A ``Qobj`` that can be run on a backend. Depending on the type of input,
this will be either a ``QasmQobj`` or a ``PulseQobj``.
Raises:
QiskitError: if the input cannot be interpreted as either circuits or schedules
"""
start_time = time()
experiments = experiments if isinstance(experiments, list) else [experiments]
qobj_id, qobj_header, run_config_common_dict = _parse_common_args(backend, qobj_id, qobj_header,
shots, memory, max_credits,
seed_simulator, init_qubits,
**run_config)
# assemble either circuits or schedules
if all(isinstance(exp, QuantumCircuit) for exp in experiments):
run_config = _parse_circuit_args(parameter_binds, **run_config_common_dict)
# If circuits are parameterized, bind parameters and remove from run_config
bound_experiments, run_config = _expand_parameters(circuits=experiments,
run_config=run_config)
end_time = time()
_log_assembly_time(start_time, end_time)
return assemble_circuits(circuits=bound_experiments, qobj_id=qobj_id,
qobj_header=qobj_header, run_config=run_config)
elif all(isinstance(exp, ScheduleComponent) for exp in experiments):
run_config = _parse_pulse_args(backend, qubit_lo_freq, meas_lo_freq,
qubit_lo_range, meas_lo_range,
schedule_los, meas_level, meas_return,
meas_map, memory_slot_size,
rep_time, rep_delay,
parametric_pulses,
**run_config_common_dict)
end_time = time()
_log_assembly_time(start_time, end_time)
return assemble_schedules(schedules=experiments, qobj_id=qobj_id,
qobj_header=qobj_header, run_config=run_config)
else:
raise QiskitError("bad input to assemble() function; "
"must be either circuits or schedules")
# TODO: rework to return a list of RunConfigs (one for each experiments), and a global one
def _parse_common_args(backend, qobj_id, qobj_header, shots,
memory, max_credits, seed_simulator, init_qubits,
**run_config):
"""Resolve the various types of args allowed to the assemble() function through
duck typing, overriding args, etc. Refer to the assemble() docstring for details on
what types of inputs are allowed.
Here the args are resolved by converting them to standard instances, and prioritizing
them in case a run option is passed through multiple args (explicitly setting an arg
has more priority than the arg set by backend)
Returns:
RunConfig: a run config, which is a standardized object that configures the qobj
and determines the runtime environment.
Raises:
QiskitError: if the memory arg is True and the backend does not support
memory. Also if shots exceeds max_shots for the configured backend.
"""
# grab relevant info from backend if it exists
backend_config = None
if backend:
backend_config = backend.configuration()
# check for memory flag applied to backend that does not support memory
if memory and not backend_config.memory:
raise QiskitError("memory not supported by backend {}"
.format(backend_config.backend_name))
# an identifier for the Qobj
qobj_id = qobj_id or str(uuid.uuid4())
# The header that goes at the top of the Qobj (and later Result)
# we process it as dict, then write entries that are not None to a QobjHeader object
qobj_header = qobj_header or {}
if isinstance(qobj_header, QobjHeader):
qobj_header = qobj_header.to_dict()
backend_name = getattr(backend_config, 'backend_name', None)
backend_version = getattr(backend_config, 'backend_version', None)
qobj_header = {**dict(backend_name=backend_name, backend_version=backend_version),
**qobj_header}
qobj_header = QobjHeader(**{k: v for k, v in qobj_header.items() if v is not None})
max_shots = getattr(backend_config, 'max_shots', None)
if shots is None:
if max_shots:
shots = min(1024, max_shots)
else:
shots = 1024
elif max_shots and max_shots < shots:
raise QiskitError(
'Number of shots specified: %s exceeds max_shots property of the '
'backend: %s.' % (shots, max_shots))
# create run configuration and populate
run_config_dict = dict(shots=shots,
memory=memory,
max_credits=max_credits,
seed_simulator=seed_simulator,
init_qubits=init_qubits,
**run_config)
return qobj_id, qobj_header, run_config_dict
def _parse_pulse_args(backend, qubit_lo_freq, meas_lo_freq, qubit_lo_range,
                      meas_lo_range, schedule_los, meas_level,
                      meas_return, meas_map,
                      memory_slot_size,
                      rep_time, rep_delay,
                      parametric_pulses,
                      **run_config):
    """Build a pulse RunConfig replacing unset arguments with defaults derived from the `backend`.

    See `assemble` for more information on the required arguments.

    Returns:
        RunConfig: a run config, which is a standardized object that configures the qobj
            and determines the runtime environment.

    Raises:
        SchemaValidationError: If the given meas_level is not allowed for the given `backend`. If
            rep_delay is not in the backend rep_delay_range.
    """
    # grab relevant info from backend if it exists
    backend_config = None
    backend_default = None
    if backend:
        backend_default = backend.defaults()
        backend_config = backend.configuration()
        # reject measurement levels the hardware cannot produce
        if meas_level not in getattr(backend_config, 'meas_levels', [MeasLevel.CLASSIFIED]):
            raise SchemaValidationError(
                ('meas_level = {} not supported for backend {}, only {} is supported'
                 ).format(meas_level, backend_config.backend_name, backend_config.meas_levels)
            )
    meas_map = meas_map or getattr(backend_config, 'meas_map', None)
    # normalize schedule_los to a list of LoConfig objects
    schedule_los = schedule_los or []
    if isinstance(schedule_los, (LoConfig, dict)):
        schedule_los = [schedule_los]
    # Convert to LoConfig if LO configuration supplied as dictionary
    schedule_los = [lo_config if isinstance(lo_config, LoConfig) else LoConfig(lo_config)
                    for lo_config in schedule_los]
    # fall back to backend-calibrated LO frequency estimates when unset
    if not qubit_lo_freq and hasattr(backend_default, 'qubit_freq_est'):
        qubit_lo_freq = backend_default.qubit_freq_est
    if not meas_lo_freq and hasattr(backend_default, 'meas_freq_est'):
        meas_lo_freq = backend_default.meas_freq_est
    qubit_lo_range = qubit_lo_range or getattr(backend_config, 'qubit_lo_range', None)
    meas_lo_range = meas_lo_range or getattr(backend_config, 'meas_lo_range', None)
    dynamic_reprate_enabled = getattr(backend_config, 'dynamic_reprate_enabled', False)
    rep_time = rep_time or getattr(backend_config, 'rep_times', None)
    if rep_time:
        if dynamic_reprate_enabled:
            warnings.warn("Dynamic rep rates are supported on this backend. 'rep_delay' will be "
                          "used instead of 'rep_time'.", RuntimeWarning)
        if isinstance(rep_time, list):
            # backend reports a list of supported rep times; take the first
            rep_time = rep_time[0]
        rep_time = int(rep_time * 1e6)  # convert sec to μs
    if dynamic_reprate_enabled:
        rep_delay = rep_delay or getattr(backend_config, "default_rep_delay", None)
        if rep_delay is not None:
            rep_delay_range = getattr(backend_config, "rep_delay_range", None)
            # check that rep_delay is in rep_delay_range
            if rep_delay_range is not None and isinstance(rep_delay_range, list):
                # pylint: disable=E1136
                if len(rep_delay_range) != 2:
                    raise SchemaValidationError(
                        "Backend rep_delay_range {} must be a list with two entries.".format(
                            rep_delay_range
                        )
                    )
                if not rep_delay_range[0] <= rep_delay <= rep_delay_range[1]:
                    raise SchemaValidationError(
                        "Supplied rep delay {} not in the supported "
                        "backend range {}".format(rep_delay, rep_delay_range)
                    )
            rep_delay = rep_delay * 1e6  # convert sec to μs
    else:
        # rep_delay is meaningless without dynamic rep rates; drop it
        rep_delay = None
        warnings.warn(
            "Dynamic rep rates not supported on this backend. rep_time will be "
            "used instead of rep_delay.",
            RuntimeWarning,
        )
    parametric_pulses = parametric_pulses or getattr(backend_config, 'parametric_pulses', [])
    # create run configuration and populate; None-valued entries are omitted
    run_config_dict = dict(qubit_lo_freq=qubit_lo_freq,
                           meas_lo_freq=meas_lo_freq,
                           qubit_lo_range=qubit_lo_range,
                           meas_lo_range=meas_lo_range,
                           schedule_los=schedule_los,
                           meas_level=meas_level,
                           meas_return=meas_return,
                           meas_map=meas_map,
                           memory_slot_size=memory_slot_size,
                           rep_time=rep_time,
                           rep_delay=rep_delay,
                           parametric_pulses=parametric_pulses,
                           **run_config)
    run_config = RunConfig(**{k: v for k, v in run_config_dict.items() if v is not None})
    return run_config
def _parse_circuit_args(parameter_binds, **run_config):
    """Build a circuit RunConfig replacing unset arguments with defaults derived from the `backend`.

    See `assemble` for more information on the required arguments.

    Returns:
        RunConfig: a run config, which is a standardized object that configures the qobj
            and determines the runtime environment.
    """
    binds = parameter_binds or []
    # Merge the binds with any remaining keyword options, then drop unset
    # (None) entries so RunConfig only receives configured values.
    full_options = dict(parameter_binds=binds, **run_config)
    populated = {key: value for key, value in full_options.items() if value is not None}
    return RunConfig(**populated)
def _expand_parameters(circuits, run_config):
"""Verifies that there is a single common set of parameters shared between
all circuits and all parameter binds in the run_config. Returns an expanded
list of circuits (if parameterized) with all parameters bound, and a copy of
the run_config with parameter_binds cleared.
If neither the circuits nor the run_config specify parameters, the two are
returned unmodified.
Raises:
QiskitError: if run_config parameters are not compatible with circuit parameters
Returns:
Tuple(List[QuantumCircuit], RunConfig):
- List of input circuits expanded and with parameters bound
- RunConfig with parameter_binds removed
"""
parameter_binds = run_config.parameter_binds
if parameter_binds or \
any(circuit.parameters for circuit in circuits):
all_bind_parameters = [bind.keys()
for bind in parameter_binds]
all_circuit_parameters = [circuit.parameters for circuit in circuits]
# Collect set of all unique parameters across all circuits and binds
unique_parameters = {param
for param_list in all_bind_parameters + all_circuit_parameters
for param in param_list}
# Check that all parameters are common to all circuits and binds
if not all_bind_parameters \
or not all_circuit_parameters \
or any(unique_parameters != bind_params for bind_params in all_bind_parameters) \
or any(unique_parameters != parameters for parameters in all_circuit_parameters):
raise QiskitError(
('Mismatch between run_config.parameter_binds and all circuit parameters. ' +
'Parameter binds: {} ' +
'Circuit parameters: {}').format(all_bind_parameters, all_circuit_parameters))
circuits = [circuit.bind_parameters(binds)
for circuit in circuits
for binds in parameter_binds]
# All parameters have been expanded and bound, so remove from run_config
run_config = copy.deepcopy(run_config)
run_config.parameter_binds = []
return circuits, run_config
| 49.142512 | 100 | 0.646891 |
import uuid
import copy
import logging
import warnings
from time import time
from typing import Union, List, Dict, Optional
from qiskit.circuit import QuantumCircuit, Qubit, Parameter
from qiskit.exceptions import QiskitError
from qiskit.pulse import ScheduleComponent, LoConfig
from qiskit.assembler.run_config import RunConfig
from qiskit.assembler import assemble_circuits, assemble_schedules
from qiskit.qobj import QobjHeader, Qobj
from qiskit.qobj.utils import MeasLevel, MeasReturnType
from qiskit.validation.jsonschema import SchemaValidationError
from qiskit.providers import BaseBackend
from qiskit.pulse.channels import PulseChannel
from qiskit.pulse import Schedule
LOG = logging.getLogger(__name__)
def _log_assembly_time(start_time, end_time):
    """Log the total qobj assembly wall time in milliseconds."""
    elapsed_ms = (end_time - start_time) * 1000
    LOG.info("Total Assembly Time - %.5f (ms)" % elapsed_ms)
def assemble(experiments: Union[QuantumCircuit, List[QuantumCircuit], Schedule, List[Schedule]],
             backend: Optional[BaseBackend] = None,
             qobj_id: Optional[str] = None,
             qobj_header: Optional[Union[QobjHeader, Dict]] = None,
             shots: Optional[int] = None, memory: Optional[bool] = False,
             max_credits: Optional[int] = None,
             seed_simulator: Optional[int] = None,
             qubit_lo_freq: Optional[List[int]] = None,
             meas_lo_freq: Optional[List[int]] = None,
             qubit_lo_range: Optional[List[int]] = None,
             meas_lo_range: Optional[List[int]] = None,
             schedule_los: Optional[Union[List[Union[Dict[PulseChannel, float], LoConfig]],
                                          Union[Dict[PulseChannel, float], LoConfig]]] = None,
             meas_level: Union[int, MeasLevel] = MeasLevel.CLASSIFIED,
             meas_return: Union[str, MeasReturnType] = MeasReturnType.AVERAGE,
             meas_map: Optional[List[List[Qubit]]] = None,
             memory_slot_size: int = 100,
             rep_time: Optional[int] = None,
             rep_delay: Optional[float] = None,
             parameter_binds: Optional[List[Dict[Parameter, float]]] = None,
             parametric_pulses: Optional[List[str]] = None,
             init_qubits: bool = True,
             **run_config: Dict) -> Qobj:
    """Assemble a list of circuits or pulse schedules into a ``Qobj``.

    Run options not supplied explicitly are filled in from the ``backend``'s
    configuration/defaults where available.  Circuit and schedule inputs are
    dispatched to the matching assembler; mixed inputs are rejected.

    Raises:
        QiskitError: if the experiments are not all circuits or all schedules.
    """
    start_time = time()
    # normalize a single experiment to a one-element list
    experiments = experiments if isinstance(experiments, list) else [experiments]
    qobj_id, qobj_header, run_config_common_dict = _parse_common_args(backend, qobj_id, qobj_header,
                                                                     shots, memory, max_credits,
                                                                     seed_simulator, init_qubits,
                                                                     **run_config)
    # assemble either circuits or pulse schedules, never a mixture
    if all(isinstance(exp, QuantumCircuit) for exp in experiments):
        run_config = _parse_circuit_args(parameter_binds, **run_config_common_dict)
        # expand and bind any circuit parameters before assembly
        bound_experiments, run_config = _expand_parameters(circuits=experiments,
                                                           run_config=run_config)
        end_time = time()
        _log_assembly_time(start_time, end_time)
        return assemble_circuits(circuits=bound_experiments, qobj_id=qobj_id,
                                 qobj_header=qobj_header, run_config=run_config)
    elif all(isinstance(exp, ScheduleComponent) for exp in experiments):
        run_config = _parse_pulse_args(backend, qubit_lo_freq, meas_lo_freq,
                                       qubit_lo_range, meas_lo_range,
                                       schedule_los, meas_level, meas_return,
                                       meas_map, memory_slot_size,
                                       rep_time, rep_delay,
                                       parametric_pulses,
                                       **run_config_common_dict)
        end_time = time()
        _log_assembly_time(start_time, end_time)
        return assemble_schedules(schedules=experiments, qobj_id=qobj_id,
                                  qobj_header=qobj_header, run_config=run_config)
    else:
        raise QiskitError("bad input to assemble() function; "
                          "must be either circuits or schedules")
def _parse_common_args(backend, qobj_id, qobj_header, shots,
                       memory, max_credits, seed_simulator, init_qubits,
                       **run_config):
    """Resolve run options shared by circuit and pulse jobs, preferring
    explicitly-passed arguments over values derived from the ``backend``.

    Returns:
        Tuple of (qobj_id, QobjHeader, run-config dict).

    Raises:
        QiskitError: if the memory arg is True and the backend does not support
            memory, or if shots exceeds max_shots for the configured backend.
    """
    # grab relevant info from backend if it exists
    backend_config = None
    if backend:
        backend_config = backend.configuration()
        # check for memory flag applied to backend that does not support memory
        if memory and not backend_config.memory:
            raise QiskitError("memory not supported by backend {}"
                              .format(backend_config.backend_name))
    # an identifier for the Qobj
    qobj_id = qobj_id or str(uuid.uuid4())
    # process the header as a dict, then keep only non-None entries
    qobj_header = qobj_header or {}
    if isinstance(qobj_header, QobjHeader):
        qobj_header = qobj_header.to_dict()
    backend_name = getattr(backend_config, 'backend_name', None)
    backend_version = getattr(backend_config, 'backend_version', None)
    # caller-supplied header entries take priority over backend identity
    qobj_header = {**dict(backend_name=backend_name, backend_version=backend_version),
                   **qobj_header}
    qobj_header = QobjHeader(**{k: v for k, v in qobj_header.items() if v is not None})
    max_shots = getattr(backend_config, 'max_shots', None)
    if shots is None:
        # default to 1024 shots, clamped to the backend maximum
        if max_shots:
            shots = min(1024, max_shots)
        else:
            shots = 1024
    elif max_shots and max_shots < shots:
        raise QiskitError(
            'Number of shots specified: %s exceeds max_shots property of the '
            'backend: %s.' % (shots, max_shots))
    # create run configuration and populate
    run_config_dict = dict(shots=shots,
                           memory=memory,
                           max_credits=max_credits,
                           seed_simulator=seed_simulator,
                           init_qubits=init_qubits,
                           **run_config)
    return qobj_id, qobj_header, run_config_dict
def _parse_pulse_args(backend, qubit_lo_freq, meas_lo_freq, qubit_lo_range,
                      meas_lo_range, schedule_los, meas_level,
                      meas_return, meas_map,
                      memory_slot_size,
                      rep_time, rep_delay,
                      parametric_pulses,
                      **run_config):
    """Build a pulse RunConfig, replacing unset arguments with defaults
    derived from the ``backend``.

    Raises:
        SchemaValidationError: if meas_level is unsupported by the backend or
            rep_delay falls outside the backend's rep_delay_range.
    """
    # grab relevant info from backend if it exists
    backend_config = None
    backend_default = None
    if backend:
        backend_default = backend.defaults()
        backend_config = backend.configuration()
        if meas_level not in getattr(backend_config, 'meas_levels', [MeasLevel.CLASSIFIED]):
            raise SchemaValidationError(
                ('meas_level = {} not supported for backend {}, only {} is supported'
                 ).format(meas_level, backend_config.backend_name, backend_config.meas_levels)
            )
    meas_map = meas_map or getattr(backend_config, 'meas_map', None)
    # normalize schedule_los to a list of LoConfig objects
    schedule_los = schedule_los or []
    if isinstance(schedule_los, (LoConfig, dict)):
        schedule_los = [schedule_los]
    schedule_los = [lo_config if isinstance(lo_config, LoConfig) else LoConfig(lo_config)
                    for lo_config in schedule_los]
    # fall back to backend-calibrated LO frequency estimates when unset
    if not qubit_lo_freq and hasattr(backend_default, 'qubit_freq_est'):
        qubit_lo_freq = backend_default.qubit_freq_est
    if not meas_lo_freq and hasattr(backend_default, 'meas_freq_est'):
        meas_lo_freq = backend_default.meas_freq_est
    qubit_lo_range = qubit_lo_range or getattr(backend_config, 'qubit_lo_range', None)
    meas_lo_range = meas_lo_range or getattr(backend_config, 'meas_lo_range', None)
    dynamic_reprate_enabled = getattr(backend_config, 'dynamic_reprate_enabled', False)
    rep_time = rep_time or getattr(backend_config, 'rep_times', None)
    if rep_time:
        if dynamic_reprate_enabled:
            warnings.warn("Dynamic rep rates are supported on this backend. 'rep_delay' will be "
                          "used instead of 'rep_time'.", RuntimeWarning)
        if isinstance(rep_time, list):
            rep_time = rep_time[0]
        rep_time = int(rep_time * 1e6)  # convert sec to μs
    if dynamic_reprate_enabled:
        rep_delay = rep_delay or getattr(backend_config, "default_rep_delay", None)
        if rep_delay is not None:
            rep_delay_range = getattr(backend_config, "rep_delay_range", None)
            # validate rep_delay against the backend-advertised range
            if rep_delay_range is not None and isinstance(rep_delay_range, list):
                if len(rep_delay_range) != 2:
                    raise SchemaValidationError(
                        "Backend rep_delay_range {} must be a list with two entries.".format(
                            rep_delay_range
                        )
                    )
                if not rep_delay_range[0] <= rep_delay <= rep_delay_range[1]:
                    raise SchemaValidationError(
                        "Supplied rep delay {} not in the supported "
                        "backend range {}".format(rep_delay, rep_delay_range)
                    )
            rep_delay = rep_delay * 1e6  # convert sec to μs
    else:
        # rep_delay is meaningless without dynamic rep rates; drop it
        rep_delay = None
        warnings.warn(
            "Dynamic rep rates not supported on this backend. rep_time will be "
            "used instead of rep_delay.",
            RuntimeWarning,
        )
    parametric_pulses = parametric_pulses or getattr(backend_config, 'parametric_pulses', [])
    # create run configuration; None-valued entries are omitted
    run_config_dict = dict(qubit_lo_freq=qubit_lo_freq,
                           meas_lo_freq=meas_lo_freq,
                           qubit_lo_range=qubit_lo_range,
                           meas_lo_range=meas_lo_range,
                           schedule_los=schedule_los,
                           meas_level=meas_level,
                           meas_return=meas_return,
                           meas_map=meas_map,
                           memory_slot_size=memory_slot_size,
                           rep_time=rep_time,
                           rep_delay=rep_delay,
                           parametric_pulses=parametric_pulses,
                           **run_config)
    run_config = RunConfig(**{k: v for k, v in run_config_dict.items() if v is not None})
    return run_config
def _parse_circuit_args(parameter_binds, **run_config):
    """Build a circuit RunConfig from the given binds and run options.

    None-valued options are dropped so RunConfig only sees configured values.
    """
    parameter_binds = parameter_binds or []
    run_config_dict = dict(parameter_binds=parameter_binds, **run_config)
    run_config = RunConfig(**{k: v for k, v in run_config_dict.items() if v is not None})
    return run_config
def _expand_parameters(circuits, run_config):
    """Bind run_config.parameter_binds into the circuits.

    Verifies that all circuits and all binds share one common parameter set,
    returns one bound circuit per (circuit, bind) pair, and clears
    parameter_binds on a copied run_config.  Unparameterized inputs are
    returned unmodified.

    Raises:
        QiskitError: if binds and circuit parameters are inconsistent.
    """
    parameter_binds = run_config.parameter_binds
    if parameter_binds or \
            any(circuit.parameters for circuit in circuits):
        all_bind_parameters = [bind.keys()
                               for bind in parameter_binds]
        all_circuit_parameters = [circuit.parameters for circuit in circuits]
        # union of all parameters mentioned by any bind or circuit
        unique_parameters = {param
                             for param_list in all_bind_parameters + all_circuit_parameters
                             for param in param_list}
        # every bind and every circuit must use exactly the common set
        if not all_bind_parameters \
                or not all_circuit_parameters \
                or any(unique_parameters != bind_params for bind_params in all_bind_parameters) \
                or any(unique_parameters != parameters for parameters in all_circuit_parameters):
            raise QiskitError(
                ('Mismatch between run_config.parameter_binds and all circuit parameters. ' +
                 'Parameter binds: {} ' +
                 'Circuit parameters: {}').format(all_bind_parameters, all_circuit_parameters))
        circuits = [circuit.bind_parameters(binds)
                    for circuit in circuits
                    for binds in parameter_binds]
        # all parameters bound; clear binds on a copy of the config
        run_config = copy.deepcopy(run_config)
        run_config.parameter_binds = []
    return circuits, run_config
| true | true |
f7388a06dbdae2eaa3784a579a31cb9b67ebaeac | 682 | py | Python | app/core/migrations/0002_tag.py | Y-Chishiro/recipe-app-api | 85bbfbef47587898895c7b34f2444bc0b2717838 | [
"MIT"
] | null | null | null | app/core/migrations/0002_tag.py | Y-Chishiro/recipe-app-api | 85bbfbef47587898895c7b34f2444bc0b2717838 | [
"MIT"
] | null | null | null | app/core/migrations/0002_tag.py | Y-Chishiro/recipe-app-api | 85bbfbef47587898895c7b34f2444bc0b2717838 | [
"MIT"
] | null | null | null | # Generated by Django 3.1.4 on 2020-12-12 06:48
from django.conf import settings
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
    # Adds the ``Tag`` model: a user-owned tag name (FK to AUTH_USER_MODEL,
    # cascade-deleted with the user).

    dependencies = [
        ('core', '0001_initial'),
    ]
    operations = [
        migrations.CreateModel(
            name='Tag',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('name', models.CharField(max_length=255)),
                ('user', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL)),
            ],
        ),
    ]
| 28.416667 | 118 | 0.615836 |
from django.conf import settings
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
    # Adds the ``Tag`` model: a user-owned tag name (FK to AUTH_USER_MODEL,
    # cascade-deleted with the user).

    dependencies = [
        ('core', '0001_initial'),
    ]
    operations = [
        migrations.CreateModel(
            name='Tag',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('name', models.CharField(max_length=255)),
                ('user', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL)),
            ],
        ),
    ]
| true | true |
f7388a32d6244a9ebc6e8ed203770f2a545ec410 | 4,167 | py | Python | sdk/operationsmanagement/azure-mgmt-operationsmanagement/azure/mgmt/operationsmanagement/aio/_configuration.py | rsdoherty/azure-sdk-for-python | 6bba5326677468e6660845a703686327178bb7b1 | [
"MIT"
] | 2,728 | 2015-01-09T10:19:32.000Z | 2022-03-31T14:50:33.000Z | sdk/operationsmanagement/azure-mgmt-operationsmanagement/azure/mgmt/operationsmanagement/aio/_configuration.py | rsdoherty/azure-sdk-for-python | 6bba5326677468e6660845a703686327178bb7b1 | [
"MIT"
] | 17,773 | 2015-01-05T15:57:17.000Z | 2022-03-31T23:50:25.000Z | sdk/operationsmanagement/azure-mgmt-operationsmanagement/azure/mgmt/operationsmanagement/aio/_configuration.py | rsdoherty/azure-sdk-for-python | 6bba5326677468e6660845a703686327178bb7b1 | [
"MIT"
] | 1,916 | 2015-01-19T05:05:41.000Z | 2022-03-31T19:36:44.000Z | # coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------
from typing import Any, TYPE_CHECKING
from azure.core.configuration import Configuration
from azure.core.pipeline import policies
from azure.mgmt.core.policies import ARMHttpLoggingPolicy
from .._version import VERSION
if TYPE_CHECKING:
# pylint: disable=unused-import,ungrouped-imports
from azure.core.credentials_async import AsyncTokenCredential
class OperationsManagementClientConfiguration(Configuration):
    """Configuration for OperationsManagementClient.

    Note that all parameters used to create this instance are saved as instance
    attributes.

    :param credential: Credential needed for the client to connect to Azure.
    :type credential: ~azure.core.credentials_async.AsyncTokenCredential
    :param subscription_id: Gets subscription credentials which uniquely identify Microsoft Azure subscription. The subscription ID forms part of the URI for every service call.
    :type subscription_id: str
    :param provider_name: Provider name for the parent resource.
    :type provider_name: str
    :param resource_type: Resource type for the parent resource.
    :type resource_type: str
    :param resource_name: Parent resource name.
    :type resource_name: str
    """

    def __init__(
        self,
        credential: "AsyncTokenCredential",
        subscription_id: str,
        provider_name: str,
        resource_type: str,
        resource_name: str,
        **kwargs: Any
    ) -> None:
        # Fail fast on missing required identifiers.
        if credential is None:
            raise ValueError("Parameter 'credential' must not be None.")
        if subscription_id is None:
            raise ValueError("Parameter 'subscription_id' must not be None.")
        if provider_name is None:
            raise ValueError("Parameter 'provider_name' must not be None.")
        if resource_type is None:
            raise ValueError("Parameter 'resource_type' must not be None.")
        if resource_name is None:
            raise ValueError("Parameter 'resource_name' must not be None.")
        super(OperationsManagementClientConfiguration, self).__init__(**kwargs)

        self.credential = credential
        self.subscription_id = subscription_id
        self.provider_name = provider_name
        self.resource_type = resource_type
        self.resource_name = resource_name
        # Fixed service API version for this generated client.
        self.api_version = "2015-11-01-preview"
        self.credential_scopes = kwargs.pop('credential_scopes', ['https://management.azure.com/.default'])
        kwargs.setdefault('sdk_moniker', 'mgmt-operationsmanagement/{}'.format(VERSION))
        self._configure(**kwargs)

    def _configure(
        self,
        **kwargs: Any
    ) -> None:
        # Build the HTTP pipeline policies, honoring caller-supplied overrides.
        self.user_agent_policy = kwargs.get('user_agent_policy') or policies.UserAgentPolicy(**kwargs)
        self.headers_policy = kwargs.get('headers_policy') or policies.HeadersPolicy(**kwargs)
        self.proxy_policy = kwargs.get('proxy_policy') or policies.ProxyPolicy(**kwargs)
        self.logging_policy = kwargs.get('logging_policy') or policies.NetworkTraceLoggingPolicy(**kwargs)
        self.http_logging_policy = kwargs.get('http_logging_policy') or ARMHttpLoggingPolicy(**kwargs)
        self.retry_policy = kwargs.get('retry_policy') or policies.AsyncRetryPolicy(**kwargs)
        self.custom_hook_policy = kwargs.get('custom_hook_policy') or policies.CustomHookPolicy(**kwargs)
        self.redirect_policy = kwargs.get('redirect_policy') or policies.AsyncRedirectPolicy(**kwargs)
        self.authentication_policy = kwargs.get('authentication_policy')
        # Default to bearer-token auth when a credential is available and no
        # explicit authentication policy was supplied.
        if self.credential and not self.authentication_policy:
            self.authentication_policy = policies.AsyncBearerTokenCredentialPolicy(self.credential, *self.credential_scopes, **kwargs)
| 48.453488 | 177 | 0.700504 |
from typing import Any, TYPE_CHECKING
from azure.core.configuration import Configuration
from azure.core.pipeline import policies
from azure.mgmt.core.policies import ARMHttpLoggingPolicy
from .._version import VERSION
if TYPE_CHECKING:
from azure.core.credentials_async import AsyncTokenCredential
class OperationsManagementClientConfiguration(Configuration):
    """Configuration for OperationsManagementClient.

    All constructor parameters are stored as instance attributes; the HTTP
    pipeline policies are created in ``_configure``.
    """

    def __init__(
        self,
        credential: "AsyncTokenCredential",
        subscription_id: str,
        provider_name: str,
        resource_type: str,
        resource_name: str,
        **kwargs: Any
    ) -> None:
        # Fail fast on missing required identifiers.
        if credential is None:
            raise ValueError("Parameter 'credential' must not be None.")
        if subscription_id is None:
            raise ValueError("Parameter 'subscription_id' must not be None.")
        if provider_name is None:
            raise ValueError("Parameter 'provider_name' must not be None.")
        if resource_type is None:
            raise ValueError("Parameter 'resource_type' must not be None.")
        if resource_name is None:
            raise ValueError("Parameter 'resource_name' must not be None.")
        super(OperationsManagementClientConfiguration, self).__init__(**kwargs)

        self.credential = credential
        self.subscription_id = subscription_id
        self.provider_name = provider_name
        self.resource_type = resource_type
        self.resource_name = resource_name
        # Fixed service API version for this generated client.
        self.api_version = "2015-11-01-preview"
        self.credential_scopes = kwargs.pop('credential_scopes', ['https://management.azure.com/.default'])
        kwargs.setdefault('sdk_moniker', 'mgmt-operationsmanagement/{}'.format(VERSION))
        self._configure(**kwargs)

    def _configure(
        self,
        **kwargs: Any
    ) -> None:
        """Create pipeline policies, honoring caller-supplied overrides."""
        self.user_agent_policy = kwargs.get('user_agent_policy') or policies.UserAgentPolicy(**kwargs)
        self.headers_policy = kwargs.get('headers_policy') or policies.HeadersPolicy(**kwargs)
        self.proxy_policy = kwargs.get('proxy_policy') or policies.ProxyPolicy(**kwargs)
        self.logging_policy = kwargs.get('logging_policy') or policies.NetworkTraceLoggingPolicy(**kwargs)
        self.http_logging_policy = kwargs.get('http_logging_policy') or ARMHttpLoggingPolicy(**kwargs)
        self.retry_policy = kwargs.get('retry_policy') or policies.AsyncRetryPolicy(**kwargs)
        self.custom_hook_policy = kwargs.get('custom_hook_policy') or policies.CustomHookPolicy(**kwargs)
        self.redirect_policy = kwargs.get('redirect_policy') or policies.AsyncRedirectPolicy(**kwargs)
        self.authentication_policy = kwargs.get('authentication_policy')
        # Default to bearer-token auth when a credential is available and no
        # explicit authentication policy was supplied.
        if self.credential and not self.authentication_policy:
            self.authentication_policy = policies.AsyncBearerTokenCredentialPolicy(self.credential, *self.credential_scopes, **kwargs)
| true | true |
f7388ad7e6f554b4412b475060f4aa97cc9d185e | 6,409 | py | Python | tools/wptrunner/wptrunner/browsers/base.py | jimmywarting/wpt | 75d80fa43c763935dff59b3c6b21f4dffa9b03b7 | [
"BSD-3-Clause"
] | 575 | 2015-06-18T23:58:20.000Z | 2022-03-23T09:32:39.000Z | tools/wptrunner/wptrunner/browsers/base.py | jimmywarting/wpt | 75d80fa43c763935dff59b3c6b21f4dffa9b03b7 | [
"BSD-3-Clause"
] | 113 | 2015-05-04T09:58:14.000Z | 2022-01-31T19:35:03.000Z | tools/wptrunner/wptrunner/browsers/base.py | almajlis/wpt | a1d4dd189a5bdca857845b374946b8002c41d199 | [
"BSD-3-Clause"
] | 52 | 2015-07-14T10:40:50.000Z | 2022-03-15T01:11:49.000Z | import os
import platform
import socket
from abc import ABCMeta, abstractmethod
from copy import deepcopy
from ..wptcommandline import require_arg # noqa: F401
here = os.path.dirname(__file__)
def inherit(super_module, child_globals, product_name):
    """Copy a parent product's wptrunner entry points into a child product module.

    ``super_module.__wptrunner__`` is deep-copied into ``child_globals`` with the
    product name replaced, and every callable it names is re-exported so the
    child module exposes the same hooks as its parent.
    """
    parent_spec = super_module.__wptrunner__
    child_spec = deepcopy(parent_spec)
    child_globals["__wptrunner__"] = child_spec
    child_spec["product"] = product_name

    hook_keys = ("check_args", "browser", "browser_kwargs", "executor_kwargs",
                 "env_extras", "env_options", "timeout_multiplier")
    for key in hook_keys:
        name = parent_spec[key]
        child_globals[name] = getattr(super_module, name)

    # Executor classes are named per test type.
    for name in parent_spec["executor"].values():
        child_globals[name] = getattr(super_module, name)

    # run_info_extras is an optional hook.
    if "run_info_extras" in parent_spec:
        name = parent_spec["run_info_extras"]
        child_globals[name] = getattr(super_module, name)
def cmd_arg(name, value=None):
    """Format a command-line option with the platform's flag prefix.

    Windows tooling conventionally takes single-dash options; everywhere else
    a double dash is used.  A non-None ``value`` is appended as ``name=value``.
    """
    flag = ("-" if platform.system() == "Windows" else "--") + name
    return flag if value is None else flag + "=" + value
def maybe_add_args(required_args, current_args):
    """Append each required argument to ``current_args`` unless already present.

    ``KEY=VALUE`` arguments count as present when any existing argument sets
    the same KEY (whatever its value); plain flags are compared verbatim.
    Returns the (mutated) ``current_args`` list.
    """
    for needed in required_args:
        if "=" in needed:
            key_prefix = needed.split("=")[0] + "="
            already_set = any(arg.startswith(key_prefix) for arg in current_args)
        else:
            already_set = needed in current_args
        if not already_set:
            current_args.append(needed)
    return current_args
def certificate_domain_list(list_of_domains, certificate_file):
    """Build a list of domains where certificate_file should be used."""
    return [{"host": domain, "certificateFile": certificate_file}
            for domain in list_of_domains]
def get_free_port():
    """Get a random unbound port by asking the OS for an ephemeral one.

    The probe socket is closed before returning, so the port is only a
    best-effort hint: another process may grab it before the caller binds.
    """
    while True:
        probe = socket.socket()
        try:
            try:
                probe.bind(("127.0.0.1", 0))
            except socket.error:
                continue
            return probe.getsockname()[1]
        finally:
            probe.close()
def get_timeout_multiplier(test_type, run_info_data, **kwargs):
    """Return the explicit ``timeout_multiplier`` kwarg, defaulting to 1."""
    multiplier = kwargs["timeout_multiplier"]
    return 1 if multiplier is None else multiplier
def browser_command(binary, args, debug_info):
    """Split a browser invocation into (debugger prefix, browser command).

    When ``debug_info`` is supplied, the debugger executable and its arguments
    form the prefix, and ``&`` is escaped in the browser args if the debugger
    requires it.
    """
    if not debug_info:
        return [], [binary] + args
    browser_args = args
    if debug_info.requiresEscapedArgs:
        browser_args = [arg.replace("&", "\\&") for arg in browser_args]
    return [debug_info.path] + debug_info.args, [binary] + browser_args
class BrowserError(Exception):
    """Raised for browser-specific failures during startup or control."""
    pass
class Browser(object):
    """Abstract class serving as the basis for Browser implementations.

    The Browser is used in the TestRunnerManager to start and stop the browser
    process, and to check the state of that process. This class also acts as a
    context manager, enabling it to do browser-specific setup at the start of
    the testrun and cleanup after the run is complete.

    :param logger: Structured logger to use for output.
    """
    # NOTE(review): ``__metaclass__`` is the Python 2 spelling; under Python 3
    # this attribute is ignored, so the @abstractmethod decorators below are
    # not enforced — confirm whether that is intended.
    __metaclass__ = ABCMeta

    process_cls = None
    init_timeout = 30
    def __init__(self, logger):
        self.logger = logger
    def __enter__(self):
        self.setup()
        return self
    def __exit__(self, *args, **kwargs):
        self.cleanup()
    def setup(self):
        """Used for browser-specific setup that happens at the start of a test run"""
        pass
    def settings(self, test):
        """Dictionary of metadata that is constant for a specific launch of a browser.

        This is used to determine when the browser instance configuration changes, requiring
        a relaunch of the browser. The test runner calls this method for each test, and if the
        returned value differs from that for the previous test, the browser is relaunched.
        """
        return {}
    @abstractmethod
    def start(self, group_metadata, **kwargs):
        """Launch the browser object and get it into a state where it is ready to run tests"""
        pass
    @abstractmethod
    def stop(self, force=False):
        """Stop the running browser process."""
        pass
    @abstractmethod
    def pid(self):
        """pid of the browser process or None if there is no pid"""
        pass
    @abstractmethod
    def is_alive(self):
        """Boolean indicating whether the browser process is still running"""
        pass
    def cleanup(self):
        """Browser-specific cleanup that is run after the testrun is finished"""
        pass
    def executor_browser(self):
        """Returns the ExecutorBrowser subclass for this Browser subclass and the keyword arguments
        with which it should be instantiated"""
        return ExecutorBrowser, {}
    def maybe_parse_tombstone(self):
        """Possibly parse tombstones on Android device for Android target"""
        pass
    def check_crash(self, process, test):
        """Check if a crash occurred and output any useful information to the
        log. Returns a boolean indicating whether a crash occurred."""
        return False
class NullBrowser(Browser):
    """No-op browser to use in scenarios where the TestRunnerManager shouldn't
    actually own the browser process (e.g. Servo where we start one browser
    per test)."""
    def __init__(self, logger, **kwargs):
        super(NullBrowser, self).__init__(logger)
    # NOTE(review): signature omits the ``group_metadata`` positional argument
    # declared by Browser.start — confirm callers always pass it by keyword.
    def start(self, **kwargs):
        """No-op; there is no browser process to launch."""
        pass
    def stop(self, force=False):
        pass
    def pid(self):
        return None
    def is_alive(self):
        # There is no process to die, so report "alive" unconditionally.
        return True
    def on_output(self, line):
        raise NotImplementedError
class ExecutorBrowser(object):
    """View of the Browser used by the Executor object.

    This is needed because the Executor runs in a child process and
    we can't ship Browser instances between processes on Windows.

    Typically this will have a few product-specific properties set,
    but in some cases it may have more elaborate methods for setting
    up the browser from the runner process.
    """
    def __init__(self, **kwargs):
        # Every keyword becomes a plain instance attribute.
        self.__dict__.update(kwargs)
| 30.519048 | 99 | 0.660633 | import os
import platform
import socket
from abc import ABCMeta, abstractmethod
from copy import deepcopy
from ..wptcommandline import require_arg
here = os.path.dirname(__file__)
def inherit(super_module, child_globals, product_name):
    """Copy a parent product's wptrunner entry points into a child product module."""
    super_wptrunner = super_module.__wptrunner__
    child_globals["__wptrunner__"] = child_wptrunner = deepcopy(super_wptrunner)
    child_wptrunner["product"] = product_name
    # re-export every named hook from the parent module
    for k in ("check_args", "browser", "browser_kwargs", "executor_kwargs",
              "env_extras", "env_options", "timeout_multiplier"):
        attr = super_wptrunner[k]
        child_globals[attr] = getattr(super_module, attr)
    # executor classes are named per test type
    for v in super_module.__wptrunner__["executor"].values():
        child_globals[v] = getattr(super_module, v)
    # run_info_extras is an optional hook
    if "run_info_extras" in super_wptrunner:
        attr = super_wptrunner["run_info_extras"]
        child_globals[attr] = getattr(super_module, attr)
def cmd_arg(name, value=None):
    """Format a command-line option using the platform's flag prefix
    (single dash on Windows, double dash elsewhere)."""
    prefix = "-" if platform.system() == "Windows" else "--"
    rv = prefix + name
    if value is not None:
        rv += "=" + value
    return rv
def maybe_add_args(required_args, current_args):
    """Append each required argument to current_args unless already present."""
    for required_arg in required_args:
        # If the arg is in the form of "variable=value", only add it if
        # no arg with another value for "variable" is already there.
        if "=" in required_arg:
            required_arg_prefix = "%s=" % required_arg.split("=")[0]
            if not any(item.startswith(required_arg_prefix) for item in current_args):
                current_args.append(required_arg)
        else:
            if required_arg not in current_args:
                current_args.append(required_arg)
    return current_args
def certificate_domain_list(list_of_domains, certificate_file):
    """Build a list of domains where certificate_file should be used"""
    cert_list = []
    for domain in list_of_domains:
        cert_list.append({"host": domain, "certificateFile": certificate_file})
    return cert_list
def get_free_port():
    """Get a random unbound port (best-effort: released before returning)."""
    while True:
        s = socket.socket()
        try:
            s.bind(("127.0.0.1", 0))
        except socket.error:
            continue
        else:
            return s.getsockname()[1]
        finally:
            s.close()
def get_timeout_multiplier(test_type, run_info_data, **kwargs):
    """Return the explicit timeout_multiplier kwarg, defaulting to 1."""
    if kwargs["timeout_multiplier"] is not None:
        return kwargs["timeout_multiplier"]
    return 1
def browser_command(binary, args, debug_info):
    """Split a browser invocation into (debugger prefix args, browser command)."""
    if debug_info:
        if debug_info.requiresEscapedArgs:
            # some debuggers need '&' escaped in the inferior's arguments
            args = [item.replace("&", "\\&") for item in args]
        debug_args = [debug_info.path] + debug_info.args
    else:
        debug_args = []
    command = [binary] + args
    return debug_args, command
class BrowserError(Exception):
    """Exception type raised for browser-related failures."""
    pass
class Browser(object):
    """Abstract base class for objects that manage a browser on behalf
    of the test runner.

    Instances are context managers: entering calls :meth:`setup`,
    exiting calls :meth:`cleanup`.
    """
    # NOTE(review): ``__metaclass__`` is Python 2 syntax; under Python 3
    # it is ignored, so @abstractmethod is not actually enforced there.
    __metaclass__ = ABCMeta
    # Optional process-wrapper class; left to concrete subclasses.
    process_cls = None
    # Timeout budget for browser initialisation (presumably seconds —
    # confirm against callers).
    init_timeout = 30
    def __init__(self, logger):
        # logger: logger object used by subclasses for browser events.
        self.logger = logger
    def __enter__(self):
        self.setup()
        return self
    def __exit__(self, *args, **kwargs):
        self.cleanup()
    def setup(self):
        """Hook run on context entry; default is a no-op."""
        pass
    def settings(self, test):
        """Return per-test browser settings; default is no settings."""
        return {}
    @abstractmethod
    def start(self, group_metadata, **kwargs):
        """Launch the browser."""
        pass
    @abstractmethod
    def stop(self, force=False):
        """Stop the browser; ``force`` semantics are subclass-defined."""
        pass
    @abstractmethod
    def pid(self):
        """Return the browser process id, if any."""
        pass
    @abstractmethod
    def is_alive(self):
        """Return True if the browser is still running."""
        pass
    def cleanup(self):
        """Hook run on context exit; default is a no-op."""
        pass
    def executor_browser(self):
        """Return the ExecutorBrowser class and the kwargs it should be
        constructed with on the executor side."""
        return ExecutorBrowser, {}
    def maybe_parse_tombstone(self):
        """Crash-tombstone hook; default is a no-op."""
        pass
    def check_crash(self, process, test):
        """Return True if the browser crashed during ``test``; the
        default implementation never reports a crash."""
        return False
class NullBrowser(Browser):
    """Browser stub: all lifecycle methods are no-ops, ``pid()`` is None
    and ``is_alive()`` always reports True."""
    def __init__(self, logger, **kwargs):
        # Extra keyword arguments from product configuration are
        # accepted and ignored.
        super(NullBrowser, self).__init__(logger)
    def start(self, **kwargs):
        # NOTE(review): signature differs from Browser.start(group_metadata,
        # **kwargs); callers must pass group_metadata by keyword.
        pass
    def stop(self, force=False):
        pass
    def pid(self):
        return None
    def is_alive(self):
        # There is no real process, so always report "alive".
        return True
    def on_output(self, line):
        raise NotImplementedError
class ExecutorBrowser(object):
    """Lightweight handle passed to the executor side; it simply carries
    whatever keyword attributes the Browser chose to expose."""
    def __init__(self, **kwargs):
        # Plain attribute bag: every keyword becomes an instance attribute.
        self.__dict__.update(kwargs)
| true | true |
f7388b96d6e7b06494d6da8401c197fda18dab4b | 3,942 | py | Python | source/fraud_detection/index.py | ldomb/fraud-detection-using-machine-learning | 925f838d78f0e40b6a0c7d27d2bb38b8001c2ad7 | [
"Apache-2.0"
] | 13 | 2019-09-30T04:40:53.000Z | 2021-08-17T00:31:58.000Z | source/fraud_detection/index.py | ldomb/fraud-detection-using-machine-learning | 925f838d78f0e40b6a0c7d27d2bb38b8001c2ad7 | [
"Apache-2.0"
] | null | null | null | source/fraud_detection/index.py | ldomb/fraud-detection-using-machine-learning | 925f838d78f0e40b6a0c7d27d2bb38b8001c2ad7 | [
"Apache-2.0"
] | 14 | 2019-09-18T12:04:53.000Z | 2021-09-28T07:08:13.000Z | ##############################################################################
# Copyright 2019 Amazon.com, Inc. or its affiliates. All Rights Reserved. #
# #
# Licensed under the Amazon Software License (the "License"). You may not #
# use this file except in compliance with the License. A copy of the #
# License is located at #
# #
# http://aws.amazon.com/asl/ #
# #
# or in the "license" file accompanying this file. This file is distributed #
# on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, #
# express or implied. See the License for the specific language governing #
# permissions and limitations under the License. #
##############################################################################
import json
import os
import boto3
import random
import datetime
import re
def lambda_handler(event, context):
    """AWS Lambda entry point: sample a synthetic transaction, score it
    with the SageMaker endpoint, and ship the enriched record to
    Kinesis Firehose.

    Returns early (doing nothing) when get_data() yields no payload —
    that function deliberately drops a fraction of invocations.
    """
    payload = get_data(event, context)
    if not payload:
        return
    prediction = get_fraud_prediction(payload)
    record = postprocess_event(event, prediction)
    firehose_response = store_data_prediction(record)
    print(firehose_response)
def get_data(event, context):
    """Produce a synthetic transaction feature vector as a CSV string.

    Roughly 15% of invocations are dropped (return None); of the rest,
    about 5% use the known-fraud example and the remainder the
    legitimate one.  ``event`` and ``context`` are unused.
    """
    # First roll: drop ~15% of invocations outright.
    if random.random() < 0.15:
        return
    non_fraud_example = [1.00000000e+00, -9.66271698e-01, -1.85226008e-01, 1.79299331e+00, -8.63291264e-01, -1.03088794e-02, 1.24720311e+00, 2.37608939e-01,
                         3.77435863e-01, -1.38702404e+00, -5.49519211e-02, -2.26487264e-01, 1.78228229e-01, 5.07756889e-01, -2.87923753e-01, -6.31418109e-01,
                         -1.05964720e+00, -6.84092760e-01, 1.96577501e+00, -1.23262203e+00, -2.08037779e-01, -1.08300455e-01, 5.27359685e-03, -1.90320522e-01,
                         -1.17557538e+00, 6.47376060e-01, -2.21928850e-01, 6.27228469e-02, 6.14576302e-02, 1.23500000e+02]
    fraud_example = [4.0600000e+02, -2.3122265e+00, 1.9519920e+00, -1.6098508e+00, 3.9979055e+00, -5.2218789e-01, -1.4265453e+00, -2.5373874e+00,
                     1.3916572e+00, -2.7700894e+00, -2.7722721e+00, 3.2020333e+00, -2.8999074e+00, -5.9522188e-01, -4.2892537e+00, 3.8972411e-01, -1.1407472e+00,
                     -2.8300557e+00, -1.6822468e-02, 4.1695571e-01, 1.2691055e-01, 5.1723236e-01, -3.5049368e-02, -4.6521106e-01, 3.2019821e-01, 4.4519167e-02,
                     1.7783980e-01, 2.6114500e-01, -1.4327587e-01, 0.0000000e+00]
    # Second roll: ~5% fraud, otherwise the legitimate example.
    chosen = fraud_example if random.random() < 0.05 else non_fraud_example
    return ','.join(map(str, chosen))
def get_fraud_prediction(data):
    """Invoke the SageMaker endpoint with a CSV payload and return the
    predicted label as an int (presumably 1 = fraud — confirm against
    the deployed model)."""
    endpoint_name = 'fraud-detection-endpoint'
    runtime = boto3.client('sagemaker-runtime')
    response = runtime.invoke_endpoint(EndpointName=endpoint_name,
                                       ContentType='text/csv',
                                       Body=data)
    print(response)
    body = json.loads(response['Body'].read().decode())
    print(body)
    return int(body['predictions'][0]['predicted_label'])
def postprocess_event(event, pred):
    """Build the Firehose record fields for a scored transaction.

    Returns ``[timestamp, id, source, prediction]`` where the timestamp
    has its fractional-second part stripped.  ``event`` is currently
    unused (the id is a fixed placeholder).
    """
    # Strip the microsecond suffix (".123456") from str(datetime.now()).
    # Fixed: the pattern used to be r'\\.\\d+', which matches a literal
    # backslash and therefore never fired on a datetime string.
    millisecond_regex = r'\.\d+'
    timestamp = re.sub(millisecond_regex, '', str(datetime.datetime.now()))
    source = random.choice(['Mobile', 'Web', 'Store'])
    return [timestamp, 'random_id', source, str(pred)]
def store_data_prediction(data):
    """Write one newline-terminated CSV record to the Firehose delivery
    stream and return the ``put_record`` response.

    Fixed: the record terminator was the two-character sequence
    backslash + 'n' (``'\\n'`` written with a doubled backslash) instead
    of an actual newline, so downstream consumers saw unterminated rows.
    """
    firehose_delivery_stream = 'fraud-detection-firehose-stream'
    firehose = boto3.client('firehose', region_name=os.environ['AWS_REGION'])
    record = ','.join(data) + '\n'
    response = firehose.put_record(DeliveryStreamName=firehose_delivery_stream,
                                   Record={'Data': record})
    return response
| 55.521127 | 161 | 0.58346 | true | true | |
f7388e0d13624cb560384478f92976049c90672f | 12,276 | py | Python | akshare/option/option_commodity.py | ghmole/akshare | eeeec96f90c6738bcd9ce92fcfa6b9c9176928a6 | [
"MIT"
] | 12 | 2020-12-30T02:50:01.000Z | 2021-11-08T11:32:51.000Z | akshare/option/option_commodity.py | ghmole/akshare | eeeec96f90c6738bcd9ce92fcfa6b9c9176928a6 | [
"MIT"
] | null | null | null | akshare/option/option_commodity.py | ghmole/akshare | eeeec96f90c6738bcd9ce92fcfa6b9c9176928a6 | [
"MIT"
] | 5 | 2020-12-31T01:31:48.000Z | 2021-07-17T15:54:03.000Z | # -*- coding:utf-8 -*-
# /usr/bin/env python
"""
Date: 2021/1/14 20:50
Desc: 商品期权数据
说明:
(1) 价格:自2019年12月02日起,纤维板报价单位由元/张改为元/立方米
(2) 价格:元/吨,鸡蛋为元/500千克,纤维板为元/立方米,胶合板为元/张
(3) 成交量、持仓量:手(按双边计算)
(4) 成交额:万元(按双边计算)
(5) 涨跌=收盘价-前结算价
(6) 涨跌1=今结算价-前结算价
(7) 合约系列:具有相同月份标的期货合约的所有期权合约的统称
(8) 隐含波动率:根据期权市场价格,利用期权定价模型计算的标的期货合约价格波动率
"""
import datetime
import warnings
from io import StringIO, BytesIO
import requests
import pandas as pd
from akshare.option.cons import (
get_calendar,
convert_date,
DCE_DAILY_OPTION_URL,
SHFE_OPTION_URL,
CZCE_DAILY_OPTION_URL_3,
SHFE_HEADERS,
)
def get_dce_option_daily(trade_date="20200817", symbol="聚丙烯期权"):
    """
    大连商品交易所-期权-日频行情数据
    :param trade_date: str, e.g. "20191017"
    :param symbol: str, one of 玉米期权, 豆粕期权, 铁矿石期权, 液化石油气期权,
                   聚乙烯期权, 聚氯乙烯期权, 聚丙烯期权
    :return: tuple(pandas.DataFrame, pandas.DataFrame) — (per-contract
             quotes, per-series implied volatility), or None for a
             non-trading day / unknown symbol
    """
    # 品种中文名 -> (行情表中的商品名称, 合约代码前缀)
    symbol_map = {
        "豆粕期权": ("豆粕", "m"),
        "玉米期权": ("玉米", "c"),
        "铁矿石期权": ("铁矿石", "i"),
        "液化石油气期权": ("液化石油气", "pg"),
        # Fixed: polyethylene (LLDPE) contracts are prefixed "l"; the old
        # branch reused the iron-ore prefix "i" and filtered wrong rows.
        "聚乙烯期权": ("聚乙烯", "l"),
        "聚氯乙烯期权": ("聚氯乙烯", "v"),
        "聚丙烯期权": ("聚丙烯", "pp"),
    }
    calendar = get_calendar()
    day = convert_date(trade_date) if trade_date is not None else datetime.date.today()
    if day.strftime("%Y%m%d") not in calendar:
        warnings.warn("%s非交易日" % day.strftime("%Y%m%d"))
        return None
    url = DCE_DAILY_OPTION_URL
    payload = {
        "dayQuotes.variety": "all",
        "dayQuotes.trade_type": "1",
        "year": str(day.year),
        "month": str(day.month - 1),  # the exchange form expects a 0-based month
        "day": str(day.day),
        "exportFlag": "excel",
    }
    res = requests.post(url, data=payload)
    table_df = pd.read_excel(BytesIO(res.content), header=0)
    # The implied-volatility summary sits below the last row whose first
    # column contains "合约"; split that trailing section into its own frame.
    another_df = table_df.iloc[
        table_df[table_df.iloc[:, 0].str.contains("合约")].iloc[-1].name:, [0, 1]
    ]
    another_df.reset_index(inplace=True, drop=True)
    # The first row holds both headers joined by a tab; promote it.
    another_df.iloc[0] = another_df.iat[0, 0].split("\t")
    another_df.columns = another_df.iloc[0]
    another_df = another_df.iloc[1:, :]
    if symbol not in symbol_map:
        # Mirror the historical behaviour (implicit None for unknown symbol).
        return None
    product_name, contract_prefix = symbol_map[symbol]
    return (
        table_df[table_df["商品名称"] == product_name],
        another_df[another_df.iloc[:, 0].str.contains(contract_prefix)],
    )
def get_czce_option_daily(trade_date="20191017", symbol="白糖期权"):
    """
    郑州商品交易所-期权-日频行情数据
    :param trade_date: str, e.g. "20191017"
    :param symbol: str, one of 白糖期权, 棉花期权, 甲醇期权, PTA期权,
                   菜籽粕期权, 动力煤期权
    :return: pandas.DataFrame with the day's quotes for the product, or
             None (non-trading day, pre-2010-08-25 date, or download
             failure)
    """
    # 品种 -> 合约代码前缀; anything unrecognised falls back to 棉花 (CF),
    # matching the original else-branch behaviour.
    symbol_prefix = {
        "白糖期权": "SR",
        "PTA期权": "TA",
        "甲醇期权": "MA",
        "菜籽粕期权": "RM",
        "动力煤期权": "ZC",
    }
    calendar = get_calendar()
    day = convert_date(trade_date) if trade_date is not None else datetime.date.today()
    if day.strftime("%Y%m%d") not in calendar:
        warnings.warn("{}非交易日".format(day.strftime("%Y%m%d")))
        return None
    if day <= datetime.date(2010, 8, 24):
        # No data is published for earlier dates.
        return None
    url = CZCE_DAILY_OPTION_URL_3.format(day.strftime("%Y"), day.strftime("%Y%m%d"))
    try:
        r = requests.get(url)
        f = StringIO(r.text)
        table_df = pd.read_table(f, encoding="utf-8", skiprows=1, sep="|")
        prefix = symbol_prefix.get(symbol, "CF")
        temp_df = table_df[table_df.iloc[:, 0].str.contains(prefix)]
        temp_df.reset_index(inplace=True, drop=True)
        # Drop the trailing subtotal ("小计"/"合计") row.
        return temp_df.iloc[:-1, :]
    except Exception:
        # Fixed: was a bare ``except:`` that also swallowed SystemExit
        # and KeyboardInterrupt.
        return None
def get_shfe_option_daily(trade_date="20200827", symbol="铝期权"):
    """
    上海期货交易所-期权-日频行情数据
    :param trade_date: str, e.g. "20191017"
    :param symbol: str, one of 铜期权, 天胶期权, 黄金期权, 铝期权, 锌期权
    :return: tuple(pandas.DataFrame, pandas.DataFrame) — (per-contract
             quotes, per-series implied volatility), or None (non-trading
             day, pre-2010-08-25 date, or download/parse failure)
    """
    calendar = get_calendar()
    day = convert_date(trade_date) if trade_date is not None else datetime.date.today()
    if day.strftime("%Y%m%d") not in calendar:
        warnings.warn("%s非交易日" % day.strftime("%Y%m%d"))
        return None
    if day > datetime.date(2010, 8, 24):
        url = SHFE_OPTION_URL.format(day.strftime("%Y%m%d"))
        try:
            r = requests.get(url, headers=SHFE_HEADERS)
            json_data = r.json()
            # Drop the subtotal ("小计"/"合计") and blank instrument rows.
            table_df = pd.DataFrame(
                [
                    row
                    for row in json_data["o_curinstrument"]
                    if row["INSTRUMENTID"] not in ["小计", "合计"]
                    and row["INSTRUMENTID"] != ""
                ]
            )
            contract_df = table_df[table_df["PRODUCTNAME"].str.strip() == symbol]
            # NOTE(review): product_df is never used, but constructing it
            # preserves the historical behaviour of returning None when
            # "o_curproduct" is missing from the payload.
            product_df = pd.DataFrame(json_data["o_curproduct"])
            product_df = product_df[product_df["PRODUCTNAME"].str.strip() == symbol]
            volatility_df = pd.DataFrame(json_data["o_cursigma"])
            volatility_df = volatility_df[
                volatility_df["PRODUCTNAME"].str.strip() == symbol
            ]
            # Rename the raw exchange columns; "_" marks columns to drop.
            contract_df.columns = [
                "_",
                "_",
                "_",
                "合约代码",
                "前结算价",
                "开盘价",
                "最高价",
                "最低价",
                "收盘价",
                "结算价",
                "涨跌1",
                "涨跌2",
                "成交量",
                "持仓量",
                "持仓量变化",
                "_",
                "行权量",
                "成交额",
                "德尔塔",
                "_",
                "_",
                "_",
                "_",
            ]
            contract_df = contract_df[[
                "合约代码",
                "开盘价",
                "最高价",
                "最低价",
                "收盘价",
                "前结算价",
                "结算价",
                "涨跌1",
                "涨跌2",
                "成交量",
                "持仓量",
                "持仓量变化",
                "成交额",
                "德尔塔",
                "行权量",
            ]]
            volatility_df.columns = [
                "_",
                "_",
                "_",
                "合约系列",
                "成交量",
                "持仓量",
                "持仓量变化",
                "行权量",
                "成交额",
                "隐含波动率",
                "_",
            ]
            volatility_df = volatility_df[[
                "合约系列",
                "成交量",
                "持仓量",
                "持仓量变化",
                "成交额",
                "行权量",
                "隐含波动率",
            ]]
            return contract_df, volatility_df
        except Exception:
            # Fixed: was a bare ``except:`` that also swallowed SystemExit
            # and KeyboardInterrupt.
            return None
if __name__ == "__main__":
    # Manual smoke test: fetch one day of option quotes from each of the
    # three exchanges (requires network access to the exchange sites).
    get_czce_option_daily_df = get_czce_option_daily(trade_date="20200817", symbol="动力煤期权")
    print(get_czce_option_daily_df)
    get_dce_option_daily_one, get_dce_option_daily_two = get_dce_option_daily(trade_date="20210113", symbol="玉米期权")
    print(get_dce_option_daily_one)
    print(get_dce_option_daily_two)
    get_shfe_option_daily_one, get_shfe_option_daily_two = get_shfe_option_daily(trade_date="20210312", symbol="天胶期权")
    print(get_shfe_option_daily_one)
    print(get_shfe_option_daily_two)
| 39.095541 | 118 | 0.463587 |
import datetime
import warnings
from io import StringIO, BytesIO
import requests
import pandas as pd
from akshare.option.cons import (
get_calendar,
convert_date,
DCE_DAILY_OPTION_URL,
SHFE_OPTION_URL,
CZCE_DAILY_OPTION_URL_3,
SHFE_HEADERS,
)
def get_dce_option_daily(trade_date="20200817", symbol="聚丙烯期权"):
calendar = get_calendar()
day = convert_date(trade_date) if trade_date is not None else datetime.date.today()
if day.strftime("%Y%m%d") not in calendar:
warnings.warn("%s非交易日" % day.strftime("%Y%m%d"))
return None
url = DCE_DAILY_OPTION_URL
payload = {
"dayQuotes.variety": "all",
"dayQuotes.trade_type": "1",
"year": str(day.year),
"month": str(day.month - 1),
"day": str(day.day),
"exportFlag": "excel",
}
res = requests.post(url, data=payload)
table_df = pd.read_excel(BytesIO(res.content), header=0)
another_df = table_df.iloc[
table_df[table_df.iloc[:, 0].str.contains("合约")].iloc[-1].name:, [0, 1]
]
another_df.reset_index(inplace=True, drop=True)
another_df.iloc[0] = another_df.iat[0, 0].split("\t")
another_df.columns = another_df.iloc[0]
another_df = another_df.iloc[1:, :]
if symbol == "豆粕期权":
return table_df[table_df["商品名称"] == "豆粕"], another_df[another_df.iloc[:, 0].str.contains("m")]
elif symbol == "玉米期权":
return table_df[table_df["商品名称"] == "玉米"], another_df[another_df.iloc[:, 0].str.contains("c")]
elif symbol == "铁矿石期权":
return table_df[table_df["商品名称"] == "铁矿石"], another_df[another_df.iloc[:, 0].str.contains("i")]
elif symbol == "液化石油气期权":
return table_df[table_df["商品名称"] == "液化石油气"], another_df[another_df.iloc[:, 0].str.contains("pg")]
elif symbol == "聚乙烯期权":
return table_df[table_df["商品名称"] == "聚乙烯"], another_df[another_df.iloc[:, 0].str.contains("i")]
elif symbol == "聚氯乙烯期权":
return table_df[table_df["商品名称"] == "聚氯乙烯"], another_df[another_df.iloc[:, 0].str.contains("v")]
elif symbol == "聚丙烯期权":
return table_df[table_df["商品名称"] == "聚丙烯"], another_df[another_df.iloc[:, 0].str.contains("pp")]
def get_czce_option_daily(trade_date="20191017", symbol="白糖期权"):
calendar = get_calendar()
day = convert_date(trade_date) if trade_date is not None else datetime.date.today()
if day.strftime("%Y%m%d") not in calendar:
warnings.warn("{}非交易日".format(day.strftime("%Y%m%d")))
return None
if day > datetime.date(2010, 8, 24):
url = CZCE_DAILY_OPTION_URL_3.format(day.strftime("%Y"), day.strftime("%Y%m%d"))
try:
r = requests.get(url)
f = StringIO(r.text)
table_df = pd.read_table(f, encoding="utf-8", skiprows=1, sep="|")
if symbol == "白糖期权":
temp_df = table_df[table_df.iloc[:, 0].str.contains("SR")]
temp_df.reset_index(inplace=True, drop=True)
return temp_df.iloc[:-1, :]
elif symbol == "PTA期权":
temp_df = table_df[table_df.iloc[:, 0].str.contains("TA")]
temp_df.reset_index(inplace=True, drop=True)
return temp_df.iloc[:-1, :]
elif symbol == "甲醇期权":
temp_df = table_df[table_df.iloc[:, 0].str.contains("MA")]
temp_df.reset_index(inplace=True, drop=True)
return temp_df.iloc[:-1, :]
elif symbol == "菜籽粕期权":
temp_df = table_df[table_df.iloc[:, 0].str.contains("RM")]
temp_df.reset_index(inplace=True, drop=True)
return temp_df.iloc[:-1, :]
elif symbol == "动力煤期权":
temp_df = table_df[table_df.iloc[:, 0].str.contains("ZC")]
temp_df.reset_index(inplace=True, drop=True)
return temp_df.iloc[:-1, :]
else:
temp_df = table_df[table_df.iloc[:, 0].str.contains("CF")]
temp_df.reset_index(inplace=True, drop=True)
return temp_df.iloc[:-1, :]
except:
return None
def get_shfe_option_daily(trade_date="20200827", symbol="铝期权"):
calendar = get_calendar()
day = convert_date(trade_date) if trade_date is not None else datetime.date.today()
if day.strftime("%Y%m%d") not in calendar:
warnings.warn("%s非交易日" % day.strftime("%Y%m%d"))
return None
if day > datetime.date(2010, 8, 24):
url = SHFE_OPTION_URL.format(day.strftime("%Y%m%d"))
try:
r = requests.get(url, headers=SHFE_HEADERS)
json_data = r.json()
table_df = pd.DataFrame(
[
row
for row in json_data["o_curinstrument"]
if row["INSTRUMENTID"] not in ["小计", "合计"]
and row["INSTRUMENTID"] != ""
]
)
contract_df = table_df[table_df["PRODUCTNAME"].str.strip() == symbol]
product_df = pd.DataFrame(json_data["o_curproduct"])
product_df = product_df[product_df["PRODUCTNAME"].str.strip() == symbol]
volatility_df = pd.DataFrame(json_data["o_cursigma"])
volatility_df = volatility_df[
volatility_df["PRODUCTNAME"].str.strip() == symbol
]
contract_df.columns = [
"_",
"_",
"_",
"合约代码",
"前结算价",
"开盘价",
"最高价",
"最低价",
"收盘价",
"结算价",
"涨跌1",
"涨跌2",
"成交量",
"持仓量",
"持仓量变化",
"_",
"行权量",
"成交额",
"德尔塔",
"_",
"_",
"_",
"_",
]
contract_df = contract_df[[
"合约代码",
"开盘价",
"最高价",
"最低价",
"收盘价",
"前结算价",
"结算价",
"涨跌1",
"涨跌2",
"成交量",
"持仓量",
"持仓量变化",
"成交额",
"德尔塔",
"行权量",
]]
volatility_df.columns = [
"_",
"_",
"_",
"合约系列",
"成交量",
"持仓量",
"持仓量变化",
"行权量",
"成交额",
"隐含波动率",
"_",
]
volatility_df = volatility_df[[
"合约系列",
"成交量",
"持仓量",
"持仓量变化",
"成交额",
"行权量",
"隐含波动率",
]]
return contract_df, volatility_df
except:
return None
if __name__ == "__main__":
get_czce_option_daily_df = get_czce_option_daily(trade_date="20200817", symbol="动力煤期权")
print(get_czce_option_daily_df)
get_dce_option_daily_one, get_dce_option_daily_two = get_dce_option_daily(trade_date="20210113", symbol="玉米期权")
print(get_dce_option_daily_one)
print(get_dce_option_daily_two)
get_shfe_option_daily_one, get_shfe_option_daily_two = get_shfe_option_daily(trade_date="20210312", symbol="天胶期权")
print(get_shfe_option_daily_one)
print(get_shfe_option_daily_two)
| true | true |
f7388e4bd0b2d195926289caa4bb2bb37ec68d7b | 676 | py | Python | lib-python/modified-2.4.1/encodings/raw_unicode_escape.py | camillobruni/pygirl | ddbd442d53061d6ff4af831c1eab153bcc771b5a | [
"MIT"
] | 12 | 2016-01-06T07:10:28.000Z | 2021-05-13T23:02:02.000Z | lib-python/modified-2.4.1/encodings/raw_unicode_escape.py | camillobruni/pygirl | ddbd442d53061d6ff4af831c1eab153bcc771b5a | [
"MIT"
] | null | null | null | lib-python/modified-2.4.1/encodings/raw_unicode_escape.py | camillobruni/pygirl | ddbd442d53061d6ff4af831c1eab153bcc771b5a | [
"MIT"
] | 2 | 2016-07-29T07:09:50.000Z | 2016-10-16T08:50:26.000Z | """ Python 'raw-unicode-escape' Codec
Written by Marc-Andre Lemburg (mal@lemburg.com).
(c) Copyright CNRI, All Rights Reserved. NO WARRANTY.
"""
import codecs
### Codec APIs
class Codec(codecs.Codec):
    """Stateless codec delegating to the C implementations of the
    raw-unicode-escape encoding."""
    # Note: Binding these as C functions will result in the class not
    # converting them to methods. This is intended.
    encode = staticmethod(codecs.raw_unicode_escape_encode)
    decode = staticmethod(codecs.raw_unicode_escape_decode)
class StreamWriter(Codec,codecs.StreamWriter):
    """Stream writer for the raw-unicode-escape encoding."""
    pass
class StreamReader(Codec,codecs.StreamReader):
    """Stream reader for the raw-unicode-escape encoding."""
    pass
### encodings module API
def getregentry():
    """Return the codec registration tuple expected by the ``encodings``
    package search function:
    (encoder, decoder, stream_reader, stream_writer)."""
    return (Codec.encode,Codec.decode,StreamReader,StreamWriter)
| 21.806452 | 69 | 0.748521 | import codecs
c):
encode = staticmethod(codecs.raw_unicode_escape_encode)
decode = staticmethod(codecs.raw_unicode_escape_decode)
class StreamWriter(Codec,codecs.StreamWriter):
pass
class StreamReader(Codec,codecs.StreamReader):
pass
e,Codec.decode,StreamReader,StreamWriter)
| true | true |
f7388e882f1c8dc07674770a9c19d61c63f93aa8 | 559 | py | Python | locking/urls.py | ZG-Tennis/django-locking | 2f10ff5b787d0de471ddb8bfa54cb70126b85f77 | [
"BSD-2-Clause-FreeBSD"
] | 21 | 2015-01-27T16:49:48.000Z | 2021-12-05T09:41:56.000Z | locking/urls.py | ZG-Tennis/django-locking | 2f10ff5b787d0de471ddb8bfa54cb70126b85f77 | [
"BSD-2-Clause-FreeBSD"
] | 4 | 2015-01-26T21:53:49.000Z | 2015-05-16T22:34:26.000Z | locking/urls.py | ZG-Tennis/django-locking | 2f10ff5b787d0de471ddb8bfa54cb70126b85f77 | [
"BSD-2-Clause-FreeBSD"
] | 4 | 2015-11-23T19:10:20.000Z | 2018-01-09T00:25:03.000Z | from django.conf.urls.defaults import *
urlpatterns = patterns('locking.views',
# verwijst naar een ajax-view voor het lockingmechanisme
(r'(?P<app>[\w-]+)/(?P<model>[\w-]+)/(?P<id>\d+)/lock/$', 'lock'),
(r'(?P<app>[\w-]+)/(?P<model>[\w-]+)/(?P<id>\d+)/unlock/$', 'unlock'),
(r'(?P<app>[\w-]+)/(?P<model>[\w-]+)/(?P<id>\d+)/is_locked/$', 'is_locked'),
(r'variables\.js$', 'js_variables', {}, 'locking_variables'),
)
urlpatterns += patterns('',
(r'jsi18n/$', 'django.views.i18n.javascript_catalog', {'packages': 'locking'}),
) | 43 | 83 | 0.561717 | from django.conf.urls.defaults import *
urlpatterns = patterns('locking.views',
(r'(?P<app>[\w-]+)/(?P<model>[\w-]+)/(?P<id>\d+)/lock/$', 'lock'),
(r'(?P<app>[\w-]+)/(?P<model>[\w-]+)/(?P<id>\d+)/unlock/$', 'unlock'),
(r'(?P<app>[\w-]+)/(?P<model>[\w-]+)/(?P<id>\d+)/is_locked/$', 'is_locked'),
(r'variables\.js$', 'js_variables', {}, 'locking_variables'),
)
urlpatterns += patterns('',
(r'jsi18n/$', 'django.views.i18n.javascript_catalog', {'packages': 'locking'}),
) | true | true |
f7388f3e868b6143d0048c4ef7eddde44943c9a8 | 10,219 | py | Python | bootstrap.py | RileyGibbs/django-emailmgr | 82dae79aceab20ac2146103067d31b01ee51731a | [
"BSD-3-Clause"
] | null | null | null | bootstrap.py | RileyGibbs/django-emailmgr | 82dae79aceab20ac2146103067d31b01ee51731a | [
"BSD-3-Clause"
] | null | null | null | bootstrap.py | RileyGibbs/django-emailmgr | 82dae79aceab20ac2146103067d31b01ee51731a | [
"BSD-3-Clause"
] | 1 | 2019-10-17T19:37:14.000Z | 2019-10-17T19:37:14.000Z | ##############################################################################
#
# Copyright (c) 2006 Zope Foundation and Contributors.
# All Rights Reserved.
#
# This software is subject to the provisions of the Zope Public License,
# Version 2.1 (ZPL). A copy of the ZPL should accompany this distribution.
# THIS SOFTWARE IS PROVIDED "AS IS" AND ANY AND ALL EXPRESS OR IMPLIED
# WARRANTIES ARE DISCLAIMED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
# WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND FITNESS
# FOR A PARTICULAR PURPOSE.
#
##############################################################################
"""Bootstrap a buildout-based project
Simply run this script in a directory containing a buildout.cfg.
The script accepts buildout command-line options, so you can
use the -c option to specify an alternate configuration file.
"""
import os, shutil, sys, tempfile, urllib.request, urllib.parse, urllib.error, urllib.request, urllib.error, urllib.parse, subprocess
from optparse import OptionParser
import imp
if sys.platform == 'win32':
    def quote(c):
        # os.spawn* on Windows does not quote arguments itself, so wrap
        # anything containing a space in double quotes.
        if ' ' in c:
            return '"%s"' % c # work around spawn lamosity on windows
        else:
            return c
else:
    # POSIX exec takes an argv list, so no quoting is needed.
    quote = str
# See zc.buildout.easy_install._has_broken_dash_S for motivation and comments.
stdout, stderr = subprocess.Popen(
[sys.executable, '-Sc',
'try:\n'
' import ConfigParser\n'
'except ImportError:\n'
' print 1\n'
'else:\n'
' print 0\n'],
stdout=subprocess.PIPE, stderr=subprocess.PIPE).communicate()
has_broken_dash_S = bool(int(stdout.strip()))
# In order to be more robust in the face of system Pythons, we want to
# run without site-packages loaded. This is somewhat tricky, in
# particular because Python 2.6's distutils imports site, so starting
# with the -S flag is not sufficient. However, we'll start with that:
if not has_broken_dash_S and 'site' in sys.modules:
    # We will restart with python -S.
    args = sys.argv[:]
    args[0:0] = [sys.executable, '-S']
    args = list(map(quote, args))
    # execv replaces the current process; nothing after this line runs
    # in the original interpreter.
    os.execv(sys.executable, args)
# Now we are running with -S. We'll get the clean sys.path, import site
# because distutils will do it later, and then reset the path and clean
# out any namespace packages from site-packages that might have been
# loaded by .pth files.
clean_path = sys.path[:]
import site # imported because of its side effects
sys.path[:] = clean_path
for k, v in list(sys.modules.items()):
    # Heuristic: a module with a single-entry __path__ that lacks an
    # __init__.py is a namespace package injected by a .pth file.
    if k in ('setuptools', 'pkg_resources') or (
        hasattr(v, '__path__') and
        len(v.__path__) == 1 and
        not os.path.exists(os.path.join(v.__path__[0], '__init__.py'))):
        # This is a namespace package. Remove it.
        sys.modules.pop(k)
# Platform flag and default installer-script locations.
is_jython = sys.platform.startswith('java')
setuptools_source = 'http://peak.telecommunity.com/dist/ez_setup.py'
distribute_source = 'http://python-distribute.org/distribute_setup.py'
# parsing arguments
def normalize_to_url(option, opt_str, value, parser):
    """optparse callback: normalise a path-or-URL option value to a URL.

    Values without a scheme are treated as local paths and converted to
    file:// URLs; ``--download-base`` additionally gains a trailing
    slash.  The result is stored on ``parser.values`` under the option's
    long name with dashes replaced by underscores.
    """
    normalized = value or None
    if normalized:
        if '://' not in normalized:
            # No scheme: treat it as a local filesystem path.
            local = os.path.abspath(os.path.expanduser(normalized))
            normalized = 'file://%s' % urllib.request.pathname2url(local)
        if opt_str == '--download-base' and not normalized.endswith('/'):
            # Download base needs a trailing slash to make the world happy.
            normalized += '/'
    dest = opt_str[2:].replace('-', '_')
    setattr(parser.values, dest, normalized)
usage = '''\
[DESIRED PYTHON FOR BUILDOUT] bootstrap.py [options]
Bootstraps a buildout-based project.
Simply run this script in a directory containing a buildout.cfg, using the
Python that you want bin/buildout to use.
Note that by using --setup-source and --download-base to point to
local resources, you can keep this script from going over the network.
'''
parser = OptionParser(usage=usage)
parser.add_option("-v", "--version", dest="version",
help="use a specific zc.buildout version")
parser.add_option("-d", "--distribute",
action="store_true", dest="use_distribute", default=False,
help="Use Distribute rather than Setuptools.")
parser.add_option("--setup-source", action="callback", dest="setup_source",
callback=normalize_to_url, nargs=1, type="string",
help=("Specify a URL or file location for the setup file. "
"If you use Setuptools, this will default to " +
setuptools_source + "; if you use Distribute, this "
"will default to " + distribute_source + "."))
parser.add_option("--download-base", action="callback", dest="download_base",
callback=normalize_to_url, nargs=1, type="string",
help=("Specify a URL or directory for downloading "
"zc.buildout and either Setuptools or Distribute. "
"Defaults to PyPI."))
parser.add_option("--eggs",
help=("Specify a directory for storing eggs. Defaults to "
"a temporary directory that is deleted when the "
"bootstrap script completes."))
parser.add_option("-t", "--accept-buildout-test-releases",
dest='accept_buildout_test_releases',
action="store_true", default=False,
help=("Normally, if you do not specify a --version, the "
"bootstrap script and buildout gets the newest "
"*final* versions of zc.buildout and its recipes and "
"extensions for you. If you use this flag, "
"bootstrap and buildout will get the newest releases "
"even if they are alphas or betas."))
parser.add_option("-c", None, action="store", dest="config_file",
help=("Specify the path to the buildout configuration "
"file to be used."))
options, args = parser.parse_args()
# if -c was provided, we push it back into args for buildout's main function
if options.config_file is not None:
args += ['-c', options.config_file]
if options.eggs:
eggs_dir = os.path.abspath(os.path.expanduser(options.eggs))
else:
eggs_dir = tempfile.mkdtemp()
if options.setup_source is None:
if options.use_distribute:
options.setup_source = distribute_source
else:
options.setup_source = setuptools_source
if options.accept_buildout_test_releases:
args.append('buildout:accept-buildout-test-releases=true')
args.append('bootstrap')
try:
    import pkg_resources
    import setuptools # A flag. Sometimes pkg_resources is installed alone.
    # NOTE(review): upstream bootstrap guards this check with
    # ``options.use_distribute``; as written, plain setuptools always
    # triggers the download path — confirm intended.
    if not hasattr(pkg_resources, '_distribute'):
        raise ImportError
except ImportError:
    # setuptools/distribute is missing: download and execute the
    # ez_setup/distribute_setup installer, targeting eggs_dir.
    ez_code = urllib.request.urlopen(
        options.setup_source).read().replace('\r\n', '\n')
    ez = {}
    exec(ez_code, ez)
    setup_args = dict(to_dir=eggs_dir, download_delay=0)
    if options.download_base:
        setup_args['download_base'] = options.download_base
    if options.use_distribute:
        setup_args['no_fake'] = True
    ez['use_setuptools'](**setup_args)
    if 'pkg_resources' in sys.modules:
        imp.reload(sys.modules['pkg_resources'])
    import pkg_resources
    # This does not (always?) update the default working set.  We will
    # do it.
    for path in sys.path:
        if path not in pkg_resources.working_set.entries:
            pkg_resources.working_set.add_entry(path)
# easy_install invocation that will install zc.buildout into eggs_dir:
# -m multi-version, -q quiet, -N no-deps, -x exclude-scripts, -d target dir.
cmd = [quote(sys.executable),
       '-c',
       quote('from setuptools.command.easy_install import main; main()'),
       '-mqNxd',
       quote(eggs_dir)]
if not has_broken_dash_S:
    cmd.insert(1, '-S')
find_links = options.download_base
if not find_links:
    # Escape hatch used by zc.buildout's own test infrastructure.
    find_links = os.environ.get('bootstrap-testing-find-links')
if find_links:
    cmd.extend(['-f', quote(find_links)])
if options.use_distribute:
    setup_requirement = 'distribute'
else:
    setup_requirement = 'setuptools'
ws = pkg_resources.working_set
setup_requirement_path = ws.find(
    pkg_resources.Requirement.parse(setup_requirement)).location
# Make the freshly-located setuptools importable in the child process.
env = dict(
    os.environ,
    PYTHONPATH=setup_requirement_path)
# Figure out the most recent final version of zc.buildout.
import setuptools.package_index
_final_parts = '*final-', '*final'
def _final_version(parsed_version):
for part in parsed_version:
if (part[:1] == '*') and (part not in _final_parts):
return False
return True
index = setuptools.package_index.PackageIndex(
search_path=[setup_requirement_path])
if find_links:
index.add_find_links((find_links,))
req = pkg_resources.Requirement.parse(requirement)
if index.obtain(req) is not None:
best = []
bestv = None
for dist in index[req.project_name]:
distv = dist.parsed_version
if _final_version(distv):
if bestv is None or distv > bestv:
best = [dist]
bestv = distv
elif distv == bestv:
best.append(dist)
if best:
best.sort()
version = best[-1].version
if version:
requirement = '=='.join((requirement, version))
cmd.append(requirement)
# Run easy_install in a child process to install zc.buildout.
if is_jython:
    import subprocess
    exitcode = subprocess.Popen(cmd, env=env).wait()
else: # Windows prefers this, apparently; otherwise we would prefer subprocess
    exitcode = os.spawnle(*([os.P_WAIT, sys.executable] + cmd + [env]))
if exitcode != 0:
    sys.stdout.flush()
    sys.stderr.flush()
    print ("An error occurred when trying to install zc.buildout. "
        "Look above this message for any errors that "
        "were output by easy_install.")
    sys.exit(exitcode)
# Make the freshly installed eggs importable, then hand over to buildout.
ws.add_entry(eggs_dir)
ws.require(requirement)
import zc.buildout.buildout
zc.buildout.buildout.main(args)
if not options.eggs: # clean up temporary egg directory
    shutil.rmtree(eggs_dir)
| 38.708333 | 132 | 0.643996 | the default working set. We will
# do it.
for path in sys.path:
if path not in pkg_resources.working_set.entries:
pkg_resources.working_set.add_entry(path)
# Build the easy_install command line that installs zc.buildout's egg into
# the dedicated eggs_dir (flags -mqNxd keep the install out of site-packages
# and quiet; see the easy_install option reference).
cmd = [quote(sys.executable),
       '-c',
       quote('from setuptools.command.easy_install import main; main()'),
       '-mqNxd',
       quote(eggs_dir)]
# Run the child interpreter with -S (skip site-packages) unless this Python's
# -S handling is known to be broken (detected earlier in the script).
if not has_broken_dash_S:
    cmd.insert(1, '-S')
# Where to look for distributions: --download-base wins, then an environment
# override used by the test suite.
find_links = options.download_base
if not find_links:
    find_links = os.environ.get('bootstrap-testing-find-links')
if find_links:
    cmd.extend(['-f', quote(find_links)])
# Pick the setuptools implementation requested on the command line and make
# sure the child process can import it by putting its egg on PYTHONPATH.
if options.use_distribute:
    setup_requirement = 'distribute'
else:
    setup_requirement = 'setuptools'
ws = pkg_resources.working_set
setup_requirement_path = ws.find(
    pkg_resources.Requirement.parse(setup_requirement)).location
env = dict(
    os.environ,
    PYTHONPATH=setup_requirement_path)
# Decide which zc.buildout version to install: an explicit --version wins;
# otherwise, unless test releases are accepted, query the index for the
# newest *final* release.
requirement = 'zc.buildout'
version = options.version
if version is None and not options.accept_buildout_test_releases:
    # Figure out the most recent final version of zc.buildout.
    import setuptools.package_index
    _final_parts = '*final-', '*final'
    def _final_version(parsed_version):
        # A release is "final" if its parsed version has no '*'-tagged
        # pre-release parts other than the '*final' markers themselves.
        for part in parsed_version:
            if (part[:1] == '*') and (part not in _final_parts):
                return False
        return True
    index = setuptools.package_index.PackageIndex(
        search_path=[setup_requirement_path])
    if find_links:
        index.add_find_links((find_links,))
    req = pkg_resources.Requirement.parse(requirement)
    if index.obtain(req) is not None:
        # Scan all candidate distributions and keep the highest final version.
        best = []
        bestv = None
        for dist in index[req.project_name]:
            distv = dist.parsed_version
            if _final_version(distv):
                if bestv is None or distv > bestv:
                    best = [dist]
                    bestv = distv
                elif distv == bestv:
                    best.append(dist)
        if best:
            best.sort()
            version = best[-1].version
if version:
    requirement = '=='.join((requirement, version))
cmd.append(requirement)
# Run easy_install in a child process.  Jython needs subprocess; elsewhere
# os.spawnle is used (see the inline note about Windows).
if is_jython:
    import subprocess
    exitcode = subprocess.Popen(cmd, env=env).wait()
else:  # Windows prefers this, apparently; otherwise we would prefer subprocess
    exitcode = os.spawnle(*([os.P_WAIT, sys.executable] + cmd + [env]))
if exitcode != 0:
    sys.stdout.flush()
    sys.stderr.flush()
    print ("An error occurred when trying to install zc.buildout. "
           "Look above this message for any errors that "
           "were output by easy_install.")
    sys.exit(exitcode)
# Make the freshly installed egg importable in *this* process, then hand
# control to buildout itself.
ws.add_entry(eggs_dir)
ws.require(requirement)
import zc.buildout.buildout
zc.buildout.buildout.main(args)
if not options.eggs:  # clean up temporary egg directory
    shutil.rmtree(eggs_dir)
| true | true |
f73890c6457ba003e6dfaf3adf5139ae6bd61634 | 783 | py | Python | multi_linugual_chatbot/offline_chat.py | HrushikeshShukla/multilingual_chatbot | 696b403ef4e5482e2f670924b557dd17375fc5a9 | [
"Apache-2.0"
] | null | null | null | multi_linugual_chatbot/offline_chat.py | HrushikeshShukla/multilingual_chatbot | 696b403ef4e5482e2f670924b557dd17375fc5a9 | [
"Apache-2.0"
] | null | null | null | multi_linugual_chatbot/offline_chat.py | HrushikeshShukla/multilingual_chatbot | 696b403ef4e5482e2f670924b557dd17375fc5a9 | [
"Apache-2.0"
] | null | null | null | lang=['en','mr'] #defining the languages
print("***Welcome to multilinugal chatbot.***")
print("***बहुभाषिक चॅटबॉटमध्ये आपले स्वागत आहे.***")
print("\n \nPlease Select language:\nकृपया भाषा निवडा:")
indx=int(input("\nPress 1 for english, मराठीसाठी 2 दाबा: "))
if indx==2:
from mbot import chat
print("starting marathi version.")
print("मराठी आवृत्ती सुरू करीत आहे.")
flag=True
print("रोबोट: नमस्कार")
while flag==True:
user_response=input("आपण: ")
bot_response,flag=chat(user_response)
print("रोबोट: "+str(bot_response))
else:
from ebot import chat
print("Starting english version")
flag=True
print("Bot: Hello!")
while flag==True:
user_response=input("You: ")
bot_response,flag=chat(user_response)
print("Bot: "+str(bot_response))
| 27.964286 | 60 | 0.641124 | lang=['en','mr']
print("***Welcome to multilinugal chatbot.***")
print("***बहुभाषिक चॅटबॉटमध्ये आपले स्वागत आहे.***")
print("\n \nPlease Select language:\nकृपया भाषा निवडा:")
indx=int(input("\nPress 1 for english, मराठीसाठी 2 दाबा: "))
if indx==2:
from mbot import chat
print("starting marathi version.")
print("मराठी आवृत्ती सुरू करीत आहे.")
flag=True
print("रोबोट: नमस्कार")
while flag==True:
user_response=input("आपण: ")
bot_response,flag=chat(user_response)
print("रोबोट: "+str(bot_response))
else:
from ebot import chat
print("Starting english version")
flag=True
print("Bot: Hello!")
while flag==True:
user_response=input("You: ")
bot_response,flag=chat(user_response)
print("Bot: "+str(bot_response))
| true | true |
f738911d788f2c0904e06435d327d83b0a4b94bd | 27,305 | py | Python | pycorrector/transformers/models/bert_generation/modeling_bert_generation.py | xinjianlv/pycorrector | 697fc09032d129b2777cf686bb05663f2fc3c04f | [
"Apache-2.0"
] | null | null | null | pycorrector/transformers/models/bert_generation/modeling_bert_generation.py | xinjianlv/pycorrector | 697fc09032d129b2777cf686bb05663f2fc3c04f | [
"Apache-2.0"
] | null | null | null | pycorrector/transformers/models/bert_generation/modeling_bert_generation.py | xinjianlv/pycorrector | 697fc09032d129b2777cf686bb05663f2fc3c04f | [
"Apache-2.0"
] | null | null | null | # coding=utf-8
# Copyright 2020 The Google AI Language Team Authors and The HuggingFace Inc. team.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""PyTorch BERT model specific for generation. """
import torch
import torch.utils.checkpoint
from torch import nn
from torch.nn import CrossEntropyLoss
from ...file_utils import (
add_code_sample_docstrings,
add_start_docstrings,
add_start_docstrings_to_model_forward,
replace_return_docstrings,
)
from ...modeling_outputs import BaseModelOutputWithPastAndCrossAttentions, CausalLMOutputWithCrossAttentions
from ...modeling_utils import PreTrainedModel
from ..bert.modeling_bert import BertEncoder
from .configuration_bert_generation import BertGenerationConfig
from pycorrector.utils.logger import logger
_CONFIG_FOR_DOC = "BertGenerationConfig"
_TOKENIZER_FOR_DOC = "BertGenerationTokenizer"
def load_tf_weights_in_bert_generation(
    model, tf_hub_path, model_class, is_encoder_named_decoder=False, is_encoder=False
):
    """Copy weights from a TensorFlow Hub seq-generation BERT module into *model*.

    Walks every variable exposed by the TF Hub module, descends the matching
    PyTorch attribute path component by component, and assigns the value in
    place (transposing non-embedding arrays for the PyTorch weight layout).

    Args:
        model: PyTorch model (or wrapper object) to populate in place.
        tf_hub_path: Path or handle of the TensorFlow Hub module to load.
        model_class: Attribute name on ``model`` holding the target sub-model,
            used when ``is_encoder`` is False.
        is_encoder_named_decoder: If True, TF variables under ``decoder`` are
            mapped onto the PyTorch ``encoder`` and TF ``encoder`` variables
            are skipped.
        is_encoder: If True, ``model`` itself is the target and TF ``decoder``
            variables are skipped.

    Returns:
        The populated ``model``.

    Raises:
        ImportError: If TensorFlow / tensorflow_hub / tensorflow_text are missing.
        AttributeError: If a TF variable path has no matching PyTorch attribute.
        AssertionError: If a matched tensor's shape disagrees with the TF array.
    """
    try:
        import numpy as np
        import tensorflow.compat.v1 as tf
        import tensorflow_hub as hub
        import tensorflow_text  # noqa: F401

        tf.disable_eager_execution()
    except ImportError:
        logger.error(
            "Loading a TensorFlow model in PyTorch, requires TensorFlow to be installed. Please see "
            "https://www.tensorflow.org/install/ for installation instructions."
        )
        raise
    tf_model = hub.Module(tf_hub_path)
    init = tf.global_variables_initializer()
    with tf.Session() as sess:
        init.run()
        all_variables = tf_model.variable_map
        # Track which TF variables were never copied, for the final report.
        keep_track_variables = all_variables.copy()
        for key in list(all_variables.keys()):
            if "global" in key:
                # e.g. global_step: optimizer bookkeeping, not model weights.
                logger.info(f"Skipping {key}...")
                continue
            if not is_encoder:
                model_pointer = getattr(model, model_class)
            else:
                model_pointer = model
            is_embedding = False
            logger.info(f"Trying to match {key}...")
            # remove start_string = "module/bert/"
            sub_layers = key.split("/")[2:]
            if is_encoder_named_decoder and sub_layers[0] == "encoder":
                logger.info(f"Skipping encoder layer {key} for decoder")
                continue
            if is_encoder and sub_layers[0] == "decoder":
                logger.info(f"Skipping decoder layer {key} for encoder")
                continue
            # Descend the TF path one component at a time, translating TF
            # naming (kernel/gamma/beta/encdec/...) to PyTorch attributes.
            for i, sub_layer in enumerate(sub_layers):
                if sub_layer == "embeddings":
                    is_embedding = True
                elif sub_layer == "LayerNorm":
                    is_embedding = False
                if "layer" in sub_layer:
                    model_pointer = model_pointer.layer[int(sub_layer.split("_")[-1])]
                elif sub_layer in ["kernel", "gamma"]:
                    model_pointer = model_pointer.weight
                elif sub_layer == "beta":
                    model_pointer = model_pointer.bias
                elif sub_layer == "encdec":
                    model_pointer = model_pointer.crossattention.self
                elif sub_layer == "encdec_output":
                    model_pointer = model_pointer.crossattention.output
                elif is_encoder_named_decoder and sub_layer == "decoder":
                    model_pointer = model_pointer.encoder
                else:
                    # "attention" followed by an encdec component is handled by
                    # the encdec branches above, so skip the extra hop here.
                    if sub_layer == "attention" and "encdec" in sub_layers[i + 1]:
                        continue
                    try:
                        model_pointer = getattr(model_pointer, sub_layer)
                    except AttributeError:
                        logger.info(f"Skipping to initialize {key} at {sub_layer}...")
                        raise AttributeError
            array = np.asarray(sess.run(all_variables[key]))
            if not is_embedding:
                # TF dense kernels are stored (in, out); PyTorch Linear weights
                # are (out, in), so non-embedding arrays are transposed.
                logger.info("Transposing numpy weight of shape {} for {}".format(array.shape, key))
                array = np.transpose(array)
            else:
                model_pointer = model_pointer.weight
            try:
                assert (
                    model_pointer.shape == array.shape
                ), f"Pointer shape {model_pointer.shape} and array shape {array.shape} mismatched"
            except AssertionError as e:
                e.args += (model_pointer.shape, array.shape)
                raise
            logger.info(f"Initialize PyTorch weight {key}")
            model_pointer.data = torch.from_numpy(array.astype(np.float32))
            keep_track_variables.pop(key, None)
        logger.info("Weights not copied to PyTorch model: {}".format(", ".join(keep_track_variables.keys())))
        return model
class BertGenerationEmbeddings(nn.Module):
    """Word + absolute-position embeddings, followed by LayerNorm and dropout.

    Unlike classic BERT embeddings there are no token-type embeddings here.
    """

    def __init__(self, config):
        super().__init__()
        self.word_embeddings = nn.Embedding(config.vocab_size, config.hidden_size, padding_idx=config.pad_token_id)
        self.position_embeddings = nn.Embedding(config.max_position_embeddings, config.hidden_size)
        # Keep the camel-cased `LayerNorm` attribute name: it must match the
        # variable name used by TensorFlow checkpoints for loading to work.
        self.LayerNorm = torch.nn.LayerNorm(config.hidden_size, eps=config.layer_norm_eps)
        self.dropout = nn.Dropout(config.hidden_dropout_prob)

        # Precomputed (1, max_position_embeddings) position index buffer,
        # contiguous in memory and exported when the module is serialized.
        self.register_buffer("position_ids", torch.arange(config.max_position_embeddings).expand((1, -1)))

    def forward(self, input_ids=None, position_ids=None, inputs_embeds=None, past_key_values_length=0):
        """Return embedded inputs of shape (batch, seq, hidden)."""
        # Derive (batch, seq) from whichever representation was supplied.
        shape = input_ids.size() if input_ids is not None else inputs_embeds.size()[:-1]
        seq_len = shape[1]

        if position_ids is None:
            # Positions continue after any cached prefix.
            start = past_key_values_length
            position_ids = self.position_ids[:, start : start + seq_len]

        if inputs_embeds is None:
            inputs_embeds = self.word_embeddings(input_ids)

        combined = inputs_embeds + self.position_embeddings(position_ids)
        return self.dropout(self.LayerNorm(combined))
class BertGenerationPreTrainedModel(PreTrainedModel):
    """
    Abstract base class handling weight initialization and the simple
    download/load interface for pretrained checkpoints.
    """

    config_class = BertGenerationConfig
    base_model_prefix = "bert"
    _keys_to_ignore_on_load_missing = [r"position_ids"]

    def _init_weights(self, module):
        """Initialize *module*'s weights in place, per module type."""
        is_linear = isinstance(module, nn.Linear)
        if is_linear or isinstance(module, nn.Embedding):
            # Plain normal init; slightly different from the TF version which
            # uses truncated_normal (cf https://github.com/pytorch/pytorch/pull/5617).
            module.weight.data.normal_(mean=0.0, std=self.config.initializer_range)
        elif isinstance(module, nn.LayerNorm):
            module.bias.data.zero_()
            module.weight.data.fill_(1.0)
        if is_linear and module.bias is not None:
            module.bias.data.zero_()
BERT_GENERATION_START_DOCSTRING = r"""
This model inherits from :class:`~transformers.PreTrainedModel`. Check the superclass documentation for the generic
methods the library implements for all its model (such as downloading or saving, resizing the input embeddings,
pruning heads etc.)
This model is also a PyTorch `torch.nn.Module <https://pytorch.org/docs/stable/nn.html#torch.nn.Module>`__
subclass. Use it as a regular PyTorch Module and refer to the PyTorch documentation for all matter related to
general usage and behavior.
Parameters:
config (:class:`~transformers.BertGenerationConfig`): Model configuration class with all the parameters of the model.
Initializing with a config file does not load the weights associated with the model, only the
configuration. Check out the :meth:`~transformers.PreTrainedModel.from_pretrained` method to load the model
weights.
"""
BERT_GENERATION_INPUTS_DOCSTRING = r"""
Args:
input_ids (:obj:`torch.LongTensor` of shape :obj:`({0})`):
Indices of input sequence tokens in the vocabulary.
Indices can be obtained using :class:`~transformers.BertGenerationTokenizer`. See
:meth:`transformers.PreTrainedTokenizer.__call__` and :meth:`transformers.PreTrainedTokenizer.encode` for
details.
`What are input IDs? <../glossary.html#input-ids>`__
attention_mask (:obj:`torch.FloatTensor` of shape :obj:`({0})`, `optional`):
Mask to avoid performing attention on padding token indices. Mask values selected in ``[0, 1]``:
- 1 for tokens that are **not masked**,
- 0 for tokens that are **masked**.
`What are attention masks? <../glossary.html#attention-mask>`__
position_ids (:obj:`torch.LongTensor` of shape :obj:`({0})`, `optional`):
Indices of positions of each input sequence tokens in the position embeddings. Selected in the range ``[0,
config.max_position_embeddings - 1]``.
`What are position IDs? <../glossary.html#position-ids>`_
head_mask (:obj:`torch.FloatTensor` of shape :obj:`(num_heads,)` or :obj:`(num_layers, num_heads)`, `optional`):
Mask to nullify selected heads of the self-attention modules. Mask values selected in ``[0, 1]``:
- 1 indicates the head is **not masked**,
- 0 indicates the head is **masked**.
inputs_embeds (:obj:`torch.FloatTensor` of shape :obj:`({0}, hidden_size)`, `optional`):
Optionally, instead of passing :obj:`input_ids` you can choose to directly pass an embedded representation.
This is useful if you want more control over how to convert :obj:`input_ids` indices into associated
vectors than the model's internal embedding lookup matrix.
output_attentions (:obj:`bool`, `optional`):
Whether or not to return the attentions tensors of all attention layers. See ``attentions`` under returned
tensors for more detail.
output_hidden_states (:obj:`bool`, `optional`):
Whether or not to return the hidden states of all layers. See ``hidden_states`` under returned tensors for
more detail.
return_dict (:obj:`bool`, `optional`):
Whether or not to return a :class:`~transformers.file_utils.ModelOutput` instead of a plain tuple.
"""
@add_start_docstrings(
    "The bare BertGeneration model transformer outputting raw hidden-states without any specific head on top.",
    BERT_GENERATION_START_DOCSTRING,
)
class BertGenerationEncoder(BertGenerationPreTrainedModel):
    """
    The model can behave as an encoder (with only self-attention) as well as a decoder, in which case a layer of
    cross-attention is added between the self-attention layers, following the architecture described in `Attention is
    all you need <https://arxiv.org/abs/1706.03762>`__ by Ashish Vaswani, Noam Shazeer, Niki Parmar, Jakob Uszkoreit,
    Llion Jones, Aidan N. Gomez, Lukasz Kaiser and Illia Polosukhin.

    This model should be used when leveraging Bert or Roberta checkpoints for the
    :class:`~transformers.EncoderDecoderModel` class as described in `Leveraging Pre-trained Checkpoints for Sequence
    Generation Tasks <https://arxiv.org/abs/1907.12461>`__ by Sascha Rothe, Shashi Narayan, and Aliaksei Severyn.

    To behave as a decoder the model needs to be initialized with the :obj:`is_decoder` argument of the configuration
    set to :obj:`True`. To be used in a Seq2Seq model, the model needs to be initialized with both :obj:`is_decoder`
    argument and :obj:`add_cross_attention` set to :obj:`True`; an :obj:`encoder_hidden_states` is then expected as an
    input to the forward pass.
    """

    def __init__(self, config):
        super().__init__(config)
        self.config = config

        # Word + position embeddings (BertGenerationEmbeddings carries no
        # token-type embeddings), followed by a standard BertEncoder stack.
        self.embeddings = BertGenerationEmbeddings(config)
        self.encoder = BertEncoder(config)

        self.init_weights()

    def get_input_embeddings(self):
        # Exposes the word-embedding table for weight tying / resizing.
        return self.embeddings.word_embeddings

    def set_input_embeddings(self, value):
        self.embeddings.word_embeddings = value

    def _prune_heads(self, heads_to_prune):
        """
        Prunes heads of the model. heads_to_prune: dict of {layer_num: list of heads to prune in this layer} See base
        class PreTrainedModel
        """
        for layer, heads in heads_to_prune.items():
            self.encoder.layer[layer].attention.prune_heads(heads)

    @add_start_docstrings_to_model_forward(BERT_GENERATION_INPUTS_DOCSTRING.format("batch_size, sequence_length"))
    @add_code_sample_docstrings(
        tokenizer_class=_TOKENIZER_FOR_DOC,
        checkpoint="google/bert_for_seq_generation_L-24_bbc_encoder",
        output_type=BaseModelOutputWithPastAndCrossAttentions,
        config_class=_CONFIG_FOR_DOC,
    )
    def forward(
        self,
        input_ids=None,
        attention_mask=None,
        position_ids=None,
        head_mask=None,
        inputs_embeds=None,
        encoder_hidden_states=None,
        encoder_attention_mask=None,
        past_key_values=None,
        use_cache=None,
        output_attentions=None,
        output_hidden_states=None,
        return_dict=None,
    ):
        r"""
        encoder_hidden_states  (:obj:`torch.FloatTensor` of shape :obj:`(batch_size, sequence_length, hidden_size)`, `optional`):
            Sequence of hidden-states at the output of the last layer of the encoder. Used in the cross-attention if
            the model is configured as a decoder.
        encoder_attention_mask (:obj:`torch.FloatTensor` of shape :obj:`(batch_size, sequence_length)`, `optional`):
            Mask to avoid performing attention on the padding token indices of the encoder input. This mask is used in
            the cross-attention if the model is configured as a decoder. Mask values selected in ``[0, 1]``: ``1`` for
            tokens that are NOT MASKED, ``0`` for MASKED tokens.
        past_key_values (:obj:`tuple(tuple(torch.FloatTensor))` of length :obj:`config.n_layers` with each tuple having 4 tensors of shape :obj:`(batch_size, num_heads, sequence_length - 1, embed_size_per_head)`):
            Contains precomputed key and value hidden states of the attention blocks. Can be used to speed up decoding.
            If :obj:`past_key_values` are used, the user can optionally input only the last :obj:`decoder_input_ids`
            (those that don't have their past key value states given to this model) of shape :obj:`(batch_size, 1)`
            instead of all :obj:`decoder_input_ids` of shape :obj:`(batch_size, sequence_length)`.
        use_cache (:obj:`bool`, `optional`):
            If set to :obj:`True`, :obj:`past_key_values` key value states are returned and can be used to speed up
            decoding (see :obj:`past_key_values`).
        """
        # Resolve per-call overrides against the config defaults.
        output_attentions = output_attentions if output_attentions is not None else self.config.output_attentions
        output_hidden_states = (
            output_hidden_states if output_hidden_states is not None else self.config.output_hidden_states
        )
        return_dict = return_dict if return_dict is not None else self.config.use_return_dict

        # Caching key/value states is only meaningful in decoder mode.
        if self.config.is_decoder:
            use_cache = use_cache if use_cache is not None else self.config.use_cache
        else:
            use_cache = False

        if input_ids is not None and inputs_embeds is not None:
            raise ValueError("You cannot specify both input_ids and inputs_embeds at the same time")
        elif input_ids is not None:
            input_shape = input_ids.size()
            batch_size, seq_length = input_shape
        elif inputs_embeds is not None:
            input_shape = inputs_embeds.size()[:-1]
            batch_size, seq_length = input_shape
        else:
            raise ValueError("You have to specify either input_ids or inputs_embeds")

        device = input_ids.device if input_ids is not None else inputs_embeds.device

        # past_key_values_length: length of the cached prefix, if any.
        past_key_values_length = past_key_values[0][0].shape[2] if past_key_values is not None else 0

        # Default attention mask covers the cached prefix plus the new tokens.
        if attention_mask is None:
            attention_mask = torch.ones(((batch_size, seq_length + past_key_values_length)), device=device)

        # We can provide a self-attention mask of dimensions [batch_size, from_seq_length, to_seq_length]
        # ourselves in which case we just need to make it broadcastable to all heads.
        # NOTE(review): when use_cache is set, no extended mask is built here;
        # masking then appears to be handled downstream — confirm against BertEncoder.
        extended_attention_mask = None
        if not use_cache:
            extended_attention_mask: torch.Tensor = self.get_extended_attention_mask(
                attention_mask, input_shape, device
            )

        # If a 2D or 3D attention mask is provided for the cross-attention
        # we need to make broadcastable to [batch_size, num_heads, seq_length, seq_length]
        if self.config.is_decoder and encoder_hidden_states is not None:
            encoder_batch_size, encoder_sequence_length, _ = encoder_hidden_states.size()
            encoder_hidden_shape = (encoder_batch_size, encoder_sequence_length)
            if encoder_attention_mask is None:
                encoder_attention_mask = torch.ones(encoder_hidden_shape, device=device)
            encoder_extended_attention_mask = self.invert_attention_mask(encoder_attention_mask)
        else:
            encoder_extended_attention_mask = None

        # Prepare head mask if needed
        # 1.0 in head_mask indicate we keep the head
        # attention_probs has shape bsz x n_heads x N x N
        # input head_mask has shape [num_heads] or [num_hidden_layers x num_heads]
        # and head_mask is converted to shape [num_hidden_layers x batch x num_heads x seq_length x seq_length]
        head_mask = self.get_head_mask(head_mask, self.config.num_hidden_layers)

        embedding_output = self.embeddings(
            input_ids=input_ids,
            position_ids=position_ids,
            inputs_embeds=inputs_embeds,
            past_key_values_length=past_key_values_length,
        )

        encoder_outputs = self.encoder(
            embedding_output,
            attention_mask=extended_attention_mask,
            head_mask=head_mask,
            encoder_hidden_states=encoder_hidden_states,
            encoder_attention_mask=encoder_extended_attention_mask,
            past_key_values=past_key_values,
            use_cache=use_cache,
            output_attentions=output_attentions,
            output_hidden_states=output_hidden_states,
            return_dict=return_dict,
        )
        sequence_output = encoder_outputs[0]

        if not return_dict:
            return (sequence_output,) + encoder_outputs[1:]

        return BaseModelOutputWithPastAndCrossAttentions(
            last_hidden_state=sequence_output,
            past_key_values=encoder_outputs.past_key_values,
            hidden_states=encoder_outputs.hidden_states,
            attentions=encoder_outputs.attentions,
            cross_attentions=encoder_outputs.cross_attentions,
        )
class BertGenerationOnlyLMHead(nn.Module):
    """Linear projection from hidden states to vocabulary logits.

    Unlike the classic BERT LM head there is no intermediate transform layer;
    the bias is kept as a separate parameter tied onto the linear layer.
    """

    def __init__(self, config):
        super().__init__()
        self.decoder = nn.Linear(config.hidden_size, config.vocab_size, bias=False)
        self.bias = nn.Parameter(torch.zeros(config.vocab_size))
        # Tie the standalone bias onto the linear layer so that
        # `resize_token_embeddings` resizes both together.
        self.decoder.bias = self.bias

    def forward(self, hidden_states):
        """Return vocabulary logits of shape (..., vocab_size)."""
        return self.decoder(hidden_states)
@add_start_docstrings(
    """BertGeneration Model with a `language modeling` head on top for CLM fine-tuning. """,
    BERT_GENERATION_START_DOCSTRING,
)
class BertGenerationDecoder(BertGenerationPreTrainedModel):
    """:class:`BertGenerationEncoder` with a linear LM head, for causal language modeling."""

    def __init__(self, config):
        super().__init__(config)

        if not config.is_decoder:
            # Fix: use `warning` — `warn` is a deprecated stdlib alias and is
            # not provided by the loguru-style logger imported in this module.
            logger.warning("If you want to use `BertGenerationDecoder` as a standalone, add `is_decoder=True.`")

        self.bert = BertGenerationEncoder(config)
        self.lm_head = BertGenerationOnlyLMHead(config)

        self.init_weights()

    def get_output_embeddings(self):
        # The output projection doubles as the output embedding matrix.
        return self.lm_head.decoder

    def set_output_embeddings(self, new_embeddings):
        self.lm_head.decoder = new_embeddings

    @add_start_docstrings_to_model_forward(BERT_GENERATION_INPUTS_DOCSTRING.format("batch_size, sequence_length"))
    @replace_return_docstrings(output_type=CausalLMOutputWithCrossAttentions, config_class=_CONFIG_FOR_DOC)
    def forward(
        self,
        input_ids=None,
        attention_mask=None,
        position_ids=None,
        head_mask=None,
        inputs_embeds=None,
        encoder_hidden_states=None,
        encoder_attention_mask=None,
        labels=None,
        past_key_values=None,
        use_cache=None,
        output_attentions=None,
        output_hidden_states=None,
        return_dict=None,
    ):
        r"""
        encoder_hidden_states  (:obj:`torch.FloatTensor` of shape :obj:`(batch_size, sequence_length, hidden_size)`, `optional`):
            Sequence of hidden-states at the output of the last layer of the encoder. Used in the cross-attention if
            the model is configured as a decoder.
        encoder_attention_mask (:obj:`torch.FloatTensor` of shape :obj:`(batch_size, sequence_length)`, `optional`):
            Mask to avoid performing attention on the padding token indices of the encoder input. This mask is used in
            the cross-attention if the model is configured as a decoder. Mask values selected in ``[0, 1]``:

            - 1 for tokens that are **not masked**,
            - 0 for tokens that are **masked**.

        labels (:obj:`torch.LongTensor` of shape :obj:`(batch_size, sequence_length)`, `optional`):
            Labels for computing the left-to-right language modeling loss (next word prediction). Indices should be in
            ``[-100, 0, ..., config.vocab_size]`` (see ``input_ids`` docstring) Tokens with indices set to ``-100`` are
            ignored (masked), the loss is only computed for the tokens with labels in ``[0, ..., config.vocab_size]``
        past_key_values (:obj:`tuple(tuple(torch.FloatTensor))` of length :obj:`config.n_layers` with each tuple having 4 tensors of shape :obj:`(batch_size, num_heads, sequence_length - 1, embed_size_per_head)`):
            Contains precomputed key and value hidden states of the attention blocks. Can be used to speed up decoding.

            If :obj:`past_key_values` are used, the user can optionally input only the last :obj:`decoder_input_ids`
            (those that don't have their past key value states given to this model) of shape :obj:`(batch_size, 1)`
            instead of all :obj:`decoder_input_ids` of shape :obj:`(batch_size, sequence_length)`.
        use_cache (:obj:`bool`, `optional`):
            If set to :obj:`True`, :obj:`past_key_values` key value states are returned and can be used to speed up
            decoding (see :obj:`past_key_values`).

        Returns:

        Example::

            >>> from transformers import BertGenerationTokenizer, BertGenerationDecoder, BertGenerationConfig
            >>> import torch

            >>> tokenizer = BertGenerationTokenizer.from_pretrained('google/bert_for_seq_generation_L-24_bbc_encoder')
            >>> config = BertGenerationConfig.from_pretrained("google/bert_for_seq_generation_L-24_bbc_encoder")
            >>> config.is_decoder = True
            >>> model = BertGenerationDecoder.from_pretrained('google/bert_for_seq_generation_L-24_bbc_encoder', config=config)

            >>> inputs = tokenizer("Hello, my dog is cute", return_token_type_ids=False, return_tensors="pt")
            >>> outputs = model(**inputs)

            >>> prediction_logits = outputs.logits
        """
        return_dict = return_dict if return_dict is not None else self.config.use_return_dict
        # Caching is pointless (and incompatible) when computing a training loss.
        if labels is not None:
            use_cache = False

        outputs = self.bert(
            input_ids,
            attention_mask=attention_mask,
            position_ids=position_ids,
            head_mask=head_mask,
            inputs_embeds=inputs_embeds,
            encoder_hidden_states=encoder_hidden_states,
            encoder_attention_mask=encoder_attention_mask,
            past_key_values=past_key_values,
            use_cache=use_cache,
            output_attentions=output_attentions,
            output_hidden_states=output_hidden_states,
            return_dict=return_dict,
        )

        sequence_output = outputs[0]
        prediction_scores = self.lm_head(sequence_output)

        lm_loss = None
        if labels is not None:
            # we are doing next-token prediction; shift prediction scores and input ids by one
            shifted_prediction_scores = prediction_scores[:, :-1, :].contiguous()
            labels = labels[:, 1:].contiguous()
            loss_fct = CrossEntropyLoss()
            lm_loss = loss_fct(shifted_prediction_scores.view(-1, self.config.vocab_size), labels.view(-1))

        if not return_dict:
            output = (prediction_scores,) + outputs[1:]
            return ((lm_loss,) + output) if lm_loss is not None else output

        return CausalLMOutputWithCrossAttentions(
            loss=lm_loss,
            logits=prediction_scores,
            past_key_values=outputs.past_key_values,
            hidden_states=outputs.hidden_states,
            attentions=outputs.attentions,
            cross_attentions=outputs.cross_attentions,
        )

    def prepare_inputs_for_generation(self, input_ids, past=None, attention_mask=None, **model_kwargs):
        """Assemble the model inputs for one generation step (see `generate`)."""
        input_shape = input_ids.shape
        # if model is used as a decoder in encoder-decoder model, the decoder attention mask is created on the fly
        if attention_mask is None:
            attention_mask = input_ids.new_ones(input_shape)

        # cut decoder_input_ids if past is used: only the newest token is needed
        if past is not None:
            input_ids = input_ids[:, -1:]

        return {"input_ids": input_ids, "attention_mask": attention_mask}

    def _reorder_cache(self, past, beam_idx):
        """Reorder cached key/value states to follow `beam_idx` during beam search."""
        reordered_past = ()
        for layer_past in past:
            reordered_past += (tuple(past_state.index_select(0, beam_idx) for past_state in layer_past),)
        return reordered_past
| 47.158895 | 213 | 0.672587 |
import torch
import torch.utils.checkpoint
from torch import nn
from torch.nn import CrossEntropyLoss
from ...file_utils import (
add_code_sample_docstrings,
add_start_docstrings,
add_start_docstrings_to_model_forward,
replace_return_docstrings,
)
from ...modeling_outputs import BaseModelOutputWithPastAndCrossAttentions, CausalLMOutputWithCrossAttentions
from ...modeling_utils import PreTrainedModel
from ..bert.modeling_bert import BertEncoder
from .configuration_bert_generation import BertGenerationConfig
from pycorrector.utils.logger import logger
_CONFIG_FOR_DOC = "BertGenerationConfig"
_TOKENIZER_FOR_DOC = "BertGenerationTokenizer"
def load_tf_weights_in_bert_generation(
model, tf_hub_path, model_class, is_encoder_named_decoder=False, is_encoder=False
):
try:
import numpy as np
import tensorflow.compat.v1 as tf
import tensorflow_hub as hub
import tensorflow_text
tf.disable_eager_execution()
except ImportError:
logger.error(
"Loading a TensorFlow model in PyTorch, requires TensorFlow to be installed. Please see "
"https://www.tensorflow.org/install/ for installation instructions."
)
raise
tf_model = hub.Module(tf_hub_path)
init = tf.global_variables_initializer()
with tf.Session() as sess:
init.run()
all_variables = tf_model.variable_map
keep_track_variables = all_variables.copy()
for key in list(all_variables.keys()):
if "global" in key:
logger.info(f"Skipping {key}...")
continue
if not is_encoder:
model_pointer = getattr(model, model_class)
else:
model_pointer = model
is_embedding = False
logger.info(f"Trying to match {key}...")
sub_layers = key.split("/")[2:]
if is_encoder_named_decoder and sub_layers[0] == "encoder":
logger.info(f"Skipping encoder layer {key} for decoder")
continue
if is_encoder and sub_layers[0] == "decoder":
logger.info(f"Skipping decoder layer {key} for encoder")
continue
for i, sub_layer in enumerate(sub_layers):
if sub_layer == "embeddings":
is_embedding = True
elif sub_layer == "LayerNorm":
is_embedding = False
if "layer" in sub_layer:
model_pointer = model_pointer.layer[int(sub_layer.split("_")[-1])]
elif sub_layer in ["kernel", "gamma"]:
model_pointer = model_pointer.weight
elif sub_layer == "beta":
model_pointer = model_pointer.bias
elif sub_layer == "encdec":
model_pointer = model_pointer.crossattention.self
elif sub_layer == "encdec_output":
model_pointer = model_pointer.crossattention.output
elif is_encoder_named_decoder and sub_layer == "decoder":
model_pointer = model_pointer.encoder
else:
if sub_layer == "attention" and "encdec" in sub_layers[i + 1]:
continue
try:
model_pointer = getattr(model_pointer, sub_layer)
except AttributeError:
logger.info(f"Skipping to initialize {key} at {sub_layer}...")
raise AttributeError
array = np.asarray(sess.run(all_variables[key]))
if not is_embedding:
logger.info("Transposing numpy weight of shape {} for {}".format(array.shape, key))
array = np.transpose(array)
else:
model_pointer = model_pointer.weight
try:
assert (
model_pointer.shape == array.shape
), f"Pointer shape {model_pointer.shape} and array shape {array.shape} mismatched"
except AssertionError as e:
e.args += (model_pointer.shape, array.shape)
raise
logger.info(f"Initialize PyTorch weight {key}")
model_pointer.data = torch.from_numpy(array.astype(np.float32))
keep_track_variables.pop(key, None)
logger.info("Weights not copied to PyTorch model: {}".format(", ".join(keep_track_variables.keys())))
return model
class BertGenerationEmbeddings(nn.Module):
def __init__(self, config):
super().__init__()
self.word_embeddings = nn.Embedding(config.vocab_size, config.hidden_size, padding_idx=config.pad_token_id)
self.position_embeddings = nn.Embedding(config.max_position_embeddings, config.hidden_size)
self.LayerNorm = torch.nn.LayerNorm(config.hidden_size, eps=config.layer_norm_eps)
self.dropout = nn.Dropout(config.hidden_dropout_prob)
self.register_buffer("position_ids", torch.arange(config.max_position_embeddings).expand((1, -1)))
def forward(self, input_ids=None, position_ids=None, inputs_embeds=None, past_key_values_length=0):
if input_ids is not None:
input_shape = input_ids.size()
else:
input_shape = inputs_embeds.size()[:-1]
seq_length = input_shape[1]
if position_ids is None:
position_ids = self.position_ids[:, past_key_values_length : seq_length + past_key_values_length]
if inputs_embeds is None:
inputs_embeds = self.word_embeddings(input_ids)
position_embeddings = self.position_embeddings(position_ids)
embeddings = inputs_embeds + position_embeddings
embeddings = self.LayerNorm(embeddings)
embeddings = self.dropout(embeddings)
return embeddings
class BertGenerationPreTrainedModel(PreTrainedModel):
config_class = BertGenerationConfig
base_model_prefix = "bert"
_keys_to_ignore_on_load_missing = [r"position_ids"]
def _init_weights(self, module):
if isinstance(module, (nn.Linear, nn.Embedding)):
module.weight.data.normal_(mean=0.0, std=self.config.initializer_range)
elif isinstance(module, nn.LayerNorm):
module.bias.data.zero_()
module.weight.data.fill_(1.0)
if isinstance(module, nn.Linear) and module.bias is not None:
module.bias.data.zero_()
BERT_GENERATION_START_DOCSTRING = r"""
This model inherits from :class:`~transformers.PreTrainedModel`. Check the superclass documentation for the generic
methods the library implements for all its model (such as downloading or saving, resizing the input embeddings,
pruning heads etc.)
This model is also a PyTorch `torch.nn.Module <https://pytorch.org/docs/stable/nn.html#torch.nn.Module>`__
subclass. Use it as a regular PyTorch Module and refer to the PyTorch documentation for all matter related to
general usage and behavior.
Parameters:
config (:class:`~transformers.BertGenerationConfig`): Model configuration class with all the parameters of the model.
Initializing with a config file does not load the weights associated with the model, only the
configuration. Check out the :meth:`~transformers.PreTrainedModel.from_pretrained` method to load the model
weights.
"""
BERT_GENERATION_INPUTS_DOCSTRING = r"""
Args:
input_ids (:obj:`torch.LongTensor` of shape :obj:`({0})`):
Indices of input sequence tokens in the vocabulary.
Indices can be obtained using :class:`~transformers.BertGenerationTokenizer`. See
:meth:`transformers.PreTrainedTokenizer.__call__` and :meth:`transformers.PreTrainedTokenizer.encode` for
details.
`What are input IDs? <../glossary.html#input-ids>`__
attention_mask (:obj:`torch.FloatTensor` of shape :obj:`({0})`, `optional`):
Mask to avoid performing attention on padding token indices. Mask values selected in ``[0, 1]``:
- 1 for tokens that are **not masked**,
- 0 for tokens that are **masked**.
`What are attention masks? <../glossary.html#attention-mask>`__
position_ids (:obj:`torch.LongTensor` of shape :obj:`({0})`, `optional`):
Indices of positions of each input sequence tokens in the position embeddings. Selected in the range ``[0,
config.max_position_embeddings - 1]``.
`What are position IDs? <../glossary.html#position-ids>`_
head_mask (:obj:`torch.FloatTensor` of shape :obj:`(num_heads,)` or :obj:`(num_layers, num_heads)`, `optional`):
Mask to nullify selected heads of the self-attention modules. Mask values selected in ``[0, 1]``:
- 1 indicates the head is **not masked**,
- 0 indicates the head is **masked**.
inputs_embeds (:obj:`torch.FloatTensor` of shape :obj:`({0}, hidden_size)`, `optional`):
Optionally, instead of passing :obj:`input_ids` you can choose to directly pass an embedded representation.
This is useful if you want more control over how to convert :obj:`input_ids` indices into associated
vectors than the model's internal embedding lookup matrix.
output_attentions (:obj:`bool`, `optional`):
Whether or not to return the attentions tensors of all attention layers. See ``attentions`` under returned
tensors for more detail.
output_hidden_states (:obj:`bool`, `optional`):
Whether or not to return the hidden states of all layers. See ``hidden_states`` under returned tensors for
more detail.
return_dict (:obj:`bool`, `optional`):
Whether or not to return a :class:`~transformers.file_utils.ModelOutput` instead of a plain tuple.
"""
@add_start_docstrings(
"The bare BertGeneration model transformer outputting raw hidden-states without any specific head on top.",
BERT_GENERATION_START_DOCSTRING,
)
class BertGenerationEncoder(BertGenerationPreTrainedModel):
def __init__(self, config):
super().__init__(config)
self.config = config
self.embeddings = BertGenerationEmbeddings(config)
self.encoder = BertEncoder(config)
self.init_weights()
def get_input_embeddings(self):
return self.embeddings.word_embeddings
def set_input_embeddings(self, value):
self.embeddings.word_embeddings = value
def _prune_heads(self, heads_to_prune):
for layer, heads in heads_to_prune.items():
self.encoder.layer[layer].attention.prune_heads(heads)
@add_start_docstrings_to_model_forward(BERT_GENERATION_INPUTS_DOCSTRING.format("batch_size, sequence_length"))
@add_code_sample_docstrings(
tokenizer_class=_TOKENIZER_FOR_DOC,
checkpoint="google/bert_for_seq_generation_L-24_bbc_encoder",
output_type=BaseModelOutputWithPastAndCrossAttentions,
config_class=_CONFIG_FOR_DOC,
)
def forward(
self,
input_ids=None,
attention_mask=None,
position_ids=None,
head_mask=None,
inputs_embeds=None,
encoder_hidden_states=None,
encoder_attention_mask=None,
past_key_values=None,
use_cache=None,
output_attentions=None,
output_hidden_states=None,
return_dict=None,
):
output_attentions = output_attentions if output_attentions is not None else self.config.output_attentions
output_hidden_states = (
output_hidden_states if output_hidden_states is not None else self.config.output_hidden_states
)
return_dict = return_dict if return_dict is not None else self.config.use_return_dict
if self.config.is_decoder:
use_cache = use_cache if use_cache is not None else self.config.use_cache
else:
use_cache = False
if input_ids is not None and inputs_embeds is not None:
raise ValueError("You cannot specify both input_ids and inputs_embeds at the same time")
elif input_ids is not None:
input_shape = input_ids.size()
batch_size, seq_length = input_shape
elif inputs_embeds is not None:
input_shape = inputs_embeds.size()[:-1]
batch_size, seq_length = input_shape
else:
raise ValueError("You have to specify either input_ids or inputs_embeds")
device = input_ids.device if input_ids is not None else inputs_embeds.device
# past_key_values_length
past_key_values_length = past_key_values[0][0].shape[2] if past_key_values is not None else 0
if attention_mask is None:
attention_mask = torch.ones(((batch_size, seq_length + past_key_values_length)), device=device)
# We can provide a self-attention mask of dimensions [batch_size, from_seq_length, to_seq_length]
# ourselves in which case we just need to make it broadcastable to all heads.
extended_attention_mask = None
if not use_cache:
extended_attention_mask: torch.Tensor = self.get_extended_attention_mask(
attention_mask, input_shape, device
)
# If a 2D or 3D attention mask is provided for the cross-attention
# we need to make broadcastable to [batch_size, num_heads, seq_length, seq_length]
if self.config.is_decoder and encoder_hidden_states is not None:
encoder_batch_size, encoder_sequence_length, _ = encoder_hidden_states.size()
encoder_hidden_shape = (encoder_batch_size, encoder_sequence_length)
if encoder_attention_mask is None:
encoder_attention_mask = torch.ones(encoder_hidden_shape, device=device)
encoder_extended_attention_mask = self.invert_attention_mask(encoder_attention_mask)
else:
encoder_extended_attention_mask = None
# Prepare head mask if needed
# 1.0 in head_mask indicate we keep the head
# attention_probs has shape bsz x n_heads x N x N
# input head_mask has shape [num_heads] or [num_hidden_layers x num_heads]
# and head_mask is converted to shape [num_hidden_layers x batch x num_heads x seq_length x seq_length]
head_mask = self.get_head_mask(head_mask, self.config.num_hidden_layers)
embedding_output = self.embeddings(
input_ids=input_ids,
position_ids=position_ids,
inputs_embeds=inputs_embeds,
past_key_values_length=past_key_values_length,
)
encoder_outputs = self.encoder(
embedding_output,
attention_mask=extended_attention_mask,
head_mask=head_mask,
encoder_hidden_states=encoder_hidden_states,
encoder_attention_mask=encoder_extended_attention_mask,
past_key_values=past_key_values,
use_cache=use_cache,
output_attentions=output_attentions,
output_hidden_states=output_hidden_states,
return_dict=return_dict,
)
sequence_output = encoder_outputs[0]
if not return_dict:
return (sequence_output,) + encoder_outputs[1:]
return BaseModelOutputWithPastAndCrossAttentions(
last_hidden_state=sequence_output,
past_key_values=encoder_outputs.past_key_values,
hidden_states=encoder_outputs.hidden_states,
attentions=encoder_outputs.attentions,
cross_attentions=encoder_outputs.cross_attentions,
)
class BertGenerationOnlyLMHead(nn.Module):
def __init__(self, config):
super().__init__()
self.decoder = nn.Linear(config.hidden_size, config.vocab_size, bias=False)
self.bias = nn.Parameter(torch.zeros(config.vocab_size))
# Need a link between the two variables so that the bias is correctly resized with `resize_token_embeddings`
self.decoder.bias = self.bias
def forward(self, hidden_states):
logits = self.decoder(hidden_states)
return logits
@add_start_docstrings(
"""BertGeneration Model with a `language modeling` head on top for CLM fine-tuning. """,
BERT_GENERATION_START_DOCSTRING,
)
class BertGenerationDecoder(BertGenerationPreTrainedModel):
def __init__(self, config):
super().__init__(config)
if not config.is_decoder:
logger.warn("If you want to use `BertGenerationDecoder` as a standalone, add `is_decoder=True.`")
self.bert = BertGenerationEncoder(config)
self.lm_head = BertGenerationOnlyLMHead(config)
self.init_weights()
def get_output_embeddings(self):
return self.lm_head.decoder
def set_output_embeddings(self, new_embeddings):
self.lm_head.decoder = new_embeddings
@add_start_docstrings_to_model_forward(BERT_GENERATION_INPUTS_DOCSTRING.format("batch_size, sequence_length"))
@replace_return_docstrings(output_type=CausalLMOutputWithCrossAttentions, config_class=_CONFIG_FOR_DOC)
def forward(
self,
input_ids=None,
attention_mask=None,
position_ids=None,
head_mask=None,
inputs_embeds=None,
encoder_hidden_states=None,
encoder_attention_mask=None,
labels=None,
past_key_values=None,
use_cache=None,
output_attentions=None,
output_hidden_states=None,
return_dict=None,
):
return_dict = return_dict if return_dict is not None else self.config.use_return_dict
if labels is not None:
use_cache = False
outputs = self.bert(
input_ids,
attention_mask=attention_mask,
position_ids=position_ids,
head_mask=head_mask,
inputs_embeds=inputs_embeds,
encoder_hidden_states=encoder_hidden_states,
encoder_attention_mask=encoder_attention_mask,
past_key_values=past_key_values,
use_cache=use_cache,
output_attentions=output_attentions,
output_hidden_states=output_hidden_states,
return_dict=return_dict,
)
sequence_output = outputs[0]
prediction_scores = self.lm_head(sequence_output)
lm_loss = None
if labels is not None:
# we are doing next-token prediction; shift prediction scores and input ids by one
shifted_prediction_scores = prediction_scores[:, :-1, :].contiguous()
labels = labels[:, 1:].contiguous()
loss_fct = CrossEntropyLoss()
lm_loss = loss_fct(shifted_prediction_scores.view(-1, self.config.vocab_size), labels.view(-1))
if not return_dict:
output = (prediction_scores,) + outputs[1:]
return ((lm_loss,) + output) if lm_loss is not None else output
return CausalLMOutputWithCrossAttentions(
loss=lm_loss,
logits=prediction_scores,
past_key_values=outputs.past_key_values,
hidden_states=outputs.hidden_states,
attentions=outputs.attentions,
cross_attentions=outputs.cross_attentions,
)
def prepare_inputs_for_generation(self, input_ids, past=None, attention_mask=None, **model_kwargs):
input_shape = input_ids.shape
# if model is used as a decoder in encoder-decoder model, the decoder attention mask is created on the fly
if attention_mask is None:
attention_mask = input_ids.new_ones(input_shape)
# cut decoder_input_ids if past is used
if past is not None:
input_ids = input_ids[:, -1:]
return {"input_ids": input_ids, "attention_mask": attention_mask}
def _reorder_cache(self, past, beam_idx):
reordered_past = ()
for layer_past in past:
reordered_past += (tuple(past_state.index_select(0, beam_idx) for past_state in layer_past),)
return reordered_past
| true | true |
f738915be11f76308153be10d6c77fe1d1a29212 | 13,254 | py | Python | PyYaMusic/track.py | AlexRoar/YaMusic-Python | d709f8920ab4c66dafae9b5bcb8e623512d60b3a | [
"MIT"
] | 2 | 2020-07-14T08:15:34.000Z | 2020-08-25T07:13:25.000Z | PyYaMusic/track.py | AlexRoar/YaMusic-Python | d709f8920ab4c66dafae9b5bcb8e623512d60b3a | [
"MIT"
] | null | null | null | PyYaMusic/track.py | AlexRoar/YaMusic-Python | d709f8920ab4c66dafae9b5bcb8e623512d60b3a | [
"MIT"
] | null | null | null | # Copyright (c) 2019.
# Designed and codded with love by Aleksander Dremov
#
#
import json
import random
from PyYaMusic import obfuscYandex
from mutagen.mp3 import MP3
from mutagen.id3 import ID3, APIC, error, USLT, TCON, TDRC
from mutagen.easyid3 import EasyID3
import os, urllib.parse
from pydub import AudioSegment
from pydub.playback import play
import requests
class Track:
def __init__(self, default_path='cache/'):
if len(default_path) != 0:
if default_path[-1] != '':
default_path += '/'
if not os.path.isdir(default_path):
os.makedirs(default_path)
self.default_path = default_path
self.tracks = [] # Tuples (track id, album id)
self.headers = {
'Pragma': 'no-cache',
'Accept-Encoding': 'gzip, deflate, br',
'X-Current-UID': '227207001',
'Accept-Language': 'ru-RU,ru;q=0.9,en-US;q=0.8,en;q=0.7',
'User-Agent': 'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_14_2) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/70.0.3538.77 Safari/537.36',
'Accept': 'application/json, text/javascript, */*; q=0.01',
'Cache-Control': 'no-cache',
'X-Requested-With': 'XMLHttpRequest',
'Connection': 'keep-alive'
}
def search(self, text):
self.tracks = []
params = (
('text', text),
('type', 'tracks'),
('ncrnd', '0.22174742529705926'),
('lang', 'ru'),
('external-domain', 'music.yandex.ru'),
('overembed', 'false'),
)
response = requests.get('https://music.yandex.ru/handlers/music-search.jsx', headers=self.headers,
params=params,
cookies={})
data = response.json()['tracks']['items']
for i in data:
self.tracks.append((i['id'], i['albums'][0]['id']))
if self.tracks == []:
print('Not found warning: ' + text)
return self.tracks
def load_data(self, track_id, album_id):
params_inf = (
('track', str(str(track_id) + ':' + str(album_id))),
('lang', 'ru'),
('external-domain', 'music.yandex.ru'),
('overembed', 'false'),
('ncrnd', '0.13835443477395826'),
)
info = requests.get('https://music.yandex.ru/handlers/track.jsx', headers=self.headers, params=params_inf,
cookies={})
info = json.loads(info.text)
return info
def load_data_tuple(self, trackalbum):
return self.load_data(trackalbum[0], trackalbum[1])
def download(self, track_id, album_id, name='auto', rewrite=False):
track_id = str(track_id)
album_id = str(album_id)
link = self.getDownloadLink(track_id, album_id)
response = requests.get(link[0], headers=self.headers, params=link[1])
name = urllib.parse.quote(name, safe='')
if (name == 'auto'):
info = self.load_data(track_id, album_id)
name = info['artists'][0]['name'] + '_' + info['track']['title'] + str(track_id) + '.mp3'
else:
if name.split('.')[-1] != 'mp3':
name = name + '.mp3'
# print(name)
name = name.replace(' ', '_')
name = urllib.parse.quote(name, safe='')
if (os.path.isfile(self.default_path + name) and not rewrite):
print('\nPending... ' + name)
return 201
name = self.default_path + name
audio = open(name, 'wb')
audio.write(response.content)
audio.close()
info = self.getTrackInfo(track_id, album_id)
# print('https://' + (info['track']['coverUri'][:-2]) + 'm1000x1000')
headers = {
'Referer': 'https://music.yandex.ru/album/' + album_id + '/track/' + track_id,
'User-Agent': 'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_14_4) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/74.0.3729.108 Safari/537.36',
}
response = requests.get('https://' + (info['track']['coverUri'][:-2]) + 'm1000x1000',
headers=headers)
audio = MP3(name, ID3=ID3)
try:
audio.add_tags()
except error:
pass
audio.tags.add(
APIC(
encoding=3, # 3 is for utf-8
mime='image/png', # image/jpeg or image/png
type=3, # 3 is for the cover image
desc=u'Cover',
data=response.content
)
)
try:
if (info['track']['lyricsAvailable'] and len(info['lyric']) != 0):
audio.tags.add(USLT(encoding=3, lang=u'eng', desc=u'desc', text=info['lyric'][0]['fullLyrics']))
except:
pass
try:
audio.tags.add(TCON(encoding=3, text=u'' + str(info['track']['albums'][0]['genre'])))
except:
pass
try:
audio.tags.add(TDRC(encoding=3, text=u'' + str(info['track']['albums'][0]['year'])))
except:
pass
audio.save()
audio = EasyID3(name)
audio['title'] = info['track']['title']
audio['artist'] = info['track']['artists'][0]['name']
audio['album'] = info['track']['albums'][0]['title']
audio['composer'] = u"" # clear
audio.save()
# print(json.dumps(info))
return response.status_code
def downloadFirst(self, name='auto'):
if (len(self.tracks) == 0):
raise Exception('No tracks found')
else:
self.download(self.tracks[0][0], self.tracks[0][1], name=name)
def metadataForFirst(self, path):
if (len(self.tracks) == 0):
raise Exception('No tracks found')
else:
track_id = self.tracks[0][0]
album_id = self.tracks[0][1]
track_id = str(track_id)
album_id = str(album_id)
info = self.getTrackInfo(track_id, album_id)
headers = {
'Referer': 'https://music.yandex.ru/album/' + album_id + '/track/' + track_id,
'User-Agent': 'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_14_4) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/74.0.3729.108 Safari/537.36',
}
try:
audio = MP3(path, ID3=ID3)
audio.delete()
try:
audio.add_tags()
except error:
pass
try:
response = requests.get('https://' + (info['track']['coverUri'][:-2]) + 'm1000x1000',
headers=headers)
audio.tags.add(
APIC(
encoding=3, # 3 is for utf-8
mime='image/png', # image/jpeg or image/png
type=3, # 3 is for the cover image
desc=u'Cover',
data=response.content
)
)
except:
print('Cover error: ' + path)
if (info['track']['lyricsAvailable']):
audio.tags.add(USLT(encoding=3, lang=u'eng', desc=u'desc', text=info['lyric'][0]['fullLyrics']))
audio.tags.add(TCON(encoding=3, text=u'' + str(info['track']['albums'][0]['genre'])))
audio.tags.add(TDRC(encoding=3, text=u'' + str(info['track']['albums'][0]['year'])))
audio.save()
except:
print('Error occurred with metadata for ' + path)
try:
audio = EasyID3(path)
audio['title'] = info['track']['title']
audio['artist'] = info['track']['artists'][0]['name']
audio['album'] = info['track']['albums'][0]['title']
audio['composer'] = u"" # clear
audio.save()
except:
print('Error occurred with metadata for ' + path)
def getDownloadLink(self, track_id, album_id):
track_id = str(track_id)
album_id = str(album_id)
__t = str(random.randint(1500000000000, 10000000000000))
headers = {
'Pragma': 'no-cache',
'Accept-Encoding': 'gzip, deflate, br',
'X-Retpath-Y': 'https%3A%2F%2Fmusic.yandex.ru%2Falbum%2F1672742%2Ftrack%2F10294529',
'X-Current-UID': '227207001',
'Accept-Language': 'ru-RU,ru;q=0.9,en-US;q=0.8,en;q=0.7',
'User-Agent': 'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_14_2) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/70.0.3538.77 Safari/537.36',
'Accept': 'application/json; q=1.0, text/*; q=0.8, */*; q=0.1',
'Cache-Control': 'no-cache',
'X-Requested-With': 'XMLHttpRequest',
'Connection': 'keep-alive',
'Referer': 'https://music.yandex.ru/album/' + track_id + '/track/' + album_id,
}
params = (
('hq', '1'),
('strm', '0'),
('external-domain', 'music.yandex.ru'),
('overembed', 'no'),
('__t', __t),
)
response = requests.get(
'https://music.yandex.ru/api/v2.1/handlers/track/' + track_id + ':' + album_id + '/web-album_track-track-track-fridge/download/m',
headers=headers, params=params, cookies={})
try:
result = json.loads(response.text)
src = result['src']
except:
print('Undone: ' + track_id + ' ' + album_id)
params = (
('sign', '9ba9f320a83d37549d4f3ba69dd8386f416aa3f63a51e7579f0d85b95736f6a9'),
('ts', '5c310e84'),
('format', 'json'),
('external-domain', 'music.yandex.ru'),
('overembed', 'no'),
('__t', __t),
)
headers = {
'Origin': 'https://music.yandex.ru',
'Accept-Encoding': 'identity;q=1, *;q=0',
'User-Agent': 'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_14_2) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/70.0.3538.77 Safari/537.36',
'Range': 'bytes=0-',
'chrome-proxy': 'frfr',
}
response = requests.get(src, headers=headers, params=params)
result = json.loads(response.text)
params = (
('track-id', track_id),
('play', 'false'),
)
path_end = 'https://' + result['host'] + '/get-mp3/' + obfuscYandex.obfuscateYandex(
result['path'][1:] + result['s']) + '/' + result['ts'] + result['path']
return (path_end, params)
def getFirstDownloadLink(self):
if (len(self.tracks) == 0):
raise Exception('No tracks found')
else:
return self.getDownloadLink(self.tracks[0][0], self.tracks[0][1])
def generateLinkWithParams(self, link, params):
if (len(params) != 0):
link += '?'
for i in params:
link += i[0] + '=' + i[1] + '&'
link = link[:-1]
return link
def playFirst(self):
if (len(self.tracks) == 0):
raise Exception('No tracks found')
link = self.getFirstDownloadLink()
link = self.generateLinkWithParams(link[0], link[1])
self.downloadFirst('cache')
self.playMusic(self.default_path + 'cache.mp3')
def playByids(self, track_id, album_id):
link = self.getDownloadLink(track_id, album_id)
link = self.generateLinkWithParams(link[0], link[1])
p = self.default_path
if not os.path.exists("cache/"):
os.mkdir('cache/')
self.default_path = 'cache/'
self.download(track_id, album_id, 'cache', True)
self.default_path = p
self.playMusic('cache/cache.mp3')
def getTrackInfo(self, track_id, album_id):
cookies = {}
headers = {
'Accept-Encoding': 'gzip, deflate, br',
'X-Retpath-Y': 'https://music.yandex.ru/album/' + str(album_id) + '/track/' + str(track_id),
'Accept-Language': 'ru-RU,ru;q=0.9,en-US;q=0.8,en;q=0.7',
'User-Agent': 'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_14_4) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/74.0.3729.108 Safari/537.36',
'X-Current-UID': '227207001',
'Accept': 'application/json, text/javascript, */*; q=0.01',
'Referer': 'https://music.yandex.ru/album/' + str(album_id) + '/track/' + str(track_id),
'X-Requested-With': 'XMLHttpRequest',
'Connection': 'keep-alive',
}
params = (
('track', str(track_id) + ':' + str(album_id)),
('lang', 'en'),
('external-domain', 'music.yandex.ru'),
('overembed', 'false'),
('ncrnd', '0.33253286876165544'),
)
response = requests.get('https://music.yandex.ru/handlers/track.jsx', headers=headers, params=params,
cookies=cookies)
return json.loads(response.text)
def playMusic(self, path):
song = AudioSegment.from_mp3(path)
play(song)
| 38.086207 | 154 | 0.514411 |
import json
import random
from PyYaMusic import obfuscYandex
from mutagen.mp3 import MP3
from mutagen.id3 import ID3, APIC, error, USLT, TCON, TDRC
from mutagen.easyid3 import EasyID3
import os, urllib.parse
from pydub import AudioSegment
from pydub.playback import play
import requests
class Track:
def __init__(self, default_path='cache/'):
if len(default_path) != 0:
if default_path[-1] != '':
default_path += '/'
if not os.path.isdir(default_path):
os.makedirs(default_path)
self.default_path = default_path
self.tracks = []
self.headers = {
'Pragma': 'no-cache',
'Accept-Encoding': 'gzip, deflate, br',
'X-Current-UID': '227207001',
'Accept-Language': 'ru-RU,ru;q=0.9,en-US;q=0.8,en;q=0.7',
'User-Agent': 'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_14_2) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/70.0.3538.77 Safari/537.36',
'Accept': 'application/json, text/javascript, */*; q=0.01',
'Cache-Control': 'no-cache',
'X-Requested-With': 'XMLHttpRequest',
'Connection': 'keep-alive'
}
def search(self, text):
self.tracks = []
params = (
('text', text),
('type', 'tracks'),
('ncrnd', '0.22174742529705926'),
('lang', 'ru'),
('external-domain', 'music.yandex.ru'),
('overembed', 'false'),
)
response = requests.get('https://music.yandex.ru/handlers/music-search.jsx', headers=self.headers,
params=params,
cookies={})
data = response.json()['tracks']['items']
for i in data:
self.tracks.append((i['id'], i['albums'][0]['id']))
if self.tracks == []:
print('Not found warning: ' + text)
return self.tracks
def load_data(self, track_id, album_id):
params_inf = (
('track', str(str(track_id) + ':' + str(album_id))),
('lang', 'ru'),
('external-domain', 'music.yandex.ru'),
('overembed', 'false'),
('ncrnd', '0.13835443477395826'),
)
info = requests.get('https://music.yandex.ru/handlers/track.jsx', headers=self.headers, params=params_inf,
cookies={})
info = json.loads(info.text)
return info
def load_data_tuple(self, trackalbum):
return self.load_data(trackalbum[0], trackalbum[1])
def download(self, track_id, album_id, name='auto', rewrite=False):
track_id = str(track_id)
album_id = str(album_id)
link = self.getDownloadLink(track_id, album_id)
response = requests.get(link[0], headers=self.headers, params=link[1])
name = urllib.parse.quote(name, safe='')
if (name == 'auto'):
info = self.load_data(track_id, album_id)
name = info['artists'][0]['name'] + '_' + info['track']['title'] + str(track_id) + '.mp3'
else:
if name.split('.')[-1] != 'mp3':
name = name + '.mp3'
name = name.replace(' ', '_')
name = urllib.parse.quote(name, safe='')
if (os.path.isfile(self.default_path + name) and not rewrite):
print('\nPending... ' + name)
return 201
name = self.default_path + name
audio = open(name, 'wb')
audio.write(response.content)
audio.close()
info = self.getTrackInfo(track_id, album_id)
headers = {
'Referer': 'https://music.yandex.ru/album/' + album_id + '/track/' + track_id,
'User-Agent': 'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_14_4) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/74.0.3729.108 Safari/537.36',
}
response = requests.get('https://' + (info['track']['coverUri'][:-2]) + 'm1000x1000',
headers=headers)
audio = MP3(name, ID3=ID3)
try:
audio.add_tags()
except error:
pass
audio.tags.add(
APIC(
encoding=3,
mime='image/png',
type=3,
desc=u'Cover',
data=response.content
)
)
try:
if (info['track']['lyricsAvailable'] and len(info['lyric']) != 0):
audio.tags.add(USLT(encoding=3, lang=u'eng', desc=u'desc', text=info['lyric'][0]['fullLyrics']))
except:
pass
try:
audio.tags.add(TCON(encoding=3, text=u'' + str(info['track']['albums'][0]['genre'])))
except:
pass
try:
audio.tags.add(TDRC(encoding=3, text=u'' + str(info['track']['albums'][0]['year'])))
except:
pass
audio.save()
audio = EasyID3(name)
audio['title'] = info['track']['title']
audio['artist'] = info['track']['artists'][0]['name']
audio['album'] = info['track']['albums'][0]['title']
audio['composer'] = u""
audio.save()
return response.status_code
def downloadFirst(self, name='auto'):
if (len(self.tracks) == 0):
raise Exception('No tracks found')
else:
self.download(self.tracks[0][0], self.tracks[0][1], name=name)
def metadataForFirst(self, path):
if (len(self.tracks) == 0):
raise Exception('No tracks found')
else:
track_id = self.tracks[0][0]
album_id = self.tracks[0][1]
track_id = str(track_id)
album_id = str(album_id)
info = self.getTrackInfo(track_id, album_id)
headers = {
'Referer': 'https://music.yandex.ru/album/' + album_id + '/track/' + track_id,
'User-Agent': 'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_14_4) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/74.0.3729.108 Safari/537.36',
}
try:
audio = MP3(path, ID3=ID3)
audio.delete()
try:
audio.add_tags()
except error:
pass
try:
response = requests.get('https://' + (info['track']['coverUri'][:-2]) + 'm1000x1000',
headers=headers)
audio.tags.add(
APIC(
encoding=3,
mime='image/png',
type=3,
desc=u'Cover',
data=response.content
)
)
except:
print('Cover error: ' + path)
if (info['track']['lyricsAvailable']):
audio.tags.add(USLT(encoding=3, lang=u'eng', desc=u'desc', text=info['lyric'][0]['fullLyrics']))
audio.tags.add(TCON(encoding=3, text=u'' + str(info['track']['albums'][0]['genre'])))
audio.tags.add(TDRC(encoding=3, text=u'' + str(info['track']['albums'][0]['year'])))
audio.save()
except:
print('Error occurred with metadata for ' + path)
try:
audio = EasyID3(path)
audio['title'] = info['track']['title']
audio['artist'] = info['track']['artists'][0]['name']
audio['album'] = info['track']['albums'][0]['title']
audio['composer'] = u""
audio.save()
except:
print('Error occurred with metadata for ' + path)
def getDownloadLink(self, track_id, album_id):
track_id = str(track_id)
album_id = str(album_id)
__t = str(random.randint(1500000000000, 10000000000000))
headers = {
'Pragma': 'no-cache',
'Accept-Encoding': 'gzip, deflate, br',
'X-Retpath-Y': 'https%3A%2F%2Fmusic.yandex.ru%2Falbum%2F1672742%2Ftrack%2F10294529',
'X-Current-UID': '227207001',
'Accept-Language': 'ru-RU,ru;q=0.9,en-US;q=0.8,en;q=0.7',
'User-Agent': 'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_14_2) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/70.0.3538.77 Safari/537.36',
'Accept': 'application/json; q=1.0, text/*; q=0.8, */*; q=0.1',
'Cache-Control': 'no-cache',
'X-Requested-With': 'XMLHttpRequest',
'Connection': 'keep-alive',
'Referer': 'https://music.yandex.ru/album/' + track_id + '/track/' + album_id,
}
params = (
('hq', '1'),
('strm', '0'),
('external-domain', 'music.yandex.ru'),
('overembed', 'no'),
('__t', __t),
)
response = requests.get(
'https://music.yandex.ru/api/v2.1/handlers/track/' + track_id + ':' + album_id + '/web-album_track-track-track-fridge/download/m',
headers=headers, params=params, cookies={})
try:
result = json.loads(response.text)
src = result['src']
except:
print('Undone: ' + track_id + ' ' + album_id)
params = (
('sign', '9ba9f320a83d37549d4f3ba69dd8386f416aa3f63a51e7579f0d85b95736f6a9'),
('ts', '5c310e84'),
('format', 'json'),
('external-domain', 'music.yandex.ru'),
('overembed', 'no'),
('__t', __t),
)
headers = {
'Origin': 'https://music.yandex.ru',
'Accept-Encoding': 'identity;q=1, *;q=0',
'User-Agent': 'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_14_2) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/70.0.3538.77 Safari/537.36',
'Range': 'bytes=0-',
'chrome-proxy': 'frfr',
}
response = requests.get(src, headers=headers, params=params)
result = json.loads(response.text)
params = (
('track-id', track_id),
('play', 'false'),
)
path_end = 'https://' + result['host'] + '/get-mp3/' + obfuscYandex.obfuscateYandex(
result['path'][1:] + result['s']) + '/' + result['ts'] + result['path']
return (path_end, params)
def getFirstDownloadLink(self):
if (len(self.tracks) == 0):
raise Exception('No tracks found')
else:
return self.getDownloadLink(self.tracks[0][0], self.tracks[0][1])
def generateLinkWithParams(self, link, params):
if (len(params) != 0):
link += '?'
for i in params:
link += i[0] + '=' + i[1] + '&'
link = link[:-1]
return link
def playFirst(self):
if (len(self.tracks) == 0):
raise Exception('No tracks found')
link = self.getFirstDownloadLink()
link = self.generateLinkWithParams(link[0], link[1])
self.downloadFirst('cache')
self.playMusic(self.default_path + 'cache.mp3')
def playByids(self, track_id, album_id):
link = self.getDownloadLink(track_id, album_id)
link = self.generateLinkWithParams(link[0], link[1])
p = self.default_path
if not os.path.exists("cache/"):
os.mkdir('cache/')
self.default_path = 'cache/'
self.download(track_id, album_id, 'cache', True)
self.default_path = p
self.playMusic('cache/cache.mp3')
def getTrackInfo(self, track_id, album_id):
    """Fetch track metadata from the music.yandex.ru ``track.jsx`` handler.

    Returns the handler's JSON response decoded into a dict.
    """
    track_page = f'https://music.yandex.ru/album/{album_id}/track/{track_id}'
    # Headers mimic a desktop Chrome session browsing the track page.
    headers = {
        'Accept-Encoding': 'gzip, deflate, br',
        'X-Retpath-Y': track_page,
        'Accept-Language': 'ru-RU,ru;q=0.9,en-US;q=0.8,en;q=0.7',
        'User-Agent': 'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_14_4) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/74.0.3729.108 Safari/537.36',
        'X-Current-UID': '227207001',
        'Accept': 'application/json, text/javascript, */*; q=0.01',
        'Referer': track_page,
        'X-Requested-With': 'XMLHttpRequest',
        'Connection': 'keep-alive',
    }
    params = (
        ('track', f'{track_id}:{album_id}'),
        ('lang', 'en'),
        ('external-domain', 'music.yandex.ru'),
        ('overembed', 'false'),
        ('ncrnd', '0.33253286876165544'),
    )
    response = requests.get(
        'https://music.yandex.ru/handlers/track.jsx',
        headers=headers,
        params=params,
        cookies={},
    )
    return json.loads(response.text)
def playMusic(self, path):
    """Decode the MP3 file at *path* and play it synchronously."""
    play(AudioSegment.from_mp3(path))
| true | true |
f73891686c1bc1c1b3e745479b93018789e8326b | 3,355 | py | Python | xyz/tree.py | mrzl/Composition37XY | 92214b080666930c5643972cdbe9ddd856a64650 | [
"MIT"
] | 67 | 2015-12-27T09:22:39.000Z | 2022-01-23T14:26:11.000Z | xyz/tree.py | mrzl/Composition37XY | 92214b080666930c5643972cdbe9ddd856a64650 | [
"MIT"
] | 4 | 2016-02-06T17:46:51.000Z | 2018-02-26T00:31:34.000Z | xyz/tree.py | mrzl/Composition37XY | 92214b080666930c5643972cdbe9ddd856a64650 | [
"MIT"
] | 14 | 2015-12-25T12:52:08.000Z | 2020-03-16T20:52:44.000Z | class Tree(object):
def __init__(self, shapes):
    # Build a k-d tree over *shapes*: the root starts as one leaf holding
    # everything, and split() recursively partitions it along axis-aligned
    # planes chosen from the shapes' bounding boxes.
    self.root = Node(shapes)
    self.root.split()
def intersect(self, o, d, tmin, tmax):
    """Return the nearest hit parameter t in [tmin, tmax] for the ray with
    origin *o* and direction *d*, or None if nothing is hit."""
    return self.root.intersect(o, d, tmin, tmax)
class Node(object):
def __init__(self, shapes):
    # A node starts as a leaf holding its shape list; split() may later
    # turn it into an interior node with a split plane and two children.
    self.shapes = shapes
    self.axis = None   # split axis (0/1/2) once interior; None while a leaf
    self.point = None  # split-plane coordinate along self.axis
    self.left = None
    self.right = None
def intersect(self, o, d, tmin, tmax):
    """Recursively intersect ray (o, d) with this subtree over [tmin, tmax].

    Returns the smallest hit parameter t, or None for a miss.
    """
    axis = self.axis
    point = self.point
    # Leaf: brute-force the contained shapes.
    if axis is None:
        return self.intersect_shapes(o, d)
    # NOTE(review): a ray parallel to the split plane is reported as a
    # miss here rather than traversing the side containing the origin —
    # confirm this is intended.
    if d[axis] == 0:
        return None
    # Parameter at which the ray crosses the split plane.
    tsplit = (point - o[axis]) / d[axis]
    # Visit the child containing the ray origin first.
    if (o[axis] < point) or (o[axis] == point and d[axis] <= 0):
        first, second = self.left, self.right
    else:
        first, second = self.right, self.left
    if tsplit > tmax or tsplit <= 0:
        # Plane crossing lies outside the interval: only the near side matters.
        return first.intersect(o, d, tmin, tmax)
    elif tsplit < tmin:
        # Interval starts beyond the plane: only the far side matters.
        return second.intersect(o, d, tmin, tmax)
    else:
        # Interval straddles the plane: try the near side, and only descend
        # into the far side if the near hit is beyond tsplit (or missing).
        inf = 1e9
        h1 = first.intersect(o, d, tmin, tsplit)
        if h1 is None:
            h1 = inf
        if h1 <= tsplit:
            result = h1
        else:
            h2 = second.intersect(o, d, tsplit, min(tmax, h1))
            if h2 is None:
                h2 = inf
            if h1 <= h2:
                result = h1
            else:
                result = h2
        # inf is the sentinel for "no hit on either side".
        if result >= inf:
            result = None
        return result
def intersect_shapes(self, o, d):
    """Brute-force a leaf: return the nearest hit among self.shapes, or None."""
    best = None
    for shape in self.shapes:
        t = shape.intersect(o, d)
        if t is not None and (best is None or t < best):
            best = t
    return best
def score(self, axis, point):
    """Cost of splitting at (axis, point): size of the larger side."""
    sides = self.partition(axis, point)
    return max(len(side) for side in sides)
def partition(self, axis, point):
    """Split self.shapes by the plane ``coordinate[axis] == point``.

    A shape whose box starts at or below the plane goes left; one whose
    box ends at or above it goes right. Straddling shapes land in both
    lists, so the result may overlap.
    """
    left, right = [], []
    for shape in self.shapes:
        lo, hi = shape.box()
        if lo[axis] <= point:
            left.append(shape)
        if hi[axis] >= point:
            right.append(shape)
    return left, right
def split(self, depth=0):
if len(self.shapes) < 8:
return
xs = []
ys = []
zs = []
for shape in self.shapes:
(x1, y1, z1), (x2, y2, z2) = shape.box()
xs.append(x1)
ys.append(y1)
zs.append(z1)
xs.append(x2)
ys.append(y2)
zs.append(z2)
xs = sorted(set(xs))
ys = sorted(set(ys))
zs = sorted(set(zs))
best = len(self.shapes) * 0.85
bestAxis = None
bestPoint = None
points = [xs[len(xs) / 2], ys[len(ys) / 2], zs[len(zs) / 2]]
for axis, point in enumerate(points):
score = self.score(axis, point)
if score < best:
best = score
bestAxis = axis
bestPoint = point
if bestAxis is None:
return
l, r = self.partition(bestAxis, bestPoint)
self.shapes = None
self.axis = bestAxis
self.point = bestPoint
self.left = Node(l)
self.right = Node(r)
self.left.split(depth + 1)
self.right.split(depth + 1)
| 29.690265 | 68 | 0.469747 | class Tree(object):
def __init__(self, shapes):
self.root = Node(shapes)
self.root.split()
def intersect(self, o, d, tmin, tmax):
return self.root.intersect(o, d, tmin, tmax)
class Node(object):
def __init__(self, shapes):
self.shapes = shapes
self.axis = None
self.point = None
self.left = None
self.right = None
def intersect(self, o, d, tmin, tmax):
axis = self.axis
point = self.point
if axis is None:
return self.intersect_shapes(o, d)
if d[axis] == 0:
return None
tsplit = (point - o[axis]) / d[axis]
if (o[axis] < point) or (o[axis] == point and d[axis] <= 0):
first, second = self.left, self.right
else:
first, second = self.right, self.left
if tsplit > tmax or tsplit <= 0:
return first.intersect(o, d, tmin, tmax)
elif tsplit < tmin:
return second.intersect(o, d, tmin, tmax)
else:
inf = 1e9
h1 = first.intersect(o, d, tmin, tsplit)
if h1 is None:
h1 = inf
if h1 <= tsplit:
result = h1
else:
h2 = second.intersect(o, d, tsplit, min(tmax, h1))
if h2 is None:
h2 = inf
if h1 <= h2:
result = h1
else:
result = h2
if result >= inf:
result = None
return result
def intersect_shapes(self, o, d):
ts = [x.intersect(o, d) for x in self.shapes]
ts = [x for x in ts if x is not None]
return min(ts) if ts else None
def score(self, axis, point):
left, right = self.partition(axis, point)
return max(len(left), len(right))
def partition(self, axis, point):
left = []
right = []
for shape in self.shapes:
a, b = shape.box()
v1 = a[axis]
v2 = b[axis]
if v1 <= point:
left.append(shape)
if v2 >= point:
right.append(shape)
return left, right
def split(self, depth=0):
if len(self.shapes) < 8:
return
xs = []
ys = []
zs = []
for shape in self.shapes:
(x1, y1, z1), (x2, y2, z2) = shape.box()
xs.append(x1)
ys.append(y1)
zs.append(z1)
xs.append(x2)
ys.append(y2)
zs.append(z2)
xs = sorted(set(xs))
ys = sorted(set(ys))
zs = sorted(set(zs))
best = len(self.shapes) * 0.85
bestAxis = None
bestPoint = None
points = [xs[len(xs) / 2], ys[len(ys) / 2], zs[len(zs) / 2]]
for axis, point in enumerate(points):
score = self.score(axis, point)
if score < best:
best = score
bestAxis = axis
bestPoint = point
if bestAxis is None:
return
l, r = self.partition(bestAxis, bestPoint)
self.shapes = None
self.axis = bestAxis
self.point = bestPoint
self.left = Node(l)
self.right = Node(r)
self.left.split(depth + 1)
self.right.split(depth + 1)
| true | true |
f738916df25cc5631054f7f6c953ee8ea8fa49b8 | 5,787 | py | Python | king_phisher/client/dialogs/login.py | chachabooboo/king-phisher | 8a91b9043de0f12b5cad9a5f1d64ebd0179a6c4d | [
"BSD-3-Clause"
] | 1,143 | 2015-01-12T15:05:16.000Z | 2020-04-12T16:10:19.000Z | king_phisher/client/dialogs/login.py | chachabooboo/king-phisher | 8a91b9043de0f12b5cad9a5f1d64ebd0179a6c4d | [
"BSD-3-Clause"
] | 399 | 2015-01-22T15:20:03.000Z | 2020-04-08T23:01:46.000Z | king_phisher/client/dialogs/login.py | chachabooboo/king-phisher | 8a91b9043de0f12b5cad9a5f1d64ebd0179a6c4d | [
"BSD-3-Clause"
] | 351 | 2015-02-02T21:39:38.000Z | 2020-03-21T11:45:20.000Z | #!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# king_phisher/client/dialogs/login.py
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following disclaimer
# in the documentation and/or other materials provided with the
# distribution.
# * Neither the name of the project nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#
import functools
from king_phisher.client import gui_utilities
from king_phisher.client.dialogs import about
from king_phisher.client.widget import extras
from king_phisher.client.widget import managers
from gi.repository import Gdk
from gi.repository import Gtk
__all__ = ('LoginDialog', 'SMTPLoginDialog', 'SSHLoginDialog')
class LoginDialogBase(gui_utilities.GladeGObject):
    """Shared scaffolding for the credential-prompt dialogs.

    Subclasses customize the heading via :attr:`label` and typically set a
    configuration key prefix so the entry widgets persist their values.
    """
    dependencies = gui_utilities.GladeDependencies(
        children=(
            'button_connect',
            'entry_server',
            'entry_username',
            'entry_password',
            'label_main'
        ),
        name='LoginDialogBase'
    )
    label = None
    top_gobject = 'dialog'
    def __init__(self, *args, **kwargs):
        super(LoginDialogBase, self).__init__(*args, **kwargs)
        # A subclass-provided label overrides the Glade default heading.
        if self.label is not None:
            self.gobjects['label_main'].set_text(self.label)
    def interact(self):
        """Show the dialog, persist its fields unless cancelled, and return the response."""
        self.dialog.show_all()
        response = self.dialog.run()
        if response == Gtk.ResponseType.CANCEL:
            self.dialog.destroy()
            return response
        self.objects_save_to_config()
        self.dialog.destroy()
        return response
    def signal_entry_activate(self, entry):
        # Pressing Enter in any entry behaves like clicking Connect.
        self.gobjects['button_connect'].emit('clicked')
class LoginDialog(LoginDialogBase):
    """
    This object is the main King Phisher login dialog, it is used to
    prompt for connection information for the King Phisher server.

    It allows the user to specify the host and port to connect to and
    credentials for authentication.
    """
    dependencies = gui_utilities.GladeDependencies(
        children=(
            'button_connect',
            'entry_server',
            'entry_server_username',
            'entry_server_password',
            'entry_server_one_time_password',
            'label_main',
            'label_server_one_time_password',
            'revealer_server_one_time_password',
            'spinbutton_server_remote_port',
            'switch_server_use_ssl'
        ),
        top_level=('PortAdjustment',)
    )
    def __init__(self, *args, **kwargs):
        super(LoginDialog, self).__init__(*args, **kwargs)
        # Right-click context menu offering the About dialog and config import.
        self.popup_menu = managers.MenuManager()
        self.popup_menu.append('About', lambda x: about.AboutDialog(self.application).interact())
        self.popup_menu.append('Import Configuration', self.signal_menuitem_activate_import_config)
        # setup server completion: seed the entry's autocomplete with
        # previously used server addresses from the configuration.
        model = Gtk.ListStore(str)
        for entry in self.config['server.history']:
            model.append((entry,))
        completion = Gtk.EntryCompletion()
        completion.set_model(model)
        completion.set_text_column(0)
        self.gobjects['entry_server'].set_completion(completion)
    def signal_button_pressed(self, _, event):
        # Only react to single right-clicks; everything else falls through.
        if not (event.type == Gdk.EventType.BUTTON_PRESS and event.button == Gdk.BUTTON_SECONDARY):
            return
        self.popup_menu.menu.popup(None, None, functools.partial(gui_utilities.gtk_menu_position, event), None, event.button, event.time)
        return True
    def signal_menuitem_activate_import_config(self, _):
        # Let the user pick a configuration file and merge it into the
        # running application, refreshing the dialog widgets on success.
        dialog = extras.FileChooserDialog('Import Configuration File', self.dialog)
        response = dialog.run_quick_open()
        dialog.destroy()
        if response is None:
            return
        config_path = response['target_path']
        try:
            self.application.merge_config(config_path, strict=False)
        except Exception:
            self.logger.warning('failed to merge configuration file: ' + config_path, exc_info=True)
            gui_utilities.show_dialog_error('Invalid Configuration File', self.dialog, 'Could not import the configuration file.')
        else:
            # Reload the widgets so imported values appear immediately.
            self.objects_load_from_config()
    def signal_switch_ssl(self, switch, _):
        # Track the conventional default port for the selected scheme.
        if switch.get_property('active'):
            self.gobjects['spinbutton_server_remote_port'].set_value(443)
        else:
            self.gobjects['spinbutton_server_remote_port'].set_value(80)
class SMTPLoginDialog(LoginDialogBase):
    """Credential prompt for connecting to an SMTP server.

    Reuses the base dialog, storing its field values under the ``smtp_``
    configuration prefix.
    """
    config_prefix = 'smtp_'
    label = 'SMTP Login'
class SSHLoginDialog(LoginDialogBase):
    """Credential prompt for connecting to an SSH server.

    Reuses the base dialog, storing its field values under the ``ssh_``
    configuration prefix.
    """
    config_prefix = 'ssh_'
    label = 'SSH Login'
| 35.286585 | 131 | 0.762744 |
import functools
from king_phisher.client import gui_utilities
from king_phisher.client.dialogs import about
from king_phisher.client.widget import extras
from king_phisher.client.widget import managers
from gi.repository import Gdk
from gi.repository import Gtk
__all__ = ('LoginDialog', 'SMTPLoginDialog', 'SSHLoginDialog')
class LoginDialogBase(gui_utilities.GladeGObject):
dependencies = gui_utilities.GladeDependencies(
children=(
'button_connect',
'entry_server',
'entry_username',
'entry_password',
'label_main'
),
name='LoginDialogBase'
)
label = None
top_gobject = 'dialog'
def __init__(self, *args, **kwargs):
super(LoginDialogBase, self).__init__(*args, **kwargs)
if self.label is not None:
self.gobjects['label_main'].set_text(self.label)
def interact(self):
self.dialog.show_all()
response = self.dialog.run()
if response != Gtk.ResponseType.CANCEL:
self.objects_save_to_config()
self.dialog.destroy()
return response
def signal_entry_activate(self, entry):
self.gobjects['button_connect'].emit('clicked')
class LoginDialog(LoginDialogBase):
dependencies = gui_utilities.GladeDependencies(
children=(
'button_connect',
'entry_server',
'entry_server_username',
'entry_server_password',
'entry_server_one_time_password',
'label_main',
'label_server_one_time_password',
'revealer_server_one_time_password',
'spinbutton_server_remote_port',
'switch_server_use_ssl'
),
top_level=('PortAdjustment',)
)
def __init__(self, *args, **kwargs):
super(LoginDialog, self).__init__(*args, **kwargs)
self.popup_menu = managers.MenuManager()
self.popup_menu.append('About', lambda x: about.AboutDialog(self.application).interact())
self.popup_menu.append('Import Configuration', self.signal_menuitem_activate_import_config)
model = Gtk.ListStore(str)
for entry in self.config['server.history']:
model.append((entry,))
completion = Gtk.EntryCompletion()
completion.set_model(model)
completion.set_text_column(0)
self.gobjects['entry_server'].set_completion(completion)
def signal_button_pressed(self, _, event):
if not (event.type == Gdk.EventType.BUTTON_PRESS and event.button == Gdk.BUTTON_SECONDARY):
return
self.popup_menu.menu.popup(None, None, functools.partial(gui_utilities.gtk_menu_position, event), None, event.button, event.time)
return True
def signal_menuitem_activate_import_config(self, _):
dialog = extras.FileChooserDialog('Import Configuration File', self.dialog)
response = dialog.run_quick_open()
dialog.destroy()
if response is None:
return
config_path = response['target_path']
try:
self.application.merge_config(config_path, strict=False)
except Exception:
self.logger.warning('failed to merge configuration file: ' + config_path, exc_info=True)
gui_utilities.show_dialog_error('Invalid Configuration File', self.dialog, 'Could not import the configuration file.')
else:
self.objects_load_from_config()
def signal_switch_ssl(self, switch, _):
if switch.get_property('active'):
self.gobjects['spinbutton_server_remote_port'].set_value(443)
else:
self.gobjects['spinbutton_server_remote_port'].set_value(80)
class SMTPLoginDialog(LoginDialogBase):
config_prefix = 'smtp_'
label = 'SMTP Login'
class SSHLoginDialog(LoginDialogBase):
config_prefix = 'ssh_'
label = 'SSH Login'
| true | true |
f7389198d497633059e288fdbf1378847c4c40e9 | 4,604 | py | Python | examples/functional_api/chapter_4.py | Ankur3107/zenml | 5dc05a833b50ac9cc49e851b9d91255da6016dfd | [
"Apache-2.0"
] | null | null | null | examples/functional_api/chapter_4.py | Ankur3107/zenml | 5dc05a833b50ac9cc49e851b9d91255da6016dfd | [
"Apache-2.0"
] | null | null | null | examples/functional_api/chapter_4.py | Ankur3107/zenml | 5dc05a833b50ac9cc49e851b9d91255da6016dfd | [
"Apache-2.0"
] | null | null | null | # Copyright (c) ZenML GmbH 2021. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at:
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
# or implied. See the License for the specific language governing
# permissions and limitations under the License.
import numpy as np
import tensorflow as tf
from sklearn.base import ClassifierMixin
from sklearn.linear_model import LogisticRegression
from zenml.integrations.constants import SKLEARN, TENSORFLOW
from zenml.pipelines import pipeline
from zenml.repository import Repository
from zenml.steps import BaseStepConfig, Output, step
class TrainerConfig(BaseStepConfig):
    """Hyper-parameters shared by the trainer steps (epochs, gamma, lr)."""
    epochs: int = 1
    gamma: float = 0.7
    lr: float = 0.001
@step
def importer_mnist() -> Output(
    X_train=np.ndarray, y_train=np.ndarray, X_test=np.ndarray, y_test=np.ndarray
):
    """Load the MNIST train/test split and emit the four arrays as artifacts."""
    train_split, test_split = tf.keras.datasets.mnist.load_data()
    X_train, y_train = train_split
    X_test, y_test = test_split
    return X_train, y_train, X_test, y_test
@step
def normalize_mnist(
    X_train: np.ndarray, X_test: np.ndarray
) -> Output(X_train_normed=np.ndarray, X_test_normed=np.ndarray):
    """Scale pixel intensities from [0, 255] into [0, 1] for both splits."""
    return X_train / 255.0, X_test / 255.0
@step
def tf_trainer(
    config: TrainerConfig,
    X_train: np.ndarray,
    y_train: np.ndarray,
) -> tf.keras.Model:
    """Build and fit a small dense classifier on the MNIST training split.

    Only ``config.epochs`` is consumed here; the returned object is the
    fitted Keras model.
    """
    layers = [
        tf.keras.layers.Flatten(input_shape=(28, 28)),
        tf.keras.layers.Dense(10, activation="relu"),
        tf.keras.layers.Dense(10),
    ]
    model = tf.keras.Sequential(layers)
    model.compile(
        optimizer=tf.keras.optimizers.Adam(0.001),
        loss=tf.keras.losses.SparseCategoricalCrossentropy(from_logits=True),
        metrics=["accuracy"],
    )
    model.fit(X_train, y_train, epochs=config.epochs)
    return model
@step
def tf_evaluator(
    X_test: np.ndarray,
    y_test: np.ndarray,
    model: tf.keras.Model,
) -> float:
    """Return the model's accuracy on the held-out test split."""
    _loss, accuracy = model.evaluate(X_test, y_test, verbose=2)
    return accuracy
@step
def sklearn_trainer(
    config: TrainerConfig,
    X_train: np.ndarray,
    y_train: np.ndarray,
) -> ClassifierMixin:
    """Fit an L1-regularized LogisticRegression on flattened MNIST images.

    Note: the previous docstring claimed an SVC was trained; the estimator
    is LogisticRegression. ``config`` is currently unused by this step.
    """
    clf = LogisticRegression(penalty="l1", solver="saga", tol=0.1)
    # Flatten (n, 28, 28) images into (n, 784) feature vectors.
    clf.fit(X_train.reshape((X_train.shape[0], -1)), y_train)
    return clf
@step
def sklearn_evaluator(
    X_test: np.ndarray,
    y_test: np.ndarray,
    model: ClassifierMixin,
) -> float:
    """Return the mean accuracy of *model* on the flattened test images."""
    flattened = X_test.reshape((X_test.shape[0], -1))
    return model.score(flattened, y_test)
@pipeline(required_integrations=[SKLEARN, TENSORFLOW])
def mnist_pipeline(
    importer,
    normalizer,
    trainer,
    evaluator,
):
    """Wire the steps: importer -> normalizer -> trainer -> evaluator."""
    X_train, y_train, X_test, y_test = importer()
    X_train_normed, X_test_normed = normalizer(X_train=X_train, X_test=X_test)
    model = trainer(X_train=X_train_normed, y_train=y_train)
    evaluator(X_test=X_test_normed, y_test=y_test, model=model)
# Assemble and execute a TensorFlow-backed run of the pipeline.
tf_p = mnist_pipeline(
    importer=importer_mnist(),
    normalizer=normalize_mnist(),
    trainer=tf_trainer(config=TrainerConfig(epochs=1)),
    evaluator=tf_evaluator(),
)
tf_p.run()
# Assemble and execute a second run backed by scikit-learn.
scikit_p = mnist_pipeline(
    importer=importer_mnist(),
    normalizer=normalize_mnist(),
    trainer=sklearn_trainer(config=TrainerConfig()),
    evaluator=sklearn_evaluator(),
)
scikit_p.run()
# Post execution flow: fetch the pipeline from the repository and report
# the evaluator accuracy of the two runs executed above.
repo = Repository()
p = repo.get_pipeline(pipeline_name="mnist_pipeline")
print(f"Pipeline `mnist_pipeline` has {len(p.runs)} run(s)")
for r in p.runs[0:2]:
    eval_step = r.get_step("evaluator")
    print(
        f"For {eval_step.entrypoint_name}, the accuracy is: "
        f"{eval_step.output.read():.2f}"
    )
| 26.767442 | 80 | 0.695482 |
import numpy as np
import tensorflow as tf
from sklearn.base import ClassifierMixin
from sklearn.linear_model import LogisticRegression
from zenml.integrations.constants import SKLEARN, TENSORFLOW
from zenml.pipelines import pipeline
from zenml.repository import Repository
from zenml.steps import BaseStepConfig, Output, step
class TrainerConfig(BaseStepConfig):
epochs: int = 1
gamma: float = 0.7
lr: float = 0.001
@step
def importer_mnist() -> Output(
X_train=np.ndarray, y_train=np.ndarray, X_test=np.ndarray, y_test=np.ndarray
):
(X_train, y_train), (
X_test,
y_test,
) = tf.keras.datasets.mnist.load_data()
return X_train, y_train, X_test, y_test
@step
def normalize_mnist(
X_train: np.ndarray, X_test: np.ndarray
) -> Output(X_train_normed=np.ndarray, X_test_normed=np.ndarray):
X_train_normed = X_train / 255.0
X_test_normed = X_test / 255.0
return X_train_normed, X_test_normed
@step
def tf_trainer(
config: TrainerConfig,
X_train: np.ndarray,
y_train: np.ndarray,
) -> tf.keras.Model:
model = tf.keras.Sequential(
[
tf.keras.layers.Flatten(input_shape=(28, 28)),
tf.keras.layers.Dense(10, activation="relu"),
tf.keras.layers.Dense(10),
]
)
model.compile(
optimizer=tf.keras.optimizers.Adam(0.001),
loss=tf.keras.losses.SparseCategoricalCrossentropy(from_logits=True),
metrics=["accuracy"],
)
model.fit(
X_train,
y_train,
epochs=config.epochs,
)
return model
@step
def tf_evaluator(
X_test: np.ndarray,
y_test: np.ndarray,
model: tf.keras.Model,
) -> float:
_, test_acc = model.evaluate(X_test, y_test, verbose=2)
return test_acc
@step
def sklearn_trainer(
config: TrainerConfig,
X_train: np.ndarray,
y_train: np.ndarray,
) -> ClassifierMixin:
clf = LogisticRegression(penalty="l1", solver="saga", tol=0.1)
clf.fit(X_train.reshape((X_train.shape[0], -1)), y_train)
return clf
@step
def sklearn_evaluator(
X_test: np.ndarray,
y_test: np.ndarray,
model: ClassifierMixin,
) -> float:
test_acc = model.score(X_test.reshape((X_test.shape[0], -1)), y_test)
return test_acc
@pipeline(required_integrations=[SKLEARN, TENSORFLOW])
def mnist_pipeline(
importer,
normalizer,
trainer,
evaluator,
):
X_train, y_train, X_test, y_test = importer()
X_trained_normed, X_test_normed = normalizer(X_train=X_train, X_test=X_test)
model = trainer(X_train=X_trained_normed, y_train=y_train)
evaluator(X_test=X_test_normed, y_test=y_test, model=model)
tf_p = mnist_pipeline(
importer=importer_mnist(),
normalizer=normalize_mnist(),
trainer=tf_trainer(config=TrainerConfig(epochs=1)),
evaluator=tf_evaluator(),
)
tf_p.run()
scikit_p = mnist_pipeline(
importer=importer_mnist(),
normalizer=normalize_mnist(),
trainer=sklearn_trainer(config=TrainerConfig()),
evaluator=sklearn_evaluator(),
)
scikit_p.run()
repo = Repository()
p = repo.get_pipeline(pipeline_name="mnist_pipeline")
print(f"Pipeline `mnist_pipeline` has {len(p.runs)} run(s)")
for r in p.runs[0:2]:
eval_step = r.get_step("evaluator")
print(
f"For {eval_step.entrypoint_name}, the accuracy is: "
f"{eval_step.output.read():.2f}"
)
| true | true |
f738928eed8d844a1268df7a89e8ce633f758e55 | 1,296 | py | Python | scripts/4_similarity.py | ersilia-os/osm-series4-candidates-2 | a0b7f55d79c65182dcc4c102791d2ababbfb176e | [
"MIT"
] | 1 | 2021-07-23T14:57:30.000Z | 2021-07-23T14:57:30.000Z | scripts/4_similarity.py | ersilia-os/osm-series4-candidates-2 | a0b7f55d79c65182dcc4c102791d2ababbfb176e | [
"MIT"
] | null | null | null | scripts/4_similarity.py | ersilia-os/osm-series4-candidates-2 | a0b7f55d79c65182dcc4c102791d2ababbfb176e | [
"MIT"
] | null | null | null | from __init__ import OUTPUT
from tqdm import tqdm
import pandas as pd
import numpy as np
from rdkit import Chem
from rdkit.Chem import AllChem
from rdkit.DataStructs import BulkTanimotoSimilarity
import os, sys
print("SIMILARITY SCORES")
def mols_to_fingerprints(molecules, radius=3, useCounts=False, useFeatures=True):
    """Compute a Morgan fingerprint for every molecule, with a progress bar.

    The defaults (radius 3, feature invariants, binary counts) match the
    pipeline's similarity-scoring configuration.
    """
    fingerprints = []
    for mol in tqdm(molecules):
        fingerprints.append(
            AllChem.GetMorganFingerprint(
                mol, radius, useCounts=useCounts, useFeatures=useFeatures
            )
        )
    return fingerprints
# Resolve the repository's data/raw directory relative to this script.
raw_folder = os.path.join(os.path.dirname(os.path.abspath(__file__)), "..", "data", "raw")
# NOTE(review): appending a data directory to sys.path looks unnecessary
# (nothing is imported from it) — confirm before removing.
sys.path.append(raw_folder)
# Load the series-4 reference molecules used for Tanimoto similarity.
s4 = pd.read_csv(os.path.join(raw_folder, "series4_processed.csv"))
s4_smiles = s4["smiles"].tolist()
s4_mols = [Chem.MolFromSmiles(smi) for smi in s4_smiles]
ref_fps=mols_to_fingerprints(s4_mols)
# Fingerprint every candidate from the previous pipeline stage.
df = pd.read_csv(os.path.join(OUTPUT, "data_3.csv"))
smiles=df["Smiles"].tolist()
mols = [Chem.MolFromSmiles(smi) for smi in tqdm(smiles)]
fps=mols_to_fingerprints(mols)
# For each candidate, keep its highest similarity to any series-4 reference.
sims = []
for fp in tqdm(fps):
    sim=BulkTanimotoSimilarity(fp, ref_fps)
    maxsim = np.max(sim)
    sims += [maxsim]
df["Similarity"]=sims
# Discard candidates too close to the known series (similarity > 0.70).
df=df[df["Similarity"] <= 0.70]
df.to_csv(os.path.join(OUTPUT, "data_4.csv"), index = False)
| 28.173913 | 90 | 0.733025 | from __init__ import OUTPUT
from tqdm import tqdm
import pandas as pd
import numpy as np
from rdkit import Chem
from rdkit.Chem import AllChem
from rdkit.DataStructs import BulkTanimotoSimilarity
import os, sys
print("SIMILARITY SCORES")
def mols_to_fingerprints(molecules, radius=3, useCounts=False, useFeatures=True):
fingerprints = [AllChem.GetMorganFingerprint(
mol,
radius,
useCounts=useCounts,
useFeatures=useFeatures
) for mol in tqdm(molecules)]
return fingerprints
raw_folder = os.path.join(os.path.dirname(os.path.abspath(__file__)), "..", "data", "raw")
sys.path.append(raw_folder)
s4 = pd.read_csv(os.path.join(raw_folder, "series4_processed.csv"))
s4_smiles = s4["smiles"].tolist()
s4_mols = [Chem.MolFromSmiles(smi) for smi in s4_smiles]
ref_fps=mols_to_fingerprints(s4_mols)
df = pd.read_csv(os.path.join(OUTPUT, "data_3.csv"))
smiles=df["Smiles"].tolist()
mols = [Chem.MolFromSmiles(smi) for smi in tqdm(smiles)]
fps=mols_to_fingerprints(mols)
sims = []
for fp in tqdm(fps):
sim=BulkTanimotoSimilarity(fp, ref_fps)
maxsim = np.max(sim)
sims += [maxsim]
df["Similarity"]=sims
df=df[df["Similarity"] <= 0.70]
df.to_csv(os.path.join(OUTPUT, "data_4.csv"), index = False)
| true | true |
f73892c10f8e2431f7809355be6604ff30a63386 | 1,106 | py | Python | caches/importer.py | AndreMiras/async-caches | d87ffc4d154617acc1622e21db536d9062be8cc4 | [
"BSD-3-Clause"
] | 29 | 2019-07-14T15:07:47.000Z | 2022-03-20T14:41:14.000Z | caches/importer.py | AndreMiras/async-caches | d87ffc4d154617acc1622e21db536d9062be8cc4 | [
"BSD-3-Clause"
] | 11 | 2019-07-14T03:29:08.000Z | 2021-06-13T11:27:30.000Z | caches/importer.py | AndreMiras/async-caches | d87ffc4d154617acc1622e21db536d9062be8cc4 | [
"BSD-3-Clause"
] | 4 | 2019-11-03T18:27:41.000Z | 2021-05-19T07:55:33.000Z | import importlib
from typing import Any
class ImportFromStringError(Exception):
    # Raised when a "module:attribute" import string cannot be resolved.
    pass
def import_from_string(import_str: str) -> Any:
    """Resolve a ``"module:attr[.attr...]"`` string to the named object.

    Args:
        import_str: A dotted module path and a dotted attribute path
            separated by a colon, e.g. ``"package.module:Class.attribute"``.

    Returns:
        The resolved attribute.

    Raises:
        ImportFromStringError: If the string is malformed, the module cannot
            be imported, or an attribute in the chain is missing.
    """
    module_str, _, attrs_str = import_str.partition(":")
    if not module_str or not attrs_str:
        message = (
            'Import string "{import_str}" must be in format "<module>:<attribute>".'
        )
        raise ImportFromStringError(message.format(import_str=import_str))
    try:
        module = importlib.import_module(module_str)
    except ImportError as exc:
        if exc.name != module_str:
            # A *nested* import inside the target module failed; re-raise it
            # unchanged so the real cause is visible to the caller.
            raise exc from None
        message = 'Could not import module "{module_str}".'
        # Chain the original ImportError for easier debugging.
        raise ImportFromStringError(message.format(module_str=module_str)) from exc
    instance = module
    try:
        # Walk the dotted attribute chain, e.g. "Class.attribute".
        for attr_str in attrs_str.split("."):
            instance = getattr(instance, attr_str)
    except AttributeError as exc:
        message = 'Attribute "{attrs_str}" not found in module "{module_str}".'
        raise ImportFromStringError(
            message.format(attrs_str=attrs_str, module_str=module_str)
        ) from exc
    return instance
| 30.722222 | 84 | 0.66094 | import importlib
from typing import Any
class ImportFromStringError(Exception):
pass
def import_from_string(import_str: str) -> Any:
module_str, _, attrs_str = import_str.partition(":")
if not module_str or not attrs_str:
message = (
'Import string "{import_str}" must be in format "<module>:<attribute>".'
)
raise ImportFromStringError(message.format(import_str=import_str))
try:
module = importlib.import_module(module_str)
except ImportError as exc:
if exc.name != module_str:
raise exc from None
message = 'Could not import module "{module_str}".'
raise ImportFromStringError(message.format(module_str=module_str))
instance = module
try:
for attr_str in attrs_str.split("."):
instance = getattr(instance, attr_str)
except AttributeError as exc:
message = 'Attribute "{attrs_str}" not found in module "{module_str}".'
raise ImportFromStringError(
message.format(attrs_str=attrs_str, module_str=module_str)
)
return instance
| true | true |
f73892c4d95ebb75d3f233eeabc4bc6f86c071a8 | 2,442 | py | Python | python/slack_types/rtm_api/file_public_event.py | warrenseine/slack-types | 19e280aa43186ab4b9126ac6891f43cac22d3aaf | [
"MIT"
] | null | null | null | python/slack_types/rtm_api/file_public_event.py | warrenseine/slack-types | 19e280aa43186ab4b9126ac6891f43cac22d3aaf | [
"MIT"
] | null | null | null | python/slack_types/rtm_api/file_public_event.py | warrenseine/slack-types | 19e280aa43186ab4b9126ac6891f43cac22d3aaf | [
"MIT"
] | null | null | null | # To use this code, make sure you
#
# import json
#
# and then, to convert JSON from a string, do
#
# result = file_public_event_from_dict(json.loads(json_string))
from dataclasses import dataclass
from typing import Optional, Any, TypeVar, Type, cast
T = TypeVar("T")
def from_str(x: Any) -> str:
    """Assert that *x* is a str and pass it through unchanged."""
    assert isinstance(x, str)
    return x
def from_none(x: Any) -> Any:
    """Assert that *x* is None and pass it through unchanged."""
    assert x is None
    return x
def from_union(fs, x):
    """Return the result of the first converter in *fs* that accepts *x*.

    Each converter is tried in order; one is considered to reject *x* when
    it raises. Asserts (fails) if every converter rejects.

    Fix: catch only ``Exception`` instead of a bare ``except`` so that
    ``KeyboardInterrupt``/``SystemExit`` are not swallowed.
    """
    for f in fs:
        try:
            return f(x)
        except Exception:
            pass
    assert False
def to_class(c: Type[T], x: Any) -> dict:
    """Assert that *x* is an instance of *c* and serialize it via its to_dict()."""
    assert isinstance(x, c)
    return cast(Any, x).to_dict()
@dataclass
class File:
    """Minimal file reference carried by the event (only its ID)."""
    id: Optional[str] = None

    @staticmethod
    def from_dict(obj: Any) -> 'File':
        assert isinstance(obj, dict)
        # Local renamed from ``id`` to avoid shadowing the builtin.
        file_id = from_union([from_str, from_none], obj.get("id"))
        return File(file_id)

    def to_dict(self) -> dict:
        return {"id": from_union([from_str, from_none], self.id)}
@dataclass
class FilePublicEvent:
    """Payload of Slack's ``file_public`` RTM event; all fields optional."""
    type: Optional[str] = None
    file_id: Optional[str] = None
    file: Optional[File] = None
    user_id: Optional[str] = None
    event_ts: Optional[str] = None

    @staticmethod
    def from_dict(obj: Any) -> 'FilePublicEvent':
        assert isinstance(obj, dict)

        def opt_str(key):
            return from_union([from_str, from_none], obj.get(key))

        return FilePublicEvent(
            type=opt_str("type"),
            file_id=opt_str("file_id"),
            file=from_union([File.from_dict, from_none], obj.get("file")),
            user_id=opt_str("user_id"),
            event_ts=opt_str("event_ts"),
        )

    def to_dict(self) -> dict:
        return {
            "type": from_union([from_str, from_none], self.type),
            "file_id": from_union([from_str, from_none], self.file_id),
            "file": from_union([lambda x: to_class(File, x), from_none], self.file),
            "user_id": from_union([from_str, from_none], self.user_id),
            "event_ts": from_union([from_str, from_none], self.event_ts),
        }
def file_public_event_from_dict(s: Any) -> FilePublicEvent:
    """Parse a decoded-JSON dict into a FilePublicEvent."""
    return FilePublicEvent.from_dict(s)
def file_public_event_to_dict(x: FilePublicEvent) -> Any:
    """Serialize a FilePublicEvent back into a JSON-compatible dict."""
    return to_class(FilePublicEvent, x)
| 27.133333 | 88 | 0.640868 |
from dataclasses import dataclass
from typing import Optional, Any, TypeVar, Type, cast
T = TypeVar("T")
def from_str(x: Any) -> str:
assert isinstance(x, str)
return x
def from_none(x: Any) -> Any:
assert x is None
return x
def from_union(fs, x):
for f in fs:
try:
return f(x)
except:
pass
assert False
def to_class(c: Type[T], x: Any) -> dict:
assert isinstance(x, c)
return cast(Any, x).to_dict()
@dataclass
class File:
id: Optional[str] = None
@staticmethod
def from_dict(obj: Any) -> 'File':
assert isinstance(obj, dict)
id = from_union([from_str, from_none], obj.get("id"))
return File(id)
def to_dict(self) -> dict:
result: dict = {}
result["id"] = from_union([from_str, from_none], self.id)
return result
@dataclass
class FilePublicEvent:
type: Optional[str] = None
file_id: Optional[str] = None
file: Optional[File] = None
user_id: Optional[str] = None
event_ts: Optional[str] = None
@staticmethod
def from_dict(obj: Any) -> 'FilePublicEvent':
assert isinstance(obj, dict)
type = from_union([from_str, from_none], obj.get("type"))
file_id = from_union([from_str, from_none], obj.get("file_id"))
file = from_union([File.from_dict, from_none], obj.get("file"))
user_id = from_union([from_str, from_none], obj.get("user_id"))
event_ts = from_union([from_str, from_none], obj.get("event_ts"))
return FilePublicEvent(type, file_id, file, user_id, event_ts)
def to_dict(self) -> dict:
result: dict = {}
result["type"] = from_union([from_str, from_none], self.type)
result["file_id"] = from_union([from_str, from_none], self.file_id)
result["file"] = from_union([lambda x: to_class(File, x), from_none], self.file)
result["user_id"] = from_union([from_str, from_none], self.user_id)
result["event_ts"] = from_union([from_str, from_none], self.event_ts)
return result
def file_public_event_from_dict(s: Any) -> FilePublicEvent:
return FilePublicEvent.from_dict(s)
def file_public_event_to_dict(x: FilePublicEvent) -> Any:
return to_class(FilePublicEvent, x)
| true | true |
f738930e77a762d27692c625571113f029d1504c | 15,545 | py | Python | endpoints/appr/models_cnr.py | syed/quay | 3b94cda75180d1355046ed656490aa45f63ee479 | [
"Apache-2.0"
] | null | null | null | endpoints/appr/models_cnr.py | syed/quay | 3b94cda75180d1355046ed656490aa45f63ee479 | [
"Apache-2.0"
] | null | null | null | endpoints/appr/models_cnr.py | syed/quay | 3b94cda75180d1355046ed656490aa45f63ee479 | [
"Apache-2.0"
] | null | null | null | from collections import namedtuple
from datetime import datetime
import cnr.semver
from cnr.exception import raise_package_not_found, raise_channel_not_found, CnrException
import features
import data.model
from app import app, storage, authentication, model_cache
from data import appr_model
from data import model as data_model
from data.cache import cache_key
from data.database import Repository, MediaType, db_transaction
from data.appr_model.models import NEW_MODELS
from endpoints.appr.models_interface import (
ApplicationManifest,
ApplicationRelease,
ApplicationSummaryView,
AppRegistryDataInterface,
BlobDescriptor,
ChannelView,
ChannelReleasesView,
)
from util.audit import track_and_log
from util.morecollections import AttrDict
from util.names import parse_robot_username
class ReadOnlyException(CnrException):
status_code = 405
errorcode = "read-only"
def _strip_sha256_header(digest):
if digest.startswith("sha256:"):
return digest.split("sha256:")[1]
return digest
def _split_package_name(package):
"""
Returns the namespace and package-name.
"""
return package.split("/")
def _join_package_name(ns, name):
"""
Returns a app-name in the 'namespace/name' format.
"""
return "%s/%s" % (ns, name)
def _timestamp_to_iso(timestamp, in_ms=True):
if in_ms:
timestamp = timestamp // 1000
return datetime.fromtimestamp(timestamp).isoformat()
def _application(package):
ns, name = _split_package_name(package)
repo = data.model.repository.get_app_repository(ns, name)
if repo is None:
raise_package_not_found(package)
return repo
class CNRAppModel(AppRegistryDataInterface):
def __init__(self, models_ref, is_readonly):
self.models_ref = models_ref
self.is_readonly = is_readonly
def log_action(
self,
event_name,
namespace_name,
repo_name=None,
analytics_name=None,
analytics_sample=1,
metadata=None,
):
metadata = {} if metadata is None else metadata
repo = None
if repo_name is not None:
db_repo = data.model.repository.get_repository(
namespace_name, repo_name, kind_filter="application"
)
repo = AttrDict(
{
"id": db_repo.id,
"name": db_repo.name,
"namespace_name": db_repo.namespace_user.username,
"is_free_namespace": db_repo.namespace_user.stripe_id is None,
}
)
track_and_log(
event_name,
repo,
analytics_name=analytics_name,
analytics_sample=analytics_sample,
**metadata,
)
def list_applications(
self, namespace=None, media_type=None, search=None, username=None, with_channels=False
):
"""
Lists all repositories that contain applications, with optional filtering to a specific
namespace and view a specific user.
"""
limit = app.config.get("APP_REGISTRY_RESULTS_LIMIT", 50)
namespace_whitelist = app.config.get("APP_REGISTRY_PACKAGE_LIST_CACHE_WHITELIST", [])
# NOTE: This caching only applies for the super-large and commonly requested results
# sets.
if (
namespace is not None
and namespace in namespace_whitelist
and media_type is None
and search is None
and username is None
and not with_channels
):
def _list_applications():
return [
found._asdict()
for found in self._list_applications(namespace=namespace, limit=limit)
]
apps_cache_key = cache_key.for_appr_applications_list(namespace, limit)
return [
ApplicationSummaryView(**found)
for found in model_cache.retrieve(apps_cache_key, _list_applications)
]
else:
return self._list_applications(
namespace, media_type, search, username, with_channels, limit=limit
)
def _list_applications(
self,
namespace=None,
media_type=None,
search=None,
username=None,
with_channels=False,
limit=None,
):
limit = limit or app.config.get("APP_REGISTRY_RESULTS_LIMIT", 50)
views = []
for repo in appr_model.package.list_packages_query(
self.models_ref, namespace, media_type, search, username=username, limit=limit
):
tag_set_prefetch = getattr(repo, self.models_ref.tag_set_prefetch_name)
releases = [t.name for t in tag_set_prefetch]
if not releases:
continue
available_releases = [
str(x) for x in sorted(cnr.semver.versions(releases, False), reverse=True)
]
channels = None
if with_channels:
channels = [
ChannelView(name=chan.name, current=chan.linked_tag.name)
for chan in appr_model.channel.get_repo_channels(repo, self.models_ref)
]
app_name = _join_package_name(repo.namespace_user.username, repo.name)
manifests = self.list_manifests(app_name, available_releases[0])
view = ApplicationSummaryView(
namespace=repo.namespace_user.username,
name=app_name,
visibility=data_model.repository.repository_visibility_name(repo),
default=available_releases[0],
channels=channels,
manifests=manifests,
releases=available_releases,
updated_at=_timestamp_to_iso(tag_set_prefetch[-1].lifetime_start),
created_at=_timestamp_to_iso(tag_set_prefetch[0].lifetime_start),
)
views.append(view)
return views
def application_is_public(self, package_name):
"""
Returns:
* True if the repository is public
"""
namespace, name = _split_package_name(package_name)
return data.model.repository.repository_is_public(namespace, name)
def create_application(self, package_name, visibility, owner):
"""
Create a new app repository, owner is the user who creates it.
"""
if self.is_readonly:
raise ReadOnlyException("Currently in read-only mode")
ns, name = _split_package_name(package_name)
data.model.repository.create_repository(ns, name, owner, visibility, "application")
def application_exists(self, package_name):
"""
Create a new app repository, owner is the user who creates it.
"""
ns, name = _split_package_name(package_name)
return data.model.repository.get_repository(ns, name, kind_filter="application") is not None
def basic_search(self, query, username=None):
"""Returns an array of matching AppRepositories in the format: 'namespace/name'
Note:
* Only 'public' repositories are returned
Todo:
* Filter results with readeable reposistory for the user (including visibilitys)
"""
limit = app.config.get("APP_REGISTRY_RESULTS_LIMIT", 50)
return [
_join_package_name(r.namespace_user.username, r.name)
for r in data.model.repository.get_app_search(
lookup=query, username=username, limit=limit
)
]
def list_releases(self, package_name, media_type=None):
"""Return the list of all releases of an Application
Example:
>>> get_app_releases('ant31/rocketchat')
['1.7.1', '1.7.0', '1.7.2']
Todo:
* Paginate
"""
return appr_model.release.get_releases(
_application(package_name), self.models_ref, media_type
)
def list_manifests(self, package_name, release=None):
"""
Returns the list of all manifests of an Application.
Todo:
* Paginate
"""
try:
repo = _application(package_name)
return list(appr_model.manifest.get_manifest_types(repo, self.models_ref, release))
except (Repository.DoesNotExist, self.models_ref.Tag.DoesNotExist):
raise_package_not_found(package_name, release)
def fetch_release(self, package_name, release, media_type):
"""
Retrieves an AppRelease from it's repository-name and release-name.
"""
repo = _application(package_name)
try:
tag, manifest, blob = appr_model.release.get_app_release(
repo, release, media_type, self.models_ref
)
created_at = _timestamp_to_iso(tag.lifetime_start)
blob_descriptor = BlobDescriptor(
digest=_strip_sha256_header(blob.digest),
mediaType=blob.media_type.name,
size=blob.size,
urls=[],
)
app_manifest = ApplicationManifest(
digest=manifest.digest, mediaType=manifest.media_type.name, content=blob_descriptor
)
app_release = ApplicationRelease(
release=tag.name, created_at=created_at, name=package_name, manifest=app_manifest
)
return app_release
except (
self.models_ref.Tag.DoesNotExist,
self.models_ref.Manifest.DoesNotExist,
self.models_ref.Blob.DoesNotExist,
Repository.DoesNotExist,
MediaType.DoesNotExist,
):
raise_package_not_found(package_name, release, media_type)
def store_blob(self, cnrblob, content_media_type):
if self.is_readonly:
raise ReadOnlyException("Currently in read-only mode")
fp = cnrblob.packager.io_file
path = cnrblob.upload_url(cnrblob.digest)
locations = storage.preferred_locations
storage.stream_write(locations, path, fp, "application/x-gzip")
db_blob = appr_model.blob.get_or_create_blob(
cnrblob.digest, cnrblob.size, content_media_type, locations, self.models_ref
)
return BlobDescriptor(
mediaType=content_media_type,
digest=_strip_sha256_header(db_blob.digest),
size=db_blob.size,
urls=[],
)
def create_release(self, package, user, visibility, force=False):
"""
Add an app-release to a repository package is an instance of data.cnr.package.Package.
"""
if self.is_readonly:
raise ReadOnlyException("Currently in read-only mode")
manifest = package.manifest()
ns, name = package.namespace, package.name
repo = data.model.repository.get_or_create_repository(
ns, name, user, visibility=visibility, repo_kind="application"
)
tag_name = package.release
appr_model.release.create_app_release(
repo,
tag_name,
package.manifest(),
manifest["content"]["digest"],
self.models_ref,
force,
)
def delete_release(self, package_name, release, media_type):
"""
Remove/Delete an app-release from an app-repository.
It does not delete the entire app-repository, only a single release
"""
if self.is_readonly:
raise ReadOnlyException("Currently in read-only mode")
repo = _application(package_name)
try:
appr_model.release.delete_app_release(repo, release, media_type, self.models_ref)
except (
self.models_ref.Channel.DoesNotExist,
self.models_ref.Tag.DoesNotExist,
MediaType.DoesNotExist,
):
raise_package_not_found(package_name, release, media_type)
def release_exists(self, package, release):
"""
Return true if a release with that name already exist or have existed (include deleted ones)
"""
# TODO: Figure out why this isn't implemented.
def channel_exists(self, package_name, channel_name):
"""
Returns true if channel exists.
"""
repo = _application(package_name)
return appr_model.tag.tag_exists(repo, channel_name, self.models_ref, "channel")
def delete_channel(self, package_name, channel_name):
"""Delete an AppChannel
Note:
It doesn't delete the AppReleases
"""
if self.is_readonly:
raise ReadOnlyException("Currently in read-only mode")
repo = _application(package_name)
try:
appr_model.channel.delete_channel(repo, channel_name, self.models_ref)
except (self.models_ref.Channel.DoesNotExist, self.models_ref.Tag.DoesNotExist):
raise_channel_not_found(package_name, channel_name)
def list_channels(self, package_name):
"""
Returns all AppChannel for a package.
"""
repo = _application(package_name)
channels = appr_model.channel.get_repo_channels(repo, self.models_ref)
return [ChannelView(name=chan.name, current=chan.linked_tag.name) for chan in channels]
def fetch_channel(self, package_name, channel_name, with_releases=True):
"""
Returns an AppChannel.
"""
repo = _application(package_name)
try:
channel = appr_model.channel.get_channel(repo, channel_name, self.models_ref)
except (self.models_ref.Channel.DoesNotExist, self.models_ref.Tag.DoesNotExist):
raise_channel_not_found(package_name, channel_name)
if with_releases:
releases = appr_model.channel.get_channel_releases(repo, channel, self.models_ref)
chanview = ChannelReleasesView(
current=channel.linked_tag.name,
name=channel.name,
releases=[channel.linked_tag.name] + [c.name for c in releases],
)
else:
chanview = ChannelView(current=channel.linked_tag.name, name=channel.name)
return chanview
def list_release_channels(self, package_name, release, active=True):
repo = _application(package_name)
try:
channels = appr_model.channel.get_tag_channels(
repo, release, self.models_ref, active=active
)
return [ChannelView(name=c.name, current=release) for c in channels]
except (self.models_ref.Channel.DoesNotExist, self.models_ref.Tag.DoesNotExist):
raise_package_not_found(package_name, release)
def update_channel(self, package_name, channel_name, release):
"""Append a new release to the AppChannel
Returns:
A new AppChannel with the release
"""
if self.is_readonly:
raise ReadOnlyException("Currently in read-only mode")
repo = _application(package_name)
channel = appr_model.channel.create_or_update_channel(
repo, channel_name, release, self.models_ref
)
return ChannelView(current=channel.linked_tag.name, name=channel.name)
def get_blob_locations(self, digest):
return appr_model.blob.get_blob_locations(digest, self.models_ref)
# Phase 3: Read and write from new tables.
model = CNRAppModel(NEW_MODELS, features.READONLY_APP_REGISTRY)
| 35.490868 | 100 | 0.632422 | from collections import namedtuple
from datetime import datetime
import cnr.semver
from cnr.exception import raise_package_not_found, raise_channel_not_found, CnrException
import features
import data.model
from app import app, storage, authentication, model_cache
from data import appr_model
from data import model as data_model
from data.cache import cache_key
from data.database import Repository, MediaType, db_transaction
from data.appr_model.models import NEW_MODELS
from endpoints.appr.models_interface import (
ApplicationManifest,
ApplicationRelease,
ApplicationSummaryView,
AppRegistryDataInterface,
BlobDescriptor,
ChannelView,
ChannelReleasesView,
)
from util.audit import track_and_log
from util.morecollections import AttrDict
from util.names import parse_robot_username
class ReadOnlyException(CnrException):
status_code = 405
errorcode = "read-only"
def _strip_sha256_header(digest):
if digest.startswith("sha256:"):
return digest.split("sha256:")[1]
return digest
def _split_package_name(package):
return package.split("/")
def _join_package_name(ns, name):
return "%s/%s" % (ns, name)
def _timestamp_to_iso(timestamp, in_ms=True):
if in_ms:
timestamp = timestamp // 1000
return datetime.fromtimestamp(timestamp).isoformat()
def _application(package):
ns, name = _split_package_name(package)
repo = data.model.repository.get_app_repository(ns, name)
if repo is None:
raise_package_not_found(package)
return repo
class CNRAppModel(AppRegistryDataInterface):
def __init__(self, models_ref, is_readonly):
self.models_ref = models_ref
self.is_readonly = is_readonly
def log_action(
self,
event_name,
namespace_name,
repo_name=None,
analytics_name=None,
analytics_sample=1,
metadata=None,
):
metadata = {} if metadata is None else metadata
repo = None
if repo_name is not None:
db_repo = data.model.repository.get_repository(
namespace_name, repo_name, kind_filter="application"
)
repo = AttrDict(
{
"id": db_repo.id,
"name": db_repo.name,
"namespace_name": db_repo.namespace_user.username,
"is_free_namespace": db_repo.namespace_user.stripe_id is None,
}
)
track_and_log(
event_name,
repo,
analytics_name=analytics_name,
analytics_sample=analytics_sample,
**metadata,
)
def list_applications(
self, namespace=None, media_type=None, search=None, username=None, with_channels=False
):
limit = app.config.get("APP_REGISTRY_RESULTS_LIMIT", 50)
namespace_whitelist = app.config.get("APP_REGISTRY_PACKAGE_LIST_CACHE_WHITELIST", [])
if (
namespace is not None
and namespace in namespace_whitelist
and media_type is None
and search is None
and username is None
and not with_channels
):
def _list_applications():
return [
found._asdict()
for found in self._list_applications(namespace=namespace, limit=limit)
]
apps_cache_key = cache_key.for_appr_applications_list(namespace, limit)
return [
ApplicationSummaryView(**found)
for found in model_cache.retrieve(apps_cache_key, _list_applications)
]
else:
return self._list_applications(
namespace, media_type, search, username, with_channels, limit=limit
)
def _list_applications(
self,
namespace=None,
media_type=None,
search=None,
username=None,
with_channels=False,
limit=None,
):
limit = limit or app.config.get("APP_REGISTRY_RESULTS_LIMIT", 50)
views = []
for repo in appr_model.package.list_packages_query(
self.models_ref, namespace, media_type, search, username=username, limit=limit
):
tag_set_prefetch = getattr(repo, self.models_ref.tag_set_prefetch_name)
releases = [t.name for t in tag_set_prefetch]
if not releases:
continue
available_releases = [
str(x) for x in sorted(cnr.semver.versions(releases, False), reverse=True)
]
channels = None
if with_channels:
channels = [
ChannelView(name=chan.name, current=chan.linked_tag.name)
for chan in appr_model.channel.get_repo_channels(repo, self.models_ref)
]
app_name = _join_package_name(repo.namespace_user.username, repo.name)
manifests = self.list_manifests(app_name, available_releases[0])
view = ApplicationSummaryView(
namespace=repo.namespace_user.username,
name=app_name,
visibility=data_model.repository.repository_visibility_name(repo),
default=available_releases[0],
channels=channels,
manifests=manifests,
releases=available_releases,
updated_at=_timestamp_to_iso(tag_set_prefetch[-1].lifetime_start),
created_at=_timestamp_to_iso(tag_set_prefetch[0].lifetime_start),
)
views.append(view)
return views
def application_is_public(self, package_name):
namespace, name = _split_package_name(package_name)
return data.model.repository.repository_is_public(namespace, name)
def create_application(self, package_name, visibility, owner):
if self.is_readonly:
raise ReadOnlyException("Currently in read-only mode")
ns, name = _split_package_name(package_name)
data.model.repository.create_repository(ns, name, owner, visibility, "application")
def application_exists(self, package_name):
ns, name = _split_package_name(package_name)
return data.model.repository.get_repository(ns, name, kind_filter="application") is not None
def basic_search(self, query, username=None):
limit = app.config.get("APP_REGISTRY_RESULTS_LIMIT", 50)
return [
_join_package_name(r.namespace_user.username, r.name)
for r in data.model.repository.get_app_search(
lookup=query, username=username, limit=limit
)
]
def list_releases(self, package_name, media_type=None):
return appr_model.release.get_releases(
_application(package_name), self.models_ref, media_type
)
def list_manifests(self, package_name, release=None):
try:
repo = _application(package_name)
return list(appr_model.manifest.get_manifest_types(repo, self.models_ref, release))
except (Repository.DoesNotExist, self.models_ref.Tag.DoesNotExist):
raise_package_not_found(package_name, release)
def fetch_release(self, package_name, release, media_type):
repo = _application(package_name)
try:
tag, manifest, blob = appr_model.release.get_app_release(
repo, release, media_type, self.models_ref
)
created_at = _timestamp_to_iso(tag.lifetime_start)
blob_descriptor = BlobDescriptor(
digest=_strip_sha256_header(blob.digest),
mediaType=blob.media_type.name,
size=blob.size,
urls=[],
)
app_manifest = ApplicationManifest(
digest=manifest.digest, mediaType=manifest.media_type.name, content=blob_descriptor
)
app_release = ApplicationRelease(
release=tag.name, created_at=created_at, name=package_name, manifest=app_manifest
)
return app_release
except (
self.models_ref.Tag.DoesNotExist,
self.models_ref.Manifest.DoesNotExist,
self.models_ref.Blob.DoesNotExist,
Repository.DoesNotExist,
MediaType.DoesNotExist,
):
raise_package_not_found(package_name, release, media_type)
def store_blob(self, cnrblob, content_media_type):
if self.is_readonly:
raise ReadOnlyException("Currently in read-only mode")
fp = cnrblob.packager.io_file
path = cnrblob.upload_url(cnrblob.digest)
locations = storage.preferred_locations
storage.stream_write(locations, path, fp, "application/x-gzip")
db_blob = appr_model.blob.get_or_create_blob(
cnrblob.digest, cnrblob.size, content_media_type, locations, self.models_ref
)
return BlobDescriptor(
mediaType=content_media_type,
digest=_strip_sha256_header(db_blob.digest),
size=db_blob.size,
urls=[],
)
def create_release(self, package, user, visibility, force=False):
if self.is_readonly:
raise ReadOnlyException("Currently in read-only mode")
manifest = package.manifest()
ns, name = package.namespace, package.name
repo = data.model.repository.get_or_create_repository(
ns, name, user, visibility=visibility, repo_kind="application"
)
tag_name = package.release
appr_model.release.create_app_release(
repo,
tag_name,
package.manifest(),
manifest["content"]["digest"],
self.models_ref,
force,
)
def delete_release(self, package_name, release, media_type):
if self.is_readonly:
raise ReadOnlyException("Currently in read-only mode")
repo = _application(package_name)
try:
appr_model.release.delete_app_release(repo, release, media_type, self.models_ref)
except (
self.models_ref.Channel.DoesNotExist,
self.models_ref.Tag.DoesNotExist,
MediaType.DoesNotExist,
):
raise_package_not_found(package_name, release, media_type)
def release_exists(self, package, release):
def channel_exists(self, package_name, channel_name):
repo = _application(package_name)
return appr_model.tag.tag_exists(repo, channel_name, self.models_ref, "channel")
def delete_channel(self, package_name, channel_name):
if self.is_readonly:
raise ReadOnlyException("Currently in read-only mode")
repo = _application(package_name)
try:
appr_model.channel.delete_channel(repo, channel_name, self.models_ref)
except (self.models_ref.Channel.DoesNotExist, self.models_ref.Tag.DoesNotExist):
raise_channel_not_found(package_name, channel_name)
def list_channels(self, package_name):
repo = _application(package_name)
channels = appr_model.channel.get_repo_channels(repo, self.models_ref)
return [ChannelView(name=chan.name, current=chan.linked_tag.name) for chan in channels]
def fetch_channel(self, package_name, channel_name, with_releases=True):
repo = _application(package_name)
try:
channel = appr_model.channel.get_channel(repo, channel_name, self.models_ref)
except (self.models_ref.Channel.DoesNotExist, self.models_ref.Tag.DoesNotExist):
raise_channel_not_found(package_name, channel_name)
if with_releases:
releases = appr_model.channel.get_channel_releases(repo, channel, self.models_ref)
chanview = ChannelReleasesView(
current=channel.linked_tag.name,
name=channel.name,
releases=[channel.linked_tag.name] + [c.name for c in releases],
)
else:
chanview = ChannelView(current=channel.linked_tag.name, name=channel.name)
return chanview
def list_release_channels(self, package_name, release, active=True):
repo = _application(package_name)
try:
channels = appr_model.channel.get_tag_channels(
repo, release, self.models_ref, active=active
)
return [ChannelView(name=c.name, current=release) for c in channels]
except (self.models_ref.Channel.DoesNotExist, self.models_ref.Tag.DoesNotExist):
raise_package_not_found(package_name, release)
def update_channel(self, package_name, channel_name, release):
if self.is_readonly:
raise ReadOnlyException("Currently in read-only mode")
repo = _application(package_name)
channel = appr_model.channel.create_or_update_channel(
repo, channel_name, release, self.models_ref
)
return ChannelView(current=channel.linked_tag.name, name=channel.name)
def get_blob_locations(self, digest):
return appr_model.blob.get_blob_locations(digest, self.models_ref)
# Phase 3: Read and write from new tables.
model = CNRAppModel(NEW_MODELS, features.READONLY_APP_REGISTRY)
| true | true |
f73893397861444506c7617568c6486458e910db | 1,428 | py | Python | planning/search.py | talhaHavadar/RobotLocalization | 58f8347012d5d9690b2f742a4d692817673d602a | [
"MIT"
] | null | null | null | planning/search.py | talhaHavadar/RobotLocalization | 58f8347012d5d9690b2f742a4d692817673d602a | [
"MIT"
] | null | null | null | planning/search.py | talhaHavadar/RobotLocalization | 58f8347012d5d9690b2f742a4d692817673d602a | [
"MIT"
] | null | null | null | """
MIT License
Copyright (c) 2017 Talha Can Havadar
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.
Basic search algorithm for path planning
"""
class Search(object):
"""
"""
def __init__(self, map_, initial_pos, cost):
self.map = map_
self.current_position = initial_pos
self.cost = cost
def set_map(self, map_):
self.map = map_
def set_position(self, pos):
self.current_position = pos
| 36.615385 | 78 | 0.755602 | class Search(object):
def __init__(self, map_, initial_pos, cost):
self.map = map_
self.current_position = initial_pos
self.cost = cost
def set_map(self, map_):
self.map = map_
def set_position(self, pos):
self.current_position = pos
| true | true |
f73893503301d9004f6efa86764f40295f756317 | 5,667 | py | Python | website/venv/lib/python2.7/site-packages/google/cloud/pubsub_v1/futures.py | wenhuiyang/ARgot | 3fd1eacca6f81a3157649dda95ab427ca1f5efe1 | [
"MIT"
] | null | null | null | website/venv/lib/python2.7/site-packages/google/cloud/pubsub_v1/futures.py | wenhuiyang/ARgot | 3fd1eacca6f81a3157649dda95ab427ca1f5efe1 | [
"MIT"
] | 2 | 2021-02-08T20:18:59.000Z | 2021-04-30T20:35:11.000Z | website/venv/lib/python2.7/site-packages/google/cloud/pubsub_v1/futures.py | wenhuiyang/ARgot | 3fd1eacca6f81a3157649dda95ab427ca1f5efe1 | [
"MIT"
] | null | null | null | # Copyright 2017, Google LLC All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from __future__ import absolute_import
import threading
import google.api_core.future
from google.cloud.pubsub_v1.publisher import exceptions
class Future(google.api_core.future.Future):
"""Encapsulation of the asynchronous execution of an action.
This object is returned from asychronous Pub/Sub calls, and is the
interface to determine the status of those calls.
This object should not be created directly, but is returned by other
methods in this library.
"""
_SENTINEL = object()
def __init__(self):
self._result = self._SENTINEL
self._exception = self._SENTINEL
self._callbacks = []
self._completed = threading.Event()
def cancel(self):
"""Actions in Pub/Sub generally may not be canceled.
This method always returns False.
"""
return False
def cancelled(self):
"""Actions in Pub/Sub generally may not be canceled.
This method always returns False.
"""
return False
def running(self):
"""Actions in Pub/Sub generally may not be canceled.
Returns:
bool: ``True`` if this method has not yet completed, or
``False`` if it has completed.
"""
if self.done():
return False
return True
def done(self):
"""Return True the future is done, False otherwise.
This still returns True in failure cases; checking :meth:`result` or
:meth:`exception` is the canonical way to assess success or failure.
"""
return (self._exception is not self._SENTINEL or
self._result is not self._SENTINEL)
def result(self, timeout=None):
"""Return the message ID, or raise an exception.
This blocks until the message has successfully been published, and
returns the message ID.
Args:
timeout (Union[int, float]): The number of seconds before this call
times out and raises TimeoutError.
Returns:
str: The message ID.
Raises:
~.pubsub_v1.TimeoutError: If the request times out.
Exception: For undefined exceptions in the underlying
call execution.
"""
# Attempt to get the exception if there is one.
# If there is not one, then we know everything worked, and we can
# return an appropriate value.
err = self.exception(timeout=timeout)
if err is None:
return self._result
raise err
def exception(self, timeout=None):
"""Return the exception raised by the call, if any.
This blocks until the message has successfully been published, and
returns the exception. If the call succeeded, return None.
Args:
timeout (Union[int, float]): The number of seconds before this call
times out and raises TimeoutError.
Raises:
TimeoutError: If the request times out.
Returns:
Exception: The exception raised by the call, if any.
"""
# Wait until the future is done.
if not self._completed.wait(timeout=timeout):
raise exceptions.TimeoutError('Timed out waiting for result.')
# If the batch completed successfully, this should return None.
if self._result is not self._SENTINEL:
return None
# Okay, this batch had an error; this should return it.
return self._exception
def add_done_callback(self, fn):
"""Attach the provided callable to the future.
The provided function is called, with this future as its only argument,
when the future finishes running.
"""
if self.done():
return fn(self)
self._callbacks.append(fn)
def set_result(self, result):
"""Set the result of the future to the provided result.
Args:
result (Any): The result
"""
# Sanity check: A future can only complete once.
if self.done():
raise RuntimeError('set_result can only be called once.')
# Set the result and trigger the future.
self._result = result
self._trigger()
def set_exception(self, exception):
"""Set the result of the future to the given exception.
Args:
exception (:exc:`Exception`): The exception raised.
"""
# Sanity check: A future can only complete once.
if self.done():
raise RuntimeError('set_exception can only be called once.')
# Set the exception and trigger the future.
self._exception = exception
self._trigger()
def _trigger(self):
"""Trigger all callbacks registered to this Future.
This method is called internally by the batch once the batch
completes.
Args:
message_id (str): The message ID, as a string.
"""
self._completed.set()
for callback in self._callbacks:
callback(self)
| 32.016949 | 79 | 0.630669 |
from __future__ import absolute_import
import threading
import google.api_core.future
from google.cloud.pubsub_v1.publisher import exceptions
class Future(google.api_core.future.Future):
    """Encapsulation of the asynchronous execution of a publish action.

    The future completes exactly once, via :meth:`set_result` or
    :meth:`set_exception`, and it can never be cancelled.
    """

    # Sentinel marking "no value set yet"; this lets ``None`` itself be a
    # perfectly valid result or exception value.
    _SENTINEL = object()

    def __init__(self):
        self._result = self._SENTINEL
        self._exception = self._SENTINEL
        self._callbacks = []
        self._completed = threading.Event()

    def cancel(self):
        """Return False; this future cannot be cancelled."""
        return False

    def cancelled(self):
        """Return False; this future is never cancelled."""
        return False

    def running(self):
        """Return True while the future has not yet completed."""
        return not self.done()

    def done(self):
        """Return True once a result or an exception has been recorded."""
        has_result = self._result is not self._SENTINEL
        has_exception = self._exception is not self._SENTINEL
        return has_result or has_exception

    def result(self, timeout=None):
        """Block until completion, then return the result or raise."""
        error = self.exception(timeout=timeout)
        if error is not None:
            raise error
        return self._result

    def exception(self, timeout=None):
        """Block until completion, then return the exception (or None)."""
        completed = self._completed.wait(timeout=timeout)
        if not completed:
            raise exceptions.TimeoutError('Timed out waiting for result.')
        # A successfully recorded result means there is no error to report.
        if self._result is not self._SENTINEL:
            return None
        return self._exception

    def add_done_callback(self, fn):
        """Call ``fn(self)`` on completion (immediately if already done)."""
        if not self.done():
            self._callbacks.append(fn)
            return None
        return fn(self)

    def set_result(self, result):
        """Record the result; may only be called once."""
        if self.done():
            raise RuntimeError('set_result can only be called once.')
        self._result = result
        self._trigger()

    def set_exception(self, exception):
        """Record the exception; may only be called once."""
        if self.done():
            raise RuntimeError('set_exception can only be called once.')
        self._exception = exception
        self._trigger()

    def _trigger(self):
        """Wake up waiters and fire every registered callback."""
        self._completed.set()
        for registered in self._callbacks:
            registered(self)
| true | true |
f738949eb75771c3ce36937a0a251f1a66189fa0 | 1,851 | py | Python | tessellation/tessellation.py | My-MindPalace/Tessellation | 655cc34aa405e6fdd029f6cf073a846e0e9b7dd5 | [
"Apache-2.0"
] | null | null | null | tessellation/tessellation.py | My-MindPalace/Tessellation | 655cc34aa405e6fdd029f6cf073a846e0e9b7dd5 | [
"Apache-2.0"
] | null | null | null | tessellation/tessellation.py | My-MindPalace/Tessellation | 655cc34aa405e6fdd029f6cf073a846e0e9b7dd5 | [
"Apache-2.0"
] | null | null | null | # -*- coding: utf-8 -*-
"""
filename - tessellation.py
author - MyMindPalace
Description
A Tessellation (or Tiling) is the act of covering a surface with a pattern of flat shapes so that there are no overlaps or gaps.
Tessellations express fascinating geometric and symmetric properties as art,
and famously appear in Islamic art with four, five, and six-fold regular tessellations.
Input
tessellation dimension - 2
tile dimension - 4
# # # #
# - - #
# + + #
# # # #
Output
# # # # # # # #
# - - # # - - #
# + + # # + + #
# # # # # # # #
# # # # # # # #
# - - # # - - #
# + + # # + + #
# # # # # # # #
"""
# Module-level state shared between main() and tessellation().
tessellation_dimension = 0  # number of tile repetitions per side
tile_dimension = 0  # side length of one square tile
tile = []  # the tile pattern, read row by row from stdin
tess = []  # the assembled tessellation, built row by row
def tessellation():
    """Read one tile from stdin and print the fully tessellated grid.

    Reads ``tile_dimension`` rows of space-separated symbols into the
    module-level ``tile``, then repeats that tile ``tessellation_dimension``
    times in both directions into ``tess`` and prints each output row.

    NOTE(review): relies on module-level globals; calling this twice without
    resetting ``tile``/``tess`` would append onto stale state.
    """
    global tile,tessellation_dimension,tile_dimension
    tile_row = 0
    tile_column = 0
    print("Enter the tile")
    # Pre-create empty rows (each entry is immediately replaced below).
    for i in range(tile_dimension):
        tile.append([])
    for i in range(tile_dimension):
        tile[i] = list(input().split(" "))
    for i in range(tile_dimension*tessellation_dimension):
        tess.append([])
    # Walk the output grid; tile_row/tile_column wrap around so the source
    # tile repeats in both directions.
    for i in range(tile_dimension*tessellation_dimension):
        for j in range(tile_dimension*tessellation_dimension):
            tess[i].append(tile[tile_row][tile_column])
            tile_column = tile_column + 1
            if tile_column == tile_dimension:
                tile_column = 0
        tile_row = tile_row + 1
        if tile_row == tile_dimension:
            tile_row = 0
    for i in range(tile_dimension*tessellation_dimension):
        print(tess[i])
def main():
    """Prompt for the two dimensions, then render the tessellation."""
    global tile_dimension,tessellation_dimension
    tessellation_dimension = int(input("Tessellation Dimension - "))
    tile_dimension = int(input("Tile Dimension - "))
    tessellation()
# Script entry point.
if __name__ == "__main__":
    main()
| 22.301205 | 132 | 0.591032 |
# Module-level state shared between main() and tessellation().
tessellation_dimension = 0  # number of tile repetitions per side
tile_dimension = 0  # side length of one square tile
tile = []  # the tile pattern, read row by row from stdin
tess = []  # the assembled tessellation, built row by row
def tessellation():
    """Read one tile from stdin and print the fully tessellated grid.

    Reads ``tile_dimension`` rows of space-separated symbols into the
    module-level ``tile``, then repeats that tile ``tessellation_dimension``
    times in both directions into ``tess`` and prints each output row.
    """
    global tile,tessellation_dimension,tile_dimension
    tile_row = 0
    tile_column = 0
    print("Enter the tile")
    # Pre-create empty rows (each entry is immediately replaced below).
    for i in range(tile_dimension):
        tile.append([])
    for i in range(tile_dimension):
        tile[i] = list(input().split(" "))
    for i in range(tile_dimension*tessellation_dimension):
        tess.append([])
    # tile_row/tile_column wrap around so the source tile repeats.
    for i in range(tile_dimension*tessellation_dimension):
        for j in range(tile_dimension*tessellation_dimension):
            tess[i].append(tile[tile_row][tile_column])
            tile_column = tile_column + 1
            if tile_column == tile_dimension:
                tile_column = 0
        tile_row = tile_row + 1
        if tile_row == tile_dimension:
            tile_row = 0
    for i in range(tile_dimension*tessellation_dimension):
        print(tess[i])
def main():
    """Prompt for the two dimensions, then render the tessellation."""
    global tile_dimension,tessellation_dimension
    tessellation_dimension = int(input("Tessellation Dimension - "))
    tile_dimension = int(input("Tile Dimension - "))
    tessellation()
# Script entry point.
if __name__ == "__main__":
    main()
| true | true |
f7389511a27c7a3dfc1807776ab247fc893bfe60 | 4,443 | py | Python | grmpy/read/read_auxiliary.py | OpenSourceEconomics/grmpy | 13a262fb615c79829eb4869cbb6693c9c51fb101 | [
"MIT"
] | 18 | 2018-04-10T01:08:22.000Z | 2022-02-23T02:37:24.000Z | grmpy/read/read_auxiliary.py | grmToolbox/grmpy | 13a262fb615c79829eb4869cbb6693c9c51fb101 | [
"MIT"
] | 127 | 2017-08-02T13:29:26.000Z | 2018-03-27T19:42:07.000Z | grmpy/read/read_auxiliary.py | SeBecker/grmpy | 3ff5ec9cd108582c23cb61e6b8d87f4db6ceaee1 | [
"MIT"
] | 13 | 2018-04-28T09:46:22.000Z | 2020-11-06T09:32:27.000Z | """This module provides auxiliary functions for the import process of the init file."""
import numpy as np
def create_attr_dict_est(init_dict, semipar=False, include_constant=False):
    """Prepare the imported initialization dict for estimation.

    Optionally prepends a constant to the relevant sections, collects the
    starting parameter values, and attaches the auxiliary information that
    the subsequent estimation step expects.
    """
    # Placeholder only; replaced by provide_auxiliary_information() below.
    init_dict["AUX"] = {"init_values"}
    init_values = []

    # The constant is added the same way for both branches, so hoist it.
    if include_constant is True:
        init_dict = add_constant(init_dict, semipar)

    if semipar is True:
        init_dict = read_keys_semipar(init_dict, init_values)
    else:
        init_dict = read_keys_par(init_dict, init_values)

    return provide_auxiliary_information(init_dict, init_values)
def create_attr_dict_sim(init_dict):
    """Prepare the imported initialization dict for simulation/estimation."""
    # Placeholder only; replaced by provide_auxiliary_information() below.
    init_dict["AUX"] = {"init_values"}
    collected = []
    init_dict = read_keys_par(init_dict, collected)
    return provide_auxiliary_information(init_dict, collected)
def add_constant(init_dict, semipar=False):
    """Ensure a constant regressor is present in the relevant sections.

    Parametric case: TREATED/UNTREATED/CHOICE; semiparametric case: CHOICE
    only.  Whenever "const" is inserted, the section's params are (re)set to
    a single 1.0 coefficient.
    """
    if semipar is True:
        sections = ["CHOICE"]
    else:
        sections = ["TREATED", "UNTREATED", "CHOICE"]

    for section in sections:
        order = init_dict[section]["order"]
        if "const" in order:
            continue
        order.insert(0, "const")
        init_dict[section]["params"] = np.array([1.0])

    return init_dict
def read_keys_par(init_dict, init_values):
    """Collect starting values from the parametric sections.

    Every section's "params" entry is cast to a numpy array and appended to
    ``init_values``; sections without parameters contribute zeros (one per
    covariate in "order").  Also flags a deterministic model when all
    distributional parameters are zero.
    """
    for section in ["TREATED", "UNTREATED", "CHOICE", "DIST"]:
        section_dict = init_dict[section]
        if "params" in section_dict.keys():
            section_dict["params"] = np.array(section_dict["params"])
            init_values.extend(list(section_dict["params"]))
        else:
            init_values.extend([0.0] * len(section_dict["order"]))

    # All-zero DIST parameters imply a model without unobservables.
    init_dict["DETERMINISTIC"] = bool(np.all(init_dict["DIST"]["params"] == 0))

    return init_dict
def read_keys_semipar(init_dict, init_values):
    """Collect starting values from the semiparametric sections.

    Mirrors read_keys_par but skips the DIST section (no distributional
    parameters are estimated in the semiparametric case).
    """
    for section in ["TREATED", "UNTREATED", "CHOICE"]:
        section_dict = init_dict[section]
        if "params" in section_dict.keys():
            section_dict["params"] = np.array(section_dict["params"])
            init_values.extend(list(section_dict["params"]))
        else:
            init_values.extend([0.0] * len(section_dict["order"]))

    return init_dict
def provide_auxiliary_information(init_dict, init_values):
    """Attach the AUX section (counts, labels, starting values).

    ``num_covars`` counts the distinct covariates across the three sections;
    ``labels`` keeps section order, dropping labels already seen in an
    earlier section.
    """
    num_covars = len(
        set(
            init_dict["TREATED"]["order"]
            + init_dict["UNTREATED"]["order"]
            + init_dict["CHOICE"]["order"]
        )
    )

    # Per-section comprehension (not per-item) to preserve the original
    # handling of duplicates inside a single section's order list.
    covar_label = []
    for section in ["TREATED", "UNTREATED", "CHOICE"]:
        fresh = [i for i in init_dict[section]["order"] if i not in covar_label]
        covar_label += fresh

    # Generate the AUX section holding the derived auxiliary information.
    init_dict["AUX"] = {
        "init_values": np.array(init_values),
        "num_covars_choice": len(init_dict["CHOICE"]["order"]),
        "num_covars_treated": len(init_dict["TREATED"]["order"]),
        "num_covars_untreated": len(init_dict["UNTREATED"]["order"]),
        "num_paras": len(init_values) + 1,
        "num_covars": num_covars,
        "labels": covar_label,
    }

    return init_dict
| 32.669118 | 87 | 0.625703 | import numpy as np
def create_attr_dict_est(init_dict, semipar=False, include_constant=False):
    """Prepare the imported initialization dict for estimation."""
    # Placeholder only; replaced by provide_auxiliary_information() below.
    init_dict["AUX"] = {"init_values"}
    start_values = []

    # The constant is added identically in both branches, so hoist it.
    if include_constant is True:
        init_dict = add_constant(init_dict, semipar)

    if semipar is True:
        init_dict = read_keys_semipar(init_dict, start_values)
    else:
        init_dict = read_keys_par(init_dict, start_values)

    return provide_auxiliary_information(init_dict, start_values)
def create_attr_dict_sim(init_dict):
    """Prepare the imported initialization dict for simulation/estimation."""
    # Placeholder only; replaced by provide_auxiliary_information() below.
    init_dict["AUX"] = {"init_values"}
    start_values = []
    init_dict = read_keys_par(init_dict, start_values)
    return provide_auxiliary_information(init_dict, start_values)
def add_constant(init_dict, semipar=False):
    """Ensure a constant regressor is present in the relevant sections."""
    # Semiparametric estimation only needs the constant in CHOICE.
    if semipar is True:
        targets = ["CHOICE"]
    else:
        targets = ["TREATED", "UNTREATED", "CHOICE"]

    for name in targets:
        order = init_dict[name]["order"]
        if "const" in order:
            continue
        order.insert(0, "const")
        init_dict[name]["params"] = np.array([1.0])

    return init_dict
def read_keys_par(init_dict, init_values):
    """Collect starting values from the parametric sections."""
    for name in ["TREATED", "UNTREATED", "CHOICE", "DIST"]:
        entry = init_dict[name]
        if "params" in entry.keys():
            entry["params"] = np.array(entry["params"])
            init_values.extend(list(entry["params"]))
        else:
            init_values.extend([0.0] * len(entry["order"]))

    # All-zero DIST parameters imply a model without unobservables.
    init_dict["DETERMINISTIC"] = bool(np.all(init_dict["DIST"]["params"] == 0))

    return init_dict
def read_keys_semipar(init_dict, init_values):
    """Collect starting values from the semiparametric sections."""
    for name in ["TREATED", "UNTREATED", "CHOICE"]:
        entry = init_dict[name]
        if "params" in entry.keys():
            entry["params"] = np.array(entry["params"])
            init_values.extend(list(entry["params"]))
        else:
            init_values.extend([0.0] * len(entry["order"]))

    return init_dict
def provide_auxiliary_information(init_dict, init_values):
    """Attach the AUX section (counts, labels, starting values)."""
    distinct = set(
        init_dict["TREATED"]["order"]
        + init_dict["UNTREATED"]["order"]
        + init_dict["CHOICE"]["order"]
    )
    num_covars = len(distinct)

    # Per-section comprehension (not per-item) to preserve the original
    # handling of duplicates inside a single section's order list.
    covar_label = []
    for name in ["TREATED", "UNTREATED", "CHOICE"]:
        covar_label += [i for i in init_dict[name]["order"]
                        if i not in covar_label]

    # Generate the AUX section holding the derived auxiliary information.
    init_dict["AUX"] = {
        "init_values": np.array(init_values),
        "num_covars_choice": len(init_dict["CHOICE"]["order"]),
        "num_covars_treated": len(init_dict["TREATED"]["order"]),
        "num_covars_untreated": len(init_dict["UNTREATED"]["order"]),
        "num_paras": len(init_values) + 1,
        "num_covars": num_covars,
        "labels": covar_label,
    }

    return init_dict
| true | true |
f738952d6e915761ad398d3ac89ddb1b3550e307 | 22,054 | py | Python | tensorflow_hub/feature_column_test.py | mwizasimbeye11/hub | d743b0f14ee538e8bb50006895779b048d0f4db1 | [
"Apache-2.0"
] | null | null | null | tensorflow_hub/feature_column_test.py | mwizasimbeye11/hub | d743b0f14ee538e8bb50006895779b048d0f4db1 | [
"Apache-2.0"
] | null | null | null | tensorflow_hub/feature_column_test.py | mwizasimbeye11/hub | d743b0f14ee538e8bb50006895779b048d0f4db1 | [
"Apache-2.0"
] | null | null | null | # Copyright 2018 The TensorFlow Hub Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Tests for tensorflow_hub.feature_column."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
# pylint:disable=g-import-not-at-top,g-statement-before-imports
try:
import mock as mock
except ImportError:
import unittest.mock as mock
# pylint:disable=g-import-not-at-top,g-statement-before-imports
import os
import numpy as np
import tensorflow as tf
import tensorflow_hub as hub
from tensorflow_hub import tf_v1
# pylint: disable=g-direct-tensorflow-import
from tensorflow.python.feature_column import feature_column_v2
from tensorflow.python.ops.lookup_ops import HashTable
from tensorflow.python.ops.lookup_ops import KeyValueTensorInitializer
# pylint: enable=g-direct-tensorflow-import
def text_module_fn():
  """Builds a toy text-embedding module with a three-entry vocabulary.

  "hello world" and "pair-programming" map to fixed 4-d vectors; any other
  string falls back to the all-zero OOV row.
  """
  vocab = [
      ("", [0, 0, 0, 0]),  # OOV items are mapped to this embedding.
      ("hello world", [1, 2, 3, 4]),
      ("pair-programming", [5, 5, 5, 5]),
  ]
  keys = tf.constant([token for token, _ in vocab], dtype=tf.string)
  row_ids = tf.constant(list(range(len(vocab))), dtype=tf.int64)
  table = HashTable(KeyValueTensorInitializer(keys, row_ids), 0)

  initial_weights = tf.cast(
      tf.constant([vector for _, vector in vocab]), tf.float32)
  weights = tf_v1.get_variable(
      "weights", dtype=tf.float32, initializer=initial_weights)

  text_input = tf_v1.placeholder(dtype=tf.string, name="text", shape=[None])
  embeddings = tf.gather(weights, table.lookup(text_input))
  hub.add_signature(inputs=text_input, outputs=embeddings)
def invalid_text_module_fn():
  """A module whose input has a fixed batch size, used to trigger errors."""
  fixed_batch_text = tf_v1.placeholder(tf.string, shape=[10])
  hub.add_signature(inputs=fixed_batch_text, outputs=tf.zeros([10, 3]))
def export_module_spec(spec, export_path):
  """Export module with random initialization."""
  with tf_v1.Graph().as_default():
    module = hub.Module(spec)
    with tf_v1.Session() as session:
      session.run(tf_v1.initializers.global_variables())
      module.export(export_path, session)
class CommonColumnTest(tf.test.TestCase):
  """Tests feature-column-v2 detection shared by all hub columns."""

  def setUp(self):
    self.spec = hub.create_module_spec(text_module_fn)

  @mock.patch.object(feature_column_v2._StateManagerImpl, "add_resource")
  def testFeatureColumnsWithResources(self, mock_add_resource):
    # With a state manager that supports resources, the hub column should
    # qualify as a feature-column-v2 column.
    feature_column = hub.text_embedding_column("text_a", self.spec)
    if not isinstance(feature_column, feature_column_v2.FeatureColumn):
      self.skipTest("Resources not implemented in the state manager of feature "
                    "column v2.")
    self.assertTrue(feature_column_v2.is_feature_column_v2([feature_column]))

  @mock.patch.object(feature_column_v2._StateManagerImpl, "add_resource")
  def testFeatureColumnsWithNoResources(self, mock_add_resource):
    # Simulate a state manager without resource support; the column must
    # then not be treated as a v2 column.
    mock_add_resource.side_effect = NotImplementedError
    feature_column = hub.text_embedding_column("text_a", self.spec)
    self.assertFalse(feature_column_v2.is_feature_column_v2([feature_column]))
class TextEmbeddingColumnTest(tf.test.TestCase):
  """Tests hub.text_embedding_column against the toy text module above."""

  def setUp(self):
    self.spec = hub.create_module_spec(text_module_fn)

  def testVariableShape(self):
    text_column = hub.text_embedding_column("text", self.spec, trainable=False)
    self.assertEqual(text_column._variable_shape, [4])

  def testParents(self):
    text_column = hub.text_embedding_column("text", self.spec, trainable=False)
    self.assertEqual(["text"], text_column.parents)

  def testMakeParseExampleSpec(self):
    text_column = hub.text_embedding_column("text", self.spec, trainable=False)
    parsing_spec = tf_v1.feature_column.make_parse_example_spec([text_column])
    self.assertEqual(parsing_spec,
                     {"text": tf_v1.FixedLenFeature([1], dtype=tf.string)})

  def testInputLayer(self):
    # Two columns sharing the same spec; the second batch item of "text_b"
    # is out-of-vocabulary and must map to the zero row.
    features = {
        "text_a": ["hello world", "pair-programming"],
        "text_b": ["hello world", "oov token"],
    }
    feature_columns = [
        hub.text_embedding_column("text_a", self.spec, trainable=False),
        hub.text_embedding_column("text_b", self.spec, trainable=False),
    ]
    with tf.Graph().as_default():
      input_layer = tf_v1.feature_column.input_layer(features, feature_columns)
      with tf_v1.train.MonitoredSession() as sess:
        output = sess.run(input_layer)
        self.assertAllEqual(
            output, [[1, 2, 3, 4, 1, 2, 3, 4], [5, 5, 5, 5, 0, 0, 0, 0]])

  def testDenseFeatures(self):
    features = {
        "text_a": ["hello world", "pair-programming"],
        "text_b": ["hello world", "oov token"],
    }
    feature_columns = [
        hub.text_embedding_column("text_a", self.spec, trainable=False),
        hub.text_embedding_column("text_b", self.spec, trainable=False),
    ]
    if not feature_column_v2.is_feature_column_v2(feature_columns):
      self.skipTest("Resources not implemented in the state manager of feature "
                    "column v2.")
    with tf.Graph().as_default():
      # We want to test with dense_features_v2.DenseFeatures. This symbol was
      # added in https://github.com/tensorflow/tensorflow/commit/64586f18724f737393071125a91b19adf013cf8a.
      feature_layer = tf.compat.v2.keras.layers.DenseFeatures(feature_columns)
      feature_layer_out = feature_layer(features)
      with tf_v1.train.MonitoredSession() as sess:
        output = sess.run(feature_layer_out)
        self.assertAllEqual(
            output, [[1, 2, 3, 4, 1, 2, 3, 4], [5, 5, 5, 5, 0, 0, 0, 0]])

  def testDenseFeatures_shareAcrossApplication(self):
    features = {
        "text": ["hello world", "pair-programming"],
    }
    feature_columns = [
        hub.text_embedding_column("text", self.spec, trainable=True),
    ]
    if not feature_column_v2.is_feature_column_v2(feature_columns):
      self.skipTest("Resources not implemented in the state manager of feature "
                    "column v2.")
    with tf.Graph().as_default():
      # We want to test with dense_features_v2.DenseFeatures. This symbol was
      # added in https://github.com/tensorflow/tensorflow/commit/64586f18724f737393071125a91b19adf013cf8a.
      feature_layer = tf.compat.v2.keras.layers.DenseFeatures(feature_columns)
      feature_layer_out_1 = feature_layer(features)
      feature_layer_out_2 = feature_layer(features)

      # We define loss only on the first layer. Since layers should have shared
      # weights, we expect the second layer will change too.
      loss = feature_layer_out_1 - tf.constant(0.005)
      optimizer = tf_v1.train.GradientDescentOptimizer(learning_rate=0.7)
      train_op = optimizer.minimize(loss)

      with tf_v1.train.MonitoredSession() as sess:
        before_update_1 = sess.run(feature_layer_out_1)
        sess.run(train_op)
        after_update_1 = sess.run(feature_layer_out_1)
        after_update_2 = sess.run(feature_layer_out_2)

        self.assertAllEqual(before_update_1, [[1, 2, 3, 4],
                                              [5, 5, 5, 5]])
        self.assertAllEqual(after_update_1, after_update_2)

  def testWorksWithCannedEstimator(self):
    comment_embedding_column = hub.text_embedding_column(
        "comment", self.spec, trainable=False)
    upvotes = tf_v1.feature_column.numeric_column("upvotes")

    feature_columns = [comment_embedding_column, upvotes]
    estimator = tf_v1.estimator.DNNClassifier(
        hidden_units=[10],
        feature_columns=feature_columns,
        model_dir=self.get_temp_dir())

    # This only tests that estimator apis are working with the feature
    # column without throwing exceptions.
    features = {
        "comment": np.array([
            ["the quick brown fox"],
            ["spam spam spam"],
        ]),
        "upvotes": np.array([
            [20],
            [1],
        ]),
    }
    labels = np.array([[1], [0]])
    numpy_input_fn = tf_v1.estimator.inputs.numpy_input_fn
    input_fn = numpy_input_fn(features, labels, shuffle=True)
    estimator.train(input_fn, max_steps=1)
    estimator.evaluate(input_fn, steps=1)
    estimator.predict(input_fn)

  def testTrainableEmbeddingColumn(self):
    feature_columns = [
        hub.text_embedding_column("text", self.spec, trainable=True),
    ]

    with tf.Graph().as_default():
      features = {
          "text": ["hello world", "pair-programming"],
      }
      target = [[1, 1, 1, 1], [4, 3, 2, 1]]
      input_layer = tf_v1.feature_column.input_layer(features, feature_columns)

      loss = tf.cast(
          tf_v1.losses.mean_squared_error(input_layer, target), tf.float64)
      optimizer = tf_v1.train.GradientDescentOptimizer(learning_rate=0.97)
      train_op = optimizer.minimize(loss)

      with tf_v1.train.MonitoredSession() as sess:
        # Before training the embeddings equal the module's initial weights;
        # after a few steps they should have moved close to `target`.
        self.assertAllEqual(sess.run(input_layer), [[1, 2, 3, 4], [5, 5, 5, 5]])
        for _ in range(10):
          sess.run(train_op)
        self.assertAllClose(sess.run(input_layer), target, atol=0.5)

  def testInvalidTextModule(self):
    spec = hub.create_module_spec(invalid_text_module_fn)
    with self.assertRaisesRegexp(ValueError, "only one input"):
      hub.text_embedding_column("coment", spec, trainable=False)

  def testConfig(self):
    # get_config works for columns built from a module path, but raises
    # NotImplementedError for columns built from an in-memory spec.
    module_path = os.path.join(self.get_temp_dir(), "module")
    export_module_spec(self.spec, module_path)
    text_column = hub.text_embedding_column("text", module_path)
    config = text_column.get_config()
    cloned_text_column = hub.feature_column._TextEmbeddingColumn.from_config(
        config)
    self.assertEqual(cloned_text_column.module_spec_path,
                     text_column.module_spec_path)

    with self.assertRaisesRegexp(NotImplementedError, "Can only generate"):
      text_column = hub.text_embedding_column("text", self.spec)
      config = text_column.get_config()
def create_image_module_fn(randomly_initialized=False):
  """Returns a module_fn mapping 1x2 images to weighted per-channel sums."""

  def image_module_fn():
    """Maps 1x2 images to sums of each color channel."""
    images = tf_v1.placeholder(dtype=tf.float32, shape=[None, 1, 2, 3])
    if randomly_initialized:
      init = tf_v1.random_uniform_initializer(
          minval=-1, maxval=1, dtype=tf.float32)
    else:
      init = tf_v1.constant_initializer(1.0, dtype=tf.float32)
    scale = tf_v1.get_variable(name="weight", shape=[1], initializer=init)
    channel_sums = tf.reduce_sum(images, axis=[1, 2]) * scale
    hub.add_signature(inputs={"images": images}, outputs=channel_sums)

  return image_module_fn
class ImageEmbeddingColumnTest(tf.test.TestCase):
  """Tests hub.image_embedding_column against the toy image module above."""

  def setUp(self):
    self.spec = hub.create_module_spec(create_image_module_fn())
    self.randomly_initialized_spec = hub.create_module_spec(
        create_image_module_fn(randomly_initialized=True))

  def testExpectedImageSize(self):
    image_column = hub.image_embedding_column("image", self.spec)
    # The usage comment recommends this code pattern, so we test it here.
    self.assertSequenceEqual(
        hub.get_expected_image_size(image_column.module_spec), [1, 2])

  def testVariableShape(self):
    image_column = hub.image_embedding_column("image", self.spec)
    self.assertEqual(image_column.variable_shape, [3])

  def testParents(self):
    image_column = hub.image_embedding_column("image", self.spec)
    self.assertEqual(["image"], image_column.parents)

  def testMakeParseExampleSpec(self):
    image_column = hub.image_embedding_column("image", self.spec)
    parsing_spec = tf_v1.feature_column.make_parse_example_spec([image_column])
    self.assertEqual(
        parsing_spec,
        {"image": tf_v1.FixedLenFeature([1, 2, 3], dtype=tf.float32)})

  def testInputLayer(self):
    # Expected outputs are the per-channel sums of each 1x2 image.
    features = {
        "image_a": [[[[0.1, 0.2, 0.3], [0.4, 0.5, 0.6]]],
                    [[[0.7, 0.7, 0.7], [0.1, 0.2, 0.3]]]],
        "image_b": [[[[0.1, 0.2, 0.1], [0.2, 0.1, 0.2]]],
                    [[[0.1, 0.2, 0.3], [0.3, 0.2, 0.1]]]],
    }
    feature_columns = [
        hub.image_embedding_column("image_a", self.spec),
        hub.image_embedding_column("image_b", self.spec),
    ]
    with tf.Graph().as_default():
      input_layer = tf_v1.feature_column.input_layer(features, feature_columns)
      with tf_v1.train.MonitoredSession() as sess:
        output = sess.run(input_layer)
        self.assertAllClose(
            output,
            [[0.5, 0.7, 0.9, 0.3, 0.3, 0.3], [0.8, 0.9, 1.0, 0.4, 0.4, 0.4]])

  def testDenseFeatures(self):
    features = {
        "image_a": [[[[0.1, 0.2, 0.3], [0.4, 0.5, 0.6]]],
                    [[[0.7, 0.7, 0.7], [0.1, 0.2, 0.3]]]],
        "image_b": [[[[0.1, 0.2, 0.1], [0.2, 0.1, 0.2]]],
                    [[[0.1, 0.2, 0.3], [0.3, 0.2, 0.1]]]],
    }
    feature_columns = [
        hub.image_embedding_column("image_a", self.spec),
        hub.image_embedding_column("image_b", self.spec),
    ]
    if not feature_column_v2.is_feature_column_v2(feature_columns):
      self.skipTest("Resources not implemented in the state manager of feature "
                    "column v2.")
    with tf.Graph().as_default():
      # We want to test with dense_features_v2.DenseFeatures. This symbol was
      # added in https://github.com/tensorflow/tensorflow/commit/64586f18724f737393071125a91b19adf013cf8a.
      feature_layer = tf.compat.v2.keras.layers.DenseFeatures(feature_columns)
      feature_layer_out = feature_layer(features)
      with tf_v1.train.MonitoredSession() as sess:
        output = sess.run(feature_layer_out)
        self.assertAllClose(
            output,
            [[0.5, 0.7, 0.9, 0.3, 0.3, 0.3], [0.8, 0.9, 1.0, 0.4, 0.4, 0.4]])

  def testDenseFeatures_shareAcrossApplication(self):
    # Two applications of the same layer must reuse the same (randomly
    # initialized) module variables and therefore agree exactly.
    features = {
        "image": [[[[0.1, 0.2, 0.3], [0.4, 0.5, 0.6]]],
                  [[[0.7, 0.7, 0.7], [0.1, 0.2, 0.3]]]],
    }
    feature_columns = [
        hub.image_embedding_column("image", self.randomly_initialized_spec),
    ]
    if not feature_column_v2.is_feature_column_v2(feature_columns):
      self.skipTest("Resources not implemented in the state manager of feature "
                    "column v2.")
    with tf.Graph().as_default():
      # We want to test with dense_features_v2.DenseFeatures. This symbol was
      # added in https://github.com/tensorflow/tensorflow/commit/64586f18724f737393071125a91b19adf013cf8a.
      feature_layer = tf.compat.v2.keras.layers.DenseFeatures(feature_columns)
      feature_layer_out_1 = feature_layer(features)
      feature_layer_out_2 = feature_layer(features)

      with tf_v1.train.MonitoredSession() as sess:
        output_1 = sess.run(feature_layer_out_1)
        output_2 = sess.run(feature_layer_out_2)

        self.assertAllClose(output_1, output_2)

  def testWorksWithCannedEstimator(self):
    image_column = hub.image_embedding_column("image", self.spec)
    other_column = tf_v1.feature_column.numeric_column("number")

    feature_columns = [image_column, other_column]
    estimator = tf_v1.estimator.DNNClassifier(
        hidden_units=[10],
        feature_columns=feature_columns,
        model_dir=self.get_temp_dir())

    # This only tests that estimator apis are working with the feature
    # column without throwing exceptions.
    features = {
        "image":
            np.array([[[[0.1, 0.2, 0.3], [0.4, 0.5, 0.6]]],
                      [[[0.7, 0.7, 0.7], [0.1, 0.2, 0.3]]]],
                     dtype=np.float32),
        "number":
            np.array([[20], [1]]),
    }
    labels = np.array([[1], [0]])
    numpy_input_fn = tf_v1.estimator.inputs.numpy_input_fn
    input_fn = numpy_input_fn(features, labels, shuffle=True)
    estimator.train(input_fn, max_steps=1)
    estimator.evaluate(input_fn, steps=1)
    estimator.predict(input_fn)

  def testConfig(self):
    # get_config works for columns built from a module path, but raises
    # NotImplementedError for columns built from an in-memory spec.
    module_path = os.path.join(self.get_temp_dir(), "module")
    export_module_spec(self.spec, module_path)
    image_column = hub.image_embedding_column("image", module_path)
    config = image_column.get_config()
    cloned_image_column = hub.feature_column._ImageEmbeddingColumn.from_config(
        config)
    self.assertEqual(cloned_image_column.module_spec_path,
                     image_column.module_spec_path)

    with self.assertRaisesRegexp(NotImplementedError, "Can only generate"):
      image_column = hub.image_embedding_column("image", self.spec)
      config = image_column.get_config()

  def testName(self):
    image_column = hub.image_embedding_column(
        tf.feature_column.numeric_column("image"), self.spec)
    self.assertEqual("image_hub_module_embedding", image_column.name)
class SparseTextEmbeddingColumnTest(tf.test.TestCase):
  """Tests hub.sparse_text_embedding_column with the toy text module."""

  def setUp(self):
    self.spec = hub.create_module_spec(text_module_fn)

  def testVariableShape(self):
    text_column = hub.sparse_text_embedding_column(
        "text", self.spec, combiner="mean", default_value=None, trainable=False)
    self.assertEqual(text_column._variable_shape, [4])

  def testMakeParseExampleSpec(self):
    text_column = hub.sparse_text_embedding_column(
        "text", self.spec, combiner="mean", default_value=None, trainable=False)
    parsing_spec = tf_v1.feature_column.make_parse_example_spec([text_column])
    self.assertEqual(parsing_spec, {"text": tf_v1.VarLenFeature(tf.string)})

  def testParents(self):
    text_column = hub.sparse_text_embedding_column(
        "text", self.spec, "sum", "", trainable=False)
    self.assertEqual(["text"], text_column.parents)

  def testInputLayer(self):
    with tf.Graph().as_default():
      # Ragged batches: row 0 of text_a has two tokens, row 1 has one.
      text_a = tf.SparseTensor(
          values=["hello world", "pair-programming", "hello world"],
          indices=[[0, 0], [0, 1], [1, 0]],
          dense_shape=[2, 2])
      text_b = tf.SparseTensor(
          values=["hello world", "oov token"],
          indices=[[0, 0], [0, 1]],
          dense_shape=[2, 3])

      features = {
          "text_a": text_a,
          "text_b": text_b,
      }
      feature_columns = [
          hub.sparse_text_embedding_column(
              "text_a",
              self.spec,
              combiner="mean",
              default_value="__UNKNOWN__",
              trainable=False),
          hub.sparse_text_embedding_column(
              "text_b",
              self.spec,
              combiner="mean",
              default_value="__UNKNOWN__",
              trainable=False),
      ]
      input_layer = tf_v1.feature_column.input_layer(features, feature_columns)
      with tf_v1.train.MonitoredSession() as sess:
        output = sess.run(input_layer)
        self.assertAllEqual(
            output,
            [[3, 3.5, 4, 4.5, 0.5, 1, 1.5, 2], [1, 2, 3, 4, 0, 0, 0, 0]])
        # ([1, 2, 3, 4] + [5, 5, 5, 5])/2 extend ([1, 2, 3, 4] + [0, 0, 0, 0])/2
        # [1, 2, 3, 4] extend [0, 0, 0, 0]

  def testTrainableEmbeddingColumn(self):
    feature_columns = [
        hub.sparse_text_embedding_column(
            "text",
            self.spec,
            combiner="mean",
            default_value=None,
            trainable=True),
    ]

    with tf.Graph().as_default():
      text = tf.SparseTensor(
          values=["hello world", "pair-programming"],
          indices=[[0, 0], [1, 0]],
          dense_shape=[2, 2])
      target = [[1, 1, 1, 1], [4, 3, 2, 1]]
      input_layer = tf_v1.feature_column.input_layer({"text": text},
                                                     feature_columns)

      loss = tf_v1.losses.mean_squared_error(input_layer, target)
      optimizer = tf_v1.train.GradientDescentOptimizer(learning_rate=0.97)
      train_op = optimizer.minimize(loss)

      with tf_v1.train.MonitoredSession() as sess:
        # After a few gradient steps, the trainable embeddings approach
        # the regression target.
        self.assertAllEqual(sess.run(input_layer), [[1, 2, 3, 4], [5, 5, 5, 5]])
        for _ in range(10):
          sess.run(train_op)
        self.assertAllClose(sess.run(input_layer), target, atol=0.5)

  def testEmptySparseTensorBatch(self):
    # A batch with no tokens at all must embed every row to zeros.
    feature_columns = [
        hub.sparse_text_embedding_column(
            "text",
            self.spec,
            combiner="mean",
            default_value="default",
            trainable=True),
    ]

    with tf.Graph().as_default():
      text = tf.SparseTensor(
          values=tf_v1.constant([], dtype=tf_v1.string, shape=[0]),
          indices=tf_v1.constant([], dtype=tf_v1.int64, shape=[0, 2]),
          dense_shape=[3, 0])

      input_layer = tf_v1.feature_column.input_layer({"text": text},
                                                     feature_columns)

      with tf_v1.train.MonitoredSession() as sess:
        embeddings = sess.run(input_layer)
        self.assertAllEqual(embeddings,
                            [[0, 0, 0, 0], [0, 0, 0, 0], [0, 0, 0, 0]])

  def testEmptySparseTensorRow(self):
    # A single empty row inside a non-empty batch also embeds to zeros.
    feature_columns = [
        hub.sparse_text_embedding_column(
            "text",
            self.spec,
            combiner="mean",
            default_value="default",
            trainable=True),
    ]

    with tf.Graph().as_default():
      text = tf.SparseTensor(
          values=tf_v1.constant(["hello world"], dtype=tf_v1.string, shape=[1]),
          indices=tf_v1.constant([[0, 0]], dtype=tf_v1.int64, shape=[1, 2]),
          dense_shape=[2, 1])

      input_layer = tf_v1.feature_column.input_layer({"text": text},
                                                     feature_columns)

      with tf_v1.train.MonitoredSession() as sess:
        embeddings = sess.run(input_layer)
        self.assertAllEqual(embeddings, [[1, 2, 3, 4], [0, 0, 0, 0]])
# Test-suite entry point.
if __name__ == "__main__":
  tf.test.main()
| 39.382143 | 106 | 0.65657 |
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
try:
import mock as mock
except ImportError:
import unittest.mock as mock
import os
import numpy as np
import tensorflow as tf
import tensorflow_hub as hub
from tensorflow_hub import tf_v1
from tensorflow.python.feature_column import feature_column_v2
from tensorflow.python.ops.lookup_ops import HashTable
from tensorflow.python.ops.lookup_ops import KeyValueTensorInitializer
def text_module_fn():
  """Builds a toy text-embedding module with a three-entry vocabulary."""
  vocab = [
      ("", [0, 0, 0, 0]),  # OOV bucket: unknown strings map here.
      ("hello world", [1, 2, 3, 4]),
      ("pair-programming", [5, 5, 5, 5]),
  ]
  keys = tf.constant([token for token, _ in vocab], dtype=tf.string)
  row_ids = tf.constant(list(range(len(vocab))), dtype=tf.int64)
  table = HashTable(KeyValueTensorInitializer(keys, row_ids), 0)

  initial_weights = tf.cast(
      tf.constant([vector for _, vector in vocab]), tf.float32)
  weights = tf_v1.get_variable(
      "weights", dtype=tf.float32, initializer=initial_weights)

  text_input = tf_v1.placeholder(dtype=tf.string, name="text", shape=[None])
  embeddings = tf.gather(weights, table.lookup(text_input))
  hub.add_signature(inputs=text_input, outputs=embeddings)
def invalid_text_module_fn():
text = tf_v1.placeholder(tf.string, shape=[10])
hub.add_signature(inputs=text, outputs=tf.zeros([10, 3]))
def export_module_spec(spec, export_path):
with tf_v1.Graph().as_default():
m = hub.Module(spec)
with tf_v1.Session() as session:
session.run(tf_v1.initializers.global_variables())
m.export(export_path, session)
class CommonColumnTest(tf.test.TestCase):
def setUp(self):
self.spec = hub.create_module_spec(text_module_fn)
@mock.patch.object(feature_column_v2._StateManagerImpl, "add_resource")
def testFeatureColumnsWithResources(self, mock_add_resource):
feature_column = hub.text_embedding_column("text_a", self.spec)
if not isinstance(feature_column, feature_column_v2.FeatureColumn):
self.skipTest("Resources not implemented in the state manager of feature "
"column v2.")
self.assertTrue(feature_column_v2.is_feature_column_v2([feature_column]))
@mock.patch.object(feature_column_v2._StateManagerImpl, "add_resource")
def testFeatureColumnsWithNoResources(self, mock_add_resource):
mock_add_resource.side_effect = NotImplementedError
feature_column = hub.text_embedding_column("text_a", self.spec)
self.assertFalse(feature_column_v2.is_feature_column_v2([feature_column]))
class TextEmbeddingColumnTest(tf.test.TestCase):
def setUp(self):
self.spec = hub.create_module_spec(text_module_fn)
def testVariableShape(self):
text_column = hub.text_embedding_column("text", self.spec, trainable=False)
self.assertEqual(text_column._variable_shape, [4])
def testParents(self):
text_column = hub.text_embedding_column("text", self.spec, trainable=False)
self.assertEqual(["text"], text_column.parents)
def testMakeParseExampleSpec(self):
text_column = hub.text_embedding_column("text", self.spec, trainable=False)
parsing_spec = tf_v1.feature_column.make_parse_example_spec([text_column])
self.assertEqual(parsing_spec,
{"text": tf_v1.FixedLenFeature([1], dtype=tf.string)})
def testInputLayer(self):
features = {
"text_a": ["hello world", "pair-programming"],
"text_b": ["hello world", "oov token"],
}
feature_columns = [
hub.text_embedding_column("text_a", self.spec, trainable=False),
hub.text_embedding_column("text_b", self.spec, trainable=False),
]
with tf.Graph().as_default():
input_layer = tf_v1.feature_column.input_layer(features, feature_columns)
with tf_v1.train.MonitoredSession() as sess:
output = sess.run(input_layer)
self.assertAllEqual(
output, [[1, 2, 3, 4, 1, 2, 3, 4], [5, 5, 5, 5, 0, 0, 0, 0]])
def testDenseFeatures(self):
features = {
"text_a": ["hello world", "pair-programming"],
"text_b": ["hello world", "oov token"],
}
feature_columns = [
hub.text_embedding_column("text_a", self.spec, trainable=False),
hub.text_embedding_column("text_b", self.spec, trainable=False),
]
if not feature_column_v2.is_feature_column_v2(feature_columns):
self.skipTest("Resources not implemented in the state manager of feature "
"column v2.")
with tf.Graph().as_default():
feature_layer = tf.compat.v2.keras.layers.DenseFeatures(feature_columns)
feature_layer_out = feature_layer(features)
with tf_v1.train.MonitoredSession() as sess:
output = sess.run(feature_layer_out)
self.assertAllEqual(
output, [[1, 2, 3, 4, 1, 2, 3, 4], [5, 5, 5, 5, 0, 0, 0, 0]])
def testDenseFeatures_shareAcrossApplication(self):
features = {
"text": ["hello world", "pair-programming"],
}
feature_columns = [
hub.text_embedding_column("text", self.spec, trainable=True),
]
if not feature_column_v2.is_feature_column_v2(feature_columns):
self.skipTest("Resources not implemented in the state manager of feature "
"column v2.")
with tf.Graph().as_default():
feature_layer = tf.compat.v2.keras.layers.DenseFeatures(feature_columns)
feature_layer_out_1 = feature_layer(features)
feature_layer_out_2 = feature_layer(features)
loss = feature_layer_out_1 - tf.constant(0.005)
optimizer = tf_v1.train.GradientDescentOptimizer(learning_rate=0.7)
train_op = optimizer.minimize(loss)
with tf_v1.train.MonitoredSession() as sess:
before_update_1 = sess.run(feature_layer_out_1)
sess.run(train_op)
after_update_1 = sess.run(feature_layer_out_1)
after_update_2 = sess.run(feature_layer_out_2)
self.assertAllEqual(before_update_1, [[1, 2, 3, 4],
[5, 5, 5, 5]])
self.assertAllEqual(after_update_1, after_update_2)
def testWorksWithCannedEstimator(self):
comment_embedding_column = hub.text_embedding_column(
"comment", self.spec, trainable=False)
upvotes = tf_v1.feature_column.numeric_column("upvotes")
feature_columns = [comment_embedding_column, upvotes]
estimator = tf_v1.estimator.DNNClassifier(
hidden_units=[10],
feature_columns=feature_columns,
model_dir=self.get_temp_dir())
features = {
"comment": np.array([
["the quick brown fox"],
["spam spam spam"],
]),
"upvotes": np.array([
[20],
[1],
]),
}
labels = np.array([[1], [0]])
numpy_input_fn = tf_v1.estimator.inputs.numpy_input_fn
input_fn = numpy_input_fn(features, labels, shuffle=True)
estimator.train(input_fn, max_steps=1)
estimator.evaluate(input_fn, steps=1)
estimator.predict(input_fn)
def testTrainableEmbeddingColumn(self):
feature_columns = [
hub.text_embedding_column("text", self.spec, trainable=True),
]
with tf.Graph().as_default():
features = {
"text": ["hello world", "pair-programming"],
}
target = [[1, 1, 1, 1], [4, 3, 2, 1]]
input_layer = tf_v1.feature_column.input_layer(features, feature_columns)
loss = tf.cast(
tf_v1.losses.mean_squared_error(input_layer, target), tf.float64)
optimizer = tf_v1.train.GradientDescentOptimizer(learning_rate=0.97)
train_op = optimizer.minimize(loss)
with tf_v1.train.MonitoredSession() as sess:
self.assertAllEqual(sess.run(input_layer), [[1, 2, 3, 4], [5, 5, 5, 5]])
for _ in range(10):
sess.run(train_op)
self.assertAllClose(sess.run(input_layer), target, atol=0.5)
def testInvalidTextModule(self):
spec = hub.create_module_spec(invalid_text_module_fn)
with self.assertRaisesRegexp(ValueError, "only one input"):
hub.text_embedding_column("coment", spec, trainable=False)
def testConfig(self):
module_path = os.path.join(self.get_temp_dir(), "module")
export_module_spec(self.spec, module_path)
text_column = hub.text_embedding_column("text", module_path)
config = text_column.get_config()
cloned_text_column = hub.feature_column._TextEmbeddingColumn.from_config(
config)
self.assertEqual(cloned_text_column.module_spec_path,
text_column.module_spec_path)
with self.assertRaisesRegexp(NotImplementedError, "Can only generate"):
text_column = hub.text_embedding_column("text", self.spec)
config = text_column.get_config()
def create_image_module_fn(randomly_initialized=False):
def image_module_fn():
images = tf_v1.placeholder(dtype=tf.float32, shape=[None, 1, 2, 3])
if randomly_initialized:
initializer = tf_v1.random_uniform_initializer(
minval=-1, maxval=1, dtype=tf.float32)
else:
initializer = tf_v1.constant_initializer(1.0, dtype=tf.float32)
weight = tf_v1.get_variable(
name="weight", shape=[1], initializer=initializer)
sum_channels = tf.reduce_sum(images, axis=[1, 2]) * weight
hub.add_signature(inputs={"images": images}, outputs=sum_channels)
return image_module_fn
class ImageEmbeddingColumnTest(tf.test.TestCase):
def setUp(self):
self.spec = hub.create_module_spec(create_image_module_fn())
self.randomly_initialized_spec = hub.create_module_spec(
create_image_module_fn(randomly_initialized=True))
def testExpectedImageSize(self):
image_column = hub.image_embedding_column("image", self.spec)
self.assertSequenceEqual(
hub.get_expected_image_size(image_column.module_spec), [1, 2])
def testVariableShape(self):
image_column = hub.image_embedding_column("image", self.spec)
self.assertEqual(image_column.variable_shape, [3])
def testParents(self):
image_column = hub.image_embedding_column("image", self.spec)
self.assertEqual(["image"], image_column.parents)
def testMakeParseExampleSpec(self):
image_column = hub.image_embedding_column("image", self.spec)
parsing_spec = tf_v1.feature_column.make_parse_example_spec([image_column])
self.assertEqual(
parsing_spec,
{"image": tf_v1.FixedLenFeature([1, 2, 3], dtype=tf.float32)})
def testInputLayer(self):
features = {
"image_a": [[[[0.1, 0.2, 0.3], [0.4, 0.5, 0.6]]],
[[[0.7, 0.7, 0.7], [0.1, 0.2, 0.3]]]],
"image_b": [[[[0.1, 0.2, 0.1], [0.2, 0.1, 0.2]]],
[[[0.1, 0.2, 0.3], [0.3, 0.2, 0.1]]]],
}
feature_columns = [
hub.image_embedding_column("image_a", self.spec),
hub.image_embedding_column("image_b", self.spec),
]
with tf.Graph().as_default():
input_layer = tf_v1.feature_column.input_layer(features, feature_columns)
with tf_v1.train.MonitoredSession() as sess:
output = sess.run(input_layer)
self.assertAllClose(
output,
[[0.5, 0.7, 0.9, 0.3, 0.3, 0.3], [0.8, 0.9, 1.0, 0.4, 0.4, 0.4]])
def testDenseFeatures(self):
features = {
"image_a": [[[[0.1, 0.2, 0.3], [0.4, 0.5, 0.6]]],
[[[0.7, 0.7, 0.7], [0.1, 0.2, 0.3]]]],
"image_b": [[[[0.1, 0.2, 0.1], [0.2, 0.1, 0.2]]],
[[[0.1, 0.2, 0.3], [0.3, 0.2, 0.1]]]],
}
feature_columns = [
hub.image_embedding_column("image_a", self.spec),
hub.image_embedding_column("image_b", self.spec),
]
if not feature_column_v2.is_feature_column_v2(feature_columns):
self.skipTest("Resources not implemented in the state manager of feature "
"column v2.")
with tf.Graph().as_default():
feature_layer = tf.compat.v2.keras.layers.DenseFeatures(feature_columns)
feature_layer_out = feature_layer(features)
with tf_v1.train.MonitoredSession() as sess:
output = sess.run(feature_layer_out)
self.assertAllClose(
output,
[[0.5, 0.7, 0.9, 0.3, 0.3, 0.3], [0.8, 0.9, 1.0, 0.4, 0.4, 0.4]])
def testDenseFeatures_shareAcrossApplication(self):
features = {
"image": [[[[0.1, 0.2, 0.3], [0.4, 0.5, 0.6]]],
[[[0.7, 0.7, 0.7], [0.1, 0.2, 0.3]]]],
}
feature_columns = [
hub.image_embedding_column("image", self.randomly_initialized_spec),
]
if not feature_column_v2.is_feature_column_v2(feature_columns):
self.skipTest("Resources not implemented in the state manager of feature "
"column v2.")
with tf.Graph().as_default():
feature_layer = tf.compat.v2.keras.layers.DenseFeatures(feature_columns)
feature_layer_out_1 = feature_layer(features)
feature_layer_out_2 = feature_layer(features)
with tf_v1.train.MonitoredSession() as sess:
output_1 = sess.run(feature_layer_out_1)
output_2 = sess.run(feature_layer_out_2)
self.assertAllClose(output_1, output_2)
def testWorksWithCannedEstimator(self):
image_column = hub.image_embedding_column("image", self.spec)
other_column = tf_v1.feature_column.numeric_column("number")
feature_columns = [image_column, other_column]
estimator = tf_v1.estimator.DNNClassifier(
hidden_units=[10],
feature_columns=feature_columns,
model_dir=self.get_temp_dir())
features = {
"image":
np.array([[[[0.1, 0.2, 0.3], [0.4, 0.5, 0.6]]],
[[[0.7, 0.7, 0.7], [0.1, 0.2, 0.3]]]],
dtype=np.float32),
"number":
np.array([[20], [1]]),
}
labels = np.array([[1], [0]])
numpy_input_fn = tf_v1.estimator.inputs.numpy_input_fn
input_fn = numpy_input_fn(features, labels, shuffle=True)
estimator.train(input_fn, max_steps=1)
estimator.evaluate(input_fn, steps=1)
estimator.predict(input_fn)
def testConfig(self):
module_path = os.path.join(self.get_temp_dir(), "module")
export_module_spec(self.spec, module_path)
image_column = hub.image_embedding_column("image", module_path)
config = image_column.get_config()
cloned_image_column = hub.feature_column._ImageEmbeddingColumn.from_config(
config)
self.assertEqual(cloned_image_column.module_spec_path,
image_column.module_spec_path)
with self.assertRaisesRegexp(NotImplementedError, "Can only generate"):
image_column = hub.image_embedding_column("image", self.spec)
config = image_column.get_config()
def testName(self):
image_column = hub.image_embedding_column(
tf.feature_column.numeric_column("image"), self.spec)
self.assertEqual("image_hub_module_embedding", image_column.name)
class SparseTextEmbeddingColumnTest(tf.test.TestCase):
def setUp(self):
self.spec = hub.create_module_spec(text_module_fn)
def testVariableShape(self):
text_column = hub.sparse_text_embedding_column(
"text", self.spec, combiner="mean", default_value=None, trainable=False)
self.assertEqual(text_column._variable_shape, [4])
def testMakeParseExampleSpec(self):
text_column = hub.sparse_text_embedding_column(
"text", self.spec, combiner="mean", default_value=None, trainable=False)
parsing_spec = tf_v1.feature_column.make_parse_example_spec([text_column])
self.assertEqual(parsing_spec, {"text": tf_v1.VarLenFeature(tf.string)})
def testParents(self):
text_column = hub.sparse_text_embedding_column(
"text", self.spec, "sum", "", trainable=False)
self.assertEqual(["text"], text_column.parents)
def testInputLayer(self):
with tf.Graph().as_default():
text_a = tf.SparseTensor(
values=["hello world", "pair-programming", "hello world"],
indices=[[0, 0], [0, 1], [1, 0]],
dense_shape=[2, 2])
text_b = tf.SparseTensor(
values=["hello world", "oov token"],
indices=[[0, 0], [0, 1]],
dense_shape=[2, 3])
features = {
"text_a": text_a,
"text_b": text_b,
}
feature_columns = [
hub.sparse_text_embedding_column(
"text_a",
self.spec,
combiner="mean",
default_value="__UNKNOWN__",
trainable=False),
hub.sparse_text_embedding_column(
"text_b",
self.spec,
combiner="mean",
default_value="__UNKNOWN__",
trainable=False),
]
input_layer = tf_v1.feature_column.input_layer(features, feature_columns)
with tf_v1.train.MonitoredSession() as sess:
output = sess.run(input_layer)
self.assertAllEqual(
output,
[[3, 3.5, 4, 4.5, 0.5, 1, 1.5, 2], [1, 2, 3, 4, 0, 0, 0, 0]])
def testTrainableEmbeddingColumn(self):
feature_columns = [
hub.sparse_text_embedding_column(
"text",
self.spec,
combiner="mean",
default_value=None,
trainable=True),
]
with tf.Graph().as_default():
text = tf.SparseTensor(
values=["hello world", "pair-programming"],
indices=[[0, 0], [1, 0]],
dense_shape=[2, 2])
target = [[1, 1, 1, 1], [4, 3, 2, 1]]
input_layer = tf_v1.feature_column.input_layer({"text": text},
feature_columns)
loss = tf_v1.losses.mean_squared_error(input_layer, target)
optimizer = tf_v1.train.GradientDescentOptimizer(learning_rate=0.97)
train_op = optimizer.minimize(loss)
with tf_v1.train.MonitoredSession() as sess:
self.assertAllEqual(sess.run(input_layer), [[1, 2, 3, 4], [5, 5, 5, 5]])
for _ in range(10):
sess.run(train_op)
self.assertAllClose(sess.run(input_layer), target, atol=0.5)
def testEmptySparseTensorBatch(self):
feature_columns = [
hub.sparse_text_embedding_column(
"text",
self.spec,
combiner="mean",
default_value="default",
trainable=True),
]
with tf.Graph().as_default():
text = tf.SparseTensor(
values=tf_v1.constant([], dtype=tf_v1.string, shape=[0]),
indices=tf_v1.constant([], dtype=tf_v1.int64, shape=[0, 2]),
dense_shape=[3, 0])
input_layer = tf_v1.feature_column.input_layer({"text": text},
feature_columns)
with tf_v1.train.MonitoredSession() as sess:
embeddings = sess.run(input_layer)
self.assertAllEqual(embeddings,
[[0, 0, 0, 0], [0, 0, 0, 0], [0, 0, 0, 0]])
def testEmptySparseTensorRow(self):
feature_columns = [
hub.sparse_text_embedding_column(
"text",
self.spec,
combiner="mean",
default_value="default",
trainable=True),
]
with tf.Graph().as_default():
text = tf.SparseTensor(
values=tf_v1.constant(["hello world"], dtype=tf_v1.string, shape=[1]),
indices=tf_v1.constant([[0, 0]], dtype=tf_v1.int64, shape=[1, 2]),
dense_shape=[2, 1])
input_layer = tf_v1.feature_column.input_layer({"text": text},
feature_columns)
with tf_v1.train.MonitoredSession() as sess:
embeddings = sess.run(input_layer)
self.assertAllEqual(embeddings, [[1, 2, 3, 4], [0, 0, 0, 0]])
if __name__ == "__main__":
tf.test.main()
| true | true |
f7389596bdb749b9575aff40037d9ad81d08005c | 1,260 | py | Python | qiskit/extensions/standard/x.py | jagunnels/qiskit-sdk-py | 153cdde972e65c0f23675bbe17c93e18be27bd51 | [
"Apache-2.0"
] | null | null | null | qiskit/extensions/standard/x.py | jagunnels/qiskit-sdk-py | 153cdde972e65c0f23675bbe17c93e18be27bd51 | [
"Apache-2.0"
] | null | null | null | qiskit/extensions/standard/x.py | jagunnels/qiskit-sdk-py | 153cdde972e65c0f23675bbe17c93e18be27bd51 | [
"Apache-2.0"
] | null | null | null | # -*- coding: utf-8 -*-
# Copyright 2017, IBM.
#
# This source code is licensed under the Apache License, Version 2.0 found in
# the LICENSE.txt file in the root directory of this source tree.
# pylint: disable=invalid-name
"""
Pauli X (bit-flip) gate.
"""
from qiskit.circuit import CompositeGate
from qiskit.circuit import Gate
from qiskit.circuit import QuantumCircuit
from qiskit.circuit import QuantumRegister
from qiskit.circuit.decorators import _op_expand
from qiskit.qasm import pi
from qiskit.extensions.standard.u3 import U3Gate
class XGate(Gate):
    """The Pauli X (bit-flip) gate."""

    def __init__(self):
        """Construct a new single-qubit X gate."""
        super().__init__("x", 1, [])

    def _define(self):
        """Expand X in terms of U3.

        gate x a { u3(pi,0,pi) a; }
        """
        qreg = QuantumRegister(1, "q")
        rules = [
            (U3Gate(pi, 0, pi), [qreg[0]], []),
        ]
        self.definition = list(rules)

    def inverse(self):
        """Return the inverse gate; X is its own inverse."""
        return XGate()
@_op_expand(1)
def x(self, q):
    """Append a Pauli-X gate acting on qubit ``q`` to this circuit."""
    gate = XGate()
    return self.append(gate, [q], [])


# Expose ``x`` as a method on both circuit containers.
QuantumCircuit.x = x
CompositeGate.x = x
| 22.105263 | 77 | 0.603968 |
from qiskit.circuit import CompositeGate
from qiskit.circuit import Gate
from qiskit.circuit import QuantumCircuit
from qiskit.circuit import QuantumRegister
from qiskit.circuit.decorators import _op_expand
from qiskit.qasm import pi
from qiskit.extensions.standard.u3 import U3Gate
class XGate(Gate):
def __init__(self):
super().__init__("x", 1, [])
def _define(self):
definition = []
q = QuantumRegister(1, "q")
rule = [
(U3Gate(pi, 0, pi), [q[0]], [])
]
for inst in rule:
definition.append(inst)
self.definition = definition
def inverse(self):
return XGate()
@_op_expand(1)
def x(self, q):
return self.append(XGate(), [q], [])
QuantumCircuit.x = x
CompositeGate.x = x
| true | true |
f73897464eec5dfd237e31a4d8f787d41ed1bef8 | 1,619 | py | Python | procgen_adventure/utils/torch_utils.py | Laurans/procgen_adventure | 5f88f3f647f7854c8fb2ae516f3490d89845eefa | [
"MIT"
] | 2 | 2020-04-02T11:51:43.000Z | 2020-04-20T20:07:03.000Z | procgen_adventure/utils/torch_utils.py | Laurans/procgen_adventure | 5f88f3f647f7854c8fb2ae516f3490d89845eefa | [
"MIT"
] | 1 | 2020-04-08T10:34:29.000Z | 2020-04-29T21:08:48.000Z | procgen_adventure/utils/torch_utils.py | Laurans/procgen_adventure | 5f88f3f647f7854c8fb2ae516f3490d89845eefa | [
"MIT"
] | null | null | null | import numpy as np
import torch
import torch.distributed as dist
def tensor(x, device):
    """Convert ``x`` to a torch tensor on ``device``.

    An existing ``torch.Tensor`` is moved as-is (dtype preserved); any
    other array-like is converted through NumPy and returned as float32.
    """
    if isinstance(x, torch.Tensor):
        return x.to(device)
    # BUGFIX: `np.float` was deprecated in NumPy 1.20 and removed in 1.24;
    # the builtin `float` has identical semantics (float64) and is portable.
    x = np.asarray(x, dtype=float)
    x = torch.tensor(x, device=device, dtype=torch.float32)
    return x
def input_preprocessing(x, device):
    """Scale raw pixel data in [0, 255] to float32 values in [0, 1]."""
    out = tensor(x, device).float()
    out /= 255.0  # in-place scaling, matching the original behavior
    return out
def to_np(t):
    """Detach ``t`` from the autograd graph and return it as a NumPy array."""
    return t.detach().cpu().numpy()
def random_seed(seed=None):
    """Seed NumPy, then derive torch's seed from NumPy's RNG stream."""
    np.random.seed(seed)
    derived = np.random.randint(int(1e6))
    torch.manual_seed(derived)
def restore_model(model, save_path):
    """Restore network and optimizer state from a checkpoint file.

    ``model`` is expected to expose ``.network`` and ``.optimizer``
    attributes (both torch modules/optimizers).  Returns the stored
    ``update`` counter so training can resume where it left off.
    """
    checkpoint = torch.load(save_path)
    model.network.load_state_dict(checkpoint["model_state_dict"])
    model.optimizer.load_state_dict(checkpoint["optimizer_state_dict"])
    update = checkpoint["update"]
    return update
def sync_initial_weights(model):
    """Broadcast rank-0 parameters so every worker starts identically."""
    for p in model.parameters():
        dist.broadcast(p.data, src=0)
def sync_gradients(model):
    """All-reduce (sum) every parameter's gradient across workers."""
    for p in model.parameters():
        dist.all_reduce(p.grad.data, op=dist.ReduceOp.SUM)
def cleanup():
    """Tear down the torch.distributed process group."""
    dist.destroy_process_group()
def sync_values(tensor_sum_values, tensor_nb_values):
    """Reduce per-worker sums and counts onto rank 0 and return their ratio.

    NOTE(review): ``dist.reduce`` only guarantees the result on ``dst=0``,
    so the returned mean is meaningful on rank 0 only — confirm callers
    ignore it elsewhere.
    """
    dist.reduce(tensor_sum_values, dst=0)
    dist.reduce(tensor_nb_values, dst=0)
    return tensor_sum_values / tensor_nb_values
def range_tensor(t, device):
    """Return ``[0, 1, ..., t-1]`` as an int64 tensor on ``device``."""
    indices = torch.arange(t)
    return indices.long().to(device)
def zeros(shape, dtype):
    """Return zeros as a torch tensor, or as a NumPy array for NumPy dtypes.

    ``torch.zeros`` rejects NumPy dtype objects with a TypeError; in that
    case fall back to ``np.zeros`` with the same shape and dtype.
    """
    try:
        out = torch.zeros(shape, dtype=dtype)
    except TypeError:  # NumPy dtype was passed
        out = np.zeros(shape, dtype=dtype)
    return out
| 23.128571 | 75 | 0.697344 | import numpy as np
import torch
import torch.distributed as dist
def tensor(x, device):
if isinstance(x, torch.Tensor):
return x.to(device)
x = np.asarray(x, dtype=np.float)
x = torch.tensor(x, device=device, dtype=torch.float32)
return x
def input_preprocessing(x, device):
x = tensor(x, device)
x = x.float()
x /= 255.0
return x
def to_np(t):
return t.cpu().detach().numpy()
def random_seed(seed=None):
np.random.seed(seed)
torch.manual_seed(np.random.randint(int(1e6)))
def restore_model(model, save_path):
checkpoint = torch.load(save_path)
model.network.load_state_dict(checkpoint["model_state_dict"])
model.optimizer.load_state_dict(checkpoint["optimizer_state_dict"])
update = checkpoint["update"]
return update
def sync_initial_weights(model):
for param in model.parameters():
dist.broadcast(param.data, src=0)
def sync_gradients(model):
for param in model.parameters():
dist.all_reduce(param.grad.data, op=dist.ReduceOp.SUM)
def cleanup():
dist.destroy_process_group()
def sync_values(tensor_sum_values, tensor_nb_values):
dist.reduce(tensor_sum_values, dst=0)
dist.reduce(tensor_nb_values, dst=0)
return tensor_sum_values / tensor_nb_values
def range_tensor(t, device):
return torch.arange(t).long().to(device)
def zeros(shape, dtype):
try:
return torch.zeros(shape, dtype=dtype)
except TypeError:
return np.zeros(shape, dtype=dtype)
| true | true |
f7389a07051a305f3313f7a63271b2f4966e093d | 6,985 | py | Python | ECG_Dataloader_Brazilian_records.py | vgliner/Chlng_20_Sub | 169d098e5315510df83ad988c7e2067317cef4cf | [
"BSD-2-Clause"
] | null | null | null | ECG_Dataloader_Brazilian_records.py | vgliner/Chlng_20_Sub | 169d098e5315510df83ad988c7e2067317cef4cf | [
"BSD-2-Clause"
] | null | null | null | ECG_Dataloader_Brazilian_records.py | vgliner/Chlng_20_Sub | 169d098e5315510df83ad988c7e2067317cef4cf | [
"BSD-2-Clause"
] | null | null | null | from torch.utils.data import Dataset
import os
import scipy.io as sio
import numpy as np
import matplotlib.pyplot as plt
import h5py
import pandas as pd
import random
from scipy.io import loadmat
import Utils
from scipy import interpolate
from scipy import signal
import csv
from scipy.signal import butter, lfilter, freqz
import re
from glob import glob
import time
import pickle
"""
It contains annotations about 6 different ECGs abnormalities:
- 1st degree AV block (1dAVb);
- right bundle branch block (RBBB);
- left bundle branch block (LBBB);
- sinus bradycardia (SB);
- atrial fibrillation (AF);
- sinus tachycardia (ST).
Notation of multiclass_to_binary_type:
[-1] Return multiclass [0] I-AVB, [1] RBBB, [2] LBBB, [3] SB, [4] AF, [5] ST
"""
PRINT_FLAG = False
class ECG_Multilead_Dataset_Brazilian_records(Dataset):
def __init__(self, root_dir=None, transform=None, multiclass=False,
binary_class_type=1, apply_aurmentation=True, random_augmentation=True,
augmentation_method=None, record_length=60, to_normalize=True, Uploading_method='HDD',
old_format= False):
# record_length [sec]
# Uploading_method = 'HDD'\'RAM'\'cache'
super().__init__()
self.data = []
self.samples = None
self.root_dir = root_dir
self.transform = transform
self.multiclass = multiclass
self.binary_class_type = binary_class_type
self.apply_aurmentation = apply_aurmentation
self.random_augmentation = random_augmentation
self.augmentation_method = augmentation_method
self.database_length = 0
self.data_mutual_sample_rate = 500
self.record_length = record_length * self.data_mutual_sample_rate
self.to_normalize = to_normalize
self.Uploading_method = Uploading_method
self.brazilian_database_path = None
self.brazilian_annotations_path = None
self.sample_rate = 400
self.maximal_length = self.sample_rate * self.record_length
if not multiclass:
assert binary_class_type >= 0, 'Class selection is mandatory for single class classification'
if self.root_dir is None:
paths = Utils.read_config_file()
self.brazilian_database_path = paths[1]
self.brazilian_annotations_path = paths[2]
self.brazilian_annotations_dict_path = paths[3]
else:
self.brazilian_database_path = self.root_dir + dataset_filename
self.f = h5py.File(self.brazilian_database_path, "r")
self.data_ids = np.array(self.f['id_exam'])
self.data = self.f['signal']
start = time.process_time()
self.annotations = pd.read_csv(self.brazilian_annotations_path)
end = time.process_time()
print(f'Uploading annotations took {end-start} sec.')
start = time.process_time()
# Convert Data Frame to Dictionary (set_index method allows any column to be used as index)
with open(self.brazilian_annotations_dict_path, 'rb') as handle:
self.annotations_dict = pickle.load(handle)
#self.annotations_dict = self.annotations.set_index('id_exam').transpose().to_dict(orient='dict')
end = time.process_time()
print(f'Uploading annotations dictionary took {end-start} sec.')
print('finished')
self.loaded_data = {}
def __len__(self):
return len(self.data)
def __getitem__(self, idx):
if idx not in self.loaded_data.keys():
sample = self.data[idx]
data_id = self.data_ids[idx]
sample = np.transpose(sample)
annotation = self.annotations_dict[data_id]
annotation = list(annotation.values())[3:]
sample = (sample, annotation)
else:
sample = self.loaded_data[idx]
if self.to_normalize:
sample = self.normalization(sample)
if self.binary_class_type >= 0 and not self.multiclass:
sample[1] = sample[1][int(self.binary_class_type)]
if self.multiclass:
sample[1] = np.stack(sample[1])
if self.Uploading_method == 'cache' and idx not in self.loaded_data.keys():
self.loaded_data[idx] = sample
if self.apply_aurmentation:
sample = self.augmentation_algorithm(sample)
return sample
def find_annotations(self, id_to_find):
a= list(self.annotations['id_exam']).index(id_to_find)
return list(self.annotations.iloc[a].values[4:])
@staticmethod
def plot(sample):
item_to_plot = sample[0]
fig, axes = plt.subplots(nrows=6, ncols=2)
fig.suptitle(np.array2string(sample[1]), fontsize=14)
titles = ['Lead1', 'Lead2', 'Lead3', 'aVR', 'aVL', 'aVF', 'V1', 'V2', 'V3', 'V4', 'V5', 'V6']
b = item_to_plot
for ax, cntr in zip(axes.flatten(), range(12)):
ax.plot(b[cntr, :], linewidth=1.0)
ax.set(title=titles[cntr])
plt.plot()
plt.show()
return
@staticmethod
def plot_one_strip(one_strip):
item_to_plot = one_strip
plt.plot(item_to_plot)
plt.show()
return
def augmentation_algorithm(self, record):
current_record_length = record[0].shape[1]
if current_record_length == self.record_length:
return record
if current_record_length <= self.record_length: # record is shorter than maximal length or similar
new_sample = np.zeros((12, self.record_length))
index_for_pasting = random.sample(range(self.record_length - current_record_length), 1)
new_sample[:, index_for_pasting[0]:index_for_pasting[0] + current_record_length] = record[0]
else: # record is longer than maximal length
index_for_pasting = random.sample(range(current_record_length - self.record_length), 1)
new_sample = record[0][:, index_for_pasting[0]:index_for_pasting[0] + self.record_length]
return [new_sample, record[1]]
@staticmethod
def normalization(record):
sample = record[0]
for i, strip in enumerate(sample):
max_ = np.max(strip)
min_ = np.min(strip)
if max_ - min_ == 0:
sample[i] = strip
else:
sample[i] = (strip - min_) / (max_ - min_)
return [sample, record[1]]
def test_Brazilian_db_dataloader():
    """Smoke test: iterate the entire dataset, reporting time every 10k records."""
    print('Testing Brazilian database')
    ds = ECG_Multilead_Dataset_Brazilian_records()
    start = time.process_time()
    for record_counter in range(len(ds)):
        ds_record = ds[record_counter]
        # ds.plot(ds_record)  # uncomment to visually inspect records
        if record_counter % 10000 == 0:
            stop = time.process_time()
            print(f'Loaded record # {record_counter}, time : {stop-start}')
    print('Finished testing')


if __name__ == "__main__":
    test_Brazilian_db_dataloader()
| 36.005155 | 107 | 0.643951 | from torch.utils.data import Dataset
import os
import scipy.io as sio
import numpy as np
import matplotlib.pyplot as plt
import h5py
import pandas as pd
import random
from scipy.io import loadmat
import Utils
from scipy import interpolate
from scipy import signal
import csv
from scipy.signal import butter, lfilter, freqz
import re
from glob import glob
import time
import pickle
PRINT_FLAG = False
class ECG_Multilead_Dataset_Brazilian_records(Dataset):
def __init__(self, root_dir=None, transform=None, multiclass=False,
binary_class_type=1, apply_aurmentation=True, random_augmentation=True,
augmentation_method=None, record_length=60, to_normalize=True, Uploading_method='HDD',
old_format= False):
super().__init__()
self.data = []
self.samples = None
self.root_dir = root_dir
self.transform = transform
self.multiclass = multiclass
self.binary_class_type = binary_class_type
self.apply_aurmentation = apply_aurmentation
self.random_augmentation = random_augmentation
self.augmentation_method = augmentation_method
self.database_length = 0
self.data_mutual_sample_rate = 500
self.record_length = record_length * self.data_mutual_sample_rate
self.to_normalize = to_normalize
self.Uploading_method = Uploading_method
self.brazilian_database_path = None
self.brazilian_annotations_path = None
self.sample_rate = 400
self.maximal_length = self.sample_rate * self.record_length
if not multiclass:
assert binary_class_type >= 0, 'Class selection is mandatory for single class classification'
if self.root_dir is None:
paths = Utils.read_config_file()
self.brazilian_database_path = paths[1]
self.brazilian_annotations_path = paths[2]
self.brazilian_annotations_dict_path = paths[3]
else:
self.brazilian_database_path = self.root_dir + dataset_filename
self.f = h5py.File(self.brazilian_database_path, "r")
self.data_ids = np.array(self.f['id_exam'])
self.data = self.f['signal']
start = time.process_time()
self.annotations = pd.read_csv(self.brazilian_annotations_path)
end = time.process_time()
print(f'Uploading annotations took {end-start} sec.')
start = time.process_time()
with open(self.brazilian_annotations_dict_path, 'rb') as handle:
self.annotations_dict = pickle.load(handle)
end = time.process_time()
print(f'Uploading annotations dictionary took {end-start} sec.')
print('finished')
self.loaded_data = {}
def __len__(self):
return len(self.data)
def __getitem__(self, idx):
if idx not in self.loaded_data.keys():
sample = self.data[idx]
data_id = self.data_ids[idx]
sample = np.transpose(sample)
annotation = self.annotations_dict[data_id]
annotation = list(annotation.values())[3:]
sample = (sample, annotation)
else:
sample = self.loaded_data[idx]
if self.to_normalize:
sample = self.normalization(sample)
if self.binary_class_type >= 0 and not self.multiclass:
sample[1] = sample[1][int(self.binary_class_type)]
if self.multiclass:
sample[1] = np.stack(sample[1])
if self.Uploading_method == 'cache' and idx not in self.loaded_data.keys():
self.loaded_data[idx] = sample
if self.apply_aurmentation:
sample = self.augmentation_algorithm(sample)
return sample
def find_annotations(self, id_to_find):
a= list(self.annotations['id_exam']).index(id_to_find)
return list(self.annotations.iloc[a].values[4:])
@staticmethod
def plot(sample):
item_to_plot = sample[0]
fig, axes = plt.subplots(nrows=6, ncols=2)
fig.suptitle(np.array2string(sample[1]), fontsize=14)
titles = ['Lead1', 'Lead2', 'Lead3', 'aVR', 'aVL', 'aVF', 'V1', 'V2', 'V3', 'V4', 'V5', 'V6']
b = item_to_plot
for ax, cntr in zip(axes.flatten(), range(12)):
ax.plot(b[cntr, :], linewidth=1.0)
ax.set(title=titles[cntr])
plt.plot()
plt.show()
return
@staticmethod
def plot_one_strip(one_strip):
item_to_plot = one_strip
plt.plot(item_to_plot)
plt.show()
return
def augmentation_algorithm(self, record):
current_record_length = record[0].shape[1]
if current_record_length == self.record_length:
return record
if current_record_length <= self.record_length:
new_sample = np.zeros((12, self.record_length))
index_for_pasting = random.sample(range(self.record_length - current_record_length), 1)
new_sample[:, index_for_pasting[0]:index_for_pasting[0] + current_record_length] = record[0]
else:
index_for_pasting = random.sample(range(current_record_length - self.record_length), 1)
new_sample = record[0][:, index_for_pasting[0]:index_for_pasting[0] + self.record_length]
return [new_sample, record[1]]
@staticmethod
def normalization(record):
sample = record[0]
for i, strip in enumerate(sample):
max_ = np.max(strip)
min_ = np.min(strip)
if max_ - min_ == 0:
sample[i] = strip
else:
sample[i] = (strip - min_) / (max_ - min_)
return [sample, record[1]]
def test_Brazilian_db_dataloader():
    """Smoke test: iterate every record of the Brazilian ECG dataset.

    Prints CPU-time progress every 10000 records; any indexing error in the
    dataset class surfaces here.
    """
    print('Testing Brazilian database')
    dataset = ECG_Multilead_Dataset_Brazilian_records()
    start = time.process_time()
    for record_counter in range(len(dataset)):
        _ = dataset[record_counter]
        if record_counter % 10000 == 0:
            stop = time.process_time()
            print(f'Loaded record # {record_counter}, time : {stop-start}')
    print('Finished testing')
# Run the dataloader smoke test when this module is executed directly.
if __name__ == "__main__":
    test_Brazilian_db_dataloader()
| true | true |
f7389a19e9d0ff7b128f5ec0d44950498ceefaf7 | 5,501 | py | Python | PaintsChainer/server.py | xiaofengShi/Gans | 03938440f5f83da22602a0b3b78d689f310c1a95 | [
"MIT"
] | null | null | null | PaintsChainer/server.py | xiaofengShi/Gans | 03938440f5f83da22602a0b3b78d689f310c1a95 | [
"MIT"
] | null | null | null | PaintsChainer/server.py | xiaofengShi/Gans | 03938440f5f83da22602a0b3b78d689f310c1a95 | [
"MIT"
] | null | null | null | #!/usr/bin/env python
import http.server
import sys
import time
import re
import argparse
from cgi import parse_header, parse_multipart
from urllib.parse import parse_qs
sys.path.append('./sketchKeras')
import run
sys.path.append('./cgi-bin/paint_x2_unet')
import cgi_exe
sys.path.append('./cgi-bin/helpers')
from platformAdapter import OSHelper
class MyHandler(http.server.CGIHTTPRequestHandler):
    """Request handler for the line-drawing colorization server.

    POST routes:
      /post/*   -- store uploaded "line" and "ref" images for a given id
      /paint/*  -- colorize a previously posted image (optional blur/step)
      /sketch/* -- extract a sketch from a previously posted image

    Depends on the module-level ``args`` (parsed CLI options) and, in
    painting modes, the ``painter`` and ``sketcher`` wrappers created in the
    ``__main__`` block.
    """

    # Fallback only: shadowed by the per-instance list created in __init__ so
    # debug timing samples are not shared across requests (the original
    # appended to this class-level list, mixing timings between requests).
    t = []

    def __init__(self, req, client_addr, server):
        # Must be set before the superclass __init__, which handles the
        # request synchronously inside the constructor.
        self.t = []
        OSHelper.detect_environment()
        http.server.CGIHTTPRequestHandler.__init__(
            self, req, client_addr, server)

    def parse_POST(self):
        """Parse the POST body; returns a dict of field name -> list of values."""
        ctype, pdict = parse_header(self.headers['content-type'])
        if ctype == 'multipart/form-data':
            # parse_multipart needs the boundary as bytes. Converting it
            # unconditionally (as before) raised KeyError for urlencoded
            # bodies, which carry no boundary parameter.
            pdict['boundary'] = bytes(pdict['boundary'], "utf-8")
            postvars = parse_multipart(self.rfile, pdict)
        elif ctype == 'application/x-www-form-urlencoded':
            length = int(self.headers['content-length'])
            postvars = parse_qs(
                self.rfile.read(length),
                keep_blank_values=1)
        else:
            postvars = {}
        return postvars

    def log_t(self):
        """Record a timestamp for debug timing (no-op unless --debug)."""
        if(args.debug):
            self.t.append(time.time())
        return

    def print_log(self):
        """Print deltas between consecutive timestamps, then reset the list."""
        if(args.debug):
            for i, j in zip(self.t, self.t[1:]):
                print("time [sec]", j - i)
            self.t = []
        return

    def post_process(self, form, id_str):
        """Store the uploaded line/ref images; respond 503 if either is missing."""
        if "line" in form:
            # with-blocks guarantee the handle is closed even if the write fails.
            with open("./images/line/" + id_str + ".png", 'wb') as fout1:
                fout1.write(form["line"][0])
        else:
            self.ret_result(False)
            return
        if "ref" in form:
            with open("./images/ref/" + id_str + ".png", 'wb') as fout2:
                fout2.write(form["ref"][0])
        else:
            self.ret_result(False)
            return
        self.log_t()
        self.ret_result(True)
        self.log_t()
        self.print_log()
        return

    def paint_process(self, form, id_str):
        """Colorize image *id_str*; optional "blur" (int) and "step" fields."""
        blur = 0
        if "blur" in form:
            blur = form["blur"][0].decode()
            try:
                blur = int(blur)
            except ValueError:
                blur = 0
        self.log_t()
        painter.colorize(id_str, form["step"][0].decode() if "step" in form else "C", blur=blur)
        self.log_t()
        self.ret_result(True)
        self.log_t()
        self.print_log()
        return

    def sketch_process(self, form, id_str):
        """Extract a sketch from image *id_str*.

        The original also parsed a "blur" field here (copy-paste from
        paint_process) but never used it; that dead code is removed.
        """
        self.log_t()
        sketcher.tosketch(id_str)
        self.log_t()
        self.ret_result(True)
        self.log_t()
        self.print_log()
        return

    def ret_result(self, success):
        """Send the status payload: HTTP 200 on success, 503 on failure."""
        if success:
            content = bytes(
                "{ 'message':'The command Completed Successfully' , 'Status':'200 OK','success':true , 'used':"
                + str(args.gpu) + "}", "UTF-8")
            self.send_response(200)
        else:
            content = bytes(
                "{ 'message':'The command Failed' , 'Status':'503 NG','success':false , 'used':" +
                str(args.gpu) + "}", "UTF-8")
            self.send_response(503)
        # NOTE(review): the body uses single quotes and is not strictly valid
        # JSON despite the Content-type header; kept byte-identical so existing
        # clients keep working.
        self.send_header("Content-type", "application/json")
        self.send_header("Content-Length", len(content))
        self.send_header("Access-Control-Allow-Origin", "*")  # hard coding...
        self.end_headers()
        self.wfile.write(content)
        self.log_t()

    def do_POST(self):
        """Dispatch a POST to the post/paint/sketch handler based on the URL path."""
        self.log_t()
        form = self.parse_POST()
        self.log_t()
        if "id" in form:
            id_str = form["id"][0]
            # Strip anything that is not [A-Za-z0-9_] to block path tricks.
            id_str = re.sub(r'\W+', '', id_str.decode())
        else:
            self.ret_result(False)
            return
        if re.search('/post/*', self.path) is not None:
            self.post_process(form, id_str)
        elif re.search('/paint/*', self.path) is not None:
            self.paint_process(form, id_str)
        elif re.search('/sketch/*', self.path) is not None:
            self.sketch_process(form, id_str)
        else:
            self.ret_result(False)
        return
# set args
if "__main__" in __name__:
    # CLI for the colorization server.
    parser = argparse.ArgumentParser(description='chainer line drawing colorization server')
    parser.add_argument('--gpu', '-g', type=int, default=-1,
                        help='GPU ID (negative value indicates CPU)')
    parser.add_argument('--mode', '-m', default="stand_alone",
                        help='set process mode')
    # other mode "post_server" "paint_server"
    parser.add_argument('--port', '-p', type=int, default=9172, help='using port')
    parser.add_argument('--debug', dest='debug', action='store_true')
    parser.set_defaults(feature=False)
    parser.add_argument('--host', '-ho', default='0.0.0.0', help='using host')
    args = parser.parse_args()

    # The models are only loaded when this process actually paints.
    if args.mode == "stand_alone" or args.mode == "paint_server":
        print('GPU: {}'.format(args.gpu))
        painter = cgi_exe.Painter(gpu=args.gpu)
        sketcher = run.Sketch(gpu=args.gpu)

    # Blocking server loop on the configured host/port.
    httpd = http.server.HTTPServer((args.host, args.port), MyHandler)
    print('serving at {:s}:{:d}'.format(args.host, args.port))
    httpd.serve_forever()
| 29.260638 | 111 | 0.548446 |
import http.server
import sys
import time
import re
import argparse
from cgi import parse_header, parse_multipart
from urllib.parse import parse_qs
sys.path.append('./sketchKeras')
import run
sys.path.append('./cgi-bin/paint_x2_unet')
import cgi_exe
sys.path.append('./cgi-bin/helpers')
from platformAdapter import OSHelper
class MyHandler(http.server.CGIHTTPRequestHandler):
    """Request handler for the line-drawing colorization server.

    POST routes:
      /post/*   -- store uploaded "line" and "ref" images for a given id
      /paint/*  -- colorize a previously posted image (optional blur/step)
      /sketch/* -- extract a sketch from a previously posted image

    Depends on the module-level ``args`` (parsed CLI options) and, in
    painting modes, the ``painter`` and ``sketcher`` wrappers created in the
    ``__main__`` block.
    """

    # Fallback only: shadowed by the per-instance list created in __init__ so
    # debug timing samples are not shared across requests (the original
    # appended to this class-level list, mixing timings between requests).
    t = []

    def __init__(self, req, client_addr, server):
        # Must be set before the superclass __init__, which handles the
        # request synchronously inside the constructor.
        self.t = []
        OSHelper.detect_environment()
        http.server.CGIHTTPRequestHandler.__init__(
            self, req, client_addr, server)

    def parse_POST(self):
        """Parse the POST body; returns a dict of field name -> list of values."""
        ctype, pdict = parse_header(self.headers['content-type'])
        if ctype == 'multipart/form-data':
            # parse_multipart needs the boundary as bytes. Converting it
            # unconditionally (as before) raised KeyError for urlencoded
            # bodies, which carry no boundary parameter.
            pdict['boundary'] = bytes(pdict['boundary'], "utf-8")
            postvars = parse_multipart(self.rfile, pdict)
        elif ctype == 'application/x-www-form-urlencoded':
            length = int(self.headers['content-length'])
            postvars = parse_qs(
                self.rfile.read(length),
                keep_blank_values=1)
        else:
            postvars = {}
        return postvars

    def log_t(self):
        """Record a timestamp for debug timing (no-op unless --debug)."""
        if(args.debug):
            self.t.append(time.time())
        return

    def print_log(self):
        """Print deltas between consecutive timestamps, then reset the list."""
        if(args.debug):
            for i, j in zip(self.t, self.t[1:]):
                print("time [sec]", j - i)
            self.t = []
        return

    def post_process(self, form, id_str):
        """Store the uploaded line/ref images; respond 503 if either is missing."""
        if "line" in form:
            # with-blocks guarantee the handle is closed even if the write fails.
            with open("./images/line/" + id_str + ".png", 'wb') as fout1:
                fout1.write(form["line"][0])
        else:
            self.ret_result(False)
            return
        if "ref" in form:
            with open("./images/ref/" + id_str + ".png", 'wb') as fout2:
                fout2.write(form["ref"][0])
        else:
            self.ret_result(False)
            return
        self.log_t()
        self.ret_result(True)
        self.log_t()
        self.print_log()
        return

    def paint_process(self, form, id_str):
        """Colorize image *id_str*; optional "blur" (int) and "step" fields."""
        blur = 0
        if "blur" in form:
            blur = form["blur"][0].decode()
            try:
                blur = int(blur)
            except ValueError:
                blur = 0
        self.log_t()
        painter.colorize(id_str, form["step"][0].decode() if "step" in form else "C", blur=blur)
        self.log_t()
        self.ret_result(True)
        self.log_t()
        self.print_log()
        return

    def sketch_process(self, form, id_str):
        """Extract a sketch from image *id_str*.

        The original also parsed a "blur" field here (copy-paste from
        paint_process) but never used it; that dead code is removed.
        """
        self.log_t()
        sketcher.tosketch(id_str)
        self.log_t()
        self.ret_result(True)
        self.log_t()
        self.print_log()
        return

    def ret_result(self, success):
        """Send the status payload: HTTP 200 on success, 503 on failure."""
        if success:
            content = bytes(
                "{ 'message':'The command Completed Successfully' , 'Status':'200 OK','success':true , 'used':"
                + str(args.gpu) + "}", "UTF-8")
            self.send_response(200)
        else:
            content = bytes(
                "{ 'message':'The command Failed' , 'Status':'503 NG','success':false , 'used':" +
                str(args.gpu) + "}", "UTF-8")
            self.send_response(503)
        # NOTE(review): the body uses single quotes and is not strictly valid
        # JSON despite the Content-type header; kept byte-identical so existing
        # clients keep working.
        self.send_header("Content-type", "application/json")
        self.send_header("Content-Length", len(content))
        self.send_header("Access-Control-Allow-Origin", "*")
        self.end_headers()
        self.wfile.write(content)
        self.log_t()

    def do_POST(self):
        """Dispatch a POST to the post/paint/sketch handler based on the URL path."""
        self.log_t()
        form = self.parse_POST()
        self.log_t()
        if "id" in form:
            id_str = form["id"][0]
            # Strip anything that is not [A-Za-z0-9_] to block path tricks.
            id_str = re.sub(r'\W+', '', id_str.decode())
        else:
            self.ret_result(False)
            return
        if re.search('/post/*', self.path) is not None:
            self.post_process(form, id_str)
        elif re.search('/paint/*', self.path) is not None:
            self.paint_process(form, id_str)
        elif re.search('/sketch/*', self.path) is not None:
            self.sketch_process(form, id_str)
        else:
            self.ret_result(False)
        return
if "__main__" in __name__:
parser = argparse.ArgumentParser(description='chainer line drawing colorization server')
parser.add_argument('--gpu', '-g', type=int, default=-1,
help='GPU ID (negative value indicates CPU)')
parser.add_argument('--mode', '-m', default="stand_alone",
help='set process mode')
parser.add_argument('--port', '-p', type=int, default=9172, help='using port')
parser.add_argument('--debug', dest='debug', action='store_true')
parser.set_defaults(feature=False)
parser.add_argument('--host', '-ho', default='0.0.0.0', help='using host')
args = parser.parse_args()
if args.mode == "stand_alone" or args.mode == "paint_server":
print('GPU: {}'.format(args.gpu))
painter = cgi_exe.Painter(gpu=args.gpu)
sketcher = run.Sketch(gpu=args.gpu)
httpd = http.server.HTTPServer((args.host, args.port), MyHandler)
print('serving at {:s}:{:d}'.format(args.host, args.port))
httpd.serve_forever()
| true | true |
f7389b0fa337f966813df242e778dadeafe34bbb | 9,157 | py | Python | python_modules/dagit/dagit/app.py | Kilo59/dagster | ed8f5a86c923ba748618e0a73e7bea0d4544ae54 | [
"Apache-2.0"
] | null | null | null | python_modules/dagit/dagit/app.py | Kilo59/dagster | ed8f5a86c923ba748618e0a73e7bea0d4544ae54 | [
"Apache-2.0"
] | null | null | null | python_modules/dagit/dagit/app.py | Kilo59/dagster | ed8f5a86c923ba748618e0a73e7bea0d4544ae54 | [
"Apache-2.0"
] | null | null | null | import gzip
import io
import os
import uuid
import nbformat
from dagster import __version__ as dagster_version
from dagster import check
from dagster.cli.workspace import Workspace
from dagster.cli.workspace.context import WorkspaceProcessContext
from dagster.core.debug import DebugRunPayload
from dagster.core.execution.compute_logs import warn_if_compute_logs_disabled
from dagster.core.instance import DagsterInstance
from dagster.core.storage.compute_log_manager import ComputeIOType
from dagster.core.telemetry import log_workspace_stats
from dagster_graphql.schema import create_schema
from dagster_graphql.version import __version__ as dagster_graphql_version
from flask import Blueprint, Flask, jsonify, redirect, render_template_string, request, send_file
from flask_cors import CORS
from flask_graphql import GraphQLView
from flask_sockets import Sockets
from graphql.execution.executors.gevent import GeventExecutor as Executor
from nbconvert import HTMLExporter
from .format_error import format_error_with_stack_trace
from .subscription_server import DagsterSubscriptionServer
from .templates.playground import TEMPLATE as PLAYGROUND_TEMPLATE
from .version import __version__
# Warning surfaced when schedules are defined but no scheduler is configured
# on the instance.
MISSING_SCHEDULER_WARNING = (
    "You have defined ScheduleDefinitions for this repository, but have "
    "not defined a scheduler on the instance"
)
class DagsterGraphQLView(GraphQLView):
    """Flask GraphQL view bound to a Dagit workspace process context.

    A fresh request context is derived per request so each GraphQL request
    observes a consistent snapshot of the workspace.
    """

    def __init__(self, context, **kwargs):
        super(DagsterGraphQLView, self).__init__(**kwargs)
        # Process-scoped context shared by all requests served by this view.
        self.context = check.inst_param(context, "context", WorkspaceProcessContext)

    def get_context(self):
        # Called by flask_graphql for every incoming GraphQL request.
        return self.context.create_request_context()

    # Render GraphQL errors with stack traces for easier debugging.
    format_error = staticmethod(format_error_with_stack_trace)
def dagster_graphql_subscription_view(subscription_server, context):
    """Build the websocket view that serves GraphQL subscriptions."""
    context = check.inst_param(context, "context", WorkspaceProcessContext)

    def view(ws):
        # Even though this argument is named as the "request_context", we are passing it
        # a `WorkspaceProcessContext`. This is a naming restriction from the underlying
        # `GeventSubscriptionServer` which we rely on. If you view the implementation
        # for the DagsterSubscriptionServer, you will see that we create a request context
        # for every GraphQL request in the `on_start` method.
        subscription_server.handle(ws, request_context=context)
        return []

    return view
def info_view():
    """Return the dagit / dagster-graphql / dagster versions as JSON, HTTP 200."""
    payload = jsonify(
        dagit_version=__version__,
        dagster_graphql_version=dagster_graphql_version,
        dagster_version=dagster_version,
    )
    return payload, 200
def notebook_view(request_args):
    """Render the notebook at ``request_args["path"]`` as embeddable HTML.

    Returns ``(html, 200)`` on success or ``("Invalid Path", 400)`` for
    non-``.ipynb`` paths. This currently provides open access to the file
    system - the very least we can do is limit it to notebook files until we
    create a more permanent solution.
    """
    check.dict_param(request_args, "request_args")

    path = request_args["path"]
    if not path.endswith(".ipynb"):
        return "Invalid Path", 400

    # Read explicitly as UTF-8: .ipynb files are UTF-8 JSON, and relying on
    # the platform default encoding breaks on e.g. Windows (cp1252).
    with open(os.path.abspath(path), encoding="utf-8") as f:
        read_data = f.read()
    notebook = nbformat.reads(read_data, as_version=4)
    html_exporter = HTMLExporter()
    html_exporter.template_file = "basic"

    (body, resources) = html_exporter.from_notebook_node(notebook)
    return "<style>" + resources["inlining"]["css"][0] + "</style>" + body, 200
def download_log_view(context):
    """Build the Flask view that serves captured compute logs as downloads."""
    context = check.inst_param(context, "context", WorkspaceProcessContext)

    def view(run_id, step_key, file_type):
        # Normalize/validate inputs: the UUID round-trip raises on malformed
        # run ids, and only the last path component of the step key is used.
        run_id = str(uuid.UUID(run_id))
        step_key = step_key.split("/")[-1]
        out_name = f"{run_id}_{step_key}.{file_type}"

        log_manager = context.instance.compute_log_manager
        try:
            io_kind = ComputeIOType(file_type)
            payload = log_manager.get_local_path(run_id, step_key, io_kind)
            if not os.path.exists(payload):
                payload = io.BytesIO()
            cache_timeout = None if log_manager.is_watch_completed(run_id, step_key) else 0
        except ValueError:
            # Unknown file_type: serve an empty, uncached download.
            payload = io.BytesIO()
            cache_timeout = 0

        if not payload:
            payload = io.BytesIO()

        return send_file(
            payload, as_attachment=True, attachment_filename=out_name, cache_timeout=cache_timeout
        )

    return view
def download_dump_view(context):
    """Build the Flask view that serves a gzipped debug dump of a run."""
    context = check.inst_param(context, "context", WorkspaceProcessContext)

    def view(run_id):
        run = context.instance.get_run_by_id(run_id)
        # Validate before building the payload: the original only checked the
        # invariant after DebugRunPayload.build, which would already have
        # failed on a missing run.
        check.invariant(run is not None)
        debug_payload = DebugRunPayload.build(context.instance, run)

        out_name = f"{run_id}.gzip"

        result = io.BytesIO()
        with gzip.GzipFile(fileobj=result, mode="wb") as file:
            debug_payload.write(file)

        result.seek(0)  # be kind, please rewind

        return send_file(result, as_attachment=True, attachment_filename=out_name)

    return view
def instantiate_app_with_views(
    context, schema, app_path_prefix, target_dir=os.path.dirname(__file__)
):
    """Assemble the Dagit Flask app under ``app_path_prefix``.

    Wires up the GraphQL HTTP and websocket endpoints, log/debug-dump
    downloads, notebook rendering, the version info route, and the static
    single-page webapp.
    """
    app = Flask(
        "dagster-ui",
        static_url_path=app_path_prefix,
        static_folder=os.path.join(target_dir, "./webapp/build"),
    )
    subscription_server = DagsterSubscriptionServer(schema=schema)

    # Websocket routes
    sockets = Sockets(app)
    sockets.add_url_rule(
        f"{app_path_prefix}/graphql",
        "graphql",
        dagster_graphql_subscription_view(subscription_server, context),
    )

    # HTTP routes
    bp = Blueprint("routes", __name__, url_prefix=app_path_prefix)
    bp.add_url_rule("/graphiql", "graphiql", lambda: redirect(f"{app_path_prefix}/graphql", 301))
    bp.add_url_rule(
        "/graphql",
        "graphql",
        DagsterGraphQLView.as_view(
            "graphql",
            schema=schema,
            graphiql=True,
            graphiql_template=PLAYGROUND_TEMPLATE,
            executor=Executor(),
            context=context,
        ),
    )

    bp.add_url_rule(
        # should match the `build_local_download_url`
        "/download/<string:run_id>/<string:step_key>/<string:file_type>",
        "download_view",
        download_log_view(context),
    )
    bp.add_url_rule(
        "/download_debug/<string:run_id>",
        "download_dump_view",
        download_dump_view(context),
    )

    # these routes are specifically for the Dagit UI and are not part of the graphql
    # API that we want other people to consume, so they're separate for now.
    # Also grabbing the magic global request args dict so that notebook_view is testable
    bp.add_url_rule("/dagit/notebook", "notebook", lambda: notebook_view(request.args))
    bp.add_url_rule("/dagit_info", "sanity_view", info_view)

    index_path = os.path.join(target_dir, "./webapp/build/index.html")

    def index_view():
        # Serve the SPA shell, rewriting asset URLs so the app works when
        # mounted under a non-root path prefix.
        try:
            with open(index_path) as f:
                rendered_template = render_template_string(f.read())
                return rendered_template.replace(
                    'src="/static', f'src="{app_path_prefix}/static'
                ).replace('href="/static', f'href="{app_path_prefix}/static')
        except FileNotFoundError:
            raise Exception(
                """Can't find webapp files. Probably webapp isn't built. If you are using
                dagit, then probably it's a corrupted installation or a bug. However, if you are
                developing dagit locally, your problem can be fixed as follows:

                cd ./python_modules/
                make rebuild_dagit"""
            )

    def error_redirect(_path):
        # Unknown routes fall through to the SPA, which does its own routing.
        return index_view()

    bp.add_url_rule("/", "index_view", index_view)
    bp.context_processor(lambda: {"app_path_prefix": app_path_prefix})

    app.app_protocol = lambda environ_path_info: "graphql-ws"
    app.register_blueprint(bp)
    app.register_error_handler(404, error_redirect)

    # if the user asked for a path prefix, handle the naked domain just in case they are not
    # filtering inbound traffic elsewhere and redirect to the path prefix.
    if app_path_prefix:
        app.add_url_rule("/", "force-path-prefix", lambda: redirect(app_path_prefix, 301))

    CORS(app)

    return app
def create_app_from_workspace(
    workspace: Workspace, instance: DagsterInstance, path_prefix: str = ""
):
    """Create the Dagit Flask app for *workspace*, optionally mounted at *path_prefix*.

    Raises Exception when *path_prefix* does not start with "/" or ends with "/".
    """
    check.inst_param(workspace, "workspace", Workspace)
    check.inst_param(instance, "instance", DagsterInstance)
    check.str_param(path_prefix, "path_prefix")

    if path_prefix:
        if not path_prefix.startswith("/"):
            raise Exception(f'The path prefix should begin with a leading "/": got {path_prefix}')
        if path_prefix.endswith("/"):
            raise Exception(f'The path prefix should not include a trailing "/": got {path_prefix}')

    warn_if_compute_logs_disabled()

    print("Loading repository...")  # pylint: disable=print-call

    process_context = WorkspaceProcessContext(
        instance=instance, workspace=workspace, version=__version__
    )

    log_workspace_stats(instance, process_context)

    return instantiate_app_with_views(process_context, create_schema(), path_prefix)
| 35.909804 | 100 | 0.694878 | import gzip
import io
import os
import uuid
import nbformat
from dagster import __version__ as dagster_version
from dagster import check
from dagster.cli.workspace import Workspace
from dagster.cli.workspace.context import WorkspaceProcessContext
from dagster.core.debug import DebugRunPayload
from dagster.core.execution.compute_logs import warn_if_compute_logs_disabled
from dagster.core.instance import DagsterInstance
from dagster.core.storage.compute_log_manager import ComputeIOType
from dagster.core.telemetry import log_workspace_stats
from dagster_graphql.schema import create_schema
from dagster_graphql.version import __version__ as dagster_graphql_version
from flask import Blueprint, Flask, jsonify, redirect, render_template_string, request, send_file
from flask_cors import CORS
from flask_graphql import GraphQLView
from flask_sockets import Sockets
from graphql.execution.executors.gevent import GeventExecutor as Executor
from nbconvert import HTMLExporter
from .format_error import format_error_with_stack_trace
from .subscription_server import DagsterSubscriptionServer
from .templates.playground import TEMPLATE as PLAYGROUND_TEMPLATE
from .version import __version__
# Warning surfaced when schedules are defined but no scheduler is configured
# on the instance.
MISSING_SCHEDULER_WARNING = (
    "You have defined ScheduleDefinitions for this repository, but have "
    "not defined a scheduler on the instance"
)
class DagsterGraphQLView(GraphQLView):
    """Flask GraphQL view bound to a Dagit workspace process context.

    A fresh request context is derived per request so each GraphQL request
    observes a consistent snapshot of the workspace.
    """

    def __init__(self, context, **kwargs):
        super(DagsterGraphQLView, self).__init__(**kwargs)
        # Process-scoped context shared by all requests served by this view.
        self.context = check.inst_param(context, "context", WorkspaceProcessContext)

    def get_context(self):
        # Called by flask_graphql for every incoming GraphQL request.
        return self.context.create_request_context()

    # Render GraphQL errors with stack traces for easier debugging.
    format_error = staticmethod(format_error_with_stack_trace)
def dagster_graphql_subscription_view(subscription_server, context):
    """Build the websocket view that serves GraphQL subscriptions."""
    context = check.inst_param(context, "context", WorkspaceProcessContext)

    def view(ws):
        # Despite the "request_context" name, this receives the process-scoped
        # WorkspaceProcessContext; DagsterSubscriptionServer derives a true
        # request context per GraphQL request in its on_start hook.
        subscription_server.handle(ws, request_context=context)
        return []

    return view
def info_view():
    """Return the dagit / dagster-graphql / dagster versions as JSON, HTTP 200."""
    payload = jsonify(
        dagit_version=__version__,
        dagster_graphql_version=dagster_graphql_version,
        dagster_version=dagster_version,
    )
    return payload, 200
def notebook_view(request_args):
    """Render the notebook at ``request_args["path"]`` as embeddable HTML.

    Returns ``(html, 200)`` on success or ``("Invalid Path", 400)`` for
    non-``.ipynb`` paths. Access is limited to notebook files because this
    otherwise provides open access to the file system.
    """
    check.dict_param(request_args, "request_args")

    path = request_args["path"]
    if not path.endswith(".ipynb"):
        return "Invalid Path", 400

    # Read explicitly as UTF-8: .ipynb files are UTF-8 JSON, and relying on
    # the platform default encoding breaks on e.g. Windows (cp1252).
    with open(os.path.abspath(path), encoding="utf-8") as f:
        read_data = f.read()
    notebook = nbformat.reads(read_data, as_version=4)
    html_exporter = HTMLExporter()
    html_exporter.template_file = "basic"

    (body, resources) = html_exporter.from_notebook_node(notebook)
    return "<style>" + resources["inlining"]["css"][0] + "</style>" + body, 200
def download_log_view(context):
    """Build the Flask view that serves captured compute logs as downloads."""
    context = check.inst_param(context, "context", WorkspaceProcessContext)

    def view(run_id, step_key, file_type):
        # Normalize/validate inputs: the UUID round-trip raises on malformed
        # run ids, and only the last path component of the step key is used.
        run_id = str(uuid.UUID(run_id))
        step_key = step_key.split("/")[-1]
        out_name = f"{run_id}_{step_key}.{file_type}"

        log_manager = context.instance.compute_log_manager
        try:
            io_kind = ComputeIOType(file_type)
            payload = log_manager.get_local_path(run_id, step_key, io_kind)
            if not os.path.exists(payload):
                payload = io.BytesIO()
            cache_timeout = None if log_manager.is_watch_completed(run_id, step_key) else 0
        except ValueError:
            # Unknown file_type: serve an empty, uncached download.
            payload = io.BytesIO()
            cache_timeout = 0

        if not payload:
            payload = io.BytesIO()

        return send_file(
            payload, as_attachment=True, attachment_filename=out_name, cache_timeout=cache_timeout
        )

    return view
def download_dump_view(context):
    """Build the Flask view that serves a gzipped debug dump of a run."""
    context = check.inst_param(context, "context", WorkspaceProcessContext)

    def view(run_id):
        run = context.instance.get_run_by_id(run_id)
        # Validate before building the payload: the original only checked the
        # invariant after DebugRunPayload.build, which would already have
        # failed on a missing run.
        check.invariant(run is not None)
        debug_payload = DebugRunPayload.build(context.instance, run)

        out_name = f"{run_id}.gzip"

        result = io.BytesIO()
        with gzip.GzipFile(fileobj=result, mode="wb") as file:
            debug_payload.write(file)

        result.seek(0)  # be kind, please rewind

        return send_file(result, as_attachment=True, attachment_filename=out_name)

    return view
def instantiate_app_with_views(
    context, schema, app_path_prefix, target_dir=os.path.dirname(__file__)
):
    """Assemble the Dagit Flask app under ``app_path_prefix``.

    Wires up the GraphQL HTTP and websocket endpoints, log/debug-dump
    downloads, notebook rendering, the version info route, and the static
    single-page webapp.
    """
    app = Flask(
        "dagster-ui",
        static_url_path=app_path_prefix,
        static_folder=os.path.join(target_dir, "./webapp/build"),
    )
    subscription_server = DagsterSubscriptionServer(schema=schema)

    # Websocket routes
    sockets = Sockets(app)
    sockets.add_url_rule(
        f"{app_path_prefix}/graphql",
        "graphql",
        dagster_graphql_subscription_view(subscription_server, context),
    )

    # HTTP routes
    bp = Blueprint("routes", __name__, url_prefix=app_path_prefix)
    bp.add_url_rule("/graphiql", "graphiql", lambda: redirect(f"{app_path_prefix}/graphql", 301))
    bp.add_url_rule(
        "/graphql",
        "graphql",
        DagsterGraphQLView.as_view(
            "graphql",
            schema=schema,
            graphiql=True,
            graphiql_template=PLAYGROUND_TEMPLATE,
            executor=Executor(),
            context=context,
        ),
    )

    bp.add_url_rule(
        # should match the `build_local_download_url`
        "/download/<string:run_id>/<string:step_key>/<string:file_type>",
        "download_view",
        download_log_view(context),
    )
    bp.add_url_rule(
        "/download_debug/<string:run_id>",
        "download_dump_view",
        download_dump_view(context),
    )

    # these routes are specifically for the Dagit UI and are not part of the graphql
    # API that we want other people to consume, so they're separate for now.
    bp.add_url_rule("/dagit/notebook", "notebook", lambda: notebook_view(request.args))
    bp.add_url_rule("/dagit_info", "sanity_view", info_view)

    index_path = os.path.join(target_dir, "./webapp/build/index.html")

    def index_view():
        # Serve the SPA shell, rewriting asset URLs so the app works when
        # mounted under a non-root path prefix.
        try:
            with open(index_path) as f:
                rendered_template = render_template_string(f.read())
                return rendered_template.replace(
                    'src="/static', f'src="{app_path_prefix}/static'
                ).replace('href="/static', f'href="{app_path_prefix}/static')
        except FileNotFoundError:
            raise Exception(
                """Can't find webapp files. Probably webapp isn't built. If you are using
                dagit, then probably it's a corrupted installation or a bug. However, if you are
                developing dagit locally, your problem can be fixed as follows:

                cd ./python_modules/
                make rebuild_dagit"""
            )

    def error_redirect(_path):
        # Unknown routes fall through to the SPA, which does its own routing.
        return index_view()

    bp.add_url_rule("/", "index_view", index_view)
    bp.context_processor(lambda: {"app_path_prefix": app_path_prefix})

    app.app_protocol = lambda environ_path_info: "graphql-ws"
    app.register_blueprint(bp)
    app.register_error_handler(404, error_redirect)

    # if the user asked for a path prefix, handle the naked domain just in case they are not
    # filtering inbound traffic elsewhere and redirect to the path prefix.
    if app_path_prefix:
        app.add_url_rule("/", "force-path-prefix", lambda: redirect(app_path_prefix, 301))

    CORS(app)

    return app
def create_app_from_workspace(
    workspace: Workspace, instance: DagsterInstance, path_prefix: str = ""
):
    """Create the Dagit Flask app for *workspace*, optionally mounted at *path_prefix*.

    Raises Exception when *path_prefix* does not start with "/" or ends with "/".
    """
    check.inst_param(workspace, "workspace", Workspace)
    check.inst_param(instance, "instance", DagsterInstance)
    check.str_param(path_prefix, "path_prefix")

    if path_prefix:
        if not path_prefix.startswith("/"):
            raise Exception(f'The path prefix should begin with a leading "/": got {path_prefix}')
        if path_prefix.endswith("/"):
            raise Exception(f'The path prefix should not include a trailing "/": got {path_prefix}')

    warn_if_compute_logs_disabled()

    print("Loading repository...")  # pylint: disable=print-call

    process_context = WorkspaceProcessContext(
        instance=instance, workspace=workspace, version=__version__
    )

    log_workspace_stats(instance, process_context)

    return instantiate_app_with_views(process_context, create_schema(), path_prefix)
| true | true |
f7389c2b6b455447ae8105af74392a88df24aa73 | 55 | py | Python | python/testData/refactoring/changeSignature/removeDefaultFromParam.after.py | truthiswill/intellij-community | fff88cfb0dc168eea18ecb745d3e5b93f57b0b95 | [
"Apache-2.0"
] | 2 | 2019-04-28T07:48:50.000Z | 2020-12-11T14:18:08.000Z | python/testData/refactoring/changeSignature/removeDefaultFromParam.after.py | truthiswill/intellij-community | fff88cfb0dc168eea18ecb745d3e5b93f57b0b95 | [
"Apache-2.0"
] | 173 | 2018-07-05T13:59:39.000Z | 2018-08-09T01:12:03.000Z | python/testData/refactoring/changeSignature/removeDefaultFromParam.after.py | truthiswill/intellij-community | fff88cfb0dc168eea18ecb745d3e5b93f57b0b95 | [
"Apache-2.0"
] | 2 | 2020-03-15T08:57:37.000Z | 2020-04-07T04:48:14.000Z | def bar(a, b):
pass
bar(1, 3)
bar(1, b=3)
bar(1, 2) | 7.857143 | 14 | 0.509091 | def bar(a, b):
pass
bar(1, 3)
bar(1, b=3)
bar(1, 2) | true | true |
f7389c8aa5a728e71f63988b7eb4de5389e7d0f7 | 2,670 | py | Python | inst/python/torchtools/data_util.py | f0nzie/rTorch | 0010f675b7c086a03d0ead38d883f752cf159e80 | [
"MIT"
] | 79 | 2018-11-21T16:07:18.000Z | 2021-11-03T22:19:20.000Z | inst/python/torchtools/data_util.py | f0nzie/rTorch.old | 40292ecd8a9ac1af6a03247cbb5f7a3227d60e2f | [
"MIT"
] | 3 | 2020-04-13T00:39:26.000Z | 2020-08-30T01:51:17.000Z | inst/python/torchtools/data_util.py | f0nzie/rTorch.old | 40292ecd8a9ac1af6a03247cbb5f7a3227d60e2f | [
"MIT"
] | 10 | 2018-11-21T16:42:09.000Z | 2020-10-28T15:33:41.000Z | import gzip
import os
from os import path
import numpy as np
import sys
if sys.version_info.major < 3:
import urllib
else:
import urllib.request as request
# Root directory under which datasets are stored/downloaded.
DATASET_DIR = 'datasets/'
# The four gzip archives that make up the MNIST digits dataset.
MNIST_FILES = ["train-images-idx3-ubyte.gz", "train-labels-idx1-ubyte.gz",
               "t10k-images-idx3-ubyte.gz", "t10k-labels-idx1-ubyte.gz"]
def download_file(url, local_path):
    """Download *url* to *local_path*, creating the parent directory first."""
    parent_dir = path.dirname(local_path)
    if not path.exists(parent_dir):
        print("Creating the directory '%s' ..." % parent_dir)
        os.makedirs(parent_dir)

    print("Downloading from '%s' ..." % url)
    # urllib's retrieval API differs between Python 2 and 3; the matching
    # module was bound at import time at the top of this file.
    if sys.version_info.major < 3:
        urllib.URLopener().retrieve(url, local_path)
    else:
        request.urlretrieve(url, local_path)
def download_mnist(local_path):
    """Fetch any MNIST archive files missing from *local_path*."""
    url_root = "http://yann.lecun.com/exdb/mnist/"
    for archive_name in MNIST_FILES:
        destination = os.path.join(local_path, archive_name)
        if not path.exists(destination):
            download_file(url_root + archive_name, destination)
def one_hot(x, n):
    """Encode integer labels *x* as a ``(len(x), n)`` one-hot float array.

    *x* may be a list or any-shape ndarray; it is flattened first.
    """
    if type(x) == list:
        x = np.array(x)
    labels = x.flatten()
    encoded = np.zeros((len(labels), n))
    # Set one cell per row: row i gets a 1 at column labels[i].
    encoded[np.arange(len(labels)), labels] = 1
    return encoded
def load_mnist(ntrain=60000, ntest=10000, onehot=True):
    """Load the MNIST digits, downloading the archives on first use.

    Args:
        ntrain: number of training examples to keep (max 60000).
        ntest: number of test examples to keep (max 10000).
        onehot: if True, labels are returned one-hot encoded as (N, 10).

    Returns:
        (trX, teX, trY, teY) with images as float arrays of shape (N, 784)
        scaled to [0, 1].
    """
    data_dir = os.path.join(DATASET_DIR, 'mnist_digits/')
    if not path.exists(data_dir):
        download_mnist(data_dir)
    else:
        # check all files
        checks = [path.exists(os.path.join(data_dir, f)) for f in MNIST_FILES]
        if not np.all(checks):
            download_mnist(data_dir)

    # IDX format: image files carry a 16-byte header, label files an 8-byte one.
    with gzip.open(os.path.join(data_dir, 'train-images-idx3-ubyte.gz')) as fd:
        buf = fd.read()
        loaded = np.frombuffer(buf, dtype=np.uint8)
        trX = loaded[16:].reshape((60000, 28 * 28)).astype(float)

    with gzip.open(os.path.join(data_dir, 'train-labels-idx1-ubyte.gz')) as fd:
        buf = fd.read()
        loaded = np.frombuffer(buf, dtype=np.uint8)
        trY = loaded[8:].reshape((60000))

    with gzip.open(os.path.join(data_dir, 't10k-images-idx3-ubyte.gz')) as fd:
        buf = fd.read()
        loaded = np.frombuffer(buf, dtype=np.uint8)
        teX = loaded[16:].reshape((10000, 28 * 28)).astype(float)

    with gzip.open(os.path.join(data_dir, 't10k-labels-idx1-ubyte.gz')) as fd:
        buf = fd.read()
        loaded = np.frombuffer(buf, dtype=np.uint8)
        teY = loaded[8:].reshape((10000))

    # Scale pixel values from [0, 255] to [0, 1].
    trX /= 255.
    teX /= 255.

    trX = trX[:ntrain]
    trY = trY[:ntrain]

    teX = teX[:ntest]
    teY = teY[:ntest]

    if onehot:
        trY = one_hot(trY, 10)
        teY = one_hot(teY, 10)
    else:
        trY = np.asarray(trY)
        teY = np.asarray(teY)

    return trX, teX, trY, teY
| 27.8125 | 79 | 0.613109 | import gzip
import os
from os import path
import numpy as np
import sys
if sys.version_info.major < 3:
import urllib
else:
import urllib.request as request
# Root directory under which datasets are stored/downloaded.
DATASET_DIR = 'datasets/'
# The four gzip archives that make up the MNIST digits dataset.
MNIST_FILES = ["train-images-idx3-ubyte.gz", "train-labels-idx1-ubyte.gz",
               "t10k-images-idx3-ubyte.gz", "t10k-labels-idx1-ubyte.gz"]
def download_file(url, local_path):
    """Download *url* to *local_path*, creating the parent directory first."""
    parent_dir = path.dirname(local_path)
    if not path.exists(parent_dir):
        print("Creating the directory '%s' ..." % parent_dir)
        os.makedirs(parent_dir)

    print("Downloading from '%s' ..." % url)
    # urllib's retrieval API differs between Python 2 and 3; the matching
    # module was bound at import time at the top of this file.
    if sys.version_info.major < 3:
        urllib.URLopener().retrieve(url, local_path)
    else:
        request.urlretrieve(url, local_path)
def download_mnist(local_path):
    """Fetch any MNIST archive files missing from *local_path*."""
    url_root = "http://yann.lecun.com/exdb/mnist/"
    for archive_name in MNIST_FILES:
        destination = os.path.join(local_path, archive_name)
        if not path.exists(destination):
            download_file(url_root + archive_name, destination)
def one_hot(x, n):
    """Encode integer labels *x* as a ``(len(x), n)`` one-hot float array.

    *x* may be a list or any-shape ndarray; it is flattened first.
    """
    if type(x) == list:
        x = np.array(x)
    labels = x.flatten()
    encoded = np.zeros((len(labels), n))
    # Set one cell per row: row i gets a 1 at column labels[i].
    encoded[np.arange(len(labels)), labels] = 1
    return encoded
def load_mnist(ntrain=60000, ntest=10000, onehot=True):
    """Load the MNIST digits, downloading the archives on first use.

    Args:
        ntrain: number of training examples to keep (max 60000).
        ntest: number of test examples to keep (max 10000).
        onehot: if True, labels are returned one-hot encoded as (N, 10).

    Returns:
        (trX, teX, trY, teY) with images as float arrays of shape (N, 784)
        scaled to [0, 1].
    """
    data_dir = os.path.join(DATASET_DIR, 'mnist_digits/')
    if not path.exists(data_dir):
        download_mnist(data_dir)
    else:
        # Re-download if any of the four archives is missing.
        checks = [path.exists(os.path.join(data_dir, f)) for f in MNIST_FILES]
        if not np.all(checks):
            download_mnist(data_dir)

    # IDX format: image files carry a 16-byte header, label files an 8-byte one.
    with gzip.open(os.path.join(data_dir, 'train-images-idx3-ubyte.gz')) as fd:
        buf = fd.read()
        loaded = np.frombuffer(buf, dtype=np.uint8)
        trX = loaded[16:].reshape((60000, 28 * 28)).astype(float)

    with gzip.open(os.path.join(data_dir, 'train-labels-idx1-ubyte.gz')) as fd:
        buf = fd.read()
        loaded = np.frombuffer(buf, dtype=np.uint8)
        trY = loaded[8:].reshape((60000))

    with gzip.open(os.path.join(data_dir, 't10k-images-idx3-ubyte.gz')) as fd:
        buf = fd.read()
        loaded = np.frombuffer(buf, dtype=np.uint8)
        teX = loaded[16:].reshape((10000, 28 * 28)).astype(float)

    with gzip.open(os.path.join(data_dir, 't10k-labels-idx1-ubyte.gz')) as fd:
        buf = fd.read()
        loaded = np.frombuffer(buf, dtype=np.uint8)
        teY = loaded[8:].reshape((10000))

    # Scale pixel values from [0, 255] to [0, 1].
    trX /= 255.
    teX /= 255.

    trX = trX[:ntrain]
    trY = trY[:ntrain]

    teX = teX[:ntest]
    teY = teY[:ntest]

    if onehot:
        trY = one_hot(trY, 10)
        teY = one_hot(teY, 10)
    else:
        trY = np.asarray(trY)
        teY = np.asarray(teY)

    return trX, teX, trY, teY
f7389ca5b4a58075edda4abe42d5044cedeb2d5d | 270 | py | Python | embedding_utils/__init__.py | Johnny-Wish/fake-news-detection-pipeline | 3bdad59d680968375a23d72c80af7d6ef11d7711 | [
"Apache-2.0"
] | 52 | 2018-11-02T00:00:10.000Z | 2020-06-21T03:55:13.000Z | embedding_utils/__init__.py | Johnny-Wish/fake-news-detection-pipeline | 3bdad59d680968375a23d72c80af7d6ef11d7711 | [
"Apache-2.0"
] | null | null | null | embedding_utils/__init__.py | Johnny-Wish/fake-news-detection-pipeline | 3bdad59d680968375a23d72c80af7d6ef11d7711 | [
"Apache-2.0"
] | 8 | 2018-11-12T20:54:48.000Z | 2020-02-14T08:24:40.000Z | from .embedding_loader import EmbeddingLoader
from .embedding_visualizer import visualize_embeddings
from .embedding_getter import DocumentEmbedder, DocumentSequence
# Public API of the embedding_utils package. __all__ must list attribute
# *names* (strings), not the objects themselves: with objects, a wildcard
# import (`from embedding_utils import *`) raises
# "TypeError: attribute name must be string".
__all__ = [
    "EmbeddingLoader",
    "visualize_embeddings",
    "DocumentSequence",
    "DocumentEmbedder",
]
from .embedding_loader import EmbeddingLoader
from .embedding_visualizer import visualize_embeddings
from .embedding_getter import DocumentEmbedder, DocumentSequence
__all__ = [
EmbeddingLoader,
visualize_embeddings,
DocumentSequence,
DocumentEmbedder
] | true | true |
f7389d8989af34f20343a6ecea9a78bab8f4f000 | 109 | py | Python | tic_tac_toe/ai_agents/minimax_agent/__init__.py | shuvoxcd01/tictactoe | 6c949200734233e1a9ea5cd7280e2ce409f3eb0b | [
"Apache-2.0"
] | null | null | null | tic_tac_toe/ai_agents/minimax_agent/__init__.py | shuvoxcd01/tictactoe | 6c949200734233e1a9ea5cd7280e2ce409f3eb0b | [
"Apache-2.0"
] | null | null | null | tic_tac_toe/ai_agents/minimax_agent/__init__.py | shuvoxcd01/tictactoe | 6c949200734233e1a9ea5cd7280e2ce409f3eb0b | [
"Apache-2.0"
] | null | null | null | import os
memory_file_path = os.path.join(os.path.dirname(os.path.abspath(__file__)), "minimax_memory.json") | 36.333333 | 98 | 0.788991 | import os
memory_file_path = os.path.join(os.path.dirname(os.path.abspath(__file__)), "minimax_memory.json") | true | true |
f7389fb21329c82635c4439aa1232be460237d97 | 2,420 | py | Python | delivery_bots/bots/tgbot/checkout/payment/handlers.py | savilard/delivery-bots | 5385a08c5c02a886c3105413624424d6127b9363 | [
"MIT"
] | null | null | null | delivery_bots/bots/tgbot/checkout/payment/handlers.py | savilard/delivery-bots | 5385a08c5c02a886c3105413624424d6127b9363 | [
"MIT"
] | null | null | null | delivery_bots/bots/tgbot/checkout/payment/handlers.py | savilard/delivery-bots | 5385a08c5c02a886c3105413624424d6127b9363 | [
"MIT"
] | null | null | null | from aiogram import Dispatcher, types
from aiogram.dispatcher import FSMContext
from delivery_bots.bots.tgbot.settings import TgBotSettings
from delivery_bots.bots.tgbot.states import BotState
async def handle_payment(query: types.CallbackQuery, state: FSMContext):
    """Send a Telegram invoice for the current order.

    Reads the order description and the order/delivery totals from the FSM
    state and issues an invoice through the configured payment provider.
    """
    data = await state.get_data()
    prices = [
        types.LabeledPrice(label='Заказ', amount=data['order_total_amount']),
        types.LabeledPrice(label='Доставка', amount=data['delivery_total_amount']),
    ]
    await query.message.bot.send_invoice(
        query.from_user.id,
        title='Оплата заказа',
        description=data['order_description'],
        provider_token=TgBotSettings().payment_token,
        currency='RUB',
        is_flexible=False,
        prices=prices,
        start_parameter='create_invoice_pizza',
        payload='pizza_order',
        need_phone_number=True,
    )
async def handle_pre_checkout(query: types.CallbackQuery, pre_checkout_query: types.PreCheckoutQuery):
    """Approve the pre-checkout query so Telegram can complete the payment.

    Always answers ``ok=True``; ``error_message`` is only shown to the user
    if Telegram fails to finalize the checkout.

    NOTE(review): this handler takes two parameters but is registered below
    via ``dp.pre_checkout_query_handler`` — confirm aiogram actually supplies
    both arguments to the callback.
    """
    await query.message.bot.answer_pre_checkout_query(
        pre_checkout_query.id,
        ok=True,
        error_message=(
            'Инопланетяне пытались украсть вашу карту, но мы успешно защитили ваши учетные данные,'
            + 'попробуем заплатить еще раз через несколько минут, нам нужен небольшой отдых.'
        ),
    )
async def handle_got_payment(message: types.Message):
    """Acknowledge a successful payment in the chat."""
    payment = message.successful_payment
    template = (
        'Hoooooray! Спасибо за оплату! Мы обработаем ваш заказ на `{} {}`'
        + ' быстро настолько, насколько это возможно! Оставайтесь на связи.'
    )
    # total_amount is divided by 100 for display (value arrives in minor units).
    text = template.format(payment.total_amount / 100, payment.currency)
    await message.bot.send_message(message.chat.id, text)
def register_payment_handler(dp: Dispatcher):
    """Wire the payment-flow handlers into the dispatcher for the payment state."""
    # Invoice is sent when the user confirms checkout via a callback button.
    dp.register_callback_query_handler(handle_payment, state=BotState.payment)
    # Every pre-checkout query is accepted (the lambda filter always matches).
    dp.pre_checkout_query_handler(handle_pre_checkout, lambda query: True, state=BotState.payment)
    # The successful-payment service message triggers the acknowledgement.
    dp.message_handler(handle_got_payment, content_types=types.ContentTypes.SUCCESSFUL_PAYMENT, state=BotState.payment)
| 37.8125 | 119 | 0.709091 | from aiogram import Dispatcher, types
from aiogram.dispatcher import FSMContext
from delivery_bots.bots.tgbot.settings import TgBotSettings
from delivery_bots.bots.tgbot.states import BotState
async def handle_payment(query: types.CallbackQuery, state: FSMContext):
    """Send a Telegram invoice built from the order stored in FSM state."""
    current_state = await state.get_data()
    payment_token = TgBotSettings().payment_token
    delivery_total_amount = current_state['delivery_total_amount']
    order_description = current_state['order_description']
    order_total_amount = current_state['order_total_amount']
    # NOTE(review): Telegram expects invoice amounts in the smallest currency
    # units — confirm the stored totals already use that unit.
    await query.message.bot.send_invoice(
        query.from_user.id,
        title='Оплата заказа',
        description=order_description,
        provider_token=payment_token,
        currency='RUB',
        is_flexible=False,
        prices=[
            types.LabeledPrice(label='Заказ', amount=order_total_amount),
            types.LabeledPrice(label='Доставка', amount=delivery_total_amount),
        ],
        start_parameter='create_invoice_pizza',
        payload='pizza_order',
        need_phone_number=True,
    )
async def handle_pre_checkout(query: types.CallbackQuery, pre_checkout_query: types.PreCheckoutQuery):
    """Approve the pre-checkout query; the error text is shown only on failure."""
    await query.message.bot.answer_pre_checkout_query(
        pre_checkout_query.id,
        ok=True,
        error_message=(
            'Инопланетяне пытались украсть вашу карту, но мы успешно защитили ваши учетные данные,'
            + 'попробуем заплатить еще раз через несколько минут, нам нужен небольшой отдых.'
        ),
    )
async def handle_got_payment(message: types.Message):
    """Thank the user after a successful payment (amount shown divided by 100)."""
    await message.bot.send_message(
        message.chat.id,
        (
            'Hoooooray! Спасибо за оплату! Мы обработаем ваш заказ на `{} {}`'
            + ' быстро настолько, насколько это возможно! Оставайтесь на связи.'
        ).format(
            message.successful_payment.total_amount / 100,
            message.successful_payment.currency,
        ),
    )
def register_payment_handler(dp: Dispatcher):
    """Wire the payment-flow handlers into the dispatcher."""
    dp.register_callback_query_handler(handle_payment, state=BotState.payment)
    dp.pre_checkout_query_handler(handle_pre_checkout, lambda query: True, state=BotState.payment)
    dp.message_handler(handle_got_payment, content_types=types.ContentTypes.SUCCESSFUL_PAYMENT, state=BotState.payment)
| true | true |
f738a075b3abb51ebd1c8c2f76ad6b5d279f6e02 | 759 | py | Python | app/users/forms.py | Nilsen11/django-training-CBV | b470fd1ea11df5d360389cbd24cac18c9383b8bd | [
"MIT"
] | null | null | null | app/users/forms.py | Nilsen11/django-training-CBV | b470fd1ea11df5d360389cbd24cac18c9383b8bd | [
"MIT"
] | null | null | null | app/users/forms.py | Nilsen11/django-training-CBV | b470fd1ea11df5d360389cbd24cac18c9383b8bd | [
"MIT"
] | null | null | null | from django import forms
from django.contrib.auth.models import User
from django.contrib.auth.forms import UserCreationForm
from .models import Profile
class UserOurRegistration(UserCreationForm):
    """Registration form: Django's built-in user creation plus a required email."""
    email = forms.EmailField(required=True)
    class Meta:
        model = User
        fields = ['email', 'username', 'password1', 'password2']
class UserUpdateForm(forms.ModelForm):
    """Form for editing an existing user's username and email."""
    email = forms.EmailField()
    class Meta:
        model = User
        fields = ['username', 'email']
class ProfileImage(forms.ModelForm):
    """Profile-picture upload form with a localized field label."""
    def __init__(self, *args, **kwargs):
        super(ProfileImage, self).__init__(*args, **kwargs)
        # Runtime label shown to users (Russian: "Profile image").
        self.fields['img'].label = "Изображение профиля"
    class Meta:
        model = Profile
        fields = ['img']
| 23.71875 | 64 | 0.667984 | from django import forms
from django.contrib.auth.models import User
from django.contrib.auth.forms import UserCreationForm
from .models import Profile
class UserOurRegistration(UserCreationForm):
    """Registration form: Django's built-in user creation plus a required email."""
    email = forms.EmailField(required=True)
    class Meta:
        model = User
        fields = ['email', 'username', 'password1', 'password2']
class UserUpdateForm(forms.ModelForm):
    """Form for editing an existing user's username and email."""
    email = forms.EmailField()
    class Meta:
        model = User
        fields = ['username', 'email']
class ProfileImage(forms.ModelForm):
    """Profile-picture upload form with a localized field label."""
    def __init__(self, *args, **kwargs):
        super(ProfileImage, self).__init__(*args, **kwargs)
        # Runtime label shown to users (Russian: "Profile image").
        self.fields['img'].label = "Изображение профиля"
    class Meta:
        model = Profile
        fields = ['img']
| true | true |
f738a0eb7d1f987ced124ebc931373226ca61e84 | 3,604 | py | Python | src/get_playlist_videos.py | manufactured/Youtube_Scraper_mod1 | df4293655227a43b309fd00f9479d437b12838e8 | [
"MIT"
] | null | null | null | src/get_playlist_videos.py | manufactured/Youtube_Scraper_mod1 | df4293655227a43b309fd00f9479d437b12838e8 | [
"MIT"
] | null | null | null | src/get_playlist_videos.py | manufactured/Youtube_Scraper_mod1 | df4293655227a43b309fd00f9479d437b12838e8 | [
"MIT"
] | null | null | null | import sqlite3
from datetime import timedelta
from get_video_stats import get_videos_stats
from get_channel_details import get_channel_details
def get_playlist_videos(youtube, playlistID, ec=False, ch_id=None):
    """Fetch all video ids of a YouTube playlist and refresh them in youtube.db.

    Args:
        youtube: authorized googleapiclient YouTube service object.
        playlistID (str): id of the playlist to scan.
        ec (bool): "external channel" mode — when True, rows are inserted
            using the supplied ``ch_id`` and an extra flag column set to 1.
        ch_id: channel id used for inserted rows when ``ec`` is True.

    Returns:
        0 when no channel id was seen (playlist treated as empty/removed),
        otherwise None.

    NOTE(review): tb_videos/tb_playlists column layout is inferred from the
    SQL statements below — confirm against the schema definition.
    """
    ch_ID = 'skip'
    conn = sqlite3.connect('youtube.db')
    cur = conn.cursor()
    videos = []
    next_page_token = None
    video_IDS = []
    # Page through the playlist 50 items at a time until no nextPageToken.
    while True:
        res = youtube.playlistItems().list(part="snippet",
                                           maxResults=50,
                                           playlistId=playlistID,
                                           pageToken=next_page_token
                                           ).execute()
        videos += res['items']
        next_page_token = res.get('nextPageToken')
        if next_page_token is None:
            break
    for video in videos:
        Video_id = video['snippet']['resourceId']['videoId']
        video_IDS.append(Video_id)
        try:
            ch_ID = video['snippet']['channelId']
        except KeyError:
            # Presumably deleted/private items lack channelId — keep sentinel.
            ch_ID = 'skip'
        if ec:
            params = (Video_id, "", 0, 0, ch_id, None, None, 0, ch_id, '', 0, 0, 0, 0, 0, '', 0, 0, 1, 0)
            cur.execute("INSERT OR IGNORE INTO tb_videos VALUES (?, ?, ?,? ,?, ?, ?, ?,?, ?,?,?,?,?,?,?,?,?,?,?)", params)
        else:
            params = (Video_id, "", 0, 0, "", "", "")
            cur.execute("INSERT OR IGNORE INTO tb_videos VALUES (?, ?, ?,? ,?, ?, ?, 0,'', '',0,0,0,0,0,'',0,0,0,0)", params)
    conn.commit()  # Push the data into database
    conn.close()
    if ch_ID == 'skip':
        # No channel id seen at all: playlist is empty or entirely removed.
        conn = sqlite3.connect('youtube.db')
        cur = conn.cursor()
        cur.execute("SELECT Current_Video_Count FROM tb_playlists WHERE playlist_ID = ? ", (playlistID,))
        num = cur.fetchone()[0]
        print(num)
        if num == 0:
            cur.execute("UPDATE tb_playlists SET Is_Removed = ? WHERE playlist_ID = ? ", (1, playlistID))
        conn.commit()
        conn.close()
        return 0
    else:
        if not ec:
            get_channel_details(youtube, ch_ID, True, playlistID)
        Playlist_Seconds, num_new = get_videos_stats(youtube, video_IDS, 1, playlistID)
        print('Videos in this playlist =', num_new)
        Playlist_Duration = str(timedelta(seconds=Playlist_Seconds))
        conn = sqlite3.connect('youtube.db')
        cur = conn.cursor()
        cur.execute("SELECT Current_Video_Count FROM tb_playlists WHERE playlist_ID = ? ", (playlistID,))
        num = cur.fetchone()[0]
        if num != num_new:
            cur.execute("UPDATE tb_playlists SET Current_Video_Count = ? WHERE playlist_ID = ? ", (num_new, playlistID))
        cur.execute("UPDATE tb_playlists SET Playlist_Seconds = ? WHERE playlist_ID = ? ", (Playlist_Seconds, playlistID))
        cur.execute("UPDATE tb_playlists SET Playlist_Duration = ? WHERE playlist_ID = ? ", (Playlist_Duration, playlistID))
        cur.execute("SELECT COUNT(Video_ID) FROM tb_videos WHERE Is_Deleted = ? AND playlist_ID = ? ", (1, playlistID))
        num = cur.fetchone()[0]
        cur.execute("UPDATE tb_playlists SET Deleted_Videos = ? WHERE playlist_ID = ? ", (num, playlistID))
        conn.commit()  # Push the data into database
        conn.close()
if __name__ == "__main__":
pass | 42.4 | 133 | 0.536626 | import sqlite3
from datetime import timedelta
from get_video_stats import get_videos_stats
from get_channel_details import get_channel_details
def get_playlist_videos(youtube,playlistID,ec=False,ch_id=None):
    """Fetch all video ids of a playlist and refresh their rows in youtube.db.

    NOTE(review): tb_videos/tb_playlists column layout is inferred from the
    SQL below — confirm against the schema definition.
    """
    ch_ID = 'skip'
    conn = sqlite3.connect('youtube.db')
    cur = conn.cursor()
    videos = []
    next_page_token = None
    video_IDS = []
    # Page through the playlist 50 items at a time until no nextPageToken.
    while 1:
        res = youtube.playlistItems().list(part="snippet",
                                           maxResults=50,
                                           playlistId=playlistID,
                                           pageToken=next_page_token
                                           ).execute()
        videos += res['items']
        next_page_token = res.get('nextPageToken')
        if next_page_token is None:
            break
    for video in videos:
        Video_id = video['snippet']['resourceId']['videoId']; video_IDS.append(Video_id)
        try:
            ch_ID = video['snippet']['channelId']
        except:
            ch_ID = 'skip'
        if ec == True:
            params = (Video_id,"",0,0,ch_id,None,None,0,ch_id,'',0,0,0,0,0,'',0,0,1,0)
            cur.execute("INSERT OR IGNORE INTO tb_videos VALUES (?, ?, ?,? ,?, ?, ?, ?,?, ?,?,?,?,?,?,?,?,?,?,?)", params)
        else:
            params = (Video_id,"",0,0,"","","")
            cur.execute("INSERT OR IGNORE INTO tb_videos VALUES (?, ?, ?,? ,?, ?, ?, 0,'', '',0,0,0,0,0,'',0,0,0,0)", params)
    conn.commit()
    conn.close()
    if ch_ID == 'skip':
        # No channel id seen at all: playlist is empty or entirely removed.
        conn = sqlite3.connect('youtube.db')
        cur = conn.cursor()
        cur.execute("SELECT Current_Video_Count FROM tb_playlists WHERE playlist_ID = ? ",(playlistID,))
        num = cur.fetchone()
        num=num[0]
        print(num)
        if num == 0:
            cur.execute("UPDATE tb_playlists SET Is_Removed = ? WHERE playlist_ID = ? ",(1,playlistID))
        conn.commit()
        conn.close()
        return 0
    else:
        if ec == False:
            get_channel_details(youtube,ch_ID,True,playlistID)
        Playlist_Seconds,num_new = get_videos_stats(youtube,video_IDS,1,playlistID)
        print('Videos in this playlist =',num_new)
        Playlist_Duration = str(timedelta(seconds = Playlist_Seconds))
        conn = sqlite3.connect('youtube.db')
        cur = conn.cursor()
        cur.execute("SELECT Current_Video_Count FROM tb_playlists WHERE playlist_ID = ? ",(playlistID,))
        num = cur.fetchone()
        num=num[0]
        if num != num_new:
            cur.execute("UPDATE tb_playlists SET Current_Video_Count = ? WHERE playlist_ID = ? ",(num_new,playlistID))
        cur.execute("UPDATE tb_playlists SET Playlist_Seconds = ? WHERE playlist_ID = ? ",(Playlist_Seconds,playlistID))
        cur.execute("UPDATE tb_playlists SET Playlist_Duration = ? WHERE playlist_ID = ? ",(Playlist_Duration,playlistID))
        cur.execute("SELECT COUNT(Video_ID) FROM tb_videos WHERE Is_Deleted = ? AND playlist_ID = ? ",(1,playlistID))
        num = cur.fetchone()
        num=num[0]
        cur.execute("UPDATE tb_playlists SET Deleted_Videos = ? WHERE playlist_ID = ? ",(num,playlistID))
        conn.commit()
        conn.close()
if __name__ == "__main__":
pass | true | true |
f738a3279aa6cfae687cdccf7c90a0d459f6e3ea | 1,410 | py | Python | actions/plugins/modules/real_facts.py | ccamacho/pystol-galaxy | 99a7281bbfaac436a58255c2ef31c695d3f49fc7 | [
"Apache-2.0"
] | null | null | null | actions/plugins/modules/real_facts.py | ccamacho/pystol-galaxy | 99a7281bbfaac436a58255c2ef31c695d3f49fc7 | [
"Apache-2.0"
] | null | null | null | actions/plugins/modules/real_facts.py | ccamacho/pystol-galaxy | 99a7281bbfaac436a58255c2ef31c695d3f49fc7 | [
"Apache-2.0"
] | null | null | null | import random
from ansible.module_utils.basic import AnsibleModule
ANSIBLE_METADATA = {
'metadata_version': '1.1',
'status': ['preview'],
'supported_by': 'community'
}
DOCUMENTATION = '''
---
module: real_facts
short_description: A module that dishes out the true facts.
version_added: "2.8"
description:
- "A module that dishes out the true facts."
options:
name:
default: Jane Doe
author:
- David Newswanger (@newswangerd)
'''
EXAMPLES = '''
# Pass in a message
- name: Test with a message
real_facts:
name: David Newswanger
'''
RETURN = '''
fact:
description: Actual facts
type: str
sample: Jane Doe is a smart cookie.
'''
FACTS = [
"{name} is looking great today!",
"{name} is a smart cookie.",
"I’d choose {name}'s company over pizza anytime."
]
def run_module():
    """Build the Ansible module, pick a random fact and exit with it.

    Supports check mode: the fact is computed either way (nothing is
    changed on the target), and the module always terminates through
    ``module.exit_json``.
    """
    # define available arguments/parameters a user can pass to the module
    module_args = dict(
        name=dict(type='str', default='Jane Doe'),
    )
    module = AnsibleModule(
        argument_spec=module_args,
        supports_check_mode=True
    )
    result = dict(
        changed=False,
        fact=''
    )
    result['fact'] = random.choice(FACTS).format(
        name=module.params['name']
    )
    # An Ansible module must finish via exit_json/fail_json even in check
    # mode; the previous plain `return result` produced no module output.
    module.exit_json(**result)
def main():
    """Console entry point: delegate to run_module."""
    run_module()
if __name__ == '__main__':
    main()
| 17.195122 | 73 | 0.629787 | import random
from ansible.module_utils.basic import AnsibleModule
ANSIBLE_METADATA = {
'metadata_version': '1.1',
'status': ['preview'],
'supported_by': 'community'
}
DOCUMENTATION = '''
---
module: real_facts
short_description: A module that dishes out the true facts.
version_added: "2.8"
description:
- "A module that dishes out the true facts."
options:
name:
default: Jane Doe
author:
- David Newswanger (@newswangerd)
'''
EXAMPLES = '''
# Pass in a message
- name: Test with a message
real_facts:
name: David Newswanger
'''
RETURN = '''
fact:
description: Actual facts
type: str
sample: Jane Doe is a smart cookie.
'''
FACTS = [
"{name} is looking great today!",
"{name} is a smart cookie.",
"I’d choose {name}'s company over pizza anytime."
]
def run_module():
    """Build the Ansible module, pick a random fact and exit with it.

    Supports check mode: nothing is changed on the target, and the module
    always terminates through ``module.exit_json``.
    """
    # define available arguments/parameters a user can pass to the module
    module_args = dict(
        name=dict(type='str', default='Jane Doe'),
    )
    module = AnsibleModule(
        argument_spec=module_args,
        supports_check_mode=True
    )
    result = dict(
        changed=False,
        fact=''
    )
    result['fact'] = random.choice(FACTS).format(
        name=module.params['name']
    )
    # An Ansible module must finish via exit_json/fail_json even in check
    # mode; the previous plain `return result` produced no module output.
    module.exit_json(**result)
def main():
    """Console entry point: delegate to run_module."""
    run_module()
if __name__ == '__main__':
    main()
| true | true |
f738a3591efb2e9395f343c0d1e211c7ba0b3e16 | 745 | py | Python | kolibri/core/serializers.py | jonboiser/kolibri | 8ea2febc1739ac772007aae4084f0226dfb4ed40 | [
"MIT"
] | 1 | 2021-03-26T03:44:24.000Z | 2021-03-26T03:44:24.000Z | kolibri/core/serializers.py | jonboiser/kolibri | 8ea2febc1739ac772007aae4084f0226dfb4ed40 | [
"MIT"
] | 7 | 2016-06-23T16:01:02.000Z | 2018-12-01T22:15:13.000Z | kolibri/core/serializers.py | MingDai/kolibri | e4719b7d41a40e0cc9fc4150bc137017643fea62 | [
"MIT"
] | 1 | 2021-06-01T23:15:26.000Z | 2021-06-01T23:15:26.000Z | import pytz
from django.utils import timezone
from rest_framework.serializers import DateTimeField, ModelSerializer
from .fields import DateTimeTzField as DjangoDateTimeTzField
class DateTimeTzField(DateTimeField):
    """DRF datetime field that normalizes parsed values to the active timezone.

    Naive datetimes are interpreted as UTC before conversion.
    """

    def to_internal_value(self, data):
        value = super(DateTimeTzField, self).to_internal_value(data)
        if not value.tzinfo:
            value = timezone.make_aware(value, pytz.utc)
        return value.astimezone(timezone.get_current_timezone())
# Map Django's timezone-aware model field to the serializer field above;
# update() then merges in DRF's default model-field mapping.
serializer_field_mapping = {
    DjangoDateTimeTzField: DateTimeTzField,
}
serializer_field_mapping.update(ModelSerializer.serializer_field_mapping)
class KolibriModelSerializer(ModelSerializer):
    """Base ModelSerializer using the timezone-aware datetime field mapping."""
    serializer_field_mapping = serializer_field_mapping
| 28.653846 | 73 | 0.785235 | import pytz
from django.utils import timezone
from rest_framework.serializers import DateTimeField, ModelSerializer
from .fields import DateTimeTzField as DjangoDateTimeTzField
class DateTimeTzField(DateTimeField):
    """DRF datetime field that converts parsed values to the active timezone."""
    def to_internal_value(self, data):
        # Parse with the stock DRF field first.
        data = super(DateTimeTzField, self).to_internal_value(data)
        tz = timezone.get_current_timezone()
        if not data.tzinfo:
            # Naive datetimes are interpreted as UTC before conversion.
            data = timezone.make_aware(data, pytz.utc)
        return data.astimezone(tz)
# Map Django's timezone-aware model field to the serializer field above;
# update() then merges in DRF's default model-field mapping.
serializer_field_mapping = {
    DjangoDateTimeTzField: DateTimeTzField,
}
serializer_field_mapping.update(ModelSerializer.serializer_field_mapping)
class KolibriModelSerializer(ModelSerializer):
    """Base ModelSerializer using the timezone-aware datetime field mapping."""
    serializer_field_mapping = serializer_field_mapping
| true | true |
f738a393c7d730893fc0783bc0c7e4039c508d50 | 8,334 | py | Python | src_py/hat/monitor/client.py | hrvojekeserica/hat-core | 759def68620cf4f8c11e7bbbdbfd1e701dbafb09 | [
"MIT"
] | null | null | null | src_py/hat/monitor/client.py | hrvojekeserica/hat-core | 759def68620cf4f8c11e7bbbdbfd1e701dbafb09 | [
"MIT"
] | null | null | null | src_py/hat/monitor/client.py | hrvojekeserica/hat-core | 759def68620cf4f8c11e7bbbdbfd1e701dbafb09 | [
"MIT"
] | null | null | null | """Library used by components for communication with Monitor Server
This module provides low-level interface (connect/Client) and high-level
interface (run_component) for communication with Monitor Server.
:func:`connect` is used for establishing single chatter based connection
with Monitor Server which is represented by :class:`Client`. Termination of
connection is signaled with :meth:`Client.closed`.
Example of low-level interface usage::
client = await hat.monitor.client.connect({
'name': 'client1',
'group': 'group1',
'monitor_address': 'tcp+sbs://127.0.0.1:23010',
'component_address': None})
assert client.info in client.components
try:
await client.closed
finally:
await client.async_close()
:func:`run_component` provide high-level interface for communication with
Monitor Server. This function first establishes connection to Monitor
Server and then listens component changes and in regard to blessing
and ready tokens calls or cancels `async_run_cb` callback.
In case blessing token matches ready token, `async_run_cb` is called.
While `async_run_cb` is running, once blessing token changes, `async_run_cb` is
canceled. If `async_run_cb` finishes or raises exception, this function closes
connection to monitor server and returns `async_run_cb` result. If connection
to monitor server is closed, this function raises exception.
Example of high-level interface usage::
async def monitor_async_run(monitor):
await asyncio.sleep(10)
return 13
res = await hat.monitor.client.run_component(
conf={'name': 'client',
'group': 'test clients',
'monitor_address': 'tcp+sbs://127.0.0.1:23010',
'component_address': None},
async_run_cb=monitor_async_run)
assert res == 13
Attributes:
mlog (logging.Logger): module logger
"""
import asyncio
import logging
from hat import chatter
from hat import util
from hat.monitor import common
from hat.util import aio
mlog = logging.getLogger(__name__)
async def connect(conf):
    """Establish a chatter connection to the local monitor server.

    The connection is considered established once chatter communication
    is up; a receive loop is spawned in the background.

    Args:
        conf (hat.json.Data): configuration as defined by
            ``hat://monitor/client.yaml#``

    Returns:
        Client

    """
    monitor_address = conf['monitor_address']
    c = Client()
    c._name = conf['name']
    c._group = conf['group']
    c._address = conf['component_address']
    c._components = []
    c._info = None
    c._ready = None
    c._change_cbs = util.CallbackRegistry()
    c._async_group = aio.Group()
    c._conn = await chatter.connect(common.sbs_repo, monitor_address)
    c._async_group.spawn(aio.call_on_cancel, c._conn.async_close)
    mlog.debug("connected to local monitor server %s", monitor_address)
    c._async_group.spawn(c._receive_loop)
    return c
class Client:
    """Connection to the local Monitor Server.

    Instances are created with :func:`connect`; all ``_``-prefixed
    attributes are initialized there.
    """

    @property
    def closed(self):
        """asyncio.Future: closed future"""
        return self._async_group.closed

    @property
    def info(self):
        """Optional[common.ComponentInfo]: client's component info"""
        return self._info

    @property
    def components(self):
        """List[common.ComponentInfo]: global component state"""
        return self._components

    def register_change_cb(self, cb):
        """Register change callback

        Registered callback is called once info and/or components changes.

        Args:
            cb (Callable[[],None]): callback

        Returns:
            util.RegisterCallbackHandle

        """
        return self._change_cbs.register(cb)

    async def async_close(self):
        """Async close"""
        await self._async_group.async_close()

    def set_ready(self, token):
        """Set ready token

        Args:
            token (Optional[int]): ready token

        """
        # No-op when unchanged so repeated calls don't spam the server.
        if token == self._ready:
            return
        self._ready = token
        self._send_msg_client()

    def _send_msg_client(self):
        """Send current name/group/address/ready state to the server."""
        self._conn.send(chatter.Data(
            module='HatMonitor',
            type='MsgClient',
            data=common.create_msg_client_sbs(
                name=self._name,
                group=self._group,
                address=self._address,
                ready=self._ready)))

    def _set_components(self, msg_server):
        """Update local component state from a received MsgServer message."""
        if (msg_server.data.module != 'HatMonitor' or
                msg_server.data.type != 'MsgServer'):
            raise Exception('Message received from server malformed: message '
                            'MsgServer from HatMonitor module expected')
        self._components = [common.component_info_from_sbs(i)
                            for i in msg_server.data.data['components']]
        # Our own entry is identified by matching cid and mid.
        self._info = util.first(
            self._components,
            lambda i:
                i.cid == msg_server.data.data['cid'] and
                i.mid == msg_server.data.data['mid'])
        self._change_cbs.notify()

    async def _receive_loop(self):
        """Announce the client, then apply server updates until disconnect."""
        try:
            self._send_msg_client()
            while True:
                msg = await self._conn.receive()
                self._set_components(msg)
        except chatter.ConnectionClosedError:
            mlog.debug('connection closed')
        finally:
            # Closing the group also closes the chatter connection.
            self._async_group.close()
async def run_component(conf, async_run_cb):
    """Run component

    This method opens new connection to Monitor server and starts client's
    loop which manages blessing/ready states.

    When blessing token matches ready token, `async_run_cb` is called. While
    `async_run_cb` is running, if blessing token changes, `async_run_cb` is
    canceled.

    If `async_run_cb` finishes or raises exception, this function closes
    connection to monitor server and returns `async_run_cb` result. If
    connection to monitor server is closed, this function raises exception.

    TODO:
        * provide opportunity for user to react to blessing token prior to
            setting ready token (additional async_ready_cb)

    Args:
        conf (hat.json.Data): configuration as defined by
            ``hat://monitor/client.yaml#``
        async_run_cb (Callable[[Client],None]): run callback

    Returns:
        Any

    """
    client = await connect(conf)
    try:
        while True:
            await _wait_until_blessed_and_ready(client)
            async_group = aio.Group()
            run_future = async_group.spawn(async_run_cb, client)
            blessed_and_ready_future = async_group.spawn(
                _wait_while_blessed_and_ready, client)
            try:
                # Race the callback against loss of blessing and disconnect.
                done, _ = await asyncio.wait(
                    [run_future, blessed_and_ready_future, client.closed],
                    return_when=asyncio.FIRST_COMPLETED)
                if run_future.done():
                    mlog.debug('async_run_cb finished or raised an exception')
                    return run_future.result()
                if client.closed.done():
                    raise Exception('connection to monitor server closed!')
            finally:
                # Revoke ready token and cancel the callback before retrying.
                if not client.closed.done():
                    client.set_ready(None)
                await async_group.async_close()
    except asyncio.CancelledError:
        raise
    except Exception as e:
        mlog.error('run component exception: %s', e, exc_info=e)
        raise
    finally:
        await client.async_close()
        mlog.debug('component closed')
async def _wait_until_blessed_and_ready(client):
    """Block until the client's blessing token is set and equals its ready token.

    While waiting, acknowledges every new blessing by setting the ready token.
    """
    changes = aio.Queue()

    def _blessed_and_ready():
        info = client.info
        return (info is not None and
                info.blessing is not None and
                info.blessing == info.ready)

    with client.register_change_cb(lambda: changes.put_nowait(None)):
        while not _blessed_and_ready():
            await changes.get_until_empty()
            info = client.info
            if info is not None:
                client.set_ready(info.blessing)
async def _wait_while_blessed_and_ready(client):
    """Block for as long as the client stays blessed and ready."""
    changes = aio.Queue()

    def _still_blessed():
        info = client.info
        return (info is not None and
                info.blessing is not None and
                info.blessing == info.ready)

    with client.register_change_cb(lambda: changes.put_nowait(None)):
        while _still_blessed():
            await changes.get_until_empty()
| 32.682353 | 79 | 0.640509 |
import asyncio
import logging
from hat import chatter
from hat import util
from hat.monitor import common
from hat.util import aio
mlog = logging.getLogger(__name__)
async def connect(conf):
    """Connect to the local monitor server and return a connected Client.

    Spawns a background receive loop; the connection is closed together
    with the client's async group.
    """
    client = Client()
    client._name = conf['name']
    client._group = conf['group']
    client._address = conf['component_address']
    client._components = []
    client._info = None
    client._ready = None
    client._change_cbs = util.CallbackRegistry()
    client._async_group = aio.Group()
    client._conn = await chatter.connect(common.sbs_repo,
                                         conf['monitor_address'])
    client._async_group.spawn(aio.call_on_cancel, client._conn.async_close)
    mlog.debug("connected to local monitor server %s", conf['monitor_address'])
    client._async_group.spawn(client._receive_loop)
    return client
class Client:
    """Connection to the local Monitor Server (created via ``connect``)."""
    @property
    def closed(self):
        """asyncio.Future: resolved once the connection terminates."""
        return self._async_group.closed
    @property
    def info(self):
        """Optional[common.ComponentInfo]: this client's own component entry."""
        return self._info
    @property
    def components(self):
        """List[common.ComponentInfo]: last received global component state."""
        return self._components
    def register_change_cb(self, cb):
        """Register a callback fired whenever info and/or components change."""
        return self._change_cbs.register(cb)
    async def async_close(self):
        """Close the connection and all background tasks."""
        await self._async_group.async_close()
    def set_ready(self, token):
        """Set the ready token and notify the server (no-op if unchanged)."""
        if token == self._ready:
            return
        self._ready = token
        self._send_msg_client()
    def _send_msg_client(self):
        """Send current name/group/address/ready state to the server."""
        self._conn.send(chatter.Data(
            module='HatMonitor',
            type='MsgClient',
            data=common.create_msg_client_sbs(
                name=self._name,
                group=self._group,
                address=self._address,
                ready=self._ready)))
    def _set_components(self, msg_server):
        """Update local component state from a received MsgServer message."""
        if (msg_server.data.module != 'HatMonitor' or
                msg_server.data.type != 'MsgServer'):
            raise Exception('Message received from server malformed: message '
                            'MsgServer from HatMonitor module expected')
        self._components = [common.component_info_from_sbs(i)
                            for i in msg_server.data.data['components']]
        # Our own entry is identified by matching cid and mid.
        self._info = util.first(
            self._components,
            lambda i:
                i.cid == msg_server.data.data['cid'] and
                i.mid == msg_server.data.data['mid'])
        self._change_cbs.notify()
    async def _receive_loop(self):
        """Announce the client, then apply server updates until disconnect."""
        try:
            self._send_msg_client()
            while True:
                msg = await self._conn.receive()
                self._set_components(msg)
        except chatter.ConnectionClosedError:
            mlog.debug('connection closed')
        finally:
            self._async_group.close()
async def run_component(conf, async_run_cb):
    """Run ``async_run_cb`` while the component holds matching blessing/ready tokens.

    Returns ``async_run_cb``'s result; raises if the monitor connection closes.
    """
    client = await connect(conf)
    try:
        while True:
            await _wait_until_blessed_and_ready(client)
            async_group = aio.Group()
            run_future = async_group.spawn(async_run_cb, client)
            blessed_and_ready_future = async_group.spawn(
                _wait_while_blessed_and_ready, client)
            try:
                # Race the callback against loss of blessing and disconnect.
                done, _ = await asyncio.wait(
                    [run_future, blessed_and_ready_future, client.closed],
                    return_when=asyncio.FIRST_COMPLETED)
                if run_future.done():
                    mlog.debug('async_run_cb finished or raised an exception')
                    return run_future.result()
                if client.closed.done():
                    raise Exception('connection to monitor server closed!')
            finally:
                # Revoke ready token and cancel the callback before retrying.
                if not client.closed.done():
                    client.set_ready(None)
                await async_group.async_close()
    except asyncio.CancelledError:
        raise
    except Exception as e:
        mlog.error('run component exception: %s', e, exc_info=e)
        raise
    finally:
        await client.async_close()
        mlog.debug('component closed')
async def _wait_until_blessed_and_ready(client):
    """Block until blessing is set and equals ready, acking blessing changes."""
    queue = aio.Queue()
    with client.register_change_cb(lambda: queue.put_nowait(None)):
        while (client.info is None or client.info.blessing is None or
               client.info.blessing != client.info.ready):
            await queue.get_until_empty()
            if client.info is None:
                continue
            client.set_ready(client.info.blessing)
async def _wait_while_blessed_and_ready(client):
    """Block for as long as the client stays blessed and ready."""
    queue = aio.Queue()
    with client.register_change_cb(lambda: queue.put_nowait(None)):
        while (client.info is not None and
               client.info.blessing is not None and
               client.info.blessing == client.info.ready):
            await queue.get_until_empty()
| true | true |
f738a4738731c19b10539bd6bcf377dc5b4c2ca9 | 6,260 | py | Python | sqlmodel/engine/create.py | WavesandCode/sqlmodel | ca4afa57e08ed895ce0823ec8c064e7bdd520576 | [
"MIT"
] | null | null | null | sqlmodel/engine/create.py | WavesandCode/sqlmodel | ca4afa57e08ed895ce0823ec8c064e7bdd520576 | [
"MIT"
] | null | null | null | sqlmodel/engine/create.py | WavesandCode/sqlmodel | ca4afa57e08ed895ce0823ec8c064e7bdd520576 | [
"MIT"
] | null | null | null | import json
import sqlite3
from typing import Any, Callable, Dict, List, Optional, Type, Union
from sqlalchemy.ext.asyncio import create_async_engine as _create_async_engine
from sqlalchemy.engine.url import URL
from sqlalchemy.future import Engine as _FutureEngine
from sqlalchemy.pool import Pool
from typing_extensions import Literal, TypedDict
from ..default import Default, _DefaultPlaceholder
# Types defined in sqlalchemy2-stubs, but can't be imported, so re-define here
_Debug = Literal["debug"]
_IsolationLevel = Literal[
"SERIALIZABLE",
"REPEATABLE READ",
"READ COMMITTED",
"READ UNCOMMITTED",
"AUTOCOMMIT",
]
_ParamStyle = Literal["qmark", "numeric", "named", "format", "pyformat"]
_ResetOnReturn = Literal["rollback", "commit"]
class _SQLiteConnectArgs(TypedDict, total=False):
    """Keyword arguments accepted by :func:`sqlite3.connect` (all optional)."""
    timeout: float
    detect_types: Any
    isolation_level: Optional[Literal["DEFERRED", "IMMEDIATE", "EXCLUSIVE"]]
    check_same_thread: bool
    factory: Type[sqlite3.Connection]
    cached_statements: int
    uri: bool
_ConnectArgs = Union[_SQLiteConnectArgs, Dict[str, Any]]
# Re-define create_engine to have by default future=True, and assume that's what is used
# Also show the default values used for each parameter, but don't set them unless
# explicitly passed as arguments by the user to prevent errors. E.g. SQLite doesn't
# support pool connection arguments.
def create_async_engine(
    url: Union[str, URL],
    *,
    connect_args: _ConnectArgs = Default({}),  # type: ignore
    echo: Union[bool, _Debug] = Default(False),
    echo_pool: Union[bool, _Debug] = Default(False),
    enable_from_linting: bool = Default(True),
    encoding: str = Default("utf-8"),
    execution_options: Dict[Any, Any] = Default({}),
    future: bool = True,
    hide_parameters: bool = Default(False),
    implicit_returning: bool = Default(True),
    isolation_level: Optional[_IsolationLevel] = Default(None),
    json_deserializer: Callable[..., Any] = Default(json.loads),
    json_serializer: Callable[..., Any] = Default(json.dumps),
    label_length: Optional[int] = Default(None),
    logging_name: Optional[str] = Default(None),
    max_identifier_length: Optional[int] = Default(None),
    max_overflow: int = Default(10),
    module: Optional[Any] = Default(None),
    paramstyle: Optional[_ParamStyle] = Default(None),
    pool: Optional[Pool] = Default(None),
    poolclass: Optional[Type[Pool]] = Default(None),
    pool_logging_name: Optional[str] = Default(None),
    pool_pre_ping: bool = Default(False),
    pool_size: int = Default(5),
    pool_recycle: int = Default(-1),
    pool_reset_on_return: Optional[_ResetOnReturn] = Default("rollback"),
    pool_timeout: float = Default(30),
    pool_use_lifo: bool = Default(False),
    plugins: Optional[List[str]] = Default(None),
    query_cache_size: Optional[int] = Default(None),
    **kwargs: Any,
) -> _FutureEngine:
    """Create an async SQLAlchemy engine with ``future=True`` by default.

    Only the options a caller supplied explicitly (i.e. whose value is no
    longer the ``Default(...)`` placeholder) are forwarded to SQLAlchemy, so
    that SQLAlchemy's own defaults apply everywhere else — some backends
    (e.g. SQLite) reject pool-related keyword arguments.
    """
    # Every optional keyword mapped to the value the caller may have provided.
    candidate_options: Dict[str, Any] = {
        "echo": echo,
        "echo_pool": echo_pool,
        "enable_from_linting": enable_from_linting,
        "connect_args": connect_args,
        "encoding": encoding,
        "execution_options": execution_options,
        "hide_parameters": hide_parameters,
        "implicit_returning": implicit_returning,
        "isolation_level": isolation_level,
        "json_deserializer": json_deserializer,
        "json_serializer": json_serializer,
        "label_length": label_length,
        "logging_name": logging_name,
        "max_identifier_length": max_identifier_length,
        "max_overflow": max_overflow,
        "module": module,
        "paramstyle": paramstyle,
        "pool": pool,
        "poolclass": poolclass,
        "pool_logging_name": pool_logging_name,
        "pool_pre_ping": pool_pre_ping,
        "pool_size": pool_size,
        "pool_recycle": pool_recycle,
        "pool_reset_on_return": pool_reset_on_return,
        "pool_timeout": pool_timeout,
        "pool_use_lifo": pool_use_lifo,
        "plugins": plugins,
        "query_cache_size": query_cache_size,
    }
    current_kwargs: Dict[str, Any] = {"future": future}
    # Drop any value still holding its Default placeholder sentinel.
    current_kwargs.update(
        (option, value)
        for option, value in candidate_options.items()
        if not isinstance(value, _DefaultPlaceholder)
    )
    # Arbitrary extra keyword arguments always take precedence.
    current_kwargs.update(kwargs)
    return _create_async_engine(url, **current_kwargs)
| 44.714286 | 88 | 0.735304 | import json
import sqlite3
from typing import Any, Callable, Dict, List, Optional, Type, Union
from sqlalchemy.ext.asyncio import create_async_engine as _create_async_engine
from sqlalchemy.engine.url import URL
from sqlalchemy.future import Engine as _FutureEngine
from sqlalchemy.pool import Pool
from typing_extensions import Literal, TypedDict
from ..default import Default, _DefaultPlaceholder
# Value accepted by ``echo``/``echo_pool`` besides plain booleans.
_Debug = Literal["debug"]
# Transaction isolation levels understood by SQLAlchemy engines.
_IsolationLevel = Literal[
    "SERIALIZABLE",
    "REPEATABLE READ",
    "READ COMMITTED",
    "READ UNCOMMITTED",
    "AUTOCOMMIT",
]
# DB-API (PEP 249) parameter styles selectable via ``paramstyle``.
_ParamStyle = Literal["qmark", "numeric", "named", "format", "pyformat"]
# Action applied to a pooled connection when returned to the pool.
_ResetOnReturn = Literal["rollback", "commit"]
class _SQLiteConnectArgs(TypedDict, total=False):
    """Keyword arguments accepted by :func:`sqlite3.connect`; all keys optional."""
    timeout: float
    detect_types: Any
    isolation_level: Optional[Literal["DEFERRED", "IMMEDIATE", "EXCLUSIVE"]]
    check_same_thread: bool
    factory: Type[sqlite3.Connection]
    cached_statements: int
    uri: bool
# ``connect_args`` is either the SQLite-specific shape above or a free-form
# mapping for any other database driver.
_ConnectArgs = Union[_SQLiteConnectArgs, Dict[str, Any]]
# Re-define create_engine to have by default future=True, and assume that's what is used
# explicitly passed as arguments by the user to prevent errors. E.g. SQLite doesn't
def create_async_engine(
    url: Union[str, URL],
    *,
    connect_args: _ConnectArgs = Default({}),
    echo: Union[bool, _Debug] = Default(False),
    echo_pool: Union[bool, _Debug] = Default(False),
    enable_from_linting: bool = Default(True),
    encoding: str = Default("utf-8"),
    execution_options: Dict[Any, Any] = Default({}),
    future: bool = True,
    hide_parameters: bool = Default(False),
    implicit_returning: bool = Default(True),
    isolation_level: Optional[_IsolationLevel] = Default(None),
    json_deserializer: Callable[..., Any] = Default(json.loads),
    json_serializer: Callable[..., Any] = Default(json.dumps),
    label_length: Optional[int] = Default(None),
    logging_name: Optional[str] = Default(None),
    max_identifier_length: Optional[int] = Default(None),
    max_overflow: int = Default(10),
    module: Optional[Any] = Default(None),
    paramstyle: Optional[_ParamStyle] = Default(None),
    pool: Optional[Pool] = Default(None),
    poolclass: Optional[Type[Pool]] = Default(None),
    pool_logging_name: Optional[str] = Default(None),
    pool_pre_ping: bool = Default(False),
    pool_size: int = Default(5),
    pool_recycle: int = Default(-1),
    pool_reset_on_return: Optional[_ResetOnReturn] = Default("rollback"),
    pool_timeout: float = Default(30),
    pool_use_lifo: bool = Default(False),
    plugins: Optional[List[str]] = Default(None),
    query_cache_size: Optional[int] = Default(None),
    **kwargs: Any,
) -> _FutureEngine:
    """Create an async SQLAlchemy engine, defaulting ``future=True``.

    Options still holding their ``Default(...)`` placeholder were not set by
    the caller and are omitted, so SQLAlchemy applies its own defaults —
    important because some dialects (e.g. SQLite) reject pool arguments.
    """
    optional_settings = (
        ("echo", echo),
        ("echo_pool", echo_pool),
        ("enable_from_linting", enable_from_linting),
        ("connect_args", connect_args),
        ("encoding", encoding),
        ("execution_options", execution_options),
        ("hide_parameters", hide_parameters),
        ("implicit_returning", implicit_returning),
        ("isolation_level", isolation_level),
        ("json_deserializer", json_deserializer),
        ("json_serializer", json_serializer),
        ("label_length", label_length),
        ("logging_name", logging_name),
        ("max_identifier_length", max_identifier_length),
        ("max_overflow", max_overflow),
        ("module", module),
        ("paramstyle", paramstyle),
        ("pool", pool),
        ("poolclass", poolclass),
        ("pool_logging_name", pool_logging_name),
        ("pool_pre_ping", pool_pre_ping),
        ("pool_size", pool_size),
        ("pool_recycle", pool_recycle),
        ("pool_reset_on_return", pool_reset_on_return),
        ("pool_timeout", pool_timeout),
        ("pool_use_lifo", pool_use_lifo),
        ("plugins", plugins),
        ("query_cache_size", query_cache_size),
    )
    engine_kwargs: Dict[str, Any] = {"future": future}
    for name, value in optional_settings:
        # A _DefaultPlaceholder means "not supplied" — skip it entirely.
        if isinstance(value, _DefaultPlaceholder):
            continue
        engine_kwargs[name] = value
    # Extra keyword arguments always win over the named ones above.
    engine_kwargs.update(kwargs)
    return _create_async_engine(url, **engine_kwargs)
| true | true |
f738a6cb7be7d4b4fd71486e7095742443a648d5 | 2,457 | py | Python | WiredQT/examples/BySomeBody/DataGrid/SQLLITE/frmmain.py | chiptrontech/WiredQTv1.0 | 760948bb736867db4e772031b23ed9151e0364b9 | [
"MIT"
] | 1 | 2021-12-18T09:17:08.000Z | 2021-12-18T09:17:08.000Z | WiredQT/examples/BySomeBody/DataGrid/SQLLITE/frmmain.py | chiptrontech/WiredQTv1.0 | 760948bb736867db4e772031b23ed9151e0364b9 | [
"MIT"
] | null | null | null | WiredQT/examples/BySomeBody/DataGrid/SQLLITE/frmmain.py | chiptrontech/WiredQTv1.0 | 760948bb736867db4e772031b23ed9151e0364b9 | [
"MIT"
] | 2 | 2021-12-18T09:15:53.000Z | 2022-01-19T15:10:14.000Z | from PyQt5 import QtCore, QtWidgets
from PyQt5.QtGui import *
from wired_module import *
# Generated By WiredQT for Python: by Rocky Nuarin, 2021 Phils
class Handler(QtWidgets.QWidget, usercontrol):
    """WiredQT-generated form hosting a DataGrid and a button that binds the
    grid to a local SQLite query (generated by WiredQT for Python)."""
    def __init__(self, *param):
        # Build the window, create the generated widgets, then start a 10 ms
        # UI timer whose callback (loop) also services a 5000 ms scheduler.
        super(Handler, self).__init__(None)
        initUI(self, param, w=400, h=400, title="WiredQTv5.0", controlbox=True, startpos=(0, 30), timeoutdestroy=-1)
        self.GTKForms()
        self.timer = QtCore.QTimer()
        self.timer.timeout.connect(self.loop)
        self.timer.start(10)
        self.sch = Scheduler(5000)  # scheduler period: 5000 ms
        self.sch.Start()
    def createwidget(self, prop, control, parent, event=None):
        """Create one designer widget via wired_module.createWidget.

        ``event`` previously defaulted to a shared mutable ``[]`` (the classic
        mutable-default pitfall); a fresh list is now created per call.
        """
        if event is None:
            event = []
        createWidget(self, prop, control, parent, event)
    def GTKForms(self):
        # Designer-generated widget definitions: the property dicts are
        # serialized strings parsed by createWidget. Do not edit by hand.
        self.createwidget("{'dbtype': 'sqllite', 'Enable': 'True', 'Height': '173', 'Text': '', 'Name': 'DataGrid1', 'Visible': 'True', 'Left': '79', 'Width': '318', 'Var': '', 'Tag': 'Activex', 'Events': [[['clicked', 'self,arg1']]], 'Help': '', 'Picture': '', 'Font': '', 'ParentsType': '', 'Top': '49', 'ForeColor': '(0,0,0,1)', 'BackColor': '(1,1,1,0.25)'}",'DataGrid','usercontrol',"[['clicked', 'self,arg1']]")
        self.createwidget("{'Enable': 'True', 'Height': '100', 'Text': 'QPushButton1', 'Name': 'QPushButton1', 'Visible': 'True', 'Left': '118', 'Width': '123', 'Var': '', 'Tag': '', 'Events': '[clicked]', 'Help': '', 'Picture': '', 'Font': '', 'ParentsType': '', 'Top': '261', 'ForeColor': '(0,0,0,1)', 'BackColor': '(1,1,1,0.25)'}",'QPushButton','usercontrol',"[['clicked', 'self,arg1']]")
    def Widget(self):
        # Expose this form as the embeddable widget.
        return self
    def loop(self):
        # 10 ms timer callback: one-shot form-load latch, then service the
        # 5000 ms scheduler.
        if self.form_load == False:
            self.form_load = True
        if self.sch.Event():  # timer routine
            # code here
            if self.timeoutdestroy != -1:
                self.timeoutdestroy -= 1
                if self.timeoutdestroy == 0:
                    pass  # self.unload(None)
            self.sch.Start()  # restart scheduler
    def connect(self, ev, evusr):
        # Register a user callback for a wired event name.
        self.wiredevents.update({ev: evusr})
    def activeXcreated(self, *args):
        # Hook invoked after an Activex child is created; no-op here.
        pass
    def eventFilter(self, obj, event):
        return super(Handler, self).eventFilter(obj, event)
    def QPushButton1_clicked(self, arg1):
        # Open (or reopen) the local SQLite database and bind the grid to it.
        import sqlite3
        self.db = sqlite3.connect('database.db')
        self.DataGrid1.DataSource(self.db, "select * from supplier limit 10")
    def DataGrid1_clicked(self, arg1):
        # arg1 is the clicked model index (row/column/data).
        print(arg1.row(), arg1.column(), arg1.data())
if __name__ == '__main__':
    # Launch the form as a standalone application.
    import sys
    qt_app = QtWidgets.QApplication(sys.argv)
    main_form = Handler()
    main_form.show()
    sys.exit(qt_app.exec_())
| 43.105263 | 432 | 0.634921 | from PyQt5 import QtCore, QtWidgets
from PyQt5.QtGui import *
from wired_module import *
class Handler(QtWidgets.QWidget,usercontrol):
 """WiredQT-generated form: a DataGrid plus a button that binds the grid
 to a local SQLite query."""
 def __init__(self, *param):
  # Build the window and widgets, start a 10 ms UI timer (loop) plus a
  # 5000 ms scheduler serviced from within loop().
  super(Handler, self).__init__(None)
  initUI(self,param,w=400,h=400,title="WiredQTv5.0",controlbox=True,startpos=(0,30),timeoutdestroy=-1)
  self.GTKForms()
  self.timer=QtCore.QTimer()
  self.timer.timeout.connect(self.loop)
  self.timer.start(10)
  self.sch=Scheduler(5000)
  self.sch.Start()
 def createwidget(self,prop,control,parent,event=[]):
  # Thin wrapper around wired_module.createWidget.
  # NOTE(review): ``event=[]`` is a shared mutable default — safe only if
  # createWidget never mutates it; confirm in wired_module.
  createWidget(self,prop,control,parent,event)
 def GTKForms(self):
  # Designer-generated widget definitions (property dicts are serialized
  # strings parsed by createWidget); do not edit by hand.
  self.createwidget("{'dbtype': 'sqllite', 'Enable': 'True', 'Height': '173', 'Text': '', 'Name': 'DataGrid1', 'Visible': 'True', 'Left': '79', 'Width': '318', 'Var': '', 'Tag': 'Activex', 'Events': [[['clicked', 'self,arg1']]], 'Help': '', 'Picture': '', 'Font': '', 'ParentsType': '', 'Top': '49', 'ForeColor': '(0,0,0,1)', 'BackColor': '(1,1,1,0.25)'}",'DataGrid','usercontrol',"[['clicked', 'self,arg1']]")
  self.createwidget("{'Enable': 'True', 'Height': '100', 'Text': 'QPushButton1', 'Name': 'QPushButton1', 'Visible': 'True', 'Left': '118', 'Width': '123', 'Var': '', 'Tag': '', 'Events': '[clicked]', 'Help': '', 'Picture': '', 'Font': '', 'ParentsType': '', 'Top': '261', 'ForeColor': '(0,0,0,1)', 'BackColor': '(1,1,1,0.25)'}",'QPushButton','usercontrol',"[['clicked', 'self,arg1']]")
 def Widget(self):
  # Expose this form as the embeddable widget.
  return self
 def loop(self):
  # 10 ms timer callback: one-shot form-load latch, then scheduler service.
  if self.form_load==False:
   self.form_load=True
  if self.sch.Event():
   # Scheduler fired (every 5000 ms); count down the optional auto-destroy.
   if self.timeoutdestroy!=-1:
    self.timeoutdestroy-=1
    if self.timeoutdestroy==0:
     pass
   self.sch.Start()
 def connect(self,ev,evusr):
  # Register a user callback for a wired event name.
  self.wiredevents.update({ev:evusr})
 def activeXcreated(self,*args):
  # Hook invoked after an Activex child is created; no-op here.
  pass
 def eventFilter(self, obj, event):
  return super(Handler, self).eventFilter(obj, event)
 def QPushButton1_clicked(self,arg1):
  # Open (or reopen) the local SQLite database and bind the grid to it.
  import sqlite3
  self.db = sqlite3.connect('database.db')
  self.DataGrid1.DataSource(self.db,"select * from supplier limit 10")
  pass
 def DataGrid1_clicked(self,arg1):
  # arg1 is the clicked model index (row/column/data).
  print(arg1.row(),arg1.column(),arg1.data())
  pass
if __name__ == '__main__':
    # Run the form as a standalone application.
    import sys
    application = QtWidgets.QApplication(sys.argv)
    window = Handler()
    window.show()
    sys.exit(application.exec_())
| true | true |
f738a73e64ead48b126973cf863a8f0ebf981aa3 | 55,179 | py | Python | tests/auth_tests/test_views.py | PaulWay/django | 782d85b6dfa191e67c0f1d572641d8236c79174c | [
"PSF-2.0",
"BSD-3-Clause"
] | 1 | 2019-03-01T11:58:20.000Z | 2019-03-01T11:58:20.000Z | tests/auth_tests/test_views.py | PaulWay/django | 782d85b6dfa191e67c0f1d572641d8236c79174c | [
"PSF-2.0",
"BSD-3-Clause"
] | null | null | null | tests/auth_tests/test_views.py | PaulWay/django | 782d85b6dfa191e67c0f1d572641d8236c79174c | [
"PSF-2.0",
"BSD-3-Clause"
] | 1 | 2019-03-19T20:57:55.000Z | 2019-03-19T20:57:55.000Z | import datetime
import itertools
import os
import re
from importlib import import_module
from unittest import mock
from urllib.parse import quote
from django.apps import apps
from django.conf import settings
from django.contrib.admin.models import LogEntry
from django.contrib.auth import (
BACKEND_SESSION_KEY, REDIRECT_FIELD_NAME, SESSION_KEY,
)
from django.contrib.auth.forms import (
AuthenticationForm, PasswordChangeForm, SetPasswordForm,
)
from django.contrib.auth.models import Permission, User
from django.contrib.auth.views import (
INTERNAL_RESET_SESSION_TOKEN, LoginView, logout_then_login,
redirect_to_login,
)
from django.contrib.contenttypes.models import ContentType
from django.contrib.sessions.middleware import SessionMiddleware
from django.contrib.sites.requests import RequestSite
from django.core import mail
from django.db import connection
from django.http import HttpRequest
from django.middleware.csrf import CsrfViewMiddleware, get_token
from django.test import Client, TestCase, override_settings
from django.test.client import RedirectCycleError
from django.urls import NoReverseMatch, reverse, reverse_lazy
from django.utils.http import urlsafe_base64_encode
from .client import PasswordResetConfirmClient
from .models import CustomUser, UUIDUser
from .settings import AUTH_TEMPLATES
@override_settings(
    LANGUAGES=[('en', 'English')],
    LANGUAGE_CODE='en',
    TEMPLATES=AUTH_TEMPLATES,
    ROOT_URLCONF='auth_tests.urls',
)
class AuthViewsTestCase(TestCase):
    """Shared fixtures and helper assertions for the auth-view test cases."""
    @classmethod
    def setUpTestData(cls):
        # Two users: a regular test client and a "staff" account used by the
        # password-reset tests.
        cls.u1 = User.objects.create_user(username='testclient', password='password', email='testclient@example.com')
        cls.u3 = User.objects.create_user(username='staff', password='password', email='staffmember@example.com')
    def login(self, username='testclient', password='password'):
        """Log in through the login view and assert a session was created."""
        credentials = {'username': username, 'password': password}
        login_response = self.client.post('/login/', credentials)
        self.assertIn(SESSION_KEY, self.client.session)
        return login_response
    def logout(self):
        """Log out through the admin logout view and assert the session is gone."""
        logout_response = self.client.get('/admin/logout/')
        self.assertEqual(logout_response.status_code, 200)
        self.assertNotIn(SESSION_KEY, self.client.session)
    def assertFormError(self, response, error):
        """Assert that ``error`` is found in ``response.context['form']`` errors."""
        all_errors = list(itertools.chain.from_iterable(response.context['form'].errors.values()))
        self.assertIn(str(error), all_errors)
@override_settings(ROOT_URLCONF='django.contrib.auth.urls')
class AuthViewNamedURLTests(AuthViewsTestCase):
    def test_named_urls(self):
        """Every named auth URL should be reversible."""
        cases = [
            ('login', [], {}),
            ('logout', [], {}),
            ('password_change', [], {}),
            ('password_change_done', [], {}),
            ('password_reset', [], {}),
            ('password_reset_done', [], {}),
            ('password_reset_confirm', [], {'uidb64': 'aaaaaaa', 'token': '1111-aaaaa'}),
            ('password_reset_complete', [], {}),
        ]
        for url_name, url_args, url_kwargs in cases:
            with self.subTest(name=url_name):
                try:
                    reverse(url_name, args=url_args, kwargs=url_kwargs)
                except NoReverseMatch:
                    self.fail("Reversal of url named '%s' failed with NoReverseMatch" % url_name)
class PasswordResetTest(AuthViewsTestCase):
    """End-to-end tests of the password-reset flow: requesting the reset
    email, following the tokenized confirm link, and setting a new password."""
    def setUp(self):
        # Client that transparently follows the confirm-link redirect.
        self.client = PasswordResetConfirmClient()
    def test_email_not_found(self):
        """If the provided email is not registered, don't raise any error but
        also don't send any email."""
        response = self.client.get('/password_reset/')
        self.assertEqual(response.status_code, 200)
        response = self.client.post('/password_reset/', {'email': 'not_a_real_email@email.com'})
        self.assertEqual(response.status_code, 302)
        self.assertEqual(len(mail.outbox), 0)
    def test_email_found(self):
        "Email is sent if a valid email address is provided for password reset"
        response = self.client.post('/password_reset/', {'email': 'staffmember@example.com'})
        self.assertEqual(response.status_code, 302)
        self.assertEqual(len(mail.outbox), 1)
        self.assertIn("http://", mail.outbox[0].body)
        self.assertEqual(settings.DEFAULT_FROM_EMAIL, mail.outbox[0].from_email)
        # optional multipart text/html email has been added. Make sure original,
        # default functionality is 100% the same
        self.assertFalse(mail.outbox[0].message().is_multipart())
    def test_extra_email_context(self):
        """
        extra_email_context should be available in the email template context.
        """
        response = self.client.post(
            '/password_reset_extra_email_context/',
            {'email': 'staffmember@example.com'},
        )
        self.assertEqual(response.status_code, 302)
        self.assertEqual(len(mail.outbox), 1)
        self.assertIn('Email email context: "Hello!"', mail.outbox[0].body)
    def test_html_mail_template(self):
        """
        A multipart email with text/plain and text/html is sent
        if the html_email_template parameter is passed to the view
        """
        response = self.client.post('/password_reset/html_email_template/', {'email': 'staffmember@example.com'})
        self.assertEqual(response.status_code, 302)
        self.assertEqual(len(mail.outbox), 1)
        message = mail.outbox[0].message()
        self.assertEqual(len(message.get_payload()), 2)
        self.assertTrue(message.is_multipart())
        self.assertEqual(message.get_payload(0).get_content_type(), 'text/plain')
        self.assertEqual(message.get_payload(1).get_content_type(), 'text/html')
        self.assertNotIn('<html>', message.get_payload(0).get_payload())
        self.assertIn('<html>', message.get_payload(1).get_payload())
    def test_email_found_custom_from(self):
        "Email is sent if a valid email address is provided for password reset when a custom from_email is provided."
        response = self.client.post('/password_reset_from_email/', {'email': 'staffmember@example.com'})
        self.assertEqual(response.status_code, 302)
        self.assertEqual(len(mail.outbox), 1)
        self.assertEqual("staffmember@example.com", mail.outbox[0].from_email)
    # Skip any 500 handler action (like sending more mail...)
    @override_settings(DEBUG_PROPAGATE_EXCEPTIONS=True)
    def test_poisoned_http_host(self):
        "Poisoned HTTP_HOST headers can't be used for reset emails"
        # This attack is based on the way browsers handle URLs. The colon
        # should be used to separate the port, but if the URL contains an @,
        # the colon is interpreted as part of a username for login purposes,
        # making 'evil.com' the request domain. Since HTTP_HOST is used to
        # produce a meaningful reset URL, we need to be certain that the
        # HTTP_HOST header isn't poisoned. This is done as a check when get_host()
        # is invoked, but we check here as a practical consequence.
        with self.assertLogs('django.security.DisallowedHost', 'ERROR'):
            response = self.client.post(
                '/password_reset/',
                {'email': 'staffmember@example.com'},
                HTTP_HOST='www.example:dr.frankenstein@evil.tld'
            )
        self.assertEqual(response.status_code, 400)
        self.assertEqual(len(mail.outbox), 0)
    # Skip any 500 handler action (like sending more mail...)
    @override_settings(DEBUG_PROPAGATE_EXCEPTIONS=True)
    def test_poisoned_http_host_admin_site(self):
        "Poisoned HTTP_HOST headers can't be used for reset emails on admin views"
        with self.assertLogs('django.security.DisallowedHost', 'ERROR'):
            response = self.client.post(
                '/admin_password_reset/',
                {'email': 'staffmember@example.com'},
                HTTP_HOST='www.example:dr.frankenstein@evil.tld'
            )
        self.assertEqual(response.status_code, 400)
        self.assertEqual(len(mail.outbox), 0)
    def _test_confirm_start(self):
        # Start by creating the email
        self.client.post('/password_reset/', {'email': 'staffmember@example.com'})
        self.assertEqual(len(mail.outbox), 1)
        return self._read_signup_email(mail.outbox[0])
    def _read_signup_email(self, email):
        # Extract the reset link from the email body; returns (url, path).
        urlmatch = re.search(r"https?://[^/]*(/.*reset/\S*)", email.body)
        self.assertIsNotNone(urlmatch, "No URL found in sent email")
        return urlmatch.group(), urlmatch.groups()[0]
    def test_confirm_valid(self):
        url, path = self._test_confirm_start()
        response = self.client.get(path)
        # redirect to a 'complete' page:
        self.assertContains(response, "Please enter your new password")
    def test_confirm_invalid(self):
        url, path = self._test_confirm_start()
        # Let's munge the token in the path, but keep the same length,
        # in case the URLconf will reject a different length.
        path = path[:-5] + ("0" * 4) + path[-1]
        response = self.client.get(path)
        self.assertContains(response, "The password reset link was invalid")
    def test_confirm_invalid_user(self):
        # A nonexistent user returns a 200 response, not a 404.
        response = self.client.get('/reset/123456/1-1/')
        self.assertContains(response, "The password reset link was invalid")
    def test_confirm_overflow_user(self):
        # A base36 user id that overflows int returns a 200 response.
        response = self.client.get('/reset/zzzzzzzzzzzzz/1-1/')
        self.assertContains(response, "The password reset link was invalid")
    def test_confirm_invalid_post(self):
        # Same as test_confirm_invalid, but trying to do a POST instead.
        url, path = self._test_confirm_start()
        path = path[:-5] + ("0" * 4) + path[-1]
        self.client.post(path, {
            'new_password1': 'anewpassword',
            'new_password2': ' anewpassword',
        })
        # Check the password has not been changed
        u = User.objects.get(email='staffmember@example.com')
        self.assertTrue(not u.check_password("anewpassword"))
    def test_confirm_invalid_hash(self):
        """A POST with an invalid token is rejected."""
        u = User.objects.get(email='staffmember@example.com')
        original_password = u.password
        url, path = self._test_confirm_start()
        path_parts = path.split('-')
        path_parts[-1] = ("0") * 20 + '/'
        path = '-'.join(path_parts)
        response = self.client.post(path, {
            'new_password1': 'anewpassword',
            'new_password2': 'anewpassword',
        })
        self.assertIs(response.context['validlink'], False)
        u.refresh_from_db()
        self.assertEqual(original_password, u.password) # password hasn't changed
    def test_confirm_complete(self):
        url, path = self._test_confirm_start()
        response = self.client.post(path, {'new_password1': 'anewpassword', 'new_password2': 'anewpassword'})
        # Check the password has been changed
        u = User.objects.get(email='staffmember@example.com')
        self.assertTrue(u.check_password("anewpassword"))
        # The reset token is deleted from the session.
        self.assertNotIn(INTERNAL_RESET_SESSION_TOKEN, self.client.session)
        # Check we can't use the link again
        response = self.client.get(path)
        self.assertContains(response, "The password reset link was invalid")
    def test_confirm_different_passwords(self):
        url, path = self._test_confirm_start()
        response = self.client.post(path, {'new_password1': 'anewpassword', 'new_password2': 'x'})
        self.assertFormError(response, SetPasswordForm.error_messages['password_mismatch'])
    def test_reset_redirect_default(self):
        response = self.client.post('/password_reset/', {'email': 'staffmember@example.com'})
        self.assertRedirects(response, '/password_reset/done/', fetch_redirect_response=False)
    def test_reset_custom_redirect(self):
        response = self.client.post('/password_reset/custom_redirect/', {'email': 'staffmember@example.com'})
        self.assertRedirects(response, '/custom/', fetch_redirect_response=False)
    def test_reset_custom_redirect_named(self):
        response = self.client.post('/password_reset/custom_redirect/named/', {'email': 'staffmember@example.com'})
        self.assertRedirects(response, '/password_reset/', fetch_redirect_response=False)
    def test_confirm_redirect_default(self):
        url, path = self._test_confirm_start()
        response = self.client.post(path, {'new_password1': 'anewpassword', 'new_password2': 'anewpassword'})
        self.assertRedirects(response, '/reset/done/', fetch_redirect_response=False)
    def test_confirm_redirect_custom(self):
        url, path = self._test_confirm_start()
        path = path.replace('/reset/', '/reset/custom/')
        response = self.client.post(path, {'new_password1': 'anewpassword', 'new_password2': 'anewpassword'})
        self.assertRedirects(response, '/custom/', fetch_redirect_response=False)
    def test_confirm_redirect_custom_named(self):
        url, path = self._test_confirm_start()
        path = path.replace('/reset/', '/reset/custom/named/')
        response = self.client.post(path, {'new_password1': 'anewpassword', 'new_password2': 'anewpassword'})
        self.assertRedirects(response, '/password_reset/', fetch_redirect_response=False)
    def test_confirm_login_post_reset(self):
        url, path = self._test_confirm_start()
        path = path.replace('/reset/', '/reset/post_reset_login/')
        response = self.client.post(path, {'new_password1': 'anewpassword', 'new_password2': 'anewpassword'})
        self.assertRedirects(response, '/reset/done/', fetch_redirect_response=False)
        self.assertIn(SESSION_KEY, self.client.session)
    @override_settings(
        AUTHENTICATION_BACKENDS=[
            'django.contrib.auth.backends.ModelBackend',
            'django.contrib.auth.backends.AllowAllUsersModelBackend',
        ]
    )
    def test_confirm_login_post_reset_custom_backend(self):
        # This backend is specified in the URL pattern.
        backend = 'django.contrib.auth.backends.AllowAllUsersModelBackend'
        url, path = self._test_confirm_start()
        path = path.replace('/reset/', '/reset/post_reset_login_custom_backend/')
        response = self.client.post(path, {'new_password1': 'anewpassword', 'new_password2': 'anewpassword'})
        self.assertRedirects(response, '/reset/done/', fetch_redirect_response=False)
        self.assertIn(SESSION_KEY, self.client.session)
        self.assertEqual(self.client.session[BACKEND_SESSION_KEY], backend)
    def test_confirm_login_post_reset_already_logged_in(self):
        url, path = self._test_confirm_start()
        path = path.replace('/reset/', '/reset/post_reset_login/')
        self.login()
        response = self.client.post(path, {'new_password1': 'anewpassword', 'new_password2': 'anewpassword'})
        self.assertRedirects(response, '/reset/done/', fetch_redirect_response=False)
        self.assertIn(SESSION_KEY, self.client.session)
    def test_confirm_display_user_from_form(self):
        url, path = self._test_confirm_start()
        response = self.client.get(path)
        # The password_reset_confirm() view passes the user object to the
        # ``SetPasswordForm``, even on GET requests (#16919). For this test,
        # ``{{ form.user }}`` is rendered in the template
        # registration/password_reset_confirm.html.
        username = User.objects.get(email='staffmember@example.com').username
        self.assertContains(response, "Hello, %s." % username)
        # However, the view should NOT pass any user object on a form if the
        # password reset link was invalid.
        response = self.client.get('/reset/zzzzzzzzzzzzz/1-1/')
        self.assertContains(response, "Hello, .")
    def test_confirm_link_redirects_to_set_password_page(self):
        url, path = self._test_confirm_start()
        # Don't use PasswordResetConfirmClient (self.client) here which
        # automatically fetches the redirect page.
        client = Client()
        response = client.get(path)
        token = response.resolver_match.kwargs['token']
        uuidb64 = response.resolver_match.kwargs['uidb64']
        self.assertRedirects(response, '/reset/%s/set-password/' % uuidb64)
        self.assertEqual(client.session['_password_reset_token'], token)
    def test_invalid_link_if_going_directly_to_the_final_reset_password_url(self):
        url, path = self._test_confirm_start()
        _, uuidb64, _ = path.strip('/').split('/')
        response = Client().get('/reset/%s/set-password/' % uuidb64)
        self.assertContains(response, 'The password reset link was invalid')
@override_settings(AUTH_USER_MODEL='auth_tests.CustomUser')
class CustomUserPasswordResetTest(AuthViewsTestCase):
    """Password-reset flow tests run against a custom user model."""
    user_email = 'staffmember@example.com'
    @classmethod
    def setUpTestData(cls):
        # Replace the default users with a single CustomUser instance.
        cls.u1 = CustomUser.custom_objects.create(
            email='staffmember@example.com',
            date_of_birth=datetime.date(1976, 11, 8),
        )
        cls.u1.set_password('password')
        cls.u1.save()
    def setUp(self):
        # Client that transparently follows the confirm-link redirect.
        self.client = PasswordResetConfirmClient()
    def _test_confirm_start(self):
        # Start by creating the email
        response = self.client.post('/password_reset/', {'email': self.user_email})
        self.assertEqual(response.status_code, 302)
        self.assertEqual(len(mail.outbox), 1)
        return self._read_signup_email(mail.outbox[0])
    def _read_signup_email(self, email):
        # Extract the reset link from the email body; returns (url, path).
        urlmatch = re.search(r"https?://[^/]*(/.*reset/\S*)", email.body)
        self.assertIsNotNone(urlmatch, "No URL found in sent email")
        return urlmatch.group(), urlmatch.groups()[0]
    def test_confirm_valid_custom_user(self):
        url, path = self._test_confirm_start()
        response = self.client.get(path)
        # redirect to a 'complete' page:
        self.assertContains(response, "Please enter your new password")
        # then submit a new password
        response = self.client.post(path, {
            'new_password1': 'anewpassword',
            'new_password2': 'anewpassword',
        })
        self.assertRedirects(response, '/reset/done/')
@override_settings(AUTH_USER_MODEL='auth_tests.UUIDUser')
class UUIDUserPasswordResetTest(CustomUserPasswordResetTest):
    """Password-reset flow tests for a user model with a UUID primary key."""
    def _test_confirm_start(self):
        # instead of fixture
        UUIDUser.objects.create_user(
            email=self.user_email,
            username='foo',
            password='foo',
        )
        return super()._test_confirm_start()
    def test_confirm_invalid_uuid(self):
        """A uidb64 that decodes to a non-UUID doesn't crash."""
        _, path = self._test_confirm_start()
        invalid_uidb64 = urlsafe_base64_encode('INVALID_UUID'.encode())
        first, _uuidb64_, second = path.strip('/').split('/')
        response = self.client.get('/' + '/'.join((first, invalid_uidb64, second)) + '/')
        self.assertContains(response, 'The password reset link was invalid')
class ChangePasswordTest(AuthViewsTestCase):
    """Tests for the password_change view and its redirects."""

    def fail_login(self):
        """Assert that the old credentials no longer authenticate."""
        response = self.client.post('/login/', {
            'username': 'testclient',
            'password': 'password',
        })
        # assertFormError here is the 2-arg helper from AuthViewsTestCase,
        # not the standard Django TestCase method.
        self.assertFormError(response, AuthenticationForm.error_messages['invalid_login'] % {
            'username': User._meta.get_field('username').verbose_name
        })

    def logout(self):
        self.client.get('/logout/')

    def test_password_change_fails_with_invalid_old_password(self):
        self.login()
        response = self.client.post('/password_change/', {
            'old_password': 'donuts',
            'new_password1': 'password1',
            'new_password2': 'password1',
        })
        self.assertFormError(response, PasswordChangeForm.error_messages['password_incorrect'])

    def test_password_change_fails_with_mismatched_passwords(self):
        self.login()
        response = self.client.post('/password_change/', {
            'old_password': 'password',
            'new_password1': 'password1',
            'new_password2': 'donuts',
        })
        self.assertFormError(response, SetPasswordForm.error_messages['password_mismatch'])

    def test_password_change_succeeds(self):
        self.login()
        self.client.post('/password_change/', {
            'old_password': 'password',
            'new_password1': 'password1',
            'new_password2': 'password1',
        })
        # Old password rejected, new one accepted.
        self.fail_login()
        self.login(password='password1')

    def test_password_change_done_succeeds(self):
        self.login()
        response = self.client.post('/password_change/', {
            'old_password': 'password',
            'new_password1': 'password1',
            'new_password2': 'password1',
        })
        self.assertRedirects(response, '/password_change/done/', fetch_redirect_response=False)

    @override_settings(LOGIN_URL='/login/')
    def test_password_change_done_fails(self):
        # Anonymous access to the done page redirects to login.
        response = self.client.get('/password_change/done/')
        self.assertRedirects(response, '/login/?next=/password_change/done/', fetch_redirect_response=False)

    def test_password_change_redirect_default(self):
        self.login()
        response = self.client.post('/password_change/', {
            'old_password': 'password',
            'new_password1': 'password1',
            'new_password2': 'password1',
        })
        self.assertRedirects(response, '/password_change/done/', fetch_redirect_response=False)

    def test_password_change_redirect_custom(self):
        self.login()
        response = self.client.post('/password_change/custom/', {
            'old_password': 'password',
            'new_password1': 'password1',
            'new_password2': 'password1',
        })
        self.assertRedirects(response, '/custom/', fetch_redirect_response=False)

    def test_password_change_redirect_custom_named(self):
        # success_url given as a URL pattern name is resolved.
        self.login()
        response = self.client.post('/password_change/custom/named/', {
            'old_password': 'password',
            'new_password1': 'password1',
            'new_password2': 'password1',
        })
        self.assertRedirects(response, '/password_reset/', fetch_redirect_response=False)
class SessionAuthenticationTests(AuthViewsTestCase):
    """Session auth hash handling around password changes."""

    def test_user_password_change_updates_session(self):
        """
        #21649 - Ensure contrib.auth.views.password_change updates the user's
        session auth hash after a password change so the session isn't logged out.
        """
        self.login()
        original_session_key = self.client.session.session_key
        response = self.client.post('/password_change/', {
            'old_password': 'password',
            'new_password1': 'password1',
            'new_password2': 'password1',
        })
        # if the hash isn't updated, retrieving the redirection page will fail.
        self.assertRedirects(response, '/password_change/done/')
        # The session key is rotated.
        self.assertNotEqual(original_session_key, self.client.session.session_key)
class LoginTest(AuthViewsTestCase):
    """Tests for LoginView: context, ?next= security, CSRF and sessions."""

    def test_current_site_in_context_after_login(self):
        response = self.client.get(reverse('login'))
        self.assertEqual(response.status_code, 200)
        if apps.is_installed('django.contrib.sites'):
            # With the sites app, the current Site goes into the context.
            Site = apps.get_model('sites.Site')
            site = Site.objects.get_current()
            self.assertEqual(response.context['site'], site)
            self.assertEqual(response.context['site_name'], site.name)
        else:
            # Without it, a RequestSite stand-in is used.
            self.assertIsInstance(response.context['site'], RequestSite)
        self.assertIsInstance(response.context['form'], AuthenticationForm)

    def test_security_check(self):
        """Unsafe ?next= targets are ignored; safe ones are honored."""
        login_url = reverse('login')
        # These URLs should not pass the security check.
        bad_urls = (
            'http://example.com',
            'http:///example.com',
            'https://example.com',
            'ftp://example.com',
            '///example.com',
            '//example.com',
            'javascript:alert("XSS")',
        )
        for bad_url in bad_urls:
            with self.subTest(bad_url=bad_url):
                nasty_url = '%(url)s?%(next)s=%(bad_url)s' % {
                    'url': login_url,
                    'next': REDIRECT_FIELD_NAME,
                    'bad_url': quote(bad_url),
                }
                response = self.client.post(nasty_url, {
                    'username': 'testclient',
                    'password': 'password',
                })
                self.assertEqual(response.status_code, 302)
                self.assertNotIn(bad_url, response.url, '%s should be blocked' % bad_url)
        # These URLs should pass the security check.
        good_urls = (
            '/view/?param=http://example.com',
            '/view/?param=https://example.com',
            '/view?param=ftp://example.com',
            'view/?param=//example.com',
            'https://testserver/',
            'HTTPS://testserver/',
            '//testserver/',
            '/url%20with%20spaces/',
        )
        for good_url in good_urls:
            with self.subTest(good_url=good_url):
                safe_url = '%(url)s?%(next)s=%(good_url)s' % {
                    'url': login_url,
                    'next': REDIRECT_FIELD_NAME,
                    'good_url': quote(good_url),
                }
                response = self.client.post(safe_url, {
                    'username': 'testclient',
                    'password': 'password',
                })
                self.assertEqual(response.status_code, 302)
                self.assertIn(good_url, response.url, '%s should be allowed' % good_url)

    def test_security_check_https(self):
        """An http:// ?next= is rejected when the request itself is https."""
        login_url = reverse('login')
        non_https_next_url = 'http://testserver/path'
        not_secured_url = '%(url)s?%(next)s=%(next_url)s' % {
            'url': login_url,
            'next': REDIRECT_FIELD_NAME,
            'next_url': quote(non_https_next_url),
        }
        post_data = {
            'username': 'testclient',
            'password': 'password',
        }
        response = self.client.post(not_secured_url, post_data, secure=True)
        self.assertEqual(response.status_code, 302)
        # Falls back to the default redirect instead of downgrading to http.
        self.assertNotEqual(response.url, non_https_next_url)
        self.assertEqual(response.url, settings.LOGIN_REDIRECT_URL)

    def test_login_form_contains_request(self):
        # The custom authentication form for this login requires a request to
        # initialize it.
        response = self.client.post('/custom_request_auth_login/', {
            'username': 'testclient',
            'password': 'password',
        })
        # The login was successful.
        self.assertRedirects(response, settings.LOGIN_REDIRECT_URL, fetch_redirect_response=False)

    def test_login_csrf_rotate(self):
        """
        Makes sure that a login rotates the currently-used CSRF token.
        """
        # Do a GET to establish a CSRF token
        # The test client isn't used here as it's a test for middleware.
        req = HttpRequest()
        CsrfViewMiddleware().process_view(req, LoginView.as_view(), (), {})
        # get_token() triggers CSRF token inclusion in the response
        get_token(req)
        resp = LoginView.as_view()(req)
        resp2 = CsrfViewMiddleware().process_response(req, resp)
        csrf_cookie = resp2.cookies.get(settings.CSRF_COOKIE_NAME, None)
        token1 = csrf_cookie.coded_value
        # Prepare the POST request
        req = HttpRequest()
        req.COOKIES[settings.CSRF_COOKIE_NAME] = token1
        req.method = "POST"
        req.POST = {'username': 'testclient', 'password': 'password', 'csrfmiddlewaretoken': token1}
        # Use POST request to log in
        SessionMiddleware().process_request(req)
        CsrfViewMiddleware().process_view(req, LoginView.as_view(), (), {})
        req.META["SERVER_NAME"] = "testserver"  # Required to have redirect work in login view
        req.META["SERVER_PORT"] = 80
        resp = LoginView.as_view()(req)
        resp2 = CsrfViewMiddleware().process_response(req, resp)
        csrf_cookie = resp2.cookies.get(settings.CSRF_COOKIE_NAME, None)
        token2 = csrf_cookie.coded_value
        # Check the CSRF token switched
        self.assertNotEqual(token1, token2)

    def test_session_key_flushed_on_login(self):
        """
        To avoid reusing another user's session, ensure a new, empty session is
        created if the existing session corresponds to a different authenticated
        user.
        """
        self.login()
        original_session_key = self.client.session.session_key
        self.login(username='staff')
        self.assertNotEqual(original_session_key, self.client.session.session_key)

    def test_session_key_flushed_on_login_after_password_change(self):
        """
        As above, but same user logging in after a password change.
        """
        self.login()
        original_session_key = self.client.session.session_key
        # If no password change, session key should not be flushed.
        self.login()
        self.assertEqual(original_session_key, self.client.session.session_key)
        user = User.objects.get(username='testclient')
        user.set_password('foobar')
        user.save()
        self.login(password='foobar')
        self.assertNotEqual(original_session_key, self.client.session.session_key)

    def test_login_session_without_hash_session_key(self):
        """
        Session without django.contrib.auth.HASH_SESSION_KEY should login
        without an exception.
        """
        user = User.objects.get(username='testclient')
        engine = import_module(settings.SESSION_ENGINE)
        # Build a session containing only the user id (no auth hash).
        session = engine.SessionStore()
        session[SESSION_KEY] = user.id
        session.save()
        original_session_key = session.session_key
        self.client.cookies[settings.SESSION_COOKIE_NAME] = original_session_key
        self.login()
        self.assertNotEqual(original_session_key, self.client.session.session_key)
class LoginURLSettings(AuthViewsTestCase):
    """Tests for settings.LOGIN_URL."""

    def assertLoginURLEquals(self, url):
        # A login_required view must redirect to exactly `url`.
        response = self.client.get('/login_required/')
        self.assertRedirects(response, url, fetch_redirect_response=False)

    @override_settings(LOGIN_URL='/login/')
    def test_standard_login_url(self):
        self.assertLoginURLEquals('/login/?next=/login_required/')

    @override_settings(LOGIN_URL='login')
    def test_named_login_url(self):
        # A URL pattern name is resolved before redirecting.
        self.assertLoginURLEquals('/login/?next=/login_required/')

    @override_settings(LOGIN_URL='http://remote.example.com/login')
    def test_remote_login_url(self):
        # For an absolute LOGIN_URL the next param is the absolute origin URL.
        quoted_next = quote('http://testserver/login_required/')
        expected = 'http://remote.example.com/login?next=%s' % quoted_next
        self.assertLoginURLEquals(expected)

    @override_settings(LOGIN_URL='https:///login/')
    def test_https_login_url(self):
        quoted_next = quote('http://testserver/login_required/')
        expected = 'https:///login/?next=%s' % quoted_next
        self.assertLoginURLEquals(expected)

    @override_settings(LOGIN_URL='/login/?pretty=1')
    def test_login_url_with_querystring(self):
        # Existing query string is preserved; next is appended.
        self.assertLoginURLEquals('/login/?pretty=1&next=/login_required/')

    @override_settings(LOGIN_URL='http://remote.example.com/login/?next=/default/')
    def test_remote_login_url_with_next_querystring(self):
        # An existing next in LOGIN_URL is replaced by the real target.
        quoted_next = quote('http://testserver/login_required/')
        expected = 'http://remote.example.com/login/?next=%s' % quoted_next
        self.assertLoginURLEquals(expected)

    @override_settings(LOGIN_URL=reverse_lazy('login'))
    def test_lazy_login_url(self):
        self.assertLoginURLEquals('/login/?next=/login_required/')
class LoginRedirectUrlTest(AuthViewsTestCase):
    """Tests for settings.LOGIN_REDIRECT_URL."""

    def assertLoginRedirectURLEqual(self, url):
        response = self.login()
        self.assertRedirects(response, url, fetch_redirect_response=False)

    def test_default(self):
        self.assertLoginRedirectURLEqual('/accounts/profile/')

    @override_settings(LOGIN_REDIRECT_URL='/custom/')
    def test_custom(self):
        self.assertLoginRedirectURLEqual('/custom/')

    @override_settings(LOGIN_REDIRECT_URL='password_reset')
    def test_named(self):
        # A URL pattern name is resolved before redirecting.
        self.assertLoginRedirectURLEqual('/password_reset/')

    @override_settings(LOGIN_REDIRECT_URL='http://remote.example.com/welcome/')
    def test_remote(self):
        self.assertLoginRedirectURLEqual('http://remote.example.com/welcome/')
class RedirectToLoginTests(AuthViewsTestCase):
    """Tests for the redirect_to_login view"""

    @override_settings(LOGIN_URL=reverse_lazy('login'))
    def test_redirect_to_login_with_lazy(self):
        # A lazily-reversed LOGIN_URL is resolved correctly.
        login_redirect_response = redirect_to_login(next='/else/where/')
        expected = '/login/?next=/else/where/'
        self.assertEqual(expected, login_redirect_response.url)

    @override_settings(LOGIN_URL=reverse_lazy('login'))
    def test_redirect_to_login_with_lazy_and_unicode(self):
        # Non-ASCII characters in next are percent-encoded.
        login_redirect_response = redirect_to_login(next='/else/where/झ/')
        expected = '/login/?next=/else/where/%E0%A4%9D/'
        self.assertEqual(expected, login_redirect_response.url)
class LogoutThenLoginTests(AuthViewsTestCase):
    """Tests for the logout_then_login view"""

    def confirm_logged_out(self):
        self.assertNotIn(SESSION_KEY, self.client.session)

    @override_settings(LOGIN_URL='/login/')
    def test_default_logout_then_login(self):
        self.login()
        # Call the view function directly with a hand-built request
        # carrying the test client's session.
        req = HttpRequest()
        req.method = 'GET'
        req.session = self.client.session
        response = logout_then_login(req)
        self.confirm_logged_out()
        self.assertRedirects(response, '/login/', fetch_redirect_response=False)

    def test_logout_then_login_with_custom_login(self):
        self.login()
        req = HttpRequest()
        req.method = 'GET'
        req.session = self.client.session
        # An explicit login_url overrides settings.LOGIN_URL.
        response = logout_then_login(req, login_url='/custom/')
        self.confirm_logged_out()
        self.assertRedirects(response, '/custom/', fetch_redirect_response=False)
class LoginRedirectAuthenticatedUser(AuthViewsTestCase):
    """Behavior of LoginView's redirect_authenticated_user option."""
    # Login URL with redirect_authenticated_user left at its default (False).
    dont_redirect_url = '/login/redirect_authenticated_user_default/'
    # Login URL with redirect_authenticated_user=True.
    do_redirect_url = '/login/redirect_authenticated_user/'

    def test_default(self):
        """Stay on the login page by default."""
        self.login()
        response = self.client.get(self.dont_redirect_url)
        self.assertEqual(response.status_code, 200)
        self.assertEqual(response.context['next'], '')

    def test_guest(self):
        """If not logged in, stay on the same page."""
        response = self.client.get(self.do_redirect_url)
        self.assertEqual(response.status_code, 200)

    def test_redirect(self):
        """If logged in, go to default redirected URL."""
        self.login()
        response = self.client.get(self.do_redirect_url)
        self.assertRedirects(response, '/accounts/profile/', fetch_redirect_response=False)

    @override_settings(LOGIN_REDIRECT_URL='/custom/')
    def test_redirect_url(self):
        """If logged in, go to custom redirected URL."""
        self.login()
        response = self.client.get(self.do_redirect_url)
        self.assertRedirects(response, '/custom/', fetch_redirect_response=False)

    def test_redirect_param(self):
        """If next is specified as a GET parameter, go there."""
        self.login()
        url = self.do_redirect_url + '?next=/custom_next/'
        response = self.client.get(url)
        self.assertRedirects(response, '/custom_next/', fetch_redirect_response=False)

    def test_redirect_loop(self):
        """
        Detect a redirect loop if LOGIN_REDIRECT_URL is not correctly set,
        with and without custom parameters.
        """
        self.login()
        msg = (
            "Redirection loop for authenticated user detected. Check that "
            "your LOGIN_REDIRECT_URL doesn't point to a login page"
        )
        with self.settings(LOGIN_REDIRECT_URL=self.do_redirect_url):
            with self.assertRaisesMessage(ValueError, msg):
                self.client.get(self.do_redirect_url)
            # Query string variations still loop to the same view.
            url = self.do_redirect_url + '?bla=2'
            with self.assertRaisesMessage(ValueError, msg):
                self.client.get(url)

    def test_permission_required_not_logged_in(self):
        # Not logged in ...
        with self.settings(LOGIN_URL=self.do_redirect_url):
            # redirected to login.
            response = self.client.get('/permission_required_redirect/', follow=True)
            self.assertEqual(response.status_code, 200)
            # exception raised.
            response = self.client.get('/permission_required_exception/', follow=True)
            self.assertEqual(response.status_code, 403)
            # redirected to login.
            response = self.client.get('/login_and_permission_required_exception/', follow=True)
            self.assertEqual(response.status_code, 200)

    def test_permission_required_logged_in(self):
        self.login()
        # Already logged in...
        with self.settings(LOGIN_URL=self.do_redirect_url):
            # redirect loop encountered.
            with self.assertRaisesMessage(RedirectCycleError, 'Redirect loop detected.'):
                self.client.get('/permission_required_redirect/', follow=True)
            # exception raised.
            response = self.client.get('/permission_required_exception/', follow=True)
            self.assertEqual(response.status_code, 403)
            # exception raised.
            response = self.client.get('/login_and_permission_required_exception/', follow=True)
            self.assertEqual(response.status_code, 403)
class LoginSuccessURLAllowedHostsTest(AuthViewsTestCase):
    """LoginView's success_url_allowed_hosts whitelist for ?next= hosts."""

    def test_success_url_allowed_hosts_same_host(self):
        response = self.client.post('/login/allowed_hosts/', {
            'username': 'testclient',
            'password': 'password',
            'next': 'https://testserver/home',
        })
        self.assertIn(SESSION_KEY, self.client.session)
        self.assertRedirects(response, 'https://testserver/home', fetch_redirect_response=False)

    def test_success_url_allowed_hosts_safe_host(self):
        # 'otherserver' is in the view's success_url_allowed_hosts.
        response = self.client.post('/login/allowed_hosts/', {
            'username': 'testclient',
            'password': 'password',
            'next': 'https://otherserver/home',
        })
        self.assertIn(SESSION_KEY, self.client.session)
        self.assertRedirects(response, 'https://otherserver/home', fetch_redirect_response=False)

    def test_success_url_allowed_hosts_unsafe_host(self):
        # Unknown host: next is ignored, default redirect is used.
        response = self.client.post('/login/allowed_hosts/', {
            'username': 'testclient',
            'password': 'password',
            'next': 'https://evil/home',
        })
        self.assertIn(SESSION_KEY, self.client.session)
        self.assertRedirects(response, '/accounts/profile/', fetch_redirect_response=False)
class LogoutTest(AuthViewsTestCase):
    """Tests for the logout view: templates, redirects and ?next= security."""

    def confirm_logged_out(self):
        self.assertNotIn(SESSION_KEY, self.client.session)

    def test_logout_default(self):
        "Logout without next_page option renders the default template"
        self.login()
        response = self.client.get('/logout/')
        self.assertContains(response, 'Logged out')
        self.confirm_logged_out()

    def test_logout_with_post(self):
        self.login()
        response = self.client.post('/logout/')
        self.assertContains(response, 'Logged out')
        self.confirm_logged_out()

    def test_14377(self):
        # Bug 14377
        self.login()
        response = self.client.get('/logout/')
        self.assertIn('site', response.context)

    def test_logout_doesnt_cache(self):
        """
        The logout() view should send "no-cache" headers for reasons described
        in #25490.
        """
        response = self.client.get('/logout/')
        self.assertIn('no-store', response['Cache-Control'])

    def test_logout_with_overridden_redirect_url(self):
        # Bug 11223
        self.login()
        response = self.client.get('/logout/next_page/')
        self.assertRedirects(response, '/somewhere/', fetch_redirect_response=False)
        # A ?next= parameter overrides the view's next_page.
        response = self.client.get('/logout/next_page/?next=/login/')
        self.assertRedirects(response, '/login/', fetch_redirect_response=False)
        self.confirm_logged_out()

    def test_logout_with_next_page_specified(self):
        "Logout with next_page option given redirects to specified resource"
        self.login()
        response = self.client.get('/logout/next_page/')
        self.assertRedirects(response, '/somewhere/', fetch_redirect_response=False)
        self.confirm_logged_out()

    def test_logout_with_redirect_argument(self):
        "Logout with query string redirects to specified resource"
        self.login()
        response = self.client.get('/logout/?next=/login/')
        self.assertRedirects(response, '/login/', fetch_redirect_response=False)
        self.confirm_logged_out()

    def test_logout_with_custom_redirect_argument(self):
        "Logout with custom query string redirects to specified resource"
        self.login()
        response = self.client.get('/logout/custom_query/?follow=/somewhere/')
        self.assertRedirects(response, '/somewhere/', fetch_redirect_response=False)
        self.confirm_logged_out()

    def test_logout_with_named_redirect(self):
        "Logout resolves names or URLs passed as next_page."
        self.login()
        response = self.client.get('/logout/next_page/named/')
        self.assertRedirects(response, '/password_reset/', fetch_redirect_response=False)
        self.confirm_logged_out()

    def test_success_url_allowed_hosts_same_host(self):
        self.login()
        response = self.client.get('/logout/allowed_hosts/?next=https://testserver/')
        self.assertRedirects(response, 'https://testserver/', fetch_redirect_response=False)
        self.confirm_logged_out()

    def test_success_url_allowed_hosts_safe_host(self):
        # 'otherserver' is whitelisted via success_url_allowed_hosts.
        self.login()
        response = self.client.get('/logout/allowed_hosts/?next=https://otherserver/')
        self.assertRedirects(response, 'https://otherserver/', fetch_redirect_response=False)
        self.confirm_logged_out()

    def test_success_url_allowed_hosts_unsafe_host(self):
        # Unknown host in ?next=: redirect back to the logout URL itself.
        self.login()
        response = self.client.get('/logout/allowed_hosts/?next=https://evil/')
        self.assertRedirects(response, '/logout/allowed_hosts/', fetch_redirect_response=False)
        self.confirm_logged_out()

    def test_security_check(self):
        """Unsafe ?next= targets are ignored; safe ones are honored."""
        logout_url = reverse('logout')
        # These URLs should not pass the security check.
        bad_urls = (
            'http://example.com',
            'http:///example.com',
            'https://example.com',
            'ftp://example.com',
            '///example.com',
            '//example.com',
            'javascript:alert("XSS")',
        )
        for bad_url in bad_urls:
            with self.subTest(bad_url=bad_url):
                nasty_url = '%(url)s?%(next)s=%(bad_url)s' % {
                    'url': logout_url,
                    'next': REDIRECT_FIELD_NAME,
                    'bad_url': quote(bad_url),
                }
                self.login()
                response = self.client.get(nasty_url)
                self.assertEqual(response.status_code, 302)
                self.assertNotIn(bad_url, response.url, '%s should be blocked' % bad_url)
                self.confirm_logged_out()
        # These URLs should pass the security check.
        good_urls = (
            '/view/?param=http://example.com',
            '/view/?param=https://example.com',
            '/view?param=ftp://example.com',
            'view/?param=//example.com',
            'https://testserver/',
            'HTTPS://testserver/',
            '//testserver/',
            '/url%20with%20spaces/',
        )
        for good_url in good_urls:
            with self.subTest(good_url=good_url):
                safe_url = '%(url)s?%(next)s=%(good_url)s' % {
                    'url': logout_url,
                    'next': REDIRECT_FIELD_NAME,
                    'good_url': quote(good_url),
                }
                self.login()
                response = self.client.get(safe_url)
                self.assertEqual(response.status_code, 302)
                self.assertIn(good_url, response.url, '%s should be allowed' % good_url)
                self.confirm_logged_out()

    def test_security_check_https(self):
        # An http:// ?next= is rejected when logging out over https.
        logout_url = reverse('logout')
        non_https_next_url = 'http://testserver/'
        url = '%(url)s?%(next)s=%(next_url)s' % {
            'url': logout_url,
            'next': REDIRECT_FIELD_NAME,
            'next_url': quote(non_https_next_url),
        }
        self.login()
        response = self.client.get(url, secure=True)
        self.assertRedirects(response, logout_url, fetch_redirect_response=False)
        self.confirm_logged_out()

    def test_logout_preserve_language(self):
        """Language is preserved after logout."""
        self.login()
        self.client.post('/setlang/', {'language': 'pl'})
        self.assertEqual(self.client.cookies[settings.LANGUAGE_COOKIE_NAME].value, 'pl')
        self.client.get('/logout/')
        self.assertEqual(self.client.cookies[settings.LANGUAGE_COOKIE_NAME].value, 'pl')

    @override_settings(LOGOUT_REDIRECT_URL='/custom/')
    def test_logout_redirect_url_setting(self):
        self.login()
        response = self.client.get('/logout/')
        self.assertRedirects(response, '/custom/', fetch_redirect_response=False)

    @override_settings(LOGOUT_REDIRECT_URL='logout')
    def test_logout_redirect_url_named_setting(self):
        # LOGOUT_REDIRECT_URL given as a URL pattern name is resolved.
        self.login()
        response = self.client.get('/logout/')
        self.assertRedirects(response, '/logout/', fetch_redirect_response=False)
def get_perm(Model, perm):
    """Return the Permission object with codename *perm* for *Model*."""
    content_type = ContentType.objects.get_for_model(Model)
    return Permission.objects.get(codename=perm, content_type=content_type)
# Redirect in test_user_change_password will fail if session auth hash
# isn't updated after password change (#21649)
@override_settings(ROOT_URLCONF='auth_tests.urls_admin')
class ChangelistTests(AuthViewsTestCase):
    """Admin user-changelist and user-change-password view tests."""

    @classmethod
    def setUpTestData(cls):
        super().setUpTestData()
        # Make me a superuser before logging in.
        User.objects.filter(username='testclient').update(is_staff=True, is_superuser=True)

    def setUp(self):
        self.login()
        # Get the latest last_login value.
        self.admin = User.objects.get(pk=self.u1.pk)

    def get_user_data(self, user):
        """Build the full POST payload the admin user-change form expects,
        including the split date/time fields and their initial-* copies."""
        return {
            'username': user.username,
            'password': user.password,
            'email': user.email,
            'is_active': user.is_active,
            'is_staff': user.is_staff,
            'is_superuser': user.is_superuser,
            'last_login_0': user.last_login.strftime('%Y-%m-%d'),
            'last_login_1': user.last_login.strftime('%H:%M:%S'),
            'initial-last_login_0': user.last_login.strftime('%Y-%m-%d'),
            'initial-last_login_1': user.last_login.strftime('%H:%M:%S'),
            'date_joined_0': user.date_joined.strftime('%Y-%m-%d'),
            'date_joined_1': user.date_joined.strftime('%H:%M:%S'),
            'initial-date_joined_0': user.date_joined.strftime('%Y-%m-%d'),
            'initial-date_joined_1': user.date_joined.strftime('%H:%M:%S'),
            'first_name': user.first_name,
            'last_name': user.last_name,
        }

    # #20078 - users shouldn't be allowed to guess password hashes via
    # repeated password__startswith queries.
    def test_changelist_disallows_password_lookups(self):
        # A lookup that tries to filter on password isn't OK
        with self.assertLogs('django.security.DisallowedModelAdminLookup', 'ERROR'):
            response = self.client.get(reverse('auth_test_admin:auth_user_changelist') + '?password__startswith=sha1$')
        self.assertEqual(response.status_code, 400)

    def test_user_change_email(self):
        data = self.get_user_data(self.admin)
        data['email'] = 'new_' + data['email']
        response = self.client.post(
            reverse('auth_test_admin:auth_user_change', args=(self.admin.pk,)),
            data
        )
        self.assertRedirects(response, reverse('auth_test_admin:auth_user_changelist'))
        # The admin log records exactly which field changed.
        row = LogEntry.objects.latest('id')
        self.assertEqual(row.get_change_message(), 'Changed email.')

    def test_user_not_change(self):
        # Re-posting unchanged data logs "No fields changed."
        response = self.client.post(
            reverse('auth_test_admin:auth_user_change', args=(self.admin.pk,)),
            self.get_user_data(self.admin)
        )
        self.assertRedirects(response, reverse('auth_test_admin:auth_user_changelist'))
        row = LogEntry.objects.latest('id')
        self.assertEqual(row.get_change_message(), 'No fields changed.')

    def test_user_change_password(self):
        user_change_url = reverse('auth_test_admin:auth_user_change', args=(self.admin.pk,))
        password_change_url = reverse('auth_test_admin:auth_user_password_change', args=(self.admin.pk,))
        response = self.client.get(user_change_url)
        # Test the link inside password field help_text.
        rel_link = re.search(
            r'you can change the password using <a href="([^"]*)">this form</a>',
            response.content.decode()
        ).groups()[0]
        self.assertEqual(
            os.path.normpath(user_change_url + rel_link),
            os.path.normpath(password_change_url)
        )
        response = self.client.post(
            password_change_url,
            {
                'password1': 'password1',
                'password2': 'password1',
            }
        )
        self.assertRedirects(response, user_change_url)
        row = LogEntry.objects.latest('id')
        self.assertEqual(row.get_change_message(), 'Changed password.')
        # The admin changed their own password; re-login must use the new one
        # (fails if the session auth hash wasn't updated, #21649).
        self.logout()
        self.login(password='password1')

    def test_user_change_different_user_password(self):
        u = User.objects.get(email='staffmember@example.com')
        response = self.client.post(
            reverse('auth_test_admin:auth_user_password_change', args=(u.pk,)),
            {
                'password1': 'password1',
                'password2': 'password1',
            }
        )
        self.assertRedirects(response, reverse('auth_test_admin:auth_user_change', args=(u.pk,)))
        # The log entry attributes the change to the acting admin,
        # against the target user's object id.
        row = LogEntry.objects.latest('id')
        self.assertEqual(row.user_id, self.admin.pk)
        self.assertEqual(row.object_id, str(u.pk))
        self.assertEqual(row.get_change_message(), 'Changed password.')

    def test_password_change_bad_url(self):
        # A non-integer user pk in the URL 404s rather than crashing.
        response = self.client.get(reverse('auth_test_admin:auth_user_password_change', args=('foobar',)))
        self.assertEqual(response.status_code, 404)

    @mock.patch('django.contrib.auth.admin.UserAdmin.has_change_permission')
    def test_user_change_password_passes_user_to_has_change_permission(self, has_change_permission):
        url = reverse('auth_test_admin:auth_user_password_change', args=(self.admin.pk,))
        self.client.post(url, {'password1': 'password1', 'password2': 'password1'})
        # has_change_permission(request, obj) must receive the target user.
        (_request, user), _kwargs = has_change_permission.call_args
        self.assertEqual(user.pk, self.admin.pk)

    def test_view_user_password_is_readonly(self):
        u = User.objects.get(username='testclient')
        u.is_superuser = False
        u.save()
        original_password = u.password
        # Grant view-only permission so the change form renders read-only.
        u.user_permissions.add(get_perm(User, 'view_user'))
        response = self.client.get(reverse('auth_test_admin:auth_user_change', args=(u.pk,)),)
        algo, salt, hash_string = (u.password.split('$'))
        self.assertContains(response, '<div class="readonly">testclient</div>')
        # ReadOnlyPasswordHashWidget is used to render the field.
        self.assertContains(
            response,
            '<strong>algorithm</strong>: %s\n\n'
            '<strong>salt</strong>: %s**********\n\n'
            '<strong>hash</strong>: %s**************************\n\n' % (
                algo, salt[:2], hash_string[:6],
            ),
            html=True,
        )
        # Value in POST data is ignored.
        data = self.get_user_data(u)
        data['password'] = 'shouldnotchange'
        change_url = reverse('auth_test_admin:auth_user_change', args=(u.pk,))
        response = self.client.post(change_url, data)
        self.assertRedirects(response, reverse('auth_test_admin:auth_user_changelist'))
        u.refresh_from_db()
        self.assertEqual(u.password, original_password)
@override_settings(
    AUTH_USER_MODEL='auth_tests.UUIDUser',
    ROOT_URLCONF='auth_tests.urls_custom_user_admin',
)
class UUIDUserTests(TestCase):
    """Admin password change works for a user model with a UUID primary key."""

    def test_admin_password_change(self):
        u = UUIDUser.objects.create_superuser(username='uuid', email='foo@bar.com', password='test')
        self.assertTrue(self.client.login(username='uuid', password='test'))
        user_change_url = reverse('custom_user_admin:auth_tests_uuiduser_change', args=(u.pk,))
        response = self.client.get(user_change_url)
        self.assertEqual(response.status_code, 200)
        password_change_url = reverse('custom_user_admin:auth_user_password_change', args=(u.pk,))
        response = self.client.get(password_change_url)
        self.assertEqual(response.status_code, 200)
        # A LogEntry is created with pk=1 which breaks a FK constraint on MySQL
        with connection.constraint_checks_disabled():
            response = self.client.post(password_change_url, {
                'password1': 'password1',
                'password2': 'password1',
            })
            self.assertRedirects(response, user_change_url)
            row = LogEntry.objects.latest('id')
            self.assertEqual(row.user_id, 1)  # hardcoded in CustomUserAdmin.log_change()
            self.assertEqual(row.object_id, str(u.pk))
            self.assertEqual(row.get_change_message(), 'Changed password.')
        # The LogEntry.user column isn't altered to a UUID type so it's set to
        # an integer manually in CustomUserAdmin to avoid an error. To avoid a
        # constraint error, delete the entry before constraints are checked
        # after the test.
        row.delete()
| 42.974299 | 119 | 0.652911 | import datetime
import itertools
import os
import re
from importlib import import_module
from unittest import mock
from urllib.parse import quote
from django.apps import apps
from django.conf import settings
from django.contrib.admin.models import LogEntry
from django.contrib.auth import (
BACKEND_SESSION_KEY, REDIRECT_FIELD_NAME, SESSION_KEY,
)
from django.contrib.auth.forms import (
AuthenticationForm, PasswordChangeForm, SetPasswordForm,
)
from django.contrib.auth.models import Permission, User
from django.contrib.auth.views import (
INTERNAL_RESET_SESSION_TOKEN, LoginView, logout_then_login,
redirect_to_login,
)
from django.contrib.contenttypes.models import ContentType
from django.contrib.sessions.middleware import SessionMiddleware
from django.contrib.sites.requests import RequestSite
from django.core import mail
from django.db import connection
from django.http import HttpRequest
from django.middleware.csrf import CsrfViewMiddleware, get_token
from django.test import Client, TestCase, override_settings
from django.test.client import RedirectCycleError
from django.urls import NoReverseMatch, reverse, reverse_lazy
from django.utils.http import urlsafe_base64_encode
from .client import PasswordResetConfirmClient
from .models import CustomUser, UUIDUser
from .settings import AUTH_TEMPLATES
@override_settings(
    LANGUAGES=[('en', 'English')],
    LANGUAGE_CODE='en',
    TEMPLATES=AUTH_TEMPLATES,
    ROOT_URLCONF='auth_tests.urls',
)
class AuthViewsTestCase(TestCase):
    """Shared base: two users, login/logout helpers, and a form-error helper."""

    @classmethod
    def setUpTestData(cls):
        cls.u1 = User.objects.create_user(username='testclient', password='password', email='testclient@example.com')
        cls.u3 = User.objects.create_user(username='staff', password='password', email='staffmember@example.com')

    def login(self, username='testclient', password='password'):
        """POST the login form, assert success, and return the response."""
        response = self.client.post('/login/', {
            'username': username,
            'password': password,
        })
        self.assertIn(SESSION_KEY, self.client.session)
        return response

    def logout(self):
        response = self.client.get('/admin/logout/')
        self.assertEqual(response.status_code, 200)
        self.assertNotIn(SESSION_KEY, self.client.session)

    def assertFormError(self, response, error):
        # NOTE: intentionally shadows TestCase.assertFormError with a simpler
        # (response, error) signature: the error just has to appear somewhere
        # in the form's errors.
        form_errors = list(itertools.chain(*response.context['form'].errors.values()))
        self.assertIn(str(error), form_errors)
@override_settings(ROOT_URLCONF='django.contrib.auth.urls')
class AuthViewNamedURLTests(AuthViewsTestCase):
    """django.contrib.auth.urls exposes all the expected named URLs."""

    def test_named_urls(self):
        # (name, args, kwargs) triples for every auth URL pattern.
        expected_named_urls = [
            ('login', [], {}),
            ('logout', [], {}),
            ('password_change', [], {}),
            ('password_change_done', [], {}),
            ('password_reset', [], {}),
            ('password_reset_done', [], {}),
            ('password_reset_confirm', [], {
                'uidb64': 'aaaaaaa',
                'token': '1111-aaaaa',
            }),
            ('password_reset_complete', [], {}),
        ]
        for name, args, kwargs in expected_named_urls:
            with self.subTest(name=name):
                try:
                    reverse(name, args=args, kwargs=kwargs)
                except NoReverseMatch:
                    self.fail("Reversal of url named '%s' failed with NoReverseMatch" % name)
class PasswordResetTest(AuthViewsTestCase):
    """Tests for the password reset flow: the request form, the emailed
    link, and the confirm/set-password views."""

    def setUp(self):
        # Client that automatically follows the token -> session redirect
        # on the reset-confirm view.
        self.client = PasswordResetConfirmClient()

    def test_email_not_found(self):
        """An unknown email address doesn't error, but no mail is sent."""
        response = self.client.get('/password_reset/')
        self.assertEqual(response.status_code, 200)
        response = self.client.post('/password_reset/', {'email': 'not_a_real_email@email.com'})
        self.assertEqual(response.status_code, 302)
        self.assertEqual(len(mail.outbox), 0)

    def test_email_found(self):
        """A registered email address triggers a single plain-text email."""
        response = self.client.post('/password_reset/', {'email': 'staffmember@example.com'})
        self.assertEqual(response.status_code, 302)
        self.assertEqual(len(mail.outbox), 1)
        self.assertIn("http://", mail.outbox[0].body)
        self.assertEqual(settings.DEFAULT_FROM_EMAIL, mail.outbox[0].from_email)
        # Without an html_email_template, the message is not multipart.
        self.assertFalse(mail.outbox[0].message().is_multipart())

    def test_extra_email_context(self):
        """extra_email_context is available in the email template context."""
        response = self.client.post(
            '/password_reset_extra_email_context/',
            {'email': 'staffmember@example.com'},
        )
        self.assertEqual(response.status_code, 302)
        self.assertEqual(len(mail.outbox), 1)
        self.assertIn('Email email context: "Hello!"', mail.outbox[0].body)

    def test_html_mail_template(self):
        """An html_email_template produces a multipart message with
        text/plain and text/html alternatives."""
        response = self.client.post('/password_reset/html_email_template/', {'email': 'staffmember@example.com'})
        self.assertEqual(response.status_code, 302)
        self.assertEqual(len(mail.outbox), 1)
        message = mail.outbox[0].message()
        self.assertEqual(len(message.get_payload()), 2)
        self.assertTrue(message.is_multipart())
        self.assertEqual(message.get_payload(0).get_content_type(), 'text/plain')
        self.assertEqual(message.get_payload(1).get_content_type(), 'text/html')
        self.assertNotIn('<html>', message.get_payload(0).get_payload())
        self.assertIn('<html>', message.get_payload(1).get_payload())

    def test_email_found_custom_from(self):
        """The view's from_email argument sets the sender of the reset email."""
        response = self.client.post('/password_reset_from_email/', {'email': 'staffmember@example.com'})
        self.assertEqual(response.status_code, 302)
        self.assertEqual(len(mail.outbox), 1)
        self.assertEqual("staffmember@example.com", mail.outbox[0].from_email)

    @override_settings(DEBUG_PROPAGATE_EXCEPTIONS=True)
    def test_poisoned_http_host(self):
        """A poisoned HTTP_HOST header can't be used for reset emails.

        The host check itself happens when get_host() is invoked, but we
        check here as a practical consequence: the request gets a 400 and
        no mail is sent.
        """
        with self.assertLogs('django.security.DisallowedHost', 'ERROR'):
            response = self.client.post(
                '/password_reset/',
                {'email': 'staffmember@example.com'},
                HTTP_HOST='www.example:dr.frankenstein@evil.tld'
            )
        self.assertEqual(response.status_code, 400)
        self.assertEqual(len(mail.outbox), 0)

    # Skip any 500 handler action (like sending more mail...)
    @override_settings(DEBUG_PROPAGATE_EXCEPTIONS=True)
    def test_poisoned_http_host_admin_site(self):
        """Same poisoned-HTTP_HOST check via the admin's password reset."""
        with self.assertLogs('django.security.DisallowedHost', 'ERROR'):
            response = self.client.post(
                '/admin_password_reset/',
                {'email': 'staffmember@example.com'},
                HTTP_HOST='www.example:dr.frankenstein@evil.tld'
            )
        self.assertEqual(response.status_code, 400)
        self.assertEqual(len(mail.outbox), 0)

    def _test_confirm_start(self):
        """Request a reset and return (full_url, path) of the emailed link."""
        # Start by creating the email
        self.client.post('/password_reset/', {'email': 'staffmember@example.com'})
        self.assertEqual(len(mail.outbox), 1)
        return self._read_signup_email(mail.outbox[0])

    def _read_signup_email(self, email):
        """Extract the reset URL from *email* as (full_url, path)."""
        urlmatch = re.search(r"https?://[^/]*(/.*reset/\S*)", email.body)
        self.assertIsNotNone(urlmatch, "No URL found in sent email")
        return urlmatch.group(), urlmatch.groups()[0]

    def test_confirm_valid(self):
        url, path = self._test_confirm_start()
        response = self.client.get(path)
        # redirect to a 'complete' page:
        self.assertContains(response, "Please enter your new password")

    def test_confirm_invalid(self):
        url, path = self._test_confirm_start()
        # Munge the token in the path, but keep the same length in case the
        # URLconf would reject a different one.
        path = path[:-5] + ("0" * 4) + path[-1]
        response = self.client.get(path)
        self.assertContains(response, "The password reset link was invalid")

    def test_confirm_invalid_user(self):
        # uidb64 that doesn't correspond to an existing user.
        response = self.client.get('/reset/123456/1-1/')
        self.assertContains(response, "The password reset link was invalid")

    def test_confirm_overflow_user(self):
        # uidb64 that decodes to a value too large for a user pk.
        response = self.client.get('/reset/zzzzzzzzzzzzz/1-1/')
        self.assertContains(response, "The password reset link was invalid")

    def test_confirm_invalid_post(self):
        """A POST with a munged token must not change the password."""
        url, path = self._test_confirm_start()
        path = path[:-5] + ("0" * 4) + path[-1]
        self.client.post(path, {
            'new_password1': 'anewpassword',
            'new_password2': ' anewpassword',
        })
        u = User.objects.get(email='staffmember@example.com')
        self.assertTrue(not u.check_password("anewpassword"))

    def test_confirm_invalid_hash(self):
        """A POST with an invalid token hash marks the link invalid and
        leaves the password untouched."""
        u = User.objects.get(email='staffmember@example.com')
        original_password = u.password
        url, path = self._test_confirm_start()
        path_parts = path.split('-')
        path_parts[-1] = ("0") * 20 + '/'
        path = '-'.join(path_parts)
        response = self.client.post(path, {
            'new_password1': 'anewpassword',
            'new_password2': 'anewpassword',
        })
        self.assertIs(response.context['validlink'], False)
        u.refresh_from_db()
        self.assertEqual(original_password, u.password)

    def test_confirm_complete(self):
        url, path = self._test_confirm_start()
        response = self.client.post(path, {'new_password1': 'anewpassword', 'new_password2': 'anewpassword'})
        # Check the password has been changed
        u = User.objects.get(email='staffmember@example.com')
        self.assertTrue(u.check_password("anewpassword"))
        # The reset token is deleted from the session.
        self.assertNotIn(INTERNAL_RESET_SESSION_TOKEN, self.client.session)
        # Check we can't use the link again
        response = self.client.get(path)
        self.assertContains(response, "The password reset link was invalid")

    def test_confirm_different_passwords(self):
        url, path = self._test_confirm_start()
        response = self.client.post(path, {'new_password1': 'anewpassword', 'new_password2': 'x'})
        self.assertFormError(response, SetPasswordForm.error_messages['password_mismatch'])

    def test_reset_redirect_default(self):
        response = self.client.post('/password_reset/', {'email': 'staffmember@example.com'})
        self.assertRedirects(response, '/password_reset/done/', fetch_redirect_response=False)

    def test_reset_custom_redirect(self):
        response = self.client.post('/password_reset/custom_redirect/', {'email': 'staffmember@example.com'})
        self.assertRedirects(response, '/custom/', fetch_redirect_response=False)

    def test_reset_custom_redirect_named(self):
        response = self.client.post('/password_reset/custom_redirect/named/', {'email': 'staffmember@example.com'})
        self.assertRedirects(response, '/password_reset/', fetch_redirect_response=False)

    def test_confirm_redirect_default(self):
        url, path = self._test_confirm_start()
        response = self.client.post(path, {'new_password1': 'anewpassword', 'new_password2': 'anewpassword'})
        self.assertRedirects(response, '/reset/done/', fetch_redirect_response=False)

    def test_confirm_redirect_custom(self):
        url, path = self._test_confirm_start()
        path = path.replace('/reset/', '/reset/custom/')
        response = self.client.post(path, {'new_password1': 'anewpassword', 'new_password2': 'anewpassword'})
        self.assertRedirects(response, '/custom/', fetch_redirect_response=False)

    def test_confirm_redirect_custom_named(self):
        url, path = self._test_confirm_start()
        path = path.replace('/reset/', '/reset/custom/named/')
        response = self.client.post(path, {'new_password1': 'anewpassword', 'new_password2': 'anewpassword'})
        self.assertRedirects(response, '/password_reset/', fetch_redirect_response=False)

    def test_confirm_login_post_reset(self):
        url, path = self._test_confirm_start()
        path = path.replace('/reset/', '/reset/post_reset_login/')
        response = self.client.post(path, {'new_password1': 'anewpassword', 'new_password2': 'anewpassword'})
        self.assertRedirects(response, '/reset/done/', fetch_redirect_response=False)
        # post_reset_login logged the user in.
        self.assertIn(SESSION_KEY, self.client.session)

    @override_settings(
        AUTHENTICATION_BACKENDS=[
            'django.contrib.auth.backends.ModelBackend',
            'django.contrib.auth.backends.AllowAllUsersModelBackend',
        ]
    )
    def test_confirm_login_post_reset_custom_backend(self):
        # The session records the backend the view was configured with.
        backend = 'django.contrib.auth.backends.AllowAllUsersModelBackend'
        url, path = self._test_confirm_start()
        path = path.replace('/reset/', '/reset/post_reset_login_custom_backend/')
        response = self.client.post(path, {'new_password1': 'anewpassword', 'new_password2': 'anewpassword'})
        self.assertRedirects(response, '/reset/done/', fetch_redirect_response=False)
        self.assertIn(SESSION_KEY, self.client.session)
        self.assertEqual(self.client.session[BACKEND_SESSION_KEY], backend)

    def test_confirm_login_post_reset_already_logged_in(self):
        url, path = self._test_confirm_start()
        path = path.replace('/reset/', '/reset/post_reset_login/')
        self.login()
        response = self.client.post(path, {'new_password1': 'anewpassword', 'new_password2': 'anewpassword'})
        self.assertRedirects(response, '/reset/done/', fetch_redirect_response=False)
        self.assertIn(SESSION_KEY, self.client.session)

    def test_confirm_display_user_from_form(self):
        url, path = self._test_confirm_start()
        response = self.client.get(path)
        # The user is shown on a valid link...
        username = User.objects.get(email='staffmember@example.com').username
        self.assertContains(response, "Hello, %s." % username)
        # ...and is empty on an invalid one.
        response = self.client.get('/reset/zzzzzzzzzzzzz/1-1/')
        self.assertContains(response, "Hello, .")

    def test_confirm_link_redirects_to_set_password_page(self):
        url, path = self._test_confirm_start()
        # Use a plain Client, not PasswordResetConfirmClient, because that one
        # automatically fetches the redirect page.
        client = Client()
        response = client.get(path)
        token = response.resolver_match.kwargs['token']
        uuidb64 = response.resolver_match.kwargs['uidb64']
        self.assertRedirects(response, '/reset/%s/set-password/' % uuidb64)
        self.assertEqual(client.session['_password_reset_token'], token)

    def test_invalid_link_if_going_directly_to_the_final_reset_password_url(self):
        url, path = self._test_confirm_start()
        _, uuidb64, _ = path.strip('/').split('/')
        # Without the token in the session, the set-password URL is invalid.
        response = Client().get('/reset/%s/set-password/' % uuidb64)
        self.assertContains(response, 'The password reset link was invalid')
@override_settings(AUTH_USER_MODEL='auth_tests.CustomUser')
class CustomUserPasswordResetTest(AuthViewsTestCase):
    """Password reset works with a custom (email-keyed) user model."""

    user_email = 'staffmember@example.com'

    @classmethod
    def setUpTestData(cls):
        user = CustomUser.custom_objects.create(
            email='staffmember@example.com',
            date_of_birth=datetime.date(1976, 11, 8),
        )
        user.set_password('password')
        user.save()
        cls.u1 = user

    def setUp(self):
        self.client = PasswordResetConfirmClient()

    def _test_confirm_start(self):
        """Trigger the reset email and return (full_url, path) of its link."""
        response = self.client.post('/password_reset/', {'email': self.user_email})
        self.assertEqual(response.status_code, 302)
        self.assertEqual(len(mail.outbox), 1)
        return self._read_signup_email(mail.outbox[0])

    def _read_signup_email(self, email):
        """Pull the reset URL out of *email* as (full_url, path)."""
        match = re.search(r"https?://[^/]*(/.*reset/\S*)", email.body)
        self.assertIsNotNone(match, "No URL found in sent email")
        return match.group(), match.group(1)

    def test_confirm_valid_custom_user(self):
        url, path = self._test_confirm_start()
        response = self.client.get(path)
        # redirect to a 'complete' page:
        self.assertContains(response, "Please enter your new password")
        # then submit a new password
        response = self.client.post(path, {
            'new_password1': 'anewpassword',
            'new_password2': 'anewpassword',
        })
        self.assertRedirects(response, '/reset/done/')
@override_settings(AUTH_USER_MODEL='auth_tests.UUIDUser')
class UUIDUserPasswordResetTest(CustomUserPasswordResetTest):
    """Password reset works with a UUID-primary-key user model."""

    def _test_confirm_start(self):
        # Create the user on demand instead of relying on a fixture.
        UUIDUser.objects.create_user(
            email=self.user_email,
            username='foo',
            password='foo',
        )
        return super()._test_confirm_start()

    def test_confirm_invalid_uuid(self):
        """A uidb64 that doesn't decode to a UUID renders the invalid-link page."""
        _, path = self._test_confirm_start()
        bad_uid = urlsafe_base64_encode('INVALID_UUID'.encode())
        prefix, _real_uid, token_part = path.strip('/').split('/')
        response = self.client.get('/%s/%s/%s/' % (prefix, bad_uid, token_part))
        self.assertContains(response, 'The password reset link was invalid')
class ChangePasswordTest(AuthViewsTestCase):
    """Tests for the password change view: validation and redirects."""

    def fail_login(self):
        """Assert that the original (now stale) credentials are rejected."""
        response = self.client.post('/login/', {
            'username': 'testclient',
            'password': 'password',
        })
        self.assertFormError(response, AuthenticationForm.error_messages['invalid_login'] % {
            'username': User._meta.get_field('username').verbose_name
        })

    def logout(self):
        self.client.get('/logout/')

    def test_password_change_fails_with_invalid_old_password(self):
        self.login()
        response = self.client.post('/password_change/', {
            'old_password': 'donuts',
            'new_password1': 'password1',
            'new_password2': 'password1',
        })
        self.assertFormError(response, PasswordChangeForm.error_messages['password_incorrect'])

    def test_password_change_fails_with_mismatched_passwords(self):
        self.login()
        response = self.client.post('/password_change/', {
            'old_password': 'password',
            'new_password1': 'password1',
            'new_password2': 'donuts',
        })
        self.assertFormError(response, SetPasswordForm.error_messages['password_mismatch'])

    def test_password_change_succeeds(self):
        self.login()
        self.client.post('/password_change/', {
            'old_password': 'password',
            'new_password1': 'password1',
            'new_password2': 'password1',
        })
        # Old password no longer works; the new one does.
        self.fail_login()
        self.login(password='password1')

    def test_password_change_done_succeeds(self):
        self.login()
        response = self.client.post('/password_change/', {
            'old_password': 'password',
            'new_password1': 'password1',
            'new_password2': 'password1',
        })
        self.assertRedirects(response, '/password_change/done/', fetch_redirect_response=False)

    @override_settings(LOGIN_URL='/login/')
    def test_password_change_done_fails(self):
        # The "done" page requires authentication.
        response = self.client.get('/password_change/done/')
        self.assertRedirects(response, '/login/?next=/password_change/done/', fetch_redirect_response=False)

    def test_password_change_redirect_default(self):
        self.login()
        response = self.client.post('/password_change/', {
            'old_password': 'password',
            'new_password1': 'password1',
            'new_password2': 'password1',
        })
        self.assertRedirects(response, '/password_change/done/', fetch_redirect_response=False)

    def test_password_change_redirect_custom(self):
        self.login()
        response = self.client.post('/password_change/custom/', {
            'old_password': 'password',
            'new_password1': 'password1',
            'new_password2': 'password1',
        })
        self.assertRedirects(response, '/custom/', fetch_redirect_response=False)

    def test_password_change_redirect_custom_named(self):
        self.login()
        response = self.client.post('/password_change/custom/named/', {
            'old_password': 'password',
            'new_password1': 'password1',
            'new_password2': 'password1',
        })
        self.assertRedirects(response, '/password_reset/', fetch_redirect_response=False)
class SessionAuthenticationTests(AuthViewsTestCase):
    def test_user_password_change_updates_session(self):
        """Changing the password rotates the session key."""
        self.login()
        key_before = self.client.session.session_key
        form_data = {
            'old_password': 'password',
            'new_password1': 'password1',
            'new_password2': 'password1',
        }
        response = self.client.post('/password_change/', form_data)
        # if the hash isn't updated, retrieving the redirection page will fail.
        self.assertRedirects(response, '/password_change/done/')
        self.assertNotEqual(key_before, self.client.session.session_key)
class LoginTest(AuthViewsTestCase):
    """Tests for the login view: template context, ?next validation,
    CSRF token rotation, and session-key handling."""

    def test_current_site_in_context_after_login(self):
        response = self.client.get(reverse('login'))
        self.assertEqual(response.status_code, 200)
        if apps.is_installed('django.contrib.sites'):
            Site = apps.get_model('sites.Site')
            site = Site.objects.get_current()
            self.assertEqual(response.context['site'], site)
            self.assertEqual(response.context['site_name'], site.name)
        else:
            # Fallback when the sites framework isn't installed.
            self.assertIsInstance(response.context['site'], RequestSite)
        self.assertIsInstance(response.context['form'], AuthenticationForm)

    def test_security_check(self):
        """Open-redirect attempts in ?next are blocked; safe URLs pass."""
        login_url = reverse('login')
        # These URLs should not pass the security check.
        bad_urls = (
            'http://example.com',
            'http:///example.com',
            'https://example.com',
            'ftp://example.com',
            '///example.com',
            '//example.com',
            'javascript:alert("XSS")',
        )
        for bad_url in bad_urls:
            with self.subTest(bad_url=bad_url):
                nasty_url = '%(url)s?%(next)s=%(bad_url)s' % {
                    'url': login_url,
                    'next': REDIRECT_FIELD_NAME,
                    'bad_url': quote(bad_url),
                }
                response = self.client.post(nasty_url, {
                    'username': 'testclient',
                    'password': 'password',
                })
                self.assertEqual(response.status_code, 302)
                self.assertNotIn(bad_url, response.url, '%s should be blocked' % bad_url)
        # These URLs should pass the security check.
        good_urls = (
            '/view/?param=http://example.com',
            '/view/?param=https://example.com',
            '/view?param=ftp://example.com',
            'view/?param=//example.com',
            'https://testserver/',
            'HTTPS://testserver/',
            '//testserver/',
            '/url%20with%20spaces/',
        )
        for good_url in good_urls:
            with self.subTest(good_url=good_url):
                safe_url = '%(url)s?%(next)s=%(good_url)s' % {
                    'url': login_url,
                    'next': REDIRECT_FIELD_NAME,
                    'good_url': quote(good_url),
                }
                response = self.client.post(safe_url, {
                    'username': 'testclient',
                    'password': 'password',
                })
                self.assertEqual(response.status_code, 302)
                self.assertIn(good_url, response.url, '%s should be allowed' % good_url)

    def test_security_check_https(self):
        """An http:// ?next is rejected when logging in over https."""
        login_url = reverse('login')
        non_https_next_url = 'http://testserver/path'
        not_secured_url = '%(url)s?%(next)s=%(next_url)s' % {
            'url': login_url,
            'next': REDIRECT_FIELD_NAME,
            'next_url': quote(non_https_next_url),
        }
        post_data = {
            'username': 'testclient',
            'password': 'password',
        }
        response = self.client.post(not_secured_url, post_data, secure=True)
        self.assertEqual(response.status_code, 302)
        # Falls back to LOGIN_REDIRECT_URL instead of the insecure target.
        self.assertNotEqual(response.url, non_https_next_url)
        self.assertEqual(response.url, settings.LOGIN_REDIRECT_URL)

    def test_login_form_contains_request(self):
        # The custom authentication form used by this URLconf requires a
        # request to initialize; a successful login implies it received one.
        response = self.client.post('/custom_request_auth_login/', {
            'username': 'testclient',
            'password': 'password',
        })
        self.assertRedirects(response, settings.LOGIN_REDIRECT_URL, fetch_redirect_response=False)

    def test_login_csrf_rotate(self):
        """The CSRF token is rotated when a user logs in."""
        # GET the form: get_token() causes the CSRF cookie to be set.
        req = HttpRequest()
        CsrfViewMiddleware().process_view(req, LoginView.as_view(), (), {})
        get_token(req)
        resp = LoginView.as_view()(req)
        resp2 = CsrfViewMiddleware().process_response(req, resp)
        csrf_cookie = resp2.cookies.get(settings.CSRF_COOKIE_NAME, None)
        token1 = csrf_cookie.coded_value
        # POST the login form with that token; afterwards the cookie differs.
        req = HttpRequest()
        req.COOKIES[settings.CSRF_COOKIE_NAME] = token1
        req.method = "POST"
        req.POST = {'username': 'testclient', 'password': 'password', 'csrfmiddlewaretoken': token1}
        SessionMiddleware().process_request(req)
        CsrfViewMiddleware().process_view(req, LoginView.as_view(), (), {})
        req.META["SERVER_NAME"] = "testserver"
        req.META["SERVER_PORT"] = 80
        resp = LoginView.as_view()(req)
        resp2 = CsrfViewMiddleware().process_response(req, resp)
        csrf_cookie = resp2.cookies.get(settings.CSRF_COOKIE_NAME, None)
        token2 = csrf_cookie.coded_value
        self.assertNotEqual(token1, token2)

    def test_session_key_flushed_on_login(self):
        """Logging in as another user replaces the session key."""
        self.login()
        original_session_key = self.client.session.session_key
        self.login(username='staff')
        self.assertNotEqual(original_session_key, self.client.session.session_key)

    def test_session_key_flushed_on_login_after_password_change(self):
        """Re-login as the same user keeps the session key; a password
        change invalidates it."""
        self.login()
        original_session_key = self.client.session.session_key
        # Same user, unchanged password: key preserved.
        self.login()
        self.assertEqual(original_session_key, self.client.session.session_key)
        user = User.objects.get(username='testclient')
        user.set_password('foobar')
        user.save()
        self.login(password='foobar')
        self.assertNotEqual(original_session_key, self.client.session.session_key)

    def test_login_session_without_hash_session_key(self):
        """A pre-existing session lacking the auth hash is replaced at login."""
        user = User.objects.get(username='testclient')
        engine = import_module(settings.SESSION_ENGINE)
        session = engine.SessionStore()
        session[SESSION_KEY] = user.id
        session.save()
        original_session_key = session.session_key
        self.client.cookies[settings.SESSION_COOKIE_NAME] = original_session_key
        self.login()
        self.assertNotEqual(original_session_key, self.client.session.session_key)
class LoginURLSettings(AuthViewsTestCase):
    """All supported forms of the LOGIN_URL setting (path, URL name,
    remote URL, querystring, lazy) are honored when redirecting to login."""

    def assertLoginURLEquals(self, url):
        """Fetch a login-protected view and assert the redirect target."""
        response = self.client.get('/login_required/')
        self.assertRedirects(response, url, fetch_redirect_response=False)

    @override_settings(LOGIN_URL='/login/')
    def test_standard_login_url(self):
        self.assertLoginURLEquals('/login/?next=/login_required/')

    @override_settings(LOGIN_URL='login')
    def test_named_login_url(self):
        self.assertLoginURLEquals('/login/?next=/login_required/')

    @override_settings(LOGIN_URL='http://remote.example.com/login')
    def test_remote_login_url(self):
        # An absolute LOGIN_URL gets an absolute "next" URL.
        quoted_next = quote('http://testserver/login_required/')
        expected = 'http://remote.example.com/login?next=%s' % quoted_next
        self.assertLoginURLEquals(expected)

    @override_settings(LOGIN_URL='https:///login/')
    def test_https_login_url(self):
        quoted_next = quote('http://testserver/login_required/')
        expected = 'https:///login/?next=%s' % quoted_next
        self.assertLoginURLEquals(expected)

    @override_settings(LOGIN_URL='/login/?pretty=1')
    def test_login_url_with_querystring(self):
        # "next" is appended to the existing querystring.
        self.assertLoginURLEquals('/login/?pretty=1&next=/login_required/')

    @override_settings(LOGIN_URL='http://remote.example.com/login/?next=/default/')
    def test_remote_login_url_with_next_querystring(self):
        # An existing "next" in LOGIN_URL is replaced.
        quoted_next = quote('http://testserver/login_required/')
        expected = 'http://remote.example.com/login/?next=%s' % quoted_next
        self.assertLoginURLEquals(expected)

    @override_settings(LOGIN_URL=reverse_lazy('login'))
    def test_lazy_login_url(self):
        self.assertLoginURLEquals('/login/?next=/login_required/')
class LoginRedirectUrlTest(AuthViewsTestCase):
    """The LOGIN_REDIRECT_URL setting controls the post-login redirect."""

    def assertLoginRedirectURLEqual(self, url):
        """Log in and assert the resulting redirect target."""
        response = self.login()
        self.assertRedirects(response, url, fetch_redirect_response=False)

    def test_default(self):
        self.assertLoginRedirectURLEqual('/accounts/profile/')

    @override_settings(LOGIN_REDIRECT_URL='/custom/')
    def test_custom(self):
        self.assertLoginRedirectURLEqual('/custom/')

    @override_settings(LOGIN_REDIRECT_URL='password_reset')
    def test_named(self):
        # URL names are resolved.
        self.assertLoginRedirectURLEqual('/password_reset/')

    @override_settings(LOGIN_REDIRECT_URL='http://remote.example.com/welcome/')
    def test_remote(self):
        self.assertLoginRedirectURLEqual('http://remote.example.com/welcome/')
class RedirectToLoginTests(AuthViewsTestCase):
    """redirect_to_login() accepts lazy LOGIN_URLs and non-ASCII targets."""

    @override_settings(LOGIN_URL=reverse_lazy('login'))
    def test_redirect_to_login_with_lazy(self):
        response = redirect_to_login(next='/else/where/')
        self.assertEqual('/login/?next=/else/where/', response.url)

    @override_settings(LOGIN_URL=reverse_lazy('login'))
    def test_redirect_to_login_with_lazy_and_unicode(self):
        # The non-ASCII path segment is percent-encoded in the redirect.
        response = redirect_to_login(next='/else/where/झ/')
        self.assertEqual('/login/?next=/else/where/%E0%A4%9D/', response.url)
class LogoutThenLoginTests(AuthViewsTestCase):
    """Tests for the logout_then_login() view function."""

    def confirm_logged_out(self):
        self.assertNotIn(SESSION_KEY, self.client.session)

    def _build_get_request(self):
        # Minimal GET request carrying the test client's current session.
        request = HttpRequest()
        request.method = 'GET'
        request.session = self.client.session
        return request

    @override_settings(LOGIN_URL='/login/')
    def test_default_logout_then_login(self):
        self.login()
        response = logout_then_login(self._build_get_request())
        self.confirm_logged_out()
        self.assertRedirects(response, '/login/', fetch_redirect_response=False)

    def test_logout_then_login_with_custom_login(self):
        self.login()
        response = logout_then_login(self._build_get_request(), login_url='/custom/')
        self.confirm_logged_out()
        self.assertRedirects(response, '/custom/', fetch_redirect_response=False)
class LoginRedirectAuthenticatedUser(AuthViewsTestCase):
    """Behavior of LoginView's redirect_authenticated_user option."""

    # Login view with redirect_authenticated_user at its default (off).
    dont_redirect_url = '/login/redirect_authenticated_user_default/'
    # Login view with redirect_authenticated_user enabled.
    do_redirect_url = '/login/redirect_authenticated_user/'

    def test_default(self):
        """With the option off, an authenticated user still sees the form."""
        self.login()
        response = self.client.get(self.dont_redirect_url)
        self.assertEqual(response.status_code, 200)
        self.assertEqual(response.context['next'], '')

    def test_guest(self):
        """An anonymous user sees the login page."""
        response = self.client.get(self.do_redirect_url)
        self.assertEqual(response.status_code, 200)

    def test_redirect(self):
        """An authenticated user is redirected away from the login page."""
        self.login()
        response = self.client.get(self.do_redirect_url)
        self.assertRedirects(response, '/accounts/profile/', fetch_redirect_response=False)

    @override_settings(LOGIN_REDIRECT_URL='/custom/')
    def test_redirect_url(self):
        self.login()
        response = self.client.get(self.do_redirect_url)
        self.assertRedirects(response, '/custom/', fetch_redirect_response=False)

    def test_redirect_param(self):
        """?next takes precedence over LOGIN_REDIRECT_URL."""
        self.login()
        url = self.do_redirect_url + '?next=/custom_next/'
        response = self.client.get(url)
        self.assertRedirects(response, '/custom_next/', fetch_redirect_response=False)

    def test_redirect_loop(self):
        """A LOGIN_REDIRECT_URL pointing back at the login page raises."""
        self.login()
        msg = (
            "Redirection loop for authenticated user detected. Check that "
            "your LOGIN_REDIRECT_URL doesn't point to a login page"
        )
        with self.settings(LOGIN_REDIRECT_URL=self.do_redirect_url):
            with self.assertRaisesMessage(ValueError, msg):
                self.client.get(self.do_redirect_url)
            url = self.do_redirect_url + '?bla=2'
            with self.assertRaisesMessage(ValueError, msg):
                self.client.get(url)

    def test_permission_required_not_logged_in(self):
        # Not logged in ...
        with self.settings(LOGIN_URL=self.do_redirect_url):
            # redirected to login.
            response = self.client.get('/permission_required_redirect/', follow=True)
            self.assertEqual(response.status_code, 200)
            # exception raised.
            response = self.client.get('/permission_required_exception/', follow=True)
            self.assertEqual(response.status_code, 403)
            # redirected to login.
            response = self.client.get('/login_and_permission_required_exception/', follow=True)
            self.assertEqual(response.status_code, 200)

    def test_permission_required_logged_in(self):
        self.login()
        # Already logged in...
        with self.settings(LOGIN_URL=self.do_redirect_url):
            # redirect loop encountered.
            with self.assertRaisesMessage(RedirectCycleError, 'Redirect loop detected.'):
                self.client.get('/permission_required_redirect/', follow=True)
            # exception raised.
            response = self.client.get('/permission_required_exception/', follow=True)
            self.assertEqual(response.status_code, 403)
            # exception raised.
            response = self.client.get('/login_and_permission_required_exception/', follow=True)
            self.assertEqual(response.status_code, 403)
class LoginSuccessURLAllowedHostsTest(AuthViewsTestCase):
    """The login view's success_url_allowed_hosts option controls which
    cross-host ?next targets are honored."""

    def _login_with_next(self, next_url):
        # Log in through the allowed-hosts login view with a given target.
        return self.client.post('/login/allowed_hosts/', {
            'username': 'testclient',
            'password': 'password',
            'next': next_url,
        })

    def test_success_url_allowed_hosts_same_host(self):
        response = self._login_with_next('https://testserver/home')
        self.assertIn(SESSION_KEY, self.client.session)
        self.assertRedirects(response, 'https://testserver/home', fetch_redirect_response=False)

    def test_success_url_allowed_hosts_safe_host(self):
        response = self._login_with_next('https://otherserver/home')
        self.assertIn(SESSION_KEY, self.client.session)
        self.assertRedirects(response, 'https://otherserver/home', fetch_redirect_response=False)

    def test_success_url_allowed_hosts_unsafe_host(self):
        # An unlisted host falls back to the default redirect.
        response = self._login_with_next('https://evil/home')
        self.assertIn(SESSION_KEY, self.client.session)
        self.assertRedirects(response, '/accounts/profile/', fetch_redirect_response=False)
class LogoutTest(AuthViewsTestCase):
    """Tests for the logout view: templates, redirects, ?next validation,
    caching, and language-cookie preservation."""

    def confirm_logged_out(self):
        self.assertNotIn(SESSION_KEY, self.client.session)

    def test_logout_default(self):
        """Logout without next_page renders the logged-out template."""
        self.login()
        response = self.client.get('/logout/')
        self.assertContains(response, 'Logged out')
        self.confirm_logged_out()

    def test_logout_with_post(self):
        self.login()
        response = self.client.post('/logout/')
        self.assertContains(response, 'Logged out')
        self.confirm_logged_out()

    def test_14377(self):
        # Bug 14377: 'site' is available in the logout template context.
        self.login()
        response = self.client.get('/logout/')
        self.assertIn('site', response.context)

    def test_logout_doesnt_cache(self):
        """The logout view sends "no-store" to prevent caching."""
        response = self.client.get('/logout/')
        self.assertIn('no-store', response['Cache-Control'])

    def test_logout_with_overridden_redirect_url(self):
        # Bug 11223: ?next overrides the view's next_page argument.
        self.login()
        response = self.client.get('/logout/next_page/')
        self.assertRedirects(response, '/somewhere/', fetch_redirect_response=False)
        response = self.client.get('/logout/next_page/?next=/login/')
        self.assertRedirects(response, '/login/', fetch_redirect_response=False)
        self.confirm_logged_out()

    def test_logout_with_next_page_specified(self):
        """Logout with next_page redirects to the specified page."""
        self.login()
        response = self.client.get('/logout/next_page/')
        self.assertRedirects(response, '/somewhere/', fetch_redirect_response=False)
        self.confirm_logged_out()

    def test_logout_with_redirect_argument(self):
        """Logout with a ?next querystring redirects to that page."""
        self.login()
        response = self.client.get('/logout/?next=/login/')
        self.assertRedirects(response, '/login/', fetch_redirect_response=False)
        self.confirm_logged_out()

    def test_logout_with_custom_redirect_argument(self):
        """Logout with a custom redirect field name honors it."""
        self.login()
        response = self.client.get('/logout/custom_query/?follow=/somewhere/')
        self.assertRedirects(response, '/somewhere/', fetch_redirect_response=False)
        self.confirm_logged_out()

    def test_logout_with_named_redirect(self):
        """Logout resolves a URL name passed as next_page."""
        self.login()
        response = self.client.get('/logout/next_page/named/')
        self.assertRedirects(response, '/password_reset/', fetch_redirect_response=False)
        self.confirm_logged_out()

    def test_success_url_allowed_hosts_same_host(self):
        self.login()
        response = self.client.get('/logout/allowed_hosts/?next=https://testserver/')
        self.assertRedirects(response, 'https://testserver/', fetch_redirect_response=False)
        self.confirm_logged_out()

    def test_success_url_allowed_hosts_safe_host(self):
        self.login()
        response = self.client.get('/logout/allowed_hosts/?next=https://otherserver/')
        self.assertRedirects(response, 'https://otherserver/', fetch_redirect_response=False)
        self.confirm_logged_out()

    def test_success_url_allowed_hosts_unsafe_host(self):
        self.login()
        response = self.client.get('/logout/allowed_hosts/?next=https://evil/')
        self.assertRedirects(response, '/logout/allowed_hosts/', fetch_redirect_response=False)
        self.confirm_logged_out()

    def test_security_check(self):
        """Open-redirect attempts in ?next are blocked; safe URLs pass."""
        logout_url = reverse('logout')
        # These URLs should not pass the security check.
        bad_urls = (
            'http://example.com',
            'http:///example.com',
            'https://example.com',
            'ftp://example.com',
            '///example.com',
            '//example.com',
            'javascript:alert("XSS")',
        )
        for bad_url in bad_urls:
            with self.subTest(bad_url=bad_url):
                nasty_url = '%(url)s?%(next)s=%(bad_url)s' % {
                    'url': logout_url,
                    'next': REDIRECT_FIELD_NAME,
                    'bad_url': quote(bad_url),
                }
                self.login()
                response = self.client.get(nasty_url)
                self.assertEqual(response.status_code, 302)
                self.assertNotIn(bad_url, response.url, '%s should be blocked' % bad_url)
                self.confirm_logged_out()
        # These URLs should pass the security check.
        good_urls = (
            '/view/?param=http://example.com',
            '/view/?param=https://example.com',
            '/view?param=ftp://example.com',
            'view/?param=//example.com',
            'https://testserver/',
            'HTTPS://testserver/',
            '//testserver/',
            '/url%20with%20spaces/',
        )
        for good_url in good_urls:
            with self.subTest(good_url=good_url):
                safe_url = '%(url)s?%(next)s=%(good_url)s' % {
                    'url': logout_url,
                    'next': REDIRECT_FIELD_NAME,
                    'good_url': quote(good_url),
                }
                self.login()
                response = self.client.get(safe_url)
                self.assertEqual(response.status_code, 302)
                self.assertIn(good_url, response.url, '%s should be allowed' % good_url)
                self.confirm_logged_out()

    def test_security_check_https(self):
        """An http:// ?next is rejected when logging out over https."""
        logout_url = reverse('logout')
        non_https_next_url = 'http://testserver/'
        url = '%(url)s?%(next)s=%(next_url)s' % {
            'url': logout_url,
            'next': REDIRECT_FIELD_NAME,
            'next_url': quote(non_https_next_url),
        }
        self.login()
        response = self.client.get(url, secure=True)
        self.assertRedirects(response, logout_url, fetch_redirect_response=False)
        self.confirm_logged_out()

    def test_logout_preserve_language(self):
        """The language cookie survives a logout."""
        self.login()
        self.client.post('/setlang/', {'language': 'pl'})
        self.assertEqual(self.client.cookies[settings.LANGUAGE_COOKIE_NAME].value, 'pl')
        self.client.get('/logout/')
        self.assertEqual(self.client.cookies[settings.LANGUAGE_COOKIE_NAME].value, 'pl')

    @override_settings(LOGOUT_REDIRECT_URL='/custom/')
    def test_logout_redirect_url_setting(self):
        self.login()
        response = self.client.get('/logout/')
        self.assertRedirects(response, '/custom/', fetch_redirect_response=False)

    @override_settings(LOGOUT_REDIRECT_URL='logout')
    def test_logout_redirect_url_named_setting(self):
        self.login()
        response = self.client.get('/logout/')
        self.assertRedirects(response, '/logout/', fetch_redirect_response=False)
def get_perm(Model, perm):
    """Return the Permission with codename *perm* for the given model class."""
    content_type = ContentType.objects.get_for_model(Model)
    return Permission.objects.get(codename=perm, content_type=content_type)
# Redirect in test_user_change_password will fail if session auth hash
# isn't updated after password change (#21649).
@override_settings(ROOT_URLCONF='auth_tests.urls_admin')
class ChangelistTests(AuthViewsTestCase):
    """Admin changelist/change-form behaviour for the User model."""
    @classmethod
    def setUpTestData(cls):
        super().setUpTestData()
        # Promote the fixture user so it may access the admin in these tests.
        User.objects.filter(username='testclient').update(is_staff=True, is_superuser=True)
    def setUp(self):
        self.login()
        self.admin = User.objects.get(pk=self.u1.pk)
    def get_user_data(self, user):
        """Return a POST payload mirroring the admin change-form fields for *user*.

        The ``*_0``/``*_1`` pairs are the admin's split date/time widgets; the
        ``initial-*`` copies let the admin detect which fields actually changed.
        """
        return {
            'username': user.username,
            'password': user.password,
            'email': user.email,
            'is_active': user.is_active,
            'is_staff': user.is_staff,
            'is_superuser': user.is_superuser,
            'last_login_0': user.last_login.strftime('%Y-%m-%d'),
            'last_login_1': user.last_login.strftime('%H:%M:%S'),
            'initial-last_login_0': user.last_login.strftime('%Y-%m-%d'),
            'initial-last_login_1': user.last_login.strftime('%H:%M:%S'),
            'date_joined_0': user.date_joined.strftime('%Y-%m-%d'),
            'date_joined_1': user.date_joined.strftime('%H:%M:%S'),
            'initial-date_joined_0': user.date_joined.strftime('%Y-%m-%d'),
            'initial-date_joined_1': user.date_joined.strftime('%H:%M:%S'),
            'first_name': user.first_name,
            'last_name': user.last_name,
        }
elist_disallows_password_lookups(self):
# A lookup that tries to filter on password isn't OK
with self.assertLogs('django.security.DisallowedModelAdminLookup', 'ERROR'):
response = self.client.get(reverse('auth_test_admin:auth_user_changelist') + '?password__startswith=sha1$')
self.assertEqual(response.status_code, 400)
    def test_user_change_email(self):
        """Changing only the email logs a 'Changed email.' entry."""
        data = self.get_user_data(self.admin)
        data['email'] = 'new_' + data['email']
        response = self.client.post(
            reverse('auth_test_admin:auth_user_change', args=(self.admin.pk,)),
            data
        )
        self.assertRedirects(response, reverse('auth_test_admin:auth_user_changelist'))
        row = LogEntry.objects.latest('id')
        self.assertEqual(row.get_change_message(), 'Changed email.')
    def test_user_not_change(self):
        """Submitting the unchanged form logs 'No fields changed.'."""
        response = self.client.post(
            reverse('auth_test_admin:auth_user_change', args=(self.admin.pk,)),
            self.get_user_data(self.admin)
        )
        self.assertRedirects(response, reverse('auth_test_admin:auth_user_changelist'))
        row = LogEntry.objects.latest('id')
        self.assertEqual(row.get_change_message(), 'No fields changed.')
    def test_user_change_password(self):
        """The change form links to the password form; posting it updates the password."""
        user_change_url = reverse('auth_test_admin:auth_user_change', args=(self.admin.pk,))
        password_change_url = reverse('auth_test_admin:auth_user_password_change', args=(self.admin.pk,))
        response = self.client.get(user_change_url)
        # Extract the relative password-form link embedded in the rendered page.
        rel_link = re.search(
            r'you can change the password using <a href="([^"]*)">this form</a>',
            response.content.decode()
        ).groups()[0]
        self.assertEqual(
            os.path.normpath(user_change_url + rel_link),
            os.path.normpath(password_change_url)
        )
        response = self.client.post(
            password_change_url,
            {
                'password1': 'password1',
                'password2': 'password1',
            }
        )
        self.assertRedirects(response, user_change_url)
        row = LogEntry.objects.latest('id')
        self.assertEqual(row.get_change_message(), 'Changed password.')
        # Logging back in with the new password proves the change took effect.
        self.logout()
        self.login(password='password1')
    def test_user_change_different_user_password(self):
        """An admin changing another user's password is attributed correctly in the log."""
        u = User.objects.get(email='staffmember@example.com')
        response = self.client.post(
            reverse('auth_test_admin:auth_user_password_change', args=(u.pk,)),
            {
                'password1': 'password1',
                'password2': 'password1',
            }
        )
        self.assertRedirects(response, reverse('auth_test_admin:auth_user_change', args=(u.pk,)))
        row = LogEntry.objects.latest('id')
        self.assertEqual(row.user_id, self.admin.pk)
        self.assertEqual(row.object_id, str(u.pk))
        self.assertEqual(row.get_change_message(), 'Changed password.')
    def test_password_change_bad_url(self):
        """A non-numeric user pk in the password-change URL yields a 404."""
        response = self.client.get(reverse('auth_test_admin:auth_user_password_change', args=('foobar',)))
        self.assertEqual(response.status_code, 404)
    @mock.patch('django.contrib.auth.admin.UserAdmin.has_change_permission')
    def test_user_change_password_passes_user_to_has_change_permission(self, has_change_permission):
        """has_change_permission receives the target user as its second argument."""
        url = reverse('auth_test_admin:auth_user_password_change', args=(self.admin.pk,))
        self.client.post(url, {'password1': 'password1', 'password2': 'password1'})
        (_request, user), _kwargs = has_change_permission.call_args
        self.assertEqual(user.pk, self.admin.pk)
    def test_view_user_password_is_readonly(self):
        """With view-only permission the password is rendered masked and cannot be changed."""
        u = User.objects.get(username='testclient')
        u.is_superuser = False
        u.save()
        original_password = u.password
        u.user_permissions.add(get_perm(User, 'view_user'))
        response = self.client.get(reverse('auth_test_admin:auth_user_change', args=(u.pk,)),)
        algo, salt, hash_string = (u.password.split('$'))
        self.assertContains(response, '<div class="readonly">testclient</div>')
        # ReadOnlyPasswordHashWidget is used to render the field.
        self.assertContains(
            response,
            '<strong>algorithm</strong>: %s\n\n'
            '<strong>salt</strong>: %s**********\n\n'
            '<strong>hash</strong>: %s**************************\n\n' % (
                algo, salt[:2], hash_string[:6],
            ),
            html=True,
        )
        # Value in POST data is ignored.
        data = self.get_user_data(u)
        data['password'] = 'shouldnotchange'
        change_url = reverse('auth_test_admin:auth_user_change', args=(u.pk,))
        response = self.client.post(change_url, data)
        self.assertRedirects(response, reverse('auth_test_admin:auth_user_changelist'))
        u.refresh_from_db()
        self.assertEqual(u.password, original_password)
@override_settings(
    AUTH_USER_MODEL='auth_tests.UUIDUser',
    ROOT_URLCONF='auth_tests.urls_custom_user_admin',
)
class UUIDUserTests(TestCase):
    """Admin password change for a custom user model with a UUID primary key."""
    def test_admin_password_change(self):
        u = UUIDUser.objects.create_superuser(username='uuid', email='foo@bar.com', password='test')
        self.assertTrue(self.client.login(username='uuid', password='test'))
        user_change_url = reverse('custom_user_admin:auth_tests_uuiduser_change', args=(u.pk,))
        response = self.client.get(user_change_url)
        self.assertEqual(response.status_code, 200)
        password_change_url = reverse('custom_user_admin:auth_user_password_change', args=(u.pk,))
        response = self.client.get(password_change_url)
        self.assertEqual(response.status_code, 200)
        # A LogEntry is created with pk=1 which breaks a FK constraint on MySQL
        with connection.constraint_checks_disabled():
            response = self.client.post(password_change_url, {
                'password1': 'password1',
                'password2': 'password1',
            })
        self.assertRedirects(response, user_change_url)
        row = LogEntry.objects.latest('id')
        self.assertEqual(row.user_id, 1)  # hardcoded in CustomUserAdmin.log_change()
        self.assertEqual(row.object_id, str(u.pk))
        self.assertEqual(row.get_change_message(), 'Changed password.')
        # The LogEntry.user column isn't altered to a UUID type so it's set to
        # an integer manually in CustomUserAdmin to avoid an error. To avoid a
        # constraint error, delete the entry before constraints are checked
        # after the test.
        row.delete()
| true | true |
f738a7c9d738f2e45ab035ef9b7e3a7f6537a9a5 | 1,620 | py | Python | tests/registration/warnings_test.py | martvanrijthoven/creationism | 510040dc4f3cd622c48006318e3d291a66f5335f | [
"MIT"
] | null | null | null | tests/registration/warnings_test.py | martvanrijthoven/creationism | 510040dc4f3cd622c48006318e3d291a66f5335f | [
"MIT"
] | 1 | 2021-07-29T18:20:54.000Z | 2021-07-29T18:20:54.000Z | tests/warnings_test.py | martvanrijthoven/strar | d58a78eabd758778b9320b08302da5ab46f3e0a8 | [
"MIT"
] | null | null | null | from pytest import warns
from strar.registration import Registrar
from strar.utils import Text, chain_functions
from strar.warnings import DuplicateRegistrantNameWarning
class TestWarnings:
    """Tests for Registrar's duplicate-registrant-name warning behaviour."""
    def test_no_replace(self):
        """Registering a duplicate name with REPLACE disabled emits a warning."""
        with warns(DuplicateRegistrantNameWarning):
            class A(Registrar):
                AUTO = False
                CONVERT_NAME = lambda x: x
                REPLACE = False
            @A.register(("suba", "suba"))
            class SubA(A):
                pass
        with warns(DuplicateRegistrantNameWarning):
            class A(Registrar):
                AUTO = True
                CONVERT_NAME = lambda x: chain_functions(
                    x, Text.split_capitals_with_underscore, Text.lower
                )
                REPLACE = False
            # AUTO registration already claimed 'sub_a'; the explicit
            # registration below collides with it.
            @A.register(("sub_a",))
            class SubA(A):
                pass
    def test_replace(self):
        """Registering with REPLACE enabled (or replace=True) emits no warning."""
        # pytest.warns(None) is deprecated since pytest 7 (removed in 8);
        # record warnings with the stdlib and assert none were raised.
        import warnings
        with warnings.catch_warnings(record=True) as record:
            warnings.simplefilter("always")
            class A(Registrar):
                AUTO = False
                CONVERT_NAME = lambda x: chain_functions(
                    x, Text.split_capitals_with_underscore, Text.lower
                )
                REPLACE = True
            @A.register(("sub_a",))
            class SubA(A):
                pass
        assert len(record) == 0
        with warnings.catch_warnings(record=True) as record:
            warnings.simplefilter("always")
            class A(Registrar):
                AUTO = False
                CONVERT_NAME = lambda x: x
                REPLACE = True
            @A.register(("suba", "suba"), replace=True)
            class SubA(A):
                pass
        assert len(record) == 0
| 26.129032 | 70 | 0.509259 | from pytest import warns
from strar.registration import Registrar
from strar.utils import Text, chain_functions
from strar.warnings import DuplicateRegistrantNameWarning
class TestWarnings:
def test_no_replace(self):
with warns(DuplicateRegistrantNameWarning):
class A(Registrar):
AUTO = False
CONVERT_NAME = lambda x: x
REPLACE = False
@A.register(("suba", "suba"))
class SubA(A):
pass
with warns(DuplicateRegistrantNameWarning):
class A(Registrar):
AUTO = True
CONVERT_NAME = lambda x: chain_functions(
x, Text.split_capitals_with_underscore, Text.lower
)
REPLACE = False
@A.register(("sub_a",))
class SubA(A):
pass
def test_replace(self):
with warns(None) as record:
class A(Registrar):
AUTO = False
CONVERT_NAME = lambda x: chain_functions(
x, Text.split_capitals_with_underscore, Text.lower
)
REPLACE = True
@A.register(("sub_a",))
class SubA(A):
pass
assert len(record) == 0
with warns(None) as record:
class A(Registrar):
AUTO = False
CONVERT_NAME = lambda x: x
REPLACE = True
@A.register(("suba", "suba"), replace=True)
class SubA(A):
pass
assert len(record) == 0
| true | true |
f738a8260fe5c9038d920145eecc1199373d4207 | 1,292 | py | Python | backend-project/small_eod/cases/migrations/0012_auto_20200527_1127.py | WlodzimierzKorza/small_eod | 027022bd71122a949a2787d0fb86518df80e48cd | [
"MIT"
] | 64 | 2019-12-30T11:24:03.000Z | 2021-06-24T01:04:56.000Z | backend-project/small_eod/cases/migrations/0012_auto_20200527_1127.py | WlodzimierzKorza/small_eod | 027022bd71122a949a2787d0fb86518df80e48cd | [
"MIT"
] | 465 | 2018-06-13T21:43:43.000Z | 2022-01-04T23:33:56.000Z | backend-project/small_eod/cases/migrations/0012_auto_20200527_1127.py | WlodzimierzKorza/small_eod | 027022bd71122a949a2787d0fb86518df80e48cd | [
"MIT"
] | 72 | 2018-12-02T19:47:03.000Z | 2022-01-04T22:54:49.000Z | # Generated by Django 3.0.6 on 2020-05-27 11:27
from django.conf import settings
from django.db import migrations, models
class Migration(migrations.Migration):
    """Replace Case.comments with a bounded Case.comment field and retune M2Ms.

    Historical migration: do not edit the operations (including the
    'Users who is responsible' help_text typo) — fix such strings in a
    new migration instead.
    """
    dependencies = [
        migrations.swappable_dependency(settings.AUTH_USER_MODEL),
        ('cases', '0011_auto_20200420_0717'),
    ]
    operations = [
        # Drop the old free-form field...
        migrations.RemoveField(
            model_name='case',
            name='comments',
        ),
        # ...and introduce a 256-char single comment in its place.
        migrations.AddField(
            model_name='case',
            name='comment',
            field=models.CharField(blank=True, help_text='Comment for this case.', max_length=256, verbose_name='Comment'),
        ),
        migrations.AlterField(
            model_name='case',
            name='notified_users',
            field=models.ManyToManyField(blank=True, help_text='Users who will receive notifications.', related_name='notified_about_case_set', to=settings.AUTH_USER_MODEL, verbose_name='Notified users'),
        ),
        migrations.AlterField(
            model_name='case',
            name='responsible_users',
            field=models.ManyToManyField(blank=True, help_text='Users who is responsible for this case.', related_name='responsible_for_case_set', to=settings.AUTH_USER_MODEL, verbose_name='Responsible users'),
        ),
    ]
| 36.914286 | 210 | 0.652477 |
from django.conf import settings
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
migrations.swappable_dependency(settings.AUTH_USER_MODEL),
('cases', '0011_auto_20200420_0717'),
]
operations = [
migrations.RemoveField(
model_name='case',
name='comments',
),
migrations.AddField(
model_name='case',
name='comment',
field=models.CharField(blank=True, help_text='Comment for this case.', max_length=256, verbose_name='Comment'),
),
migrations.AlterField(
model_name='case',
name='notified_users',
field=models.ManyToManyField(blank=True, help_text='Users who will receive notifications.', related_name='notified_about_case_set', to=settings.AUTH_USER_MODEL, verbose_name='Notified users'),
),
migrations.AlterField(
model_name='case',
name='responsible_users',
field=models.ManyToManyField(blank=True, help_text='Users who is responsible for this case.', related_name='responsible_for_case_set', to=settings.AUTH_USER_MODEL, verbose_name='Responsible users'),
),
]
| true | true |
f738a85b9d72c89ed626b92eb71969493f9198ba | 133 | py | Python | BackEnd/lib/Modules/joke.py | nekumelon/Jarvis | e3256d5f20c8d1bb6f2954b9ca300018e43768cd | [
"MIT"
] | null | null | null | BackEnd/lib/Modules/joke.py | nekumelon/Jarvis | e3256d5f20c8d1bb6f2954b9ca300018e43768cd | [
"MIT"
] | null | null | null | BackEnd/lib/Modules/joke.py | nekumelon/Jarvis | e3256d5f20c8d1bb6f2954b9ca300018e43768cd | [
"MIT"
] | null | null | null | import requests, json, pyjokes
def joke(textToSpeech, phrases, text, server):
    """Fetch a random joke with pyjokes and speak it via *textToSpeech*.

    phrases, text and server are unused here; presumably part of a shared
    command-module callback signature — TODO confirm against the dispatcher.
    """
    joke = pyjokes.get_joke();
    textToSpeech(joke); | 26.6 | 46 | 0.721805 | import requests, json, pyjokes
def joke(textToSpeech, phrases, text, server):
joke = pyjokes.get_joke();
textToSpeech(joke); | true | true |
f738a88472d70064fb2347b65d5b17a59db71134 | 2,988 | py | Python | main.py | lacygoill/inkscape-shortcut-manager | 3c2eacd9573a854c5c8fec449f524c6e680dcc68 | [
"MIT"
] | 1 | 2019-04-27T18:23:46.000Z | 2019-04-27T18:23:46.000Z | main.py | lacygoill/inkscape-shortcut-manager | 3c2eacd9573a854c5c8fec449f524c6e680dcc68 | [
"MIT"
] | null | null | null | main.py | lacygoill/inkscape-shortcut-manager | 3c2eacd9573a854c5c8fec449f524c6e680dcc68 | [
"MIT"
] | 1 | 2021-03-09T16:19:57.000Z | 2021-03-09T16:19:57.000Z | import threading
import Xlib
from Xlib.display import Display
from Xlib import X, XK
from Xlib.protocol import event
from normal import normal_mode
class Manager():
    """Per-window keyboard manager for one Inkscape X11 window.

    Grabs every key on the target window and feeds each key event to the
    current mode handler (starting in ``normal_mode``).
    """
    def __init__(self, inkscape_id):
        self.id = inkscape_id
        self.disp = Display()
        self.screen = self.disp.screen()
        self.root = self.screen.root
        self.inkscape = self.disp.create_resource_object('window', inkscape_id)
        # Mode handler, called as mode(manager, event, char).
        self.mode = normal_mode
    def event(self, name, detail, state):
        """Build a synthetic key event of class *name* targeted at Inkscape."""
        return name(
            time=X.CurrentTime,
            root=self.root,
            window=self.inkscape,
            same_screen=0, child=Xlib.X.NONE,
            root_x=0, root_y=0, event_x=0, event_y=0,
            state=state,
            detail=detail
        )
    def string_to_keycode(self, key):
        """Translate a keysym name (e.g. 'a', 'Super_L') to an X keycode."""
        keysym = XK.string_to_keysym(key)
        keycode = self.disp.keysym_to_keycode(keysym)
        return keycode
    def press(self, key, mask=X.NONE):
        """Send a synthetic press+release of *key* (with modifier *mask*) to Inkscape."""
        keycode = self.string_to_keycode(key)
        self.inkscape.send_event(self.event(event.KeyPress, keycode, mask), propagate=True)
        self.inkscape.send_event(self.event(event.KeyRelease, keycode, mask), propagate=True)
        self.disp.flush()
        self.disp.sync()
    def grab(self):
        """Grab all keys on the Inkscape window and subscribe to key/structure events."""
        self.inkscape.grab_key(X.AnyKey, X.AnyModifier, True, X.GrabModeAsync, X.GrabModeAsync)
        # Ungrab window manager shortcuts (Super + ...)
        self.inkscape.ungrab_key(self.string_to_keycode('Super_L'), X.AnyModifier, True)
        self.inkscape.change_attributes(event_mask=X.KeyReleaseMask | X.KeyPressMask | X.StructureNotifyMask)
    def ungrab(self):
        """Release the key grab established by grab()."""
        self.inkscape.ungrab_key(X.AnyKey, X.AnyModifier, True)
    def listen(self):
        """Event loop: dispatch key events to the mode handler until the window is destroyed."""
        self.grab()
        while True:
            evt = self.disp.next_event()
            if evt.type in [X.KeyPress, X.KeyRelease]:
                keycode = evt.detail
                keysym = self.disp.keycode_to_keysym(keycode, 0)
                char = XK.keysym_to_string(keysym)
                # ReplayKeyboard appears intended to let the original event
                # through to Inkscape as well — TODO confirm.
                self.disp.allow_events(X.ReplayKeyboard, X.CurrentTime)
                self.mode(self, evt, char)
            if evt.type == X.DestroyNotify:
                if evt.window.id == self.id:
                    self.ungrab()
                    return
def create(inkscape_id):
    """Run a dedicated Manager event loop for one Inkscape window."""
    Manager(inkscape_id).listen()
def main():
    """Watch for new Inkscape windows and attach a Manager thread to each."""
    disp = Display()
    screen = disp.screen()
    root = screen.root
    # Be notified whenever a child window of the root is created.
    root.change_attributes(event_mask=X.SubstructureNotifyMask)
    while True:
        evt = disp.next_event()
        if evt.type == X.CreateNotify:
            window = evt.window
            try:
                if window.get_wm_class() and window.get_wm_class()[0] == 'inkscape':
                    print('Listening!')
                    listen = threading.Thread(target=create, args=[window.id])
                    listen.start()
            except Xlib.error.BadWindow:
                # The window may disappear before we query it; ignore.
                pass
if __name__ == '__main__':
    main()
| 32.478261 | 109 | 0.602744 | import threading
import Xlib
from Xlib.display import Display
from Xlib import X, XK
from Xlib.protocol import event
from normal import normal_mode
class Manager():
def __init__(self, inkscape_id):
self.id = inkscape_id
self.disp = Display()
self.screen = self.disp.screen()
self.root = self.screen.root
self.inkscape = self.disp.create_resource_object('window', inkscape_id)
self.mode = normal_mode
def event(self, name, detail, state):
return name(
time=X.CurrentTime,
root=self.root,
window=self.inkscape,
same_screen=0, child=Xlib.X.NONE,
root_x=0, root_y=0, event_x=0, event_y=0,
state=state,
detail=detail
)
def string_to_keycode(self, key):
keysym = XK.string_to_keysym(key)
keycode = self.disp.keysym_to_keycode(keysym)
return keycode
def press(self, key, mask=X.NONE):
keycode = self.string_to_keycode(key)
self.inkscape.send_event(self.event(event.KeyPress, keycode, mask), propagate=True)
self.inkscape.send_event(self.event(event.KeyRelease, keycode, mask), propagate=True)
self.disp.flush()
self.disp.sync()
def grab(self):
self.inkscape.grab_key(X.AnyKey, X.AnyModifier, True, X.GrabModeAsync, X.GrabModeAsync)
self.inkscape.ungrab_key(self.string_to_keycode('Super_L'), X.AnyModifier, True)
self.inkscape.change_attributes(event_mask=X.KeyReleaseMask | X.KeyPressMask | X.StructureNotifyMask)
def ungrab(self):
self.inkscape.ungrab_key(X.AnyKey, X.AnyModifier, True)
def listen(self):
self.grab()
while True:
evt = self.disp.next_event()
if evt.type in [X.KeyPress, X.KeyRelease]:
keycode = evt.detail
keysym = self.disp.keycode_to_keysym(keycode, 0)
char = XK.keysym_to_string(keysym)
self.disp.allow_events(X.ReplayKeyboard, X.CurrentTime)
self.mode(self, evt, char)
if evt.type == X.DestroyNotify:
if evt.window.id == self.id:
self.ungrab()
return
def create(inkscape_id):
m = Manager(inkscape_id)
m.listen()
def main():
disp = Display()
screen = disp.screen()
root = screen.root
root.change_attributes(event_mask=X.SubstructureNotifyMask)
while True:
evt = disp.next_event()
if evt.type == X.CreateNotify:
window = evt.window
try:
if window.get_wm_class() and window.get_wm_class()[0] == 'inkscape':
print('Listening!')
listen = threading.Thread(target=create, args=[window.id])
listen.start()
except Xlib.error.BadWindow:
pass
if __name__ == '__main__':
main()
| true | true |
f738a8e813dd9ace1fbe232f9e27b602958b8c38 | 4,062 | py | Python | virtual/lib/python3.10/site-packages/bootstrap_py/commands.py | alex-mu/Moringa-blog | 430ab9c1f43f2f0066369433ac3f60c41a51a01c | [
"MIT"
] | null | null | null | virtual/lib/python3.10/site-packages/bootstrap_py/commands.py | alex-mu/Moringa-blog | 430ab9c1f43f2f0066369433ac3f60c41a51a01c | [
"MIT"
] | 7 | 2021-03-30T14:10:56.000Z | 2022-03-12T00:43:13.000Z | virtual/lib/python3.6/site-packages/bootstrap_py/commands.py | sarahsindet/pitch | c7a4256e19c9a250b6d88d085699a34f508eb86b | [
"Unlicense",
"MIT"
] | 1 | 2021-08-19T06:07:23.000Z | 2021-08-19T06:07:23.000Z | # -*- coding: utf-8 -*-
"""bootstrap_py.commands."""
import os
import sys
import re
import argparse
from bootstrap_py import control, __prog__, __version__
from bootstrap_py.update import Update
from bootstrap_py.exceptions import BackendFailure, Conflict
def setoption(parser, metadata=None):
    """Set argument parser option.

    Registers the ``create`` and ``list`` sub-commands on *parser*.
    NOTE(review): *metadata* defaults to None but ``metadata.licenses()``
    and ``metadata.status()`` are called unconditionally below, so callers
    must always pass a metadata object.
    """
    parser.add_argument('-v', action='version',
                        version=__version__)
    subparsers = parser.add_subparsers(help='sub commands help')
    create_cmd = subparsers.add_parser('create')
    create_cmd.add_argument('name',
                            help='Specify Python package name.')
    create_cmd.add_argument('-d', dest='description', action='store',
                            help='Short description about your package.')
    create_cmd.add_argument('-a', dest='author', action='store',
                            required=True,
                            help='Python package author name.')
    create_cmd.add_argument('-e', dest='email', action='store',
                            required=True,
                            help='Python package author email address.')
    create_cmd.add_argument('-l', dest='license',
                            choices=metadata.licenses().keys(),
                            default='GPLv3+',
                            help='Specify license. (default: %(default)s)')
    create_cmd.add_argument('-s', dest='status',
                            choices=metadata.status().keys(),
                            default='Alpha',
                            help=('Specify development status. '
                                  '(default: %(default)s)'))
    create_cmd.add_argument('--no-check', action='store_true',
                            help='No checking package name in PyPI.')
    create_cmd.add_argument('--with-samples', action='store_true',
                            help='Generate package with sample code.')
    # Exactly one of GitHub username or explicit homepage URL is required.
    group = create_cmd.add_mutually_exclusive_group(required=True)
    group.add_argument('-U', dest='username', action='store',
                       help='Specify GitHub username.')
    group.add_argument('-u', dest='url', action='store', type=valid_url,
                       help='Python package homepage url.')
    create_cmd.add_argument('-o', dest='outdir', action='store',
                            default=os.path.abspath(os.path.curdir),
                            help='Specify output directory. (default: $PWD)')
    list_cmd = subparsers.add_parser('list')
    list_cmd.add_argument('-l', dest='licenses', action='store_true',
                          help='show license choices.')
def valid_url(url):
    """Validate a package homepage URL.

    :rtype: str
    :return: *url* unchanged when it looks like an http(s) URL
    :param str url: package homepage url.
    :raises argparse.ArgumentTypeError: when *url* is not a valid URL
    """
    pattern = (
        r'^(?:http)s?://'
        r'(?:(?:[A-Z0-9](?:[A-Z0-9-]{0,61}[A-Z0-9])?\.)+'
        r'(?:[A-Z]{2,6}\.?|[A-Z0-9-]{2,}\.?))'
        r'(?:/?|[/?]\S+)$'
    )
    if re.match(pattern, url, re.IGNORECASE) is None:
        raise argparse.ArgumentTypeError('"{0}" is invalid url.'.format(url))
    return url
def parse_options(metadata):
    """Create the command line parser populated with all sub-commands.

    :param metadata: package metadata provider forwarded to setoption
    :return: configured :class:`argparse.ArgumentParser`
    """
    option_parser = argparse.ArgumentParser(
        description='%(prog)s usage:', prog=__prog__)
    setoption(option_parser, metadata=metadata)
    return option_parser
def main():
    """Execute main processes.

    Checks for a newer release, parses CLI arguments and dispatches to the
    control module; known failures are printed to stderr and exit with 1.
    """
    try:
        pkg_version = Update()
        if pkg_version.updatable():
            pkg_version.show_message()
        metadata = control.retreive_metadata()
        parser = parse_options(metadata)
        argvs = sys.argv
        if len(argvs) <= 1:
            # No sub-command given: show usage and fail.
            parser.print_help()
            sys.exit(1)
        args = parser.parse_args()
        control.print_licences(args, metadata)
        control.check_repository_existence(args)
        control.check_package_existence(args)
        control.generate_package(args)
    except (RuntimeError, BackendFailure, Conflict) as exc:
        sys.stderr.write('{0}\n'.format(exc))
        sys.exit(1)
if __name__ == '__main__':
    main()
| 39.057692 | 77 | 0.566962 |
import os
import sys
import re
import argparse
from bootstrap_py import control, __prog__, __version__
from bootstrap_py.update import Update
from bootstrap_py.exceptions import BackendFailure, Conflict
def setoption(parser, metadata=None):
parser.add_argument('-v', action='version',
version=__version__)
subparsers = parser.add_subparsers(help='sub commands help')
create_cmd = subparsers.add_parser('create')
create_cmd.add_argument('name',
help='Specify Python package name.')
create_cmd.add_argument('-d', dest='description', action='store',
help='Short description about your package.')
create_cmd.add_argument('-a', dest='author', action='store',
required=True,
help='Python package author name.')
create_cmd.add_argument('-e', dest='email', action='store',
required=True,
help='Python package author email address.')
create_cmd.add_argument('-l', dest='license',
choices=metadata.licenses().keys(),
default='GPLv3+',
help='Specify license. (default: %(default)s)')
create_cmd.add_argument('-s', dest='status',
choices=metadata.status().keys(),
default='Alpha',
help=('Specify development status. '
'(default: %(default)s)'))
create_cmd.add_argument('--no-check', action='store_true',
help='No checking package name in PyPI.')
create_cmd.add_argument('--with-samples', action='store_true',
help='Generate package with sample code.')
group = create_cmd.add_mutually_exclusive_group(required=True)
group.add_argument('-U', dest='username', action='store',
help='Specify GitHub username.')
group.add_argument('-u', dest='url', action='store', type=valid_url,
help='Python package homepage url.')
create_cmd.add_argument('-o', dest='outdir', action='store',
default=os.path.abspath(os.path.curdir),
help='Specify output directory. (default: $PWD)')
list_cmd = subparsers.add_parser('list')
list_cmd.add_argument('-l', dest='licenses', action='store_true',
help='show license choices.')
def valid_url(url):
regex = re.compile(
r'^(?:http)s?://'
r'(?:(?:[A-Z0-9](?:[A-Z0-9-]{0,61}[A-Z0-9])?\.)+'
r'(?:[A-Z]{2,6}\.?|[A-Z0-9-]{2,}\.?))'
r'(?:/?|[/?]\S+)$', re.IGNORECASE)
if not regex.match(url):
raise argparse.ArgumentTypeError('"{0}" is invalid url.'.format(url))
return url
def parse_options(metadata):
parser = argparse.ArgumentParser(description='%(prog)s usage:',
prog=__prog__)
setoption(parser, metadata=metadata)
return parser
def main():
try:
pkg_version = Update()
if pkg_version.updatable():
pkg_version.show_message()
metadata = control.retreive_metadata()
parser = parse_options(metadata)
argvs = sys.argv
if len(argvs) <= 1:
parser.print_help()
sys.exit(1)
args = parser.parse_args()
control.print_licences(args, metadata)
control.check_repository_existence(args)
control.check_package_existence(args)
control.generate_package(args)
except (RuntimeError, BackendFailure, Conflict) as exc:
sys.stderr.write('{0}\n'.format(exc))
sys.exit(1)
if __name__ == '__main__':
main()
| true | true |
f738a9f000115ed33e65863504a8d657cbba9d95 | 128,196 | py | Python | lib/python2.7/site-packages/numpy/f2py/crackfortran.py | wfehrnstrom/harmonize | e5661d24b2021739e8ac4bf1d3a530eda4e155b3 | [
"MIT"
] | 2 | 2020-11-01T13:22:11.000Z | 2020-11-01T13:22:20.000Z | lib/python2.7/site-packages/numpy/f2py/crackfortran.py | wfehrnstrom/harmonize | e5661d24b2021739e8ac4bf1d3a530eda4e155b3 | [
"MIT"
] | 12 | 2020-07-11T01:42:51.000Z | 2020-08-12T17:17:35.000Z | lib/python2.7/site-packages/numpy/f2py/crackfortran.py | wfehrnstrom/harmonize | e5661d24b2021739e8ac4bf1d3a530eda4e155b3 | [
"MIT"
] | null | null | null | #!/usr/bin/env python
"""
crackfortran --- read fortran (77,90) code and extract declaration information.
Copyright 1999-2004 Pearu Peterson all rights reserved,
Pearu Peterson <pearu@ioc.ee>
Permission to use, modify, and distribute this software is given under the
terms of the NumPy License.
NO WARRANTY IS EXPRESSED OR IMPLIED. USE AT YOUR OWN RISK.
$Date: 2005/09/27 07:13:49 $
Pearu Peterson
Usage of crackfortran:
======================
Command line keys: -quiet,-verbose,-fix,-f77,-f90,-show,-h <pyffilename>
-m <module name for f77 routines>,--ignore-contains
Functions: crackfortran, crack2fortran
The following Fortran statements/constructions are supported
(or will be if needed):
block data,byte,call,character,common,complex,contains,data,
dimension,double complex,double precision,end,external,function,
implicit,integer,intent,interface,intrinsic,
logical,module,optional,parameter,private,public,
program,real,(sequence?),subroutine,type,use,virtual,
include,pythonmodule
Note: 'virtual' is mapped to 'dimension'.
Note: 'implicit integer (z) static (z)' is 'implicit static (z)' (this is a minor bug).
Note: code after 'contains' will be ignored until its scope ends.
Note: 'common' statement is extended: dimensions are moved to variable definitions
Note: f2py directive: <commentchar>f2py<line> is read as <line>
Note: pythonmodule is introduced to represent Python module
Usage:
`postlist=crackfortran(files,funcs)`
`postlist` contains declaration information read from the list of files `files`.
`crack2fortran(postlist)` returns a fortran code to be saved to pyf-file
`postlist` has the following structure:
*** it is a list of dictionaries containing `blocks':
B = {'block','body','vars','parent_block'[,'name','prefix','args','result',
'implicit','externals','interfaced','common','sortvars',
'commonvars','note']}
B['block'] = 'interface' | 'function' | 'subroutine' | 'module' |
'program' | 'block data' | 'type' | 'pythonmodule'
B['body'] --- list containing `subblocks' with the same structure as `blocks'
B['parent_block'] --- dictionary of a parent block:
C['body'][<index>]['parent_block'] is C
B['vars'] --- dictionary of variable definitions
B['sortvars'] --- dictionary of variable definitions sorted by dependence (independent first)
B['name'] --- name of the block (not if B['block']=='interface')
B['prefix'] --- prefix string (only if B['block']=='function')
B['args'] --- list of argument names if B['block']== 'function' | 'subroutine'
B['result'] --- name of the return value (only if B['block']=='function')
B['implicit'] --- dictionary {'a':<variable definition>,'b':...} | None
B['externals'] --- list of variables being external
B['interfaced'] --- list of variables being external and defined
B['common'] --- dictionary of common blocks (list of objects)
B['commonvars'] --- list of variables used in common blocks (dimensions are moved to variable definitions)
B['from'] --- string showing the 'parents' of the current block
B['use'] --- dictionary of modules used in current block:
{<modulename>:{['only':<0|1>],['map':{<local_name1>:<use_name1>,...}]}}
B['note'] --- list of LaTeX comments on the block
B['f2pyenhancements'] --- optional dictionary
{'threadsafe':'','fortranname':<name>,
'callstatement':<C-expr>|<multi-line block>,
'callprotoargument':<C-expr-list>,
'usercode':<multi-line block>|<list of multi-line blocks>,
'pymethoddef:<multi-line block>'
}
B['entry'] --- dictionary {entryname:argslist,..}
B['varnames'] --- list of variable names given in the order of reading the
Fortran code, useful for derived types.
B['saved_interface'] --- a string of scanned routine signature, defines explicit interface
*** Variable definition is a dictionary
D = B['vars'][<variable name>] =
{'typespec'[,'attrspec','kindselector','charselector','=','typename']}
D['typespec'] = 'byte' | 'character' | 'complex' | 'double complex' |
'double precision' | 'integer' | 'logical' | 'real' | 'type'
D['attrspec'] --- list of attributes (e.g. 'dimension(<arrayspec>)',
'external','intent(in|out|inout|hide|c|callback|cache|aligned4|aligned8|aligned16)',
'optional','required', etc)
K = D['kindselector'] = {['*','kind']} (only if D['typespec'] =
'complex' | 'integer' | 'logical' | 'real' )
C = D['charselector'] = {['*','len','kind']}
(only if D['typespec']=='character')
D['='] --- initialization expression string
D['typename'] --- name of the type if D['typespec']=='type'
D['dimension'] --- list of dimension bounds
D['intent'] --- list of intent specifications
D['depend'] --- list of variable names on which current variable depends on
D['check'] --- list of C-expressions; if C-expr returns zero, exception is raised
D['note'] --- list of LaTeX comments on the variable
*** Meaning of kind/char selectors (few examples):
D['typespec>']*K['*']
D['typespec'](kind=K['kind'])
character*C['*']
character(len=C['len'],kind=C['kind'])
(see also fortran type declaration statement formats below)
Fortran 90 type declaration statement format (F77 is subset of F90)
====================================================================
(Main source: IBM XL Fortran 5.1 Language Reference Manual)
type declaration = <typespec> [[<attrspec>]::] <entitydecl>
<typespec> = byte |
character[<charselector>] |
complex[<kindselector>] |
double complex |
double precision |
integer[<kindselector>] |
logical[<kindselector>] |
real[<kindselector>] |
type(<typename>)
<charselector> = * <charlen> |
([len=]<len>[,[kind=]<kind>]) |
(kind=<kind>[,len=<len>])
<kindselector> = * <intlen> |
([kind=]<kind>)
<attrspec> = comma separated list of attributes.
Only the following attributes are used in
building up the interface:
external
(parameter --- affects '=' key)
optional
intent
Other attributes are ignored.
<intentspec> = in | out | inout
<arrayspec> = comma separated list of dimension bounds.
<entitydecl> = <name> [[*<charlen>][(<arrayspec>)] | [(<arrayspec>)]*<charlen>]
[/<init_expr>/ | =<init_expr>] [,<entitydecl>]
In addition, the following attributes are used: check,depend,note
TODO:
* Apply 'parameter' attribute (e.g. 'integer parameter :: i=2' 'real x(i)'
-> 'real x(2)')
The above may be solved by creating appropriate preprocessor program, for example.
"""
from __future__ import division, absolute_import, print_function
import sys
import string
import fileinput
import re
import os
import copy
import platform
from . import __version__
# The environment provided by auxfuncs.py is needed for some calls to eval.
# As the needed functions cannot be determined by static inspection of the
# code, it is safest to use import * pending a major refactoring of f2py.
from .auxfuncs import *
f2py_version = __version__.version

# Global flags (parser behaviour switches, also reset by
# reset_global_f2py_vars below):
strictf77 = 1  # Ignore `!' comments unless line[0]=='!'
sourcecodeform = 'fix'  # 'fix','free'
quiet = 0  # Be verbose if 0 (Obsolete: not used any more)
verbose = 1  # Be quiet if 0, extra verbose if > 1.
tabchar = 4 * ' '  # replacement text used when expanding tabs
pyffilename = ''  # name of the signature file to produce, if any
f77modulename = ''  # wrap F77 routines into a python module of this name
skipemptyends = 0  # for old F77 programs without 'program' statement
ignorecontains = 1  # nonzero: do not descend into 'contains' blocks
dolowercase = 1  # nonzero: lowercase all non-comment source text
debug = []  # debug topic switches

# Global variables — mutable parser state shared by readfortrancode,
# crackline and analyzeline:
beginpattern = ''  # active block-begin pattern (F77 or F90 variant)
currentfilename = ''  # file currently being read
expectbegin = 1  # nonzero while waiting for the first block statement
f90modulevars = {}  # presumably module name -> variables; populated elsewhere
filepositiontext = ''  # 'Line #.. in ..' prefix used in diagnostics
gotnextfile = 1  # set when a new input file has just been opened
groupcache = None  # groupcounter -> block dict under construction (see crackline)
groupcounter = 0  # current block nesting depth
grouplist = {groupcounter: []}  # groupcounter -> finished child blocks
groupname = ''  # becomes dict groupcounter -> block kind (see crackline reset)
include_paths = []  # extra directories searched for Fortran include files
neededmodule = -1  # depth at which an implicit f77modulename module was opened
onlyfuncs = []  # routine names to restrict wrapping to (set by caller)
previous_context = None  # (kind, name, groupcounter) of last cracked item
skipblocksuntil = -1  # depth until which blocks are being skipped
skipfuncs = []  # routine names to exclude (set by caller)
skipfunctions = []  # NOTE(review): populated outside this chunk — see callers
usermodules = []  # NOTE(review): populated outside this chunk — see callers
def reset_global_f2py_vars():
    """Restore every module-level flag and parser-state variable to its
    initial default, so a fresh cracking run starts from a clean slate."""
    global groupcounter, grouplist, neededmodule, expectbegin
    global skipblocksuntil, usermodules, f90modulevars, gotnextfile
    global filepositiontext, currentfilename, skipfunctions, skipfuncs
    global onlyfuncs, include_paths, previous_context
    global strictf77, sourcecodeform, quiet, verbose, tabchar, pyffilename
    global f77modulename, skipemptyends, ignorecontains, dolowercase, debug

    # behaviour flags
    strictf77, sourcecodeform = 1, 'fix'
    quiet, verbose = 0, 1
    tabchar = 4 * ' '
    pyffilename = ''
    f77modulename = ''
    skipemptyends = 0
    ignorecontains = 1
    dolowercase = 1
    debug = []
    # parser state
    groupcounter = 0
    grouplist = {groupcounter: []}
    neededmodule, skipblocksuntil = -1, -1
    expectbegin, gotnextfile = 1, 1
    usermodules = []
    f90modulevars = {}
    filepositiontext = ''
    currentfilename = ''
    # fresh, independent list objects for each collection
    skipfunctions, skipfuncs, onlyfuncs, include_paths = [], [], [], []
    previous_context = None
def outmess(line, flag=1):
    """Write *line* to stdout unless output is suppressed.

    Nothing is emitted when the module flag ``verbose`` is falsy or
    ``quiet`` is set.  When *flag* is true the current file-position text
    is written first, anchoring the message to the source line being
    processed.
    """
    global filepositiontext

    if not verbose or quiet:
        return
    if flag:
        sys.stdout.write(filepositiontext)
    sys.stdout.write(line)
# Keep re's internal pattern cache small: the cracker compiles many
# short-lived expressions.
re._MAXCACHE = 50

# Fortran's default implicit typing: identifiers starting with i-n are
# INTEGER, all other letters (plus '$' and '_') are REAL.  Each entry gets
# its own dict object so later per-letter updates stay independent.
defaultimplicitrules = dict(
    [(letter, {'typespec': 'real'}) for letter in "abcdefghopqrstuvwxyz$_"] +
    [(letter, {'typespec': 'integer'}) for letter in "ijklmn"])
# C/C++ keywords and f2py-internal identifiers that must not appear as
# variable names in generated wrapper code; each maps to a '_bn' alias.
_reserved_names = (
    'int', 'double', 'float', 'char', 'short', 'long', 'void', 'case', 'while',
    'return', 'signed', 'unsigned', 'if', 'for', 'typedef', 'sizeof', 'union',
    'struct', 'static', 'register', 'new', 'break', 'do', 'goto', 'switch',
    'continue', 'else', 'inline', 'extern', 'delete', 'const', 'auto',
    'len', 'rank', 'shape', 'index', 'slen', 'size', '_i',
    'max', 'min',
    'flen', 'fshape',
    'string', 'complex_double', 'float_double', 'stdin', 'stderr', 'stdout',
    'type', 'default')
badnames = {word: word + '_bn' for word in _reserved_names}
invbadnames = {alias: word for word, alias in badnames.items()}


def rmbadname1(name):
    """Return the safe '_bn' alias for *name* when it collides with a
    reserved word (emitting a diagnostic); otherwise return it unchanged."""
    alias = badnames.get(name)
    if alias is None:
        return name
    errmess('rmbadname1: Replacing "%s" with "%s".\n' %
            (name, alias))
    return alias


def rmbadname(names):
    """Apply rmbadname1 to every entry of *names*."""
    return [rmbadname1(name) for name in names]


def undo_rmbadname1(name):
    """Inverse of rmbadname1: translate a '_bn' alias back to the original
    reserved word (emitting a diagnostic); otherwise return it unchanged."""
    original = invbadnames.get(name)
    if original is None:
        return name
    errmess('undo_rmbadname1: Replacing "%s" with "%s".\n'
            % (name, original))
    return original


def undo_rmbadname(names):
    """Apply undo_rmbadname1 to every entry of *names*."""
    return [undo_rmbadname1(name) for name in names]
def getextension(name):
    """Return the filename extension of *name* (without the dot), or ''
    when there is no dot or the last dot lies inside a directory part."""
    dot = name.rfind('.')
    if dot < 0:
        return ''
    tail = name[dot:]
    # A path separator after the final dot means the dot belongs to a
    # directory name, not an extension.
    if '\\' in tail or '/' in tail:
        return ''
    return tail[1:]
# Filename / emacs-style header probes used to guess the source format.
is_f_file = re.compile(r'.*[.](for|ftn|f77|f)\Z', re.I).match
_has_f_header = re.compile(r'-[*]-\s*fortran\s*-[*]-', re.I).search
_has_f90_header = re.compile(r'-[*]-\s*f90\s*-[*]-', re.I).search
_has_fix_header = re.compile(r'-[*]-\s*fix\s*-[*]-', re.I).search
_free_f90_start = re.compile(r'[^c*]\s*[^\s\d\t]', re.I).match


def is_free_format(file):
    """Check if file is in free format Fortran.

    Returns 1 when a first-line ``-*- f90 -*-`` header is present or a
    free-format hint (statement starting before column 6, or a trailing
    ``&`` continuation) is found within the first 15 non-comment lines;
    otherwise returns 0 (fixed form assumed).
    """
    # f90 allows both fixed and free format, assuming fixed unless
    # signs of free format are detected.
    result = 0
    # Use a context manager so the handle is closed even if reading fails
    # (the original open()/close() pair leaked the handle on exceptions).
    with open(file, 'r') as f:
        line = f.readline()
        n = 15  # the number of non-comment lines to scan for hints
        if _has_f_header(line):
            n = 0
        elif _has_f90_header(line):
            n = 0
            result = 1
        while n > 0 and line:
            if line[0] != '!' and line.strip():
                n -= 1
                if (line[0] != '\t' and _free_f90_start(line[:5])) or line[-2:-1] == '&':
                    result = 1
                    break
            line = f.readline()
    return result
# Read fortran (77,90) code
def readfortrancode(ffile, dowithline=show, istop=1):
    """
    Read fortran codes from files and
     1) Get rid of comments, line continuations, and empty lines; lower cases.
     2) Call dowithline(line) on every line.
     3) Recursively call itself when statement \"include '<filename>'\" is met.

    Parameters: ffile is a filename or list of filenames fed to
    fileinput.FileInput; dowithline is the per-line callback (default
    ``show`` from auxfuncs); istop is nonzero for the outermost call and
    zero for recursive include-file calls, in which case the module-level
    reader state is saved on entry and restored on exit.
    """
    global gotnextfile, filepositiontext, currentfilename, sourcecodeform, strictf77
    global beginpattern, quiet, verbose, dolowercase, include_paths

    # Recursive call for an include file: preserve the caller's reader state.
    if not istop:
        saveglobals = gotnextfile, filepositiontext, currentfilename, sourcecodeform, strictf77,\
            beginpattern, quiet, verbose, dolowercase
    if ffile == []:
        return
    localdolowercase = dolowercase
    cont = 0          # nonzero while a continuation line is pending
    finalline = ''    # fully joined statement handed to dowithline
    ll = ''           # statement currently being assembled
    # Splits "code ! comment" while respecting quoted strings.
    commentline = re.compile(
        r'(?P<line>([^"]*["][^"]*["][^"!]*|[^\']*\'[^\']*\'[^\'!]*|[^!\'"]*))!{1}(?P<rest>.*)')
    includeline = re.compile(
        r'\s*include\s*(\'|")(?P<name>[^\'"]*)(\'|")', re.I)
    cont1 = re.compile(r'(?P<line>.*)&\s*\Z')       # trailing '&' continuation
    cont2 = re.compile(r'(\s*&|)(?P<line>.*)')      # optional leading '&'
    mline_mark = re.compile(r".*?'''")              # .pyf multiline block marker
    if istop:
        dowithline('', -1)  # signal start of a top-level read
    ll, l1 = '', ''
    spacedigits = [' '] + [str(_m) for _m in range(10)]  # legal column-1 chars (fixed form)
    filepositiontext = ''
    fin = fileinput.FileInput(ffile)
    while True:
        l = fin.readline()
        if not l:
            break
        if fin.isfirstline():
            # New input file: detect its source format and reset per-file state.
            filepositiontext = ''
            currentfilename = fin.filename()
            gotnextfile = 1
            l1 = l
            strictf77 = 0
            sourcecodeform = 'fix'
            ext = os.path.splitext(currentfilename)[1]
            if is_f_file(currentfilename) and \
                    not (_has_f90_header(l) or _has_fix_header(l)):
                strictf77 = 1
            elif is_free_format(currentfilename) and not _has_fix_header(l):
                sourcecodeform = 'free'
            if strictf77:
                beginpattern = beginpattern77
            else:
                beginpattern = beginpattern90
            outmess('\tReading file %s (format:%s%s)\n'
                    % (repr(currentfilename), sourcecodeform,
                       strictf77 and ',strict' or ''))

        l = l.expandtabs().replace('\xa0', ' ')
        # Get rid of newline characters
        while not l == '':
            if l[-1] not in "\n\r\f":
                break
            l = l[:-1]
        if not strictf77:
            r = commentline.match(l)
            if r:
                l = r.group('line') + ' '  # Strip comments starting with `!'
                rl = r.group('rest')
                if rl[:4].lower() == 'f2py':  # f2py directive
                    l = l + 4 * ' '
                    r = commentline.match(rl[4:])
                    if r:
                        l = l + r.group('line')
                    else:
                        l = l + rl[4:]
        if l.strip() == '':  # Skip empty line
            cont = 0
            continue
        if sourcecodeform == 'fix':
            if l[0] in ['*', 'c', '!', 'C', '#']:
                if l[1:5].lower() == 'f2py':  # f2py directive
                    l = '     ' + l[5:]
                else:  # Skip comment line
                    cont = 0
                    continue
            elif strictf77:
                if len(l) > 72:  # F77 ignores everything past column 72
                    l = l[:72]
            if not (l[0] in spacedigits):
                raise Exception('readfortrancode: Found non-(space,digit) char '
                                'in the first column.\n\tAre you sure that '
                                'this code is in fix form?\n\tline=%s' % repr(l))

            if (not cont or strictf77) and (len(l) > 5 and not l[5] == ' '):
                # Continuation of a previous line
                ll = ll + l[6:]
                finalline = ''
                origfinalline = ''
            else:
                if not strictf77:
                    # F90 continuation
                    r = cont1.match(l)
                    if r:
                        l = r.group('line')  # Continuation follows ..
                    if cont:
                        ll = ll + cont2.match(l).group('line')
                        finalline = ''
                        origfinalline = ''
                    else:
                        # clean up line beginning from possible digits.
                        l = '     ' + l[5:]
                        if localdolowercase:
                            finalline = ll.lower()
                        else:
                            finalline = ll
                        origfinalline = ll
                        ll = l
                    cont = (r is not None)
                else:
                    # clean up line beginning from possible digits.
                    l = '     ' + l[5:]
                    if localdolowercase:
                        finalline = ll.lower()
                    else:
                        finalline = ll
                    origfinalline = ll
                    ll = l

        elif sourcecodeform == 'free':
            # In .pyf files, swallow a whole '''...''' multiline block as one line.
            if not cont and ext == '.pyf' and mline_mark.match(l):
                l = l + '\n'
                while True:
                    lc = fin.readline()
                    if not lc:
                        errmess(
                            'Unexpected end of file when reading multiline\n')
                        break
                    l = l + lc
                    if mline_mark.match(lc):
                        break
                l = l.rstrip()
            r = cont1.match(l)
            if r:
                l = r.group('line')  # Continuation follows ..
            if cont:
                ll = ll + cont2.match(l).group('line')
                finalline = ''
                origfinalline = ''
            else:
                if localdolowercase:
                    finalline = ll.lower()
                else:
                    finalline = ll
                origfinalline = ll
                ll = l
            cont = (r is not None)
        else:
            raise ValueError(
                "Flag sourcecodeform must be either 'fix' or 'free': %s" % repr(sourcecodeform))
        filepositiontext = 'Line #%d in %s:"%s"\n\t' % (
            fin.filelineno() - 1, currentfilename, l1)
        # Either recurse into an include file or deliver the finished statement.
        m = includeline.match(origfinalline)
        if m:
            fn = m.group('name')
            if os.path.isfile(fn):
                readfortrancode(fn, dowithline=dowithline, istop=0)
            else:
                include_dirs = [
                    os.path.dirname(currentfilename)] + include_paths
                foundfile = 0
                for inc_dir in include_dirs:
                    fn1 = os.path.join(inc_dir, fn)
                    if os.path.isfile(fn1):
                        foundfile = 1
                        readfortrancode(fn1, dowithline=dowithline, istop=0)
                        break
                if not foundfile:
                    outmess('readfortrancode: could not find include file %s in %s. Ignoring.\n' % (
                        repr(fn), os.pathsep.join(include_dirs)))
        else:
            dowithline(finalline)
        l1 = ll
    # Flush the last buffered statement after EOF.
    if localdolowercase:
        finalline = ll.lower()
    else:
        finalline = ll
    origfinalline = ll
    filepositiontext = 'Line #%d in %s:"%s"\n\t' % (
        fin.filelineno() - 1, currentfilename, l1)
    m = includeline.match(origfinalline)
    if m:
        fn = m.group('name')
        if os.path.isfile(fn):
            readfortrancode(fn, dowithline=dowithline, istop=0)
        else:
            include_dirs = [os.path.dirname(currentfilename)] + include_paths
            foundfile = 0
            for inc_dir in include_dirs:
                fn1 = os.path.join(inc_dir, fn)
                if os.path.isfile(fn1):
                    foundfile = 1
                    readfortrancode(fn1, dowithline=dowithline, istop=0)
                    break
            if not foundfile:
                outmess('readfortrancode: could not find include file %s in %s. Ignoring.\n' % (
                    repr(fn), os.pathsep.join(include_dirs)))
    else:
        dowithline(finalline)
    filepositiontext = ''
    fin.close()
    if istop:
        dowithline('', 1)  # signal end of the top-level read
    else:
        # Restore the caller's reader state saved on entry.
        gotnextfile, filepositiontext, currentfilename, sourcecodeform, strictf77,\
            beginpattern, quiet, verbose, dolowercase = saveglobals
# Crack line
# Template for statement-recognition regexes: %-filled with
# (before, this, this, after) and producing named groups 'before',
# 'this' (the keyword) and 'after' (the remainder of the statement).
beforethisafter = r'\s*(?P<before>%s(?=\s*(\b(%s)\b)))' + \
    r'\s*(?P<this>(\b(%s)\b))' + \
    r'\s*(?P<after>%s)\s*\Z'
##
fortrantypes = r'character|logical|integer|real|complex|double\s*(precision\s*(complex|)|complex)|type(?=\s*\([\w\s,=(*)]*\))|byte'
# Each *pattern below is a (compiled_regex, tag) tuple; crackline dispatches
# on the tag (pat[1]) after the first regex that matches the line.
typespattern = re.compile(
    beforethisafter % ('', fortrantypes, fortrantypes, '.*'), re.I), 'type'
typespattern4implicit = re.compile(beforethisafter % (
    '', fortrantypes + '|static|automatic|undefined', fortrantypes + '|static|automatic|undefined', '.*'), re.I)
#
functionpattern = re.compile(beforethisafter % (
    r'([a-z]+[\w\s(=*+-/)]*?|)', 'function', 'function', '.*'), re.I), 'begin'
subroutinepattern = re.compile(beforethisafter % (
    r'[a-z\s]*?', 'subroutine', 'subroutine', '.*'), re.I), 'begin'
# modulepattern=re.compile(beforethisafter%('[a-z\s]*?','module','module','.*'),re.I),'begin'
#
# Block-begin keywords: F77 knows only program/block data; F90 adds
# module, python module, interface and derived types.
groupbegins77 = r'program|block\s*data'
beginpattern77 = re.compile(
    beforethisafter % ('', groupbegins77, groupbegins77, '.*'), re.I), 'begin'
groupbegins90 = groupbegins77 + \
    r'|module(?!\s*procedure)|python\s*module|interface|type(?!\s*\()'
beginpattern90 = re.compile(
    beforethisafter % ('', groupbegins90, groupbegins90, '.*'), re.I), 'begin'
groupends = r'end|endprogram|endblockdata|endmodule|endpythonmodule|endinterface'
endpattern = re.compile(
    beforethisafter % ('', groupends, groupends, r'[\w\s]*'), re.I), 'end'
# endifs='end\s*(if|do|where|select|while|forall)'
endifs = r'(end\s*(if|do|where|select|while|forall))|(module\s*procedure)'
endifpattern = re.compile(
    beforethisafter % (r'[\w]*?', endifs, endifs, r'[\w\s]*'), re.I), 'endif'
#
# Declaration / executable-statement patterns handled by analyzeline.
implicitpattern = re.compile(
    beforethisafter % ('', 'implicit', 'implicit', '.*'), re.I), 'implicit'
dimensionpattern = re.compile(beforethisafter % (
    '', 'dimension|virtual', 'dimension|virtual', '.*'), re.I), 'dimension'
externalpattern = re.compile(
    beforethisafter % ('', 'external', 'external', '.*'), re.I), 'external'
optionalpattern = re.compile(
    beforethisafter % ('', 'optional', 'optional', '.*'), re.I), 'optional'
requiredpattern = re.compile(
    beforethisafter % ('', 'required', 'required', '.*'), re.I), 'required'
publicpattern = re.compile(
    beforethisafter % ('', 'public', 'public', '.*'), re.I), 'public'
privatepattern = re.compile(
    beforethisafter % ('', 'private', 'private', '.*'), re.I), 'private'
# NOTE(review): 'intrisic' looks like a typo for 'intrinsic' — as written
# this pattern can never match a Fortran INTRINSIC statement.  The same
# spelling is used consistently downstream (analyzeline case lists), so
# fixing it requires a coordinated change — TODO confirm against callers.
intrisicpattern = re.compile(
    beforethisafter % ('', 'intrisic', 'intrisic', '.*'), re.I), 'intrisic'
intentpattern = re.compile(beforethisafter % (
    '', 'intent|depend|note|check', 'intent|depend|note|check', r'\s*\(.*?\).*'), re.I), 'intent'
parameterpattern = re.compile(
    beforethisafter % ('', 'parameter', 'parameter', r'\s*\(.*'), re.I), 'parameter'
datapattern = re.compile(
    beforethisafter % ('', 'data', 'data', '.*'), re.I), 'data'
callpattern = re.compile(
    beforethisafter % ('', 'call', 'call', '.*'), re.I), 'call'
entrypattern = re.compile(
    beforethisafter % ('', 'entry', 'entry', '.*'), re.I), 'entry'
callfunpattern = re.compile(
    beforethisafter % ('', 'callfun', 'callfun', '.*'), re.I), 'callfun'
commonpattern = re.compile(
    beforethisafter % ('', 'common', 'common', '.*'), re.I), 'common'
usepattern = re.compile(
    beforethisafter % ('', 'use', 'use', '.*'), re.I), 'use'
containspattern = re.compile(
    beforethisafter % ('', 'contains', 'contains', ''), re.I), 'contains'
formatpattern = re.compile(
    beforethisafter % ('', 'format', 'format', '.*'), re.I), 'format'
# Non-fortran and f2py-specific statements
f2pyenhancementspattern = re.compile(beforethisafter % ('', 'threadsafe|fortranname|callstatement|callprotoargument|usercode|pymethoddef',
                                                        'threadsafe|fortranname|callstatement|callprotoargument|usercode|pymethoddef', '.*'), re.I | re.S), 'f2pyenhancements'
multilinepattern = re.compile(
    r"\s*(?P<before>''')(?P<this>.*?)(?P<after>''')\s*\Z", re.S), 'multiline'
##
def _simplifyargs(argsline):
    """Normalise an argument list: split on outer-level commas and replace
    any '(', ')' or ',' inside individual arguments with underscores."""
    simplified = []
    for arg in markoutercomma(argsline).split('@,@'):
        for ch in '(),':
            arg = arg.replace(ch, '_')
        simplified.append(arg)
    return ','.join(simplified)


# Matches "<result> = ..." at the start of a statement; used by crackline
# to recover the result variable of an external-function call.
crackline_re_1 = re.compile(r'\s*(?P<result>\b[a-z]+[\w]*\b)\s*[=].*', re.I)
def crackline(line, reset=0):
    """
    Crack a single joined, comment-free source line and update the global
    block bookkeeping (groupcounter/groupname/groupcache/grouplist).

    reset=-1 --- initialize
    reset=0 --- crack the line
    reset=1 --- final check if mismatch of blocks occurred

    Cracked data is saved in grouplist[0].
    """
    global beginpattern, groupcounter, groupname, groupcache, grouplist
    global filepositiontext, currentfilename, neededmodule, expectbegin
    global skipblocksuntil, skipemptyends, previous_context, gotnextfile

    # Split multi-statement lines on ';' (unless the ';' belongs to an
    # f2py enhancement or a multiline block) and crack each part.
    if ';' in line and not (f2pyenhancementspattern[0].match(line) or
                            multilinepattern[0].match(line)):
        for l in line.split(';'):
            # XXX: non-zero reset values need testing
            assert reset == 0, repr(reset)
            crackline(l, reset)
        return
    if reset < 0:
        # Initialize the global group data structures (depth 0 = file level).
        groupcounter = 0
        groupname = {groupcounter: ''}
        groupcache = {groupcounter: {}}
        grouplist = {groupcounter: []}
        groupcache[groupcounter]['body'] = []
        groupcache[groupcounter]['vars'] = {}
        groupcache[groupcounter]['block'] = ''
        groupcache[groupcounter]['name'] = ''
        neededmodule = -1
        skipblocksuntil = -1
        return
    if reset > 0:
        # Final pass: close any blocks left open by missing 'end' statements.
        fl = 0
        if f77modulename and neededmodule == groupcounter:
            fl = 2
        while groupcounter > fl:
            outmess('crackline: groupcounter=%s groupname=%s\n' %
                    (repr(groupcounter), repr(groupname)))
            outmess(
                'crackline: Mismatch of blocks encountered. Trying to fix it by assuming "end" statement.\n')
            grouplist[groupcounter - 1].append(groupcache[groupcounter])
            grouplist[groupcounter - 1][-1]['body'] = grouplist[groupcounter]
            del grouplist[groupcounter]
            groupcounter = groupcounter - 1
        if f77modulename and neededmodule == groupcounter:
            # Close the implicitly created interface and module blocks.
            grouplist[groupcounter - 1].append(groupcache[groupcounter])
            grouplist[groupcounter - 1][-1]['body'] = grouplist[groupcounter]
            del grouplist[groupcounter]
            groupcounter = groupcounter - 1  # end interface
            grouplist[groupcounter - 1].append(groupcache[groupcounter])
            grouplist[groupcounter - 1][-1]['body'] = grouplist[groupcounter]
            del grouplist[groupcounter]
            groupcounter = groupcounter - 1  # end module
            neededmodule = -1
        return
    if line == '':
        return
    # Try each statement pattern in order; the first match decides the
    # statement kind via its tag (pat[1]).
    flag = 0
    for pat in [dimensionpattern, externalpattern, intentpattern, optionalpattern,
                requiredpattern,
                parameterpattern, datapattern, publicpattern, privatepattern,
                intrisicpattern,
                endifpattern, endpattern,
                formatpattern,
                beginpattern, functionpattern, subroutinepattern,
                implicitpattern, typespattern, commonpattern,
                callpattern, usepattern, containspattern,
                entrypattern,
                f2pyenhancementspattern,
                multilinepattern
                ]:
        m = pat[0].match(line)
        if m:
            break
        flag = flag + 1
    if not m:
        # No statement pattern matched: the line may still be a call to a
        # known external function; rewrite it as a 'callfun' statement.
        re_1 = crackline_re_1
        if 0 <= skipblocksuntil <= groupcounter:
            return
        if 'externals' in groupcache[groupcounter]:
            for name in groupcache[groupcounter]['externals']:
                if name in invbadnames:
                    name = invbadnames[name]
                if 'interfaced' in groupcache[groupcounter] and name in groupcache[groupcounter]['interfaced']:
                    continue
                m1 = re.match(
                    r'(?P<before>[^"]*)\b%s\b\s*@\(@(?P<args>[^@]*)@\)@.*\Z' % name, markouterparen(line), re.I)
                if m1:
                    m2 = re_1.match(m1.group('before'))
                    a = _simplifyargs(m1.group('args'))
                    if m2:
                        line = 'callfun %s(%s) result (%s)' % (
                            name, a, m2.group('result'))
                    else:
                        line = 'callfun %s(%s)' % (name, a)
                    m = callfunpattern[0].match(line)
                    if not m:
                        outmess(
                            'crackline: could not resolve function call for line=%s.\n' % repr(line))
                        return
                    analyzeline(m, 'callfun', line)
                    return
        if verbose > 1 or (verbose == 1 and currentfilename.lower().endswith('.pyf')):
            previous_context = None
            outmess('crackline:%d: No pattern for line\n' % (groupcounter))
        return
    elif pat[1] == 'end':
        if 0 <= skipblocksuntil < groupcounter:
            groupcounter = groupcounter - 1
            if skipblocksuntil <= groupcounter:
                return
        if groupcounter <= 0:
            raise Exception('crackline: groupcounter(=%s) is nonpositive. '
                            'Check the blocks.'
                            % (groupcounter))
        m1 = beginpattern[0].match((line))
        if (m1) and (not m1.group('this') == groupname[groupcounter]):
            raise Exception('crackline: End group %s does not match with '
                            'previous Begin group %s\n\t%s' %
                            (repr(m1.group('this')), repr(groupname[groupcounter]),
                             filepositiontext)
                            )
        if skipblocksuntil == groupcounter:
            skipblocksuntil = -1
        # Pop the finished block into its parent's body list.
        grouplist[groupcounter - 1].append(groupcache[groupcounter])
        grouplist[groupcounter - 1][-1]['body'] = grouplist[groupcounter]
        del grouplist[groupcounter]
        groupcounter = groupcounter - 1
        if not skipemptyends:
            expectbegin = 1
    elif pat[1] == 'begin':
        if 0 <= skipblocksuntil <= groupcounter:
            groupcounter = groupcounter + 1
            return
        gotnextfile = 0
        analyzeline(m, pat[1], line)
        expectbegin = 0
    elif pat[1] == 'endif':
        pass
    elif pat[1] == 'contains':
        if ignorecontains:
            return
        if 0 <= skipblocksuntil <= groupcounter:
            return
        skipblocksuntil = groupcounter
    else:
        if 0 <= skipblocksuntil <= groupcounter:
            return
        analyzeline(m, pat[1], line)
def markouterparen(line):
    """Return *line* with each outermost '(' and ')' replaced by the
    markers '@(@' and '@)@'; nested parentheses are left untouched."""
    out = []
    depth = 0
    for ch in line:
        if ch == '(':
            depth += 1
            if depth == 1:
                out.append('@(@')
                continue
        elif ch == ')':
            depth -= 1
            if depth == 0:
                out.append('@)@')
                continue
        out.append(ch)
    return ''.join(out)
def markoutercomma(line, comma=','):
    """Return *line* with every *comma* that lies outside all parentheses
    and quoted strings replaced by '@<comma>@'.

    Raises AssertionError when parentheses/quotes are unbalanced.
    """
    out = ''
    depth = 0
    closer = ''  # character expected to close the innermost open group
    for ch in line:
        if (not closer or closer == ')') and ch == '(':
            depth += 1
            closer = ')'
        elif not closer and ch == '\'' and (not out or out[-1] != '\\'):
            # opening quote (a backslash-escaped quote does not open one)
            depth += 1
            closer = '\''
        elif ch == closer:
            depth -= 1
            if depth == 0:
                closer = ''
        elif ch == comma and depth == 0:
            out += '@' + comma + '@'
            continue
        out += ch
    assert not depth, repr((depth, line, out, closer))
    return out
def unmarkouterparen(line):
    """Inverse of markouterparen: turn '@(@' / '@)@' back into '(' / ')'."""
    return line.replace('@(@', '(').replace('@)@', ')')
def appenddecl(decl, decl2, force=1):
    """Merge declaration dictionary *decl2* into *decl* and return it.

    With *force* true (default), values from *decl2* overwrite existing
    entries; otherwise existing entries win.  'attrspec' and the selector
    keys are merged via the set*spec/set*selector helpers; 'note' is
    ignored; a handful of keys are reported as unimplemented; anything
    else raises.
    """
    decl = decl or {}
    if not decl2 or decl is decl2:
        return decl
    for key in list(decl2.keys()):
        if key in ('typespec', '=', 'typename'):
            # plain scalar entries: copy unless present and not forcing
            if force or key not in decl:
                decl[key] = decl2[key]
        elif key == 'attrspec':
            for attr in decl2[key]:
                decl = setattrspec(decl, attr, force)
        elif key == 'kindselector':
            decl = setkindselector(decl, decl2[key], force)
        elif key == 'charselector':
            decl = setcharselector(decl, decl2[key], force)
        elif key == 'note':
            pass
        elif key in ('intent', 'check', 'dimension', 'optional', 'required'):
            errmess('appenddecl: "%s" not implemented.\n' % key)
        else:
            raise Exception('appenddecl: Unknown variable definition key:' +
                            str(key))
    return decl
# Splits a declaration tail into the kind/char selector part ('this') and
# the remainder ('after'); operates on markouterparen()-marked text.
selectpattern = re.compile(
    r'\s*(?P<this>(@\(@.*?@\)@|[*][\d*]+|[*]\s*@\(@.*?@\)@|))(?P<after>.*)\Z', re.I)
# Routine heading "name(args) result(r) bind(...)" on marked text; the
# 'result' and 'bind' clauses are optional and may appear in either order.
nameargspattern = re.compile(
    r'\s*(?P<name>\b[\w$]+\b)\s*(@\(@\s*(?P<args>[\w\s,]*)\s*@\)@|)\s*((result(\s*@\(@\s*(?P<result>\b[\w$]+\b)\s*@\)@|))|(bind\s*@\(@\s*(?P<bind>.*)\s*@\)@))*\s*\Z', re.I)
# Fallback "name(args)" form used for call-like statements (marked text).
callnameargspattern = re.compile(
    r'\s*(?P<name>\b[\w$]+\b)\s*@\(@\s*(?P<args>.*)\s*@\)@\s*\Z', re.I)
# Fortran double-precision literal using the 'd'/'D' exponent marker.
real16pattern = re.compile(
    r'([-+]?(?:\d+(?:\.\d*)?|\d*\.\d+))[dD]((?:[-+]?\d+)?)')
# Single-precision real literal ('e'/'E' exponent or plain decimal point).
real8pattern = re.compile(
    r'([-+]?((?:\d+(?:\.\d*)?|\d*\.\d+))[eE]((?:[-+]?\d+)?)|(\d+\.\d*))')
_intentcallbackpattern = re.compile(r'intent\s*\(.*?\bcallback\b', re.I)
def _is_intent_callback(vdecl):
for a in vdecl.get('attrspec', []):
if _intentcallbackpattern.match(a):
return 1
return 0
def _resolvenameargspattern(line):
    """Split a routine heading into (name, args, result, bind).

    Tries the full "name(args) result(r) bind(...)" form first, then the
    plain "name(args)" form; returns (None, [], None, None) when neither
    matches.
    """
    marked = markouterparen(line)
    m = nameargspattern.match(marked)
    if m:
        return m.group('name'), m.group('args'), m.group('result'), m.group('bind')
    m = callnameargspattern.match(marked)
    if m:
        return m.group('name'), m.group('args'), None, None
    return None, [], None, None
def analyzeline(m, case, line):
global groupcounter, groupname, groupcache, grouplist, filepositiontext
global currentfilename, f77modulename, neededinterface, neededmodule
global expectbegin, gotnextfile, previous_context
block = m.group('this')
if case != 'multiline':
previous_context = None
if expectbegin and case not in ['begin', 'call', 'callfun', 'type'] \
and not skipemptyends and groupcounter < 1:
newname = os.path.basename(currentfilename).split('.')[0]
outmess(
'analyzeline: no group yet. Creating program group with name "%s".\n' % newname)
gotnextfile = 0
groupcounter = groupcounter + 1
groupname[groupcounter] = 'program'
groupcache[groupcounter] = {}
grouplist[groupcounter] = []
groupcache[groupcounter]['body'] = []
groupcache[groupcounter]['vars'] = {}
groupcache[groupcounter]['block'] = 'program'
groupcache[groupcounter]['name'] = newname
groupcache[groupcounter]['from'] = 'fromsky'
expectbegin = 0
if case in ['begin', 'call', 'callfun']:
# Crack line => block,name,args,result
block = block.lower()
if re.match(r'block\s*data', block, re.I):
block = 'block data'
if re.match(r'python\s*module', block, re.I):
block = 'python module'
name, args, result, bind = _resolvenameargspattern(m.group('after'))
if name is None:
if block == 'block data':
name = '_BLOCK_DATA_'
else:
name = ''
if block not in ['interface', 'block data']:
outmess('analyzeline: No name/args pattern found for line.\n')
previous_context = (block, name, groupcounter)
if args:
args = rmbadname([x.strip()
for x in markoutercomma(args).split('@,@')])
else:
args = []
if '' in args:
while '' in args:
args.remove('')
outmess(
'analyzeline: argument list is malformed (missing argument).\n')
# end of crack line => block,name,args,result
needmodule = 0
needinterface = 0
if case in ['call', 'callfun']:
needinterface = 1
if 'args' not in groupcache[groupcounter]:
return
if name not in groupcache[groupcounter]['args']:
return
for it in grouplist[groupcounter]:
if it['name'] == name:
return
if name in groupcache[groupcounter]['interfaced']:
return
block = {'call': 'subroutine', 'callfun': 'function'}[case]
if f77modulename and neededmodule == -1 and groupcounter <= 1:
neededmodule = groupcounter + 2
needmodule = 1
if block != 'interface':
needinterface = 1
# Create new block(s)
groupcounter = groupcounter + 1
groupcache[groupcounter] = {}
grouplist[groupcounter] = []
if needmodule:
if verbose > 1:
outmess('analyzeline: Creating module block %s\n' %
repr(f77modulename), 0)
groupname[groupcounter] = 'module'
groupcache[groupcounter]['block'] = 'python module'
groupcache[groupcounter]['name'] = f77modulename
groupcache[groupcounter]['from'] = ''
groupcache[groupcounter]['body'] = []
groupcache[groupcounter]['externals'] = []
groupcache[groupcounter]['interfaced'] = []
groupcache[groupcounter]['vars'] = {}
groupcounter = groupcounter + 1
groupcache[groupcounter] = {}
grouplist[groupcounter] = []
if needinterface:
if verbose > 1:
outmess('analyzeline: Creating additional interface block (groupcounter=%s).\n' % (
groupcounter), 0)
groupname[groupcounter] = 'interface'
groupcache[groupcounter]['block'] = 'interface'
groupcache[groupcounter]['name'] = 'unknown_interface'
groupcache[groupcounter]['from'] = '%s:%s' % (
groupcache[groupcounter - 1]['from'], groupcache[groupcounter - 1]['name'])
groupcache[groupcounter]['body'] = []
groupcache[groupcounter]['externals'] = []
groupcache[groupcounter]['interfaced'] = []
groupcache[groupcounter]['vars'] = {}
groupcounter = groupcounter + 1
groupcache[groupcounter] = {}
grouplist[groupcounter] = []
groupname[groupcounter] = block
groupcache[groupcounter]['block'] = block
if not name:
name = 'unknown_' + block
groupcache[groupcounter]['prefix'] = m.group('before')
groupcache[groupcounter]['name'] = rmbadname1(name)
groupcache[groupcounter]['result'] = result
if groupcounter == 1:
groupcache[groupcounter]['from'] = currentfilename
else:
if f77modulename and groupcounter == 3:
groupcache[groupcounter]['from'] = '%s:%s' % (
groupcache[groupcounter - 1]['from'], currentfilename)
else:
groupcache[groupcounter]['from'] = '%s:%s' % (
groupcache[groupcounter - 1]['from'], groupcache[groupcounter - 1]['name'])
for k in list(groupcache[groupcounter].keys()):
if not groupcache[groupcounter][k]:
del groupcache[groupcounter][k]
groupcache[groupcounter]['args'] = args
groupcache[groupcounter]['body'] = []
groupcache[groupcounter]['externals'] = []
groupcache[groupcounter]['interfaced'] = []
groupcache[groupcounter]['vars'] = {}
groupcache[groupcounter]['entry'] = {}
# end of creation
if block == 'type':
groupcache[groupcounter]['varnames'] = []
if case in ['call', 'callfun']: # set parents variables
if name not in groupcache[groupcounter - 2]['externals']:
groupcache[groupcounter - 2]['externals'].append(name)
groupcache[groupcounter]['vars'] = copy.deepcopy(
groupcache[groupcounter - 2]['vars'])
try:
del groupcache[groupcounter]['vars'][name][
groupcache[groupcounter]['vars'][name]['attrspec'].index('external')]
except:
pass
if block in ['function', 'subroutine']: # set global attributes
try:
groupcache[groupcounter]['vars'][name] = appenddecl(
groupcache[groupcounter]['vars'][name], groupcache[groupcounter - 2]['vars'][''])
except:
pass
if case == 'callfun': # return type
if result and result in groupcache[groupcounter]['vars']:
if not name == result:
groupcache[groupcounter]['vars'][name] = appenddecl(
groupcache[groupcounter]['vars'][name], groupcache[groupcounter]['vars'][result])
# if groupcounter>1: # name is interfaced
try:
groupcache[groupcounter - 2]['interfaced'].append(name)
except:
pass
if block == 'function':
t = typespattern[0].match(m.group('before') + ' ' + name)
if t:
typespec, selector, attr, edecl = cracktypespec0(
t.group('this'), t.group('after'))
updatevars(typespec, selector, attr, edecl)
if case in ['call', 'callfun']:
grouplist[groupcounter - 1].append(groupcache[groupcounter])
grouplist[groupcounter - 1][-1]['body'] = grouplist[groupcounter]
del grouplist[groupcounter]
groupcounter = groupcounter - 1 # end routine
grouplist[groupcounter - 1].append(groupcache[groupcounter])
grouplist[groupcounter - 1][-1]['body'] = grouplist[groupcounter]
del grouplist[groupcounter]
groupcounter = groupcounter - 1 # end interface
elif case == 'entry':
name, args, result, bind = _resolvenameargspattern(m.group('after'))
if name is not None:
if args:
args = rmbadname([x.strip()
for x in markoutercomma(args).split('@,@')])
else:
args = []
assert result is None, repr(result)
groupcache[groupcounter]['entry'][name] = args
previous_context = ('entry', name, groupcounter)
elif case == 'type':
typespec, selector, attr, edecl = cracktypespec0(
block, m.group('after'))
last_name = updatevars(typespec, selector, attr, edecl)
if last_name is not None:
previous_context = ('variable', last_name, groupcounter)
elif case in ['dimension', 'intent', 'optional', 'required', 'external', 'public', 'private', 'intrisic']:
edecl = groupcache[groupcounter]['vars']
ll = m.group('after').strip()
i = ll.find('::')
if i < 0 and case == 'intent':
i = markouterparen(ll).find('@)@') - 2
ll = ll[:i + 1] + '::' + ll[i + 1:]
i = ll.find('::')
if ll[i:] == '::' and 'args' in groupcache[groupcounter]:
outmess('All arguments will have attribute %s%s\n' %
(m.group('this'), ll[:i]))
ll = ll + ','.join(groupcache[groupcounter]['args'])
if i < 0:
i = 0
pl = ''
else:
pl = ll[:i].strip()
ll = ll[i + 2:]
ch = markoutercomma(pl).split('@,@')
if len(ch) > 1:
pl = ch[0]
outmess('analyzeline: cannot handle multiple attributes without type specification. Ignoring %r.\n' % (
','.join(ch[1:])))
last_name = None
for e in [x.strip() for x in markoutercomma(ll).split('@,@')]:
m1 = namepattern.match(e)
if not m1:
if case in ['public', 'private']:
k = ''
else:
print(m.groupdict())
outmess('analyzeline: no name pattern found in %s statement for %s. Skipping.\n' % (
case, repr(e)))
continue
else:
k = rmbadname1(m1.group('name'))
if k not in edecl:
edecl[k] = {}
if case == 'dimension':
ap = case + m1.group('after')
if case == 'intent':
ap = m.group('this') + pl
if _intentcallbackpattern.match(ap):
if k not in groupcache[groupcounter]['args']:
if groupcounter > 1:
if '__user__' not in groupcache[groupcounter - 2]['name']:
outmess(
'analyzeline: missing __user__ module (could be nothing)\n')
# fixes ticket 1693
if k != groupcache[groupcounter]['name']:
outmess('analyzeline: appending intent(callback) %s'
' to %s arguments\n' % (k, groupcache[groupcounter]['name']))
groupcache[groupcounter]['args'].append(k)
else:
errmess(
'analyzeline: intent(callback) %s is ignored' % (k))
else:
errmess('analyzeline: intent(callback) %s is already'
' in argument list' % (k))
if case in ['optional', 'required', 'public', 'external', 'private', 'intrisic']:
ap = case
if 'attrspec' in edecl[k]:
edecl[k]['attrspec'].append(ap)
else:
edecl[k]['attrspec'] = [ap]
if case == 'external':
if groupcache[groupcounter]['block'] == 'program':
outmess('analyzeline: ignoring program arguments\n')
continue
if k not in groupcache[groupcounter]['args']:
continue
if 'externals' not in groupcache[groupcounter]:
groupcache[groupcounter]['externals'] = []
groupcache[groupcounter]['externals'].append(k)
last_name = k
groupcache[groupcounter]['vars'] = edecl
if last_name is not None:
previous_context = ('variable', last_name, groupcounter)
elif case == 'parameter':
edecl = groupcache[groupcounter]['vars']
ll = m.group('after').strip()[1:-1]
last_name = None
for e in markoutercomma(ll).split('@,@'):
try:
k, initexpr = [x.strip() for x in e.split('=')]
except:
outmess(
'analyzeline: could not extract name,expr in parameter statement "%s" of "%s"\n' % (e, ll))
continue
params = get_parameters(edecl)
k = rmbadname1(k)
if k not in edecl:
edecl[k] = {}
if '=' in edecl[k] and (not edecl[k]['='] == initexpr):
outmess('analyzeline: Overwriting the value of parameter "%s" ("%s") with "%s".\n' % (
k, edecl[k]['='], initexpr))
t = determineexprtype(initexpr, params)
if t:
if t.get('typespec') == 'real':
tt = list(initexpr)
for m in real16pattern.finditer(initexpr):
tt[m.start():m.end()] = list(
initexpr[m.start():m.end()].lower().replace('d', 'e'))
initexpr = ''.join(tt)
elif t.get('typespec') == 'complex':
initexpr = initexpr[1:].lower().replace('d', 'e').\
replace(',', '+1j*(')
try:
v = eval(initexpr, {}, params)
except (SyntaxError, NameError, TypeError) as msg:
errmess('analyzeline: Failed to evaluate %r. Ignoring: %s\n'
% (initexpr, msg))
continue
edecl[k]['='] = repr(v)
if 'attrspec' in edecl[k]:
edecl[k]['attrspec'].append('parameter')
else:
edecl[k]['attrspec'] = ['parameter']
last_name = k
groupcache[groupcounter]['vars'] = edecl
if last_name is not None:
previous_context = ('variable', last_name, groupcounter)
elif case == 'implicit':
if m.group('after').strip().lower() == 'none':
groupcache[groupcounter]['implicit'] = None
elif m.group('after'):
if 'implicit' in groupcache[groupcounter]:
impl = groupcache[groupcounter]['implicit']
else:
impl = {}
if impl is None:
outmess(
'analyzeline: Overwriting earlier "implicit none" statement.\n')
impl = {}
for e in markoutercomma(m.group('after')).split('@,@'):
decl = {}
m1 = re.match(
r'\s*(?P<this>.*?)\s*(\(\s*(?P<after>[a-z-, ]+)\s*\)\s*|)\Z', e, re.I)
if not m1:
outmess(
'analyzeline: could not extract info of implicit statement part "%s"\n' % (e))
continue
m2 = typespattern4implicit.match(m1.group('this'))
if not m2:
outmess(
'analyzeline: could not extract types pattern of implicit statement part "%s"\n' % (e))
continue
typespec, selector, attr, edecl = cracktypespec0(
m2.group('this'), m2.group('after'))
kindselect, charselect, typename = cracktypespec(
typespec, selector)
decl['typespec'] = typespec
decl['kindselector'] = kindselect
decl['charselector'] = charselect
decl['typename'] = typename
for k in list(decl.keys()):
if not decl[k]:
del decl[k]
for r in markoutercomma(m1.group('after')).split('@,@'):
if '-' in r:
try:
begc, endc = [x.strip() for x in r.split('-')]
except:
outmess(
'analyzeline: expected "<char>-<char>" instead of "%s" in range list of implicit statement\n' % r)
continue
else:
begc = endc = r.strip()
if not len(begc) == len(endc) == 1:
outmess(
'analyzeline: expected "<char>-<char>" instead of "%s" in range list of implicit statement (2)\n' % r)
continue
for o in range(ord(begc), ord(endc) + 1):
impl[chr(o)] = decl
groupcache[groupcounter]['implicit'] = impl
elif case == 'data':
ll = []
dl = ''
il = ''
f = 0
fc = 1
inp = 0
for c in m.group('after'):
if not inp:
if c == "'":
fc = not fc
if c == '/' and fc:
f = f + 1
continue
if c == '(':
inp = inp + 1
elif c == ')':
inp = inp - 1
if f == 0:
dl = dl + c
elif f == 1:
il = il + c
elif f == 2:
dl = dl.strip()
if dl.startswith(','):
dl = dl[1:].strip()
ll.append([dl, il])
dl = c
il = ''
f = 0
if f == 2:
dl = dl.strip()
if dl.startswith(','):
dl = dl[1:].strip()
ll.append([dl, il])
vars = {}
if 'vars' in groupcache[groupcounter]:
vars = groupcache[groupcounter]['vars']
last_name = None
for l in ll:
l = [x.strip() for x in l]
if l[0][0] == ',':
l[0] = l[0][1:]
if l[0][0] == '(':
outmess(
'analyzeline: implied-DO list "%s" is not supported. Skipping.\n' % l[0])
continue
i = 0
j = 0
llen = len(l[1])
for v in rmbadname([x.strip() for x in markoutercomma(l[0]).split('@,@')]):
if v[0] == '(':
outmess(
'analyzeline: implied-DO list "%s" is not supported. Skipping.\n' % v)
# XXX: subsequent init expressions may get wrong values.
# Ignoring since data statements are irrelevant for
# wrapping.
continue
fc = 0
while (i < llen) and (fc or not l[1][i] == ','):
if l[1][i] == "'":
fc = not fc
i = i + 1
i = i + 1
if v not in vars:
vars[v] = {}
if '=' in vars[v] and not vars[v]['='] == l[1][j:i - 1]:
outmess('analyzeline: changing init expression of "%s" ("%s") to "%s"\n' % (
v, vars[v]['='], l[1][j:i - 1]))
vars[v]['='] = l[1][j:i - 1]
j = i
last_name = v
groupcache[groupcounter]['vars'] = vars
if last_name is not None:
previous_context = ('variable', last_name, groupcounter)
elif case == 'common':
line = m.group('after').strip()
if not line[0] == '/':
line = '//' + line
cl = []
f = 0
bn = ''
ol = ''
for c in line:
if c == '/':
f = f + 1
continue
if f >= 3:
bn = bn.strip()
if not bn:
bn = '_BLNK_'
cl.append([bn, ol])
f = f - 2
bn = ''
ol = ''
if f % 2:
bn = bn + c
else:
ol = ol + c
bn = bn.strip()
if not bn:
bn = '_BLNK_'
cl.append([bn, ol])
commonkey = {}
if 'common' in groupcache[groupcounter]:
commonkey = groupcache[groupcounter]['common']
for c in cl:
if c[0] not in commonkey:
commonkey[c[0]] = []
for i in [x.strip() for x in markoutercomma(c[1]).split('@,@')]:
if i:
commonkey[c[0]].append(i)
groupcache[groupcounter]['common'] = commonkey
previous_context = ('common', bn, groupcounter)
elif case == 'use':
m1 = re.match(
r'\A\s*(?P<name>\b[\w]+\b)\s*((,(\s*\bonly\b\s*:|(?P<notonly>))\s*(?P<list>.*))|)\s*\Z', m.group('after'), re.I)
if m1:
mm = m1.groupdict()
if 'use' not in groupcache[groupcounter]:
groupcache[groupcounter]['use'] = {}
name = m1.group('name')
groupcache[groupcounter]['use'][name] = {}
isonly = 0
if 'list' in mm and mm['list'] is not None:
if 'notonly' in mm and mm['notonly'] is None:
isonly = 1
groupcache[groupcounter]['use'][name]['only'] = isonly
ll = [x.strip() for x in mm['list'].split(',')]
rl = {}
for l in ll:
if '=' in l:
m2 = re.match(
r'\A\s*(?P<local>\b[\w]+\b)\s*=\s*>\s*(?P<use>\b[\w]+\b)\s*\Z', l, re.I)
if m2:
rl[m2.group('local').strip()] = m2.group(
'use').strip()
else:
outmess(
'analyzeline: Not local=>use pattern found in %s\n' % repr(l))
else:
rl[l] = l
groupcache[groupcounter]['use'][name]['map'] = rl
else:
pass
else:
print(m.groupdict())
outmess('analyzeline: Could not crack the use statement.\n')
elif case in ['f2pyenhancements']:
if 'f2pyenhancements' not in groupcache[groupcounter]:
groupcache[groupcounter]['f2pyenhancements'] = {}
d = groupcache[groupcounter]['f2pyenhancements']
if m.group('this') == 'usercode' and 'usercode' in d:
if isinstance(d['usercode'], str):
d['usercode'] = [d['usercode']]
d['usercode'].append(m.group('after'))
else:
d[m.group('this')] = m.group('after')
elif case == 'multiline':
if previous_context is None:
if verbose:
outmess('analyzeline: No context for multiline block.\n')
return
gc = groupcounter
appendmultiline(groupcache[gc],
previous_context[:2],
m.group('this'))
else:
if verbose > 1:
print(m.groupdict())
outmess('analyzeline: No code implemented for line.\n')
def appendmultiline(group, context_name, ml):
    """Record the multiline block *ml* for *context_name* inside *group*.

    Lines accumulate in ``group['f2pymultilines']``, a mapping from
    context tuples (e.g. ``('variable', name)``) to lists of strings.
    """
    multilines = group.setdefault('f2pymultilines', {})
    multilines.setdefault(context_name, []).append(ml)
def cracktypespec0(typespec, ll):
    """Split a raw type declaration into (typespec, selector, attr, rest).

    *typespec* is normalized to lower case ('double complex' / 'double
    precision' are canonicalized).  *ll* is the remainder of the
    declaration line; the kind/char selector is peeled off via the
    module-level ``selectpattern`` and any '::'-separated attribute list
    is extracted into *attr*.  Returns None (implicitly) when no
    selector pattern matches.
    """
    selector = None
    attr = None
    if re.match(r'double\s*complex', typespec, re.I):
        typespec = 'double complex'
    elif re.match(r'double\s*precision', typespec, re.I):
        typespec = 'double precision'
    else:
        typespec = typespec.strip().lower()
    # markouterparen protects nested parens so selectpattern can match the
    # outermost selector only.
    m1 = selectpattern.match(markouterparen(ll))
    if not m1:
        outmess(
            'cracktypespec0: no kind/char_selector pattern found for line.\n')
        return
    d = m1.groupdict()
    for k in list(d.keys()):
        d[k] = unmarkouterparen(d[k])
    # Only these types carry a selector; for others the whole line stays.
    if typespec in ['complex', 'integer', 'logical', 'real', 'character', 'type']:
        selector = d['this']
        ll = d['after']
    i = ll.find('::')
    if i >= 0:
        attr = ll[:i].strip()
        ll = ll[i + 2:]
    return typespec, selector, attr, ll
#####
# Patterns used below to dissect entity declarations.  Commas and
# parentheses inside these strings have typically been escaped by
# markoutercomma/markouterparen ('@,@', '@(@', '@)@') before matching.
# Entity name followed by the remainder of its declaration.
namepattern = re.compile(r'\s*(?P<name>\b[\w]+\b)\s*(?P<after>.*)\s*\Z', re.I)
# Kind selector: "(kind=<kind>)", "(<kind>)" or "*<kind2>".
kindselector = re.compile(
    r'\s*(\(\s*(kind\s*=)?\s*(?P<kind>.*)\s*\)|[*]\s*(?P<kind2>.*?))\s*\Z', re.I)
# Character selector: "(<lenkind>)" or "*<charlen>".
charselector = re.compile(
    r'\s*(\((?P<lenkind>.*)\)|[*]\s*(?P<charlen>.*))\s*\Z', re.I)
# len/kind combinations inside a character selector.
lenkindpattern = re.compile(
    r'\s*(kind\s*=\s*(?P<kind>.*?)\s*(@,@\s*len\s*=\s*(?P<len>.*)|)|(len\s*=\s*|)(?P<len2>.*?)\s*(@,@\s*(kind\s*=\s*|)(?P<kind2>.*)|))\s*\Z', re.I)
# Length / array-spec / initializer tail of an entity declaration.
lenarraypattern = re.compile(
    r'\s*(@\(@\s*(?!/)\s*(?P<array>.*?)\s*@\)@\s*[*]\s*(?P<len>.*?)|([*]\s*(?P<len2>.*?)|)\s*(@\(@\s*(?!/)\s*(?P<array2>.*?)\s*@\)@|))\s*(=\s*(?P<init>.*?)|(@\(@|)/\s*(?P<init2>.*?)\s*/(@\)@|)|)\s*\Z', re.I)
def removespaces(expr):
    """Strip *expr* and drop interior spaces adjacent to an operator,
    bracket, '=' or another space; spaces between two ordinary tokens
    are kept."""
    expr = expr.strip()
    if len(expr) <= 1:
        return expr
    separators = "()[]{}=+-/* "
    kept = [expr[0]]
    for idx in range(1, len(expr) - 1):
        ch = expr[idx]
        # A space is redundant when either neighbour is a separator.
        if ch == ' ' and (expr[idx + 1] in separators
                          or expr[idx - 1] in separators):
            continue
        kept.append(ch)
    kept.append(expr[-1])
    return ''.join(kept)
def markinnerspaces(line):
    """Replace each space that lies inside a quoted string with '@_@'.

    Fix: the original tracked quoting with a counter that only ever
    incremented (its decrement branch repeated the same condition and was
    dead code), so after the first quoted string closed the state never
    returned to "outside" and spaces in any later quoted string were left
    unmarked.  A boolean in/out toggle handles any number of strings.
    Backslash-escaped quotes and backslashes are passed through without
    affecting the quote state, as before.
    """
    fragment = ''
    inside = False          # are we currently inside a quoted string?
    current_quote = None    # the quote character that opened it
    escaped = ''            # previous character, for backslash escapes
    for c in line:
        if escaped == '\\' and c in ['\\', '\'', '"']:
            # Escaped quote/backslash: copy verbatim, do not toggle state.
            fragment += c
            escaped = c
            continue
        if not inside and c in ['\'', '"']:
            current_quote = c
        if c == current_quote:
            inside = not inside
        elif c == ' ' and inside:
            fragment += '@_@'
            continue
        fragment += c
        escaped = c
    return fragment
def updatevars(typespec, selector, attrspec, entitydecl):
    """Merge one declaration statement into the current group's 'vars'.

    Splits *entitydecl* into individual entities, then for each entity
    merges type/kind/char/attr information into
    ``groupcache[groupcounter]['vars'][name]``.  Conflicting re-declarations
    are reported via outmess/errmess and ignored.  Returns the last entity
    name processed (or None), used by the caller to track context.
    """
    global groupcache, groupcounter
    last_name = None
    kindselect, charselect, typename = cracktypespec(typespec, selector)
    if attrspec:
        # Normalize each attribute: lower-case its leading keyword only.
        attrspec = [x.strip() for x in markoutercomma(attrspec).split('@,@')]
        l = []
        c = re.compile(r'(?P<start>[a-zA-Z]+)')
        for a in attrspec:
            if not a:
                continue
            m = c.match(a)
            if m:
                s = m.group('start').lower()
                a = s + a[len(s):]
            l.append(a)
        attrspec = l
    # Split the entity list on outer commas; inner (quoted) spaces were
    # protected as '@_@' and are restored after the split.
    el = [x.strip() for x in markoutercomma(entitydecl).split('@,@')]
    el1 = []
    for e in el:
        for e1 in [x.strip() for x in markoutercomma(removespaces(markinnerspaces(e)), comma=' ').split('@ @')]:
            if e1:
                el1.append(e1.replace('@_@', ' '))
    for e in el1:
        m = namepattern.match(e)
        if not m:
            outmess(
                'updatevars: no name pattern found for entity=%s. Skipping.\n' % (repr(e)))
            continue
        ename = rmbadname1(m.group('name'))
        edecl = {}
        if ename in groupcache[groupcounter]['vars']:
            # Entity already known: merge, never overwrite existing info.
            edecl = groupcache[groupcounter]['vars'][ename].copy()
            not_has_typespec = 'typespec' not in edecl
            if not_has_typespec:
                edecl['typespec'] = typespec
            elif typespec and (not typespec == edecl['typespec']):
                outmess('updatevars: attempt to change the type of "%s" ("%s") to "%s". Ignoring.\n' % (
                    ename, edecl['typespec'], typespec))
            if 'kindselector' not in edecl:
                edecl['kindselector'] = copy.copy(kindselect)
            elif kindselect:
                for k in list(kindselect.keys()):
                    if k in edecl['kindselector'] and (not kindselect[k] == edecl['kindselector'][k]):
                        outmess('updatevars: attempt to change the kindselector "%s" of "%s" ("%s") to "%s". Ignoring.\n' % (
                            k, ename, edecl['kindselector'][k], kindselect[k]))
                    else:
                        edecl['kindselector'][k] = copy.copy(kindselect[k])
            if 'charselector' not in edecl and charselect:
                if not_has_typespec:
                    edecl['charselector'] = charselect
                else:
                    errmess('updatevars:%s: attempt to change empty charselector to %r. Ignoring.\n'
                            % (ename, charselect))
            elif charselect:
                for k in list(charselect.keys()):
                    if k in edecl['charselector'] and (not charselect[k] == edecl['charselector'][k]):
                        outmess('updatevars: attempt to change the charselector "%s" of "%s" ("%s") to "%s". Ignoring.\n' % (
                            k, ename, edecl['charselector'][k], charselect[k]))
                    else:
                        edecl['charselector'][k] = copy.copy(charselect[k])
            if 'typename' not in edecl:
                edecl['typename'] = typename
            elif typename and (not edecl['typename'] == typename):
                outmess('updatevars: attempt to change the typename of "%s" ("%s") to "%s". Ignoring.\n' % (
                    ename, edecl['typename'], typename))
            if 'attrspec' not in edecl:
                edecl['attrspec'] = copy.copy(attrspec)
            elif attrspec:
                for a in attrspec:
                    if a not in edecl['attrspec']:
                        edecl['attrspec'].append(a)
        else:
            # Fresh entity: take everything from this declaration.
            edecl['typespec'] = copy.copy(typespec)
            edecl['kindselector'] = copy.copy(kindselect)
            edecl['charselector'] = copy.copy(charselect)
            edecl['typename'] = typename
            edecl['attrspec'] = copy.copy(attrspec)
        if m.group('after'):
            # Crack the "(dims)*len = init" tail of the entity declaration.
            m1 = lenarraypattern.match(markouterparen(m.group('after')))
            if m1:
                d1 = m1.groupdict()
                # Fold the alternative capture groups (len2/array2/init2)
                # into their primary names.
                for lk in ['len', 'array', 'init']:
                    if d1[lk + '2'] is not None:
                        d1[lk] = d1[lk + '2']
                        del d1[lk + '2']
                for k in list(d1.keys()):
                    if d1[k] is not None:
                        d1[k] = unmarkouterparen(d1[k])
                    else:
                        del d1[k]
                if 'len' in d1 and 'array' in d1:
                    if d1['len'] == '':
                        d1['len'] = d1['array']
                        del d1['array']
                    else:
                        d1['array'] = d1['array'] + ',' + d1['len']
                        del d1['len']
                        errmess('updatevars: "%s %s" is mapped to "%s %s(%s)"\n' % (
                            typespec, e, typespec, ename, d1['array']))
                if 'array' in d1:
                    dm = 'dimension(%s)' % d1['array']
                    if 'attrspec' not in edecl or (not edecl['attrspec']):
                        edecl['attrspec'] = [dm]
                    else:
                        edecl['attrspec'].append(dm)
                        # Reject a second, different dimension spec.
                        for dm1 in edecl['attrspec']:
                            if dm1[:9] == 'dimension' and dm1 != dm:
                                del edecl['attrspec'][-1]
                                errmess('updatevars:%s: attempt to change %r to %r. Ignoring.\n'
                                        % (ename, dm1, dm))
                                break
                if 'len' in d1:
                    # "*len" maps to kind for numeric types, to char length
                    # for character type.
                    if typespec in ['complex', 'integer', 'logical', 'real']:
                        if ('kindselector' not in edecl) or (not edecl['kindselector']):
                            edecl['kindselector'] = {}
                        edecl['kindselector']['*'] = d1['len']
                    elif typespec == 'character':
                        if ('charselector' not in edecl) or (not edecl['charselector']):
                            edecl['charselector'] = {}
                        if 'len' in edecl['charselector']:
                            del edecl['charselector']['len']
                        edecl['charselector']['*'] = d1['len']
                if 'init' in d1:
                    if '=' in edecl and (not edecl['='] == d1['init']):
                        outmess('updatevars: attempt to change the init expression of "%s" ("%s") to "%s". Ignoring.\n' % (
                            ename, edecl['='], d1['init']))
                    else:
                        edecl['='] = d1['init']
            else:
                outmess('updatevars: could not crack entity declaration "%s". Ignoring.\n' % (
                    ename + m.group('after')))
        # Drop empty entries before storing.
        for k in list(edecl.keys()):
            if not edecl[k]:
                del edecl[k]
        groupcache[groupcounter]['vars'][ename] = edecl
        if 'varnames' in groupcache[groupcounter]:
            groupcache[groupcounter]['varnames'].append(ename)
        last_name = ename
    return last_name
def cracktypespec(typespec, selector):
    """Parse *selector* according to *typespec*.

    Returns (kindselect, charselect, typename):
    - numeric types ('complex', 'integer', 'logical', 'real') yield a
      kind-selector dict (with '*' holding the "*<kind>" form),
    - 'character' yields a char-selector dict (len/kind/'*'),
    - 'type' yields the derived-type name.
    Returns None (implicitly) when the selector fails to match its
    pattern.  Empty entries are pruned and names are passed through
    rmbadname1.
    """
    kindselect = None
    charselect = None
    typename = None
    if selector:
        if typespec in ['complex', 'integer', 'logical', 'real']:
            kindselect = kindselector.match(selector)
            if not kindselect:
                outmess(
                    'cracktypespec: no kindselector pattern found for %s\n' % (repr(selector)))
                return
            kindselect = kindselect.groupdict()
            # "*<kind>" form is stored under the '*' key.
            kindselect['*'] = kindselect['kind2']
            del kindselect['kind2']
            for k in list(kindselect.keys()):
                if not kindselect[k]:
                    del kindselect[k]
            for k, i in list(kindselect.items()):
                kindselect[k] = rmbadname1(i)
        elif typespec == 'character':
            charselect = charselector.match(selector)
            if not charselect:
                outmess(
                    'cracktypespec: no charselector pattern found for %s\n' % (repr(selector)))
                return
            charselect = charselect.groupdict()
            charselect['*'] = charselect['charlen']
            del charselect['charlen']
            if charselect['lenkind']:
                # Split "(len=...,kind=...)" into separate len/kind keys.
                lenkind = lenkindpattern.match(
                    markoutercomma(charselect['lenkind']))
                lenkind = lenkind.groupdict()
                for lk in ['len', 'kind']:
                    if lenkind[lk + '2']:
                        lenkind[lk] = lenkind[lk + '2']
                    charselect[lk] = lenkind[lk]
                    del lenkind[lk + '2']
                del charselect['lenkind']
            for k in list(charselect.keys()):
                if not charselect[k]:
                    del charselect[k]
            for k, i in list(charselect.items()):
                charselect[k] = rmbadname1(i)
        elif typespec == 'type':
            typename = re.match(r'\s*\(\s*(?P<name>\w+)\s*\)', selector, re.I)
            if typename:
                typename = typename.group('name')
            else:
                outmess('cracktypespec: no typename found in %s\n' %
                        (repr(typespec + selector)))
        else:
            outmess('cracktypespec: no selector used for %s\n' %
                    (repr(selector)))
    return kindselect, charselect, typename
######
def setattrspec(decl, attr, force=0):
    """Add attribute *attr* to decl['attrspec'], respecting conflicts.

    'static'/'automatic' and 'public'/'private' are mutually exclusive
    pairs: *attr* is silently skipped when its counterpart is already
    present.  With *force* true the attribute is appended unconditionally
    (possibly duplicating, as before).  Returns *decl* (a fresh dict when
    a falsy one was passed).

    Fix: previously a conflicting attribute fell through the flattened
    elif-chain into the final else branch and was appended anyway,
    defeating every exclusivity guard.
    """
    if not decl:
        decl = {}
    if not attr:
        return decl
    if 'attrspec' not in decl:
        decl['attrspec'] = [attr]
        return decl
    if force:
        decl['attrspec'].append(attr)
    if attr in decl['attrspec']:
        return decl
    # Mutually exclusive attribute pairs.
    conflicts = {'static': 'automatic', 'automatic': 'static',
                 'public': 'private', 'private': 'public'}
    counterpart = conflicts.get(attr)
    if counterpart is None or counterpart not in decl['attrspec']:
        decl['attrspec'].append(attr)
    return decl
def setkindselector(decl, sel, force=0):
    """Merge kind-selector mapping *sel* into decl['kindselector'].

    Existing entries win unless *force* is true; when *decl* has no
    'kindselector' yet, *sel* is installed directly.  Returns *decl*
    (a fresh dict when a falsy one was passed).
    """
    if not decl:
        decl = {}
    if not sel:
        return decl
    if 'kindselector' not in decl:
        decl['kindselector'] = sel
        return decl
    target = decl['kindselector']
    for key, value in sel.items():
        if force or key not in target:
            target[key] = value
    return decl
def setcharselector(decl, sel, force=0):
    """Merge char-selector mapping *sel* into decl['charselector'].

    Mirror of setkindselector: existing entries win unless *force* is
    true; a missing 'charselector' key receives *sel* directly.  Returns
    *decl* (a fresh dict when a falsy one was passed).
    """
    if not decl:
        decl = {}
    if not sel:
        return decl
    existing = decl.setdefault('charselector', sel)
    if existing is sel:
        return decl
    for key in sel:
        if force or key not in existing:
            existing[key] = sel[key]
    return decl
def getblockname(block, unknown='unknown'):
    """Return the block's 'name' entry, or *unknown* when absent."""
    return block.get('name', unknown)
# post processing
def setmesstext(block):
    """Update the global message-position text from *block*.

    Best effort: when *block* lacks 'from'/'name' (or is not
    subscriptable) the previous position text is kept.

    Fix: the bare ``except:`` also swallowed KeyboardInterrupt and
    SystemExit; narrowed to ``except Exception``.
    """
    global filepositiontext
    try:
        filepositiontext = 'In: %s:%s\n' % (block['from'], block['name'])
    except Exception:
        # Deliberately best-effort: some blocks carry no position info.
        pass
def get_usedict(block):
    """Collect the 'use' mappings of *block*, inheriting from ancestors.

    Walks the 'parent_block' chain recursively; entries defined closer
    to *block* override inherited ones.
    """
    merged = {}
    if 'parent_block' in block:
        merged = get_usedict(block['parent_block'])
    merged.update(block.get('use', {}))
    return merged
def get_useparameters(block, param_map=None):
    """Collect parameter values pulled in via USE statements of *block*.

    For every used module known in the global ``f90modulevars``, evaluates
    its parameters and merges them into *param_map* (later modules
    override earlier entries, with a warning).  Rename mappings
    ('local => use') are not implemented yet.  Returns *param_map*.
    """
    global f90modulevars
    if param_map is None:
        param_map = {}
    usedict = get_usedict(block)
    if not usedict:
        return param_map
    for usename, mapping in list(usedict.items()):
        usename = usename.lower()
        if usename not in f90modulevars:
            outmess('get_useparameters: no module %s info used by %s\n' %
                    (usename, block.get('name')))
            continue
        mvars = f90modulevars[usename]
        params = get_parameters(mvars)
        if not params:
            continue
        # XXX: apply mapping
        if mapping:
            errmess('get_useparameters: mapping for %s not impl.' % (mapping))
        for k, v in list(params.items()):
            if k in param_map:
                outmess('get_useparameters: overriding parameter %s with'
                        ' value from module %s' % (repr(k), repr(usename)))
            param_map[k] = v
    return param_map
def postcrack2(block, tab='', param_map=None):
    """Second post-processing pass: resolve kind parameters from modules.

    Recurses over a block (or list of blocks) and replaces symbolic
    'kind' values in variable kind-selectors with parameter values
    collected via USE statements.  No-op unless the global
    ``f90modulevars`` has been populated.  Returns the (mutated) block.
    """
    global f90modulevars
    if not f90modulevars:
        return block
    if isinstance(block, list):
        ret = []
        for g in block:
            g = postcrack2(g, tab=tab + '\t', param_map=param_map)
            ret.append(g)
        return ret
    setmesstext(block)
    outmess('%sBlock: %s\n' % (tab, block['name']), 0)
    if param_map is None:
        param_map = get_useparameters(block)
    if param_map is not None and 'vars' in block:
        vars = block['vars']
        for n in list(vars.keys()):
            var = vars[n]
            if 'kindselector' in var:
                kind = var['kindselector']
                if 'kind' in kind:
                    val = kind['kind']
                    # Substitute a module parameter for the symbolic kind.
                    if val in param_map:
                        kind['kind'] = param_map[val]
    new_body = []
    for b in block['body']:
        b = postcrack2(b, tab=tab + '\t', param_map=param_map)
        new_body.append(b)
    block['body'] = new_body
    return block
def postcrack(block, args=None, tab=''):
    """First post-processing pass over a crack-result block (or list).

    Analyzes arguments, common blocks and variables, recurses into the
    body, and builds ``__user__`` callback modules for declared externals.
    For a list input, user routines are sorted to appear first.

    TODO:
          function return values
          determine expression types if in argument list
    """
    global usermodules, onlyfunctions
    if isinstance(block, list):
        gret = []
        uret = []
        for g in block:
            setmesstext(g)
            g = postcrack(g, tab=tab + '\t')
            # sort user routines to appear first
            if 'name' in g and '__user__' in g['name']:
                uret.append(g)
            else:
                gret.append(g)
        return uret + gret
    setmesstext(block)
    # NOTE(review): the `and` below means a dict without 'block' passes
    # silently and a non-dict may raise TypeError on the `in` test before
    # reaching the Exception -- looks like `or` was intended; confirm
    # before changing.
    if not isinstance(block, dict) and 'block' not in block:
        raise Exception('postcrack: Expected block dictionary instead of ' +
                        str(block))
    if 'name' in block and not block['name'] == 'unknown_interface':
        outmess('%sBlock: %s\n' % (tab, block['name']), 0)
    block = analyzeargs(block)
    block = analyzecommon(block)
    block['vars'] = analyzevars(block)
    block['sortvars'] = sortvarnames(block['vars'])
    if 'args' in block and block['args']:
        args = block['args']
    block['body'] = analyzebody(block, args, tab=tab)
    userisdefined = []
    if 'use' in block:
        useblock = block['use']
        for k in list(useblock.keys()):
            if '__user__' in k:
                userisdefined.append(k)
    else:
        useblock = {}
    name = ''
    if 'name' in block:
        name = block['name']
    # and not userisdefined: # Build a __user__ module
    if 'externals' in block and block['externals']:
        interfaced = []
        if 'interfaced' in block:
            interfaced = block['interfaced']
        mvars = copy.copy(block['vars'])
        if name:
            mname = name + '__user__routines'
        else:
            mname = 'unknown__user__routines'
        # Pick a unique module name among already defined user modules.
        if mname in userisdefined:
            i = 1
            while '%s_%i' % (mname, i) in userisdefined:
                i = i + 1
            mname = '%s_%i' % (mname, i)
        interface = {'block': 'interface', 'body': [],
                     'vars': {}, 'name': name + '_user_interface'}
        for e in block['externals']:
            if e in interfaced:
                # Move the matching interface body into the user module.
                edef = []
                j = -1
                for b in block['body']:
                    j = j + 1
                    if b['block'] == 'interface':
                        i = -1
                        for bb in b['body']:
                            i = i + 1
                            if 'name' in bb and bb['name'] == e:
                                edef = copy.copy(bb)
                                del b['body'][i]
                                break
                        if edef:
                            if not b['body']:
                                del block['body'][j]
                            del interfaced[interfaced.index(e)]
                            break
                interface['body'].append(edef)
            else:
                if e in mvars and not isexternal(mvars[e]):
                    interface['vars'][e] = mvars[e]
        if interface['vars'] or interface['body']:
            block['interfaced'] = interfaced
            mblock = {'block': 'python module', 'body': [
                interface], 'vars': {}, 'name': mname, 'interfaced': block['externals']}
            useblock[mname] = {}
            usermodules.append(mblock)
    if useblock:
        block['use'] = useblock
    return block
def sortvarnames(vars):
    """Return variable names with independents first, dependents after,
    each dependent placed only once the names it depends on are emitted.

    Cyclic dependencies are reported via errmess and the remaining names
    are appended in their current order.
    """
    independent = []
    dependent = []
    for name in list(vars.keys()):
        if 'depend' in vars[name] and vars[name]['depend']:
            dependent.append(name)
        else:
            independent.append(name)
    rotations = 0
    limit = len(dependent)
    while dependent:  # XXX: How to catch dependence cycles correctly?
        candidate = dependent[0]
        blocked = False
        for other in dependent[1:]:
            if other in vars[candidate]['depend']:
                blocked = True
                break
        if not blocked:
            independent.append(candidate)
            dependent = dependent[1:]
            limit = len(dependent)
            rotations = 0
            continue
        # Rotate the blocked candidate to the back and try the next one.
        dependent = dependent[1:] + [candidate]
        rotations = rotations + 1
        if rotations > limit:
            errmess('sortvarnames: failed to compute dependencies because'
                    ' of cyclic dependencies between '
                    + ', '.join(dependent) + '\n')
            independent = independent + dependent
            break
    return independent
def analyzecommon(block):
    """Normalize COMMON block members of *block*.

    Each member "name(dims)" is reduced to its bare name in
    block['common'], while the dimension part is moved into the
    variable's attrspec as "dimension(...)".  Newly seen names are
    recorded in block['commonvars'].  Returns the (mutated) block.
    """
    if not hascommon(block):
        return block
    commonvars = []
    for k in list(block['common'].keys()):
        comvars = []
        for e in block['common'][k]:
            m = re.match(
                r'\A\s*\b(?P<name>.*?)\b\s*(\((?P<dims>.*?)\)|)\s*\Z', e, re.I)
            if m:
                dims = []
                if m.group('dims'):
                    dims = [x.strip()
                            for x in markoutercomma(m.group('dims')).split('@,@')]
                n = m.group('name').strip()
                if n in block['vars']:
                    # Known variable: attach the dimension attribute.
                    if 'attrspec' in block['vars'][n]:
                        block['vars'][n]['attrspec'].append(
                            'dimension(%s)' % (','.join(dims)))
                    else:
                        block['vars'][n]['attrspec'] = [
                            'dimension(%s)' % (','.join(dims))]
                else:
                    # Unknown variable: create a minimal entry.
                    if dims:
                        block['vars'][n] = {
                            'attrspec': ['dimension(%s)' % (','.join(dims))]}
                    else:
                        block['vars'][n] = {}
                if n not in commonvars:
                    commonvars.append(n)
            else:
                n = e
                errmess(
                    'analyzecommon: failed to extract "<name>[(<dims>)]" from "%s" in common /%s/.\n' % (e, k))
            comvars.append(n)
        block['common'][k] = comvars
    if 'commonvars' not in block:
        block['commonvars'] = commonvars
    else:
        block['commonvars'] = block['commonvars'] + commonvars
    return block
def analyzebody(block, args, tab=''):
    """Post-process the child blocks of *block*.

    Filters functions/subroutines through the global skipfuncs/onlyfuncs
    lists, saves each routine's Fortran interface text, recurses via
    postcrack, collects python modules into the global ``usermodules``
    and module variables into ``f90modulevars``.  Returns the filtered,
    processed body list.
    """
    global usermodules, skipfuncs, onlyfuncs, f90modulevars
    setmesstext(block)
    body = []
    for b in block['body']:
        b['parent_block'] = block
        if b['block'] in ['function', 'subroutine']:
            # When an explicit arg list is given, keep only routines in it.
            if args is not None and b['name'] not in args:
                continue
            else:
                as_ = b['args']
            if b['name'] in skipfuncs:
                continue
            if onlyfuncs and b['name'] not in onlyfuncs:
                continue
            # Keep a pretty-printed copy of the routine's interface.
            b['saved_interface'] = crack2fortrangen(
                b, '\n' + ' ' * 6, as_interface=True)
        else:
            as_ = args
        b = postcrack(b, as_, tab=tab + '\t')
        if b['block'] == 'interface' and not b['body']:
            # Drop empty interfaces unless they carry f2py enhancements.
            if 'f2pyenhancements' not in b:
                continue
        if b['block'].replace(' ', '') == 'pythonmodule':
            usermodules.append(b)
        else:
            if b['block'] == 'module':
                f90modulevars[b['name']] = b['vars']
            body.append(b)
    return body
def buildimplicitrules(block):
    """Build the implicit typing rules in effect for *block*.

    Returns (implicitrules, attrrules): implicitrules maps a first
    letter to a type declaration (None for IMPLICIT NONE); attrrules
    maps a letter to 'static'/'automatic'.
    NOTE(review): ``implicitrules`` aliases the module-level
    ``defaultimplicitrules`` and the loop below mutates it in place, so
    one block's IMPLICIT statement can leak into later blocks -- confirm
    whether a copy was intended.
    """
    setmesstext(block)
    implicitrules = defaultimplicitrules
    attrrules = {}
    if 'implicit' in block:
        if block['implicit'] is None:
            implicitrules = None
            if verbose > 1:
                outmess(
                    'buildimplicitrules: no implicit rules for routine %s.\n' % repr(block['name']))
        else:
            for k in list(block['implicit'].keys()):
                if block['implicit'][k].get('typespec') not in ['static', 'automatic']:
                    implicitrules[k] = block['implicit'][k]
                else:
                    attrrules[k] = block['implicit'][k]['typespec']
    return implicitrules, attrrules
def myeval(e, g=None, l=None):
    """Evaluate expression *e*; return the result only if it is a plain
    int or float, otherwise raise ValueError.

    The exact ``type(...) in`` check is deliberate: it rejects bools and
    other int/float subclasses.
    """
    result = eval(e, g, l)
    if type(result) in (int, float):
        return result
    raise ValueError('r=%r' % (result))
# Matches a bare identifier (a lone variable name).
getlincoef_re_1 = re.compile(r'\A\b\w+\b\Z', re.I)
def getlincoef(e, xset):  # e = a*x+b ; x in xset
    """Try to recognize *e* as a linear expression a*x+b in one of the
    names in *xset*.

    Returns (a, b, x) on success, (0, c, None) for a constant, (1, 0, e)
    when *e* is itself a bare name, and (None, None, None) when no
    linear form is detected.  Linearity is probed numerically by
    substituting 0, 1, 0.5 and 1.5 for every occurrence of x.
    """
    try:
        c = int(myeval(e, {}, {}))
        return 0, c, None
    except:
        pass
    if getlincoef_re_1.match(e):
        return 1, 0, e
    len_e = len(e)
    for x in xset:
        if len(x) > len_e:
            continue
        if re.search(r'\w\s*\([^)]*\b' + x + r'\b', e):
            # skip function calls having x as an argument, e.g max(1, x)
            continue
        re_1 = re.compile(r'(?P<before>.*?)\b' + x + r'\b(?P<after>.*)', re.I)
        m = re_1.match(e)
        if m:
            try:
                # Substitute x := 0 everywhere and evaluate to get b.
                m1 = re_1.match(e)
                while m1:
                    ee = '%s(%s)%s' % (
                        m1.group('before'), 0, m1.group('after'))
                    m1 = re_1.match(ee)
                b = myeval(ee, {}, {})
                # x := 1 gives a+b, hence a.
                m1 = re_1.match(e)
                while m1:
                    ee = '%s(%s)%s' % (
                        m1.group('before'), 1, m1.group('after'))
                    m1 = re_1.match(ee)
                a = myeval(ee, {}, {}) - b
                # x := 0.5 as a consistency check of linearity.
                m1 = re_1.match(e)
                while m1:
                    ee = '%s(%s)%s' % (
                        m1.group('before'), 0.5, m1.group('after'))
                    m1 = re_1.match(ee)
                c = myeval(ee, {}, {})
                # computing another point to be sure that expression is linear
                m1 = re_1.match(e)
                while m1:
                    ee = '%s(%s)%s' % (
                        m1.group('before'), 1.5, m1.group('after'))
                    m1 = re_1.match(ee)
                c2 = myeval(ee, {}, {})
                if (a * 0.5 + b == c and a * 1.5 + b == c2):
                    return a, b, x
            except:
                pass
            break
    return None, None, None
# Matches a plain lower-case-led identifier.
_varname_match = re.compile(r'\A[a-z]\w*\Z').match
def getarrlen(dl, args, star='*'):
    """Compute the length expression of a dimension range dl = (lower,
    upper).

    Returns a 3-tuple (length-expr, argument-name, inverse-suffix): the
    second and third items are non-None only when the length depends
    linearly on exactly one name from *args*; the third is the tail of
    an inversion expression used elsewhere to solve for that argument.
    Constant expressions are folded to their repr.
    """
    edl = []
    # Fold each bound to a constant where possible.
    try:
        edl.append(myeval(dl[0], {}, {}))
    except:
        edl.append(dl[0])
    try:
        edl.append(myeval(dl[1], {}, {}))
    except:
        edl.append(dl[1])
    if isinstance(edl[0], int):
        p1 = 1 - edl[0]
        if p1 == 0:
            d = str(dl[1])
        elif p1 < 0:
            d = '%s-%s' % (dl[1], -p1)
        else:
            d = '%s+%s' % (dl[1], p1)
    elif isinstance(edl[1], int):
        p1 = 1 + edl[1]
        if p1 == 0:
            d = '-(%s)' % (dl[0])
        else:
            d = '%s-(%s)' % (p1, dl[0])
    else:
        d = '%s-(%s)+1' % (dl[1], dl[0])
    try:
        return repr(myeval(d, {}, {})), None, None
    except:
        pass
    # Non-constant: look for a linear dependence on an argument name.
    d1, d2 = getlincoef(dl[0], args), getlincoef(dl[1], args)
    if None not in [d1[0], d2[0]]:
        if (d1[0], d2[0]) == (0, 0):
            return repr(d2[1] - d1[1] + 1), None, None
        b = d2[1] - d1[1] + 1
        d1 = (d1[0], 0, d1[2])
        d2 = (d2[0], b, d2[2])
        if d1[0] == 0 and d2[2] in args:
            if b < 0:
                return '%s * %s - %s' % (d2[0], d2[2], -b), d2[2], '+%s)/(%s)' % (-b, d2[0])
            elif b:
                return '%s * %s + %s' % (d2[0], d2[2], b), d2[2], '-%s)/(%s)' % (b, d2[0])
            else:
                return '%s * %s' % (d2[0], d2[2]), d2[2], ')/(%s)' % (d2[0])
        if d2[0] == 0 and d1[2] in args:
            if b < 0:
                return '%s * %s - %s' % (-d1[0], d1[2], -b), d1[2], '+%s)/(%s)' % (-b, -d1[0])
            elif b:
                return '%s * %s + %s' % (-d1[0], d1[2], b), d1[2], '-%s)/(%s)' % (b, -d1[0])
            else:
                return '%s * %s' % (-d1[0], d1[2]), d1[2], ')/(%s)' % (-d1[0])
        if d1[2] == d2[2] and d1[2] in args:
            a = d2[0] - d1[0]
            if not a:
                return repr(b), None, None
            if b < 0:
                return '%s * %s - %s' % (a, d1[2], -b), d2[2], '+%s)/(%s)' % (-b, a)
            elif b:
                return '%s * %s + %s' % (a, d1[2], b), d2[2], '-%s)/(%s)' % (b, a)
            else:
                return '%s * %s' % (a, d1[2]), d2[2], ')/(%s)' % (a)
        # Remaining cases: build a symbolic difference expression; names
        # not in *args* are parenthesized and flagged as undefined.
        if d1[0] == d2[0] == 1:
            c = str(d1[2])
            if c not in args:
                if _varname_match(c):
                    outmess('\tgetarrlen:variable "%s" undefined\n' % (c))
                c = '(%s)' % c
            if b == 0:
                d = '%s-%s' % (d2[2], c)
            elif b < 0:
                d = '%s-%s-%s' % (d2[2], c, -b)
            else:
                d = '%s-%s+%s' % (d2[2], c, b)
        elif d1[0] == 0:
            c2 = str(d2[2])
            if c2 not in args:
                if _varname_match(c2):
                    outmess('\tgetarrlen:variable "%s" undefined\n' % (c2))
                c2 = '(%s)' % c2
            if d2[0] == 1:
                pass
            elif d2[0] == -1:
                c2 = '-%s' % c2
            else:
                c2 = '%s*%s' % (d2[0], c2)
            if b == 0:
                d = c2
            elif b < 0:
                d = '%s-%s' % (c2, -b)
            else:
                d = '%s+%s' % (c2, b)
        elif d2[0] == 0:
            c1 = str(d1[2])
            if c1 not in args:
                if _varname_match(c1):
                    outmess('\tgetarrlen:variable "%s" undefined\n' % (c1))
                c1 = '(%s)' % c1
            if d1[0] == 1:
                c1 = '-%s' % c1
            elif d1[0] == -1:
                c1 = '+%s' % c1
            elif d1[0] < 0:
                c1 = '+%s*%s' % (-d1[0], c1)
            else:
                c1 = '-%s*%s' % (d1[0], c1)
            if b == 0:
                d = c1
            elif b < 0:
                d = '%s-%s' % (c1, -b)
            else:
                d = '%s+%s' % (c1, b)
        else:
            c1 = str(d1[2])
            if c1 not in args:
                if _varname_match(c1):
                    outmess('\tgetarrlen:variable "%s" undefined\n' % (c1))
                c1 = '(%s)' % c1
            if d1[0] == 1:
                c1 = '-%s' % c1
            elif d1[0] == -1:
                c1 = '+%s' % c1
            elif d1[0] < 0:
                c1 = '+%s*%s' % (-d1[0], c1)
            else:
                c1 = '-%s*%s' % (d1[0], c1)
            c2 = str(d2[2])
            if c2 not in args:
                if _varname_match(c2):
                    outmess('\tgetarrlen:variable "%s" undefined\n' % (c2))
                c2 = '(%s)' % c2
            if d2[0] == 1:
                pass
            elif d2[0] == -1:
                c2 = '-%s' % c2
            else:
                c2 = '%s*%s' % (d2[0], c2)
            if b == 0:
                d = '%s%s' % (c2, c1)
            elif b < 0:
                d = '%s%s-%s' % (c2, c1, -b)
            else:
                d = '%s%s+%s' % (c2, c1, b)
    return d, None, None
# Identifier-like words inside an initialization expression.
word_pattern = re.compile(r'\b[a-z][\w$]*\b', re.I)
def _get_depend_dict(name, vars, deps):
    """Compute the transitive dependency list of variable *name*.

    Starts from vars[name]['depend'], adds names referenced by its init
    expression ('=' entry, skipped for strings), then recursively expands
    via *deps* (memo dict, updated in place).  Unknown names get an empty
    list after a warning.  Returns the word list (also stored in deps).
    """
    if name in vars:
        words = vars[name].get('depend', [])
        if '=' in vars[name] and not isstring(vars[name]):
            for word in word_pattern.findall(vars[name]['=']):
                if word not in words and word in vars:
                    words.append(word)
        for word in words[:]:
            for w in deps.get(word, []) \
                    or _get_depend_dict(word, vars, deps):
                if w not in words:
                    words.append(w)
    else:
        outmess('_get_depend_dict: no dependence info for %s\n' % (repr(name)))
        words = []
    deps[name] = words
    return words
def _calc_depend_dict(vars):
    """Build the complete name -> transitive-dependency mapping for *vars*."""
    depend_dict = {}
    for name in list(vars.keys()):
        _get_depend_dict(name, vars, depend_dict)
    return depend_dict
def get_sorted_names(vars):
    """Return the names in *vars* ordered so that dependencies precede
    dependents.

    Repeatedly emits every name whose remaining dependency list is
    empty.  Fix: with cyclic dependencies no name ever resolved and the
    while loop spun forever; a no-progress pass now flushes the
    remaining names in their current order and terminates.  Acyclic
    inputs behave exactly as before.
    """
    depend_dict = _calc_depend_dict(vars)
    names = []
    for name in list(depend_dict.keys()):
        if not depend_dict[name]:
            names.append(name)
            del depend_dict[name]
    while depend_dict:
        progressed = False
        for name, lst in list(depend_dict.items()):
            new_lst = [n for n in lst if n in depend_dict]
            if not new_lst:
                names.append(name)
                del depend_dict[name]
                progressed = True
            else:
                depend_dict[name] = new_lst
        if not progressed:
            # Cyclic dependencies: emit the rest instead of looping forever.
            names.extend(depend_dict)
            break
    return [name for name in names if name in vars]
def _kind_func(string):
    """Crude stand-in for Fortran KIND() used when evaluating parameter
    expressions: 8 for quad/double-precision-looking literals, 4 for
    single-precision ones, otherwise a symbolic 'kind(...)' string.

    XXX: return something sensible.
    """
    literal = string[1:-1] if string[0] in "'\"" else string
    if real16pattern.match(literal):
        return 8
    if real8pattern.match(literal):
        return 4
    return 'kind(' + literal + ')'
def _selected_int_kind_func(r):
# XXX: This should be processor dependent
m = 10 ** r
if m <= 2 ** 8:
return 1
if m <= 2 ** 16:
return 2
if m <= 2 ** 32:
return 4
if m <= 2 ** 63:
return 8
if m <= 2 ** 128:
return 16
return -1
def _selected_real_kind_func(p, r=0, radix=0):
# XXX: This should be processor dependent
# This is only good for 0 <= p <= 20
if p < 7:
return 4
if p < 16:
return 8
machine = platform.machine().lower()
if machine.startswith('power') or machine.startswith('ppc64'):
if p <= 20:
return 16
else:
if p < 19:
return 10
elif p <= 20:
return 16
return -1
def get_parameters(vars, global_params={}):
    """Evaluate the PARAMETER variables in *vars* to Python values.

    Fortran expressions are massaged into Python syntax (logical
    literals, kind() calls, kind-suffix stripping, d-exponents) and then
    eval'ed against earlier parameters plus kind helper functions.
    Expressions that fail to evaluate are kept as strings with a warning.
    Returns a dict of name -> value (lower-cased aliases included).
    NOTE: the mutable default *global_params* is only copied, never
    mutated, so the shared-default pitfall does not bite here.
    """
    params = copy.copy(global_params)
    g_params = copy.copy(global_params)
    # Make the kind intrinsics available to eval below.
    for name, func in [('kind', _kind_func),
                       ('selected_int_kind', _selected_int_kind_func),
                       ('selected_real_kind', _selected_real_kind_func), ]:
        if name not in g_params:
            g_params[name] = func
    param_names = []
    for n in get_sorted_names(vars):
        if 'attrspec' in vars[n] and 'parameter' in vars[n]['attrspec']:
            param_names.append(n)
    kind_re = re.compile(r'\bkind\s*\(\s*(?P<value>.*)\s*\)', re.I)
    selected_int_kind_re = re.compile(
        r'\bselected_int_kind\s*\(\s*(?P<value>.*)\s*\)', re.I)
    selected_kind_re = re.compile(
        r'\bselected_(int|real)_kind\s*\(\s*(?P<value>.*)\s*\)', re.I)
    for n in param_names:
        if '=' in vars[n]:
            v = vars[n]['=']
            if islogical(vars[n]):
                v = v.lower()
                for repl in [
                        ('.false.', 'False'),
                        ('.true.', 'True'),
                        # TODO: test .eq., .neq., etc replacements.
                ]:
                    v = v.replace(*repl)
            v = kind_re.sub(r'kind("\1")', v)
            v = selected_int_kind_re.sub(r'selected_int_kind(\1)', v)
            # We need to act according to the data.
            # The easy case is if the data has a kind-specifier,
            # then we may easily remove those specifiers.
            # However, it may be that the user uses other specifiers...(!)
            is_replaced = False
            if 'kindselector' in vars[n]:
                if 'kind' in vars[n]['kindselector']:
                    orig_v_len = len(v)
                    v = v.replace('_' + vars[n]['kindselector']['kind'], '')
                    # Again, this will be true if even a single specifier
                    # has been replaced, see comment above.
                    is_replaced = len(v) < orig_v_len
            if not is_replaced:
                if not selected_kind_re.match(v):
                    v_ = v.split('_')
                    # In case there are additive parameters
                    if len(v_) > 1:
                        v = ''.join(v_[:-1]).lower().replace(v_[-1].lower(), '')
            # Currently this will not work for complex numbers.
            # There is missing code for extracting a complex number,
            # which may be defined in either of these:
            #  a) (Re, Im)
            #  b) cmplx(Re, Im)
            #  c) dcmplx(Re, Im)
            #  d) cmplx(Re, Im, <prec>)
            if isdouble(vars[n]):
                # Turn d-exponents into e-exponents so Python can eval.
                tt = list(v)
                for m in real16pattern.finditer(v):
                    tt[m.start():m.end()] = list(
                        v[m.start():m.end()].lower().replace('d', 'e'))
                v = ''.join(tt)
            elif iscomplex(vars[n]):
                # FIXME complex numbers may also have exponents
                if v[0] == '(' and v[-1] == ')':
                    # FIXME, unused l looks like a leftover / potential bug
                    l = markoutercomma(v[1:-1]).split('@,@')
            try:
                params[n] = eval(v, g_params, params)
            except Exception as msg:
                params[n] = v
                outmess('get_parameters: got "%s" on %s\n' % (msg, repr(v)))
            if isstring(vars[n]) and isinstance(params[n], int):
                params[n] = chr(params[n])
            nl = n.lower()
            if nl != n:
                params[nl] = params[n]
        else:
            print(vars[n])
            outmess(
                'get_parameters:parameter %s does not have value?!\n' % (repr(n)))
    return params
def _eval_length(length, params):
    """Evaluate a character-length spec; assumed-size forms collapse to
    '(*)', everything else goes through _eval_scalar."""
    assumed_forms = ('(:)', '(*)', '*')
    if length in assumed_forms:
        return '(*)'
    return _eval_scalar(length, params)
_is_kind_number = re.compile(r'\d+_').match
def _eval_scalar(value, params):
if _is_kind_number(value):
value = value.split('_')[0]
try:
value = str(eval(value, {}, params))
except (NameError, SyntaxError):
return value
except Exception as msg:
errmess('"%s" in evaluating %r '
'(available names: %s)\n'
% (msg, value, list(params.keys())))
return value
def analyzevars(block):
    """Analyze and normalize ``block['vars']``; return the resulting dict.

    In order, this: applies block-wide public/private attributes; applies
    implicit typing rules to untyped variables; resolves PARAMETER values
    inside char-length, kind and dimension specifications; splits packed
    'attrspec' entries (dimension/intent/depend/check/note) into dedicated
    keys; derives depend/check/default relations between array arguments and
    the scalar arguments that size them; folds the function-result prefix
    declaration into the result variable; and finally prunes variables not
    needed by the block.
    """
    global f90modulevars
    setmesstext(block)
    implicitrules, attrrules = buildimplicitrules(block)
    vars = copy.copy(block['vars'])
    if block['block'] == 'function' and block['name'] not in vars:
        vars[block['name']] = {}
    if '' in block['vars']:
        # The '' entry collects bare attribute statements (public/private)
        # that apply to every variable of the block.
        del vars['']
        if 'attrspec' in block['vars']['']:
            gen = block['vars']['']['attrspec']
            for n in list(vars.keys()):
                for k in ['public', 'private']:
                    if k in gen:
                        vars[n] = setattrspec(vars[n], k)
    # Process arguments first (in argument order), then remaining variables.
    svars = []
    args = block['args']
    for a in args:
        try:
            vars[a]
            svars.append(a)
        except KeyError:
            pass
    for n in list(vars.keys()):
        if n not in args:
            svars.append(n)
    params = get_parameters(vars, get_useparameters(block))
    # For each variable name, a matcher used later to detect whether an
    # initialization expression references that name (-> 'depend' entries).
    dep_matches = {}
    name_match = re.compile(r'\w[\w\d_$]*').match
    for v in list(vars.keys()):
        m = name_match(v)
        if m:
            n = v[m.start():m.end()]
            try:
                dep_matches[n]
            except KeyError:
                dep_matches[n] = re.compile(r'.*\b%s\b' % (v), re.I).match
    for n in svars:
        if n[0] in list(attrrules.keys()):
            vars[n] = setattrspec(vars[n], attrrules[n[0]])
        if 'typespec' not in vars[n]:
            if not('attrspec' in vars[n] and 'external' in vars[n]['attrspec']):
                if implicitrules:
                    # Implicit typing: rule is selected by the first letter
                    # of the variable name.
                    ln0 = n[0].lower()
                    for k in list(implicitrules[ln0].keys()):
                        if k == 'typespec' and implicitrules[ln0][k] == 'undefined':
                            continue
                        if k not in vars[n]:
                            vars[n][k] = implicitrules[ln0][k]
                        elif k == 'attrspec':
                            for l in implicitrules[ln0][k]:
                                vars[n] = setattrspec(vars[n], l)
                elif n in block['args']:
                    outmess('analyzevars: typespec of variable %s is not defined in routine %s.\n' % (
                        repr(n), block['name']))
        if 'charselector' in vars[n]:
            if 'len' in vars[n]['charselector']:
                l = vars[n]['charselector']['len']
                try:
                    # Resolve the char length against PARAMETER values.
                    l = str(eval(l, {}, params))
                except:
                    pass
                vars[n]['charselector']['len'] = l
        if 'kindselector' in vars[n]:
            if 'kind' in vars[n]['kindselector']:
                l = vars[n]['kindselector']['kind']
                try:
                    # Resolve the kind against PARAMETER values.
                    l = str(eval(l, {}, params))
                except:
                    pass
                vars[n]['kindselector']['kind'] = l
        # Maps a linearized dimension expression to its (length-expr, divisor)
        # pair as produced by getarrlen below.
        savelindims = {}
        if 'attrspec' in vars[n]:
            attr = vars[n]['attrspec']
            attr.reverse()
            vars[n]['attrspec'] = []
            dim, intent, depend, check, note = None, None, None, None, None
            for a in attr:
                if a[:9] == 'dimension':
                    dim = (a[9:].strip())[1:-1]
                elif a[:6] == 'intent':
                    intent = (a[6:].strip())[1:-1]
                elif a[:6] == 'depend':
                    depend = (a[6:].strip())[1:-1]
                elif a[:5] == 'check':
                    check = (a[5:].strip())[1:-1]
                elif a[:4] == 'note':
                    note = (a[4:].strip())[1:-1]
                else:
                    vars[n] = setattrspec(vars[n], a)
                if intent:
                    if 'intent' not in vars[n]:
                        vars[n]['intent'] = []
                    for c in [x.strip() for x in markoutercomma(intent).split('@,@')]:
                        # Remove spaces so that 'in out' becomes 'inout'
                        tmp = c.replace(' ', '')
                        if tmp not in vars[n]['intent']:
                            vars[n]['intent'].append(tmp)
                    intent = None
                if note:
                    note = note.replace('\\n\\n', '\n\n')
                    note = note.replace('\\n ', '\n')
                    if 'note' not in vars[n]:
                        vars[n]['note'] = [note]
                    else:
                        vars[n]['note'].append(note)
                    note = None
                if depend is not None:
                    if 'depend' not in vars[n]:
                        vars[n]['depend'] = []
                    for c in rmbadname([x.strip() for x in markoutercomma(depend).split('@,@')]):
                        if c not in vars[n]['depend']:
                            vars[n]['depend'].append(c)
                    depend = None
                if check is not None:
                    if 'check' not in vars[n]:
                        vars[n]['check'] = []
                    for c in [x.strip() for x in markoutercomma(check).split('@,@')]:
                        if c not in vars[n]['check']:
                            vars[n]['check'].append(c)
                    check = None
            if dim and 'dimension' not in vars[n]:
                vars[n]['dimension'] = []
                for d in rmbadname([x.strip() for x in markoutercomma(dim).split('@,@')]):
                    star = '*'
                    if d == ':':
                        star = ':'
                    # Substitute PARAMETER names occurring in the dimension
                    # expression by their values.
                    if d in params:
                        d = str(params[d])
                    for p in list(params.keys()):
                        re_1 = re.compile(r'(?P<before>.*?)\b' + p + r'\b(?P<after>.*)', re.I)
                        m = re_1.match(d)
                        while m:
                            d = m.group('before') + \
                                str(params[p]) + m.group('after')
                            m = re_1.match(d)
                    if d == star:
                        dl = [star]
                    else:
                        dl = markoutercomma(d, ':').split('@:@')
                    if len(dl) == 2 and '*' in dl:  # e.g. dimension(5:*)
                        dl = ['*']
                        d = '*'
                    if len(dl) == 1 and not dl[0] == star:
                        dl = ['1', dl[0]]
                    if len(dl) == 2:
                        d, v, di = getarrlen(dl, list(block['vars'].keys()))
                        if d[:4] == '1 * ':
                            d = d[4:]
                        if di and di[-4:] == '/(1)':
                            di = di[:-4]
                        if v:
                            savelindims[d] = v, di
                    vars[n]['dimension'].append(d)
        if 'dimension' in vars[n]:
            if isintent_c(vars[n]):
                shape_macro = 'shape'
            else:
                shape_macro = 'shape'  # 'fshape'
            if isstringarray(vars[n]):
                if 'charselector' in vars[n]:
                    d = vars[n]['charselector']
                    if '*' in d:
                        # character*N arrays are remodelled as plain character
                        # arrays with an extra trailing dimension N.
                        d = d['*']
                        errmess('analyzevars: character array "character*%s %s(%s)" is considered as "character %s(%s)"; "intent(c)" is forced.\n'
                                % (d, n,
                                   ','.join(vars[n]['dimension']),
                                   n, ','.join(vars[n]['dimension'] + [d])))
                        vars[n]['dimension'].append(d)
                        del vars[n]['charselector']
                        if 'intent' not in vars[n]:
                            vars[n]['intent'] = []
                        if 'c' not in vars[n]['intent']:
                            vars[n]['intent'].append('c')
                    else:
                        errmess(
                            "analyzevars: charselector=%r unhandled." % (d))
        if 'check' not in vars[n] and 'args' in block and n in block['args']:
            # Derive depend/check relations for arguments without explicit
            # check() attributes.
            flag = 'depend' not in vars[n]
            if flag:
                vars[n]['depend'] = []
            vars[n]['check'] = []
            if 'dimension' in vars[n]:
                #/----< no check
                i = -1
                ni = len(vars[n]['dimension'])
                for d in vars[n]['dimension']:
                    ddeps = []  # dependencies of 'd'
                    ad = ''
                    pd = ''
                    if d not in vars:
                        if d in savelindims:
                            pd, ad = '(', savelindims[d][1]
                            d = savelindims[d][0]
                        else:
                            for r in block['args']:
                                if r not in vars:
                                    continue
                                if re.match(r'.*?\b' + r + r'\b', d, re.I):
                                    ddeps.append(r)
                    if d in vars:
                        if 'attrspec' in vars[d]:
                            for aa in vars[d]['attrspec']:
                                if aa[:6] == 'depend':
                                    ddeps += aa[6:].strip()[1:-1].split(',')
                        if 'depend' in vars[d]:
                            ddeps = ddeps + vars[d]['depend']
                    i = i + 1
                    if d in vars and ('depend' not in vars[d]) \
                            and ('=' not in vars[d]) and (d not in vars[n]['depend']) \
                            and l_or(isintent_in, isintent_inout, isintent_inplace)(vars[n]):
                        # The scalar d that sizes this array becomes optional,
                        # defaulting to the array's shape/len.
                        vars[d]['depend'] = [n]
                        if ni > 1:
                            vars[d]['='] = '%s%s(%s,%s)%s' % (
                                pd, shape_macro, n, i, ad)
                        else:
                            vars[d]['='] = '%slen(%s)%s' % (pd, n, ad)
                        # /---< no check
                        if 1 and 'check' not in vars[d]:
                            if ni > 1:
                                vars[d]['check'] = ['%s%s(%s,%i)%s==%s'
                                                    % (pd, shape_macro, n, i, ad, d)]
                            else:
                                vars[d]['check'] = [
                                    '%slen(%s)%s>=%s' % (pd, n, ad, d)]
                        if 'attrspec' not in vars[d]:
                            vars[d]['attrspec'] = ['optional']
                        if ('optional' not in vars[d]['attrspec']) and\
                                ('required' not in vars[d]['attrspec']):
                            vars[d]['attrspec'].append('optional')
                    elif d not in ['*', ':']:
                        #/----< no check
                        if flag:
                            if d in vars:
                                if n not in ddeps:
                                    vars[n]['depend'].append(d)
                            else:
                                vars[n]['depend'] = vars[n]['depend'] + ddeps
            elif isstring(vars[n]):
                length = '1'
                if 'charselector' in vars[n]:
                    if '*' in vars[n]['charselector']:
                        length = _eval_length(vars[n]['charselector']['*'],
                                              params)
                        vars[n]['charselector']['*'] = length
                    elif 'len' in vars[n]['charselector']:
                        length = _eval_length(vars[n]['charselector']['len'],
                                              params)
                        del vars[n]['charselector']['len']
                        vars[n]['charselector']['*'] = length
            if not vars[n]['check']:
                del vars[n]['check']
            if flag and not vars[n]['depend']:
                del vars[n]['depend']
        if '=' in vars[n]:
            # Initialized variables become optional and depend on every
            # variable their initialization expression references.
            if 'attrspec' not in vars[n]:
                vars[n]['attrspec'] = []
            if ('optional' not in vars[n]['attrspec']) and \
                    ('required' not in vars[n]['attrspec']):
                vars[n]['attrspec'].append('optional')
            if 'depend' not in vars[n]:
                vars[n]['depend'] = []
                for v, m in list(dep_matches.items()):
                    if m(vars[n]['=']):
                        vars[n]['depend'].append(v)
                if not vars[n]['depend']:
                    del vars[n]['depend']
            if isscalar(vars[n]):
                vars[n]['='] = _eval_scalar(vars[n]['='], params)
    for n in list(vars.keys()):
        if n == block['name']:  # n is block name
            if 'note' in vars[n]:
                block['note'] = vars[n]['note']
            if block['block'] == 'function':
                # Fold 'result' variable declarations and any type given in
                # the function prefix into the result variable.
                if 'result' in block and block['result'] in vars:
                    vars[n] = appenddecl(vars[n], vars[block['result']])
                if 'prefix' in block:
                    pr = block['prefix']
                    ispure = 0
                    isrec = 1
                    pr1 = pr.replace('pure', '')
                    ispure = (not pr == pr1)
                    pr = pr1.replace('recursive', '')
                    isrec = (not pr == pr1)
                    m = typespattern[0].match(pr)
                    if m:
                        typespec, selector, attr, edecl = cracktypespec0(
                            m.group('this'), m.group('after'))
                        kindselect, charselect, typename = cracktypespec(
                            typespec, selector)
                        vars[n]['typespec'] = typespec
                        if kindselect:
                            if 'kind' in kindselect:
                                try:
                                    kindselect['kind'] = eval(
                                        kindselect['kind'], {}, params)
                                except:
                                    pass
                            vars[n]['kindselector'] = kindselect
                        if charselect:
                            vars[n]['charselector'] = charselect
                        if typename:
                            vars[n]['typename'] = typename
                        if ispure:
                            vars[n] = setattrspec(vars[n], 'pure')
                        if isrec:
                            vars[n] = setattrspec(vars[n], 'recursive')
                    else:
                        outmess(
                            'analyzevars: prefix (%s) were not used\n' % repr(block['prefix']))
    if not block['block'] in ['module', 'pythonmodule', 'python module', 'block data']:
        # Prune variables not needed by the wrapper: keep arguments,
        # common/entry variables, callbacks, aux variables and the result.
        if 'commonvars' in block:
            neededvars = copy.copy(block['args'] + block['commonvars'])
        else:
            neededvars = copy.copy(block['args'])
        for n in list(vars.keys()):
            if l_or(isintent_callback, isintent_aux)(vars[n]):
                neededvars.append(n)
        if 'entry' in block:
            neededvars.extend(list(block['entry'].keys()))
            for k in list(block['entry'].keys()):
                for n in block['entry'][k]:
                    if n not in neededvars:
                        neededvars.append(n)
        if block['block'] == 'function':
            if 'result' in block:
                neededvars.append(block['result'])
            else:
                neededvars.append(block['name'])
        if block['block'] in ['subroutine', 'function']:
            name = block['name']
            if name in vars and 'intent' in vars[name]:
                block['intent'] = vars[name]['intent']
        if block['block'] == 'type':
            neededvars.extend(list(vars.keys()))
        for n in list(vars.keys()):
            if n not in neededvars:
                del vars[n]
    return vars
# A plain Fortran identifier: a letter followed by word chars or '$'.
analyzeargs_re_1 = re.compile(r'\A[a-z]+[\w$]*\Z', re.I)


def expr2name(a, block, args=None):
    """Return a valid variable name for argument *a* of *block*.

    If *a* is a plain identifier it is returned (possibly suffixed with a
    counter to avoid clashing with names already present in *args*), and an
    entry for it is ensured in ``block['vars']``.  If *a* is an expression, a
    synthetic name of the form ``e_..._e`` is generated, its type is
    determined, and it is registered in ``block['vars']``.

    Note: the *args* default used to be a mutable ``[]``; it is only ever
    read here, but a None sentinel is safer and idiomatic.
    """
    if args is None:
        args = []
    orig_a = a
    a_is_expr = not analyzeargs_re_1.match(a)
    if a_is_expr:  # `a` is an expression
        implicitrules, attrrules = buildimplicitrules(block)
        at = determineexprtype(a, block['vars'], implicitrules)
        # Build a synthetic name by mangling non-alphanumerics to '_'.
        na = 'e_'
        for c in a:
            c = c.lower()
            if c not in string.ascii_lowercase + string.digits:
                c = '_'
            na = na + c
        if na[-1] == '_':
            na = na + 'e'
        else:
            na = na + '_e'
        a = na
        # Avoid clashing with names already used in this block.
        while a in block['vars'] or a in block['args']:
            a = a + 'r'
    if a in args:
        # Disambiguate against names already produced for this call site.
        k = 1
        while a + str(k) in args:
            k = k + 1
        a = a + str(k)
    if a_is_expr:
        block['vars'][a] = at
    else:
        if a not in block['vars']:
            if orig_a in block['vars']:
                block['vars'][a] = block['vars'][orig_a]
            else:
                block['vars'][a] = {}
        if 'externals' in block and orig_a in block['externals'] + block['interfaced']:
            block['vars'][a] = setattrspec(block['vars'][a], 'external')
    return a
def analyzeargs(block):
    """Normalize the argument list of *block* in place and return the block.

    Expression arguments are replaced by generated names (see expr2name),
    every entry-point argument gets a vars entry, child functions/subroutines
    that appear as arguments are recorded as externals, and the result
    variable is ensured to exist in block['vars'].
    """
    setmesstext(block)
    implicitrules, attrrules = buildimplicitrules(block)
    block.setdefault('args', [])
    normalized = []
    for arg in block['args']:
        # expr2name sees the partially built list so it can avoid clashes.
        normalized.append(expr2name(arg, block, normalized))
    block['args'] = normalized
    if 'entry' in block:
        for entry_args in list(block['entry'].values()):
            for arg in entry_args:
                if arg not in block['vars']:
                    block['vars'][arg] = {}
    for child in block['body']:
        if child['name'] in normalized:
            externals = block.setdefault('externals', [])
            if child['name'] not in externals:
                externals.append(child['name'])
    if 'result' in block and block['result'] not in block['vars']:
        block['vars'][block['result']] = {}
    return block
# Literal classifiers used by determineexprtype().
# re_1: parenthesized pair '(re,im)' -> complex literal.
determineexprtype_re_1 = re.compile(r'\A\(.+?[,].+?\)\Z', re.I)
# re_2: integer literal with optional kind suffix, e.g. '123' or '123_i8'.
# BUGFIX: the group was written '(P<name>...)' which matches a *literal*
# 'P<name>' -- the named-group syntax requires '?P<name>'.  As a result
# kind-suffixed integers/reals were never recognized by re_2/re_3.
determineexprtype_re_2 = re.compile(r'\A[+-]?\d+(_(?P<name>[\w]+)|)\Z', re.I)
# re_3: real literal (with d/e exponents) and optional kind suffix.
# BUGFIX: in '[\d+-de.]' the unescaped '-' created an accidental character
# range from '+' to 'd'; the dash is now escaped so the class means
# digits, '+', '-', 'd', 'e', '.'.
determineexprtype_re_3 = re.compile(
    r'\A[+-]?[\d.]+[\d+\-de.]*(_(?P<name>[\w]+)|)\Z', re.I)
# re_4: anything fully wrapped in parentheses.
determineexprtype_re_4 = re.compile(r'\A\(.*\)\Z', re.I)
# re_5: a call-like expression 'name(...)'.
determineexprtype_re_5 = re.compile(r'\A(?P<name>\w+)\s*\(.*?\)\s*\Z', re.I)
def _ensure_exprdict(r):
if isinstance(r, int):
return {'typespec': 'integer'}
if isinstance(r, float):
return {'typespec': 'real'}
if isinstance(r, complex):
return {'typespec': 'complex'}
if isinstance(r, dict):
return r
raise AssertionError(repr(r))
def determineexprtype(expr, vars, rules={}):
    """Determine the typespec dict of Fortran expression *expr*.

    Resolution order: known variable -> complex literal -> integer literal
    -> real literal -> any operand of +-*/ that is a known variable ->
    parenthesized subexpression / call-like expression (recursively) ->
    string literal -> implicit rule keyed by the first letter.  Returns {}
    (with a diagnostic) if nothing matches.  *rules* is only read, never
    mutated, so the mutable default is harmless here.
    """
    if expr in vars:
        return _ensure_exprdict(vars[expr])
    expr = expr.strip()
    if determineexprtype_re_1.match(expr):
        return {'typespec': 'complex'}
    m = determineexprtype_re_2.match(expr)
    if m:
        if 'name' in m.groupdict() and m.group('name'):
            outmess(
                'determineexprtype: selected kind types not supported (%s)\n' % repr(expr))
        return {'typespec': 'integer'}
    m = determineexprtype_re_3.match(expr)
    if m:
        if 'name' in m.groupdict() and m.group('name'):
            outmess(
                'determineexprtype: selected kind types not supported (%s)\n' % repr(expr))
        return {'typespec': 'real'}
    # If any top-level operand of an arithmetic expression is a known
    # variable, use that variable's type.
    for op in ['+', '-', '*', '/']:
        for e in [x.strip() for x in markoutercomma(expr, comma=op).split('@' + op + '@')]:
            if e in vars:
                return _ensure_exprdict(vars[e])
    t = {}
    if determineexprtype_re_4.match(expr):  # in parenthesis
        t = determineexprtype(expr[1:-1], vars, rules)
    else:
        m = determineexprtype_re_5.match(expr)
        if m:
            rn = m.group('name')
            t = determineexprtype(m.group('name'), vars, rules)
            if t and 'attrspec' in t:
                del t['attrspec']
            if not t:
                # Fall back to the implicit rule for the first letter.
                if rn[0] in rules:
                    return _ensure_exprdict(rules[rn[0]])
    if expr[0] in '\'"':
        return {'typespec': 'character', 'charselector': {'*': '*'}}
    if not t:
        outmess(
            'determineexprtype: could not determine expressions (%s) type.\n' % (repr(expr)))
    return t
######
def crack2fortrangen(block, tab='\n', as_interface=False):
    """Generate Fortran 90 (pyf) source text for *block*.

    *block* may be a single block dict or a list of them; lists are rendered
    by recursion (honoring the global skipfuncs/onlyfuncs filters for
    functions and subroutines).  *tab* carries the newline-plus-indentation
    prefix and grows by tabchar per nesting level.
    """
    global skipfuncs, onlyfuncs
    setmesstext(block)
    ret = ''
    if isinstance(block, list):
        for g in block:
            if g and g['block'] in ['function', 'subroutine']:
                if g['name'] in skipfuncs:
                    continue
                if onlyfuncs and g['name'] not in onlyfuncs:
                    continue
            ret = ret + crack2fortrangen(g, tab, as_interface=as_interface)
        return ret
    prefix = ''
    name = ''
    args = ''
    blocktype = block['block']
    if blocktype == 'program':
        return ''
    argsl = []
    if 'name' in block:
        name = block['name']
    if 'args' in block:
        vars = block['vars']
        for a in block['args']:
            a = expr2name(a, block, argsl)
            if not isintent_callback(vars[a]):
                argsl.append(a)
        if block['block'] == 'function' or argsl:
            args = '(%s)' % ','.join(argsl)
    f2pyenhancements = ''
    if 'f2pyenhancements' in block:
        for k in list(block['f2pyenhancements'].keys()):
            f2pyenhancements = '%s%s%s %s' % (
                f2pyenhancements, tab + tabchar, k, block['f2pyenhancements'][k])
    intent_lst = block.get('intent', [])[:]
    if blocktype == 'function' and 'callback' in intent_lst:
        intent_lst.remove('callback')
    if intent_lst:
        f2pyenhancements = '%s%sintent(%s) %s' %\
            (f2pyenhancements, tab + tabchar,
             ','.join(intent_lst), name)
    use = ''
    if 'use' in block:
        use = use2fortran(block['use'], tab + tabchar)
    common = ''
    if 'common' in block:
        common = common2fortran(block['common'], tab + tabchar)
    if name == 'unknown_interface':
        name = ''
    result = ''
    if 'result' in block:
        result = ' result (%s)' % block['result']
        if block['result'] not in argsl:
            argsl.append(block['result'])
    body = crack2fortrangen(block['body'], tab + tabchar)
    vars = vars2fortran(
        block, block['vars'], argsl, tab + tabchar, as_interface=as_interface)
    mess = ''
    if 'from' in block and not as_interface:
        mess = '! in %s' % block['from']
    if 'entry' in block:
        entry_stmts = ''
        for k, i in list(block['entry'].items()):
            entry_stmts = '%s%sentry %s(%s)' \
                % (entry_stmts, tab + tabchar, k, ','.join(i))
        body = body + entry_stmts
    if blocktype == 'block data' and name == '_BLOCK_DATA_':
        name = ''
    # Assemble: header line, enhancements, use/common statements, variable
    # declarations, body, and the matching 'end <blocktype> <name>'.
    ret = '%s%s%s %s%s%s %s%s%s%s%s%s%send %s %s' % (
        tab, prefix, blocktype, name, args, result, mess, f2pyenhancements, use, vars, common, body, tab, blocktype, name)
    return ret
def common2fortran(common, tab=''):
    """Render the COMMON blocks in *common* as Fortran statements.

    *common* maps a block name to its member-name list; the internal key
    '_BLNK_' denotes the blank (unnamed) common block.  Each statement is
    prefixed with *tab*.
    """
    pieces = []
    for name in list(common.keys()):
        members = ','.join(common[name])
        if name == '_BLNK_':
            pieces.append('%scommon %s' % (tab, members))
        else:
            pieces.append('%scommon /%s/ %s' % (tab, name, members))
    return ''.join(pieces)
def use2fortran(use, tab=''):
    """Render the USE statements described by *use* as Fortran source.

    *use* maps a module name to a spec dict which may contain 'only'
    (truthy flag) and 'map' (a {local: target} rename map; identical names
    render plainly, differing ones as 'local=>target').
    """
    ret = ''
    for mod in list(use.keys()):
        spec = use[mod]
        ret += '%suse %s,' % (tab, mod)
        if spec == {}:
            # Bare "use module": drop the comma that was just appended.
            if ret and ret[-1] == ',':
                ret = ret[:-1]
            continue
        if spec.get('only'):
            ret += ' only:'
        if spec.get('map'):
            sep = ' '
            for local in list(spec['map'].keys()):
                target = spec['map'][local]
                if local == target:
                    ret += sep + local
                else:
                    ret += '%s%s=>%s' % (sep, local, target)
                sep = ','
        if ret and ret[-1] == ',':
            ret = ret[:-1]
    return ret
def true_intent_list(var):
    """Return the intents from var['intent'] that actually hold for *var*.

    Each intent name is checked by dispatching to the matching
    ``isintent_<name>`` predicate (from auxfuncs, in module scope); intent
    names with no such predicate simply do not pass the filter.
    """
    ret = []
    for intent in var['intent']:
        try:
            holds = eval('isintent_%s(var)' % intent)
        except NameError:
            holds = 0
        if holds:
            ret.append(intent)
    return ret
def vars2fortran(block, vars, args, tab='', as_interface=False):
    """Generate Fortran 90 declaration lines for the variables of *block*.

    Emits declarations for arguments first (in *args* order), then common
    variables, then (unless as_interface) the remaining variables.  Each
    declaration carries attribute, dimension, intent, check and depend
    clauses plus any initialization.

    TODO:
    public sub
    ...
    """
    setmesstext(block)
    ret = ''
    # Build the ordered list of names to declare.
    nout = []
    for a in args:
        if a in block['vars']:
            nout.append(a)
    if 'commonvars' in block:
        for a in block['commonvars']:
            if a in vars:
                if a not in nout:
                    nout.append(a)
            else:
                errmess(
                    'vars2fortran: Confused?!: "%s" is not defined in vars.\n' % a)
    if 'varnames' in block:
        nout.extend(block['varnames'])
    if not as_interface:
        for a in list(vars.keys()):
            if a not in nout:
                nout.append(a)
    for a in nout:
        if 'depend' in vars[a]:
            # Warn about mutual depend() relations -- they cannot both hold.
            for d in vars[a]['depend']:
                if d in vars and 'depend' in vars[d] and a in vars[d]['depend']:
                    errmess(
                        'vars2fortran: Warning: cross-dependence between variables "%s" and "%s"\n' % (a, d))
        if 'externals' in block and a in block['externals']:
            if isintent_callback(vars[a]):
                ret = '%s%sintent(callback) %s' % (ret, tab, a)
            ret = '%s%sexternal %s' % (ret, tab, a)
            if isoptional(vars[a]):
                ret = '%s%soptional %s' % (ret, tab, a)
            if a in vars and 'typespec' not in vars[a]:
                continue
            # Only emit a typed declaration for an external if a function
            # block of the same name exists in the body.
            cont = 1
            for b in block['body']:
                if a == b['name'] and b['block'] == 'function':
                    cont = 0
                    break
            if cont:
                continue
        if a not in vars:
            show(vars)
            outmess('vars2fortran: No definition for argument "%s".\n' % a)
            continue
        if a == block['name'] and not block['block'] == 'function':
            continue
        if 'typespec' not in vars[a]:
            if 'attrspec' in vars[a] and 'external' in vars[a]['attrspec']:
                if a in args:
                    ret = '%s%sexternal %s' % (ret, tab, a)
                continue
            show(vars[a])
            outmess('vars2fortran: No typespec for argument "%s".\n' % a)
            continue
        # Base type, e.g. 'integer', 'type(mytype)', 'character*10'.
        vardef = vars[a]['typespec']
        if vardef == 'type' and 'typename' in vars[a]:
            vardef = '%s(%s)' % (vardef, vars[a]['typename'])
        selector = {}
        if 'kindselector' in vars[a]:
            selector = vars[a]['kindselector']
        elif 'charselector' in vars[a]:
            selector = vars[a]['charselector']
        if '*' in selector:
            # Old-style length: character*N or character*(*).
            if selector['*'] in ['*', ':']:
                vardef = '%s*(%s)' % (vardef, selector['*'])
            else:
                vardef = '%s*%s' % (vardef, selector['*'])
        else:
            if 'len' in selector:
                vardef = '%s(len=%s' % (vardef, selector['len'])
                if 'kind' in selector:
                    vardef = '%s,kind=%s)' % (vardef, selector['kind'])
                else:
                    vardef = '%s)' % (vardef)
            elif 'kind' in selector:
                vardef = '%s(kind=%s)' % (vardef, selector['kind'])
        # c is the separator preceding the next attribute clause: a single
        # space before the first clause, a comma afterwards.
        c = ' '
        if 'attrspec' in vars[a]:
            attr = []
            for l in vars[a]['attrspec']:
                if l not in ['external']:
                    attr.append(l)
            if attr:
                vardef = '%s, %s' % (vardef, ','.join(attr))
                c = ','
        if 'dimension' in vars[a]:
            vardef = '%s%sdimension(%s)' % (
                vardef, c, ','.join(vars[a]['dimension']))
            c = ','
        if 'intent' in vars[a]:
            lst = true_intent_list(vars[a])
            if lst:
                vardef = '%s%sintent(%s)' % (vardef, c, ','.join(lst))
            c = ','
        if 'check' in vars[a]:
            vardef = '%s%scheck(%s)' % (vardef, c, ','.join(vars[a]['check']))
            c = ','
        if 'depend' in vars[a]:
            vardef = '%s%sdepend(%s)' % (
                vardef, c, ','.join(vars[a]['depend']))
            c = ','
        if '=' in vars[a]:
            v = vars[a]['=']
            if vars[a]['typespec'] in ['complex', 'double complex']:
                try:
                    # Render a complex initializer as '(re,im)'.
                    v = eval(v)
                    v = '(%s,%s)' % (v.real, v.imag)
                except:
                    pass
            vardef = '%s :: %s=%s' % (vardef, a, v)
        else:
            vardef = '%s :: %s' % (vardef, a)
        ret = '%s%s%s' % (ret, tab, vardef)
    return ret
######
def crackfortran(files):
    """Read and parse the given Fortran source *files*.

    Returns the list of crack blocks: any user modules collected during
    post-processing followed by the post-processed block list.
    """
    global usermodules
    outmess('Reading fortran codes...\n', 0)
    readfortrancode(files, crackline)
    outmess('Post-processing...\n', 0)
    # Reset before postcrack, which appends to usermodules as a side effect.
    usermodules = []
    blocks = postcrack(grouplist[0])
    outmess('Post-processing (stage 2)...\n', 0)
    blocks = postcrack2(blocks)
    return usermodules + blocks
def crack2fortran(block):
    """Render *block* as a complete .pyf signature file text.

    The generated body is wrapped between a fixed f90-mode header and a
    footer recording the f2py version.
    """
    global f2py_version
    generated = crack2fortrangen(block) + '\n'
    header = """! -*- f90 -*-
! Note: the context of this file is case sensitive.
"""
    footer = """
! This file was auto-generated with f2py (version:%s).
! See http://cens.ioc.ee/projects/f2py2e/
""" % (f2py_version)
    return header + generated + footer
if __name__ == "__main__":
    # Stand-alone CLI: parse options, crack the listed Fortran files and
    # optionally write a .pyf signature file (-h <file>) or dump the block
    # list (-show).
    files = []
    funcs = []
    f = 1    # collecting file names (until ':' is seen)
    f2 = 0   # next argument is the pyf output filename (-h)
    f3 = 0   # next argument is the F77 module name (-m)
    showblocklist = 0
    for l in sys.argv[1:]:
        if l == '':
            pass
        elif l[0] == ':':
            f = 0
        elif l == '-quiet':
            quiet = 1
            verbose = 0
        elif l == '-verbose':
            verbose = 2
            quiet = 0
        elif l == '-fix':
            if strictf77:
                outmess(
                    'Use option -f90 before -fix if Fortran 90 code is in fix form.\n', 0)
            skipemptyends = 1
            sourcecodeform = 'fix'
        elif l == '-skipemptyends':
            skipemptyends = 1
        elif l == '--ignore-contains':
            ignorecontains = 1
        elif l == '-f77':
            strictf77 = 1
            sourcecodeform = 'fix'
        elif l == '-f90':
            strictf77 = 0
            sourcecodeform = 'free'
            skipemptyends = 1
        elif l == '-h':
            f2 = 1
        elif l == '-show':
            showblocklist = 1
        elif l == '-m':
            f3 = 1
        elif l[0] == '-':
            errmess('Unknown option %s\n' % repr(l))
        elif f2:
            f2 = 0
            pyffilename = l
        elif f3:
            f3 = 0
            f77modulename = l
        elif f:
            try:
                open(l).close()
                files.append(l)
            except IOError as detail:
                errmess('IOError: %s\n' % str(detail))
        else:
            funcs.append(l)
    if not strictf77 and f77modulename and not skipemptyends:
        outmess("""\
Warning: You have specifyied module name for non Fortran 77 code
that should not need one (expect if you are scanning F90 code
for non module blocks but then you should use flag -skipemptyends
and also be sure that the files do not contain programs without program statement).
""", 0)
    # BUGFIX: this used to call crackfortran(files, funcs), but crackfortran
    # takes a single argument, so the CLI path always raised a TypeError.
    postlist = crackfortran(files)
    if pyffilename:
        outmess('Writing fortran code to file %s\n' % repr(pyffilename), 0)
        pyf = crack2fortran(postlist)
        # Use a context manager so the file is closed even on write errors.
        with open(pyffilename, 'w') as pyffile:
            pyffile.write(pyf)
    if showblocklist:
        show(postlist)
| 38.347592 | 207 | 0.472503 |
from __future__ import division, absolute_import, print_function
import sys
import string
import fileinput
import re
import os
import copy
import platform
from . import __version__
from .auxfuncs import *
f2py_version = __version__.version
strictf77 = 1
sourcecodeform = 'fix' # 'fix','free'
quiet = 0 # Be verbose if 0 (Obsolete: not used any more)
verbose = 1 # Be quiet if 0, extra verbose if > 1.
tabchar = 4 * ' '
pyffilename = ''
f77modulename = ''
skipemptyends = 0 # for old F77 programs without 'program' statement
ignorecontains = 1
dolowercase = 1
debug = []
# Global variables
beginpattern = ''
currentfilename = ''
expectbegin = 1
f90modulevars = {}
filepositiontext = ''
gotnextfile = 1
groupcache = None
groupcounter = 0
grouplist = {groupcounter: []}
groupname = ''
include_paths = []
neededmodule = -1
onlyfuncs = []
previous_context = None
skipblocksuntil = -1
skipfuncs = []
skipfunctions = []
usermodules = []
def reset_global_f2py_vars():
global groupcounter, grouplist, neededmodule, expectbegin
global skipblocksuntil, usermodules, f90modulevars, gotnextfile
global filepositiontext, currentfilename, skipfunctions, skipfuncs
global onlyfuncs, include_paths, previous_context
global strictf77, sourcecodeform, quiet, verbose, tabchar, pyffilename
global f77modulename, skipemptyends, ignorecontains, dolowercase, debug
# flags
strictf77 = 1
sourcecodeform = 'fix'
quiet = 0
verbose = 1
tabchar = 4 * ' '
pyffilename = ''
f77modulename = ''
skipemptyends = 0
ignorecontains = 1
dolowercase = 1
debug = []
# variables
groupcounter = 0
grouplist = {groupcounter: []}
neededmodule = -1
expectbegin = 1
skipblocksuntil = -1
usermodules = []
f90modulevars = {}
gotnextfile = 1
filepositiontext = ''
currentfilename = ''
skipfunctions = []
skipfuncs = []
onlyfuncs = []
include_paths = []
previous_context = None
def outmess(line, flag=1):
global filepositiontext
if not verbose:
return
if not quiet:
if flag:
sys.stdout.write(filepositiontext)
sys.stdout.write(line)
re._MAXCACHE = 50
defaultimplicitrules = {}
for c in "abcdefghopqrstuvwxyz$_":
defaultimplicitrules[c] = {'typespec': 'real'}
for c in "ijklmn":
defaultimplicitrules[c] = {'typespec': 'integer'}
del c
badnames = {}
invbadnames = {}
for n in ['int', 'double', 'float', 'char', 'short', 'long', 'void', 'case', 'while',
'return', 'signed', 'unsigned', 'if', 'for', 'typedef', 'sizeof', 'union',
'struct', 'static', 'register', 'new', 'break', 'do', 'goto', 'switch',
'continue', 'else', 'inline', 'extern', 'delete', 'const', 'auto',
'len', 'rank', 'shape', 'index', 'slen', 'size', '_i',
'max', 'min',
'flen', 'fshape',
'string', 'complex_double', 'float_double', 'stdin', 'stderr', 'stdout',
'type', 'default']:
badnames[n] = n + '_bn'
invbadnames[n + '_bn'] = n
def rmbadname1(name):
if name in badnames:
errmess('rmbadname1: Replacing "%s" with "%s".\n' %
(name, badnames[name]))
return badnames[name]
return name
def rmbadname(names):
return [rmbadname1(_m) for _m in names]
def undo_rmbadname1(name):
if name in invbadnames:
errmess('undo_rmbadname1: Replacing "%s" with "%s".\n'
% (name, invbadnames[name]))
return invbadnames[name]
return name
def undo_rmbadname(names):
return [undo_rmbadname1(_m) for _m in names]
def getextension(name):
i = name.rfind('.')
if i == -1:
return ''
if '\\' in name[i:]:
return ''
if '/' in name[i:]:
return ''
return name[i + 1:]
is_f_file = re.compile(r'.*[.](for|ftn|f77|f)\Z', re.I).match
_has_f_header = re.compile(r'-[*]-\s*fortran\s*-[*]-', re.I).search
_has_f90_header = re.compile(r'-[*]-\s*f90\s*-[*]-', re.I).search
_has_fix_header = re.compile(r'-[*]-\s*fix\s*-[*]-', re.I).search
_free_f90_start = re.compile(r'[^c*]\s*[^\s\d\t]', re.I).match
def is_free_format(file):
# f90 allows both fixed and free format, assuming fixed unless
# signs of free format are detected.
result = 0
f = open(file, 'r')
line = f.readline()
n = 15 # the number of non-comment lines to scan for hints
if _has_f_header(line):
n = 0
elif _has_f90_header(line):
n = 0
result = 1
while n > 0 and line:
if line[0] != '!' and line.strip():
n -= 1
if (line[0] != '\t' and _free_f90_start(line[:5])) or line[-2:-1] == '&':
result = 1
break
line = f.readline()
f.close()
return result
# Read fortran (77,90) code
def readfortrancode(ffile, dowithline=show, istop=1):
global gotnextfile, filepositiontext, currentfilename, sourcecodeform, strictf77
global beginpattern, quiet, verbose, dolowercase, include_paths
if not istop:
saveglobals = gotnextfile, filepositiontext, currentfilename, sourcecodeform, strictf77,\
beginpattern, quiet, verbose, dolowercase
if ffile == []:
return
localdolowercase = dolowercase
cont = 0
finalline = ''
ll = ''
commentline = re.compile(
r'(?P<line>([^"]*["][^"]*["][^"!]*|[^\']*\'[^\']*\'[^\'!]*|[^!\'"]*))!{1}(?P<rest>.*)')
includeline = re.compile(
r'\s*include\s*(\'|")(?P<name>[^\'"]*)(\'|")', re.I)
cont1 = re.compile(r'(?P<line>.*)&\s*\Z')
cont2 = re.compile(r'(\s*&|)(?P<line>.*)')
mline_mark = re.compile(r".*?'''")
if istop:
dowithline('', -1)
ll, l1 = '', ''
spacedigits = [' '] + [str(_m) for _m in range(10)]
filepositiontext = ''
fin = fileinput.FileInput(ffile)
while True:
l = fin.readline()
if not l:
break
if fin.isfirstline():
filepositiontext = ''
currentfilename = fin.filename()
gotnextfile = 1
l1 = l
strictf77 = 0
sourcecodeform = 'fix'
ext = os.path.splitext(currentfilename)[1]
if is_f_file(currentfilename) and \
not (_has_f90_header(l) or _has_fix_header(l)):
strictf77 = 1
elif is_free_format(currentfilename) and not _has_fix_header(l):
sourcecodeform = 'free'
if strictf77:
beginpattern = beginpattern77
else:
beginpattern = beginpattern90
outmess('\tReading file %s (format:%s%s)\n'
% (repr(currentfilename), sourcecodeform,
strictf77 and ',strict' or ''))
l = l.expandtabs().replace('\xa0', ' ')
# Get rid of newline characters
while not l == '':
if l[-1] not in "\n\r\f":
break
l = l[:-1]
if not strictf77:
r = commentline.match(l)
if r:
l = r.group('line') + ' ' # Strip comments starting with `!'
rl = r.group('rest')
if rl[:4].lower() == 'f2py': # f2py directive
l = l + 4 * ' '
r = commentline.match(rl[4:])
if r:
l = l + r.group('line')
else:
l = l + rl[4:]
if l.strip() == '': # Skip empty line
cont = 0
continue
if sourcecodeform == 'fix':
if l[0] in ['*', 'c', '!', 'C', '#']:
if l[1:5].lower() == 'f2py': # f2py directive
l = ' ' + l[5:]
else: # Skip comment line
cont = 0
continue
elif strictf77:
if len(l) > 72:
l = l[:72]
if not (l[0] in spacedigits):
raise Exception('readfortrancode: Found non-(space,digit) char '
'in the first column.\n\tAre you sure that '
'this code is in fix form?\n\tline=%s' % repr(l))
if (not cont or strictf77) and (len(l) > 5 and not l[5] == ' '):
# Continuation of a previous line
ll = ll + l[6:]
finalline = ''
origfinalline = ''
else:
if not strictf77:
# F90 continuation
r = cont1.match(l)
if r:
l = r.group('line') # Continuation follows ..
if cont:
ll = ll + cont2.match(l).group('line')
finalline = ''
origfinalline = ''
else:
# clean up line beginning from possible digits.
l = ' ' + l[5:]
if localdolowercase:
finalline = ll.lower()
else:
finalline = ll
origfinalline = ll
ll = l
cont = (r is not None)
else:
# clean up line beginning from possible digits.
l = ' ' + l[5:]
if localdolowercase:
finalline = ll.lower()
else:
finalline = ll
origfinalline = ll
ll = l
elif sourcecodeform == 'free':
if not cont and ext == '.pyf' and mline_mark.match(l):
l = l + '\n'
while True:
lc = fin.readline()
if not lc:
errmess(
'Unexpected end of file when reading multiline\n')
break
l = l + lc
if mline_mark.match(lc):
break
l = l.rstrip()
r = cont1.match(l)
if r:
l = r.group('line') # Continuation follows ..
if cont:
ll = ll + cont2.match(l).group('line')
finalline = ''
origfinalline = ''
else:
if localdolowercase:
finalline = ll.lower()
else:
finalline = ll
origfinalline = ll
ll = l
cont = (r is not None)
else:
raise ValueError(
"Flag sourcecodeform must be either 'fix' or 'free': %s" % repr(sourcecodeform))
filepositiontext = 'Line #%d in %s:"%s"\n\t' % (
fin.filelineno() - 1, currentfilename, l1)
m = includeline.match(origfinalline)
if m:
fn = m.group('name')
if os.path.isfile(fn):
readfortrancode(fn, dowithline=dowithline, istop=0)
else:
include_dirs = [
os.path.dirname(currentfilename)] + include_paths
foundfile = 0
for inc_dir in include_dirs:
fn1 = os.path.join(inc_dir, fn)
if os.path.isfile(fn1):
foundfile = 1
readfortrancode(fn1, dowithline=dowithline, istop=0)
break
if not foundfile:
outmess('readfortrancode: could not find include file %s in %s. Ignoring.\n' % (
repr(fn), os.pathsep.join(include_dirs)))
else:
dowithline(finalline)
l1 = ll
if localdolowercase:
finalline = ll.lower()
else:
finalline = ll
origfinalline = ll
filepositiontext = 'Line #%d in %s:"%s"\n\t' % (
fin.filelineno() - 1, currentfilename, l1)
m = includeline.match(origfinalline)
if m:
fn = m.group('name')
if os.path.isfile(fn):
readfortrancode(fn, dowithline=dowithline, istop=0)
else:
include_dirs = [os.path.dirname(currentfilename)] + include_paths
foundfile = 0
for inc_dir in include_dirs:
fn1 = os.path.join(inc_dir, fn)
if os.path.isfile(fn1):
foundfile = 1
readfortrancode(fn1, dowithline=dowithline, istop=0)
break
if not foundfile:
outmess('readfortrancode: could not find include file %s in %s. Ignoring.\n' % (
repr(fn), os.pathsep.join(include_dirs)))
else:
dowithline(finalline)
filepositiontext = ''
fin.close()
if istop:
dowithline('', 1)
else:
gotnextfile, filepositiontext, currentfilename, sourcecodeform, strictf77,\
beginpattern, quiet, verbose, dolowercase = saveglobals
# Crack line
# ``beforethisafter`` is a regex template with four %s slots:
# (prefix, keyword-lookahead, keyword, trailer).  Every statement pattern
# below is a 2-tuple ``(compiled_regex, label)``; crackline() tries the
# regexes in order and dispatches on the label string.
beforethisafter = r'\s*(?P<before>%s(?=\s*(\b(%s)\b)))' + \
                  r'\s*(?P<this>(\b(%s)\b))' + \
                  r'\s*(?P<after>%s)\s*\Z'
##
# Fortran type keywords recognized in declarations.
fortrantypes = r'character|logical|integer|real|complex|double\s*(precision\s*(complex|)|complex)|type(?=\s*\([\w\s,=(*)]*\))|byte'
typespattern = re.compile(
    beforethisafter % ('', fortrantypes, fortrantypes, '.*'), re.I), 'type'
# Implicit statements may additionally use static/automatic/undefined.
typespattern4implicit = re.compile(beforethisafter % (
    '', fortrantypes + '|static|automatic|undefined', fortrantypes + '|static|automatic|undefined', '.*'), re.I)
#
functionpattern = re.compile(beforethisafter % (
    r'([a-z]+[\w\s(=*+-/)]*?|)', 'function', 'function', '.*'), re.I), 'begin'
subroutinepattern = re.compile(beforethisafter % (
    r'[a-z\s]*?', 'subroutine', 'subroutine', '.*'), re.I), 'begin'
# modulepattern=re.compile(beforethisafter%('[a-z\s]*?','module','module','.*'),re.I),'begin'
#
# Block openers: FORTRAN 77 set, then the Fortran 90 extensions.
groupbegins77 = r'program|block\s*data'
beginpattern77 = re.compile(
    beforethisafter % ('', groupbegins77, groupbegins77, '.*'), re.I), 'begin'
groupbegins90 = groupbegins77 + \
    r'|module(?!\s*procedure)|python\s*module|interface|type(?!\s*\()'
beginpattern90 = re.compile(
    beforethisafter % ('', groupbegins90, groupbegins90, '.*'), re.I), 'begin'
groupends = r'end|endprogram|endblockdata|endmodule|endpythonmodule|endinterface'
endpattern = re.compile(
    beforethisafter % ('', groupends, groupends, r'[\w\s]*'), re.I), 'end'
# endifs='end\s*(if|do|where|select|while|forall)'
endifs = r'(end\s*(if|do|where|select|while|forall))|(module\s*procedure)'
endifpattern = re.compile(
    beforethisafter % (r'[\w]*?', endifs, endifs, r'[\w\s]*'), re.I), 'endif'
#
implicitpattern = re.compile(
    beforethisafter % ('', 'implicit', 'implicit', '.*'), re.I), 'implicit'
dimensionpattern = re.compile(beforethisafter % (
    '', 'dimension|virtual', 'dimension|virtual', '.*'), re.I), 'dimension'
externalpattern = re.compile(
    beforethisafter % ('', 'external', 'external', '.*'), re.I), 'external'
optionalpattern = re.compile(
    beforethisafter % ('', 'optional', 'optional', '.*'), re.I), 'optional'
requiredpattern = re.compile(
    beforethisafter % ('', 'required', 'required', '.*'), re.I), 'required'
publicpattern = re.compile(
    beforethisafter % ('', 'public', 'public', '.*'), re.I), 'public'
privatepattern = re.compile(
    beforethisafter % ('', 'private', 'private', '.*'), re.I), 'private'
# NOTE(review): 'intrisic' looks like a typo for the Fortran keyword
# 'intrinsic', but the same spelling is used consistently by crackline()
# and analyzeline() below — confirm every use site before renaming.
intrisicpattern = re.compile(
    beforethisafter % ('', 'intrisic', 'intrisic', '.*'), re.I), 'intrisic'
intentpattern = re.compile(beforethisafter % (
    '', 'intent|depend|note|check', 'intent|depend|note|check', r'\s*\(.*?\).*'), re.I), 'intent'
parameterpattern = re.compile(
    beforethisafter % ('', 'parameter', 'parameter', r'\s*\(.*'), re.I), 'parameter'
datapattern = re.compile(
    beforethisafter % ('', 'data', 'data', '.*'), re.I), 'data'
callpattern = re.compile(
    beforethisafter % ('', 'call', 'call', '.*'), re.I), 'call'
entrypattern = re.compile(
    beforethisafter % ('', 'entry', 'entry', '.*'), re.I), 'entry'
# 'callfun' is a synthetic statement generated by crackline() for calls to
# known external functions; it never occurs in real Fortran source.
callfunpattern = re.compile(
    beforethisafter % ('', 'callfun', 'callfun', '.*'), re.I), 'callfun'
commonpattern = re.compile(
    beforethisafter % ('', 'common', 'common', '.*'), re.I), 'common'
usepattern = re.compile(
    beforethisafter % ('', 'use', 'use', '.*'), re.I), 'use'
containspattern = re.compile(
    beforethisafter % ('', 'contains', 'contains', ''), re.I), 'contains'
formatpattern = re.compile(
    beforethisafter % ('', 'format', 'format', '.*'), re.I), 'format'
# Non-fortran and f2py-specific statements
f2pyenhancementspattern = re.compile(beforethisafter % ('', 'threadsafe|fortranname|callstatement|callprotoargument|usercode|pymethoddef',
                                                        'threadsafe|fortranname|callstatement|callprotoargument|usercode|pymethoddef', '.*'), re.I | re.S), 'f2pyenhancements'
multilinepattern = re.compile(
    r"\s*(?P<before>''')(?P<this>.*?)(?P<after>''')\s*\Z", re.S), 'multiline'
##
def _simplifyargs(argsline):
    """Normalize an argument list for a synthetic 'callfun' statement.

    Splits *argsline* on outer-level commas (via markoutercomma) and
    replaces any parentheses or commas remaining inside each argument
    with underscores, so every piece becomes a plain identifier-like
    token.
    """
    table = str.maketrans('(),', '___')
    pieces = [part.translate(table)
              for part in markoutercomma(argsline).split('@,@')]
    return ','.join(pieces)
# Matches 'result = <expr>' assignments so function calls can be rewritten
# as 'callfun ... result (...)' statements.
crackline_re_1 = re.compile(r'\s*(?P<result>\b[a-z]+[\w]*\b)\s*[=].*', re.I)


def crackline(line, reset=0):
    """Dispatch a single joined source line to analyzeline().

    reset=-1 initializes the module-global group bookkeeping
    (groupcounter/groupname/groupcache/grouplist); reset=1 performs the
    final block-mismatch check and cleanup after a file has been read;
    reset=0 matches *line* against the statement patterns defined above
    and forwards the match to analyzeline().  Cracked data accumulates
    in the module-global grouplist/groupcache structures.
    """
    global beginpattern, groupcounter, groupname, groupcache, grouplist
    global filepositiontext, currentfilename, neededmodule, expectbegin
    global skipblocksuntil, skipemptyends, previous_context, gotnextfile
    # Split statement separators, but never inside f2py enhancement or
    # multiline ('''...''') constructs where ';' is ordinary text.
    if ';' in line and not (f2pyenhancementspattern[0].match(line) or
                            multilinepattern[0].match(line)):
        for l in line.split(';'):
            # XXX: non-zero reset values need testing
            assert reset == 0, repr(reset)
            crackline(l, reset)
        return
    if reset < 0:
        groupcounter = 0
        groupname = {groupcounter: ''}
        groupcache = {groupcounter: {}}
        grouplist = {groupcounter: []}
        groupcache[groupcounter]['body'] = []
        groupcache[groupcounter]['vars'] = {}
        groupcache[groupcounter]['block'] = ''
        groupcache[groupcounter]['name'] = ''
        neededmodule = -1
        skipblocksuntil = -1
        return
    if reset > 0:
        fl = 0
        if f77modulename and neededmodule == groupcounter:
            fl = 2
        # Any still-open groups at this point indicate missing 'end'
        # statements; close them one by one with a warning.
        while groupcounter > fl:
            outmess('crackline: groupcounter=%s groupname=%s\n' %
                    (repr(groupcounter), repr(groupname)))
            outmess(
                'crackline: Mismatch of blocks encountered. Trying to fix it by assuming "end" statement.\n')
            grouplist[groupcounter - 1].append(groupcache[groupcounter])
            grouplist[groupcounter - 1][-1]['body'] = grouplist[groupcounter]
            del grouplist[groupcounter]
            groupcounter = groupcounter - 1
        if f77modulename and neededmodule == groupcounter:
            grouplist[groupcounter - 1].append(groupcache[groupcounter])
            grouplist[groupcounter - 1][-1]['body'] = grouplist[groupcounter]
            del grouplist[groupcounter]
            groupcounter = groupcounter - 1  # end interface
            grouplist[groupcounter - 1].append(groupcache[groupcounter])
            grouplist[groupcounter - 1][-1]['body'] = grouplist[groupcounter]
            del grouplist[groupcounter]
            groupcounter = groupcounter - 1  # end module
            neededmodule = -1
        return
    if line == '':
        return
    flag = 0
    for pat in [dimensionpattern, externalpattern, intentpattern, optionalpattern,
                requiredpattern,
                parameterpattern, datapattern, publicpattern, privatepattern,
                intrisicpattern,
                endifpattern, endpattern,
                formatpattern,
                beginpattern, functionpattern, subroutinepattern,
                implicitpattern, typespattern, commonpattern,
                callpattern, usepattern, containspattern,
                entrypattern,
                f2pyenhancementspattern,
                multilinepattern
                ]:
        m = pat[0].match(line)
        if m:
            break
        flag = flag + 1
    if not m:
        # No statement pattern matched: try to recognize a call to a known
        # external function and rewrite it as a synthetic 'callfun' line.
        re_1 = crackline_re_1
        if 0 <= skipblocksuntil <= groupcounter:
            return
        if 'externals' in groupcache[groupcounter]:
            for name in groupcache[groupcounter]['externals']:
                if name in invbadnames:
                    name = invbadnames[name]
                if 'interfaced' in groupcache[groupcounter] and name in groupcache[groupcounter]['interfaced']:
                    continue
                m1 = re.match(
                    r'(?P<before>[^"]*)\b%s\b\s*@\(@(?P<args>[^@]*)@\)@.*\Z' % name, markouterparen(line), re.I)
                if m1:
                    m2 = re_1.match(m1.group('before'))
                    a = _simplifyargs(m1.group('args'))
                    if m2:
                        line = 'callfun %s(%s) result (%s)' % (
                            name, a, m2.group('result'))
                    else:
                        line = 'callfun %s(%s)' % (name, a)
                    m = callfunpattern[0].match(line)
                    if not m:
                        outmess(
                            'crackline: could not resolve function call for line=%s.\n' % repr(line))
                        return
                    analyzeline(m, 'callfun', line)
                    return
        if verbose > 1 or (verbose == 1 and currentfilename.lower().endswith('.pyf')):
            previous_context = None
            outmess('crackline:%d: No pattern for line\n' % (groupcounter))
        return
    elif pat[1] == 'end':
        if 0 <= skipblocksuntil < groupcounter:
            groupcounter = groupcounter - 1
            if skipblocksuntil <= groupcounter:
                return
        if groupcounter <= 0:
            raise Exception('crackline: groupcounter(=%s) is nonpositive. '
                            'Check the blocks.'
                            % (groupcounter))
        m1 = beginpattern[0].match((line))
        if (m1) and (not m1.group('this') == groupname[groupcounter]):
            raise Exception('crackline: End group %s does not match with '
                            'previous Begin group %s\n\t%s' %
                            (repr(m1.group('this')), repr(groupname[groupcounter]),
                             filepositiontext)
                            )
        if skipblocksuntil == groupcounter:
            skipblocksuntil = -1
        # Close the current group: fold its child list into its body.
        grouplist[groupcounter - 1].append(groupcache[groupcounter])
        grouplist[groupcounter - 1][-1]['body'] = grouplist[groupcounter]
        del grouplist[groupcounter]
        groupcounter = groupcounter - 1
        if not skipemptyends:
            expectbegin = 1
    elif pat[1] == 'begin':
        if 0 <= skipblocksuntil <= groupcounter:
            groupcounter = groupcounter + 1
            return
        gotnextfile = 0
        analyzeline(m, pat[1], line)
        expectbegin = 0
    elif pat[1] == 'endif':
        pass
    elif pat[1] == 'contains':
        if ignorecontains:
            return
        if 0 <= skipblocksuntil <= groupcounter:
            return
        skipblocksuntil = groupcounter
    else:
        if 0 <= skipblocksuntil <= groupcounter:
            return
        analyzeline(m, pat[1], line)
def markouterparen(line):
    """Replace the outermost parentheses in *line* with '@(@' / '@)@'.

    Inner (nested) parentheses are copied through untouched, so later
    regexes can anchor on the outermost grouping only.
    """
    out = []
    depth = 0
    for ch in line:
        if ch == '(':
            depth += 1
            if depth == 1:
                out.append('@(@')
                continue
        elif ch == ')':
            depth -= 1
            if depth == 0:
                out.append('@)@')
                continue
        out.append(ch)
    return ''.join(out)
def markoutercomma(line, comma=','):
    """Mark each depth-zero occurrence of *comma* in *line* as '@<comma>@'.

    Depth is increased by parentheses and by single-quoted strings (a
    quote preceded by a backslash in the output does not open a string);
    commas inside either are left alone.  Asserts that all parentheses
    and quotes are balanced.
    """
    out = ''
    depth = 0
    closer = ''  # expected closing char: ')' inside parens, "'" inside quotes
    for ch in line:
        if ch == '(' and closer in ('', ')'):
            depth += 1
            closer = ')'
        elif ch == "'" and not closer and (not out or out[-1] != '\\'):
            depth += 1
            closer = '\''
        elif ch == closer:
            depth -= 1
            if depth == 0:
                closer = ''
        elif ch == comma and depth == 0:
            out = out + '@' + comma + '@'
            continue
        out = out + ch
    assert not depth, repr((depth, line, out, closer))
    return out
def unmarkouterparen(line):
    """Inverse of markouterparen: restore '@(@'/'@)@' markers to parentheses."""
    restored = line.replace('@(@', '(')
    restored = restored.replace('@)@', ')')
    return restored
def appenddecl(decl, decl2, force=1):
    """Merge the variable-declaration dict *decl2* into *decl*.

    Scalar entries ('typespec', '=', 'typename') overwrite existing ones
    only when *force* is true (or the key is absent); 'attrspec',
    'kindselector' and 'charselector' are merged via the corresponding
    set* helpers; 'note' is ignored.  Keys known but unimplemented are
    reported via errmess; unknown keys raise Exception.  Returns the
    (possibly freshly created) *decl* dict.
    """
    decl = decl or {}
    if not decl2 or decl is decl2:
        return decl
    for key in list(decl2.keys()):
        value = decl2[key]
        if key in ('typespec', '=', 'typename'):
            if force or key not in decl:
                decl[key] = value
        elif key == 'attrspec':
            for attr in value:
                decl = setattrspec(decl, attr, force)
        elif key == 'kindselector':
            decl = setkindselector(decl, value, force)
        elif key == 'charselector':
            decl = setcharselector(decl, value, force)
        elif key == 'note':
            pass
        elif key in ('intent', 'check', 'dimension', 'optional', 'required'):
            errmess('appenddecl: "%s" not implemented.\n' % key)
        else:
            raise Exception('appenddecl: Unknown variable definition key:' +
                            str(key))
    return decl
# The patterns below operate on markouterparen()-marked text, i.e. the
# outermost parentheses appear as '@(@' / '@)@'.
# Splits a selector (kind/len spec such as '@(@...@)@' or '*8') from the rest.
selectpattern = re.compile(
    r'\s*(?P<this>(@\(@.*?@\)@|[*][\d*]+|[*]\s*@\(@.*?@\)@|))(?P<after>.*)\Z', re.I)
# Routine heading: name, optional argument list, optional result(...) and/or bind(...).
nameargspattern = re.compile(
    r'\s*(?P<name>\b[\w$]+\b)\s*(@\(@\s*(?P<args>[\w\s,]*)\s*@\)@|)\s*((result(\s*@\(@\s*(?P<result>\b[\w$]+\b)\s*@\)@|))|(bind\s*@\(@\s*(?P<bind>.*)\s*@\)@))*\s*\Z', re.I)
# Call-style heading: name followed by a mandatory argument list.
callnameargspattern = re.compile(
    r'\s*(?P<name>\b[\w$]+\b)\s*@\(@\s*(?P<args>.*)\s*@\)@\s*\Z', re.I)
# Fortran double-precision literals, e.g. '1.5d0' (D exponent marker).
real16pattern = re.compile(
    r'([-+]?(?:\d+(?:\.\d*)?|\d*\.\d+))[dD]((?:[-+]?\d+)?)')
# Single-precision literals with E exponent, or a bare '<digits>.<digits>'.
real8pattern = re.compile(
    r'([-+]?((?:\d+(?:\.\d*)?|\d*\.\d+))[eE]((?:[-+]?\d+)?)|(\d+\.\d*))')
_intentcallbackpattern = re.compile(r'intent\s*\(.*?\bcallback\b', re.I)
def _is_intent_callback(vdecl):
for a in vdecl.get('attrspec', []):
if _intentcallbackpattern.match(a):
return 1
return 0
def _resolvenameargspattern(line):
    """Split a routine heading into (name, args, result, bind).

    Tries the full declaration pattern first, then the call-style
    pattern; returns (None, [], None, None) when neither matches.
    """
    marked = markouterparen(line)
    match = nameargspattern.match(marked)
    if match:
        return (match.group('name'), match.group('args'),
                match.group('result'), match.group('bind'))
    match = callnameargspattern.match(marked)
    if match:
        return match.group('name'), match.group('args'), None, None
    return None, [], None, None
def analyzeline(m, case, line):
    """Record one cracked statement in the global group structures.

    *m* is the regex match produced by crackline(), *case* is the label
    of the pattern that matched (e.g. 'begin', 'data', 'use', 'common'),
    and *line* is the statement text.  Results are stored in the
    module-global groupcache/grouplist dicts at the current groupcounter
    nesting level.
    """
    global groupcounter, groupname, groupcache, grouplist, filepositiontext
    global currentfilename, f77modulename, neededinterface, neededmodule
    global expectbegin, gotnextfile, previous_context
    block = m.group('this')
    if case != 'multiline':
        previous_context = None
    # Statements appearing before any block opener: synthesize a 'program'
    # group named after the current file.
    if expectbegin and case not in ['begin', 'call', 'callfun', 'type'] \
       and not skipemptyends and groupcounter < 1:
        newname = os.path.basename(currentfilename).split('.')[0]
        outmess(
            'analyzeline: no group yet. Creating program group with name "%s".\n' % newname)
        gotnextfile = 0
        groupcounter = groupcounter + 1
        groupname[groupcounter] = 'program'
        groupcache[groupcounter] = {}
        grouplist[groupcounter] = []
        groupcache[groupcounter]['body'] = []
        groupcache[groupcounter]['vars'] = {}
        groupcache[groupcounter]['block'] = 'program'
        groupcache[groupcounter]['name'] = newname
        groupcache[groupcounter]['from'] = 'fromsky'
        expectbegin = 0
    if case in ['begin', 'call', 'callfun']:
        # Crack line => block,name,args,result
        block = block.lower()
        if re.match(r'block\s*data', block, re.I):
            block = 'block data'
        if re.match(r'python\s*module', block, re.I):
            block = 'python module'
        name, args, result, bind = _resolvenameargspattern(m.group('after'))
        if name is None:
            if block == 'block data':
                name = '_BLOCK_DATA_'
            else:
                name = ''
            if block not in ['interface', 'block data']:
                outmess('analyzeline: No name/args pattern found for line.\n')
        previous_context = (block, name, groupcounter)
        if args:
            args = rmbadname([x.strip()
                              for x in markoutercomma(args).split('@,@')])
        else:
            args = []
        if '' in args:
            while '' in args:
                args.remove('')
            outmess(
                'analyzeline: argument list is malformed (missing argument).\n')
        # end of crack line => block,name,args,result
        needmodule = 0
        needinterface = 0
        if case in ['call', 'callfun']:
            # Only wrap calls to declared external arguments that are not
            # already interfaced; otherwise nothing to do.
            needinterface = 1
            if 'args' not in groupcache[groupcounter]:
                return
            if name not in groupcache[groupcounter]['args']:
                return
            for it in grouplist[groupcounter]:
                if it['name'] == name:
                    return
            if name in groupcache[groupcounter]['interfaced']:
                return
            block = {'call': 'subroutine', 'callfun': 'function'}[case]
        if f77modulename and neededmodule == -1 and groupcounter <= 1:
            neededmodule = groupcounter + 2
            needmodule = 1
            if block != 'interface':
                needinterface = 1
        # Create new block(s)
        groupcounter = groupcounter + 1
        groupcache[groupcounter] = {}
        grouplist[groupcounter] = []
        if needmodule:
            if verbose > 1:
                outmess('analyzeline: Creating module block %s\n' %
                        repr(f77modulename), 0)
            groupname[groupcounter] = 'module'
            groupcache[groupcounter]['block'] = 'python module'
            groupcache[groupcounter]['name'] = f77modulename
            groupcache[groupcounter]['from'] = ''
            groupcache[groupcounter]['body'] = []
            groupcache[groupcounter]['externals'] = []
            groupcache[groupcounter]['interfaced'] = []
            groupcache[groupcounter]['vars'] = {}
            groupcounter = groupcounter + 1
            groupcache[groupcounter] = {}
            grouplist[groupcounter] = []
        if needinterface:
            if verbose > 1:
                outmess('analyzeline: Creating additional interface block (groupcounter=%s).\n' % (
                    groupcounter), 0)
            groupname[groupcounter] = 'interface'
            groupcache[groupcounter]['block'] = 'interface'
            groupcache[groupcounter]['name'] = 'unknown_interface'
            groupcache[groupcounter]['from'] = '%s:%s' % (
                groupcache[groupcounter - 1]['from'], groupcache[groupcounter - 1]['name'])
            groupcache[groupcounter]['body'] = []
            groupcache[groupcounter]['externals'] = []
            groupcache[groupcounter]['interfaced'] = []
            groupcache[groupcounter]['vars'] = {}
            groupcounter = groupcounter + 1
            groupcache[groupcounter] = {}
            grouplist[groupcounter] = []
        groupname[groupcounter] = block
        groupcache[groupcounter]['block'] = block
        if not name:
            name = 'unknown_' + block
        groupcache[groupcounter]['prefix'] = m.group('before')
        groupcache[groupcounter]['name'] = rmbadname1(name)
        groupcache[groupcounter]['result'] = result
        if groupcounter == 1:
            groupcache[groupcounter]['from'] = currentfilename
        else:
            if f77modulename and groupcounter == 3:
                groupcache[groupcounter]['from'] = '%s:%s' % (
                    groupcache[groupcounter - 1]['from'], currentfilename)
            else:
                groupcache[groupcounter]['from'] = '%s:%s' % (
                    groupcache[groupcounter - 1]['from'], groupcache[groupcounter - 1]['name'])
        for k in list(groupcache[groupcounter].keys()):
            if not groupcache[groupcounter][k]:
                del groupcache[groupcounter][k]
        groupcache[groupcounter]['args'] = args
        groupcache[groupcounter]['body'] = []
        groupcache[groupcounter]['externals'] = []
        groupcache[groupcounter]['interfaced'] = []
        groupcache[groupcounter]['vars'] = {}
        groupcache[groupcounter]['entry'] = {}
        # end of creation
        if block == 'type':
            groupcache[groupcounter]['varnames'] = []
        if case in ['call', 'callfun']:  # set parents variables
            if name not in groupcache[groupcounter - 2]['externals']:
                groupcache[groupcounter - 2]['externals'].append(name)
            groupcache[groupcounter]['vars'] = copy.deepcopy(
                groupcache[groupcounter - 2]['vars'])
            try:
                del groupcache[groupcounter]['vars'][name][
                    groupcache[groupcounter]['vars'][name]['attrspec'].index('external')]
            except:
                pass
        if block in ['function', 'subroutine']:  # set global attributes
            try:
                groupcache[groupcounter]['vars'][name] = appenddecl(
                    groupcache[groupcounter]['vars'][name], groupcache[groupcounter - 2]['vars'][''])
            except:
                pass
            if case == 'callfun':  # return type
                if result and result in groupcache[groupcounter]['vars']:
                    if not name == result:
                        groupcache[groupcounter]['vars'][name] = appenddecl(
                            groupcache[groupcounter]['vars'][name], groupcache[groupcounter]['vars'][result])
            # if groupcounter>1: # name is interfaced
            try:
                groupcache[groupcounter - 2]['interfaced'].append(name)
            except:
                pass
        if block == 'function':
            t = typespattern[0].match(m.group('before') + ' ' + name)
            if t:
                typespec, selector, attr, edecl = cracktypespec0(
                    t.group('this'), t.group('after'))
                updatevars(typespec, selector, attr, edecl)
        if case in ['call', 'callfun']:
            # The synthetic routine + interface groups are complete; close
            # them immediately.
            grouplist[groupcounter - 1].append(groupcache[groupcounter])
            grouplist[groupcounter - 1][-1]['body'] = grouplist[groupcounter]
            del grouplist[groupcounter]
            groupcounter = groupcounter - 1  # end routine
            grouplist[groupcounter - 1].append(groupcache[groupcounter])
            grouplist[groupcounter - 1][-1]['body'] = grouplist[groupcounter]
            del grouplist[groupcounter]
            groupcounter = groupcounter - 1  # end interface
    # ENTRY statement: record an alternate entry point of the current routine.
    elif case == 'entry':
        name, args, result, bind = _resolvenameargspattern(m.group('after'))
        if name is not None:
            if args:
                args = rmbadname([x.strip()
                                  for x in markoutercomma(args).split('@,@')])
            else:
                args = []
            assert result is None, repr(result)
            groupcache[groupcounter]['entry'][name] = args
            previous_context = ('entry', name, groupcounter)
    elif case == 'type':
        typespec, selector, attr, edecl = cracktypespec0(
            block, m.group('after'))
        last_name = updatevars(typespec, selector, attr, edecl)
        if last_name is not None:
            previous_context = ('variable', last_name, groupcounter)
    # Attribute statements: attach attrspec entries to the named variables.
    elif case in ['dimension', 'intent', 'optional', 'required', 'external', 'public', 'private', 'intrisic']:
        edecl = groupcache[groupcounter]['vars']
        ll = m.group('after').strip()
        i = ll.find('::')
        if i < 0 and case == 'intent':
            i = markouterparen(ll).find('@)@') - 2
            ll = ll[:i + 1] + '::' + ll[i + 1:]
            i = ll.find('::')
            if ll[i:] == '::' and 'args' in groupcache[groupcounter]:
                outmess('All arguments will have attribute %s%s\n' %
                        (m.group('this'), ll[:i]))
                ll = ll + ','.join(groupcache[groupcounter]['args'])
        if i < 0:
            i = 0
            pl = ''
        else:
            pl = ll[:i].strip()
            ll = ll[i + 2:]
        ch = markoutercomma(pl).split('@,@')
        if len(ch) > 1:
            pl = ch[0]
            outmess('analyzeline: cannot handle multiple attributes without type specification. Ignoring %r.\n' % (
                ','.join(ch[1:])))
        last_name = None
        for e in [x.strip() for x in markoutercomma(ll).split('@,@')]:
            m1 = namepattern.match(e)
            if not m1:
                if case in ['public', 'private']:
                    k = ''
                else:
                    print(m.groupdict())
                    outmess('analyzeline: no name pattern found in %s statement for %s. Skipping.\n' % (
                        case, repr(e)))
                    continue
            else:
                k = rmbadname1(m1.group('name'))
            if k not in edecl:
                edecl[k] = {}
            if case == 'dimension':
                ap = case + m1.group('after')
            if case == 'intent':
                ap = m.group('this') + pl
                if _intentcallbackpattern.match(ap):
                    if k not in groupcache[groupcounter]['args']:
                        if groupcounter > 1:
                            if '__user__' not in groupcache[groupcounter - 2]['name']:
                                outmess(
                                    'analyzeline: missing __user__ module (could be nothing)\n')
                            # fixes ticket 1693
                            if k != groupcache[groupcounter]['name']:
                                outmess('analyzeline: appending intent(callback) %s'
                                        ' to %s arguments\n' % (k, groupcache[groupcounter]['name']))
                                groupcache[groupcounter]['args'].append(k)
                        else:
                            errmess(
                                'analyzeline: intent(callback) %s is ignored' % (k))
                    else:
                        errmess('analyzeline: intent(callback) %s is already'
                                ' in argument list' % (k))
            if case in ['optional', 'required', 'public', 'external', 'private', 'intrisic']:
                ap = case
            if 'attrspec' in edecl[k]:
                edecl[k]['attrspec'].append(ap)
            else:
                edecl[k]['attrspec'] = [ap]
            if case == 'external':
                if groupcache[groupcounter]['block'] == 'program':
                    outmess('analyzeline: ignoring program arguments\n')
                    continue
                if k not in groupcache[groupcounter]['args']:
                    continue
                if 'externals' not in groupcache[groupcounter]:
                    groupcache[groupcounter]['externals'] = []
                groupcache[groupcounter]['externals'].append(k)
            last_name = k
        groupcache[groupcounter]['vars'] = edecl
        if last_name is not None:
            previous_context = ('variable', last_name, groupcounter)
    # PARAMETER statement: evaluate each name=expr pair and store its value.
    elif case == 'parameter':
        edecl = groupcache[groupcounter]['vars']
        ll = m.group('after').strip()[1:-1]
        last_name = None
        for e in markoutercomma(ll).split('@,@'):
            try:
                k, initexpr = [x.strip() for x in e.split('=')]
            except:
                outmess(
                    'analyzeline: could not extract name,expr in parameter statement "%s" of "%s"\n' % (e, ll))
                continue
            params = get_parameters(edecl)
            k = rmbadname1(k)
            if k not in edecl:
                edecl[k] = {}
            if '=' in edecl[k] and (not edecl[k]['='] == initexpr):
                outmess('analyzeline: Overwriting the value of parameter "%s" ("%s") with "%s".\n' % (
                    k, edecl[k]['='], initexpr))
            t = determineexprtype(initexpr, params)
            if t:
                if t.get('typespec') == 'real':
                    tt = list(initexpr)
                    for m in real16pattern.finditer(initexpr):
                        tt[m.start():m.end()] = list(
                            initexpr[m.start():m.end()].lower().replace('d', 'e'))
                    initexpr = ''.join(tt)
                elif t.get('typespec') == 'complex':
                    initexpr = initexpr[1:].lower().replace('d', 'e').\
                        replace(',', '+1j*(')
            try:
                # NOTE(review): eval of Fortran-derived expression text;
                # assumed safe for trusted source files only.
                v = eval(initexpr, {}, params)
            except (SyntaxError, NameError, TypeError) as msg:
                errmess('analyzeline: Failed to evaluate %r. Ignoring: %s\n'
                        % (initexpr, msg))
                continue
            edecl[k]['='] = repr(v)
            if 'attrspec' in edecl[k]:
                edecl[k]['attrspec'].append('parameter')
            else:
                edecl[k]['attrspec'] = ['parameter']
            last_name = k
        groupcache[groupcounter]['vars'] = edecl
        if last_name is not None:
            previous_context = ('variable', last_name, groupcounter)
    # IMPLICIT statement: None means 'implicit none', otherwise a dict
    # mapping first letters to declaration dicts.
    elif case == 'implicit':
        if m.group('after').strip().lower() == 'none':
            groupcache[groupcounter]['implicit'] = None
        elif m.group('after'):
            if 'implicit' in groupcache[groupcounter]:
                impl = groupcache[groupcounter]['implicit']
            else:
                impl = {}
            if impl is None:
                outmess(
                    'analyzeline: Overwriting earlier "implicit none" statement.\n')
                impl = {}
            for e in markoutercomma(m.group('after')).split('@,@'):
                decl = {}
                m1 = re.match(
                    r'\s*(?P<this>.*?)\s*(\(\s*(?P<after>[a-z-, ]+)\s*\)\s*|)\Z', e, re.I)
                if not m1:
                    outmess(
                        'analyzeline: could not extract info of implicit statement part "%s"\n' % (e))
                    continue
                m2 = typespattern4implicit.match(m1.group('this'))
                if not m2:
                    outmess(
                        'analyzeline: could not extract types pattern of implicit statement part "%s"\n' % (e))
                    continue
                typespec, selector, attr, edecl = cracktypespec0(
                    m2.group('this'), m2.group('after'))
                kindselect, charselect, typename = cracktypespec(
                    typespec, selector)
                decl['typespec'] = typespec
                decl['kindselector'] = kindselect
                decl['charselector'] = charselect
                decl['typename'] = typename
                for k in list(decl.keys()):
                    if not decl[k]:
                        del decl[k]
                for r in markoutercomma(m1.group('after')).split('@,@'):
                    if '-' in r:
                        try:
                            begc, endc = [x.strip() for x in r.split('-')]
                        except:
                            outmess(
                                'analyzeline: expected "<char>-<char>" instead of "%s" in range list of implicit statement\n' % r)
                            continue
                    else:
                        begc = endc = r.strip()
                    if not len(begc) == len(endc) == 1:
                        outmess(
                            'analyzeline: expected "<char>-<char>" instead of "%s" in range list of implicit statement (2)\n' % r)
                        continue
                    for o in range(ord(begc), ord(endc) + 1):
                        impl[chr(o)] = decl
            groupcache[groupcounter]['implicit'] = impl
    # DATA statement: split '<names>/<values>/' groups (honouring quotes and
    # parentheses) and record each variable's init expression.
    elif case == 'data':
        ll = []
        dl = ''
        il = ''
        f = 0
        fc = 1
        inp = 0
        for c in m.group('after'):
            if not inp:
                if c == "'":
                    fc = not fc
                if c == '/' and fc:
                    f = f + 1
                    continue
            if c == '(':
                inp = inp + 1
            elif c == ')':
                inp = inp - 1
            if f == 0:
                dl = dl + c
            elif f == 1:
                il = il + c
            elif f == 2:
                dl = dl.strip()
                if dl.startswith(','):
                    dl = dl[1:].strip()
                ll.append([dl, il])
                dl = c
                il = ''
                f = 0
        if f == 2:
            dl = dl.strip()
            if dl.startswith(','):
                dl = dl[1:].strip()
            ll.append([dl, il])
        vars = {}
        if 'vars' in groupcache[groupcounter]:
            vars = groupcache[groupcounter]['vars']
        last_name = None
        for l in ll:
            l = [x.strip() for x in l]
            if l[0][0] == ',':
                l[0] = l[0][1:]
            if l[0][0] == '(':
                outmess(
                    'analyzeline: implied-DO list "%s" is not supported. Skipping.\n' % l[0])
                continue
            i = 0
            j = 0
            llen = len(l[1])
            for v in rmbadname([x.strip() for x in markoutercomma(l[0]).split('@,@')]):
                if v[0] == '(':
                    outmess(
                        'analyzeline: implied-DO list "%s" is not supported. Skipping.\n' % v)
                    # XXX: subsequent init expressions may get wrong values.
                    # Ignoring since data statements are irrelevant for
                    # wrapping.
                    continue
                fc = 0
                while (i < llen) and (fc or not l[1][i] == ','):
                    if l[1][i] == "'":
                        fc = not fc
                    i = i + 1
                i = i + 1
                if v not in vars:
                    vars[v] = {}
                if '=' in vars[v] and not vars[v]['='] == l[1][j:i - 1]:
                    outmess('analyzeline: changing init expression of "%s" ("%s") to "%s"\n' % (
                        v, vars[v]['='], l[1][j:i - 1]))
                vars[v]['='] = l[1][j:i - 1]
                j = i
                last_name = v
        groupcache[groupcounter]['vars'] = vars
        if last_name is not None:
            previous_context = ('variable', last_name, groupcounter)
    # COMMON statement: parse '/name/ var-list' groups ('_BLNK_' for the
    # blank common block).
    elif case == 'common':
        line = m.group('after').strip()
        if not line[0] == '/':
            line = '//' + line
        cl = []
        f = 0
        bn = ''
        ol = ''
        for c in line:
            if c == '/':
                f = f + 1
                continue
            if f >= 3:
                bn = bn.strip()
                if not bn:
                    bn = '_BLNK_'
                cl.append([bn, ol])
                f = f - 2
                bn = ''
                ol = ''
            if f % 2:
                bn = bn + c
            else:
                ol = ol + c
        bn = bn.strip()
        if not bn:
            bn = '_BLNK_'
        cl.append([bn, ol])
        commonkey = {}
        if 'common' in groupcache[groupcounter]:
            commonkey = groupcache[groupcounter]['common']
        for c in cl:
            if c[0] not in commonkey:
                commonkey[c[0]] = []
            for i in [x.strip() for x in markoutercomma(c[1]).split('@,@')]:
                if i:
                    commonkey[c[0]].append(i)
        groupcache[groupcounter]['common'] = commonkey
        previous_context = ('common', bn, groupcounter)
    # USE statement: record module name, 'only' flag and local=>use renames.
    elif case == 'use':
        m1 = re.match(
            r'\A\s*(?P<name>\b[\w]+\b)\s*((,(\s*\bonly\b\s*:|(?P<notonly>))\s*(?P<list>.*))|)\s*\Z', m.group('after'), re.I)
        if m1:
            mm = m1.groupdict()
            if 'use' not in groupcache[groupcounter]:
                groupcache[groupcounter]['use'] = {}
            name = m1.group('name')
            groupcache[groupcounter]['use'][name] = {}
            isonly = 0
            if 'list' in mm and mm['list'] is not None:
                if 'notonly' in mm and mm['notonly'] is None:
                    isonly = 1
                groupcache[groupcounter]['use'][name]['only'] = isonly
                ll = [x.strip() for x in mm['list'].split(',')]
                rl = {}
                for l in ll:
                    if '=' in l:
                        m2 = re.match(
                            r'\A\s*(?P<local>\b[\w]+\b)\s*=\s*>\s*(?P<use>\b[\w]+\b)\s*\Z', l, re.I)
                        if m2:
                            rl[m2.group('local').strip()] = m2.group(
                                'use').strip()
                        else:
                            outmess(
                                'analyzeline: Not local=>use pattern found in %s\n' % repr(l))
                    else:
                        rl[l] = l
                    groupcache[groupcounter]['use'][name]['map'] = rl
            else:
                pass
        else:
            print(m.groupdict())
            outmess('analyzeline: Could not crack the use statement.\n')
    elif case in ['f2pyenhancements']:
        if 'f2pyenhancements' not in groupcache[groupcounter]:
            groupcache[groupcounter]['f2pyenhancements'] = {}
        d = groupcache[groupcounter]['f2pyenhancements']
        if m.group('this') == 'usercode' and 'usercode' in d:
            # Multiple usercode sections accumulate into a list.
            if isinstance(d['usercode'], str):
                d['usercode'] = [d['usercode']]
            d['usercode'].append(m.group('after'))
        else:
            d[m.group('this')] = m.group('after')
    elif case == 'multiline':
        if previous_context is None:
            if verbose:
                outmess('analyzeline: No context for multiline block.\n')
            return
        gc = groupcounter
        appendmultiline(groupcache[gc],
                        previous_context[:2],
                        m.group('this'))
    else:
        if verbose > 1:
            print(m.groupdict())
            outmess('analyzeline: No code implemented for line.\n')
def appendmultiline(group, context_name, ml):
    """Store multiline block *ml* under *context_name* in group['f2pymultilines']."""
    store = group.setdefault('f2pymultilines', {})
    store.setdefault(context_name, []).append(ml)
    return
def cracktypespec0(typespec, ll):
    """Normalize a type keyword and split its selector/attributes from *ll*.

    Returns (typespec, selector, attr, remainder); returns None (via bare
    return) when no kind/char selector pattern is found in *ll*.
    """
    selector = None
    attr = None
    # Canonicalize the two-word double types; everything else is lowercased.
    if re.match(r'double\s*complex', typespec, re.I):
        typespec = 'double complex'
    elif re.match(r'double\s*precision', typespec, re.I):
        typespec = 'double precision'
    else:
        typespec = typespec.strip().lower()
    match = selectpattern.match(markouterparen(ll))
    if not match:
        outmess(
            'cracktypespec0: no kind/char_selector pattern found for line.\n')
        return
    groups = {key: unmarkouterparen(val)
              for key, val in match.groupdict().items()}
    if typespec in ('complex', 'integer', 'logical', 'real', 'character',
                    'type'):
        selector = groups['this']
        ll = groups['after']
    pos = ll.find('::')
    if pos >= 0:
        attr = ll[:pos].strip()
        ll = ll[pos + 2:]
    return typespec, selector, attr, ll
#####
# Entity-declaration helper patterns (operate on markoutercomma()-marked
# text, i.e. outer commas appear as '@,@').
# A bare name followed by the rest of the entity declaration.
namepattern = re.compile(r'\s*(?P<name>\b[\w]+\b)\s*(?P<after>.*)\s*\Z', re.I)
# '(kind=...)' / '(...)' / '*<n>' kind selectors.
kindselector = re.compile(
    r'\s*(\(\s*(kind\s*=)?\s*(?P<kind>.*)\s*\)|[*]\s*(?P<kind2>.*?))\s*\Z', re.I)
# character selector: '(<lenkind>)' or '*<charlen>'.
charselector = re.compile(
    r'\s*(\((?P<lenkind>.*)\)|[*]\s*(?P<charlen>.*))\s*\Z', re.I)
# Contents of a character selector: kind=/len= in either order.
lenkindpattern = re.compile(
    r'\s*(kind\s*=\s*(?P<kind>.*?)\s*(@,@\s*len\s*=\s*(?P<len>.*)|)|(len\s*=\s*|)(?P<len2>.*?)\s*(@,@\s*(kind\s*=\s*|)(?P<kind2>.*)|))\s*\Z', re.I)
# Entity tail: optional array spec, '*len', and '=init' or '/init/' part.
lenarraypattern = re.compile(
    r'\s*(@\(@\s*(?!/)\s*(?P<array>.*?)\s*@\)@\s*[*]\s*(?P<len>.*?)|([*]\s*(?P<len2>.*?)|)\s*(@\(@\s*(?!/)\s*(?P<array2>.*?)\s*@\)@|))\s*(=\s*(?P<init>.*?)|(@\(@|)/\s*(?P<init2>.*?)\s*/(@\)@|)|)\s*\Z', re.I)
def removespaces(expr):
    """Strip *expr* and drop interior spaces that touch an operator/bracket.

    A space is removed only when the character before or after it is one
    of "()[]{}=+-/* " — spaces between plain identifier characters are
    kept.  The first and last characters are always preserved.
    """
    expr = expr.strip()
    if len(expr) <= 1:
        return expr
    special = "()[]{}=+-/* "
    kept = [expr[0]]
    for pos in range(1, len(expr) - 1):
        ch = expr[pos]
        if ch == ' ' and (expr[pos + 1] in special or expr[pos - 1] in special):
            continue
        kept.append(ch)
    kept.append(expr[-1])
    return ''.join(kept)
def markinnerspaces(line):
    """Replace spaces inside quoted strings in *line* with '@_@' markers.

    A backslash-escaped quote or backslash is copied through without
    affecting the in-string state, so escaped quotes neither open nor
    close a string.
    """
    out = ''
    in_string = 0    # 1 while scanning inside a quoted string
    quote = '\''     # quote character that opened the current string
    prev = ''        # previously processed character, for escape detection
    for ch in line:
        if prev == '\\' and ch in ['\\', '\'', '"']:
            # escaped quote/backslash: copy it, leave the state untouched
            out = out + ch
            prev = ch
            continue
        if in_string == 0 and ch in ['\'', '"']:
            quote = ch
            in_string = 1
        elif ch == quote:
            in_string = 0
        elif ch == ' ' and in_string == 1:
            out = out + '@_@'
            continue
        out = out + ch
        prev = ch
    return out
def updatevars(typespec, selector, attrspec, entitydecl):
    """Merge one cracked type-declaration statement into the variables of
    the group currently being parsed.

    The arguments are the pieces produced by cracktypespec0(): the
    normalized type keyword, its kind/char selector, the attribute text
    before '::', and the comma-separated entity declarations after it.
    Updates ``groupcache[groupcounter]['vars']`` (and 'varnames') in
    place, warning via outmess()/errmess() on conflicting redeclarations.

    Returns the name of the last entity processed, or None.
    """
    global groupcache, groupcounter
    last_name = None
    kindselect, charselect, typename = cracktypespec(typespec, selector)
    if attrspec:
        # Split attributes on outer-level commas and lowercase only the
        # leading keyword of each (arguments of e.g. dimension() keep case).
        attrspec = [x.strip() for x in markoutercomma(attrspec).split('@,@')]
        l = []
        c = re.compile(r'(?P<start>[a-zA-Z]+)')
        for a in attrspec:
            if not a:
                continue
            m = c.match(a)
            if m:
                s = m.group('start').lower()
                a = s + a[len(s):]
            l.append(a)
        attrspec = l
    # Split entity declarations on outer commas; spaces inside quoted
    # strings were protected as '@_@' by markinnerspaces() and are
    # restored after the blank-level split below.
    el = [x.strip() for x in markoutercomma(entitydecl).split('@,@')]
    el1 = []
    for e in el:
        for e1 in [x.strip() for x in markoutercomma(removespaces(markinnerspaces(e)), comma=' ').split('@ @')]:
            if e1:
                el1.append(e1.replace('@_@', ' '))
    for e in el1:
        m = namepattern.match(e)
        if not m:
            outmess(
                'updatevars: no name pattern found for entity=%s. Skipping.\n' % (repr(e)))
            continue
        ename = rmbadname1(m.group('name'))
        edecl = {}
        if ename in groupcache[groupcounter]['vars']:
            # Entity already known: merge, warning on any conflicts.
            edecl = groupcache[groupcounter]['vars'][ename].copy()
            not_has_typespec = 'typespec' not in edecl
            if not_has_typespec:
                edecl['typespec'] = typespec
            elif typespec and (not typespec == edecl['typespec']):
                outmess('updatevars: attempt to change the type of "%s" ("%s") to "%s". Ignoring.\n' % (
                    ename, edecl['typespec'], typespec))
            if 'kindselector' not in edecl:
                edecl['kindselector'] = copy.copy(kindselect)
            elif kindselect:
                for k in list(kindselect.keys()):
                    if k in edecl['kindselector'] and (not kindselect[k] == edecl['kindselector'][k]):
                        outmess('updatevars: attempt to change the kindselector "%s" of "%s" ("%s") to "%s". Ignoring.\n' % (
                            k, ename, edecl['kindselector'][k], kindselect[k]))
                    else:
                        edecl['kindselector'][k] = copy.copy(kindselect[k])
            if 'charselector' not in edecl and charselect:
                if not_has_typespec:
                    edecl['charselector'] = charselect
                else:
                    errmess('updatevars:%s: attempt to change empty charselector to %r. Ignoring.\n'
                            % (ename, charselect))
            elif charselect:
                for k in list(charselect.keys()):
                    if k in edecl['charselector'] and (not charselect[k] == edecl['charselector'][k]):
                        outmess('updatevars: attempt to change the charselector "%s" of "%s" ("%s") to "%s". Ignoring.\n' % (
                            k, ename, edecl['charselector'][k], charselect[k]))
                    else:
                        edecl['charselector'][k] = copy.copy(charselect[k])
            if 'typename' not in edecl:
                edecl['typename'] = typename
            elif typename and (not edecl['typename'] == typename):
                outmess('updatevars: attempt to change the typename of "%s" ("%s") to "%s". Ignoring.\n' % (
                    ename, edecl['typename'], typename))
            if 'attrspec' not in edecl:
                edecl['attrspec'] = copy.copy(attrspec)
            elif attrspec:
                for a in attrspec:
                    if a not in edecl['attrspec']:
                        edecl['attrspec'].append(a)
        else:
            # New entity: take the declaration pieces as-is.
            edecl['typespec'] = copy.copy(typespec)
            edecl['kindselector'] = copy.copy(kindselect)
            edecl['charselector'] = copy.copy(charselect)
            edecl['typename'] = typename
            edecl['attrspec'] = copy.copy(attrspec)
        if m.group('after'):
            # Crack the entity tail: array spec, '*len' and/or initializer.
            m1 = lenarraypattern.match(markouterparen(m.group('after')))
            if m1:
                d1 = m1.groupdict()
                # Fold the alternate capture groups ('len2' etc.) into the
                # primary names and drop unmatched entries.
                for lk in ['len', 'array', 'init']:
                    if d1[lk + '2'] is not None:
                        d1[lk] = d1[lk + '2']
                        del d1[lk + '2']
                for k in list(d1.keys()):
                    if d1[k] is not None:
                        d1[k] = unmarkouterparen(d1[k])
                    else:
                        del d1[k]
                if 'len' in d1 and 'array' in d1:
                    if d1['len'] == '':
                        # 'a(n)*' style: the array spec is really the length.
                        d1['len'] = d1['array']
                        del d1['array']
                    else:
                        # 'a(n)*m' is treated as an extra dimension.
                        d1['array'] = d1['array'] + ',' + d1['len']
                        del d1['len']
                        errmess('updatevars: "%s %s" is mapped to "%s %s(%s)"\n' % (
                            typespec, e, typespec, ename, d1['array']))
                if 'array' in d1:
                    dm = 'dimension(%s)' % d1['array']
                    if 'attrspec' not in edecl or (not edecl['attrspec']):
                        edecl['attrspec'] = [dm]
                    else:
                        edecl['attrspec'].append(dm)
                        # Reject a second, different dimension spec.
                        for dm1 in edecl['attrspec']:
                            if dm1[:9] == 'dimension' and dm1 != dm:
                                del edecl['attrspec'][-1]
                                errmess('updatevars:%s: attempt to change %r to %r. Ignoring.\n'
                                        % (ename, dm1, dm))
                                break
                if 'len' in d1:
                    if typespec in ['complex', 'integer', 'logical', 'real']:
                        # 'integer*4' style length goes into the kind slot.
                        if ('kindselector' not in edecl) or (not edecl['kindselector']):
                            edecl['kindselector'] = {}
                        edecl['kindselector']['*'] = d1['len']
                    elif typespec == 'character':
                        if ('charselector' not in edecl) or (not edecl['charselector']):
                            edecl['charselector'] = {}
                        if 'len' in edecl['charselector']:
                            del edecl['charselector']['len']
                        edecl['charselector']['*'] = d1['len']
                if 'init' in d1:
                    if '=' in edecl and (not edecl['='] == d1['init']):
                        outmess('updatevars: attempt to change the init expression of "%s" ("%s") to "%s". Ignoring.\n' % (
                            ename, edecl['='], d1['init']))
                    else:
                        edecl['='] = d1['init']
            else:
                outmess('updatevars: could not crack entity declaration "%s". Ignoring.\n' % (
                    ename + m.group('after')))
        # Drop empty entries so the vars dicts stay compact.
        for k in list(edecl.keys()):
            if not edecl[k]:
                del edecl[k]
        groupcache[groupcounter]['vars'][ename] = edecl
        if 'varnames' in groupcache[groupcounter]:
            groupcache[groupcounter]['varnames'].append(ename)
        last_name = ename
    return last_name
def cracktypespec(typespec, selector):
    """Break a type's selector string into kind/char/type-name info.

    Returns (kindselect, charselect, typename):
      kindselect - dict from the `kindselector` regex for numeric types
                   (key '*' holds the 'integer*4' style kind), or None
      charselect - dict from the `charselector`/`lenkindpattern` regexes
                   for 'character' (key '*' holds 'character*10' style
                   length), or None
      typename   - derived-type name for 'type(<name>)', or None

    Returns None (bare return) when a selector fails to parse.
    """
    kindselect = None
    charselect = None
    typename = None
    if selector:
        if typespec in ['complex', 'integer', 'logical', 'real']:
            kindselect = kindselector.match(selector)
            if not kindselect:
                outmess(
                    'cracktypespec: no kindselector pattern found for %s\n' % (repr(selector)))
                return
            kindselect = kindselect.groupdict()
            # 'kind2' captured the '*<kind>' form; fold it into key '*'.
            kindselect['*'] = kindselect['kind2']
            del kindselect['kind2']
            for k in list(kindselect.keys()):
                if not kindselect[k]:
                    del kindselect[k]
            for k, i in list(kindselect.items()):
                kindselect[k] = rmbadname1(i)
        elif typespec == 'character':
            charselect = charselector.match(selector)
            if not charselect:
                outmess(
                    'cracktypespec: no charselector pattern found for %s\n' % (repr(selector)))
                return
            charselect = charselect.groupdict()
            # 'charlen' captured the '*<len>' form; fold it into key '*'.
            charselect['*'] = charselect['charlen']
            del charselect['charlen']
            if charselect['lenkind']:
                # '(len=...,kind=...)' style selector: crack the interior.
                lenkind = lenkindpattern.match(
                    markoutercomma(charselect['lenkind']))
                lenkind = lenkind.groupdict()
                for lk in ['len', 'kind']:
                    if lenkind[lk + '2']:
                        lenkind[lk] = lenkind[lk + '2']
                    charselect[lk] = lenkind[lk]
                    del lenkind[lk + '2']
            del charselect['lenkind']
            for k in list(charselect.keys()):
                if not charselect[k]:
                    del charselect[k]
            for k, i in list(charselect.items()):
                charselect[k] = rmbadname1(i)
        elif typespec == 'type':
            typename = re.match(r'\s*\(\s*(?P<name>\w+)\s*\)', selector, re.I)
            if typename:
                typename = typename.group('name')
            else:
                outmess('cracktypespec: no typename found in %s\n' %
                        (repr(typespec + selector)))
        else:
            outmess('cracktypespec: no selector used for %s\n' %
                    (repr(selector)))
    return kindselect, charselect, typename
######
def setattrspec(decl, attr, force=0):
    """Append attribute `attr` to decl['attrspec'], creating it if needed.

    Mutually exclusive attributes (static vs. automatic, public vs.
    private) are not added when their opposite is already present.  The
    original elif-chain intended exactly this, but its unconditional
    final ``else`` branch appended the attribute anyway, so the conflict
    checks never had any effect.  With a truthy `force`, `attr` is
    appended unconditionally (possibly duplicating an entry, as before).

    Returns the (possibly newly created) declaration dict.
    """
    if not decl:
        decl = {}
    if not attr:
        return decl
    if 'attrspec' not in decl:
        decl['attrspec'] = [attr]
        return decl
    if force:
        decl['attrspec'].append(attr)
    if attr in decl['attrspec']:
        return decl
    # Conflicting pairs: skip the append when the opposite attribute has
    # already been set on this declaration.
    conflicts = {'static': 'automatic', 'automatic': 'static',
                 'public': 'private', 'private': 'public'}
    opposite = conflicts.get(attr)
    if opposite is None or opposite not in decl['attrspec']:
        decl['attrspec'].append(attr)
    return decl
def setkindselector(decl, sel, force=0):
    """Merge kind-selector dict `sel` into declaration dict `decl`.

    Existing entries win unless `force` is true.  Returns `decl`
    (a fresh dict when the input was empty/None).
    """
    if not decl:
        decl = {}
    if not sel:
        return decl
    if 'kindselector' not in decl:
        decl['kindselector'] = sel
        return decl
    existing = decl['kindselector']
    for key in list(sel.keys()):
        if force or key not in existing:
            existing[key] = sel[key]
    return decl
def setcharselector(decl, sel, force=0):
    """Merge character-selector dict `sel` into declaration dict `decl`.

    Existing entries win unless `force` is true.  Returns `decl`
    (a fresh dict when the input was empty/None).
    """
    if not decl:
        decl = {}
    if not sel:
        return decl
    if 'charselector' not in decl:
        decl['charselector'] = sel
        return decl
    existing = decl['charselector']
    for key in list(sel.keys()):
        if force or key not in existing:
            existing[key] = sel[key]
    return decl
def getblockname(block, unknown='unknown'):
    """Return the block's name, or `unknown` when it has none."""
    return block.get('name', unknown)
# post processing
def setmesstext(block):
    """Update the module-level `filepositiontext` used to prefix messages.

    Best-effort: a block lacking 'from'/'name' keys leaves the previous
    position text in place.
    """
    global filepositiontext
    try:
        filepositiontext = 'In: %s:%s\n' % (block['from'], block['name'])
    except Exception:
        # Narrowed from a bare `except:` so KeyboardInterrupt/SystemExit
        # are no longer swallowed; missing keys are simply ignored.
        pass
def get_usedict(block):
    """Collect USE-statement info for `block` and all of its ancestors.

    Entries from the block itself override identically named entries
    inherited from parent blocks.
    """
    collected = {}
    if 'parent_block' in block:
        collected = get_usedict(block['parent_block'])
    if 'use' in block:
        collected.update(block['use'])
    return collected
def get_useparameters(block, param_map=None):
    """Collect PARAMETER values visible to `block` through USE statements.

    Looks up each used module in the global `f90modulevars` table and
    merges its evaluated parameters into `param_map` (later modules
    override earlier entries, with a warning).  USE ... ONLY / rename
    mappings are not implemented.
    """
    global f90modulevars
    if param_map is None:
        param_map = {}
    usedict = get_usedict(block)
    if not usedict:
        return param_map
    for usename, mapping in list(usedict.items()):
        usename = usename.lower()
        if usename not in f90modulevars:
            outmess('get_useparameters: no module %s info used by %s\n' %
                    (usename, block.get('name')))
            continue
        mvars = f90modulevars[usename]
        params = get_parameters(mvars)
        if not params:
            continue
        # XXX: apply mapping
        if mapping:
            errmess('get_useparameters: mapping for %s not impl.' % (mapping))
        for k, v in list(params.items()):
            if k in param_map:
                outmess('get_useparameters: overriding parameter %s with'
                        ' value from module %s' % (repr(k), repr(usename)))
            param_map[k] = v
    return param_map
def postcrack2(block, tab='', param_map=None):
    """Second post-processing pass: substitute module PARAMETER values
    into kind selectors, recursing through the block's body.

    A no-op unless Fortran 90 module variables were collected into the
    global `f90modulevars`.  `tab` only controls message indentation.
    """
    global f90modulevars
    if not f90modulevars:
        return block
    if isinstance(block, list):
        # A list of blocks: process element-wise.
        ret = []
        for g in block:
            g = postcrack2(g, tab=tab + '\t', param_map=param_map)
            ret.append(g)
        return ret
    setmesstext(block)
    outmess('%sBlock: %s\n' % (tab, block['name']), 0)
    if param_map is None:
        param_map = get_useparameters(block)
    # Replace symbolic kind values by the parameter values pulled in
    # via USE statements.
    if param_map is not None and 'vars' in block:
        vars = block['vars']
        for n in list(vars.keys()):
            var = vars[n]
            if 'kindselector' in var:
                kind = var['kindselector']
                if 'kind' in kind:
                    val = kind['kind']
                    if val in param_map:
                        kind['kind'] = param_map[val]
    new_body = []
    for b in block['body']:
        b = postcrack2(b, tab=tab + '\t', param_map=param_map)
        new_body.append(b)
    block['body'] = new_body
    return block
def postcrack(block, args=None, tab=''):
    """First post-processing pass over a cracked block (or list of blocks).

    Analyzes arguments, common blocks, variables and the body.  When the
    block declares externals, a '<name>__user__routines' python-module
    block holding the callback interface is built and collected in the
    global `usermodules`.
    """
    global usermodules, onlyfunctions
    if isinstance(block, list):
        # Process a list of blocks, moving '__user__' modules to the front.
        gret = []
        uret = []
        for g in block:
            setmesstext(g)
            g = postcrack(g, tab=tab + '\t')
            # sort user routines to appear first
            if 'name' in g and '__user__' in g['name']:
                uret.append(g)
            else:
                gret.append(g)
        return uret + gret
    setmesstext(block)
    # NOTE(review): this condition looks like it was meant to be
    # `not isinstance(block, dict) or 'block' not in block`; as written a
    # non-dict is only rejected when it also fails the membership test —
    # confirm before changing, dict inputs are unaffected either way.
    if not isinstance(block, dict) and 'block' not in block:
        raise Exception('postcrack: Expected block dictionary instead of ' +
                        str(block))
    if 'name' in block and not block['name'] == 'unknown_interface':
        outmess('%sBlock: %s\n' % (tab, block['name']), 0)
    block = analyzeargs(block)
    block = analyzecommon(block)
    block['vars'] = analyzevars(block)
    block['sortvars'] = sortvarnames(block['vars'])
    if 'args' in block and block['args']:
        args = block['args']
    block['body'] = analyzebody(block, args, tab=tab)
    userisdefined = []
    if 'use' in block:
        useblock = block['use']
        for k in list(useblock.keys()):
            if '__user__' in k:
                userisdefined.append(k)
    else:
        useblock = {}
    name = ''
    if 'name' in block:
        name = block['name']
    # and not userisdefined: # Build a __user__ module
    if 'externals' in block and block['externals']:
        interfaced = []
        if 'interfaced' in block:
            interfaced = block['interfaced']
        mvars = copy.copy(block['vars'])
        if name:
            mname = name + '__user__routines'
        else:
            mname = 'unknown__user__routines'
        if mname in userisdefined:
            # Pick a unique user-module name.
            i = 1
            while '%s_%i' % (mname, i) in userisdefined:
                i = i + 1
            mname = '%s_%i' % (mname, i)
        interface = {'block': 'interface', 'body': [],
                     'vars': {}, 'name': name + '_user_interface'}
        for e in block['externals']:
            if e in interfaced:
                # Move the external's existing interface body into the
                # new user-interface block.
                edef = []
                j = -1
                for b in block['body']:
                    j = j + 1
                    if b['block'] == 'interface':
                        i = -1
                        for bb in b['body']:
                            i = i + 1
                            if 'name' in bb and bb['name'] == e:
                                edef = copy.copy(bb)
                                del b['body'][i]
                                break
                        if edef:
                            if not b['body']:
                                del block['body'][j]
                            del interfaced[interfaced.index(e)]
                            break
                interface['body'].append(edef)
            else:
                if e in mvars and not isexternal(mvars[e]):
                    interface['vars'][e] = mvars[e]
        if interface['vars'] or interface['body']:
            block['interfaced'] = interfaced
            mblock = {'block': 'python module', 'body': [
                interface], 'vars': {}, 'name': mname, 'interfaced': block['externals']}
            useblock[mname] = {}
            usermodules.append(mblock)
    if useblock:
        block['use'] = useblock
    return block
def sortvarnames(vars):
    """Order variable names so that dependencies come before dependents.

    Names without a (non-empty) 'depend' list come first, in dict order;
    the rest are resolved by rotating the pending queue until a name has
    no pending dependencies.  On a dependency cycle the remaining names
    are appended as-is after reporting via errmess().
    """
    independent = []
    pending = []
    for name in vars:
        if vars[name].get('depend'):
            pending.append(name)
        else:
            independent.append(name)
    remaining = len(pending)
    rotations = 0
    while pending:  # XXX: How to catch dependence cycles correctly?
        candidate = pending[0]
        if any(other in vars[candidate]['depend'] for other in pending[1:]):
            # Some dependency is still unresolved: push to the back.
            pending = pending[1:] + [candidate]
            rotations += 1
            if rotations > remaining:
                # A full rotation without progress means a cycle.
                errmess('sortvarnames: failed to compute dependencies because'
                        ' of cyclic dependencies between '
                        + ', '.join(pending) + '\n')
                independent = independent + pending
                break
        else:
            independent.append(candidate)
            pending = pending[1:]
            remaining = len(pending)
            rotations = 0
    return independent
def analyzecommon(block):
    """Distribute COMMON-block member declarations into block['vars'].

    Each '/name/ a, b(dims)' member gets a 'dimension(...)' attrspec
    added to its variable entry (creating the entry if needed); the
    member lists in block['common'] are reduced to plain names and all
    member names are recorded in block['commonvars'].
    """
    if not hascommon(block):
        return block
    commonvars = []
    for k in list(block['common'].keys()):
        comvars = []
        for e in block['common'][k]:
            # Split 'name(dims)' into its parts.
            m = re.match(
                r'\A\s*\b(?P<name>.*?)\b\s*(\((?P<dims>.*?)\)|)\s*\Z', e, re.I)
            if m:
                dims = []
                if m.group('dims'):
                    dims = [x.strip()
                            for x in markoutercomma(m.group('dims')).split('@,@')]
                n = m.group('name').strip()
                if n in block['vars']:
                    if 'attrspec' in block['vars'][n]:
                        block['vars'][n]['attrspec'].append(
                            'dimension(%s)' % (','.join(dims)))
                    else:
                        block['vars'][n]['attrspec'] = [
                            'dimension(%s)' % (','.join(dims))]
                else:
                    if dims:
                        block['vars'][n] = {
                            'attrspec': ['dimension(%s)' % (','.join(dims))]}
                    else:
                        block['vars'][n] = {}
                if n not in commonvars:
                    commonvars.append(n)
            else:
                n = e
                errmess(
                    'analyzecommon: failed to extract "<name>[(<dims>)]" from "%s" in common /%s/.\n' % (e, k))
            comvars.append(n)
        block['common'][k] = comvars
    if 'commonvars' not in block:
        block['commonvars'] = commonvars
    else:
        block['commonvars'] = block['commonvars'] + commonvars
    return block
def analyzebody(block, args, tab=''):
    """Recursively post-process the sub-blocks of `block`.

    Honors the global skipfuncs/onlyfuncs routine filters, records
    Fortran 90 module variables in `f90modulevars`, routes 'python
    module' blocks to `usermodules`, and saves each routine's printable
    interface text in b['saved_interface'].  Returns the filtered body.
    """
    global usermodules, skipfuncs, onlyfuncs, f90modulevars
    setmesstext(block)
    body = []
    for b in block['body']:
        b['parent_block'] = block
        if b['block'] in ['function', 'subroutine']:
            # When an args filter is active, keep only routines in it.
            if args is not None and b['name'] not in args:
                continue
            else:
                as_ = b['args']
            if b['name'] in skipfuncs:
                continue
            if onlyfuncs and b['name'] not in onlyfuncs:
                continue
            b['saved_interface'] = crack2fortrangen(
                b, '\n' + ' ' * 6, as_interface=True)
        else:
            as_ = args
        b = postcrack(b, as_, tab=tab + '\t')
        if b['block'] == 'interface' and not b['body']:
            # Empty interfaces are dropped unless they carry f2py directives.
            if 'f2pyenhancements' not in b:
                continue
        if b['block'].replace(' ', '') == 'pythonmodule':
            usermodules.append(b)
        else:
            if b['block'] == 'module':
                f90modulevars[b['name']] = b['vars']
            body.append(b)
    return body
def buildimplicitrules(block):
    """Return (implicitrules, attrrules) for `block`.

    `implicitrules` maps a first letter to its implied type spec; it is
    None when the block declares IMPLICIT NONE.  `attrrules` collects
    letters whose implicit spec is 'static'/'automatic' separately.

    Bug fix: the default rules are now copied before being customized.
    Previously a block's IMPLICIT statements assigned directly into the
    shared module-level `defaultimplicitrules` dict, leaking one
    routine's implicit rules into every routine cracked afterwards.
    """
    setmesstext(block)
    implicitrules = defaultimplicitrules
    attrrules = {}
    if 'implicit' in block:
        if block['implicit'] is None:
            implicitrules = None
            if verbose > 1:
                outmess(
                    'buildimplicitrules: no implicit rules for routine %s.\n' % repr(block['name']))
        else:
            # Shallow copy is enough: only top-level letter keys are
            # reassigned below, never mutated in place.
            implicitrules = copy.copy(defaultimplicitrules)
            for k in list(block['implicit'].keys()):
                if block['implicit'][k].get('typespec') not in ['static', 'automatic']:
                    implicitrules[k] = block['implicit'][k]
                else:
                    attrrules[k] = block['implicit'][k]['typespec']
    return implicitrules, attrrules
def myeval(e, g=None, l=None):
    """Evaluate string expression `e`; accept only plain int/float results.

    Raises ValueError for any other result type (bool included, since the
    exact-type test deliberately excludes subclasses).
    NOTE(review): relies on eval() of cracked Fortran expression text.
    """
    result = eval(e, g, l)
    if type(result) in (int, float):
        return result
    raise ValueError('r=%r' % (result))
# Matches a bare identifier (whole string): the trivial 1*x+0 fast path.
getlincoef_re_1 = re.compile(r'\A\b\w+\b\Z', re.I)


def getlincoef(e, xset):  # e = a*x+b ; x in xset
    """Detect whether expression `e` is linear, a*x + b, in some symbol
    of `xset`.

    Works numerically: substitutes 0, 1, 0.5 and 1.5 for the candidate
    symbol and checks the results are consistent with a linear form.

    Returns (a, b, x) on success, (0, c, None) for a constant expression,
    and (None, None, None) when no linear form is found.

    Bug fix: the two bare ``except:`` clauses are narrowed to
    ``except Exception:`` so KeyboardInterrupt/SystemExit propagate.
    """
    try:
        c = int(myeval(e, {}, {}))
        return 0, c, None
    except Exception:
        pass
    if getlincoef_re_1.match(e):
        return 1, 0, e
    len_e = len(e)
    for x in xset:
        if len(x) > len_e:
            continue
        if re.search(r'\w\s*\([^)]*\b' + x + r'\b', e):
            # skip function calls having x as an argument, e.g max(1, x)
            continue
        re_1 = re.compile(r'(?P<before>.*?)\b' + x + r'\b(?P<after>.*)', re.I)
        m = re_1.match(e)
        if m:
            try:
                # Substitute every occurrence of x by a constant and
                # evaluate; repeat for 0, 1, 0.5 and 1.5.
                m1 = re_1.match(e)
                while m1:
                    ee = '%s(%s)%s' % (
                        m1.group('before'), 0, m1.group('after'))
                    m1 = re_1.match(ee)
                b = myeval(ee, {}, {})
                m1 = re_1.match(e)
                while m1:
                    ee = '%s(%s)%s' % (
                        m1.group('before'), 1, m1.group('after'))
                    m1 = re_1.match(ee)
                a = myeval(ee, {}, {}) - b
                m1 = re_1.match(e)
                while m1:
                    ee = '%s(%s)%s' % (
                        m1.group('before'), 0.5, m1.group('after'))
                    m1 = re_1.match(ee)
                c = myeval(ee, {}, {})
                # computing another point to be sure that expression is linear
                m1 = re_1.match(e)
                while m1:
                    ee = '%s(%s)%s' % (
                        m1.group('before'), 1.5, m1.group('after'))
                    m1 = re_1.match(ee)
                c2 = myeval(ee, {}, {})
                if (a * 0.5 + b == c and a * 1.5 + b == c2):
                    return a, b, x
            except Exception:
                pass
            break
    return None, None, None
# Matches a plain lowercase-leading identifier.
_varname_match = re.compile(r'\A[a-z]\w*\Z').match


def getarrlen(dl, args, star='*'):
    """Build an expression string for the extent of one array dimension
    from its bound strings ``dl = [lower, upper]`` (extent = upper-lower+1).

    Returns (d, v, di): `d` is the extent expression; when the extent is
    linear in a single argument `v` from `args` (detected via
    getlincoef()), `di` is the tail of the inverse expression used to
    solve for `v`, otherwise `v` and `di` are None.
    """
    edl = []
    # Best-effort numeric evaluation of each bound.
    try:
        edl.append(myeval(dl[0], {}, {}))
    except:
        edl.append(dl[0])
    try:
        edl.append(myeval(dl[1], {}, {}))
    except:
        edl.append(dl[1])
    # Fold a constant bound into the expression directly.
    if isinstance(edl[0], int):
        p1 = 1 - edl[0]
        if p1 == 0:
            d = str(dl[1])
        elif p1 < 0:
            d = '%s-%s' % (dl[1], -p1)
        else:
            d = '%s+%s' % (dl[1], p1)
    elif isinstance(edl[1], int):
        p1 = 1 + edl[1]
        if p1 == 0:
            d = '-(%s)' % (dl[0])
        else:
            d = '%s-(%s)' % (p1, dl[0])
    else:
        d = '%s-(%s)+1' % (dl[1], dl[0])
    # If the whole extent is a constant, return its value.
    try:
        return repr(myeval(d, {}, {})), None, None
    except:
        pass
    # Otherwise look for linear dependence of each bound on an argument.
    d1, d2 = getlincoef(dl[0], args), getlincoef(dl[1], args)
    if None not in [d1[0], d2[0]]:
        if (d1[0], d2[0]) == (0, 0):
            return repr(d2[1] - d1[1] + 1), None, None
        b = d2[1] - d1[1] + 1
        d1 = (d1[0], 0, d1[2])
        d2 = (d2[0], b, d2[2])
        if d1[0] == 0 and d2[2] in args:
            if b < 0:
                return '%s * %s - %s' % (d2[0], d2[2], -b), d2[2], '+%s)/(%s)' % (-b, d2[0])
            elif b:
                return '%s * %s + %s' % (d2[0], d2[2], b), d2[2], '-%s)/(%s)' % (b, d2[0])
            else:
                return '%s * %s' % (d2[0], d2[2]), d2[2], ')/(%s)' % (d2[0])
        if d2[0] == 0 and d1[2] in args:
            if b < 0:
                return '%s * %s - %s' % (-d1[0], d1[2], -b), d1[2], '+%s)/(%s)' % (-b, -d1[0])
            elif b:
                return '%s * %s + %s' % (-d1[0], d1[2], b), d1[2], '-%s)/(%s)' % (b, -d1[0])
            else:
                return '%s * %s' % (-d1[0], d1[2]), d1[2], ')/(%s)' % (-d1[0])
        if d1[2] == d2[2] and d1[2] in args:
            # Both bounds depend on the same argument.
            a = d2[0] - d1[0]
            if not a:
                return repr(b), None, None
            if b < 0:
                return '%s * %s - %s' % (a, d1[2], -b), d2[2], '+%s)/(%s)' % (-b, a)
            elif b:
                return '%s * %s + %s' % (a, d1[2], b), d2[2], '-%s)/(%s)' % (b, a)
            else:
                return '%s * %s' % (a, d1[2]), d2[2], ')/(%s)' % (a)
        # The remaining branches only build the textual extent expression
        # (no invertible single-argument form).
        if d1[0] == d2[0] == 1:
            c = str(d1[2])
            if c not in args:
                if _varname_match(c):
                    outmess('\tgetarrlen:variable "%s" undefined\n' % (c))
                c = '(%s)' % c
            if b == 0:
                d = '%s-%s' % (d2[2], c)
            elif b < 0:
                d = '%s-%s-%s' % (d2[2], c, -b)
            else:
                d = '%s-%s+%s' % (d2[2], c, b)
        elif d1[0] == 0:
            c2 = str(d2[2])
            if c2 not in args:
                if _varname_match(c2):
                    outmess('\tgetarrlen:variable "%s" undefined\n' % (c2))
                c2 = '(%s)' % c2
            if d2[0] == 1:
                pass
            elif d2[0] == -1:
                c2 = '-%s' % c2
            else:
                c2 = '%s*%s' % (d2[0], c2)
            if b == 0:
                d = c2
            elif b < 0:
                d = '%s-%s' % (c2, -b)
            else:
                d = '%s+%s' % (c2, b)
        elif d2[0] == 0:
            c1 = str(d1[2])
            if c1 not in args:
                if _varname_match(c1):
                    outmess('\tgetarrlen:variable "%s" undefined\n' % (c1))
                c1 = '(%s)' % c1
            if d1[0] == 1:
                c1 = '-%s' % c1
            elif d1[0] == -1:
                c1 = '+%s' % c1
            elif d1[0] < 0:
                c1 = '+%s*%s' % (-d1[0], c1)
            else:
                c1 = '-%s*%s' % (d1[0], c1)
            if b == 0:
                d = c1
            elif b < 0:
                d = '%s-%s' % (c1, -b)
            else:
                d = '%s+%s' % (c1, b)
        else:
            c1 = str(d1[2])
            if c1 not in args:
                if _varname_match(c1):
                    outmess('\tgetarrlen:variable "%s" undefined\n' % (c1))
                c1 = '(%s)' % c1
            if d1[0] == 1:
                c1 = '-%s' % c1
            elif d1[0] == -1:
                c1 = '+%s' % c1
            elif d1[0] < 0:
                c1 = '+%s*%s' % (-d1[0], c1)
            else:
                c1 = '-%s*%s' % (d1[0], c1)
            c2 = str(d2[2])
            if c2 not in args:
                if _varname_match(c2):
                    outmess('\tgetarrlen:variable "%s" undefined\n' % (c2))
                c2 = '(%s)' % c2
            if d2[0] == 1:
                pass
            elif d2[0] == -1:
                c2 = '-%s' % c2
            else:
                c2 = '%s*%s' % (d2[0], c2)
            if b == 0:
                d = '%s%s' % (c2, c1)
            elif b < 0:
                d = '%s%s-%s' % (c2, c1, -b)
            else:
                d = '%s%s+%s' % (c2, c1, b)
    return d, None, None
# Matches identifier-like words, used to find names in '=' initializers.
word_pattern = re.compile(r'\b[a-z][\w$]*\b', re.I)


def _get_depend_dict(name, vars, deps):
    """Compute (and cache in `deps`) the transitive dependency list of
    variable `name`.

    NOTE(review): when vars[name] already has a 'depend' list, `words`
    aliases that very list, so names discovered in the '=' initializer
    and transitive dependencies are appended to vars[name]['depend'] as
    a side effect — confirm this mutation is intended before changing.
    """
    if name in vars:
        words = vars[name].get('depend', [])
        if '=' in vars[name] and not isstring(vars[name]):
            for word in word_pattern.findall(vars[name]['=']):
                if word not in words and word in vars:
                    words.append(word)
        # Fold in dependencies of dependencies (memoized via `deps`).
        for word in words[:]:
            for w in deps.get(word, []) \
                    or _get_depend_dict(word, vars, deps):
                if w not in words:
                    words.append(w)
    else:
        outmess('_get_depend_dict: no dependence info for %s\n' % (repr(name)))
        words = []
    deps[name] = words
    return words
def _calc_depend_dict(vars):
    """Return a name -> transitive-dependency-list mapping for all
    variables in `vars` (computed via _get_depend_dict)."""
    deps = {}
    for name in list(vars.keys()):
        _get_depend_dict(name, vars, deps)
    return deps
def get_sorted_names(vars):
    """Return variable names ordered so dependencies precede dependents.

    Names whose dependency lists never resolve and dependency names that
    are not variables themselves are filtered out of the result.
    """
    depend_dict = _calc_depend_dict(vars)
    ordered = []
    # Seed with names that have no dependencies at all.
    for name in list(depend_dict.keys()):
        if not depend_dict[name]:
            ordered.append(name)
            del depend_dict[name]
    # Repeatedly release names whose remaining dependencies are resolved.
    while depend_dict:
        for name, lst in list(depend_dict.items()):
            unresolved = [n for n in lst if n in depend_dict]
            if unresolved:
                depend_dict[name] = unresolved
            else:
                ordered.append(name)
                del depend_dict[name]
    return [name for name in ordered if name in vars]
def _kind_func(string):
    """Approximate Fortran ``kind(x)`` for a literal argument.

    Returns 8 or 4 for real literals classified by the module-level
    real16pattern/real8pattern regexes; anything else is returned
    symbolically as the string 'kind(<arg>)'.
    """
    # XXX: return something sensible.
    if string[0] in "'\"":
        # Strip surrounding quotes from a quoted literal.
        string = string[1:-1]
    if real16pattern.match(string):
        return 8
    elif real8pattern.match(string):
        return 4
    return 'kind(' + string + ')'
def _selected_int_kind_func(r):
# XXX: This should be processor dependent
m = 10 ** r
if m <= 2 ** 8:
return 1
if m <= 2 ** 16:
return 2
if m <= 2 ** 32:
return 4
if m <= 2 ** 63:
return 8
if m <= 2 ** 128:
return 16
return -1
def _selected_real_kind_func(p, r=0, radix=0):
# XXX: This should be processor dependent
# This is only good for 0 <= p <= 20
if p < 7:
return 4
if p < 16:
return 8
machine = platform.machine().lower()
if machine.startswith('power') or machine.startswith('ppc64'):
if p <= 20:
return 16
else:
if p < 19:
return 10
elif p <= 20:
return 16
return -1
def get_parameters(vars, global_params={}):
    """Evaluate PARAMETER variables in `vars` to concrete Python values.

    Starts from `global_params` (e.g. parameters imported via USE; the
    default dict is never mutated — both maps are copied below) and
    returns a name -> value mapping in dependency order.  Values that
    cannot be evaluated are kept as their source text, with a warning.
    """
    params = copy.copy(global_params)
    g_params = copy.copy(global_params)
    # Expose the Fortran kind intrinsics to eval() below.
    for name, func in [('kind', _kind_func),
                       ('selected_int_kind', _selected_int_kind_func),
                       ('selected_real_kind', _selected_real_kind_func), ]:
        if name not in g_params:
            g_params[name] = func
    param_names = []
    for n in get_sorted_names(vars):
        if 'attrspec' in vars[n] and 'parameter' in vars[n]['attrspec']:
            param_names.append(n)
    kind_re = re.compile(r'\bkind\s*\(\s*(?P<value>.*)\s*\)', re.I)
    selected_int_kind_re = re.compile(
        r'\bselected_int_kind\s*\(\s*(?P<value>.*)\s*\)', re.I)
    selected_kind_re = re.compile(
        r'\bselected_(int|real)_kind\s*\(\s*(?P<value>.*)\s*\)', re.I)
    for n in param_names:
        if '=' in vars[n]:
            v = vars[n]['=']
            if islogical(vars[n]):
                v = v.lower()
                for repl in [
                        ('.false.', 'False'),
                        ('.true.', 'True'),
                        # TODO: test .eq., .neq., etc replacements.
                ]:
                    v = v.replace(*repl)
            v = kind_re.sub(r'kind("\1")', v)
            v = selected_int_kind_re.sub(r'selected_int_kind(\1)', v)
            # We need to act according to the data.
            # The easy case is if the data has a kind-specifier,
            # then we may easily remove those specifiers.
            # However, it may be that the user uses other specifiers...(!)
            is_replaced = False
            if 'kindselector' in vars[n]:
                if 'kind' in vars[n]['kindselector']:
                    orig_v_len = len(v)
                    v = v.replace('_' + vars[n]['kindselector']['kind'], '')
                    # Again, this will be true if even a single specifier
                    # has been replaced, see comment above.
                    is_replaced = len(v) < orig_v_len
            if not is_replaced:
                if not selected_kind_re.match(v):
                    v_ = v.split('_')
                    # In case there are additive parameters
                    if len(v_) > 1:
                        v = ''.join(v_[:-1]).lower().replace(v_[-1].lower(), '')
            # Currently this will not work for complex numbers.
            # There is missing code for extracting a complex number,
            # which may be defined in either of these:
            #  a) (Re, Im)
            #  b) cmplx(Re, Im)
            #  c) dcmplx(Re, Im)
            #  d) cmplx(Re, Im, <prec>)
            if isdouble(vars[n]):
                # Rewrite 'd' exponents as 'e' so eval() accepts them.
                tt = list(v)
                for m in real16pattern.finditer(v):
                    tt[m.start():m.end()] = list(
                        v[m.start():m.end()].lower().replace('d', 'e'))
                v = ''.join(tt)
            elif iscomplex(vars[n]):
                # FIXME complex numbers may also have exponents
                if v[0] == '(' and v[-1] == ')':
                    # FIXME, unused l looks like potential bug
                    l = markoutercomma(v[1:-1]).split('@,@')
            try:
                params[n] = eval(v, g_params, params)
            except Exception as msg:
                params[n] = v
                outmess('get_parameters: got "%s" on %s\n' % (msg, repr(v)))
            if isstring(vars[n]) and isinstance(params[n], int):
                params[n] = chr(params[n])
            nl = n.lower()
            if nl != n:
                # Make the parameter reachable by its lowercase name too.
                params[nl] = params[n]
        else:
            print(vars[n])
            outmess(
                'get_parameters:parameter %s does not have value?!\n' % (repr(n)))
    return params
def _eval_length(length, params):
if length in ['(:)', '(*)', '*']:
return '(*)'
return _eval_scalar(length, params)
_is_kind_number = re.compile(r'\d+_').match
def _eval_scalar(value, params):
if _is_kind_number(value):
value = value.split('_')[0]
try:
value = str(eval(value, {}, params))
except (NameError, SyntaxError):
return value
except Exception as msg:
errmess('"%s" in evaluating %r '
'(available names: %s)\n'
% (msg, value, list(params.keys())))
return value
def analyzevars(block):
    """Resolve the complete variable information of a cracked block.

    Applies group-level public/private attributes, implicit typing
    rules, PARAMETER substitution into kind/char selectors and dimension
    specs, parses the f2py attributes (dimension/intent/depend/check/
    note), auto-generates depend/check entries linking array arguments
    to their dimension variables, merges a function's result variable,
    handles routine prefixes ('pure'/'recursive' + type), and finally
    prunes variables that are not needed by the block.

    Returns the new vars dictionary (block['vars'] itself is not
    replaced here; the caller assigns the result).
    """
    global f90modulevars
    setmesstext(block)
    implicitrules, attrrules = buildimplicitrules(block)
    vars = copy.copy(block['vars'])
    if block['block'] == 'function' and block['name'] not in vars:
        vars[block['name']] = {}
    # The '' entry holds group-level attributes: propagate visibility.
    if '' in block['vars']:
        del vars['']
        if 'attrspec' in block['vars']['']:
            gen = block['vars']['']['attrspec']
            for n in list(vars.keys()):
                for k in ['public', 'private']:
                    if k in gen:
                        vars[n] = setattrspec(vars[n], k)
    svars = []
    args = block['args']
    # Process arguments first, in argument order, then the rest.
    for a in args:
        try:
            vars[a]
            svars.append(a)
        except KeyError:
            pass
    for n in list(vars.keys()):
        if n not in args:
            svars.append(n)
    params = get_parameters(vars, get_useparameters(block))
    dep_matches = {}
    name_match = re.compile(r'\w[\w\d_$]*').match
    # Pre-build word-boundary matchers used to detect '=' dependencies.
    for v in list(vars.keys()):
        m = name_match(v)
        if m:
            n = v[m.start():m.end()]
            try:
                dep_matches[n]
            except KeyError:
                dep_matches[n] = re.compile(r'.*\b%s\b' % (v), re.I).match
    for n in svars:
        if n[0] in list(attrrules.keys()):
            vars[n] = setattrspec(vars[n], attrrules[n[0]])
        if 'typespec' not in vars[n]:
            # Apply implicit typing by first letter, unless external.
            if not('attrspec' in vars[n] and 'external' in vars[n]['attrspec']):
                if implicitrules:
                    ln0 = n[0].lower()
                    for k in list(implicitrules[ln0].keys()):
                        if k == 'typespec' and implicitrules[ln0][k] == 'undefined':
                            continue
                        if k not in vars[n]:
                            vars[n][k] = implicitrules[ln0][k]
                        elif k == 'attrspec':
                            for l in implicitrules[ln0][k]:
                                vars[n] = setattrspec(vars[n], l)
                elif n in block['args']:
                    outmess('analyzevars: typespec of variable %s is not defined in routine %s.\n' % (
                        repr(n), block['name']))
        # Substitute parameter values into char length / kind values.
        if 'charselector' in vars[n]:
            if 'len' in vars[n]['charselector']:
                l = vars[n]['charselector']['len']
                try:
                    l = str(eval(l, {}, params))
                except:
                    pass
                vars[n]['charselector']['len'] = l
        if 'kindselector' in vars[n]:
            if 'kind' in vars[n]['kindselector']:
                l = vars[n]['kindselector']['kind']
                try:
                    l = str(eval(l, {}, params))
                except:
                    pass
                vars[n]['kindselector']['kind'] = l
        savelindims = {}
        if 'attrspec' in vars[n]:
            # Pull the f2py-specific attributes out of attrspec.
            attr = vars[n]['attrspec']
            attr.reverse()
            vars[n]['attrspec'] = []
            dim, intent, depend, check, note = None, None, None, None, None
            for a in attr:
                if a[:9] == 'dimension':
                    dim = (a[9:].strip())[1:-1]
                elif a[:6] == 'intent':
                    intent = (a[6:].strip())[1:-1]
                elif a[:6] == 'depend':
                    depend = (a[6:].strip())[1:-1]
                elif a[:5] == 'check':
                    check = (a[5:].strip())[1:-1]
                elif a[:4] == 'note':
                    note = (a[4:].strip())[1:-1]
                else:
                    vars[n] = setattrspec(vars[n], a)
                if intent:
                    if 'intent' not in vars[n]:
                        vars[n]['intent'] = []
                    for c in [x.strip() for x in markoutercomma(intent).split('@,@')]:
                        # Remove spaces so that 'in out' becomes 'inout'
                        tmp = c.replace(' ', '')
                        if tmp not in vars[n]['intent']:
                            vars[n]['intent'].append(tmp)
                    intent = None
                if note:
                    note = note.replace('\\n\\n', '\n\n')
                    note = note.replace('\\n ', '\n')
                    if 'note' not in vars[n]:
                        vars[n]['note'] = [note]
                    else:
                        vars[n]['note'].append(note)
                    note = None
                if depend is not None:
                    if 'depend' not in vars[n]:
                        vars[n]['depend'] = []
                    for c in rmbadname([x.strip() for x in markoutercomma(depend).split('@,@')]):
                        if c not in vars[n]['depend']:
                            vars[n]['depend'].append(c)
                    depend = None
                if check is not None:
                    if 'check' not in vars[n]:
                        vars[n]['check'] = []
                    for c in [x.strip() for x in markoutercomma(check).split('@,@')]:
                        if c not in vars[n]['check']:
                            vars[n]['check'].append(c)
                    check = None
            if dim and 'dimension' not in vars[n]:
                vars[n]['dimension'] = []
                for d in rmbadname([x.strip() for x in markoutercomma(dim).split('@,@')]):
                    star = '*'
                    if d == ':':
                        star = ':'
                    # Substitute parameter names occurring in the bound.
                    if d in params:
                        d = str(params[d])
                    for p in list(params.keys()):
                        re_1 = re.compile(r'(?P<before>.*?)\b' + p + r'\b(?P<after>.*)', re.I)
                        m = re_1.match(d)
                        while m:
                            d = m.group('before') + \
                                str(params[p]) + m.group('after')
                            m = re_1.match(d)
                    if d == star:
                        dl = [star]
                    else:
                        dl = markoutercomma(d, ':').split('@:@')
                    if len(dl) == 2 and '*' in dl:  # e.g. dimension(5:*)
                        dl = ['*']
                        d = '*'
                    if len(dl) == 1 and not dl[0] == star:
                        dl = ['1', dl[0]]
                    if len(dl) == 2:
                        d, v, di = getarrlen(dl, list(block['vars'].keys()))
                        if d[:4] == '1 * ':
                            d = d[4:]
                        if di and di[-4:] == '/(1)':
                            di = di[:-4]
                        if v:
                            savelindims[d] = v, di
                    vars[n]['dimension'].append(d)
        if 'dimension' in vars[n]:
            if isintent_c(vars[n]):
                shape_macro = 'shape'
            else:
                shape_macro = 'shape'  # 'fshape'
            if isstringarray(vars[n]):
                # A character array 'character*m a(n)' becomes a plain
                # character array with an extra trailing dimension.
                if 'charselector' in vars[n]:
                    d = vars[n]['charselector']
                    if '*' in d:
                        d = d['*']
                        errmess('analyzevars: character array "character*%s %s(%s)" is considered as "character %s(%s)"; "intent(c)" is forced.\n'
                                % (d, n,
                                   ','.join(vars[n]['dimension']),
                                   n, ','.join(vars[n]['dimension'] + [d])))
                        vars[n]['dimension'].append(d)
                        del vars[n]['charselector']
                        if 'intent' not in vars[n]:
                            vars[n]['intent'] = []
                        if 'c' not in vars[n]['intent']:
                            vars[n]['intent'].append('c')
                    else:
                        errmess(
                            "analyzevars: charselector=%r unhandled." % (d))
        if 'check' not in vars[n] and 'args' in block and n in block['args']:
            # Auto-generate depend/check info for this argument.
            flag = 'depend' not in vars[n]
            if flag:
                vars[n]['depend'] = []
            vars[n]['check'] = []
            if 'dimension' in vars[n]:
                #/----< no check
                i = -1
                ni = len(vars[n]['dimension'])
                for d in vars[n]['dimension']:
                    ddeps = []  # dependencies of 'd'
                    ad = ''
                    pd = ''
                    if d not in vars:
                        if d in savelindims:
                            # Linear dimension: use the inverse expression.
                            pd, ad = '(', savelindims[d][1]
                            d = savelindims[d][0]
                        else:
                            for r in block['args']:
                                if r not in vars:
                                    continue
                                if re.match(r'.*?\b' + r + r'\b', d, re.I):
                                    ddeps.append(r)
                    if d in vars:
                        if 'attrspec' in vars[d]:
                            for aa in vars[d]['attrspec']:
                                if aa[:6] == 'depend':
                                    ddeps += aa[6:].strip()[1:-1].split(',')
                        if 'depend' in vars[d]:
                            ddeps = ddeps + vars[d]['depend']
                    i = i + 1
                    if d in vars and ('depend' not in vars[d]) \
                            and ('=' not in vars[d]) and (d not in vars[n]['depend']) \
                            and l_or(isintent_in, isintent_inout, isintent_inplace)(vars[n]):
                        # The dimension variable can be derived from the
                        # array: make it optional with a default and check.
                        vars[d]['depend'] = [n]
                        if ni > 1:
                            vars[d]['='] = '%s%s(%s,%s)%s' % (
                                pd, shape_macro, n, i, ad)
                        else:
                            vars[d]['='] = '%slen(%s)%s' % (pd, n, ad)
                        # /---< no check
                        if 1 and 'check' not in vars[d]:
                            if ni > 1:
                                vars[d]['check'] = ['%s%s(%s,%i)%s==%s'
                                                    % (pd, shape_macro, n, i, ad, d)]
                            else:
                                vars[d]['check'] = [
                                    '%slen(%s)%s>=%s' % (pd, n, ad, d)]
                        if 'attrspec' not in vars[d]:
                            vars[d]['attrspec'] = ['optional']
                        if ('optional' not in vars[d]['attrspec']) and\
                           ('required' not in vars[d]['attrspec']):
                            vars[d]['attrspec'].append('optional')
                    elif d not in ['*', ':']:
                        #/----< no check
                        if flag:
                            if d in vars:
                                if n not in ddeps:
                                    vars[n]['depend'].append(d)
                            else:
                                vars[n]['depend'] = vars[n]['depend'] + ddeps
            elif isstring(vars[n]):
                length = '1'
                if 'charselector' in vars[n]:
                    if '*' in vars[n]['charselector']:
                        length = _eval_length(vars[n]['charselector']['*'],
                                              params)
                        vars[n]['charselector']['*'] = length
                    elif 'len' in vars[n]['charselector']:
                        length = _eval_length(vars[n]['charselector']['len'],
                                              params)
                        del vars[n]['charselector']['len']
                        vars[n]['charselector']['*'] = length
            if not vars[n]['check']:
                del vars[n]['check']
            if flag and not vars[n]['depend']:
                del vars[n]['depend']
        if '=' in vars[n]:
            # Variables with an initializer become optional and depend on
            # every variable mentioned in the '=' expression.
            if 'attrspec' not in vars[n]:
                vars[n]['attrspec'] = []
            if ('optional' not in vars[n]['attrspec']) and \
               ('required' not in vars[n]['attrspec']):
                vars[n]['attrspec'].append('optional')
            if 'depend' not in vars[n]:
                vars[n]['depend'] = []
                for v, m in list(dep_matches.items()):
                    if m(vars[n]['=']):
                        vars[n]['depend'].append(v)
                if not vars[n]['depend']:
                    del vars[n]['depend']
            if isscalar(vars[n]):
                vars[n]['='] = _eval_scalar(vars[n]['='], params)
    for n in list(vars.keys()):
        if n == block['name']:  # n is block name
            if 'note' in vars[n]:
                block['note'] = vars[n]['note']
            if block['block'] == 'function':
                # Merge the result variable's declaration into the
                # function-name variable.
                if 'result' in block and block['result'] in vars:
                    vars[n] = appenddecl(vars[n], vars[block['result']])
                if 'prefix' in block:
                    pr = block['prefix']
                    ispure = 0
                    isrec = 1
                    pr1 = pr.replace('pure', '')
                    ispure = (not pr == pr1)
                    pr = pr1.replace('recursive', '')
                    isrec = (not pr == pr1)
                    m = typespattern[0].match(pr)
                    if m:
                        typespec, selector, attr, edecl = cracktypespec0(
                            m.group('this'), m.group('after'))
                        kindselect, charselect, typename = cracktypespec(
                            typespec, selector)
                        vars[n]['typespec'] = typespec
                        if kindselect:
                            if 'kind' in kindselect:
                                try:
                                    kindselect['kind'] = eval(
                                        kindselect['kind'], {}, params)
                                except:
                                    pass
                            vars[n]['kindselector'] = kindselect
                        if charselect:
                            vars[n]['charselector'] = charselect
                        if typename:
                            vars[n]['typename'] = typename
                        if ispure:
                            vars[n] = setattrspec(vars[n], 'pure')
                        if isrec:
                            vars[n] = setattrspec(vars[n], 'recursive')
                    else:
                        outmess(
                            'analyzevars: prefix (%s) were not used\n' % repr(block['prefix']))
    # Outside of module-like blocks, keep only the variables that are
    # actually needed (args, commons, entries, callbacks, result).
    if not block['block'] in ['module', 'pythonmodule', 'python module', 'block data']:
        if 'commonvars' in block:
            neededvars = copy.copy(block['args'] + block['commonvars'])
        else:
            neededvars = copy.copy(block['args'])
        for n in list(vars.keys()):
            if l_or(isintent_callback, isintent_aux)(vars[n]):
                neededvars.append(n)
        if 'entry' in block:
            neededvars.extend(list(block['entry'].keys()))
            for k in list(block['entry'].keys()):
                for n in block['entry'][k]:
                    if n not in neededvars:
                        neededvars.append(n)
        if block['block'] == 'function':
            if 'result' in block:
                neededvars.append(block['result'])
            else:
                neededvars.append(block['name'])
        if block['block'] in ['subroutine', 'function']:
            name = block['name']
            if name in vars and 'intent' in vars[name]:
                block['intent'] = vars[name]['intent']
        if block['block'] == 'type':
            neededvars.extend(list(vars.keys()))
        for n in list(vars.keys()):
            if n not in neededvars:
                del vars[n]
    return vars
# Matches a plain identifier argument name.
analyzeargs_re_1 = re.compile(r'\A[a-z]+[\w$]*\Z', re.I)


def expr2name(a, block, args=[]):
    """Return a variable name for argument `a`, registering it in
    block['vars'].

    When `a` is an expression rather than a plain identifier, a
    synthetic 'e_<sanitized>_e' name is generated and bound to the
    expression's determined type.  `args` (names already taken by other
    arguments) is only read here, so the mutable default list is
    harmless.
    """
    orig_a = a
    a_is_expr = not analyzeargs_re_1.match(a)
    if a_is_expr:  # `a` is an expression
        implicitrules, attrrules = buildimplicitrules(block)
        at = determineexprtype(a, block['vars'], implicitrules)
        na = 'e_'
        for c in a:
            c = c.lower()
            if c not in string.ascii_lowercase + string.digits:
                c = '_'
            na = na + c
        if na[-1] == '_':
            na = na + 'e'
        else:
            na = na + '_e'
        a = na
        while a in block['vars'] or a in block['args']:
            a = a + 'r'
    if a in args:
        # Disambiguate from names already taken by other arguments.
        k = 1
        while a + str(k) in args:
            k = k + 1
        a = a + str(k)
    if a_is_expr:
        block['vars'][a] = at
    else:
        if a not in block['vars']:
            if orig_a in block['vars']:
                block['vars'][a] = block['vars'][orig_a]
            else:
                block['vars'][a] = {}
        if 'externals' in block and orig_a in block['externals'] + block['interfaced']:
            block['vars'][a] = setattrspec(block['vars'][a], 'external')
    return a
def analyzeargs(block):
    """Normalize the argument list of *block* in place and return it.

    Expression arguments are replaced by synthetic names via expr2name(),
    entry-point arguments and the function result get (empty) entries in
    ``block['vars']``, and body blocks whose name appears among the
    arguments are recorded in ``block['externals']``.
    """
    setmesstext(block)
    # NOTE(review): implicitrules/attrrules are computed but not used in
    # this function (expr2name derives its own rules).
    implicitrules, attrrules = buildimplicitrules(block)
    if 'args' not in block:
        block['args'] = []
    args = []
    for a in block['args']:
        a = expr2name(a, block, args)
        args.append(a)
    block['args'] = args
    if 'entry' in block:
        # Make sure every entry-point argument has a vars entry.
        for k, args1 in list(block['entry'].items()):
            for a in args1:
                if a not in block['vars']:
                    block['vars'][a] = {}
    # Body blocks named like an argument are external procedures.
    for b in block['body']:
        if b['name'] in args:
            if 'externals' not in block:
                block['externals'] = []
            if b['name'] not in block['externals']:
                block['externals'].append(b['name'])
    if 'result' in block and block['result'] not in block['vars']:
        block['vars'][block['result']] = {}
    return block
determineexprtype_re_1 = re.compile(r'\A\(.+?[,].+?\)\Z', re.I)
determineexprtype_re_2 = re.compile(r'\A[+-]?\d+(_(P<name>[\w]+)|)\Z', re.I)
determineexprtype_re_3 = re.compile(
r'\A[+-]?[\d.]+[\d+-de.]*(_(P<name>[\w]+)|)\Z', re.I)
determineexprtype_re_4 = re.compile(r'\A\(.*\)\Z', re.I)
determineexprtype_re_5 = re.compile(r'\A(?P<name>\w+)\s*\(.*?\)\s*\Z', re.I)
def _ensure_exprdict(r):
if isinstance(r, int):
return {'typespec': 'integer'}
if isinstance(r, float):
return {'typespec': 'real'}
if isinstance(r, complex):
return {'typespec': 'complex'}
if isinstance(r, dict):
return r
raise AssertionError(repr(r))
def determineexprtype(expr, vars, rules={}):
    """Determine the Fortran type of expression *expr* as an exprdict.

    Known variables are looked up in *vars*; literals are classified with
    the determineexprtype_re_* patterns; otherwise the expression is split
    on outer +,-,*,/ operators and its sub-parts are tried in turn.
    Returns e.g. ``{'typespec': 'integer'}``, or ``{}`` when the type
    cannot be determined.

    NOTE(review): the mutable default ``rules={}`` is shared across calls;
    it is only read here, but callers should pass their own mapping.
    """
    if expr in vars:
        return _ensure_exprdict(vars[expr])
    expr = expr.strip()
    # "(re,im)" style literal -> complex.
    if determineexprtype_re_1.match(expr):
        return {'typespec': 'complex'}
    m = determineexprtype_re_2.match(expr)
    if m:
        # Integer literal; a kind suffix ("_<name>") is reported unsupported.
        if 'name' in m.groupdict() and m.group('name'):
            outmess(
                'determineexprtype: selected kind types not supported (%s)\n' % repr(expr))
        return {'typespec': 'integer'}
    m = determineexprtype_re_3.match(expr)
    if m:
        if 'name' in m.groupdict() and m.group('name'):
            outmess(
                'determineexprtype: selected kind types not supported (%s)\n' % repr(expr))
        return {'typespec': 'real'}
    # Split on each arithmetic operator (markoutercomma marks occurrences
    # outside parentheses as '@op@') and use the first known sub-part.
    for op in ['+', '-', '*', '/']:
        for e in [x.strip() for x in markoutercomma(expr, comma=op).split('@' + op + '@')]:
            if e in vars:
                return _ensure_exprdict(vars[e])
    t = {}
    if determineexprtype_re_4.match(expr):  # in parenthesis
        t = determineexprtype(expr[1:-1], vars, rules)
    else:
        m = determineexprtype_re_5.match(expr)
        if m:
            # Function call / array reference: use the type of the name,
            # falling back to the implicit-typing rule for its first letter.
            rn = m.group('name')
            t = determineexprtype(m.group('name'), vars, rules)
            if t and 'attrspec' in t:
                del t['attrspec']
            if not t:
                if rn[0] in rules:
                    return _ensure_exprdict(rules[rn[0]])
        # Quoted literal -> character of assumed length.
        if expr[0] in '\'"':
            return {'typespec': 'character', 'charselector': {'*': '*'}}
    if not t:
        outmess(
            'determineexprtype: could not determine expressions (%s) type.\n' % (repr(expr)))
    return t
######
def crack2fortrangen(block, tab='\n', as_interface=False):
    """Render a cracked block (or a list of blocks) back into Fortran text.

    Recurses over the block tree; the module globals ``skipfuncs`` and
    ``onlyfuncs`` filter which functions/subroutines of a list are
    rendered.  Program blocks produce no output.  *tab* is the prefix for
    each generated line (newline plus accumulated indentation).
    """
    global skipfuncs, onlyfuncs
    setmesstext(block)
    ret = ''
    if isinstance(block, list):
        # A list of blocks: render each one, honouring skip/only filters
        # for functions and subroutines.
        for g in block:
            if g and g['block'] in ['function', 'subroutine']:
                if g['name'] in skipfuncs:
                    continue
                if onlyfuncs and g['name'] not in onlyfuncs:
                    continue
            ret = ret + crack2fortrangen(g, tab, as_interface=as_interface)
        return ret
    prefix = ''
    name = ''
    args = ''
    blocktype = block['block']
    if blocktype == 'program':
        return ''
    argsl = []
    if 'name' in block:
        name = block['name']
    if 'args' in block:
        vars = block['vars']
        # Collect printable argument names; callback arguments do not
        # appear in the signature.
        for a in block['args']:
            a = expr2name(a, block, argsl)
            if not isintent_callback(vars[a]):
                argsl.append(a)
        if block['block'] == 'function' or argsl:
            args = '(%s)' % ','.join(argsl)
    f2pyenhancements = ''
    if 'f2pyenhancements' in block:
        for k in list(block['f2pyenhancements'].keys()):
            f2pyenhancements = '%s%s%s %s' % (
                f2pyenhancements, tab + tabchar, k, block['f2pyenhancements'][k])
    intent_lst = block.get('intent', [])[:]
    if blocktype == 'function' and 'callback' in intent_lst:
        intent_lst.remove('callback')
    if intent_lst:
        f2pyenhancements = '%s%sintent(%s) %s' %\
            (f2pyenhancements, tab + tabchar,
             ','.join(intent_lst), name)
    use = ''
    if 'use' in block:
        use = use2fortran(block['use'], tab + tabchar)
    common = ''
    if 'common' in block:
        common = common2fortran(block['common'], tab + tabchar)
    if name == 'unknown_interface':
        name = ''
    result = ''
    if 'result' in block:
        result = ' result (%s)' % block['result']
        if block['result'] not in argsl:
            argsl.append(block['result'])
    # Render the body and the variable declarations (one indent deeper).
    body = crack2fortrangen(block['body'], tab + tabchar)
    vars = vars2fortran(
        block, block['vars'], argsl, tab + tabchar, as_interface=as_interface)
    mess = ''
    if 'from' in block and not as_interface:
        mess = '! in %s' % block['from']
    if 'entry' in block:
        entry_stmts = ''
        for k, i in list(block['entry'].items()):
            entry_stmts = '%s%sentry %s(%s)' \
                          % (entry_stmts, tab + tabchar, k, ','.join(i))
        body = body + entry_stmts
    if blocktype == 'block data' and name == '_BLOCK_DATA_':
        name = ''
    ret = '%s%s%s %s%s%s %s%s%s%s%s%s%send %s %s' % (
        tab, prefix, blocktype, name, args, result, mess, f2pyenhancements, use, vars, common, body, tab, blocktype, name)
    return ret
def common2fortran(common, tab=''):
    """Render Fortran COMMON statements for the mapping *common*.

    Keys are common-block names ('_BLNK_' meaning the blank common block),
    values are lists of member names; *tab* is prepended to every
    generated statement.  Returns the concatenated statements.
    """
    out = []
    for blockname in list(common.keys()):
        members = ','.join(common[blockname])
        if blockname == '_BLNK_':
            out.append('%scommon %s' % (tab, members))
        else:
            out.append('%scommon /%s/ %s' % (tab, blockname, members))
    return ''.join(out)
def use2fortran(use, tab=''):
    """Render Fortran USE statements from the mapping *use*.

    Each key is a module name; its value may contain an 'only' flag and a
    'map' of local-name -> use-name renamings.  *tab* is prepended to each
    statement.  A trailing comma left over after an empty specification is
    stripped.
    """
    out = ''
    for mod in list(use.keys()):
        spec = use[mod]
        out = out + tab + 'use ' + mod + ','
        if spec == {}:
            # Bare "use <mod>": drop the dangling comma.
            if out and out.endswith(','):
                out = out[:-1]
            continue
        if spec.get('only'):
            out = out + ' only:'
        mapping = spec.get('map')
        if mapping:
            sep = ' '
            for local in list(mapping.keys()):
                target = mapping[local]
                if local == target:
                    out = out + sep + local
                else:
                    out = out + sep + local + '=>' + target
                sep = ','
        if out and out.endswith(','):
            out = out[:-1]
    return out
def true_intent_list(var):
    """Return the intents of ``var['intent']`` confirmed by their predicate.

    Each intent name ``x`` is validated by the module-level predicate
    ``isintent_x`` (from auxfuncs); intents without a predicate are
    silently skipped.  Uses a ``globals()`` lookup instead of the previous
    ``eval('isintent_%s(var)')`` string evaluation, which was fragile
    against arbitrary intent strings.
    """
    ret = []
    for intent in var['intent']:
        # Unknown intents simply have no predicate (the old code relied on
        # eval raising NameError for the same effect).
        check = globals().get('isintent_%s' % intent)
        if check is not None and check(var):
            ret.append(intent)
    return ret
def vars2fortran(block, vars, args, tab='', as_interface=False):
    """Render the variable declarations of *block* as Fortran source.

    :param block: cracked block dictionary (provides 'vars', 'body', ...).
    :param vars: mapping of variable name -> attribute dictionary.
    :param args: argument names of the block, emitted first and in order.
    :param tab: prefix prepended to every generated line.
    :param as_interface: if true, only arguments, common variables and
        explicitly listed names are emitted, not every entry of *vars*.
    :return: the concatenated declaration lines as one string.
    """
    setmesstext(block)
    ret = ''
    # Build `nout`, the ordered list of names to declare: arguments first,
    # then common-block members, explicitly listed names, and (unless an
    # interface is being generated) everything else in `vars`.
    nout = []
    for a in args:
        if a in block['vars']:
            nout.append(a)
    if 'commonvars' in block:
        for a in block['commonvars']:
            if a in vars:
                if a not in nout:
                    nout.append(a)
            else:
                errmess(
                    'vars2fortran: Confused?!: "%s" is not defined in vars.\n' % a)
    if 'varnames' in block:
        nout.extend(block['varnames'])
    if not as_interface:
        for a in list(vars.keys()):
            if a not in nout:
                nout.append(a)
    for a in nout:
        # Warn about mutual `depend` cycles between two variables.
        if 'depend' in vars[a]:
            for d in vars[a]['depend']:
                if d in vars and 'depend' in vars[d] and a in vars[d]['depend']:
                    errmess(
                        'vars2fortran: Warning: cross-dependence between variables "%s" and "%s"\n' % (a, d))
        if 'externals' in block and a in block['externals']:
            # External procedure: emit intent(callback)/external/optional
            # lines; a full type declaration only follows for external
            # functions that are defined in this block's body.
            if isintent_callback(vars[a]):
                ret = '%s%sintent(callback) %s' % (ret, tab, a)
            ret = '%s%sexternal %s' % (ret, tab, a)
            if isoptional(vars[a]):
                ret = '%s%soptional %s' % (ret, tab, a)
            if a in vars and 'typespec' not in vars[a]:
                continue
            cont = 1
            for b in block['body']:
                if a == b['name'] and b['block'] == 'function':
                    cont = 0
                    break
            if cont:
                continue
        if a not in vars:
            show(vars)
            outmess('vars2fortran: No definition for argument "%s".\n' % a)
            continue
        # The block's own name is only declared for functions (result).
        if a == block['name'] and not block['block'] == 'function':
            continue
        if 'typespec' not in vars[a]:
            if 'attrspec' in vars[a] and 'external' in vars[a]['attrspec']:
                if a in args:
                    ret = '%s%sexternal %s' % (ret, tab, a)
                continue
            show(vars[a])
            outmess('vars2fortran: No typespec for argument "%s".\n' % a)
            continue
        vardef = vars[a]['typespec']
        if vardef == 'type' and 'typename' in vars[a]:
            vardef = '%s(%s)' % (vardef, vars[a]['typename'])
        # Append the kind/character selector, e.g. "*8" or "(len=..,kind=..)".
        selector = {}
        if 'kindselector' in vars[a]:
            selector = vars[a]['kindselector']
        elif 'charselector' in vars[a]:
            selector = vars[a]['charselector']
        if '*' in selector:
            if selector['*'] in ['*', ':']:
                vardef = '%s*(%s)' % (vardef, selector['*'])
            else:
                vardef = '%s*%s' % (vardef, selector['*'])
        else:
            if 'len' in selector:
                vardef = '%s(len=%s' % (vardef, selector['len'])
                if 'kind' in selector:
                    vardef = '%s,kind=%s)' % (vardef, selector['kind'])
                else:
                    vardef = '%s)' % (vardef)
            elif 'kind' in selector:
                vardef = '%s(kind=%s)' % (vardef, selector['kind'])
        # Accumulate attributes; `c` is the separator (first one a space).
        c = ' '
        if 'attrspec' in vars[a]:
            attr = []
            for l in vars[a]['attrspec']:
                if l not in ['external']:
                    attr.append(l)
            if attr:
                vardef = '%s, %s' % (vardef, ','.join(attr))
                c = ','
        if 'dimension' in vars[a]:
            vardef = '%s%sdimension(%s)' % (
                vardef, c, ','.join(vars[a]['dimension']))
            c = ','
        if 'intent' in vars[a]:
            lst = true_intent_list(vars[a])
            if lst:
                vardef = '%s%sintent(%s)' % (vardef, c, ','.join(lst))
                c = ','
        if 'check' in vars[a]:
            vardef = '%s%scheck(%s)' % (vardef, c, ','.join(vars[a]['check']))
            c = ','
        if 'depend' in vars[a]:
            vardef = '%s%sdepend(%s)' % (
                vardef, c, ','.join(vars[a]['depend']))
            c = ','
        if '=' in vars[a]:
            # Initializer: complex values are pretty-printed as "(re,im)"
            # when they evaluate; otherwise the raw text is kept.
            v = vars[a]['=']
            if vars[a]['typespec'] in ['complex', 'double complex']:
                try:
                    v = eval(v)
                    v = '(%s,%s)' % (v.real, v.imag)
                except:
                    pass
            vardef = '%s :: %s=%s' % (vardef, a, v)
        else:
            vardef = '%s :: %s' % (vardef, a)
        ret = '%s%s%s' % (ret, tab, vardef)
    return ret
######
def crackfortran(files):
    """Crack the given Fortran source *files* into block dictionaries.

    Reads and line-cracks all files, then post-processes the collected
    group list in two stages.  User-defined modules discovered during
    post-processing (module global ``usermodules``) are prepended to the
    returned list of blocks.
    """
    global usermodules
    outmess('Reading fortran codes...\n', 0)
    readfortrancode(files, crackline)
    outmess('Post-processing...\n', 0)
    usermodules = []
    postlist = postcrack(grouplist[0])
    outmess('Post-processing (stage 2)...\n', 0)
    postlist = postcrack2(postlist)
    return usermodules + postlist
def crack2fortran(block):
    """Render cracked *block* as signature-file text with the standard
    f2py case-sensitivity header and version footer."""
    global f2py_version
    pyf = crack2fortrangen(block) + '\n'
    header = """! -*- f90 -*-
! Note: the context of this file is case sensitive.
"""
    footer = """
! This file was auto-generated with f2py (version:%s).
! See http://cens.ioc.ee/projects/f2py2e/
""" % (f2py_version)
    return header + pyf + footer
if __name__ == "__main__":
    # Minimal command-line driver: crack the given Fortran files and
    # optionally dump the result as a .pyf signature file.
    # NOTE: quiet/verbose/strictf77/skipemptyends/sourcecodeform/
    # ignorecontains are module globals defined earlier in this file.
    files = []
    funcs = []
    f = 1   # while truthy, positional arguments are treated as file names
    f2 = 0  # set by -h: the next argument is the output .pyf file name
    f3 = 0  # set by -m: the next argument is the Fortran 77 module name
    showblocklist = 0
    # Initialize option targets so the checks after the loop cannot raise
    # NameError when -h / -m were not given (previously uninitialized).
    pyffilename = ''
    f77modulename = ''
    for l in sys.argv[1:]:
        if l == '':
            pass
        elif l[0] == ':':
            # ':' switches from collecting file names to function names.
            f = 0
        elif l == '-quiet':
            quiet = 1
            verbose = 0
        elif l == '-verbose':
            verbose = 2
            quiet = 0
        elif l == '-fix':
            if strictf77:
                outmess(
                    'Use option -f90 before -fix if Fortran 90 code is in fix form.\n', 0)
            skipemptyends = 1
            sourcecodeform = 'fix'
        elif l == '-skipemptyends':
            skipemptyends = 1
        elif l == '--ignore-contains':
            ignorecontains = 1
        elif l == '-f77':
            strictf77 = 1
            sourcecodeform = 'fix'
        elif l == '-f90':
            strictf77 = 0
            sourcecodeform = 'free'
            skipemptyends = 1
        elif l == '-h':
            f2 = 1
        elif l == '-show':
            showblocklist = 1
        elif l == '-m':
            f3 = 1
        elif l[0] == '-':
            errmess('Unknown option %s\n' % repr(l))
        elif f2:
            f2 = 0
            pyffilename = l
        elif f3:
            f3 = 0
            f77modulename = l
        elif f:
            # Verify the file is readable before queueing it.
            try:
                open(l).close()
                files.append(l)
            except IOError as detail:
                errmess('IOError: %s\n' % str(detail))
        else:
            funcs.append(l)
    if not strictf77 and f77modulename and not skipemptyends:
        outmess("""\
Warning: You have specifyied module name for non Fortran 77 code
that should not need one (expect if you are scanning F90 code
for non module blocks but then you should use flag -skipemptyends
and also be sure that the files do not contain programs without program statement).
""", 0)
    # crackfortran() takes only the file list; passing `funcs` as a second
    # positional argument (as before) raised TypeError.  `funcs` is still
    # collected for command-line compatibility but is not used here.
    postlist = crackfortran(files)
    if pyffilename:
        outmess('Writing fortran code to file %s\n' % repr(pyffilename), 0)
        pyf = crack2fortran(postlist)
        f = open(pyffilename, 'w')  # `f` reused as a file handle here
        f.write(pyf)
        f.close()
    if showblocklist:
        show(postlist)
| true | true |
f738ab779415b269bda9727a7fdc7c85dda2ca86 | 2,925 | py | Python | code_server/run_evaluate.py | Dragon-M-Ren/grad_code | d814b81adaec709d5dffd737f0c350953cc361fd | [
"Apache-2.0"
] | null | null | null | code_server/run_evaluate.py | Dragon-M-Ren/grad_code | d814b81adaec709d5dffd737f0c350953cc361fd | [
"Apache-2.0"
] | null | null | null | code_server/run_evaluate.py | Dragon-M-Ren/grad_code | d814b81adaec709d5dffd737f0c350953cc361fd | [
"Apache-2.0"
] | null | null | null | from evaluate import *
import tensorflow as tf
from utils import *
from model.gcn import GCN
from model.mlp import MLP
from model.firstcheb import FirstCheb
from model.gat import GAT
from model.dcnn import DCNN
from model.spectralcnn import SpectralCNN
from model.chebnet import ChebNet
from model.graphsage import GraphSage
from model.graphsage_meanpool import GraphSageMeanPool
from model.graphsage_maxpool import GraphSageMaxPool
from hyperpara_optim import *
import scipy.sparse as sp
import numpy as np
import pickle as pkl
from process_data import *
from draw import *
import os
'''
This file will run the test script
Three kinds of file are saved
result_path/dataset_name: the original data
processed_result_path/dataset_name/: processed data
'''
#Model done
#MLP GCN FirstCheb GAT
#Model left
#SpectralCNN, DCNN, GraphSage,
model_list = [DCNN]
model_name_list = ['dcnn']
dataset_name_list = ['pubmed']
#dataset_name_list = ['citeseer', 'cora', 'pubmed']
dataset_numbers = 10
parameter_appendix_list = ['rand']
dataset_path = './data/evaluate'
parameter_path = './hyperparameter'
result_path = './direct_output'
processed_result_path = './processed_output'
evaluate_times = 2
train_size = 230
val_size = 500
# Evaluate every (model, dataset, hyperparameter-appendix) combination and
# persist: raw per-run results, aggregated results, and training curves.
for model, model_name in zip(model_list, model_name_list):
  for dataset_name in dataset_name_list:
    for parameter_appendix in parameter_appendix_list:
      train_info_list, acc_list, time_list = evaluate_model(model,
        model_name, dataset_path, dataset_name, dataset_numbers, parameter_path,
        parameter_appendix, result_path, evaluate_times, train_size, val_size)
      #save raw per-run results to file (pickled tuple)
      save_path = os.path.join(result_path, dataset_name)
      file_name = model_name + parameter_appendix
      #make directory
      if not os.path.exists(save_path):
        os.makedirs(save_path)
      save_file = open(os.path.join(save_path, file_name), 'wb')
      pkl.dump((train_info_list, acc_list, time_list), save_file)
      save_file.close()
      #process output data (aggregate over the evaluation runs)
      train_info, acc, time = process_output(train_info_list, acc_list, time_list)
      #save processed data
      save_path = os.path.join(processed_result_path, dataset_name)
      file_name = model_name + parameter_appendix
      #make directory
      if not os.path.exists(save_path):
        os.makedirs(save_path)
      save_file = open(os.path.join(save_path, file_name), 'wb')
      pkl.dump((train_info, acc, time), save_file)
      save_file.close()
      #save train image (loss/accuracy curves over epochs)
      save_path = os.path.join(processed_result_path, dataset_name)
      plot_train(train_info['train_loss'], train_info['train_acc'],
        train_info['val_loss'], train_info['val_acc'],
        save_path, model_name, True)
| 31.117021 | 89 | 0.696068 | from evaluate import *
import tensorflow as tf
from utils import *
from model.gcn import GCN
from model.mlp import MLP
from model.firstcheb import FirstCheb
from model.gat import GAT
from model.dcnn import DCNN
from model.spectralcnn import SpectralCNN
from model.chebnet import ChebNet
from model.graphsage import GraphSage
from model.graphsage_meanpool import GraphSageMeanPool
from model.graphsage_maxpool import GraphSageMaxPool
from hyperpara_optim import *
import scipy.sparse as sp
import numpy as np
import pickle as pkl
from process_data import *
from draw import *
import os
model_list = [DCNN]
model_name_list = ['dcnn']
dataset_name_list = ['pubmed']
dataset_numbers = 10
parameter_appendix_list = ['rand']
dataset_path = './data/evaluate'
parameter_path = './hyperparameter'
result_path = './direct_output'
processed_result_path = './processed_output'
evaluate_times = 2
train_size = 230
val_size = 500
for model, model_name in zip(model_list, model_name_list):
for dataset_name in dataset_name_list:
for parameter_appendix in parameter_appendix_list:
train_info_list, acc_list, time_list = evaluate_model(model,
model_name, dataset_path, dataset_name, dataset_numbers, parameter_path,
parameter_appendix, result_path, evaluate_times, train_size, val_size)
save_path = os.path.join(result_path, dataset_name)
file_name = model_name + parameter_appendix
if not os.path.exists(save_path):
os.makedirs(save_path)
save_file = open(os.path.join(save_path, file_name), 'wb')
pkl.dump((train_info_list, acc_list, time_list), save_file)
save_file.close()
train_info, acc, time = process_output(train_info_list, acc_list, time_list)
save_path = os.path.join(processed_result_path, dataset_name)
file_name = model_name + parameter_appendix
if not os.path.exists(save_path):
os.makedirs(save_path)
save_file = open(os.path.join(save_path, file_name), 'wb')
pkl.dump((train_info, acc, time), save_file)
save_file.close()
save_path = os.path.join(processed_result_path, dataset_name)
plot_train(train_info['train_loss'], train_info['train_acc'],
train_info['val_loss'], train_info['val_acc'],
save_path, model_name, True)
| true | true |
f738ae17c137a7db78adb44ca30ff26da58f5b69 | 1,401 | py | Python | clock.py | vishal71421/pythonclock | 1688be589bdcef3a17431c5aa7f91f43b710b000 | [
"MIT"
] | 1 | 2020-08-12T18:24:46.000Z | 2020-08-12T18:24:46.000Z | clock.py | vishal71421/pythonclock | 1688be589bdcef3a17431c5aa7f91f43b710b000 | [
"MIT"
] | null | null | null | clock.py | vishal71421/pythonclock | 1688be589bdcef3a17431c5aa7f91f43b710b000 | [
"MIT"
] | 1 | 2022-03-30T11:50:29.000Z | 2022-03-30T11:50:29.000Z | # Simple analogue clock in Python 3
import turtle
import time
wndw = turtle.Screen()
wndw.bgcolor("black")
wndw.setup(width=600, height=600)
wndw.title("Analogue Clock")
wndw.tracer(0)  # disable auto-refresh; the loop below calls wndw.update()
# Create the drawing pen
pen = turtle.Turtle()
pen.hideturtle()
pen.speed(0)
pen.pensize(3)
def draw_clock(hr, mn, sec, pen):
    """Draw one clock frame: face, twelve hour hashes and three hands.

    hr/mn/sec are the current time components; *pen* is the turtle used
    for all drawing.
    """
    # Draw clock face
    pen.up()
    pen.goto(0, 210)
    pen.setheading(180)
    pen.color("green")
    pen.pendown()
    pen.circle(210)
    # Draw hour hashes
    pen.up()
    pen.goto(0, 0)
    pen.setheading(90)
    for _ in range(12):
        pen.fd(190)
        pen.pendown()
        pen.fd(20)
        pen.penup()
        pen.goto(0, 0)
        pen.rt(30)
    # Draw the hands
    # Each tuple in list hands describes the color, the length
    # and the divisor for the angle (units per full revolution)
    hands = [("white", 80, 12), ("blue", 150, 60), ("red", 110, 60)]
    time_set = (hr, mn, sec)
    for hand in hands:
        time_part = time_set[hands.index(hand)]
        angle = (time_part/hand[2])*360
        pen.penup()
        pen.goto(0, 0)
        pen.color(hand[0])
        pen.setheading(90)
        pen.rt(angle)
        pen.pendown()
        pen.fd(hand[1])
# Redraw once per second with the current local time.
while True:
    hr = int(time.strftime("%I"))
    mn = int(time.strftime("%M"))
    sec = int(time.strftime("%S"))
    draw_clock(hr, mn, sec, pen)
    wndw.update()
    time.sleep(1)
    pen.clear()
wndw.mainloop()  # NOTE(review): unreachable -- the loop above never exits
| 19.732394 | 68 | 0.578158 |
import turtle
import time
wndw = turtle.Screen()
wndw.bgcolor("black")
wndw.setup(width=600, height=600)
wndw.title("Analogue Clock")
wndw.tracer(0)
pen = turtle.Turtle()
pen.hideturtle()
pen.speed(0)
pen.pensize(3)
def draw_clock(hr, mn, sec, pen):
pen.up()
pen.goto(0, 210)
pen.setheading(180)
pen.color("green")
pen.pendown()
pen.circle(210)
pen.up()
pen.goto(0, 0)
pen.setheading(90)
for _ in range(12):
pen.fd(190)
pen.pendown()
pen.fd(20)
pen.penup()
pen.goto(0, 0)
pen.rt(30)
hands = [("white", 80, 12), ("blue", 150, 60), ("red", 110, 60)]
time_set = (hr, mn, sec)
for hand in hands:
time_part = time_set[hands.index(hand)]
angle = (time_part/hand[2])*360
pen.penup()
pen.goto(0, 0)
pen.color(hand[0])
pen.setheading(90)
pen.rt(angle)
pen.pendown()
pen.fd(hand[1])
while True:
hr = int(time.strftime("%I"))
mn = int(time.strftime("%M"))
sec = int(time.strftime("%S"))
draw_clock(hr, mn, sec, pen)
wndw.update()
time.sleep(1)
pen.clear()
wndw.mainloop()
| true | true |
f738ae6a93e083d3796cdb1f95525084c99a21fa | 1,809 | py | Python | azure-cognitiveservices-vision-customvision/azure/cognitiveservices/vision/customvision/training/models/image_create_result.py | JonathanGailliez/azure-sdk-for-python | f0f051bfd27f8ea512aea6fc0c3212ee9ee0029b | [
"MIT"
] | 1 | 2021-09-07T18:36:04.000Z | 2021-09-07T18:36:04.000Z | azure-cognitiveservices-vision-customvision/azure/cognitiveservices/vision/customvision/training/models/image_create_result.py | JonathanGailliez/azure-sdk-for-python | f0f051bfd27f8ea512aea6fc0c3212ee9ee0029b | [
"MIT"
] | 2 | 2019-10-02T23:37:38.000Z | 2020-10-02T01:17:31.000Z | azure-cognitiveservices-vision-customvision/azure/cognitiveservices/vision/customvision/training/models/image_create_result.py | JonathanGailliez/azure-sdk-for-python | f0f051bfd27f8ea512aea6fc0c3212ee9ee0029b | [
"MIT"
] | 1 | 2019-06-17T22:18:23.000Z | 2019-06-17T22:18:23.000Z | # coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for
# license information.
#
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is
# regenerated.
# --------------------------------------------------------------------------
from msrest.serialization import Model
class ImageCreateResult(Model):
    """ImageCreateResult.
    Variables are only populated by the server, and will be ignored when
    sending a request.
    :ivar source_url: Source URL of the image.
    :vartype source_url: str
    :ivar status: Status of the image creation. Possible values include: 'OK',
     'OKDuplicate', 'ErrorSource', 'ErrorImageFormat', 'ErrorImageSize',
     'ErrorStorage', 'ErrorLimitExceed', 'ErrorTagLimitExceed',
     'ErrorRegionLimitExceed', 'ErrorUnknown',
     'ErrorNegativeAndRegularTagOnSameImage'
    :vartype status: str or
     ~azure.cognitiveservices.vision.customvision.training.models.ImageCreateStatus
    :ivar image: The image.
    :vartype image:
     ~azure.cognitiveservices.vision.customvision.training.models.Image
    """
    # All three attributes are server-populated, hence read-only.
    _validation = {
        'source_url': {'readonly': True},
        'status': {'readonly': True},
        'image': {'readonly': True},
    }
    # Maps Python attribute names to their wire (JSON) keys and types.
    _attribute_map = {
        'source_url': {'key': 'sourceUrl', 'type': 'str'},
        'status': {'key': 'status', 'type': 'str'},
        'image': {'key': 'image', 'type': 'Image'},
    }
    def __init__(self, **kwargs):
        """Initialise all fields to None; the service fills them in."""
        super(ImageCreateResult, self).__init__(**kwargs)
        self.source_url = None
        self.status = None
        self.image = None
| 34.788462 | 83 | 0.615257 |
from msrest.serialization import Model
class ImageCreateResult(Model):
_validation = {
'source_url': {'readonly': True},
'status': {'readonly': True},
'image': {'readonly': True},
}
_attribute_map = {
'source_url': {'key': 'sourceUrl', 'type': 'str'},
'status': {'key': 'status', 'type': 'str'},
'image': {'key': 'image', 'type': 'Image'},
}
def __init__(self, **kwargs):
super(ImageCreateResult, self).__init__(**kwargs)
self.source_url = None
self.status = None
self.image = None
| true | true |
f738aeabb12e34c1fac77f5e37010adaa44ec4b8 | 1,819 | py | Python | src/sentry/api/serializers/models/alert_rule_trigger_action.py | pierredup/sentry | 0145e4b3bc0e775bf3482fe65f5e1a689d0dbb80 | [
"BSD-3-Clause"
] | null | null | null | src/sentry/api/serializers/models/alert_rule_trigger_action.py | pierredup/sentry | 0145e4b3bc0e775bf3482fe65f5e1a689d0dbb80 | [
"BSD-3-Clause"
] | null | null | null | src/sentry/api/serializers/models/alert_rule_trigger_action.py | pierredup/sentry | 0145e4b3bc0e775bf3482fe65f5e1a689d0dbb80 | [
"BSD-3-Clause"
] | null | null | null | from __future__ import absolute_import
import six
from sentry.api.serializers import register, Serializer
from sentry.incidents.models import AlertRuleTriggerAction
@register(AlertRuleTriggerAction)
class AlertRuleTriggerActionSerializer(Serializer):
    """Serializes AlertRuleTriggerAction instances for API responses."""
    def human_desc(self, action):
        # Returns a human readable description to display in the UI.
        # Falls through (returning None) for type/target combinations
        # that are not covered below.
        if action.type == action.Type.EMAIL.value:
            if action.target:
                if action.target_type == action.TargetType.USER.value:
                    return "Send an email to " + action.target.email
                elif action.target_type == action.TargetType.TEAM.value:
                    return "Send an email to members of #" + action.target.slug
        elif action.type == action.Type.PAGERDUTY.value:
            return "Send a PagerDuty notification to " + action.target_display
        elif action.type == action.Type.SLACK.value:
            return "Send a Slack notification to " + action.target_display
    def serialize(self, obj, attrs, user):
        """Return the JSON-serializable dict representation of *obj*."""
        # Imported here (not at module level), presumably to avoid a
        # circular import -- TODO confirm.
        from sentry.incidents.endpoints.serializers import action_target_type_to_string
        return {
            "id": six.text_type(obj.id),
            "alertRuleTriggerId": six.text_type(obj.alert_rule_trigger_id),
            "type": AlertRuleTriggerAction.get_registered_type(
                AlertRuleTriggerAction.Type(obj.type)
            ).slug,
            "targetType": action_target_type_to_string[
                AlertRuleTriggerAction.TargetType(obj.target_type)
            ],
            # Prefer the display name; fall back to the raw identifier.
            "targetIdentifier": obj.target_display
            if obj.target_display is not None
            else obj.target_identifier,
            "integrationId": obj.integration_id,
            "dateCreated": obj.date_added,
            "desc": self.human_desc(obj),
        }
| 42.302326 | 87 | 0.654755 | from __future__ import absolute_import
import six
from sentry.api.serializers import register, Serializer
from sentry.incidents.models import AlertRuleTriggerAction
@register(AlertRuleTriggerAction)
class AlertRuleTriggerActionSerializer(Serializer):
def human_desc(self, action):
if action.type == action.Type.EMAIL.value:
if action.target:
if action.target_type == action.TargetType.USER.value:
return "Send an email to " + action.target.email
elif action.target_type == action.TargetType.TEAM.value:
return "Send an email to members of #" + action.target.slug
elif action.type == action.Type.PAGERDUTY.value:
return "Send a PagerDuty notification to " + action.target_display
elif action.type == action.Type.SLACK.value:
return "Send a Slack notification to " + action.target_display
def serialize(self, obj, attrs, user):
from sentry.incidents.endpoints.serializers import action_target_type_to_string
return {
"id": six.text_type(obj.id),
"alertRuleTriggerId": six.text_type(obj.alert_rule_trigger_id),
"type": AlertRuleTriggerAction.get_registered_type(
AlertRuleTriggerAction.Type(obj.type)
).slug,
"targetType": action_target_type_to_string[
AlertRuleTriggerAction.TargetType(obj.target_type)
],
"targetIdentifier": obj.target_display
if obj.target_display is not None
else obj.target_identifier,
"integrationId": obj.integration_id,
"dateCreated": obj.date_added,
"desc": self.human_desc(obj),
}
| true | true |
f738af43ce1b3a193391862e705092eb5b0f05da | 2,489 | py | Python | unified/tests/test_xfs.py | LoganCook/reporting-unified | 9d2c7e083c5c400e9120bb8552348e41406a1bc1 | [
"Apache-2.0"
] | null | null | null | unified/tests/test_xfs.py | LoganCook/reporting-unified | 9d2c7e083c5c400e9120bb8552348e41406a1bc1 | [
"Apache-2.0"
] | null | null | null | unified/tests/test_xfs.py | LoganCook/reporting-unified | 9d2c7e083c5c400e9120bb8552348e41406a1bc1 | [
"Apache-2.0"
] | null | null | null | import unittest
from flask import json
from ..apis.xfs import app
from . import client_get, now, now_minus_24hrs
get = client_get(app)
class XFSTestCase(unittest.TestCase):
    """Smoke tests for the XFS reporting API (`apis.xfs.app`)."""
    def test_root_not_allowed(self):
        """The root URL is not routed and must return 404."""
        rv = get('/')
        self.assertEqual(rv.status_code, 404)
    def test_all_top_objects_should_pass(self):
        """Every top-level collection endpoint returns a non-empty JSON list."""
        for route in app.url_map.iter_rules():
            rule = route.rule
            # top objects' have pattern of /blar
            # ingest only accept PUT and OPTIONS
            if rule not in ('/static/<path:filename>', '/ingest') and 'summary' not in rule and 'list' not in rule:
                print('Testing %s' % rule)
                resp = get('%s?count=10' % rule)
                data = json.loads(resp.data)
                self.assertEqual(resp.status_code, 200)
                self.assertGreaterEqual(len(data), 1)
    def test_filesystem_not_found(self):
        """Unknown filesystem ids (non-uuid or unmatched) yield empty data."""
        rule = '/filesystem/not/summary'
        # Copes quietly with a non-uuid id
        resp = get(rule)
        self.assertEqual(resp.status_code, 200)
        data = json.loads(resp.data)
        self.assertEqual(len(data), 0)
        rule = '/filesystem/12345678123456781234567812345678/summary'
        resp = get(rule)
        self.assertEqual(resp.status_code, 200)
        data = json.loads(resp.data)
        self.assertEqual(len(data), 0)
    def test_usage_summary(self):
        """/usage/summary over the last 24 hours returns a JSON list."""
        resp = get('/usage/summary?start=%s&end=%s' % (now_minus_24hrs, now))
        self.assertEqual(resp.status_code, 200)
        data = json.loads(resp.data)
        self.assertTrue(isinstance(data, list))
        print(data)
    def test_instance_methods(self):
        """summary/list of each instance type return non-empty results."""
        instance_types = ('filesystem', 'owner')
        methods = ('summary', 'list')
        for itype in instance_types:
            resp = get('/%s?count=1' % itype)
            self.assertEqual(resp.status_code, 200)
            data = json.loads(resp.data)
            print(data)
            self.assertTrue(isinstance(data, list))
            self.assertGreater(len(data), 0)
            target_id = data[0]['id']
            for method in methods:
                resp = get('/%s/%s/%s?start=%s&end=%s' % (itype, target_id, method, now_minus_24hrs, now))
                self.assertEqual(resp.status_code, 200)
                data = json.loads(resp.data)
                print(data)
                self.assertTrue(isinstance(data, list) or isinstance(data, dict))
                self.assertGreater(len(data), 0)
| 37.149254 | 115 | 0.593009 | import unittest
from flask import json
from ..apis.xfs import app
from . import client_get, now, now_minus_24hrs
get = client_get(app)
class XFSTestCase(unittest.TestCase):
def test_root_not_allowed(self):
rv = get('/')
self.assertEqual(rv.status_code, 404)
def test_all_top_objects_should_pass(self):
for route in app.url_map.iter_rules():
rule = route.rule
# ingest only accept PUT and OPTIONS
if rule not in ('/static/<path:filename>', '/ingest') and 'summary' not in rule and 'list' not in rule:
print('Testing %s' % rule)
resp = get('%s?count=10' % rule)
data = json.loads(resp.data)
self.assertEqual(resp.status_code, 200)
self.assertGreaterEqual(len(data), 1)
def test_filesystem_not_found(self):
rule = '/filesystem/not/summary'
# Can deal non-uuid id quitely
resp = get(rule)
self.assertEqual(resp.status_code, 200)
data = json.loads(resp.data)
self.assertEqual(len(data), 0)
rule = '/filesystem/12345678123456781234567812345678/summary'
resp = get(rule)
self.assertEqual(resp.status_code, 200)
data = json.loads(resp.data)
self.assertEqual(len(data), 0)
def test_usage_summary(self):
resp = get('/usage/summary?start=%s&end=%s' % (now_minus_24hrs, now))
self.assertEqual(resp.status_code, 200)
data = json.loads(resp.data)
self.assertTrue(isinstance(data, list))
print(data)
def test_instance_methods(self):
instance_types = ('filesystem', 'owner')
methods = ('summary', 'list')
for itype in instance_types:
resp = get('/%s?count=1' % itype)
self.assertEqual(resp.status_code, 200)
data = json.loads(resp.data)
print(data)
self.assertTrue(isinstance(data, list))
self.assertGreater(len(data), 0)
target_id = data[0]['id']
for method in methods:
resp = get('/%s/%s/%s?start=%s&end=%s' % (itype, target_id, method, now_minus_24hrs, now))
self.assertEqual(resp.status_code, 200)
data = json.loads(resp.data)
print(data)
self.assertTrue(isinstance(data, list) or isinstance(data, dict))
self.assertGreater(len(data), 0)
| true | true |
f738af46f448eb15c41e30d9aaae2454e0c5a8dd | 11,680 | py | Python | goose3/crawler.py | Nimit-Khurana/goose3 | 5006bf40f88f15f9b6a1d0da5b9945eaa306c1aa | [
"Apache-2.0"
] | null | null | null | goose3/crawler.py | Nimit-Khurana/goose3 | 5006bf40f88f15f9b6a1d0da5b9945eaa306c1aa | [
"Apache-2.0"
] | null | null | null | goose3/crawler.py | Nimit-Khurana/goose3 | 5006bf40f88f15f9b6a1d0da5b9945eaa306c1aa | [
"Apache-2.0"
] | null | null | null | # -*- coding: utf-8 -*-
"""\
This is a python port of "Goose" orignialy licensed to Gravity.com
under one or more contributor license agreements. See the NOTICE file
distributed with this work for additional information
regarding copyright ownership.
Python port was written by Xavier Grangier for Recrutae
Gravity.com licenses this file
to you under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance
with the License. You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
import os
import glob
from copy import deepcopy
import dateutil.parser
from dateutil.tz import tzutc
from goose3.article import Article
from goose3.utils import URLHelper, RawHelper
from goose3.text import get_encodings_from_content
from goose3.extractors.content import StandardContentExtractor
from goose3.extractors.videos import VideoExtractor
from goose3.extractors.title import TitleExtractor
from goose3.extractors.images import ImageExtractor
from goose3.extractors.links import LinksExtractor
from goose3.extractors.tweets import TweetsExtractor
from goose3.extractors.authors import AuthorsExtractor
from goose3.extractors.tags import TagsExtractor
from goose3.extractors.opengraph import OpenGraphExtractor
from goose3.extractors.publishdate import PublishDateExtractor
from goose3.extractors.schema import SchemaExtractor
from goose3.extractors.metas import MetasExtractor
from goose3.cleaners import StandardDocumentCleaner
from goose3.outputformatters import StandardOutputFormatter
from goose3.network import NetworkFetcher
class CrawlCandidate(object):
    """Bundles the inputs needed to crawl a single page.

    Holds the configuration, the target URL, and (optionally) pre-fetched
    raw HTML, plus the parser implementation resolved from the config.
    """

    def __init__(self, config, url, raw_html):
        self.config = config
        self.url = url
        self.raw_html = raw_html
        # The parser implementation is supplied by the configuration object.
        self.parser = config.get_parser()
class Crawler(object):
    """Orchestrates the full article-extraction pipeline for one page.

    Wires together all extractors, the document cleaner, the output
    formatter and the HTML fetcher, then drives them in order via
    :meth:`crawl` / :meth:`process` to populate an :class:`Article`.
    """

    def __init__(self, config, fetcher=None):
        """Build the pipeline.

        Args:
            config: goose3 configuration object (supplies the parser,
                image-fetching flag, local storage path, etc.).
            fetcher: optional NetworkFetcher to reuse; a fresh one is
                created from *config* when not provided.
        """
        # config
        self.config = config
        # parser
        self.parser = self.config.get_parser()
        # article
        self.article = Article()
        # init the extractor
        self.extractor = self.get_extractor()
        # init the document cleaner
        self.cleaner = self.get_cleaner()
        # init the output formatter
        self.formatter = self.get_formatter()
        # metas extractor
        self.metas_extractor = self.get_metas_extractor()
        # opengraph extractor
        self.opengraph_extractor = self.get_opengraph_extractor()
        # schema.org news article extractor
        self.schema_extractor = self.get_schema_extractor()
        # publishdate extractor
        self.publishdate_extractor = self.get_publishdate_extractor()
        # tags extractor
        self.tags_extractor = self.get_tags_extractor()
        # authors extractor
        self.authors_extractor = self.get_authors_extractor()
        # tweets extractor
        self.tweets_extractor = self.get_tweets_extractor()
        # links extractor
        self.links_extractor = self.get_links_extractor()
        # video extractor
        self.video_extractor = self.get_video_extractor()
        # title extractor
        self.title_extractor = self.get_title_extractor()
        # html fetcher: reuse the caller's fetcher only if it is the right type
        if isinstance(fetcher, NetworkFetcher):
            self.fetcher = fetcher
        else:
            self.fetcher = NetworkFetcher(self.config)
        # image extractor (needs the fetcher, so built after it)
        self.image_extractor = self.get_image_extractor()
        # TODO: use the log prefix
        self.log_prefix = "crawler: "

    def crawl(self, crawl_candidate):
        """Fetch (if necessary) and fully process one crawl candidate.

        Returns the populated Article, or the still-empty Article when no
        HTML could be obtained.
        """
        # parser candidate
        parse_candidate = self.get_parse_candidate(crawl_candidate)
        # raw html
        raw_html = self.get_html(crawl_candidate, parse_candidate)
        if raw_html is None:
            return self.article
        return self.process(raw_html, parse_candidate.url, parse_candidate.link_hash)

    def process(self, raw_html, final_url, link_hash):
        """Run the extraction pipeline over *raw_html* and return the Article.

        Step order matters: metadata extractors run against the full document
        before the cleaner strips it down to candidate article nodes.
        """
        # remove footnotes
        raw_html_no_footnotes = self.formatter.remove_footnotes(raw_html)
        # create document
        doc = self.get_document(raw_html_no_footnotes)
        # article
        self.article._final_url = final_url
        self.article._link_hash = link_hash
        self.article._raw_html = raw_html_no_footnotes
        self.article._doc = doc
        self.article._raw_doc = deepcopy(doc)
        # open graph
        self.article._opengraph = self.opengraph_extractor.extract()
        # schema.org:
        #  - (ReportageNewsArticle) https://pending.schema.org/ReportageNewsArticle
        #  - (NewsArticle) https://schema.org/NewsArticle
        #  - (Article) https://schema.org/Article
        self.article._schema = self.schema_extractor.extract()
        # fall back to opengraph/schema URL when the caller gave none
        if not self.article._final_url:
            if "url" in self.article.opengraph:
                self.article._final_url = self.article.opengraph["url"]
            elif self.article.schema and "url" in self.article.schema:
                self.article._final_url = self.article.schema["url"]
        # meta
        metas = self.metas_extractor.extract()
        # print(metas)
        self.article._meta_lang = metas['lang']
        self.article._meta_favicon = metas['favicon']
        self.article._meta_description = metas['description']
        self.article._meta_keywords = metas['keywords']
        self.article._meta_encoding = metas['encoding']
        self.article._canonical_link = metas['canonical']
        self.article._domain = metas['domain']
        # publishdate: keep the raw value, and also try to normalize it to UTC
        self.article._publish_date = self.publishdate_extractor.extract()
        if self.article.publish_date:
            try:
                publish_datetime = dateutil.parser.parse(self.article.publish_date)
                if publish_datetime.tzinfo:
                    self.article._publish_datetime_utc = publish_datetime.astimezone(tzutc())
                else:
                    self.article._publish_datetime_utc = publish_datetime
            except (ValueError, OverflowError):
                self.article._publish_datetime_utc = None
        # tags
        self.article._tags = self.tags_extractor.extract()
        # authors
        self.article._authors = self.authors_extractor.extract()
        # title
        self.article._title = self.title_extractor.extract()
        # check for known node as content body
        # if we find one force the article.doc to be the found node
        # this will prevent the cleaner to remove unwanted text content
        article_body = self.extractor.get_known_article_tags()
        if article_body is not None:
            doc = article_body
        # before we do any calcs on the body itself let's clean up the document
        if not isinstance(doc, list):
            doc = [self.cleaner.clean(doc)]
        else:
            doc = [self.cleaner.clean(deepcopy(x)) for x in doc]
        # big stuff
        self.article._top_node = self.extractor.calculate_best_node(doc)
        # if we do not find an article within the discovered possible article nodes,
        # try again with the root node.
        if self.article._top_node is None:
            # try again with the root node.
            self.article._top_node = self.extractor.calculate_best_node(self.article._doc)
        else:
            # set the doc member to the discovered article node.
            self.article._doc = doc
        # if we have a top node
        # let's process it
        if self.article._top_node is not None:
            # article links
            self.article._links = self.links_extractor.extract()
            # tweets
            self.article._tweets = self.tweets_extractor.extract()
            # video handling
            self.article._movies = self.video_extractor.get_videos()
            # image handling
            if self.config.enable_image_fetching:
                self.get_image()
            # post cleanup
            self.article._top_node = self.extractor.post_cleanup()
            # clean_text
            self.article._cleaned_text = self.formatter.get_formatted_text()
        # cleanup tmp file
        self.release_resources()
        # return the article
        return self.article

    @staticmethod
    def get_parse_candidate(crawl_candidate):
        """Build a parsing candidate (url + link hash) from raw HTML or URL."""
        if crawl_candidate.raw_html:
            return RawHelper.get_parsing_candidate(crawl_candidate.url, crawl_candidate.raw_html)
        return URLHelper.get_parsing_candidate(crawl_candidate.url)

    def get_image(self):
        """Pick the best top image from the raw document and the top node."""
        doc = self.article.raw_doc
        top_node = self.article.top_node
        self.article._top_image = self.image_extractor.get_best_image(doc, top_node)

    def get_html(self, crawl_candidate, parsing_candidate):
        """Return the page HTML, fetching it over the network when needed.

        Also records the detected character encoding on the article.
        """
        # we got raw_html already
        # no need to fetch remote content
        if crawl_candidate.raw_html:
            return crawl_candidate.raw_html
        # fetch HTML
        response = self.fetcher.fetch_obj(parsing_candidate.url)
        if response.encoding != 'ISO-8859-1':  # requests has a good idea; use what it says
            # return response as a unicode string
            html = response.text
            self.article._meta_encoding = response.encoding
        else:
            # ISO-8859-1 is requests' fallback guess; try to sniff the real
            # encoding from the document content instead.
            html = response.content
            encodings = get_encodings_from_content(response.text)
            if len(encodings) > 0:
                self.article._meta_encoding = encodings[0]
                response.encoding = encodings[0]
                html = response.text
            else:
                # NOTE(review): assigns the empty list here (not None), and
                # html stays as raw bytes — confirm callers tolerate both.
                self.article._meta_encoding = encodings
        return html

    def get_metas_extractor(self):
        """Build the meta-tag extractor bound to this config/article."""
        return MetasExtractor(self.config, self.article)

    def get_publishdate_extractor(self):
        """Build the publish-date extractor bound to this config/article."""
        return PublishDateExtractor(self.config, self.article)

    def get_opengraph_extractor(self):
        """Build the OpenGraph extractor bound to this config/article."""
        return OpenGraphExtractor(self.config, self.article)

    def get_schema_extractor(self):
        """Build the schema.org extractor bound to this config/article."""
        return SchemaExtractor(self.config, self.article)

    def get_tags_extractor(self):
        """Build the tags extractor bound to this config/article."""
        return TagsExtractor(self.config, self.article)

    def get_authors_extractor(self):
        """Build the authors extractor bound to this config/article."""
        return AuthorsExtractor(self.config, self.article)

    def get_tweets_extractor(self):
        """Build the tweets extractor bound to this config/article."""
        return TweetsExtractor(self.config, self.article)

    def get_links_extractor(self):
        """Build the links extractor bound to this config/article."""
        return LinksExtractor(self.config, self.article)

    def get_title_extractor(self):
        """Build the title extractor bound to this config/article."""
        return TitleExtractor(self.config, self.article)

    def get_image_extractor(self):
        """Build the image extractor (needs the network fetcher)."""
        return ImageExtractor(self.fetcher, self.config, self.article)

    def get_video_extractor(self):
        """Build the video extractor bound to this config/article."""
        return VideoExtractor(self.config, self.article)

    def get_formatter(self):
        """Build the output formatter bound to this config/article."""
        return StandardOutputFormatter(self.config, self.article)

    def get_cleaner(self):
        """Build the document cleaner bound to this config/article."""
        return StandardDocumentCleaner(self.config, self.article)

    def get_document(self, raw_html):
        """Parse *raw_html* into a document tree using the configured parser."""
        doc = self.parser.fromstring(raw_html)
        return doc

    def get_extractor(self):
        """Build the main content extractor bound to this config/article."""
        return StandardContentExtractor(self.config, self.article)

    def release_resources(self):
        """Delete temporary files created for this article's link hash."""
        path = os.path.join(self.config.local_storage_path, '%s_*' % self.article.link_hash)
        for fname in glob.glob(path):
            try:
                os.remove(fname)
            except OSError:
                # TODO: better log handling
                pass
| 34.658754 | 97 | 0.675171 |
import os
import glob
from copy import deepcopy
import dateutil.parser
from dateutil.tz import tzutc
from goose3.article import Article
from goose3.utils import URLHelper, RawHelper
from goose3.text import get_encodings_from_content
from goose3.extractors.content import StandardContentExtractor
from goose3.extractors.videos import VideoExtractor
from goose3.extractors.title import TitleExtractor
from goose3.extractors.images import ImageExtractor
from goose3.extractors.links import LinksExtractor
from goose3.extractors.tweets import TweetsExtractor
from goose3.extractors.authors import AuthorsExtractor
from goose3.extractors.tags import TagsExtractor
from goose3.extractors.opengraph import OpenGraphExtractor
from goose3.extractors.publishdate import PublishDateExtractor
from goose3.extractors.schema import SchemaExtractor
from goose3.extractors.metas import MetasExtractor
from goose3.cleaners import StandardDocumentCleaner
from goose3.outputformatters import StandardOutputFormatter
from goose3.network import NetworkFetcher
class CrawlCandidate(object):
    """A single crawl request: configuration, target URL and optional raw HTML."""

    def __init__(self, config, url, raw_html):
        self.config, self.url, self.raw_html = config, url, raw_html
        # Parser implementation is chosen by the configuration.
        self.parser = config.get_parser()
class Crawler(object):
    """Drives the article-extraction pipeline for one page.

    Builds every extractor plus the cleaner, formatter and fetcher, then
    runs them in a fixed order to populate an :class:`Article`.
    """

    def __init__(self, config, fetcher=None):
        """Wire up the pipeline from *config*; reuse *fetcher* if given."""
        self.config = config
        self.parser = self.config.get_parser()
        self.article = Article()
        self.extractor = self.get_extractor()
        self.cleaner = self.get_cleaner()
        self.formatter = self.get_formatter()
        self.metas_extractor = self.get_metas_extractor()
        self.opengraph_extractor = self.get_opengraph_extractor()
        self.schema_extractor = self.get_schema_extractor()
        self.publishdate_extractor = self.get_publishdate_extractor()
        self.tags_extractor = self.get_tags_extractor()
        self.authors_extractor = self.get_authors_extractor()
        self.tweets_extractor = self.get_tweets_extractor()
        self.links_extractor = self.get_links_extractor()
        self.video_extractor = self.get_video_extractor()
        self.title_extractor = self.get_title_extractor()
        # Only reuse the caller's fetcher when it is a real NetworkFetcher.
        if isinstance(fetcher, NetworkFetcher):
            self.fetcher = fetcher
        else:
            self.fetcher = NetworkFetcher(self.config)
        # Image extractor needs the fetcher, so it is built last.
        self.image_extractor = self.get_image_extractor()
        self.log_prefix = "crawler: "

    def crawl(self, crawl_candidate):
        """Fetch (if needed) and process one candidate; return the Article."""
        parse_candidate = self.get_parse_candidate(crawl_candidate)
        raw_html = self.get_html(crawl_candidate, parse_candidate)
        if raw_html is None:
            # Nothing to parse — return the still-empty article.
            return self.article
        return self.process(raw_html, parse_candidate.url, parse_candidate.link_hash)

    def process(self, raw_html, final_url, link_hash):
        """Run the extraction steps over *raw_html* and return the Article.

        Metadata extractors run on the full document first; cleaning and
        top-node scoring happen afterwards, so the order below matters.
        """
        raw_html_no_footnotes = self.formatter.remove_footnotes(raw_html)
        doc = self.get_document(raw_html_no_footnotes)
        self.article._final_url = final_url
        self.article._link_hash = link_hash
        self.article._raw_html = raw_html_no_footnotes
        self.article._doc = doc
        self.article._raw_doc = deepcopy(doc)
        self.article._opengraph = self.opengraph_extractor.extract()
        self.article._schema = self.schema_extractor.extract()
        # Fall back to opengraph/schema URL when none was supplied.
        if not self.article._final_url:
            if "url" in self.article.opengraph:
                self.article._final_url = self.article.opengraph["url"]
            elif self.article.schema and "url" in self.article.schema:
                self.article._final_url = self.article.schema["url"]
        metas = self.metas_extractor.extract()
        self.article._meta_lang = metas['lang']
        self.article._meta_favicon = metas['favicon']
        self.article._meta_description = metas['description']
        self.article._meta_keywords = metas['keywords']
        self.article._meta_encoding = metas['encoding']
        self.article._canonical_link = metas['canonical']
        self.article._domain = metas['domain']
        # Keep the raw publish date and also try to normalize it to UTC.
        self.article._publish_date = self.publishdate_extractor.extract()
        if self.article.publish_date:
            try:
                publish_datetime = dateutil.parser.parse(self.article.publish_date)
                if publish_datetime.tzinfo:
                    self.article._publish_datetime_utc = publish_datetime.astimezone(tzutc())
                else:
                    self.article._publish_datetime_utc = publish_datetime
            except (ValueError, OverflowError):
                self.article._publish_datetime_utc = None
        self.article._tags = self.tags_extractor.extract()
        self.article._authors = self.authors_extractor.extract()
        self.article._title = self.title_extractor.extract()
        # A known article container (if found) overrides the whole document
        # so the cleaner cannot strip wanted content.
        article_body = self.extractor.get_known_article_tags()
        if article_body is not None:
            doc = article_body
        if not isinstance(doc, list):
            doc = [self.cleaner.clean(doc)]
        else:
            doc = [self.cleaner.clean(deepcopy(x)) for x in doc]
        # big stuff
        self.article._top_node = self.extractor.calculate_best_node(doc)
        # if we do not find an article within the discovered possible article nodes,
        # try again with the root node.
        if self.article._top_node is None:
            # try again with the root node.
            self.article._top_node = self.extractor.calculate_best_node(self.article._doc)
        else:
            # set the doc member to the discovered article node.
            self.article._doc = doc
        # if we have a top node
        # let's process it
        if self.article._top_node is not None:
            self.article._links = self.links_extractor.extract()
            self.article._tweets = self.tweets_extractor.extract()
            self.article._movies = self.video_extractor.get_videos()
            if self.config.enable_image_fetching:
                self.get_image()
            self.article._top_node = self.extractor.post_cleanup()
            self.article._cleaned_text = self.formatter.get_formatted_text()
        self.release_resources()
        return self.article

    @staticmethod
    def get_parse_candidate(crawl_candidate):
        """Build a parsing candidate (url + link hash) from raw HTML or URL."""
        if crawl_candidate.raw_html:
            return RawHelper.get_parsing_candidate(crawl_candidate.url, crawl_candidate.raw_html)
        return URLHelper.get_parsing_candidate(crawl_candidate.url)

    def get_image(self):
        """Pick the best top image from the raw document and top node."""
        doc = self.article.raw_doc
        top_node = self.article.top_node
        self.article._top_image = self.image_extractor.get_best_image(doc, top_node)

    def get_html(self, crawl_candidate, parsing_candidate):
        """Return page HTML (fetching when needed) and record its encoding."""
        if crawl_candidate.raw_html:
            return crawl_candidate.raw_html
        response = self.fetcher.fetch_obj(parsing_candidate.url)
        # ISO-8859-1 is requests' fallback guess; any other value is trusted.
        if response.encoding != 'ISO-8859-1':
            html = response.text
            self.article._meta_encoding = response.encoding
        else:
            # Sniff the real encoding from the document content instead.
            html = response.content
            encodings = get_encodings_from_content(response.text)
            if len(encodings) > 0:
                self.article._meta_encoding = encodings[0]
                response.encoding = encodings[0]
                html = response.text
            else:
                # NOTE(review): stores the empty list (not None) and leaves
                # html as raw bytes — confirm callers tolerate both.
                self.article._meta_encoding = encodings
        return html

    def get_metas_extractor(self):
        """Build the meta-tag extractor bound to this config/article."""
        return MetasExtractor(self.config, self.article)

    def get_publishdate_extractor(self):
        """Build the publish-date extractor bound to this config/article."""
        return PublishDateExtractor(self.config, self.article)

    def get_opengraph_extractor(self):
        """Build the OpenGraph extractor bound to this config/article."""
        return OpenGraphExtractor(self.config, self.article)

    def get_schema_extractor(self):
        """Build the schema.org extractor bound to this config/article."""
        return SchemaExtractor(self.config, self.article)

    def get_tags_extractor(self):
        """Build the tags extractor bound to this config/article."""
        return TagsExtractor(self.config, self.article)

    def get_authors_extractor(self):
        """Build the authors extractor bound to this config/article."""
        return AuthorsExtractor(self.config, self.article)

    def get_tweets_extractor(self):
        """Build the tweets extractor bound to this config/article."""
        return TweetsExtractor(self.config, self.article)

    def get_links_extractor(self):
        """Build the links extractor bound to this config/article."""
        return LinksExtractor(self.config, self.article)

    def get_title_extractor(self):
        """Build the title extractor bound to this config/article."""
        return TitleExtractor(self.config, self.article)

    def get_image_extractor(self):
        """Build the image extractor (needs the network fetcher)."""
        return ImageExtractor(self.fetcher, self.config, self.article)

    def get_video_extractor(self):
        """Build the video extractor bound to this config/article."""
        return VideoExtractor(self.config, self.article)

    def get_formatter(self):
        """Build the output formatter bound to this config/article."""
        return StandardOutputFormatter(self.config, self.article)

    def get_cleaner(self):
        """Build the document cleaner bound to this config/article."""
        return StandardDocumentCleaner(self.config, self.article)

    def get_document(self, raw_html):
        """Parse *raw_html* into a document tree with the configured parser."""
        doc = self.parser.fromstring(raw_html)
        return doc

    def get_extractor(self):
        """Build the main content extractor bound to this config/article."""
        return StandardContentExtractor(self.config, self.article)

    def release_resources(self):
        """Delete temporary files created for this article's link hash."""
        path = os.path.join(self.config.local_storage_path, '%s_*' % self.article.link_hash)
        for fname in glob.glob(path):
            try:
                os.remove(fname)
            except OSError:
                # Best-effort cleanup: ignore files we cannot remove.
                pass
| true | true |
f738af9551c78eaaaf480ca434a71c84550dc693 | 22,843 | py | Python | snakeai/gameplayAttackAndHideRandom/environmentattackandhiderandom.py | sunher/game | 84b01b2c69b5cdecbc301fb0e56380ff06bfe353 | [
"MIT"
] | null | null | null | snakeai/gameplayAttackAndHideRandom/environmentattackandhiderandom.py | sunher/game | 84b01b2c69b5cdecbc301fb0e56380ff06bfe353 | [
"MIT"
] | null | null | null | snakeai/gameplayAttackAndHideRandom/environmentattackandhiderandom.py | sunher/game | 84b01b2c69b5cdecbc301fb0e56380ff06bfe353 | [
"MIT"
] | null | null | null | import pprint
import random
import time
import numpy as np
import pandas as pd
from .entities import Snake, Field, CellType, SnakeAction, ALL_SNAKE_ACTIONS, SnakeDirection, Point
class EnvironmentAttackAndHideRandom(object):
    """
    Represents the RL environment for the Snake game that implements the game logic,
    provides rewards for the agent and keeps track of game statistics.

    This variant places random/fixed wall layouts, a stationary "enemy" cell
    surrounded by fruit, and (optionally) a poison border band.
    """
    def __init__(self, config, verbose=1):
        """
        Create a new Snake RL environment.
        Args:
            config (dict): level configuration, typically found in JSON configs.
            verbose (int): verbosity level:
                0 = do not write any debug information;
                1 = write a CSV file containing the statistics for every episode;
                2 = same as 1, but also write a full log file containing the state of each timestep.
        """
        self.field = Field(level_map=config['field'])
        self.snake = None
        self.fruit = []            # list of Points currently holding fruit
        self.poison = []           # list of Points currently holding poison
        self.poison_num = 0        # width of the poison border band (0 = none)
        self.initial_snake_length = config['initial_snake_length']
        self.rewards = config['rewards']
        self.max_step_limit = config.get('max_step_limit', 1000)
        self.is_game_over = False
        self.timestep_index = 0
        self.current_action = None
        self.stats = EpisodeStatistics()
        self.verbose = verbose
        self.debug_file = None
        self.stats_file = None
        self.enemy = None          # Point where the enemy cell sits
    def seed(self, value):
        """ Initialize the random state of the environment to make results reproducible. """
        random.seed(value)
        np.random.seed(value)
    def get_random_empty_cell(self):
        """ Return a random unoccupied cell of the field. """
        return self.field.get_random_empty_cell()
    @property
    def observation_shape(self):
        """ Get the shape of the state observed at each timestep. """
        return self.field.size, self.field.size
    @property
    def num_actions(self):
        """ Get the number of actions the agent can take. """
        return len(ALL_SNAKE_ACTIONS)
    def new_episode(self):
        """ Reset the environment and begin a new episode. """
        self.field.create_level()
        self.generate_rand_wall()
        # print(self.field._cells)
        self.stats.reset()
        self.timestep_index = 0
        self.enemy = None
        self.fruit = []
        self.poison = []
        self.poison_num = 0
        self.snake = Snake(self.field.get_random_empty_cell(), length=self.initial_snake_length)
        self.field.place_snake(self.snake)
        self.generate_emeny()
        self.generate_poison()
        self.current_action = None
        self.is_game_over = False
        result = TimestepResult(
            observation=self.get_observation(),
            reward=0,
            is_episode_end=self.is_game_over
        )
        self.record_timestep_stats(result)
        return result
    def getResult(self):
        """ Build a zero-reward TimestepResult for the current state without advancing time. """
        result = TimestepResult(
            observation=self.get_observation(),
            reward=0,
            is_episode_end=self.is_game_over
        )
        self.record_timestep_stats(result)
        return result
    def record_timestep_stats(self, result):
        """ Record environment statistics according to the verbosity level. """
        timestamp = time.strftime('%Y%m%d-%H%M%S')
        # Write CSV header for the stats file.
        if self.verbose >= 1 and self.stats_file is None:
            # BUG(review): missing f-prefix — this creates the literal file
            # 'snake-env-{timestamp}.csv'; `timestamp` above is never used.
            self.stats_file = open('snake-env-{timestamp}.csv', 'w')
            stats_csv_header_line = self.stats.to_dataframe()[:0].to_csv(index=None)
            # print(stats_csv_header_line, self.stats_file, '', flush=True)
        # Create a blank debug log file.
        # if self.verbose >= 2 and self.debug_file is None:
        #     self.debug_file = open('snake-env-{timestamp}.log', 'w')
        self.stats.record_timestep(self.current_action, result)
        self.stats.timesteps_survived = self.timestep_index
        # if self.verbose >= 2:
        #     print(result, self.debug_file)
        # # Log episode stats if the appropriate verbosity level is set.
        # if result.is_episode_end:
        #     if self.verbose >= 1:
        #         stats_csv_line = self.stats.to_dataframe().to_csv(header=False, index=None)
        #         print(stats_csv_line, self.stats_file, '', flush=True)
        #     if self.verbose >= 2:
        #         print(self.stats, self.debug_file)
    def get_observation(self):
        """ Observe the state of the environment (a copy of the cell grid). """
        return np.copy(self.field._cells)
    def choose_action(self, action):
        """ Choose the action that will be taken at the next timestep.

        Actions are relative turns: TURN_LEFT1..3 / TURN_RIGHT1..3 rotate the
        snake by one to three 90-degree steps; any other action keeps heading.
        """
        self.current_action = action
        if action == SnakeAction.TURN_LEFT1:
            self.snake.turn_left()
        elif action == SnakeAction.TURN_LEFT2:
            self.snake.turn_left()
            self.snake.turn_left()
        elif action == SnakeAction.TURN_LEFT3:
            self.snake.turn_left()
            self.snake.turn_left()
            self.snake.turn_left()
        elif action == SnakeAction.TURN_RIGHT1:
            self.snake.turn_right()
        elif action == SnakeAction.TURN_RIGHT2:
            self.snake.turn_right()
            self.snake.turn_right()
        elif action == SnakeAction.TURN_RIGHT3:
            self.snake.turn_right()
            self.snake.turn_right()
            self.snake.turn_right()
    def create_wall(self, pos):
        """ Turn the cell at *pos* into a wall. """
        # self.point(pos).type = PointType.WALL
        self.field[pos] = CellType.WALL
    def create_fix_wall_1(self):
        """ Fixed wall layout #1. """
        wall_pos = [Point(3, 4), Point(3, 5), Point(3, 6), Point(3, 7), Point(3, 8), Point(3, 9),
                    Point(6, 3), Point(6, 4), Point(6, 5), Point(6, 8), Point(6, 9), Point(6, 10),
                    Point(7, 6),
                    Point(8, 5), Point(8, 8),
                    Point(9, 4), Point(9, 9),
                    Point(10, 3), Point(10, 5), Point(10, 6), Point(10, 7), Point(10, 8), Point(10, 10),
                    Point(11, 11)]
        for pos in wall_pos:
            self.create_wall(pos)
    def create_fix_wall_2(self):
        """ Fixed wall layout #2. """
        wall_pos = [Point(2, 3), Point(2, 10),
                    Point(3, 3), Point(3, 10),
                    Point(4, 4), Point(4, 9),
                    Point(5, 5), Point(5, 8),
                    Point(6, 6), Point(6, 7),
                    Point(7, 3), Point(7, 10),
                    Point(8, 3), Point(8, 6), Point(8, 7), Point(8, 10),
                    Point(9, 3), Point(9, 6), Point(9, 7), Point(9, 10),
                    Point(10, 4), Point(10, 5), Point(10, 8), Point(10, 9)]
        for pos in wall_pos:
            self.create_wall(pos)
    def create_fix_wall_3(self):
        """ Fixed wall layout #3. """
        wall_pos = [Point(3, 2), Point(3, 3), Point(3, 8), Point(3, 9),
                    Point(4, 4), Point(4, 7), Point(4, 10),
                    Point(5, 4), Point(5, 7), Point(5, 10),
                    Point(6, 3),
                    Point(7, 2), Point(7, 7), Point(7, 10),
                    Point(8, 2), Point(8, 7), Point(8, 10),
                    Point(9, 2), Point(9, 7), Point(9, 10),
                    Point(10, 3), Point(10, 4), Point(10, 8), Point(10, 9)]
        for pos in wall_pos:
            self.create_wall(pos)
    def create_fix_wall_4(self):
        """ Fixed wall layout #4. """
        wall_pos = [Point(3, 3), Point(3, 7), Point(3, 8), Point(3, 9),
                    Point(4, 3), Point(4, 6), Point(4, 10),
                    Point(5, 3), Point(5, 6), Point(5, 10),
                    Point(6, 8),
                    Point(7, 3), Point(7, 6), Point(7, 10),
                    Point(8, 3), Point(8, 6), Point(8, 10),
                    Point(9, 3), Point(9, 6), Point(9, 10),
                    Point(10, 3), Point(10, 7), Point(10, 8), Point(10, 9)]
        for pos in wall_pos:
            self.create_wall(pos)
    def create_fix_wall_5(self):
        """ Fixed wall layout #5. """
        wall_pos = [Point(1, 2), Point(1, 6), Point(1, 7), Point(1, 11),
                    Point(2, 1), Point(2, 4), Point(2, 9), Point(2, 12),
                    Point(3, 3), Point(3, 6), Point(3, 7), Point(3, 10),
                    Point(4, 2), Point(4, 5), Point(4, 8), Point(4, 11),
                    Point(5, 4), Point(5, 9),
                    Point(6, 1), Point(6, 3), Point(6, 10), Point(6, 12),
                    Point(7, 1), Point(7, 3), Point(7, 10), Point(7, 12),
                    Point(8, 4), Point(8, 9),
                    Point(9, 2), Point(9, 5), Point(9, 8), Point(9, 11),
                    Point(10, 3), Point(10, 6), Point(10, 7), Point(10, 10),
                    Point(11, 1), Point(11, 4), Point(11, 9), Point(11, 12),
                    Point(12, 2), Point(12, 6), Point(12, 7), Point(12, 11)]
        for pos in wall_pos:
            self.create_wall(pos)
    def create_fix_wall_6(self):
        """ Fixed wall layout #6. """
        wall_pos = [Point(1, 3), Point(1, 6), Point(1, 9),
                    Point(2, 2), Point(2, 5), Point(2, 8), Point(2, 11),
                    Point(3, 1), Point(3, 4), Point(3, 7), Point(3, 10), Point(3, 12),
                    Point(4, 3), Point(4, 6), Point(4, 9),
                    Point(5, 2), Point(5, 8), Point(5, 11),
                    Point(6, 1), Point(6, 4), Point(6, 12),
                    Point(7, 3), Point(7, 10),
                    Point(8, 2), Point(8, 5), Point(8, 8), Point(8, 11),
                    Point(9, 1), Point(9, 4), Point(9, 6), Point(9, 9), Point(9, 12),
                    Point(10, 3), Point(10, 7), Point(10, 10),
                    Point(11, 2), Point(11, 5), Point(11, 8), Point(11, 11),
                    Point(12, 3), Point(12, 6), Point(12, 9), Point(12, 12)]
        for pos in wall_pos:
            self.create_wall(pos)
    def create_fix_wall_7(self):
        """ Fixed wall layout #7. """
        wall_pos = [Point(2, 2), Point(2, 11),
                    Point(3, 3), Point(3, 4), Point(3, 5), Point(3, 6), Point(3, 7), Point(3, 8), Point(3, 9),
                    Point(3, 10),
                    Point(5, 3), Point(5, 10),
                    Point(6, 3), Point(6, 10),
                    Point(7, 3), Point(7, 10),
                    Point(8, 3), Point(8, 10),
                    Point(10, 3), Point(10, 4), Point(10, 5), Point(10, 6), Point(10, 7), Point(10, 8), Point(10, 9),
                    Point(10, 10),
                    Point(11, 2), Point(11, 11)]
        for pos in wall_pos:
            self.create_wall(pos)
    def create_fix_wall_8(self):
        """ Fixed wall layout #8. """
        wall_pos = [Point(1, 3), Point(1, 4), Point(1, 9), Point(1, 10),
                    Point(2, 3), Point(2, 4), Point(2, 9), Point(2, 10),
                    Point(3, 3), Point(3, 4), Point(3, 9), Point(3, 10),
                    Point(6, 1), Point(6, 2), Point(6, 3), Point(6, 4), Point(6, 9), Point(6, 10), Point(6, 11),
                    Point(6, 12),
                    Point(7, 1), Point(7, 2), Point(7, 3), Point(7, 4), Point(7, 9), Point(7, 10), Point(7, 11),
                    Point(7, 12),
                    Point(10, 3), Point(10, 4), Point(10, 9), Point(10, 10),
                    Point(11, 3), Point(11, 4), Point(11, 9), Point(11, 10),
                    Point(12, 3), Point(12, 4), Point(12, 9), Point(12, 10)]
        for pos in wall_pos:
            self.create_wall(pos)
    def create_fix_wall_9(self):
        """ Fixed wall layout #9. """
        wall_pos = [Point(3, 5), Point(3, 6),
                    Point(4, 4), Point(4, 10),
                    Point(5, 4), Point(5, 5), Point(5, 7), Point(5, 8), Point(5, 9),
                    Point(6, 4), Point(6, 9),
                    Point(7, 3),
                    Point(8, 5), Point(8, 6), Point(8, 7), Point(8, 8),
                    Point(9, 2), Point(9, 6),
                    Point(10, 3), Point(10, 4), Point(10, 6), Point(10, 10),
                    Point(11, 2), Point(11, 9), Point(11, 11),
                    Point(12, 10)]
        for pos in wall_pos:
            self.create_wall(pos)
    def generate_rand_wall(self):
        """ With 50% probability place one of the fixed wall layouts,
        otherwise scatter random walls via generate_wall(). """
        fixnum = np.random.uniform()
        if fixnum < 0.5:
            # NOTE(review): np.random.randint(1, 9) never returns 9, so
            # create_fix_wall_9 is unreachable — confirm whether 10 was meant.
            randomnum = np.random.randint(1, 9)
            funlist = {1: self.create_fix_wall_1, 2: self.create_fix_wall_2, 3: self.create_fix_wall_3,
                       4: self.create_fix_wall_4, 5: self.create_fix_wall_5, 6: self.create_fix_wall_6,
                       7: self.create_fix_wall_7, 8: self.create_fix_wall_8, 9: self.create_fix_wall_9}
            funlist[randomnum]()
            return
        self.generate_wall()
        # empty_pos = []
        # for i in range(1, self._num_rows - 1):
        #     for j in range(1, self._num_cols - 1):
        #         t = self._content[i][j].type
        #         if t == PointType.EMPTY:
        #             empty_pos.append(Pos(i, j))
        # empty_pos = self.field.get_empty_cell()
        # wall num
        # wallNum = np.random.randint(10, 50)
        # wall4rate = np.random.uniform()
        # h_pos = None
        # if wall4rate < 0.5:
        #     if empty_pos:
        #         h_pos = random.choice(empty_pos)
        #         w_pos1 = h_pos.adj(Direc.LEFT)
        #         w_pos2 = h_pos.adj(Direc.UP)
        #         w_pos3 = h_pos.adj(Direc.RIGHT)
        #         w_pos4 = h_pos.adj(Direc.DOWN)
        #         for pos in [w_pos1, w_pos2, w_pos3, w_pos4]:
        #             if pos in empty_pos:
        #                 self.create_wall(pos)
        #                 empty_pos.remove(pos)
        #                 wallNum -= 1
        # while wallNum > 0:
        #     w_pos = random.choice(empty_pos)
        #     if h_pos != w_pos:
        #         self.create_wall(w_pos)
        #         empty_pos.remove(w_pos)
        #         wallNum -= 1
    def generate_wall(self):
        """ Scatter a random number (10-59) of walls on random empty cells.

        NOTE(review): the same cell can be chosen twice, so the actual wall
        count may be lower — presumably acceptable; confirm.
        """
        # emptyNum = len(self.field._empty_cells)
        randnum = np.random.randint(10, 60)
        i=0
        while(i<randnum):
            pos = random.choice(self.field.get_empty_cell())
            i+=1
            self.field[pos] = CellType.WALL
    def generate_emeny(self, position=None):
        """ Place the enemy (rendered as SNAKE_BODY) at *position* or a random
        empty cell, usually surround it with fruit, and sometimes add extras.

        NOTE: "emeny" is a typo for "enemy"; the name is kept because callers
        (e.g. new_episode) use it.
        """
        if position is None:
            position = self.field.get_random_empty_cell()
        self.enemy = position
        self.field[position] = CellType.SNAKE_BODY
        # With 80% probability surround the enemy with fruit on empty neighbors.
        if np.random.random() > 0.2:
            if (self.field[position + SnakeDirection.NORTH] == CellType.EMPTY):
                self.field[position + SnakeDirection.NORTH] = CellType.FRUIT
                self.fruit.append(position + SnakeDirection.NORTH)
            if (self.field[position + SnakeDirection.SOUTH] == CellType.EMPTY):
                self.field[position + SnakeDirection.SOUTH] = CellType.FRUIT
                self.fruit.append(position + SnakeDirection.SOUTH)
            if (self.field[position + SnakeDirection.WEST] == CellType.EMPTY):
                self.field[position + SnakeDirection.WEST] = CellType.FRUIT
                self.fruit.append(position + SnakeDirection.WEST)
            if (self.field[position + SnakeDirection.EAST] == CellType.EMPTY):
                self.field[position + SnakeDirection.EAST] = CellType.FRUIT
                self.fruit.append(position + SnakeDirection.EAST)
        # Up to two extra fruit at random cells, each with 10% probability.
        if np.random.random() < 0.1:
            position = self.field.get_random_empty_cell()
            self.field[position] = CellType.FRUIT
            self.fruit.append(position)
        if np.random.random() < 0.1:
            position = self.field.get_random_empty_cell()
            self.field[position] = CellType.FRUIT
            self.fruit.append(position)
    def generate_snake(self, snake=None):
        """ Install an externally created snake and draw it on the field. """
        self.snake = snake
        self.field.place_snake(self.snake)
    def generate_poison(self):
        """ Possibly ring the field border with poison cells.

        NOTE(review): the `< 0` check can never pass, so poison generation is
        currently disabled — presumably a deliberate toggle; confirm.
        """
        if np.random.random() < 0:
            self.poison_num = random.Random().choice([1, 2, 3])
            for position in self.field.get_empty_cell():
                if (0 < position.x <= self.poison_num or 0 < position.y <= self.poison_num or (
                        position.x + self.poison_num) >= (self.field.size - 1) or (position.y + self.poison_num) >= (
                        self.field.size - 1)):
                    self.field[position] = CellType.POISON
                    self.poison.append(position)
    def be_poison(self, position):
        """ True if *position* lies within the poison border band. """
        # if np.random.random() < 1:
        if (0 < position.x <= self.poison_num or 0 < position.y <= self.poison_num or (
                position.x + self.poison_num) >= (self.field.size - 1) or (position.y + self.poison_num) >= (
                self.field.size - 1)):
            return True
        return False
    def timestep(self):
        """ Execute the timestep and return the new observable state. """
        self.timestep_index += 1
        reward = 0
        isdie = False
        old_head = self.snake.head
        old_tail = self.snake.tail
        # Are we about to eat the fruit?
        if self.fruit.__contains__(self.snake.peek_next_move()):
            self.fruit.remove(self.snake.peek_next_move())
            # self.generate_fruit()
            # old_tail = None
            reward += self.rewards['ate_fruit']
            self.stats.fruits_eaten += 1
        elif self.be_poison(self.snake.peek_next_move()):
            self.stats.poisons_eaten += 1
        # If not, just move forward.
        self.snake.move()
        self.field.update_snake_footprint(old_head, old_tail, self.snake.head)
        # Hit a wall or own body?
        if not self.is_alive():
            # reward -=self.fruit.__len__()
            if self.has_hit_wall() or self.has_hit_own_body():
                self.stats.termination_reason = 'hit_wall'
                reward -= 0.7
                isdie = True
                self.field[self.snake.head] = CellType.SNAKE_HEAD
            self.is_game_over = True
            # reward *= 0.7
            # print(self.fruit.__len__())
            # if(self.get_wall_num(old_head) >= 2) and self.fruit.__len__()<=1:
            #     reward = self.get_wall_num(old_head) - self.fruit.__len__()
            # else:
            #     reward = -1
            # Shaped death reward: more adjacent walls near the head = better hiding.
            reward += (self.get_wall_num(old_head) - 1.5)
            if self.snake.length == 2 or self.snake.length == 1:
                reward -= 2
            if self.stats.poisons_eaten != 0:
                reward -= 2
            if (self.be_poison(old_head)):
                reward -= 1
            # reward += 0.99
        # Exceeded the limit of moves?
        if self.timestep_index >= self.max_step_limit:
            self.is_game_over = True
            self.stats.termination_reason = 'timestep_limit_exceeded'
        result = TimestepResult(
            observation=self.get_observation(),
            reward=reward,
            is_episode_end=self.is_game_over
        )
        self.record_timestep_stats(result)
        return result
    def get_wall_num(self, position=None):
        """ Score adjacency around *position*: +1 per wall, +0.5 per poison. """
        num = 0
        if self.field[position + SnakeDirection.NORTH] == CellType.WALL:
            num += 1
        if self.field[position + SnakeDirection.SOUTH] == CellType.WALL:
            num += 1
        if self.field[position + SnakeDirection.WEST] == CellType.WALL:
            num += 1
        if self.field[position + SnakeDirection.EAST] == CellType.WALL:
            num += 1
        if self.field[
            position + SnakeDirection.NORTH] == CellType.POISON:
            num += 0.5
        if self.field[
            position + SnakeDirection.SOUTH] == CellType.POISON:
            num += 0.5
        if self.field[
            position + SnakeDirection.WEST] == CellType.POISON:
            num += 0.5
        if self.field[
            position + SnakeDirection.EAST] == CellType.POISON:
            num += 0.5
        return num
    def generate_fruit(self, position=None):
        """ Generate a new fruit at *position* or a random unoccupied cell. """
        if position is None:
            position = self.field.get_random_empty_cell()
        self.field[position] = CellType.FRUIT
        self.fruit.append(position)
    def has_hit_wall(self):
        """ True if the snake has hit a wall, False otherwise. """
        return self.field[self.snake.head] == CellType.WALL
    def has_hit_own_body(self):
        """ True if the snake has hit its own body, False otherwise. """
        return self.field[self.snake.head] == CellType.SNAKE_BODY
    def is_alive(self):
        """ True if the snake is still alive, False otherwise. """
        return not self.has_hit_wall() and not self.has_hit_own_body()
class TimestepResult(object):
    """ Represents the information provided to the agent after each timestep. """

    def __init__(self, observation, reward, is_episode_end):
        # observation: 2D grid of cells, rendered row by row in __str__.
        self.observation = observation
        # reward: scalar reward earned during this timestep.
        self.reward = reward
        # is_episode_end: True when this timestep terminated the episode.
        self.is_episode_end = is_episode_end

    def __str__(self):
        field_map = '\n'.join([
            ''.join(str(cell) for cell in row)
            for row in self.observation
        ])
        # Bug fix: the return value was a plain string literal (missing 'f'
        # prefix), so the computed field_map was unused and callers saw the
        # raw '{field_map}' template instead of the rendered grid.
        return f'{field_map}\nR = {self.reward} {self.is_episode_end}\n'
class EpisodeStatistics(object):
    """ Represents the summary of the agent's performance during the episode. """

    def __init__(self):
        self.reset()

    def reset(self):
        """ Forget all previous statistics and prepare for a new episode. """
        self.timesteps_survived = 0
        self.sum_episode_rewards = 0
        self.fruits_eaten = 0
        self.poisons_eaten = 0
        self.termination_reason = None
        # Per-action usage counts, keyed by the action constant itself.
        self.action_counter = {
            action: 0
            for action in ALL_SNAKE_ACTIONS
        }

    def record_timestep(self, action, result):
        """ Update the stats based on the current timestep results. """
        self.sum_episode_rewards += result.reward
        if action is not None:
            self.action_counter[action] += 1

    def flatten(self):
        """ Format all episode statistics as a flat object. """
        flat_stats = {
            'timesteps_survived': self.timesteps_survived,
            'sum_episode_rewards': self.sum_episode_rewards,
            'mean_reward': self.sum_episode_rewards / self.timesteps_survived if self.timesteps_survived else None,
            'fruits_eaten': self.fruits_eaten,
            'termination_reason': self.termination_reason,
        }
        # Bug fix: the key was the literal string 'action_counter_{action}'
        # (missing 'f' prefix), so every action collapsed into a single dict
        # entry and all but one counter were silently dropped from the
        # flattened stats / CSV output.
        flat_stats.update({
            f'action_counter_{action}': self.action_counter.get(action, 0)
            for action in ALL_SNAKE_ACTIONS
        })
        return flat_stats

    def to_dataframe(self):
        """ Convert the episode statistics to a Pandas data frame. """
        return pd.DataFrame([self.flatten()])

    def __str__(self):
        return pprint.pformat(self.flatten())
| 41.010772 | 117 | 0.541391 | import pprint
import random
import time
import numpy as np
import pandas as pd
from .entities import Snake, Field, CellType, SnakeAction, ALL_SNAKE_ACTIONS, SnakeDirection, Point
class EnvironmentAttackAndHideRandom(object):
def __init__(self, config, verbose=1):
self.field = Field(level_map=config['field'])
self.snake = None
self.fruit = []
self.poison = []
self.poison_num = 0
self.initial_snake_length = config['initial_snake_length']
self.rewards = config['rewards']
self.max_step_limit = config.get('max_step_limit', 1000)
self.is_game_over = False
self.timestep_index = 0
self.current_action = None
self.stats = EpisodeStatistics()
self.verbose = verbose
self.debug_file = None
self.stats_file = None
self.enemy = None
def seed(self, value):
random.seed(value)
np.random.seed(value)
def get_random_empty_cell(self):
return self.field.get_random_empty_cell()
@property
def observation_shape(self):
return self.field.size, self.field.size
@property
def num_actions(self):
return len(ALL_SNAKE_ACTIONS)
def new_episode(self):
self.field.create_level()
self.generate_rand_wall()
self.stats.reset()
self.timestep_index = 0
self.enemy = None
self.fruit = []
self.poison = []
self.poison_num = 0
self.snake = Snake(self.field.get_random_empty_cell(), length=self.initial_snake_length)
self.field.place_snake(self.snake)
self.generate_emeny()
self.generate_poison()
self.current_action = None
self.is_game_over = False
result = TimestepResult(
observation=self.get_observation(),
reward=0,
is_episode_end=self.is_game_over
)
self.record_timestep_stats(result)
return result
def getResult(self):
result = TimestepResult(
observation=self.get_observation(),
reward=0,
is_episode_end=self.is_game_over
)
self.record_timestep_stats(result)
return result
def record_timestep_stats(self, result):
timestamp = time.strftime('%Y%m%d-%H%M%S')
if self.verbose >= 1 and self.stats_file is None:
self.stats_file = open('snake-env-{timestamp}.csv', 'w')
stats_csv_header_line = self.stats.to_dataframe()[:0].to_csv(index=None)
self.stats.record_timestep(self.current_action, result)
self.stats.timesteps_survived = self.timestep_index
f get_observation(self):
return np.copy(self.field._cells)
def choose_action(self, action):
self.current_action = action
if action == SnakeAction.TURN_LEFT1:
self.snake.turn_left()
elif action == SnakeAction.TURN_LEFT2:
self.snake.turn_left()
self.snake.turn_left()
elif action == SnakeAction.TURN_LEFT3:
self.snake.turn_left()
self.snake.turn_left()
self.snake.turn_left()
elif action == SnakeAction.TURN_RIGHT1:
self.snake.turn_right()
elif action == SnakeAction.TURN_RIGHT2:
self.snake.turn_right()
self.snake.turn_right()
elif action == SnakeAction.TURN_RIGHT3:
self.snake.turn_right()
self.snake.turn_right()
self.snake.turn_right()
def create_wall(self, pos):
self.field[pos] = CellType.WALL
def create_fix_wall_1(self):
wall_pos = [Point(3, 4), Point(3, 5), Point(3, 6), Point(3, 7), Point(3, 8), Point(3, 9),
Point(6, 3), Point(6, 4), Point(6, 5), Point(6, 8), Point(6, 9), Point(6, 10),
Point(7, 6),
Point(8, 5), Point(8, 8),
Point(9, 4), Point(9, 9),
Point(10, 3), Point(10, 5), Point(10, 6), Point(10, 7), Point(10, 8), Point(10, 10),
Point(11, 11)]
for pos in wall_pos:
self.create_wall(pos)
def create_fix_wall_2(self):
wall_pos = [Point(2, 3), Point(2, 10),
Point(3, 3), Point(3, 10),
Point(4, 4), Point(4, 9),
Point(5, 5), Point(5, 8),
Point(6, 6), Point(6, 7),
Point(7, 3), Point(7, 10),
Point(8, 3), Point(8, 6), Point(8, 7), Point(8, 10),
Point(9, 3), Point(9, 6), Point(9, 7), Point(9, 10),
Point(10, 4), Point(10, 5), Point(10, 8), Point(10, 9)]
for pos in wall_pos:
self.create_wall(pos)
def create_fix_wall_3(self):
wall_pos = [Point(3, 2), Point(3, 3), Point(3, 8), Point(3, 9),
Point(4, 4), Point(4, 7), Point(4, 10),
Point(5, 4), Point(5, 7), Point(5, 10),
Point(6, 3),
Point(7, 2), Point(7, 7), Point(7, 10),
Point(8, 2), Point(8, 7), Point(8, 10),
Point(9, 2), Point(9, 7), Point(9, 10),
Point(10, 3), Point(10, 4), Point(10, 8), Point(10, 9)]
for pos in wall_pos:
self.create_wall(pos)
def create_fix_wall_4(self):
wall_pos = [Point(3, 3), Point(3, 7), Point(3, 8), Point(3, 9),
Point(4, 3), Point(4, 6), Point(4, 10),
Point(5, 3), Point(5, 6), Point(5, 10),
Point(6, 8),
Point(7, 3), Point(7, 6), Point(7, 10),
Point(8, 3), Point(8, 6), Point(8, 10),
Point(9, 3), Point(9, 6), Point(9, 10),
Point(10, 3), Point(10, 7), Point(10, 8), Point(10, 9)]
for pos in wall_pos:
self.create_wall(pos)
def create_fix_wall_5(self):
wall_pos = [Point(1, 2), Point(1, 6), Point(1, 7), Point(1, 11),
Point(2, 1), Point(2, 4), Point(2, 9), Point(2, 12),
Point(3, 3), Point(3, 6), Point(3, 7), Point(3, 10),
Point(4, 2), Point(4, 5), Point(4, 8), Point(4, 11),
Point(5, 4), Point(5, 9),
Point(6, 1), Point(6, 3), Point(6, 10), Point(6, 12),
Point(7, 1), Point(7, 3), Point(7, 10), Point(7, 12),
Point(8, 4), Point(8, 9),
Point(9, 2), Point(9, 5), Point(9, 8), Point(9, 11),
Point(10, 3), Point(10, 6), Point(10, 7), Point(10, 10),
Point(11, 1), Point(11, 4), Point(11, 9), Point(11, 12),
Point(12, 2), Point(12, 6), Point(12, 7), Point(12, 11)]
for pos in wall_pos:
self.create_wall(pos)
def create_fix_wall_6(self):
wall_pos = [Point(1, 3), Point(1, 6), Point(1, 9),
Point(2, 2), Point(2, 5), Point(2, 8), Point(2, 11),
Point(3, 1), Point(3, 4), Point(3, 7), Point(3, 10), Point(3, 12),
Point(4, 3), Point(4, 6), Point(4, 9),
Point(5, 2), Point(5, 8), Point(5, 11),
Point(6, 1), Point(6, 4), Point(6, 12),
Point(7, 3), Point(7, 10),
Point(8, 2), Point(8, 5), Point(8, 8), Point(8, 11),
Point(9, 1), Point(9, 4), Point(9, 6), Point(9, 9), Point(9, 12),
Point(10, 3), Point(10, 7), Point(10, 10),
Point(11, 2), Point(11, 5), Point(11, 8), Point(11, 11),
Point(12, 3), Point(12, 6), Point(12, 9), Point(12, 12)]
for pos in wall_pos:
self.create_wall(pos)
def create_fix_wall_7(self):
wall_pos = [Point(2, 2), Point(2, 11),
Point(3, 3), Point(3, 4), Point(3, 5), Point(3, 6), Point(3, 7), Point(3, 8), Point(3, 9),
Point(3, 10),
Point(5, 3), Point(5, 10),
Point(6, 3), Point(6, 10),
Point(7, 3), Point(7, 10),
Point(8, 3), Point(8, 10),
Point(10, 3), Point(10, 4), Point(10, 5), Point(10, 6), Point(10, 7), Point(10, 8), Point(10, 9),
Point(10, 10),
Point(11, 2), Point(11, 11)]
for pos in wall_pos:
self.create_wall(pos)
def create_fix_wall_8(self):
wall_pos = [Point(1, 3), Point(1, 4), Point(1, 9), Point(1, 10),
Point(2, 3), Point(2, 4), Point(2, 9), Point(2, 10),
Point(3, 3), Point(3, 4), Point(3, 9), Point(3, 10),
Point(6, 1), Point(6, 2), Point(6, 3), Point(6, 4), Point(6, 9), Point(6, 10), Point(6, 11),
Point(6, 12),
Point(7, 1), Point(7, 2), Point(7, 3), Point(7, 4), Point(7, 9), Point(7, 10), Point(7, 11),
Point(7, 12),
Point(10, 3), Point(10, 4), Point(10, 9), Point(10, 10),
Point(11, 3), Point(11, 4), Point(11, 9), Point(11, 10),
Point(12, 3), Point(12, 4), Point(12, 9), Point(12, 10)]
for pos in wall_pos:
self.create_wall(pos)
def create_fix_wall_9(self):
wall_pos = [Point(3, 5), Point(3, 6),
Point(4, 4), Point(4, 10),
Point(5, 4), Point(5, 5), Point(5, 7), Point(5, 8), Point(5, 9),
Point(6, 4), Point(6, 9),
Point(7, 3),
Point(8, 5), Point(8, 6), Point(8, 7), Point(8, 8),
Point(9, 2), Point(9, 6),
Point(10, 3), Point(10, 4), Point(10, 6), Point(10, 10),
Point(11, 2), Point(11, 9), Point(11, 11),
Point(12, 10)]
for pos in wall_pos:
self.create_wall(pos)
def generate_rand_wall(self):
fixnum = np.random.uniform()
if fixnum < 0.5:
randomnum = np.random.randint(1, 9)
funlist = {1: self.create_fix_wall_1, 2: self.create_fix_wall_2, 3: self.create_fix_wall_3,
4: self.create_fix_wall_4, 5: self.create_fix_wall_5, 6: self.create_fix_wall_6,
7: self.create_fix_wall_7, 8: self.create_fix_wall_8, 9: self.create_fix_wall_9}
funlist[randomnum]()
return
self.generate_wall()
def generate_wall(self):
randnum = np.random.randint(10, 60)
i=0
while(i<randnum):
pos = random.choice(self.field.get_empty_cell())
i+=1
self.field[pos] = CellType.WALL
def generate_emeny(self, position=None):
if position is None:
position = self.field.get_random_empty_cell()
self.enemy = position
self.field[position] = CellType.SNAKE_BODY
if np.random.random() > 0.2:
if (self.field[position + SnakeDirection.NORTH] == CellType.EMPTY):
self.field[position + SnakeDirection.NORTH] = CellType.FRUIT
self.fruit.append(position + SnakeDirection.NORTH)
if (self.field[position + SnakeDirection.SOUTH] == CellType.EMPTY):
self.field[position + SnakeDirection.SOUTH] = CellType.FRUIT
self.fruit.append(position + SnakeDirection.SOUTH)
if (self.field[position + SnakeDirection.WEST] == CellType.EMPTY):
self.field[position + SnakeDirection.WEST] = CellType.FRUIT
self.fruit.append(position + SnakeDirection.WEST)
if (self.field[position + SnakeDirection.EAST] == CellType.EMPTY):
self.field[position + SnakeDirection.EAST] = CellType.FRUIT
self.fruit.append(position + SnakeDirection.EAST)
if np.random.random() < 0.1:
position = self.field.get_random_empty_cell()
self.field[position] = CellType.FRUIT
self.fruit.append(position)
if np.random.random() < 0.1:
position = self.field.get_random_empty_cell()
self.field[position] = CellType.FRUIT
self.fruit.append(position)
def generate_snake(self, snake=None):
self.snake = snake
self.field.place_snake(self.snake)
def generate_poison(self):
if np.random.random() < 0:
self.poison_num = random.Random().choice([1, 2, 3])
for position in self.field.get_empty_cell():
if (0 < position.x <= self.poison_num or 0 < position.y <= self.poison_num or (
position.x + self.poison_num) >= (self.field.size - 1) or (position.y + self.poison_num) >= (
self.field.size - 1)):
self.field[position] = CellType.POISON
self.poison.append(position)
def be_poison(self, position):
if (0 < position.x <= self.poison_num or 0 < position.y <= self.poison_num or (
position.x + self.poison_num) >= (self.field.size - 1) or (position.y + self.poison_num) >= (
self.field.size - 1)):
return True
return False
def timestep(self):
self.timestep_index += 1
reward = 0
isdie = False
old_head = self.snake.head
old_tail = self.snake.tail
if self.fruit.__contains__(self.snake.peek_next_move()):
self.fruit.remove(self.snake.peek_next_move())
reward += self.rewards['ate_fruit']
self.stats.fruits_eaten += 1
elif self.be_poison(self.snake.peek_next_move()):
self.stats.poisons_eaten += 1
self.snake.move()
self.field.update_snake_footprint(old_head, old_tail, self.snake.head)
if not self.is_alive():
if self.has_hit_wall() or self.has_hit_own_body():
self.stats.termination_reason = 'hit_wall'
reward -= 0.7
isdie = True
self.field[self.snake.head] = CellType.SNAKE_HEAD
self.is_game_over = True
reward += (self.get_wall_num(old_head) - 1.5)
if self.snake.length == 2 or self.snake.length == 1:
reward -= 2
if self.stats.poisons_eaten != 0:
reward -= 2
if (self.be_poison(old_head)):
reward -= 1
if self.timestep_index >= self.max_step_limit:
self.is_game_over = True
self.stats.termination_reason = 'timestep_limit_exceeded'
result = TimestepResult(
observation=self.get_observation(),
reward=reward,
is_episode_end=self.is_game_over
)
self.record_timestep_stats(result)
return result
def get_wall_num(self, position=None):
num = 0
if self.field[position + SnakeDirection.NORTH] == CellType.WALL:
num += 1
if self.field[position + SnakeDirection.SOUTH] == CellType.WALL:
num += 1
if self.field[position + SnakeDirection.WEST] == CellType.WALL:
num += 1
if self.field[position + SnakeDirection.EAST] == CellType.WALL:
num += 1
if self.field[
position + SnakeDirection.NORTH] == CellType.POISON:
num += 0.5
if self.field[
position + SnakeDirection.SOUTH] == CellType.POISON:
num += 0.5
if self.field[
position + SnakeDirection.WEST] == CellType.POISON:
num += 0.5
if self.field[
position + SnakeDirection.EAST] == CellType.POISON:
num += 0.5
return num
def generate_fruit(self, position=None):
if position is None:
position = self.field.get_random_empty_cell()
self.field[position] = CellType.FRUIT
self.fruit.append(position)
def has_hit_wall(self):
return self.field[self.snake.head] == CellType.WALL
def has_hit_own_body(self):
return self.field[self.snake.head] == CellType.SNAKE_BODY
def is_alive(self):
return not self.has_hit_wall() and not self.has_hit_own_body()
class TimestepResult(object):
def __init__(self, observation, reward, is_episode_end):
self.observation = observation
self.reward = reward
self.is_episode_end = is_episode_end
def __str__(self):
field_map = '\n'.join([
''.join(str(cell) for cell in row)
for row in self.observation
])
return '{field_map}\nR = {self.reward} {self.is_episode_end}\n'
class EpisodeStatistics(object):
def __init__(self):
self.reset()
def reset(self):
self.timesteps_survived = 0
self.sum_episode_rewards = 0
self.fruits_eaten = 0
self.poisons_eaten = 0
self.termination_reason = None
self.action_counter = {
action: 0
for action in ALL_SNAKE_ACTIONS
}
def record_timestep(self, action, result):
self.sum_episode_rewards += result.reward
if action is not None:
self.action_counter[action] += 1
def flatten(self):
flat_stats = {
'timesteps_survived': self.timesteps_survived,
'sum_episode_rewards': self.sum_episode_rewards,
'mean_reward': self.sum_episode_rewards / self.timesteps_survived if self.timesteps_survived else None,
'fruits_eaten': self.fruits_eaten,
'termination_reason': self.termination_reason,
}
flat_stats.update({
'action_counter_{action}': self.action_counter.get(action, 0)
for action in ALL_SNAKE_ACTIONS
})
return flat_stats
def to_dataframe(self):
return pd.DataFrame([self.flatten()])
def __str__(self):
return pprint.pformat(self.flatten())
| true | true |
f738b22029dc69219a26c4c48c3bb6178ab5a0cd | 934 | py | Python | homepairs/HomepairsApp/Apps/ServiceRequest/models.py | YellowRainBoots/2.0 | bf215350c2da0ab28ad2ec6f9338fb1b73b3f2e5 | [
"MIT"
] | 1 | 2021-01-19T00:48:10.000Z | 2021-01-19T00:48:10.000Z | homepairs/HomepairsApp/Apps/ServiceRequest/models.py | YellowRainBoots/2.0 | bf215350c2da0ab28ad2ec6f9338fb1b73b3f2e5 | [
"MIT"
] | 17 | 2020-01-23T05:51:18.000Z | 2020-06-16T02:33:41.000Z | homepairs/HomepairsApp/Apps/ServiceRequest/models.py | YellowRainBoots/2.0 | bf215350c2da0ab28ad2ec6f9338fb1b73b3f2e5 | [
"MIT"
] | 1 | 2020-08-06T02:10:58.000Z | 2020-08-06T02:10:58.000Z | from django.db import models
from django.db.models.fields import CharField, DateTimeField
from ..Appliances.models import Appliance
from ..Properties.models import Property
class ServiceRequest(models.Model):
    """A repair/maintenance job tied to a property and one of its appliances."""
    # NOTE(review): field names use camelCase rather than Django's usual
    # snake_case; kept as-is since the DB schema depends on them.
    job = CharField(max_length=100)
    details = CharField(max_length=300)
    serviceCompany = CharField(max_length=100)
    client = CharField(max_length=100)
    status = CharField(max_length=20)
    dayStarted = DateTimeField()
    # Cascade deletes: removing the appliance or property removes its requests.
    appFixed = models.ForeignKey(Appliance, on_delete=models.CASCADE)
    location = models.ForeignKey(Property, on_delete=models.CASCADE)
    def __str__(self):
        # Human-readable label: "<job> <details>".
        return self.job + " " + self.details
    def toDict(self):
        """Serialize the request to a plain dict (foreign keys omitted)."""
        return {
            "job": self.job,
            "details": self.details,
            "serviceCompany": self.serviceCompany,
            "client": self.client,
            "status": self.status,
            "dayStarted": self.dayStarted,
        }
| 29.1875 | 69 | 0.669165 | from django.db import models
from django.db.models.fields import CharField, DateTimeField
from ..Appliances.models import Appliance
from ..Properties.models import Property
class ServiceRequest(models.Model):
job = CharField(max_length=100)
details = CharField(max_length=300)
serviceCompany = CharField(max_length=100)
client = CharField(max_length=100)
status = CharField(max_length=20)
dayStarted = DateTimeField()
appFixed = models.ForeignKey(Appliance, on_delete=models.CASCADE)
location = models.ForeignKey(Property, on_delete=models.CASCADE)
def __str__(self):
return self.job + " " + self.details
def toDict(self):
return {
"job": self.job,
"details": self.details,
"serviceCompany": self.serviceCompany,
"client": self.client,
"status": self.status,
"dayStarted": self.dayStarted,
}
| true | true |
f738b4c36c04a658f2f353f2e3fd72cdeb52bc0e | 3,413 | py | Python | app/app/settings.py | Khande1n/recipe-app-api | 0b2f1f91d18ab51f73b957a92ba46287ce958867 | [
"MIT"
] | null | null | null | app/app/settings.py | Khande1n/recipe-app-api | 0b2f1f91d18ab51f73b957a92ba46287ce958867 | [
"MIT"
] | null | null | null | app/app/settings.py | Khande1n/recipe-app-api | 0b2f1f91d18ab51f73b957a92ba46287ce958867 | [
"MIT"
] | null | null | null | """
Django settings for app project.
Generated by 'django-admin startproject' using Django 2.1.15.
For more information on this file, see
https://docs.djangoproject.com/en/2.1/topics/settings/
For the full list of settings and their values, see
https://docs.djangoproject.com/en/2.1/ref/settings/
"""
import os
# Build paths inside the project like this: os.path.join(BASE_DIR, ...)
BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
# Quick-start development settings - unsuitable for production
# See https://docs.djangoproject.com/en/2.1/howto/deployment/checklist/
# SECURITY WARNING: keep the secret key used in production secret!
SECRET_KEY = 'k=k+n1wya85+_r@t#er))vo4-yq80*qiqv&t&8##ou5r(9i*8+'
# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = True
ALLOWED_HOSTS = []
# Application definition
INSTALLED_APPS = [
'django.contrib.admin',
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.messages',
'django.contrib.staticfiles',
'rest_framework',
'rest_framework.authtoken',
'core',
'user',
'recipe',
]
MIDDLEWARE = [
'django.middleware.security.SecurityMiddleware',
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.common.CommonMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
'django.middleware.clickjacking.XFrameOptionsMiddleware',
]
ROOT_URLCONF = 'app.urls'
TEMPLATES = [
{
'BACKEND': 'django.template.backends.django.DjangoTemplates',
'DIRS': [],
'APP_DIRS': True,
'OPTIONS': {
'context_processors': [
'django.template.context_processors.debug',
'django.template.context_processors.request',
'django.contrib.auth.context_processors.auth',
'django.contrib.messages.context_processors.messages',
],
},
},
]
WSGI_APPLICATION = 'app.wsgi.application'
# Database
# https://docs.djangoproject.com/en/2.1/ref/settings/#databases
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.postgresql',
'NAME': os.environ.get('DB_NAME'),
'HOST': os.environ.get('DB_HOST'),
'USER': os.environ.get('DB_USER'),
'PASSWORD': os.environ.get('DB_PASS'),
}
}
# Password validation
# https://docs.djangoproject.com/en/2.1/ref/settings/#auth-password-validators
AUTH_PASSWORD_VALIDATORS = [
{
'NAME': 'django.contrib.auth.password_validation.UserAttributeSimilarityValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.MinimumLengthValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.CommonPasswordValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.NumericPasswordValidator',
},
]
# Internationalization
# https://docs.djangoproject.com/en/2.1/topics/i18n/
LANGUAGE_CODE = 'en-us'
TIME_ZONE = 'UTC'
USE_I18N = True
USE_L10N = True
USE_TZ = True
# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/2.1/howto/static-files/
STATIC_URL = '/static/'
MEDIA_URL = '/media/'
MEDIA_ROOT = '/vol/web/media'
STATIC_ROOT = '/vol/web/static'
AUTH_USER_MODEL = 'core.User'
| 25.281481 | 91 | 0.684735 |
import os
BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
SECRET_KEY = 'k=k+n1wya85+_r@t#er))vo4-yq80*qiqv&t&8##ou5r(9i*8+'
DEBUG = True
ALLOWED_HOSTS = []
# Application definition
INSTALLED_APPS = [
'django.contrib.admin',
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.messages',
'django.contrib.staticfiles',
'rest_framework',
'rest_framework.authtoken',
'core',
'user',
'recipe',
]
MIDDLEWARE = [
'django.middleware.security.SecurityMiddleware',
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.common.CommonMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
'django.middleware.clickjacking.XFrameOptionsMiddleware',
]
ROOT_URLCONF = 'app.urls'
TEMPLATES = [
{
'BACKEND': 'django.template.backends.django.DjangoTemplates',
'DIRS': [],
'APP_DIRS': True,
'OPTIONS': {
'context_processors': [
'django.template.context_processors.debug',
'django.template.context_processors.request',
'django.contrib.auth.context_processors.auth',
'django.contrib.messages.context_processors.messages',
],
},
},
]
WSGI_APPLICATION = 'app.wsgi.application'
# Database
# https://docs.djangoproject.com/en/2.1/ref/settings/#databases
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.postgresql',
'NAME': os.environ.get('DB_NAME'),
'HOST': os.environ.get('DB_HOST'),
'USER': os.environ.get('DB_USER'),
'PASSWORD': os.environ.get('DB_PASS'),
}
}
# Password validation
# https://docs.djangoproject.com/en/2.1/ref/settings/#auth-password-validators
AUTH_PASSWORD_VALIDATORS = [
{
'NAME': 'django.contrib.auth.password_validation.UserAttributeSimilarityValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.MinimumLengthValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.CommonPasswordValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.NumericPasswordValidator',
},
]
# Internationalization
# https://docs.djangoproject.com/en/2.1/topics/i18n/
LANGUAGE_CODE = 'en-us'
TIME_ZONE = 'UTC'
USE_I18N = True
USE_L10N = True
USE_TZ = True
# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/2.1/howto/static-files/
STATIC_URL = '/static/'
MEDIA_URL = '/media/'
MEDIA_ROOT = '/vol/web/media'
STATIC_ROOT = '/vol/web/static'
AUTH_USER_MODEL = 'core.User'
| true | true |
f738b5189dfa185778930199175713cfeabf6d2c | 8,011 | py | Python | galaxy/accounts/admin.py | tima/galaxy | b371b973e0e9150f3e8b9b08068828b092982f62 | [
"Apache-2.0"
] | null | null | null | galaxy/accounts/admin.py | tima/galaxy | b371b973e0e9150f3e8b9b08068828b092982f62 | [
"Apache-2.0"
] | null | null | null | galaxy/accounts/admin.py | tima/galaxy | b371b973e0e9150f3e8b9b08068828b092982f62 | [
"Apache-2.0"
] | null | null | null | # (c) 2012-2018, Ansible by Red Hat
#
# This file is part of Ansible Galaxy
#
# Ansible Galaxy is free software: you can redistribute it and/or modify
# it under the terms of the Apache License as published by
# the Apache Software Foundation, either version 2 of the License, or
# (at your option) any later version.
#
# Ansible Galaxy is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# Apache License for more details.
#
# You should have received a copy of the Apache License
# along with Galaxy. If not, see <http://www.apache.org/licenses/>.
from django.db import transaction
from django.conf import settings
from django.contrib import admin
from django.contrib.auth.forms import AdminPasswordChangeForm
from django.contrib import messages
from django.core.exceptions import PermissionDenied
from django.http import HttpResponseRedirect, Http404
from django.shortcuts import get_object_or_404
from django.template.response import TemplateResponse
from django.utils.html import escape
from django.utils.decorators import method_decorator
from django.utils.translation import ugettext, ugettext_lazy as _
from django.views.decorators.csrf import csrf_protect
from django.views.decorators.debug import sensitive_post_parameters
from .forms import CustomUserChangeForm, CustomUserCreationForm
from .models import CustomUser
csrf_protect_m = method_decorator(csrf_protect)
sensitive_post_parameters_m = method_decorator(sensitive_post_parameters())
class CustomUserAdmin(admin.ModelAdmin):
"""
The default UserAdmin class, but with changes for our CustomUser
where `first_name` and `last_name` are replaced by `full_name` and
`short_name`
"""
add_form_template = 'admin/auth/user/add_form.html'
change_user_password_template = None
fieldsets = (
(None, {'fields': ('username', 'password')}),
(_('Personal info'), {'fields': ('full_name', 'short_name', 'email')}),
(_('Permissions'), {'fields': ('is_active', 'is_staff', 'is_superuser',
'groups', 'user_permissions')}),
(_('Important dates'), {'fields': ('last_login', 'date_joined')}),
)
add_fieldsets = (
(None, {'classes': ('wide',),
'fields': ('username', 'password1', 'password2')}),
)
form = CustomUserChangeForm
add_form = CustomUserCreationForm
change_password_form = AdminPasswordChangeForm
list_display = ('username', 'email', 'full_name', 'short_name', 'is_staff')
list_filter = ('is_staff', 'is_superuser', 'is_active', 'groups')
search_fields = ('username', 'full_name', 'short_name', 'email')
ordering = ('username',)
filter_horizontal = ('groups', 'user_permissions',)
def get_fieldsets(self, request, obj=None):
if not obj:
return self.add_fieldsets
return super(CustomUserAdmin, self).get_fieldsets(request, obj)
def get_form(self, request, obj=None, **kwargs):
"""
Use special form during user creation
"""
defaults = {}
if obj is None:
defaults.update({
'form': self.add_form,
'fields': admin.util.flatten_fieldsets(self.add_fieldsets),
})
defaults.update(kwargs)
return super(CustomUserAdmin, self).get_form(request, obj, **defaults)
def get_urls(self):
from django.conf.urls import url
return [
url(r'^(\d+)/password/$', self.admin_site.admin_view(
self.user_change_password))
] + super(CustomUserAdmin, self).get_urls()
def lookup_allowed(self, lookup, value):
# See #20078: we don't want to allow any lookups involving passwords.
if lookup.startswith('password'):
return False
return super(CustomUserAdmin, self).lookup_allowed(lookup, value)
@sensitive_post_parameters_m
@csrf_protect_m
@transaction.atomic()
def add_view(self, request, form_url='', extra_context=None):
# It's an error for a user to have add permission but NOT change
# permission for users. If we allowed such users to add users, they
# could create superusers, which would mean they would essentially have
# the permission to change users. To avoid the problem entirely, we
# disallow users from adding users if they don't have change
# permission.
if not self.has_change_permission(request):
if self.has_add_permission(request) and settings.DEBUG:
# Raise Http404 in debug mode so that the user gets a helpful
# error message.
raise Http404(
'Your user does not have the "Change user" permission. In '
'order to add users, Django requires that your user '
'account have both the "Add user" and "Change user" '
'permissions set.')
raise PermissionDenied
if extra_context is None:
extra_context = {}
username_field = self.model._meta.get_field(self.model.USERNAME_FIELD)
defaults = {
'auto_populated_fields': (),
'username_help_text': username_field.help_text,
}
extra_context.update(defaults)
return super(CustomUserAdmin, self).add_view(request, form_url,
extra_context)
@sensitive_post_parameters_m
def user_change_password(self, request, id, form_url=''):
if not self.has_change_permission(request):
raise PermissionDenied
user = get_object_or_404(self.queryset(request), pk=id)
if request.method == 'POST':
form = self.change_password_form(user, request.POST)
if form.is_valid():
form.save()
msg = ugettext('Password changed successfully.')
messages.success(request, msg)
return HttpResponseRedirect('..')
else:
form = self.change_password_form(user)
fieldsets = [(None, {'fields': list(form.base_fields)})]
adminForm = admin.helpers.AdminForm(form, fieldsets, {})
context = {
'title': _('Change password: %s') % escape(user.get_username()),
'adminForm': adminForm,
'form_url': form_url,
'form': form,
'is_popup': '_popup' in request.REQUEST,
'add': True,
'change': False,
'has_delete_permission': False,
'has_change_permission': True,
'has_absolute_url': False,
'opts': self.model._meta,
'original': user,
'save_as': False,
'show_save': True,
}
return TemplateResponse(request, self.change_user_password_template or
'admin/auth/user/change_password.html',
context, current_app=self.admin_site.name)
def response_add(self, request, obj, post_url_continue=None):
"""
Determines the HttpResponse for the add_view stage. It mostly defers to
its superclass implementation but is customized because the User model
has a slightly different workflow.
"""
# We should allow further modification of the user just added i.e. the
# 'Save' button should behave like the 'Save and continue editing'
# button except in two scenarios:
# * The user has pressed the 'Save and add another' button
# * We are adding a user in a popup
if '_addanother' not in request.POST and '_popup' not in request.POST:
request.POST['_continue'] = 1
return super(CustomUserAdmin, self).response_add(request, obj,
post_url_continue)
admin.site.register(CustomUser, CustomUserAdmin)
| 43.069892 | 79 | 0.641992 |
from django.db import transaction
from django.conf import settings
from django.contrib import admin
from django.contrib.auth.forms import AdminPasswordChangeForm
from django.contrib import messages
from django.core.exceptions import PermissionDenied
from django.http import HttpResponseRedirect, Http404
from django.shortcuts import get_object_or_404
from django.template.response import TemplateResponse
from django.utils.html import escape
from django.utils.decorators import method_decorator
from django.utils.translation import ugettext, ugettext_lazy as _
from django.views.decorators.csrf import csrf_protect
from django.views.decorators.debug import sensitive_post_parameters
from .forms import CustomUserChangeForm, CustomUserCreationForm
from .models import CustomUser
csrf_protect_m = method_decorator(csrf_protect)
sensitive_post_parameters_m = method_decorator(sensitive_post_parameters())
class CustomUserAdmin(admin.ModelAdmin):
    """Admin options for the custom user model.

    Mirrors django.contrib.auth.admin.UserAdmin, adapted to CustomUser's
    fields (full_name/short_name instead of first_name/last_name).
    """
    add_form_template = 'admin/auth/user/add_form.html'
    change_user_password_template = None
    fieldsets = (
        (None, {'fields': ('username', 'password')}),
        (_('Personal info'), {'fields': ('full_name', 'short_name', 'email')}),
        (_('Permissions'), {'fields': ('is_active', 'is_staff', 'is_superuser',
                                       'groups', 'user_permissions')}),
        (_('Important dates'), {'fields': ('last_login', 'date_joined')}),
    )
    # Reduced fieldset shown only on the "add user" page.
    add_fieldsets = (
        (None, {'classes': ('wide',),
                'fields': ('username', 'password1', 'password2')}),
    )
    form = CustomUserChangeForm
    add_form = CustomUserCreationForm
    change_password_form = AdminPasswordChangeForm
    list_display = ('username', 'email', 'full_name', 'short_name', 'is_staff')
    list_filter = ('is_staff', 'is_superuser', 'is_active', 'groups')
    search_fields = ('username', 'full_name', 'short_name', 'email')
    ordering = ('username',)
    filter_horizontal = ('groups', 'user_permissions',)

    def get_fieldsets(self, request, obj=None):
        """Use the reduced add_fieldsets when creating a new user."""
        if not obj:
            return self.add_fieldsets
        return super(CustomUserAdmin, self).get_fieldsets(request, obj)

    def get_form(self, request, obj=None, **kwargs):
        """Use a special creation form (and field list) during user add."""
        defaults = {}
        if obj is None:
            # NOTE(review): admin.util was renamed admin.utils in later
            # Django versions -- confirm this matches the pinned version.
            defaults.update({
                'form': self.add_form,
                'fields': admin.util.flatten_fieldsets(self.add_fieldsets),
            })
        defaults.update(kwargs)
        return super(CustomUserAdmin, self).get_form(request, obj, **defaults)

    def get_urls(self):
        """Add the change-password view to the standard admin URLs."""
        from django.conf.urls import url
        return [
            url(r'^(\d+)/password/$', self.admin_site.admin_view(
                self.user_change_password))
        ] + super(CustomUserAdmin, self).get_urls()

    def lookup_allowed(self, lookup, value):
        # Don't allow any lookups involving passwords (information leak).
        # BUGFIX: the previous body had been truncated to the stray token
        # 'alse' (a SyntaxError); restored per django.contrib.auth.admin.
        if lookup.startswith('password'):
            return False
        return super(CustomUserAdmin, self).lookup_allowed(lookup, value)

    @sensitive_post_parameters_m
    @csrf_protect_m
    @transaction.atomic()
    def add_view(self, request, form_url='', extra_context=None):
        # It's an error for a user to have add permission but NOT change
        # permission.
        if not self.has_change_permission(request):
            if self.has_add_permission(request) and settings.DEBUG:
                # Raise Http404 in debug mode so that the user gets a helpful
                # error message.
                raise Http404(
                    'Your user does not have the "Change user" permission. In '
                    'order to add users, Django requires that your user '
                    'account have both the "Add user" and "Change user" '
                    'permissions set.')
            raise PermissionDenied
        if extra_context is None:
            extra_context = {}
        username_field = self.model._meta.get_field(self.model.USERNAME_FIELD)
        defaults = {
            'auto_populated_fields': (),
            'username_help_text': username_field.help_text,
        }
        extra_context.update(defaults)
        return super(CustomUserAdmin, self).add_view(request, form_url,
                                                     extra_context)

    @sensitive_post_parameters_m
    def user_change_password(self, request, id, form_url=''):
        """Render and process the admin change-password form for one user."""
        if not self.has_change_permission(request):
            raise PermissionDenied
        user = get_object_or_404(self.queryset(request), pk=id)
        if request.method == 'POST':
            form = self.change_password_form(user, request.POST)
            if form.is_valid():
                form.save()
                msg = ugettext('Password changed successfully.')
                messages.success(request, msg)
                return HttpResponseRedirect('..')
        else:
            form = self.change_password_form(user)
        fieldsets = [(None, {'fields': list(form.base_fields)})]
        adminForm = admin.helpers.AdminForm(form, fieldsets, {})
        context = {
            'title': _('Change password: %s') % escape(user.get_username()),
            'adminForm': adminForm,
            'form_url': form_url,
            'form': form,
            # NOTE(review): request.REQUEST is removed in modern Django;
            # kept as-is for the version this module targets.
            'is_popup': '_popup' in request.REQUEST,
            'add': True,
            'change': False,
            'has_delete_permission': False,
            'has_change_permission': True,
            'has_absolute_url': False,
            'opts': self.model._meta,
            'original': user,
            'save_as': False,
            'show_save': True,
        }
        return TemplateResponse(request, self.change_user_password_template or
                                'admin/auth/user/change_password.html',
                                context, current_app=self.admin_site.name)

    def response_add(self, request, obj, post_url_continue=None):
        """Make 'Save' behave like 'Save and continue editing' after add."""
        # We should allow further modification of the user just added i.e. the
        # 'Save' button should behave like the 'Save and continue editing'
        # button except in two scenarios:
        # * The user has pressed the 'Save and add another' button
        # * We are adding a user in a popup
        if '_addanother' not in request.POST and '_popup' not in request.POST:
            request.POST['_continue'] = 1
        return super(CustomUserAdmin, self).response_add(request, obj,
                                                         post_url_continue)
# Expose the custom user model in the admin with the options defined above.
admin.site.register(CustomUser, CustomUserAdmin)
| true | true |
f738b5569e02938bb525d44a2a327da0723323ba | 1,273 | py | Python | paddlespeech/t2s/modules/transformer/repeat.py | xuesu/PaddleSpeech | 89e69ee10ee02b875af663146bc46fcf095e812a | [
"Apache-2.0"
] | 1 | 2022-02-07T02:53:58.000Z | 2022-02-07T02:53:58.000Z | paddlespeech/t2s/modules/transformer/repeat.py | ziwenag/PaddleSpeech | 89e69ee10ee02b875af663146bc46fcf095e812a | [
"Apache-2.0"
] | null | null | null | paddlespeech/t2s/modules/transformer/repeat.py | ziwenag/PaddleSpeech | 89e69ee10ee02b875af663146bc46fcf095e812a | [
"Apache-2.0"
] | null | null | null | # Copyright (c) 2021 PaddlePaddle Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# Modified from espnet(https://github.com/espnet/espnet)
"""Repeat the same layer definition."""
import paddle
class MultiSequential(paddle.nn.Sequential):
    """Multi-input multi-output paddle.nn.Sequential.

    Each contained layer's return value is unpacked and forwarded as the
    positional arguments of the next layer, so layers may accept and return
    multiple values (unlike the stock single-tensor Sequential).
    """

    def forward(self, *args):
        """Feed ``args`` through every contained layer in order."""
        for m in self:
            # assumes each layer returns a tuple suitable for unpacking
            # into the next layer's positional args -- TODO confirm
            args = m(*args)
        return args
def repeat(N, fn):
    """Build a module consisting of N repeated layers.

    Parameters
    ----------
    N : int
        Number of repeat time.
    fn : Callable
        Function to generate module.

    Returns
    ----------
    MultiSequential
        Repeated model instance.
    """
    layers = (fn(index) for index in range(N))
    return MultiSequential(*layers)
| 28.288889 | 74 | 0.671642 |
import paddle
class MultiSequential(paddle.nn.Sequential):
    """Multi-input multi-output paddle.nn.Sequential."""
    def forward(self, *args):
        """Feed ``args`` through every contained layer in order."""
        for m in self:
            args = m(*args)
        return args
def repeat(N, fn):
    """Return a MultiSequential of the N modules produced by fn(0..N-1)."""
    return MultiSequential(*[fn(n) for n in range(N)])
| true | true |
f738b58dd1e7432a7a1618a360877d41411e8813 | 1,199 | py | Python | wsitools/examples/patch_extraction_pl.py | m081429/wsitools | d521b4b1484aaf96628b3d83e51388cb699aa11d | [
"Apache-2.0"
] | 5 | 2020-01-24T02:58:25.000Z | 2022-02-06T08:14:32.000Z | wsitools/examples/patch_extraction_pl.py | m081429/wsitools | d521b4b1484aaf96628b3d83e51388cb699aa11d | [
"Apache-2.0"
] | null | null | null | wsitools/examples/patch_extraction_pl.py | m081429/wsitools | d521b4b1484aaf96628b3d83e51388cb699aa11d | [
"Apache-2.0"
] | null | null | null | from wsitools.tissue_detection.tissue_detector import TissueDetector
from wsitools.patch_extraction.feature_map_creator import FeatureMapCreator
from wsitools.wsi_annotation.region_annotation import AnnotationRegions
from wsitools.patch_extraction.patch_extractor import ExtractorParameters, PatchExtractor
wsi_fn = "/projects/shart/digital_pathology/data/PenMarking/WSIs/MELF/e39a8d60a56844d695e9579bce8f0335.tiff" # WSI file name
output_dir = "/projects/shart/digital_pathology/data/PenMarking/temp"
tissue_detector = TissueDetector("LAB_Threshold", threshold=85) #
fm = FeatureMapCreator("../patch_extraction/feature_maps/basic_fm_PL_eval.csv") # use this template to create feature map
xml_fn = "../wsi_annotation/examples/e39a8d60a56844d695e9579bce8f0335.xml"
class_label_id_csv = "../wsi_annotation/examples/class_label_id.csv"
annotations = AnnotationRegions(xml_fn, class_label_id_csv)
parameters = ExtractorParameters(output_dir, save_format='.tfrecord', sample_cnt=-1)
patch_extractor = PatchExtractor(tissue_detector, parameters, feature_map=fm, annotations=annotations)
patch_num = patch_extractor.extract(wsi_fn)
print("%d Patches have been save to %s" % (patch_num, output_dir))
| 63.105263 | 125 | 0.842369 | from wsitools.tissue_detection.tissue_detector import TissueDetector
from wsitools.patch_extraction.feature_map_creator import FeatureMapCreator
from wsitools.wsi_annotation.region_annotation import AnnotationRegions
from wsitools.patch_extraction.patch_extractor import ExtractorParameters, PatchExtractor
wsi_fn = "/projects/shart/digital_pathology/data/PenMarking/WSIs/MELF/e39a8d60a56844d695e9579bce8f0335.tiff"
output_dir = "/projects/shart/digital_pathology/data/PenMarking/temp"
tissue_detector = TissueDetector("LAB_Threshold", threshold=85)
fm = FeatureMapCreator("../patch_extraction/feature_maps/basic_fm_PL_eval.csv")
xml_fn = "../wsi_annotation/examples/e39a8d60a56844d695e9579bce8f0335.xml"
class_label_id_csv = "../wsi_annotation/examples/class_label_id.csv"
annotations = AnnotationRegions(xml_fn, class_label_id_csv)
parameters = ExtractorParameters(output_dir, save_format='.tfrecord', sample_cnt=-1)
patch_extractor = PatchExtractor(tissue_detector, parameters, feature_map=fm, annotations=annotations)
patch_num = patch_extractor.extract(wsi_fn)
print("%d Patches have been save to %s" % (patch_num, output_dir))
| true | true |
f738b672b45295be65688dc6d316c28ccad69603 | 28,020 | py | Python | tests/test_bndfun.py | KennyKangMPC/chebpy | 5ad603b15f90a0f36093f1705e3e08d090330cef | [
"BSD-3-Clause"
] | 2 | 2020-05-06T00:14:59.000Z | 2021-02-15T03:18:16.000Z | tests/test_bndfun.py | KennyKangMPC/chebpy | 5ad603b15f90a0f36093f1705e3e08d090330cef | [
"BSD-3-Clause"
] | null | null | null | tests/test_bndfun.py | KennyKangMPC/chebpy | 5ad603b15f90a0f36093f1705e3e08d090330cef | [
"BSD-3-Clause"
] | null | null | null | # -*- coding: utf-8 -*-
"""Unit-tests for pyfun/core/bndfun.py"""
from __future__ import division
import itertools
import operator
import unittest
import numpy as np
import matplotlib.pyplot as plt
from chebpy.core.bndfun import Bndfun
from chebpy.core.chebtech import Chebtech2
from chebpy.core.settings import DefaultPrefs
from chebpy.core.utilities import Interval
from chebpy.core.algorithms import standard_chop
from tests.utilities import testfunctions, infnorm
# aliases: module-level shorthands used throughout the test tables below
pi = np.pi
sin = np.sin
cos = np.cos
exp = np.exp
eps = DefaultPrefs.eps  # machine-epsilon-scale unit for tolerances
# NOTE: since (Fun/ClassicFun/)Bndfun is not a user-facing class (although it
# is not abstract) we will test the interface in the way Chebfun will interact
# with it, which means working explcitly with Interval objects. Furthermore,
# since we have already tested the adaptive constructor in the Chebtech-level
# tests, we just use the adaptive constructor in these tests.
class ClassUsage(unittest.TestCase):
    """Unit-tests for miscellaneous Bndfun class usage"""

    def setUp(self):
        f = lambda x: sin(30*x)
        subinterval = Interval(-2,3)
        self.f = f
        self.ff = Bndfun.initfun_adaptive(f, subinterval)
        self.xx = subinterval(np.linspace(-1,1,100))
        self.emptyfun = Bndfun(Chebtech2.initempty(), subinterval)
        self.constfun = Bndfun(Chebtech2.initconst(1.), subinterval)

    # tests for emptiness of Bndfun objects
    def test_isempty_True(self):
        self.assertTrue(self.emptyfun.isempty)
        self.assertFalse(not self.emptyfun.isempty)

    def test_isempty_False(self):
        self.assertFalse(self.constfun.isempty)
        self.assertTrue(not self.constfun.isempty)

    # tests for constantness of Bndfun objects
    def test_isconst_True(self):
        self.assertTrue(self.constfun.isconst)
        self.assertFalse(not self.constfun.isconst)

    def test_isconst_False(self):
        self.assertFalse(self.emptyfun.isconst)
        self.assertTrue(not self.emptyfun.isconst)

    # check the size() method is working properly
    # (assertEquals is a deprecated alias removed in Python 3.12, so the
    # modern assertEqual spelling is used throughout)
    def test_size(self):
        cfs = np.random.rand(10)
        subinterval = Interval()
        b0 = Bndfun(Chebtech2(np.array([])), subinterval)
        b1 = Bndfun(Chebtech2(np.array([1.])), subinterval)
        b2 = Bndfun(Chebtech2(cfs), subinterval)
        self.assertEqual(b0.size, 0)
        self.assertEqual(b1.size, 1)
        self.assertEqual(b2.size, cfs.size)

    def test_support(self):
        a, b = self.ff.support
        self.assertEqual(a, -2)
        self.assertEqual(b, 3)

    def test_endvalues(self):
        a, b = self.ff.support
        fa, fb = self.ff.endvalues
        self.assertLessEqual(abs(fa-self.f(a)), 2e1*eps)
        self.assertLessEqual(abs(fb-self.f(b)), 2e1*eps)

    # test the different permutations of self(xx, ..)
    def test_call(self):
        self.ff(self.xx)

    def test_call_bary(self):
        self.ff(self.xx, "bary")
        self.ff(self.xx, how="bary")

    def test_call_clenshaw(self):
        self.ff(self.xx, "clenshaw")
        self.ff(self.xx, how="clenshaw")

    def test_call_bary_vs_clenshaw(self):
        b = self.ff(self.xx, "clenshaw")
        c = self.ff(self.xx, "bary")
        self.assertLessEqual(infnorm(b-c), 2e2*eps)

    def test_call_raises(self):
        self.assertRaises(ValueError, self.ff, self.xx, "notamethod")
        self.assertRaises(ValueError, self.ff, self.xx, how="notamethod")

    def test_vscale_empty(self):
        self.assertEqual(self.emptyfun.vscale, 0.)

    def test_copy(self):
        ff = self.ff
        gg = self.ff.copy()
        self.assertEqual(ff, ff)
        self.assertEqual(gg, gg)
        self.assertNotEqual(ff, gg)
        self.assertEqual(infnorm(ff.coeffs-gg.coeffs), 0)

    # check that the restricted fun matches self on the subinterval
    def test_restrict(self):
        i1 = Interval(-1,1)
        gg = self.ff.restrict(i1)
        yy = np.linspace(-1,1,1000)
        self.assertLessEqual(infnorm(self.ff(yy)-gg(yy)), 1e2*eps)

    # check that simplify chops to the standard length and keeps the interval
    def test_simplify(self):
        interval = Interval(-2,1)
        ff = Bndfun.initfun_fixedlen(self.f, interval, 1000)
        gg = ff.simplify()
        self.assertEqual(gg.size, standard_chop(ff.onefun.coeffs))
        self.assertEqual(infnorm(ff.coeffs[:gg.size]-gg.coeffs), 0)
        self.assertEqual(ff.interval, gg.interval)
# --------------------------------------
# vscale estimates
# --------------------------------------
vscales = [
    # (function, interval, expected vscale)
    (lambda x: sin(4*pi*x), [-2, 2], 1),
    (lambda x: cos(x), [-10, 1], 1),
    (lambda x: cos(4*pi*x), [-100, 100], 1),
    (lambda x: exp(cos(4*pi*x)), [-1,1], exp(1)),
    (lambda x: cos(3244*x), [-2,0], 1),
    (lambda x: exp(x), [-1,2], exp(2)),
    (lambda x: 1e10*exp(x), [-1,1], 1e10*exp(1)),
    (lambda x: 0*x+1., [-1e5,1e4], 1),
]

def vscaleTester(fun, interval, vscale):
    """Build a test checking ff.vscale is within 10% of the expected value.

    Renamed from definiteIntegralTester: the old name was misleading (this
    checks the vertical-scale estimate, not an integral) and was shadowed
    by the genuine definite-integral tester defined further below.
    """
    subinterval = Interval(*interval)
    ff = Bndfun.initfun_adaptive(fun, subinterval)
    def tester(self):
        absdiff = abs(ff.vscale-vscale)
        self.assertLessEqual(absdiff, .1*vscale)
    return tester

for k, args in enumerate(vscales):
    _testfun_ = vscaleTester(*args)
    _testfun_.__name__ = "test_vscale_{:02}".format(k)
    setattr(ClassUsage, _testfun_.__name__, _testfun_)
class Plotting(unittest.TestCase):
    """Unit-tests for Bndfun plotting methods.

    These are smoke tests only: they assert nothing about the rendered
    output, just that plot/plotcoeffs run without raising.
    """

    def setUp(self):
        f = lambda x: sin(1*x) + 5e-1*cos(10*x) + 5e-3*sin(100*x)
        subinterval = Interval(-6, 10)
        self.f0 = Bndfun.initfun_fixedlen(f, subinterval, 1000)
        self.f1 = Bndfun.initfun_adaptive(f, subinterval)

    def test_plot(self):
        fig, ax = plt.subplots()
        self.f0.plot(ax=ax, color="g", marker="o", markersize=2, linestyle="")

    def test_plotcoeffs(self):
        fig, ax = plt.subplots()
        self.f0.plotcoeffs(ax=ax)
        self.f1.plotcoeffs(ax=ax, color="r")
class Calculus(unittest.TestCase):
    """Unit-tests for Bndfun calculus operations.

    Only the empty-input edge cases live in the class body; the numerical
    sum/cumsum/diff tests are generated at module level below and attached
    to this class via setattr.
    """

    def setUp(self):
        self.emptyfun = Bndfun(Chebtech2.initempty(), Interval())
        self.yy = np.linspace(-1,1,2000)
#        self.constfun = Bndfun(Chebtech2.initconst(1.), subinterval)

    # tests for the correct results in the empty cases
    def test_sum_empty(self):
        self.assertEqual(self.emptyfun.sum(), 0)

    def test_cumsum_empty(self):
        self.assertTrue(self.emptyfun.cumsum().isempty)

    def test_diff_empty(self):
        self.assertTrue(self.emptyfun.diff().isempty)
# --------------------------------------
# definite integrals
# --------------------------------------
def_integrals = [
    # (function, interval, integral, tolerance)
    (lambda x: sin(x), [-2,2], .0, 2*eps),
    (lambda x: sin(4*pi*x), [-.1, .7], 0.088970317927147, 1e1*eps),
    (lambda x: cos(x), [-100,203], 0.426944059057085, 4e2*eps),
    (lambda x: cos(4*pi*x), [-1e-1,-1e-3], 0.074682699182803, 2*eps),
    (lambda x: exp(cos(4*pi*x)), [-3,1], 5.064263511008033, 4*eps),
    (lambda x: cos(3244*x), [0,0.4], -3.758628487169980e-05, 5e2*eps),
    (lambda x: exp(x), [-2,-1], exp(-1)-exp(-2), 2*eps),
    (lambda x: 1e10*exp(x), [-1,2], 1e10*(exp(2)-exp(-1)), 2e10*eps),
    (lambda x: 0*x+1., [-100,300], 400, eps),
]

def definiteIntegralTester(fun, interval, integral, tol):
    """Build a test comparing ff.sum() with the reference integral."""
    subinterval = Interval(*interval)
    ff = Bndfun.initfun_adaptive(fun, subinterval)
    def tester(self):
        absdiff = abs(ff.sum()-integral)
        self.assertLessEqual(absdiff, tol)
    return tester

# attach one generated test per table row to the Calculus TestCase
for k, (fun, n, integral, tol) in enumerate(def_integrals):
    _testfun_ = definiteIntegralTester(fun, n, integral, tol)
    _testfun_.__name__ = "test_sum_{:02}".format(k)
    setattr(Calculus, _testfun_.__name__, _testfun_)
# --------------------------------------
# indefinite integrals
# --------------------------------------
indef_integrals = [
    # (function, indefinite integral, interval, tolerance)
    (lambda x: 0*x+1., lambda x: x, [-2,3], eps),
    (lambda x: x, lambda x: 1/2*x**2, [-5,0], 4*eps),
    (lambda x: x**2, lambda x: 1/3*x**3, [1,10], 2e2*eps),
    (lambda x: x**3, lambda x: 1/4*x**4, [-1e-2,4e-1], 2*eps),
    (lambda x: x**4, lambda x: 1/5*x**5, [-3,-2], 3e2*eps),
    (lambda x: x**5, lambda x: 1/6*x**6, [-1e-10,1], 4*eps),
    (lambda x: sin(x), lambda x: -cos(x), [-10,22], 3e1*eps),
    (lambda x: cos(3*x), lambda x: 1./3*sin(3*x), [-3,4], 2*eps),
    (lambda x: exp(x), lambda x: exp(x), [-60,1], 1e1*eps),
    (lambda x: 1e10*exp(x), lambda x: 1e10*exp(x), [-1,1], 1e10*(3*eps)),
]

def indefiniteIntegralTester(fun, ifn, interval, tol):
    """Build a test comparing cumsum coefficients against the reference.

    The zeroth coefficient is shifted so the reference antiderivative
    vanishes at the left endpoint, matching cumsum's normalisation.
    """
    subinterval = Interval(*interval)
    ff = Bndfun.initfun_adaptive(fun, subinterval)
    gg = Bndfun.initfun_fixedlen(ifn, subinterval, ff.size+1)
    coeffs = gg.coeffs
    # NOTE(review): if .coeffs returns the internal array this mutates gg
    # in place; harmless here since gg is not used again -- confirm.
    coeffs[0] = coeffs[0] - ifn(np.array([interval[0]]))
    def tester(self):
        absdiff = infnorm(ff.cumsum().coeffs - coeffs)
        self.assertLessEqual(absdiff, tol)
    return tester

# attach one generated test per table row to the Calculus TestCase
for k, (fun, dfn, n, tol) in enumerate(indef_integrals):
    _testfun_ = indefiniteIntegralTester(fun, dfn, n, tol)
    _testfun_.__name__ = "test_cumsum_{:02}".format(k)
    setattr(Calculus, _testfun_.__name__, _testfun_)
# --------------------------------------
# derivatives
# --------------------------------------
derivatives = [
    # (function, derivative, interval, tolerance)
    (lambda x: 0*x+1., lambda x: 0*x+0, [-2,3], eps),
    (lambda x: x, lambda x: 0*x+1, [-5,0], 2e1*eps),
    (lambda x: x**2, lambda x: 2*x, [1,10], 2e2*eps),
    (lambda x: x**3, lambda x: 3*x**2, [-1e-2,4e-1], 3*eps),
    (lambda x: x**4, lambda x: 4*x**3, [-3,-2], 1e3*eps),
    (lambda x: x**5, lambda x: 5*x**4, [-1e-10,1], 4e1*eps),
    (lambda x: sin(x), lambda x: cos(x), [-10,22], 5e2*eps),
    (lambda x: cos(3*x), lambda x: -3*sin(3*x), [-3,4], 5e2*eps),
    (lambda x: exp(x), lambda x: exp(x), [-60,1], 2e2*eps),
    (lambda x: 1e10*exp(x), lambda x: 1e10*exp(x), [-1,1], 1e10*2e2*eps),
]

def derivativeTester(fun, ifn, interval, tol):
    """Build a test comparing ff.diff() coefficients with the reference."""
    subinterval = Interval(*interval)
    ff = Bndfun.initfun_adaptive(fun, subinterval)
    gg = Bndfun.initfun_fixedlen(ifn, subinterval, max(ff.size-1,1))
    def tester(self):
        absdiff = infnorm(ff.diff().coeffs - gg.coeffs)
        self.assertLessEqual(absdiff, tol)
    return tester

# attach one generated test per table row to the Calculus TestCase
for k, (fun, der, n, tol) in enumerate(derivatives):
    _testfun_ = derivativeTester(fun, der, n, tol)
    _testfun_.__name__ = "test_diff_{:02}".format(k)
    setattr(Calculus, _testfun_.__name__, _testfun_)
class Construction(unittest.TestCase):
    """Unit-tests for construction of Bndfun objects.

    Uses assertEqual throughout: assertEquals is a deprecated alias that
    was removed in Python 3.12.
    """

    def test_onefun_construction(self):
        coeffs = np.random.rand(10)
        subinterval = Interval()
        onefun = Chebtech2(coeffs)
        f = Bndfun(onefun, subinterval)
        self.assertIsInstance(f, Bndfun)
        self.assertLess(infnorm(f.coeffs-coeffs), eps)

    def test_const_construction(self):
        subinterval = Interval()
        ff = Bndfun.initconst(1., subinterval)
        self.assertEqual(ff.size, 1)
        self.assertTrue(ff.isconst)
        self.assertFalse(ff.isempty)
        self.assertRaises(ValueError, Bndfun.initconst, [1.], subinterval)

    def test_empty_construction(self):
        ff = Bndfun.initempty()
        self.assertEqual(ff.size, 0)
        self.assertFalse(ff.isconst)
        self.assertTrue(ff.isempty)
        self.assertRaises(TypeError, Bndfun.initempty, [1.])

    def test_identity_construction(self):
        for (a,b) in [(-1,1), (-10,-2), (-2.3, 1.24), (20,2000)]:
            itvl = Interval(a,b)
            ff = Bndfun.initidentity(itvl)
            self.assertEqual(ff.size, 2)
            xx = np.linspace(a,b,1001)
            # tolerance scales with the interval's horizontal scale
            tol = eps * abs(itvl).max()
            self.assertLessEqual(infnorm(ff(xx)-xx), tol)
def adaptiveTester(fun, subinterval, funlen):
    """Build a test asserting the adaptive constructor selects funlen coeffs."""
    ff = Bndfun.initfun_adaptive(fun, subinterval)
    def tester(self):
        # assertEqual replaces the deprecated assertEquals alias
        self.assertEqual(ff.size, funlen)
    return tester
def fixedlenTester(fun, subinterval, n):
    """Build a test asserting the fixed-length constructor yields n coeffs."""
    ff = Bndfun.initfun_fixedlen(fun, subinterval, n)
    def tester(self):
        # assertEqual replaces the deprecated assertEquals alias
        self.assertEqual(ff.size, n)
    return tester
funs = []  # NOTE(review): appears unused below -- candidate for removal
fun_details = [
    # (function, name for the test printouts,
    #  interval, Matlab chebfun adaptive degree on [-2,3])
    (lambda x: x**3 + x**2 + x + 1, "poly3(x)", [-2,3], 4),
    (lambda x: exp(x), "exp(x)", [-2,3], 20),
    (lambda x: sin(x), "sin(x)", [-2,3], 20),
    (lambda x: cos(20*x), "cos(20x)", [-2,3], 90),
    (lambda x: 0.*x+1., "constfun", [-2,3], 1),
    (lambda x: 0.*x, "zerofun", [-2,3], 1),
]
# attach adaptive- and fixed-length-construction tests to Construction
for k, (fun, name, interval, funlen) in enumerate(fun_details):
    fun.__name__ = name
    subinterval = Interval(*interval)
    # add the adaptive tests
    _testfun_ = adaptiveTester(fun, subinterval, funlen)
    _testfun_.__name__ = "test_adaptive_{}".format(fun.__name__)
    setattr(Construction, _testfun_.__name__, _testfun_)
    # add the fixedlen tests
    for n in np.array([100]):
        _testfun_ = fixedlenTester(fun, subinterval, n)
        _testfun_.__name__ = \
            "test_fixedlen_{}_{:003}pts".format(fun.__name__, n)
        setattr(Construction, _testfun_.__name__, _testfun_)
class Algebra(unittest.TestCase):
    """Unit-tests for Bndfun algebraic operations"""

    def setUp(self):
        self.yy = np.linspace(-1,1,1000)
        self.emptyfun = Bndfun.initempty()

    # check (empty Bndfun) + (Bndfun) = (empty Bndfun)
    #   and (Bndfun) + (empty Bndfun) = (empty Bndfun)
    def test__add__radd__empty(self):
        subinterval = Interval(-2,3)
        for (fun, _, _) in testfunctions:
            chebtech = Bndfun.initfun_adaptive(fun, subinterval)
            self.assertTrue((self.emptyfun+chebtech).isempty)
            self.assertTrue((chebtech+self.emptyfun).isempty)

    # check the output of (constant + Bndfun)
    #                 and (Bndfun + constant)
    def test__add__radd__constant(self):
        subinterval = Interval(-.5,.9)
        xx = subinterval(self.yy)
        for (fun, _, _) in testfunctions:
            for const in (-1, 1, 10, -1e5):
                f = lambda x: const + fun(x)
                bndfun = Bndfun.initfun_adaptive(fun, subinterval)
                f1 = const + bndfun
                f2 = bndfun + const
                tol = 4e1 * eps * abs(const)
                self.assertLessEqual(infnorm(f(xx)-f1(xx)), tol)
                self.assertLessEqual(infnorm(f(xx)-f2(xx)), tol)

    # check (empty Bndfun) - (Bndfun) = (empty Bndfun)
    #   and (Bndfun) - (empty Bndfun) = (empty Bndfun)
    def test__sub__rsub__empty(self):
        subinterval = Interval(-2,3)
        for (fun, _, _) in testfunctions:
            chebtech = Bndfun.initfun_adaptive(fun, subinterval)
            self.assertTrue((self.emptyfun-chebtech).isempty)
            self.assertTrue((chebtech-self.emptyfun).isempty)

    # check the output of constant - Bndfun
    #                 and Bndfun - constant
    def test__sub__rsub__constant(self):
        subinterval = Interval(-.5,.9)
        xx = subinterval(self.yy)
        for (fun, _, _) in testfunctions:
            for const in (-1, 1, 10, -1e5):
                bndfun = Bndfun.initfun_adaptive(fun, subinterval)
                f = lambda x: const - fun(x)
                g = lambda x: fun(x) - const
                ff = const - bndfun
                gg = bndfun - const
                tol = 5e1 * eps * abs(const)
                self.assertLessEqual(infnorm(f(xx)-ff(xx)), tol)
                self.assertLessEqual(infnorm(g(xx)-gg(xx)), tol)

    # check (empty Bndfun) * (Bndfun) = (empty Bndfun)
    #   and (Bndfun) * (empty Bndfun) = (empty Bndfun)
    def test__mul__rmul__empty(self):
        subinterval = Interval(-2,3)
        for (fun, _, _) in testfunctions:
            chebtech = Bndfun.initfun_adaptive(fun, subinterval)
            self.assertTrue((self.emptyfun*chebtech).isempty)
            self.assertTrue((chebtech*self.emptyfun).isempty)

    # check the output of constant * Bndfun
    #                 and Bndfun * constant
    def test__mul__rmul__constant(self):
        subinterval = Interval(-.5,.9)
        xx = subinterval(self.yy)
        for (fun, _, _) in testfunctions:
            for const in (-1, 1, 10, -1e5):
                bndfun = Bndfun.initfun_adaptive(fun, subinterval)
                f = lambda x: const * fun(x)
                g = lambda x: fun(x) * const
                ff = const * bndfun
                gg = bndfun * const
                tol = 4e1 * eps * abs(const)
                self.assertLessEqual(infnorm(f(xx)-ff(xx)), tol)
                self.assertLessEqual(infnorm(g(xx)-gg(xx)), tol)

    # check (empty Bndfun) / (Bndfun) = (empty Bndfun)
    #   and (Bndfun) / (empty Bndfun) = (empty Bndfun)
    def test_truediv_empty(self):
        subinterval = Interval(-2,3)
        for (fun, _, _) in testfunctions:
            bndfun = Bndfun.initfun_adaptive(fun, subinterval)
            self.assertTrue(operator.truediv(self.emptyfun, bndfun).isempty)
            # BUGFIX: this line previously duplicated the one above; it is
            # meant to check the reversed operand order as well.
            self.assertTrue(operator.truediv(bndfun, self.emptyfun).isempty)
            # __truediv__
            self.assertTrue((self.emptyfun/bndfun).isempty)
            self.assertTrue((bndfun/self.emptyfun).isempty)

    # check the output of constant / Bndfun
    #                 and Bndfun / constant
    def test_truediv_constant(self):
        subinterval = Interval(-.5,.9)
        xx = subinterval(self.yy)
        for (fun, _, hasRoots) in testfunctions:
            for const in (-1, 1, 10, -1e5):
                hscl = abs(subinterval).max()
                tol = hscl * eps * abs(const)
                bndfun = Bndfun.initfun_adaptive(fun, subinterval)
                g = lambda x: fun(x) / const
                gg = bndfun / const
                self.assertLessEqual(infnorm(g(xx)-gg(xx)), 3*gg.size*tol)
                # don't do the following test for functions with roots
                if not hasRoots:
                    f = lambda x: const / fun(x)
                    ff = const / bndfun
                    self.assertLessEqual(infnorm(f(xx)-ff(xx)), 2*ff.size*tol)

    # check +(empty Bndfun) = (empty Bndfun)
    def test__pos__empty(self):
        self.assertTrue((+self.emptyfun).isempty)

    # check -(empty Bndfun) = (empty Bndfun)
    def test__neg__empty(self):
        self.assertTrue((-self.emptyfun).isempty)

    # check (empty Bndfun) ** c = (empty Bndfun)
    def test_pow_empty(self):
        for c in range(10):
            self.assertTrue((self.emptyfun**c).isempty)

    # check c ** (empty Bndfun) = (empty Bndfun)
    def test_rpow_empty(self):
        for c in range(10):
            self.assertTrue((c**self.emptyfun).isempty)

    # check the output of Bndfun ** constant
    def test_pow_const(self):
        subinterval = Interval(-.5,.9)
        xx = subinterval(self.yy)
        for func in (np.sin, np.exp, np.cos):
            for c in (1, 2):
                f = lambda x: func(x) ** c
                ff = Bndfun.initfun_adaptive(func, subinterval) ** c
                tol = 2e1 * eps * abs(c)
                self.assertLessEqual(infnorm(f(xx)-ff(xx)), tol)

    # check the output of constant ** Bndfun
    def test_rpow_const(self):
        subinterval = Interval(-.5,.9)
        xx = subinterval(self.yy)
        for func in (np.sin, np.exp, np.cos):
            for c in (1, 2):
                f = lambda x: c ** func(x)
                ff = c ** Bndfun.initfun_adaptive(func, subinterval)
                tol = 1e1 * eps * abs(c)
                self.assertLessEqual(infnorm(f(xx)-ff(xx)), tol)
binops = (operator.add, operator.mul, operator.sub, operator.truediv)

# add tests for the binary operators
def binaryOpTester(f, g, subinterval, binop):
    """Build a test comparing binop(ff, gg) against binop applied pointwise."""
    ff = Bndfun.initfun_adaptive(f, subinterval)
    gg = Bndfun.initfun_adaptive(g, subinterval)
    FG = lambda x: binop(f(x),g(x))
    fg = binop(ff, gg)
    def tester(self):
        # tolerance scales with vertical scale and size of the operands
        vscl = max([ff.vscale, gg.vscale])
        lscl = max([ff.size, gg.size])
        xx = subinterval(self.yy)
        self.assertLessEqual(infnorm(fg(xx)-FG(xx)), 6*vscl*lscl*eps)
    return tester

# Note: defining __radd__(a,b) = __add__(b,a) and feeding this into the
# test will not in fact test the __radd__ functionality of the class.
# These tests will need to be added manually.

subintervals = (
    Interval(-.5,.9),
    Interval(-1.2, 1.3),
    Interval(-2.2, -1.9),
    Interval(0.4, 1.3),
)

for binop in binops:
    # add the generic binary operator tests
    for (f, _, _), (g, _, denomRoots) in \
            itertools.combinations(testfunctions, 2):
        for subinterval in subintervals:
            if binop is operator.truediv and denomRoots:
                # skip truediv test if denominator has roots on the real line
                pass
            else:
                _testfun_ = binaryOpTester(f, g, subinterval, binop)
                a, b = subinterval
                _testfun_.__name__ = \
                    "test_{}_{}_{}_[{:.1f},{:.1f}]".format(
                        binop.__name__, f.__name__, g.__name__, a, b)
                setattr(Algebra, _testfun_.__name__, _testfun_)

powtestfuns = (
    [(np.exp, 'exp'), (np.sin, 'sin')],
    [(np.exp, 'exp'), (lambda x: 2-x, 'linear')],
    [(lambda x: 2-x, 'linear'), (np.exp, 'exp')],
)
# add operator.power tests
for (f, namef), (g, nameg) in powtestfuns:
    for subinterval in subintervals:
        _testfun_ = binaryOpTester(f, g, subinterval, operator.pow)
        a, b = subinterval
        _testfun_.__name__ = \
            "test_{}_{}_{}_[{:.1f},{:.1f}]".format(
                'pow', namef, nameg, a, b)
        setattr(Algebra, _testfun_.__name__, _testfun_)

unaryops = (operator.pos, operator.neg)
# add tests for the unary operators
def unaryOpTester(unaryop, f, subinterval):
    """Build a test comparing unaryop(ff) against unaryop applied pointwise."""
    ff = Bndfun.initfun_adaptive(f, subinterval)
    gg = lambda x: unaryop(f(x))
    GG = unaryop(ff)
    def tester(self):
        xx = subinterval(self.yy)
        self.assertLessEqual(infnorm(gg(xx)-GG(xx)), 4e1*eps)
    return tester

for unaryop in unaryops:
    for (f, _, _) in testfunctions:
        subinterval = Interval(-.5,.9)
        _testfun_ = unaryOpTester(unaryop, f, subinterval)
        _testfun_.__name__ = \
            "test_{}_{}".format(unaryop.__name__, f.__name__)
        setattr(Algebra, _testfun_.__name__, _testfun_)
class Ufuncs(unittest.TestCase):
    """Unit-tests for Bndfun numpy ufunc overloads.

    The class body holds only the shared fixtures; the actual tests are
    generated at module level below and attached via setattr.
    """

    def setUp(self):
        self.yy = np.linspace(-1,1,1000)
        self.emptyfun = Bndfun.initempty()
# every ufunc overload exercised by the generated tests below
ufuncs = (np.absolute, np.arccos, np.arccosh, np.arcsin, np.arcsinh, np.arctan,
          np.arctanh, np.cos, np.cosh, np.exp, np.exp2, np.expm1, np.log,
          np.log2, np.log10, np.log1p, np.sinh, np.sin, np.tan, np.tanh,
          np.sqrt)

# empty-case tests: each ufunc applied to an empty Bndfun stays empty
def ufuncEmptyCaseTester(ufunc):
    def tester(self):
        self.assertTrue(getattr(self.emptyfun, ufunc.__name__)().isempty)
    return tester

for ufunc in ufuncs:
    _testfun_ = ufuncEmptyCaseTester(ufunc)
    _testfun_.__name__ = "test_emptycase_{}".format(ufunc.__name__)
    setattr(Ufuncs, _testfun_.__name__, _testfun_)
# TODO: Add more test cases
# add ufunc tests:
#     (ufunc, [([fun1, interval1], tol1), ([fun2, interval2], tol2), ... ])

# named base functions used in the table below; __name__ is set explicitly
# so it can appear in the generated test names
uf1 = lambda x: x
uf1.__name__ = "x"
uf2 = lambda x: sin(x-.5)
uf2.__name__ = "sin(x-.5)"
uf3 = lambda x: sin(25*x-1)
uf3.__name__ = "sin(25*x-1)"

# intervals are restricted to each ufunc's domain (e.g. (2,3) for log/sqrt)
ufunc_test_params = [
    (np.absolute, [([uf1, (-3,-.5)], eps), ]),
    (np.arccos, [([uf1, (-.8,.8)], eps), ]),
    (np.arccosh, [([uf1, (2,3) ], eps), ]),
    (np.arcsin, [([uf1, (-.8,.8)], eps), ]),
    (np.arcsinh, [([uf1, (2,3) ], eps), ]),
    (np.arctan, [([uf1, (-.8,.8)], eps), ]),
    (np.arctanh, [([uf1, (-.8,.8)], eps), ]),
    (np.cos, [([uf1, (-3,3) ], eps), ]),
    (np.cosh, [([uf1, (-3,3) ], eps), ]),
    (np.exp, [([uf1, (-3,3) ], eps), ]),
    (np.exp2, [([uf1, (-3,3) ], eps), ]),
    (np.expm1, [([uf1, (-3,3) ], eps), ]),
    (np.log, [([uf1, (2,3) ], eps), ]),
    (np.log2, [([uf1, (2,3) ], eps), ]),
    (np.log10, [([uf1, (2,3) ], eps), ]),
    (np.log1p, [([uf1, (-.8,.8)], eps), ]),
    (np.sinh, [([uf1, (-3,3) ], eps), ]),
    (np.sin, [([uf1, (-3,3) ], eps), ]),
    (np.tan, [([uf1, (-.8,.8)], eps), ]),
    (np.tanh, [([uf1, (-3,3) ], eps), ]),
    (np.sqrt, [([uf1, (2,3) ], eps), ]),
    (np.cos, [([uf2, (-3,3) ], eps), ]),
    (np.cosh, [([uf2, (-3,3) ], eps), ]),
    (np.exp, [([uf2, (-3,3) ], eps), ]),
    (np.expm1, [([uf2, (-3,3) ], eps), ]),
    (np.sinh, [([uf2, (-3,3) ], eps), ]),
    (np.sin, [([uf2, (-3,3) ], eps), ]),
    (np.tan, [([uf2, (-.8,.8)], eps), ]),
    (np.tanh, [([uf2, (-3,3) ], eps), ]),
    (np.cos, [([uf3, (-3,3) ], eps), ]),
    (np.cosh, [([uf3, (-3,3) ], eps), ]),
    (np.exp, [([uf3, (-3,3) ], eps), ]),
    (np.expm1, [([uf3, (-3,3) ], eps), ]),
    (np.sinh, [([uf3, (-3,3) ], eps), ]),
    (np.sin, [([uf3, (-3,3) ], eps), ]),
    (np.tan, [([uf3, (-.8,.8)], eps), ]),
    (np.tanh, [([uf3, (-3,3) ], eps), ]),
]

def ufuncTester(ufunc, f, interval, tol):
    """Build a test comparing the ufunc overload against pointwise numpy."""
    ff = Bndfun.initfun_adaptive(f, interval)
    gg = lambda x: ufunc(f(x))
    GG = getattr(ff, ufunc.__name__)()
    def tester(self):
        xx = interval(self.yy)
        # tolerance scales with the result's vertical scale and size
        vscl = GG.vscale
        lscl = GG.size
        self.assertLessEqual(infnorm(gg(xx)-GG(xx)), vscl*lscl*tol)
    return tester

# attach one generated test per table row to the Ufuncs TestCase
for (ufunc, [([f, intvl], tol), ]) in ufunc_test_params:
    interval = Interval(*intvl)
    _testfun_ = ufuncTester(ufunc, f, interval, tol)
    _testfun_.__name__ = \
        "test_{}_{}_[{:.1f},{:.1f}]".format(
            ufunc.__name__, f.__name__, *intvl)
    setattr(Ufuncs, _testfun_.__name__, _testfun_)
class Roots(unittest.TestCase):
    """Unit-tests for Bndfun.roots on degenerate inputs.

    The numerical rootfinding tests are generated at module level below
    and attached via setattr. Uses assertEqual: assertEquals is a
    deprecated alias removed in Python 3.12.
    """

    def test_empty(self):
        ff = Bndfun.initempty()
        self.assertEqual(ff.roots().size, 0)

    def test_const(self):
        # constants -- including the zero function -- report no roots
        ff = Bndfun.initconst(0., Interval(-2,3))
        gg = Bndfun.initconst(2., Interval(-2,3))
        self.assertEqual(ff.roots().size, 0)
        self.assertEqual(gg.roots().size, 0)
# add tests for roots
def rootsTester(f, interval, roots, tol):
    """Build a test comparing computed roots against the reference array."""
    subinterval = Interval(*interval)
    ff = Bndfun.initfun_adaptive(f, subinterval)
    rts = ff.roots()
    def tester(self):
        self.assertLessEqual(infnorm(rts-roots), tol)
    return tester

# (function, interval, expected roots, tolerance)
rootstestfuns = (
    (lambda x: 3*x+2., [-2,3], np.array([-2/3]), eps),
    (lambda x: x**2+.2*x-.08, [-2,5], np.array([-.4, .2]), 3e1*eps),
    (lambda x: sin(x), [-7,7], pi*np.linspace(-2,2,5), 1e1*eps),
    (lambda x: cos(2*pi*x), [-20,10], np.linspace(-19.75, 9.75, 60), 3e1*eps),
    (lambda x: sin(100*pi*x), [-0.5,0.5], np.linspace(-.5,.5,101), eps),
    (lambda x: sin(5*pi/2*x), [-1,1], np.array([-.8, -.4, 0, .4, .8]), eps)
)
for k, args in enumerate(rootstestfuns):
    _testfun_ = rootsTester(*args)
    _testfun_.__name__ = "test_roots_{}".format(k)
    setattr(Roots, _testfun_.__name__, _testfun_)

# reset the testsfun variable so it doesn't get picked up by nose
_testfun_ = None
| 38.331053 | 82 | 0.569379 |
from __future__ import division
import itertools
import operator
import unittest
import numpy as np
import matplotlib.pyplot as plt
from chebpy.core.bndfun import Bndfun
from chebpy.core.chebtech import Chebtech2
from chebpy.core.settings import DefaultPrefs
from chebpy.core.utilities import Interval
from chebpy.core.algorithms import standard_chop
from tests.utilities import testfunctions, infnorm
pi = np.pi
sin = np.sin
cos = np.cos
exp = np.exp
eps = DefaultPrefs.eps
class ClassUsage(unittest.TestCase):
    """Tests for basic Bndfun usage: predicates, evaluation, copying,
    restriction and simplification.

    The deprecated ``assertEquals``/``assertNotEquals`` aliases used
    previously have been replaced with ``assertEqual``/``assertNotEqual``.
    """
    def setUp(self):
        f = lambda x: sin(30*x)
        subinterval = Interval(-2,3)
        self.f = f
        self.ff = Bndfun.initfun_adaptive(f, subinterval)
        self.xx = subinterval(np.linspace(-1,1,100))
        self.emptyfun = Bndfun(Chebtech2.initempty(), subinterval)
        self.constfun = Bndfun(Chebtech2.initconst(1.), subinterval)
    def test_isempty_True(self):
        self.assertTrue(self.emptyfun.isempty)
        self.assertFalse(not self.emptyfun.isempty)
    def test_isempty_False(self):
        self.assertFalse(self.constfun.isempty)
        self.assertTrue(not self.constfun.isempty)
    def test_isconst_True(self):
        self.assertTrue(self.constfun.isconst)
        self.assertFalse(not self.constfun.isconst)
    def test_isconst_False(self):
        self.assertFalse(self.emptyfun.isconst)
        self.assertTrue(not self.emptyfun.isconst)
    def test_size(self):
        cfs = np.random.rand(10)
        subinterval = Interval()
        b0 = Bndfun(Chebtech2(np.array([])), subinterval)
        b1 = Bndfun(Chebtech2(np.array([1.])), subinterval)
        b2 = Bndfun(Chebtech2(cfs), subinterval)
        self.assertEqual(b0.size, 0)
        self.assertEqual(b1.size, 1)
        self.assertEqual(b2.size, cfs.size)
    def test_support(self):
        a, b = self.ff.support
        self.assertEqual(a, -2)
        self.assertEqual(b, 3)
    def test_endvalues(self):
        a, b = self.ff.support
        fa, fb = self.ff.endvalues
        self.assertLessEqual(abs(fa-self.f(a)), 2e1*eps)
        self.assertLessEqual(abs(fb-self.f(b)), 2e1*eps)
    def test_call(self):
        self.ff(self.xx)
    def test_call_bary(self):
        self.ff(self.xx, "bary")
        self.ff(self.xx, how="bary")
    def test_call_clenshaw(self):
        self.ff(self.xx, "clenshaw")
        self.ff(self.xx, how="clenshaw")
    def test_call_bary_vs_clenshaw(self):
        b = self.ff(self.xx, "clenshaw")
        c = self.ff(self.xx, "bary")
        self.assertLessEqual(infnorm(b-c), 2e2*eps)
    def test_call_raises(self):
        self.assertRaises(ValueError, self.ff, self.xx, "notamethod")
        self.assertRaises(ValueError, self.ff, self.xx, how="notamethod")
    def test_vscale_empty(self):
        # the vertical scale of the empty function is defined to be zero
        self.assertEqual(self.emptyfun.vscale, 0.)
    def test_copy(self):
        ff = self.ff
        gg = self.ff.copy()
        self.assertEqual(ff, ff)
        self.assertEqual(gg, gg)
        # a copy must be a distinct object, even though coefficients agree
        self.assertNotEqual(ff, gg)
        self.assertEqual(infnorm(ff.coeffs-gg.coeffs), 0)
    def test_restrict(self):
        i1 = Interval(-1,1)
        gg = self.ff.restrict(i1)
        yy = np.linspace(-1,1,1000)
        self.assertLessEqual(infnorm(self.ff(yy)-gg(yy)), 1e2*eps)
    def test_simplify(self):
        interval = Interval(-2,1)
        ff = Bndfun.initfun_fixedlen(self.f, interval, 1000)
        gg = ff.simplify()
        # simplify should chop to the standard_chop length, keep the leading
        # coefficients unchanged, and preserve the interval of definition
        self.assertEqual(gg.size, standard_chop(ff.onefun.coeffs))
        self.assertEqual(infnorm(ff.coeffs[:gg.size]-gg.coeffs), 0)
        self.assertEqual(ff.interval, gg.interval)
vscales = [
(lambda x: sin(4*pi*x), [-2, 2], 1),
(lambda x: cos(x), [-10, 1], 1),
(lambda x: cos(4*pi*x), [-100, 100], 1),
(lambda x: exp(cos(4*pi*x)), [-1,1], exp(1)),
(lambda x: cos(3244*x), [-2,0], 1),
(lambda x: exp(x), [-1,2], exp(2)),
(lambda x: 1e10*exp(x), [-1,1], 1e10*exp(1)),
(lambda x: 0*x+1., [-1e5,1e4], 1),
]
# NOTE(review): despite its name this tester checks the .vscale estimate of an
# adaptively constructed Bndfun, not a definite integral.  The name is also
# shadowed by the genuine definiteIntegralTester defined further down, after
# this registration loop has already consumed this one.
def definiteIntegralTester(fun, interval, vscale):
    """Return a test asserting ff.vscale lies within 10% of the expected value."""
    subinterval = Interval(*interval)
    ff = Bndfun.initfun_adaptive(fun, subinterval)
    def tester(self):
        absdiff = abs(ff.vscale-vscale)
        self.assertLessEqual(absdiff, .1*vscale)
    return tester
# attach one test_vscale_XX method to ClassUsage per entry in vscales
for k, args in enumerate(vscales):
    _testfun_ = definiteIntegralTester(*args)
    _testfun_.__name__ = "test_vscale_{:02}".format(k)
    setattr(ClassUsage, _testfun_.__name__, _testfun_)
class Plotting(unittest.TestCase):
    """Smoke tests for the Bndfun plotting helpers (no pixel-level checks)."""

    def setUp(self):
        # mildly oscillatory test function with features on several scales
        def oscillatory(x):
            return sin(1*x) + 5e-1*cos(10*x) + 5e-3*sin(100*x)
        domain = Interval(-6, 10)
        self.f0 = Bndfun.initfun_fixedlen(oscillatory, domain, 1000)
        self.f1 = Bndfun.initfun_adaptive(oscillatory, domain)

    def test_plot(self):
        _, ax = plt.subplots()
        self.f0.plot(ax=ax, color="g", marker="o", markersize=2, linestyle="")

    def test_plotcoeffs(self):
        _, ax = plt.subplots()
        self.f0.plotcoeffs(ax=ax)
        self.f1.plotcoeffs(ax=ax, color="r")
class Calculus(unittest.TestCase):
    """Edge-case tests for Bndfun calculus (sum, cumsum, diff).

    The numerical accuracy checks are attached dynamically by the
    registration loops that follow this class definition.
    """
    def setUp(self):
        self.emptyfun = Bndfun(Chebtech2.initempty(), Interval())
        self.yy = np.linspace(-1,1,2000)
    # the definite integral of the empty function is defined to be zero
    def test_sum_empty(self):
        self.assertEqual(self.emptyfun.sum(), 0)
    # cumsum and diff of the empty function remain empty
    def test_cumsum_empty(self):
        self.assertTrue(self.emptyfun.cumsum().isempty)
    def test_diff_empty(self):
        self.assertTrue(self.emptyfun.diff().isempty)
def_integrals = [
(lambda x: sin(x), [-2,2], .0, 2*eps),
(lambda x: sin(4*pi*x), [-.1, .7], 0.088970317927147, 1e1*eps),
(lambda x: cos(x), [-100,203], 0.426944059057085, 4e2*eps),
(lambda x: cos(4*pi*x), [-1e-1,-1e-3], 0.074682699182803, 2*eps),
(lambda x: exp(cos(4*pi*x)), [-3,1], 5.064263511008033, 4*eps),
(lambda x: cos(3244*x), [0,0.4], -3.758628487169980e-05, 5e2*eps),
(lambda x: exp(x), [-2,-1], exp(-1)-exp(-2), 2*eps),
(lambda x: 1e10*exp(x), [-1,2], 1e10*(exp(2)-exp(-1)), 2e10*eps),
(lambda x: 0*x+1., [-100,300], 400, eps),
]
def definiteIntegralTester(fun, interval, integral, tol):
    """Return a test asserting ff.sum() matches the reference integral to tol."""
    subinterval = Interval(*interval)
    ff = Bndfun.initfun_adaptive(fun, subinterval)
    def tester(self):
        absdiff = abs(ff.sum()-integral)
        self.assertLessEqual(absdiff, tol)
    return tester
# attach one test_sum_XX method to Calculus per entry in def_integrals
# (the loop variable `n` actually holds the interval endpoints, not a length)
for k, (fun, n, integral, tol) in enumerate(def_integrals):
    _testfun_ = definiteIntegralTester(fun, n, integral, tol)
    _testfun_.__name__ = "test_sum_{:02}".format(k)
    setattr(Calculus, _testfun_.__name__, _testfun_)
indef_integrals = [
(lambda x: 0*x+1., lambda x: x, [-2,3], eps),
(lambda x: x, lambda x: 1/2*x**2, [-5,0], 4*eps),
(lambda x: x**2, lambda x: 1/3*x**3, [1,10], 2e2*eps),
(lambda x: x**3, lambda x: 1/4*x**4, [-1e-2,4e-1], 2*eps),
(lambda x: x**4, lambda x: 1/5*x**5, [-3,-2], 3e2*eps),
(lambda x: x**5, lambda x: 1/6*x**6, [-1e-10,1], 4*eps),
(lambda x: sin(x), lambda x: -cos(x), [-10,22], 3e1*eps),
(lambda x: cos(3*x), lambda x: 1./3*sin(3*x), [-3,4], 2*eps),
(lambda x: exp(x), lambda x: exp(x), [-60,1], 1e1*eps),
(lambda x: 1e10*exp(x), lambda x: 1e10*exp(x), [-1,1], 1e10*(3*eps)),
]
def indefiniteIntegralTester(fun, ifn, interval, tol):
    """Return a test comparing ff.cumsum() coefficients against the
    reference antiderivative ifn (adjusted to vanish at the left endpoint,
    matching the cumsum convention)."""
    subinterval = Interval(*interval)
    ff = Bndfun.initfun_adaptive(fun, subinterval)
    gg = Bndfun.initfun_fixedlen(ifn, subinterval, ff.size+1)
    coeffs = gg.coeffs
    # NOTE(review): if .coeffs returns the internal array this writes into gg
    # in place; gg is local to this closure so that appears harmless here,
    # but worth confirming against the Bndfun implementation.
    coeffs[0] = coeffs[0] - ifn(np.array([interval[0]]))
    def tester(self):
        absdiff = infnorm(ff.cumsum().coeffs - coeffs)
        self.assertLessEqual(absdiff, tol)
    return tester
# attach one test_cumsum_XX method to Calculus per entry in indef_integrals
# (the loop variable `n` actually holds the interval endpoints)
for k, (fun, dfn, n, tol) in enumerate(indef_integrals):
    _testfun_ = indefiniteIntegralTester(fun, dfn, n, tol)
    _testfun_.__name__ = "test_cumsum_{:02}".format(k)
    setattr(Calculus, _testfun_.__name__, _testfun_)
derivatives = [
(lambda x: 0*x+1., lambda x: 0*x+0, [-2,3], eps),
(lambda x: x, lambda x: 0*x+1, [-5,0], 2e1*eps),
(lambda x: x**2, lambda x: 2*x, [1,10], 2e2*eps),
(lambda x: x**3, lambda x: 3*x**2, [-1e-2,4e-1], 3*eps),
(lambda x: x**4, lambda x: 4*x**3, [-3,-2], 1e3*eps),
(lambda x: x**5, lambda x: 5*x**4, [-1e-10,1], 4e1*eps),
(lambda x: sin(x), lambda x: cos(x), [-10,22], 5e2*eps),
(lambda x: cos(3*x), lambda x: -3*sin(3*x), [-3,4], 5e2*eps),
(lambda x: exp(x), lambda x: exp(x), [-60,1], 2e2*eps),
(lambda x: 1e10*exp(x), lambda x: 1e10*exp(x), [-1,1], 1e10*2e2*eps),
]
def derivativeTester(fun, ifn, interval, tol):
    """Build a test comparing ff.diff() against the reference derivative ifn,
    represented with max(ff.size-1, 1) Chebyshev coefficients."""
    dom = Interval(*interval)
    numerical = Bndfun.initfun_adaptive(fun, dom)
    expected = Bndfun.initfun_fixedlen(ifn, dom, max(numerical.size-1, 1))
    def tester(self):
        discrepancy = infnorm(numerical.diff().coeffs - expected.coeffs)
        self.assertLessEqual(discrepancy, tol)
    return tester
# attach one test_diff_XX method to Calculus per entry in derivatives
for idx, params in enumerate(derivatives):
    _testfun_ = derivativeTester(*params)
    _testfun_.__name__ = "test_diff_{:02}".format(idx)
    setattr(Calculus, _testfun_.__name__, _testfun_)
class Construction(unittest.TestCase):
    """Tests for the Bndfun constructors (onefun, const, empty, identity).

    The deprecated ``assertEquals`` alias previously used here has been
    replaced with ``assertEqual``.
    """
    def test_onefun_construction(self):
        coeffs = np.random.rand(10)
        subinterval = Interval()
        onefun = Chebtech2(coeffs)
        f = Bndfun(onefun, subinterval)
        self.assertIsInstance(f, Bndfun)
        self.assertLess(infnorm(f.coeffs-coeffs), eps)
    def test_const_construction(self):
        subinterval = Interval()
        ff = Bndfun.initconst(1., subinterval)
        self.assertEqual(ff.size, 1)
        self.assertTrue(ff.isconst)
        self.assertFalse(ff.isempty)
        # a non-scalar argument must be rejected
        self.assertRaises(ValueError, Bndfun.initconst, [1.], subinterval)
    def test_empty_construction(self):
        ff = Bndfun.initempty()
        self.assertEqual(ff.size, 0)
        self.assertFalse(ff.isconst)
        self.assertTrue(ff.isempty)
        # initempty takes no arguments
        self.assertRaises(TypeError, Bndfun.initempty, [1.])
    def test_identity_construction(self):
        for (a,b) in [(-1,1), (-10,-2), (-2.3, 1.24), (20,2000)]:
            itvl = Interval(a,b)
            ff = Bndfun.initidentity(itvl)
            self.assertEqual(ff.size, 2)
            xx = np.linspace(a,b,1001)
            # the identity should be exact up to eps scaled by the interval size
            tol = eps * abs(itvl).max()
            self.assertLessEqual(infnorm(ff(xx)-xx), tol)
def adaptiveTester(fun, subinterval, funlen):
    """Return a test asserting adaptive construction resolves fun with funlen coefficients."""
    ff = Bndfun.initfun_adaptive(fun, subinterval)
    def tester(self):
        self.assertEquals(ff.size, funlen)
    return tester
def fixedlenTester(fun, subinterval, n):
    """Return a test asserting fixed-length construction uses exactly n coefficients."""
    ff = Bndfun.initfun_fixedlen(fun, subinterval, n)
    def tester(self):
        self.assertEquals(ff.size, n)
    return tester
# NOTE(review): `funs` is never used below — presumably a leftover.
funs = []
# entries: (function, display name, interval, expected adaptive length)
fun_details = [
    (lambda x: x**3 + x**2 + x + 1, "poly3(x)", [-2,3], 4),
    (lambda x: exp(x), "exp(x)", [-2,3], 20),
    (lambda x: sin(x), "sin(x)", [-2,3], 20),
    (lambda x: cos(20*x), "cos(20x)", [-2,3], 90),
    (lambda x: 0.*x+1., "constfun", [-2,3], 1),
    (lambda x: 0.*x, "zerofun", [-2,3], 1),
]
# attach adaptive- and fixed-length-construction tests to Construction;
# the lambdas are renamed so the generated test names are readable
for k, (fun, name, interval, funlen) in enumerate(fun_details):
    fun.__name__ = name
    subinterval = Interval(*interval)
    _testfun_ = adaptiveTester(fun, subinterval, funlen)
    _testfun_.__name__ = "test_adaptive_{}".format(fun.__name__)
    setattr(Construction, _testfun_.__name__, _testfun_)
    for n in np.array([100]):
        _testfun_ = fixedlenTester(fun, subinterval, n)
        _testfun_.__name__ = \
            "test_fixedlen_{}_{:003}pts".format(fun.__name__, n)
        setattr(Construction, _testfun_.__name__, _testfun_)
class Algebra(unittest.TestCase):
    """Tests for the arithmetic operators defined on Bndfun.

    Bugfix: in test_truediv_empty the second assertion previously duplicated
    the first (empty/bndfun twice) instead of checking bndfun/empty.
    """
    def setUp(self):
        self.yy = np.linspace(-1,1,1000)
        self.emptyfun = Bndfun.initempty()
    # (empty) + f and f + (empty) are both empty
    def test__add__radd__empty(self):
        subinterval = Interval(-2,3)
        for (fun, _, _) in testfunctions:
            chebtech = Bndfun.initfun_adaptive(fun, subinterval)
            self.assertTrue((self.emptyfun+chebtech).isempty)
            self.assertTrue((chebtech+self.emptyfun).isempty)
    def test__add__radd__constant(self):
        subinterval = Interval(-.5,.9)
        xx = subinterval(self.yy)
        for (fun, _, _) in testfunctions:
            for const in (-1, 1, 10, -1e5):
                f = lambda x: const + fun(x)
                bndfun = Bndfun.initfun_adaptive(fun, subinterval)
                f1 = const + bndfun
                f2 = bndfun + const
                tol = 4e1 * eps * abs(const)
                self.assertLessEqual(infnorm(f(xx)-f1(xx)), tol)
                self.assertLessEqual(infnorm(f(xx)-f2(xx)), tol)
    def test__sub__rsub__empty(self):
        subinterval = Interval(-2,3)
        for (fun, _, _) in testfunctions:
            chebtech = Bndfun.initfun_adaptive(fun, subinterval)
            self.assertTrue((self.emptyfun-chebtech).isempty)
            self.assertTrue((chebtech-self.emptyfun).isempty)
    def test__sub__rsub__constant(self):
        subinterval = Interval(-.5,.9)
        xx = subinterval(self.yy)
        for (fun, _, _) in testfunctions:
            for const in (-1, 1, 10, -1e5):
                bndfun = Bndfun.initfun_adaptive(fun, subinterval)
                f = lambda x: const - fun(x)
                g = lambda x: fun(x) - const
                ff = const - bndfun
                gg = bndfun - const
                tol = 5e1 * eps * abs(const)
                self.assertLessEqual(infnorm(f(xx)-ff(xx)), tol)
                self.assertLessEqual(infnorm(g(xx)-gg(xx)), tol)
    def test__mul__rmul__empty(self):
        subinterval = Interval(-2,3)
        for (fun, _, _) in testfunctions:
            chebtech = Bndfun.initfun_adaptive(fun, subinterval)
            self.assertTrue((self.emptyfun*chebtech).isempty)
            self.assertTrue((chebtech*self.emptyfun).isempty)
    def test__mul__rmul__constant(self):
        subinterval = Interval(-.5,.9)
        xx = subinterval(self.yy)
        for (fun, _, _) in testfunctions:
            for const in (-1, 1, 10, -1e5):
                bndfun = Bndfun.initfun_adaptive(fun, subinterval)
                f = lambda x: const * fun(x)
                g = lambda x: fun(x) * const
                ff = const * bndfun
                gg = bndfun * const
                tol = 4e1 * eps * abs(const)
                self.assertLessEqual(infnorm(f(xx)-ff(xx)), tol)
                self.assertLessEqual(infnorm(g(xx)-gg(xx)), tol)
    def test_truediv_empty(self):
        subinterval = Interval(-2,3)
        for (fun, _, _) in testfunctions:
            bndfun = Bndfun.initfun_adaptive(fun, subinterval)
            self.assertTrue(operator.truediv(self.emptyfun, bndfun).isempty)
            # bugfix: this previously repeated the line above verbatim
            self.assertTrue(operator.truediv(bndfun, self.emptyfun).isempty)
            self.assertTrue((self.emptyfun/bndfun).isempty)
            self.assertTrue((bndfun/self.emptyfun).isempty)
    def test_truediv_constant(self):
        subinterval = Interval(-.5,.9)
        xx = subinterval(self.yy)
        for (fun, _, hasRoots) in testfunctions:
            for const in (-1, 1, 10, -1e5):
                hscl = abs(subinterval).max()
                tol = hscl * eps * abs(const)
                bndfun = Bndfun.initfun_adaptive(fun, subinterval)
                g = lambda x: fun(x) / const
                gg = bndfun / const
                self.assertLessEqual(infnorm(g(xx)-gg(xx)), 3*gg.size*tol)
                # constant / fun is only well-posed when fun has no roots
                if not hasRoots:
                    f = lambda x: const / fun(x)
                    ff = const / bndfun
                    self.assertLessEqual(infnorm(f(xx)-ff(xx)), 2*ff.size*tol)
    # check +(empty Bndfun) = (empty Bndfun)
    def test__pos__empty(self):
        self.assertTrue((+self.emptyfun).isempty)
    # check -(empty Bndfun) = (empty Bndfun)
    def test__neg__empty(self):
        self.assertTrue((-self.emptyfun).isempty)
    # check (empty Bndfun) ** c = (empty Bndfun)
    def test_pow_empty(self):
        for c in range(10):
            self.assertTrue((self.emptyfun**c).isempty)
    # check c ** (empty Bndfun) = (empty Bndfun)
    def test_rpow_empty(self):
        for c in range(10):
            self.assertTrue((c**self.emptyfun).isempty)
    # check the output of Bndfun ** constant
    def test_pow_const(self):
        subinterval = Interval(-.5,.9)
        xx = subinterval(self.yy)
        for func in (np.sin, np.exp, np.cos):
            for c in (1, 2):
                f = lambda x: func(x) ** c
                ff = Bndfun.initfun_adaptive(func, subinterval) ** c
                tol = 2e1 * eps * abs(c)
                self.assertLessEqual(infnorm(f(xx)-ff(xx)), tol)
    # check the output of constant ** Bndfun
    def test_rpow_const(self):
        subinterval = Interval(-.5,.9)
        xx = subinterval(self.yy)
        for func in (np.sin, np.exp, np.cos):
            for c in (1, 2):
                f = lambda x: c ** func(x)
                ff = c ** Bndfun.initfun_adaptive(func, subinterval)
                tol = 1e1 * eps * abs(c)
                self.assertLessEqual(infnorm(f(xx)-ff(xx)), tol)
binops = (operator.add, operator.mul, operator.sub, operator.truediv)
# add tests for the binary operators
def binaryOpTester(f, g, subinterval, binop):
    """Build a test checking that binop applied to two Bndfuns matches binop
    applied pointwise to the underlying functions, to within a scaled eps."""
    lhs = Bndfun.initfun_adaptive(f, subinterval)
    rhs = Bndfun.initfun_adaptive(g, subinterval)
    combined = binop(lhs, rhs)
    def reference(x):
        return binop(f(x), g(x))
    def tester(self):
        vscl = max(lhs.vscale, rhs.vscale)
        lscl = max(lhs.size, rhs.size)
        xx = subinterval(self.yy)
        self.assertLessEqual(infnorm(combined(xx)-reference(xx)), 6*vscl*lscl*eps)
    return tester
# Note: defining __radd__(a,b) = __add__(b,a) and feeding this into the
# test will not in fact test the __radd__ functionality of the class.
# These tests will need to be added manually.
subintervals = (
Interval(-.5,.9),
Interval(-1.2, 1.3),
Interval(-2.2, -1.9),
Interval(0.4, 1.3),
)
for binop in binops:
# add the generic binary operator tests
for (f, _, _), (g, _, denomRoots) in \
itertools.combinations(testfunctions, 2):
for subinterval in subintervals:
if binop is operator.truediv and denomRoots:
# skip truediv test if denominator has roots on the real line
pass
else:
_testfun_ = binaryOpTester(f, g, subinterval, binop)
a, b = subinterval
_testfun_.__name__ = \
"test_{}_{}_{}_[{:.1f},{:.1f}]".format(
binop.__name__, f.__name__, g.__name__, a, b)
setattr(Algebra, _testfun_.__name__, _testfun_)
powtestfuns = (
[(np.exp, 'exp'), (np.sin, 'sin')],
[(np.exp, 'exp'), (lambda x: 2-x, 'linear')],
[(lambda x: 2-x, 'linear'), (np.exp, 'exp')],
)
# add operator.power tests
for (f, namef), (g, nameg) in powtestfuns:
for subinterval in subintervals:
_testfun_ = binaryOpTester(f, g, subinterval, operator.pow)
a, b = subinterval
_testfun_.__name__ = \
"test_{}_{}_{}_[{:.1f},{:.1f}]".format(
'pow', namef, nameg, a, b)
setattr(Algebra, _testfun_.__name__, _testfun_)
unaryops = (operator.pos, operator.neg)
# add tests for the unary operators
def unaryOpTester(unaryop, f, subinterval):
    """Build a test checking that a unary operator applied to a Bndfun
    matches the operator applied pointwise to the underlying function."""
    operand = Bndfun.initfun_adaptive(f, subinterval)
    transformed = unaryop(operand)
    def reference(x):
        return unaryop(f(x))
    def tester(self):
        xx = subinterval(self.yy)
        self.assertLessEqual(infnorm(reference(xx)-transformed(xx)), 4e1*eps)
    return tester
for unaryop in unaryops:
for (f, _, _) in testfunctions:
subinterval = Interval(-.5,.9)
_testfun_ = unaryOpTester(unaryop, f, subinterval)
_testfun_.__name__ = \
"test_{}_{}".format(unaryop.__name__, f.__name__)
setattr(Algebra, _testfun_.__name__, _testfun_)
class Ufuncs(unittest.TestCase):
    """Container for numpy-ufunc method tests; the individual test methods
    are attached dynamically by the registration loops below."""
    def setUp(self):
        self.yy = np.linspace(-1,1,1000)
        self.emptyfun = Bndfun.initempty()
ufuncs = (np.absolute, np.arccos, np.arccosh, np.arcsin, np.arcsinh, np.arctan,
np.arctanh, np.cos, np.cosh, np.exp, np.exp2, np.expm1, np.log,
np.log2, np.log10, np.log1p, np.sinh, np.sin, np.tan, np.tanh,
np.sqrt)
# empty-case tests
def ufuncEmptyCaseTester(ufunc):
    """Build a test asserting that applying *ufunc* to the empty Bndfun
    (via its same-named method) yields an empty result."""
    method_name = ufunc.__name__
    def tester(self):
        result = getattr(self.emptyfun, method_name)()
        self.assertTrue(result.isempty)
    return tester
for ufunc in ufuncs:
_testfun_ = ufuncEmptyCaseTester(ufunc)
_testfun_.__name__ = "test_emptycase_{}".format(ufunc.__name__)
setattr(Ufuncs, _testfun_.__name__, _testfun_)
# TODO: Add more test cases
# add ufunc tests:
# (ufunc, [([fun1, interval1], tol1), ([fun2, interval2], tol2), ... ])
uf1 = lambda x: x
uf1.__name__ = "x"
uf2 = lambda x: sin(x-.5)
uf2.__name__ = "sin(x-.5)"
uf3 = lambda x: sin(25*x-1)
uf3.__name__ = "sin(25*x-1)"
ufunc_test_params = [
(np.absolute, [([uf1, (-3,-.5)], eps), ]),
(np.arccos, [([uf1, (-.8,.8)], eps), ]),
(np.arccosh, [([uf1, (2,3) ], eps), ]),
(np.arcsin, [([uf1, (-.8,.8)], eps), ]),
(np.arcsinh, [([uf1, (2,3) ], eps), ]),
(np.arctan, [([uf1, (-.8,.8)], eps), ]),
(np.arctanh, [([uf1, (-.8,.8)], eps), ]),
(np.cos, [([uf1, (-3,3) ], eps), ]),
(np.cosh, [([uf1, (-3,3) ], eps), ]),
(np.exp, [([uf1, (-3,3) ], eps), ]),
(np.exp2, [([uf1, (-3,3) ], eps), ]),
(np.expm1, [([uf1, (-3,3) ], eps), ]),
(np.log, [([uf1, (2,3) ], eps), ]),
(np.log2, [([uf1, (2,3) ], eps), ]),
(np.log10, [([uf1, (2,3) ], eps), ]),
(np.log1p, [([uf1, (-.8,.8)], eps), ]),
(np.sinh, [([uf1, (-3,3) ], eps), ]),
(np.sin, [([uf1, (-3,3) ], eps), ]),
(np.tan, [([uf1, (-.8,.8)], eps), ]),
(np.tanh, [([uf1, (-3,3) ], eps), ]),
(np.sqrt, [([uf1, (2,3) ], eps), ]),
(np.cos, [([uf2, (-3,3) ], eps), ]),
(np.cosh, [([uf2, (-3,3) ], eps), ]),
(np.exp, [([uf2, (-3,3) ], eps), ]),
(np.expm1, [([uf2, (-3,3) ], eps), ]),
(np.sinh, [([uf2, (-3,3) ], eps), ]),
(np.sin, [([uf2, (-3,3) ], eps), ]),
(np.tan, [([uf2, (-.8,.8)], eps), ]),
(np.tanh, [([uf2, (-3,3) ], eps), ]),
(np.cos, [([uf3, (-3,3) ], eps), ]),
(np.cosh, [([uf3, (-3,3) ], eps), ]),
(np.exp, [([uf3, (-3,3) ], eps), ]),
(np.expm1, [([uf3, (-3,3) ], eps), ]),
(np.sinh, [([uf3, (-3,3) ], eps), ]),
(np.sin, [([uf3, (-3,3) ], eps), ]),
(np.tan, [([uf3, (-.8,.8)], eps), ]),
(np.tanh, [([uf3, (-3,3) ], eps), ]),
]
def ufuncTester(ufunc, f, interval, tol):
    """Build a test comparing the Bndfun ufunc method against the direct
    numpy composition ufunc(f(x)) on a sample grid."""
    adaptive = Bndfun.initfun_adaptive(f, interval)
    composed = getattr(adaptive, ufunc.__name__)()
    def reference(x):
        return ufunc(f(x))
    def tester(self):
        xx = interval(self.yy)
        bound = composed.vscale * composed.size * tol
        self.assertLessEqual(infnorm(reference(xx)-composed(xx)), bound)
    return tester
for (ufunc, [([f, intvl], tol), ]) in ufunc_test_params:
interval = Interval(*intvl)
_testfun_ = ufuncTester(ufunc, f, interval, tol)
_testfun_.__name__ = \
"test_{}_{}_[{:.1f},{:.1f}]".format(
ufunc.__name__, f.__name__, *intvl)
setattr(Ufuncs, _testfun_.__name__, _testfun_)
class Roots(unittest.TestCase):
    """Edge-case tests for Bndfun.roots; numerical cases are attached below.

    The deprecated ``assertEquals`` alias has been replaced with
    ``assertEqual``.
    """
    def test_empty(self):
        ff = Bndfun.initempty()
        self.assertEqual(ff.roots().size, 0)
    def test_const(self):
        # both the zero constant and a nonzero constant report no roots
        ff = Bndfun.initconst(0., Interval(-2,3))
        gg = Bndfun.initconst(2., Interval(-2,3))
        self.assertEqual(ff.roots().size, 0)
        self.assertEqual(gg.roots().size, 0)
# add tests for roots
def rootsTester(f, interval, roots, tol):
    """Build a test comparing computed roots against known reference roots."""
    domain = Interval(*interval)
    computed = Bndfun.initfun_adaptive(f, domain).roots()
    def tester(self):
        self.assertLessEqual(infnorm(computed-roots), tol)
    return tester
rootstestfuns = (
(lambda x: 3*x+2., [-2,3], np.array([-2/3]), eps),
(lambda x: x**2+.2*x-.08, [-2,5], np.array([-.4, .2]), 3e1*eps),
(lambda x: sin(x), [-7,7], pi*np.linspace(-2,2,5), 1e1*eps),
(lambda x: cos(2*pi*x), [-20,10], np.linspace(-19.75, 9.75, 60), 3e1*eps),
(lambda x: sin(100*pi*x), [-0.5,0.5], np.linspace(-.5,.5,101), eps),
(lambda x: sin(5*pi/2*x), [-1,1], np.array([-.8, -.4, 0, .4, .8]), eps)
)
for k, args in enumerate(rootstestfuns):
_testfun_ = rootsTester(*args)
_testfun_.__name__ = "test_roots_{}".format(k)
setattr(Roots, _testfun_.__name__, _testfun_)
# reset the testsfun variable so it doesn't get picked up by nose
_testfun_ = None
| true | true |
f738b6daaa9a461eecc3aa0b6a12081fbe06512b | 670 | py | Python | blur.py | solderneer/opencv-adventures | 20abea930f44296367217145fab73866ea654084 | [
"MIT"
] | null | null | null | blur.py | solderneer/opencv-adventures | 20abea930f44296367217145fab73866ea654084 | [
"MIT"
] | null | null | null | blur.py | solderneer/opencv-adventures | 20abea930f44296367217145fab73866ea654084 | [
"MIT"
] | null | null | null | #!/usr/bin/env python
"""Demo of basic OpenCV smoothing filters plus a sharpening convolution."""
import cv2
import numpy as np

image = cv2.imread('../images/input.jpg')
# cv2.imread returns None (no exception) when the file cannot be read;
# fail fast instead of crashing obscurely inside the first filter call
if image is None:
    raise FileNotFoundError("could not read '../images/input.jpg'")

blur = cv2.blur(image, (3,3))                      # box (mean) filter
gaussian_blur = cv2.GaussianBlur(image, (3,3), 0)  # sigma=0: derived from kernel size
median = cv2.medianBlur(image, 5)                  # good at salt-and-pepper noise

cv2.imshow("box blur", blur)  # window title typo "boxblux" fixed
cv2.waitKey()
cv2.imshow("gaussian", gaussian_blur)
cv2.waitKey()
cv2.imshow("median", median)
cv2.waitKey()

# should go look into image de-noising
# This kernel SHARPENS (centre 9, neighbours -1, weights sum to 1) — the old
# comment called it "brighten".  Similar kernels can be used for edge detection.
matrix = np.array([[-1,-1,-1],[-1,9,-1],[-1,-1,-1]])
sharp = cv2.filter2D(image, -1, matrix)

cv2.imshow("sharp", sharp)
cv2.waitKey()
cv2.destroyAllWindows()
| 23.928571 | 63 | 0.68209 |
import cv2
import numpy as np
image = cv2.imread('../images/input.jpg')
blur = cv2.blur(image, (3,3))
gaussian_blur = cv2.GaussianBlur(image, (3,3), 0)
median = cv2.medianBlur(image, 5)
cv2.imshow("boxblux", blur)
cv2.waitKey()
cv2.imshow("gaussian", gaussian_blur)
cv2.waitKey()
cv2.imshow("median", median)
cv2.waitKey()
matrix = np.array([[-1,-1,-1],[-1,9,-1],[-1,-1,-1]])
sharp = cv2.filter2D(image, -1, matrix)
cv2.imshow("sharp", sharp)
cv2.waitKey()
cv2.destroyAllWindows()
| true | true |
f738b7eb85617dbbd989f7cc5a00cd6defee1715 | 2,375 | py | Python | models/wingspan/wingspan_report_manager.py | shimech/board_game_reporter | e4ed3035e5adfdf0dadf514d2546f3cc29672007 | [
"MIT"
] | null | null | null | models/wingspan/wingspan_report_manager.py | shimech/board_game_reporter | e4ed3035e5adfdf0dadf514d2546f3cc29672007 | [
"MIT"
] | 1 | 2020-05-23T05:36:43.000Z | 2020-05-23T05:36:43.000Z | models/wingspan/wingspan_report_manager.py | shimech/board_game_reporter | e4ed3035e5adfdf0dadf514d2546f3cc29672007 | [
"MIT"
] | null | null | null | import os
import sys
sys.path.append(os.getcwd())
import gspread
from utils import Utils
from models.wingspan.wingspan_player import WingspanPlayer
class WingspanReportManager:
    """Manages monthly WINGSPAN (board game) result reports held in a spreadsheet."""
    # maximum number of player rows scanned per sheet
    MAX_NUM_PLAYER = 30
    def wingspan_players(self, workbook: gspread.models.Spreadsheet) -> list:
        """Collect every player's results for the current target month.

        @param workbook: workbook (spreadsheet) instance
        @return players: all players, sorted by winning rate, then average
            score, then number of games (all descending); empty list if the
            month's worksheet does not exist
        """
        year, month = Utils.target_year_and_month()
        sheet_name = "{}/{:02}".format(year, month)
        try:
            worksheet = Utils.get_worksheet(workbook, sheet_name)
            players = self.init_players(worksheet)
            players = self.get_result_of_players(players, worksheet)
            players = self.make_messages(players)
            players = sorted(players, key=lambda x: (-x.winning_rate, -float(x.average), -x.num_game))
        except gspread.exceptions.WorksheetNotFound:
            print("gspread.exceptions.WorksheetNotFound: {}".format(sheet_name))
            players = list()
        return players
    def init_players(self, worksheet: gspread.models.Worksheet) -> list:
        """Build the initial player list from consecutive named rows.

        Scanning starts at row 3 and stops at the first empty name cell.
        @param worksheet: worksheet object
        @return players: list of initialised players
        """
        players = list()
        start = 3
        for i in range(self.MAX_NUM_PLAYER):
            row = start + i
            player = WingspanPlayer(row)
            name = player.get_name(worksheet)
            if name != "":
                players.append(player)
            else:
                break
        return players
    def get_result_of_players(self, players: list, worksheet: gspread.models.Worksheet) -> list:
        """Fetch each player's results; players with no recorded games are dropped.

        @param players: player list
        @param worksheet: worksheet object
        @return players_new: updated player list
        """
        players_new = list()
        for player in players:
            _, _, winning_rate, _ = player.get_results(worksheet)
            if winning_rate is not None:
                players_new.append(player)
        return players_new
    def make_messages(self, players: list) -> list:
        """Generate the report message for every player.

        @param players: player list
        @return players: the same list, after message generation
        """
        for player in players:
            _ = player.make_message()
        return players
| 30.844156 | 102 | 0.600421 | import os
import sys
sys.path.append(os.getcwd())
import gspread
from utils import Utils
from models.wingspan.wingspan_player import WingspanPlayer
class WingspanReportManager:
MAX_NUM_PLAYER = 30
def wingspan_players(self, workbook: gspread.models.Spreadsheet) -> list:
year, month = Utils.target_year_and_month()
sheet_name = "{}/{:02}".format(year, month)
try:
worksheet = Utils.get_worksheet(workbook, sheet_name)
players = self.init_players(worksheet)
players = self.get_result_of_players(players, worksheet)
players = self.make_messages(players)
players = sorted(players, key=lambda x: (-x.winning_rate, -float(x.average), -x.num_game))
except gspread.exceptions.WorksheetNotFound:
print("gspread.exceptions.WorksheetNotFound: {}".format(sheet_name))
players = list()
return players
def init_players(self, worksheet: gspread.models.Worksheet) -> list:
players = list()
start = 3
for i in range(self.MAX_NUM_PLAYER):
row = start + i
player = WingspanPlayer(row)
name = player.get_name(worksheet)
if name != "":
players.append(player)
else:
break
return players
def get_result_of_players(self, players: list, worksheet: gspread.models.Worksheet) -> list:
players_new = list()
for player in players:
_, _, winning_rate, _ = player.get_results(worksheet)
if winning_rate is not None:
players_new.append(player)
return players_new
def make_messages(self, players: list) -> list:
for player in players:
_ = player.make_message()
return players
| true | true |
f738b8748ff190f8320e07643dfda2a23a205587 | 3,873 | py | Python | segment tree 1/2B.py | iammanish17/CodeforcesEdu | 961543b332c773010320bd0b2e9d4a4b1c8dc0ea | [
"MIT"
] | 6 | 2020-09-14T19:16:23.000Z | 2021-12-10T19:07:51.000Z | segment tree 1/2B.py | iammanish17/CodeforcesEdu | 961543b332c773010320bd0b2e9d4a4b1c8dc0ea | [
"MIT"
] | null | null | null | segment tree 1/2B.py | iammanish17/CodeforcesEdu | 961543b332c773010320bd0b2e9d4a4b1c8dc0ea | [
"MIT"
] | 1 | 2021-08-12T19:37:22.000Z | 2021-08-12T19:37:22.000Z | # By manish.17, contest: ITMO Academy. Дерево отрезков часть 1. 2, problem: (B) K-th one
# https://codeforces.com/profile/manish.17
# ------------------- fast io --------------------
import os
import sys
from io import BytesIO, IOBase
BUFSIZE = 8192
class FastIO(IOBase):
    """Buffered wrapper over a raw file descriptor, reading/writing in large
    chunks to bypass Python's slower default I/O layer (competitive
    programming fast-io template)."""
    # number of complete lines currently available in the buffer
    newlines = 0
    def __init__(self, file):
        self._fd = file.fileno()
        self.buffer = BytesIO()
        # writable unless the underlying file was opened read-only
        self.writable = "x" in file.mode or "r" not in file.mode
        self.write = self.buffer.write if self.writable else None
    def read(self):
        """Read the remainder of the stream and return it as bytes."""
        while True:
            b = os.read(self._fd, max(os.fstat(self._fd).st_size, BUFSIZE))
            if not b:
                break
            ptr = self.buffer.tell()
            # append the chunk at the end without disturbing the read position
            self.buffer.seek(0, 2), self.buffer.write(b), self.buffer.seek(ptr)
        self.newlines = 0
        return self.buffer.read()
    def readline(self):
        """Return the next line (as bytes), pulling in more input as needed."""
        while self.newlines == 0:
            b = os.read(self._fd, max(os.fstat(self._fd).st_size, BUFSIZE))
            # an empty read (EOF) counts as one line so the loop terminates
            self.newlines = b.count(b"\n") + (not b)
            ptr = self.buffer.tell()
            self.buffer.seek(0, 2), self.buffer.write(b), self.buffer.seek(ptr)
        self.newlines -= 1
        return self.buffer.readline()
    def flush(self):
        """Push any buffered output to the underlying fd and reset the buffer."""
        if self.writable:
            os.write(self._fd, self.buffer.getvalue())
            self.buffer.truncate(0), self.buffer.seek(0)
class IOWrapper(IOBase):
    """Text (ascii) facade over FastIO, exposing str-based read/write so the
    stream can replace sys.stdin/sys.stdout directly."""
    def __init__(self, file):
        self.buffer = FastIO(file)
        self.flush = self.buffer.flush
        self.writable = self.buffer.writable
        self.write = lambda s: self.buffer.write(s.encode("ascii"))
        self.read = lambda: self.buffer.read().decode("ascii")
        self.readline = lambda: self.buffer.readline().decode("ascii")
sys.stdin, sys.stdout = IOWrapper(sys.stdin), IOWrapper(sys.stdout)
input = lambda: sys.stdin.readline().rstrip("\r\n")
# ------------------- fast io --------------------
from math import inf, log2
class SegmentTree:
    """Iterative bottom-up sum segment tree over a 0/1 array, supporting
    point flips (`update`) and inclusive range-sum queries (`query`)."""

    def __init__(self, array):
        self.n = len(array)
        # smallest power of two >= n; n == 1 is special-cased because
        # log2(n-1) would be log2(0)
        self.size = 2**(int(log2(self.n-1))+1) if self.n != 1 else 1
        self.func = lambda a, b: a + b
        # identity element for sum (the old `0 if self.func != min else inf`
        # was a dead condition: self.func is always a fresh lambda, never min)
        self.default = 0
        self.data = [self.default] * (2 * self.size)
        self.process(array)

    def process(self, array):
        """Copy the leaves in and build every internal node bottom-up."""
        self.data[self.size : self.size+self.n] = array
        for i in range(self.size-1, -1, -1):
            self.data[i] = self.func(self.data[2*i], self.data[2*i+1])

    def query(self, alpha, omega):
        """Return the sum over the inclusive index range [alpha, omega]."""
        if alpha == omega:
            return self.data[alpha + self.size]
        res = self.default
        alpha += self.size
        omega += self.size + 1  # convert to half-open [alpha, omega)
        while alpha < omega:
            if alpha & 1:
                res = self.func(res, self.data[alpha])
                alpha += 1
            if omega & 1:
                omega -= 1
                res = self.func(res, self.data[omega])
            alpha >>= 1
            omega >>= 1
        return res

    def update(self, index):
        """Flip the 0/1 value stored at *index* and refresh its ancestors.

        Note: this toggles (1 - value); it does not assign an arbitrary
        value (the previous docstring was wrong about that).
        """
        index += self.size
        self.data[index] = 1 - self.data[index]
        index >>= 1
        while index:
            self.data[index] = self.func(self.data[2*index], self.data[2*index+1])
            index >>= 1
# read array length n and query count m, then the 0/1 array
n, m = map(int, input().split())
a = list(map(int, input().split()))
st = SegmentTree(a)
for i in range(m):
    x, y = map(int, input().split())
    if x == 1:
        # type-1 query: flip the bit at position y
        st.update(y)
    else:
        # type-2 query: y is the 0-based rank k; convert to a 1-based count
        y += 1
        alpha, omega = 0, n - 1
        # binary search on position; each probe is a range-sum query, so one
        # type-2 query costs O(log^2 n)
        while alpha < omega:
            mid = (alpha + omega) // 2
            if st.query(alpha, mid) < y:
                # fewer than y ones in the left half: discard it and reduce y
                y -= st.query(alpha, mid)
                alpha = mid + 1
            else:
                omega = mid
        print(omega)
| 31.487805 | 88 | 0.53731 |
import os
import sys
from io import BytesIO, IOBase
BUFSIZE = 8192
class FastIO(IOBase):
newlines = 0
def __init__(self, file):
self._fd = file.fileno()
self.buffer = BytesIO()
self.writable = "x" in file.mode or "r" not in file.mode
self.write = self.buffer.write if self.writable else None
def read(self):
while True:
b = os.read(self._fd, max(os.fstat(self._fd).st_size, BUFSIZE))
if not b:
break
ptr = self.buffer.tell()
self.buffer.seek(0, 2), self.buffer.write(b), self.buffer.seek(ptr)
self.newlines = 0
return self.buffer.read()
def readline(self):
while self.newlines == 0:
b = os.read(self._fd, max(os.fstat(self._fd).st_size, BUFSIZE))
self.newlines = b.count(b"\n") + (not b)
ptr = self.buffer.tell()
self.buffer.seek(0, 2), self.buffer.write(b), self.buffer.seek(ptr)
self.newlines -= 1
return self.buffer.readline()
def flush(self):
if self.writable:
os.write(self._fd, self.buffer.getvalue())
self.buffer.truncate(0), self.buffer.seek(0)
class IOWrapper(IOBase):
def __init__(self, file):
self.buffer = FastIO(file)
self.flush = self.buffer.flush
self.writable = self.buffer.writable
self.write = lambda s: self.buffer.write(s.encode("ascii"))
self.read = lambda: self.buffer.read().decode("ascii")
self.readline = lambda: self.buffer.readline().decode("ascii")
sys.stdin, sys.stdout = IOWrapper(sys.stdin), IOWrapper(sys.stdout)
input = lambda: sys.stdin.readline().rstrip("\r\n")
from math import inf, log2
class SegmentTree:
def __init__(self, array):
self.n = len(array)
self.size = 2**(int(log2(self.n-1))+1) if self.n != 1 else 1
self.func = lambda a,b: a+b
self.default = 0 if self.func != min else inf
self.data = [self.default] * (2 * self.size)
self.process(array)
def process(self, array):
self.data[self.size : self.size+self.n] = array
for i in range(self.size-1, -1, -1):
self.data[i] = self.func(self.data[2*i], self.data[2*i+1])
def query(self, alpha, omega):
if alpha == omega:
return self.data[alpha + self.size]
res = self.default
alpha += self.size
omega += self.size + 1
while alpha < omega:
if alpha & 1:
res = self.func(res, self.data[alpha])
alpha += 1
if omega & 1:
omega -= 1
res = self.func(res, self.data[omega])
alpha >>= 1
omega >>= 1
return res
def update(self, index):
index += self.size
self.data[index] = 1 - self.data[index]
index >>= 1
while index:
self.data[index] = self.func(self.data[2*index], self.data[2*index+1])
index >>= 1
n, m = map(int, input().split())
a = list(map(int, input().split()))
st = SegmentTree(a)
for i in range(m):
x, y = map(int, input().split())
if x == 1:
st.update(y)
else:
y += 1
alpha, omega = 0, n - 1
while alpha < omega:
mid = (alpha + omega) // 2
if st.query(alpha, mid) < y:
y -= st.query(alpha, mid)
alpha = mid + 1
else:
omega = mid
print(omega)
| true | true |
f738b8ca2d0911c6dc22a033fd172ad4961cd0f4 | 756 | bzl | Python | source/bazel/deps/openal/get.bzl | luxe/CodeLang-compiler | 78837d90bdd09c4b5aabbf0586a5d8f8f0c1e76a | [
"MIT"
] | 1 | 2019-01-06T08:45:46.000Z | 2019-01-06T08:45:46.000Z | source/bazel/deps/openal/get.bzl | luxe/CodeLang-compiler | 78837d90bdd09c4b5aabbf0586a5d8f8f0c1e76a | [
"MIT"
] | 264 | 2015-11-30T08:34:00.000Z | 2018-06-26T02:28:41.000Z | source/bazel/deps/openal/get.bzl | UniLang/compiler | c338ee92994600af801033a37dfb2f1a0c9ca897 | [
"MIT"
] | null | null | null | # Do not edit this file directly.
# It was auto-generated by: code/programs/reflexivity/reflexive_refresh
load("@bazel_tools//tools/build_defs/repo:http.bzl", "http_archive")
load("@bazel_tools//tools/build_defs/repo:http.bzl", "http_file")
def openal():
http_archive(
name = "openal",
build_file = "//bazel/deps/openal:build.BUILD",
sha256 = "992d88ffb89a9a8fd2ed42936caaca20fb3cee3f23ac8a07de568087e37cae88",
strip_prefix = "OpenAL-631ad459dda5f5598863f7efc21070347aed757d",
urls = [
"https://github.com/Unilang/OpenAL/archive/631ad459dda5f5598863f7efc21070347aed757d.tar.gz",
],
patch_cmds = [
"sed -i '/HAVE_IEEEFP_H/d' OpenAL32/Include/config.h",
],
)
| 37.8 | 104 | 0.681217 |
load("@bazel_tools//tools/build_defs/repo:http.bzl", "http_archive")
load("@bazel_tools//tools/build_defs/repo:http.bzl", "http_file")
def openal():
http_archive(
name = "openal",
build_file = "//bazel/deps/openal:build.BUILD",
sha256 = "992d88ffb89a9a8fd2ed42936caaca20fb3cee3f23ac8a07de568087e37cae88",
strip_prefix = "OpenAL-631ad459dda5f5598863f7efc21070347aed757d",
urls = [
"https://github.com/Unilang/OpenAL/archive/631ad459dda5f5598863f7efc21070347aed757d.tar.gz",
],
patch_cmds = [
"sed -i '/HAVE_IEEEFP_H/d' OpenAL32/Include/config.h",
],
)
| true | true |
f738bab7247e42f74f5264add35fd0c6d547c325 | 2,590 | py | Python | webshop/urls.py | it-teaching-abo-akademi/webshop-project-arnelimperial | 98fc7bd8ce031a50c0bd83a6c5c48ed93030e528 | [
"Unlicense"
] | null | null | null | webshop/urls.py | it-teaching-abo-akademi/webshop-project-arnelimperial | 98fc7bd8ce031a50c0bd83a6c5c48ed93030e528 | [
"Unlicense"
] | null | null | null | webshop/urls.py | it-teaching-abo-akademi/webshop-project-arnelimperial | 98fc7bd8ce031a50c0bd83a6c5c48ed93030e528 | [
"Unlicense"
] | null | null | null | from django.contrib import admin
from django.urls import path, include, re_path
from core.views import client_view
from django.conf import settings
from django.conf.urls.static import static
from django.http import HttpResponseRedirect
from django.views import defaults as default_views
from rest_framework.authtoken.views import obtain_auth_token
from users.admin import admin_log
urlpatterns = [
#path(settings.ADMIN_URL, admin.site.urls),
path(settings.ADMIN_URL_OTP, admin_log.urls),
path("accounts/", include("allauth.urls")),
path("users/", include('users.urls', namespace='users')),
path("merchandises/", include('merchandises.urls', namespace='merchandises')),
path("carts/", include('carts.urls', namespace='carts')),
path("purchases/", include('purchases.urls', namespace='purchases')),
path("initial/", include('initial.urls', namespace='initial')),
path('favicon.ico', lambda x: HttpResponseRedirect(settings.STATIC_URL + 'favicon.ico')),
path('api.html', lambda x: HttpResponseRedirect(settings.STATIC_URL + 'api.html')),
path('robots.txt', lambda x: HttpResponseRedirect(settings.STATIC_URL + 'robots.txt')),
] + static(settings.MEDIA_URL, document_root=settings.MEDIA_ROOT)
# API URLS
urlpatterns += [
# API base url
path("api/", include("webshop.api_router")),
# DRF auth token
path("auth-token/", obtain_auth_token),
# Login via browsable api
path("api-auth/", include("rest_framework.urls")),
# Login via REST
path("api/rest-auth/", include("rest_auth.urls")),
# Registration via REST
path("api/rest-auth/registration/", include("rest_auth.registration.urls")),
]
if settings.DEBUG:
# This allows the error pages to be debugged during development, just visit
# these url in browser to see how these error pages look like.
urlpatterns += [
path(
"400/",
default_views.bad_request,
kwargs={"exception": Exception("Bad Request!")},
),
path(
"403/",
default_views.permission_denied,
kwargs={"exception": Exception("Permission Denied")},
),
path(
"404/",
default_views.page_not_found,
kwargs={"exception": Exception("Page not Found")},
),
path("500/", default_views.server_error),
]
# URL patterns for frontend
urlpatterns += [
#path("", include('core.urls', namespace='core')),
# Catch all routes and redirect to index.html
re_path(r"^.*$", view=client_view, name="client-app"),
] | 34.533333 | 93 | 0.666409 | from django.contrib import admin
from django.urls import path, include, re_path
from core.views import client_view
from django.conf import settings
from django.conf.urls.static import static
from django.http import HttpResponseRedirect
from django.views import defaults as default_views
from rest_framework.authtoken.views import obtain_auth_token
from users.admin import admin_log
urlpatterns = [
path(settings.ADMIN_URL_OTP, admin_log.urls),
path("accounts/", include("allauth.urls")),
path("users/", include('users.urls', namespace='users')),
path("merchandises/", include('merchandises.urls', namespace='merchandises')),
path("carts/", include('carts.urls', namespace='carts')),
path("purchases/", include('purchases.urls', namespace='purchases')),
path("initial/", include('initial.urls', namespace='initial')),
path('favicon.ico', lambda x: HttpResponseRedirect(settings.STATIC_URL + 'favicon.ico')),
path('api.html', lambda x: HttpResponseRedirect(settings.STATIC_URL + 'api.html')),
path('robots.txt', lambda x: HttpResponseRedirect(settings.STATIC_URL + 'robots.txt')),
] + static(settings.MEDIA_URL, document_root=settings.MEDIA_ROOT)
urlpatterns += [
path("api/", include("webshop.api_router")),
path("auth-token/", obtain_auth_token),
path("api-auth/", include("rest_framework.urls")),
path("api/rest-auth/", include("rest_auth.urls")),
path("api/rest-auth/registration/", include("rest_auth.registration.urls")),
]
if settings.DEBUG:
urlpatterns += [
path(
"400/",
default_views.bad_request,
kwargs={"exception": Exception("Bad Request!")},
),
path(
"403/",
default_views.permission_denied,
kwargs={"exception": Exception("Permission Denied")},
),
path(
"404/",
default_views.page_not_found,
kwargs={"exception": Exception("Page not Found")},
),
path("500/", default_views.server_error),
]
urlpatterns += [
re_path(r"^.*$", view=client_view, name="client-app"),
] | true | true |
f738bbdc72e5367719d660a3595ad56aa23dcc85 | 3,675 | py | Python | src/cryptography/hazmat/primitives/kdf/hkdf.py | balabit-deps/balabit-os-6-python-cryptography | c31d184a56a18bad89a6444313367be71b5b0877 | [
"Apache-2.0",
"BSD-3-Clause"
] | 674 | 2015-11-06T04:22:47.000Z | 2022-02-26T17:31:43.000Z | src/cryptography/hazmat/primitives/kdf/hkdf.py | balabit-deps/balabit-os-6-python-cryptography | c31d184a56a18bad89a6444313367be71b5b0877 | [
"Apache-2.0",
"BSD-3-Clause"
] | 713 | 2015-11-06T10:48:58.000Z | 2018-11-27T16:32:18.000Z | src/cryptography/hazmat/primitives/kdf/hkdf.py | balabit-deps/balabit-os-6-python-cryptography | c31d184a56a18bad89a6444313367be71b5b0877 | [
"Apache-2.0",
"BSD-3-Clause"
] | 106 | 2015-12-07T11:21:06.000Z | 2022-03-11T10:58:41.000Z | # This file is dual licensed under the terms of the Apache License, Version
# 2.0, and the BSD License. See the LICENSE file in the root of this repository
# for complete details.
from __future__ import absolute_import, division, print_function
import six
from cryptography import utils
from cryptography.exceptions import (
AlreadyFinalized, InvalidKey, UnsupportedAlgorithm, _Reasons
)
from cryptography.hazmat.backends.interfaces import HMACBackend
from cryptography.hazmat.primitives import constant_time, hmac
from cryptography.hazmat.primitives.kdf import KeyDerivationFunction
@utils.register_interface(KeyDerivationFunction)
class HKDF(object):
def __init__(self, algorithm, length, salt, info, backend):
if not isinstance(backend, HMACBackend):
raise UnsupportedAlgorithm(
"Backend object does not implement HMACBackend.",
_Reasons.BACKEND_MISSING_INTERFACE
)
self._algorithm = algorithm
if not (salt is None or isinstance(salt, bytes)):
raise TypeError("salt must be bytes.")
if salt is None:
salt = b"\x00" * (self._algorithm.digest_size // 8)
self._salt = salt
self._backend = backend
self._hkdf_expand = HKDFExpand(self._algorithm, length, info, backend)
def _extract(self, key_material):
h = hmac.HMAC(self._salt, self._algorithm, backend=self._backend)
h.update(key_material)
return h.finalize()
def derive(self, key_material):
if not isinstance(key_material, bytes):
raise TypeError("key_material must be bytes.")
return self._hkdf_expand.derive(self._extract(key_material))
def verify(self, key_material, expected_key):
if not constant_time.bytes_eq(self.derive(key_material), expected_key):
raise InvalidKey
@utils.register_interface(KeyDerivationFunction)
class HKDFExpand(object):
def __init__(self, algorithm, length, info, backend):
if not isinstance(backend, HMACBackend):
raise UnsupportedAlgorithm(
"Backend object does not implement HMACBackend.",
_Reasons.BACKEND_MISSING_INTERFACE
)
self._algorithm = algorithm
self._backend = backend
max_length = 255 * (algorithm.digest_size // 8)
if length > max_length:
raise ValueError(
"Can not derive keys larger than {0} octets.".format(
max_length
))
self._length = length
if not (info is None or isinstance(info, bytes)):
raise TypeError("info must be bytes.")
if info is None:
info = b""
self._info = info
self._used = False
def _expand(self, key_material):
output = [b""]
counter = 1
while (self._algorithm.digest_size // 8) * len(output) < self._length:
h = hmac.HMAC(key_material, self._algorithm, backend=self._backend)
h.update(output[-1])
h.update(self._info)
h.update(six.int2byte(counter))
output.append(h.finalize())
counter += 1
return b"".join(output)[:self._length]
def derive(self, key_material):
if not isinstance(key_material, bytes):
raise TypeError("key_material must be bytes.")
if self._used:
raise AlreadyFinalized
self._used = True
return self._expand(key_material)
def verify(self, key_material, expected_key):
if not constant_time.bytes_eq(self.derive(key_material), expected_key):
raise InvalidKey
| 31.410256 | 79 | 0.647075 |
from __future__ import absolute_import, division, print_function
import six
from cryptography import utils
from cryptography.exceptions import (
AlreadyFinalized, InvalidKey, UnsupportedAlgorithm, _Reasons
)
from cryptography.hazmat.backends.interfaces import HMACBackend
from cryptography.hazmat.primitives import constant_time, hmac
from cryptography.hazmat.primitives.kdf import KeyDerivationFunction
@utils.register_interface(KeyDerivationFunction)
class HKDF(object):
def __init__(self, algorithm, length, salt, info, backend):
if not isinstance(backend, HMACBackend):
raise UnsupportedAlgorithm(
"Backend object does not implement HMACBackend.",
_Reasons.BACKEND_MISSING_INTERFACE
)
self._algorithm = algorithm
if not (salt is None or isinstance(salt, bytes)):
raise TypeError("salt must be bytes.")
if salt is None:
salt = b"\x00" * (self._algorithm.digest_size // 8)
self._salt = salt
self._backend = backend
self._hkdf_expand = HKDFExpand(self._algorithm, length, info, backend)
def _extract(self, key_material):
h = hmac.HMAC(self._salt, self._algorithm, backend=self._backend)
h.update(key_material)
return h.finalize()
def derive(self, key_material):
if not isinstance(key_material, bytes):
raise TypeError("key_material must be bytes.")
return self._hkdf_expand.derive(self._extract(key_material))
def verify(self, key_material, expected_key):
if not constant_time.bytes_eq(self.derive(key_material), expected_key):
raise InvalidKey
@utils.register_interface(KeyDerivationFunction)
class HKDFExpand(object):
def __init__(self, algorithm, length, info, backend):
if not isinstance(backend, HMACBackend):
raise UnsupportedAlgorithm(
"Backend object does not implement HMACBackend.",
_Reasons.BACKEND_MISSING_INTERFACE
)
self._algorithm = algorithm
self._backend = backend
max_length = 255 * (algorithm.digest_size // 8)
if length > max_length:
raise ValueError(
"Can not derive keys larger than {0} octets.".format(
max_length
))
self._length = length
if not (info is None or isinstance(info, bytes)):
raise TypeError("info must be bytes.")
if info is None:
info = b""
self._info = info
self._used = False
def _expand(self, key_material):
output = [b""]
counter = 1
while (self._algorithm.digest_size // 8) * len(output) < self._length:
h = hmac.HMAC(key_material, self._algorithm, backend=self._backend)
h.update(output[-1])
h.update(self._info)
h.update(six.int2byte(counter))
output.append(h.finalize())
counter += 1
return b"".join(output)[:self._length]
def derive(self, key_material):
if not isinstance(key_material, bytes):
raise TypeError("key_material must be bytes.")
if self._used:
raise AlreadyFinalized
self._used = True
return self._expand(key_material)
def verify(self, key_material, expected_key):
if not constant_time.bytes_eq(self.derive(key_material), expected_key):
raise InvalidKey
| true | true |
f738bce587654bcba0eb98a3fd5794ef99517ba5 | 45 | py | Python | Python/basics/one.py | Tikam02/Data_Structure_Algorithms | 7c17f744975a72fa42f0f3f892c0b7e041cdef0c | [
"MIT"
] | null | null | null | Python/basics/one.py | Tikam02/Data_Structure_Algorithms | 7c17f744975a72fa42f0f3f892c0b7e041cdef0c | [
"MIT"
] | null | null | null | Python/basics/one.py | Tikam02/Data_Structure_Algorithms | 7c17f744975a72fa42f0f3f892c0b7e041cdef0c | [
"MIT"
] | null | null | null | print(*range(1, int(input()) + 1), sep="")
| 22.5 | 44 | 0.511111 | print(*range(1, int(input()) + 1), sep="")
| true | true |
f738be281d99251b10d7793746ea257565038f81 | 147 | py | Python | socialcrawl/auth_sample.py | enterstudio/socialcrawl | 8ad0081db3eafe96132291e579984e504c48021f | [
"MIT"
] | 13 | 2015-03-04T22:34:28.000Z | 2021-12-26T06:27:21.000Z | socialcrawl/auth_sample.py | MrMugiwara/socialcrawl | 8ad0081db3eafe96132291e579984e504c48021f | [
"MIT"
] | 2 | 2017-01-28T21:45:53.000Z | 2018-08-05T10:58:15.000Z | socialcrawl/auth_sample.py | enterstudio/socialcrawl | 8ad0081db3eafe96132291e579984e504c48021f | [
"MIT"
] | 4 | 2016-09-19T20:29:34.000Z | 2020-07-24T18:04:00.000Z | """Authentication tokens for twitter and facebook"""
TWITTER_ACCESS_TOKEN = "MyTwitterAccessToken"
FACEBOOK_ACCESS_TOKEN = "MyFacebookAccessToken"
| 36.75 | 52 | 0.836735 | TWITTER_ACCESS_TOKEN = "MyTwitterAccessToken"
FACEBOOK_ACCESS_TOKEN = "MyFacebookAccessToken"
| true | true |
f738be426977d8a67731ddcf2d9214283a77f7e5 | 6,713 | py | Python | asposetaskscloud/models/aspose_response.py | aspose-tasks-cloud/aspose-tasks-cloud-python | d1852a02fb1aa2591501a34d5e56079f8aac43f0 | [
"MIT"
] | 2 | 2021-08-16T09:25:51.000Z | 2022-01-27T20:20:41.000Z | asposetaskscloud/models/aspose_response.py | aspose-tasks-cloud/aspose-tasks-cloud-python | d1852a02fb1aa2591501a34d5e56079f8aac43f0 | [
"MIT"
] | null | null | null | asposetaskscloud/models/aspose_response.py | aspose-tasks-cloud/aspose-tasks-cloud-python | d1852a02fb1aa2591501a34d5e56079f8aac43f0 | [
"MIT"
] | null | null | null | # coding: utf-8
# -----------------------------------------------------------------------------------
# <copyright company="Aspose" file="AsposeResponse.py">
# Copyright (c) 2020 Aspose.Tasks Cloud
# </copyright>
# <summary>
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in all
# copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
# </summary>
# -----------------------------------------------------------------------------------
import pprint
import re # noqa: F401
import six
class AsposeResponse(object):
"""
"""
"""
Attributes:
swagger_types (dict): The key is attribute name
and the value is attribute type.
attribute_map (dict): The key is attribute name
and the value is json key in definition.
"""
swagger_types = {
'code': 'int',
'status': 'str'
}
attribute_map = {
'code': 'code',
'status': 'status'
}
discriminator_value_class_map = {
'TaskLinkResponse': 'TaskLinkResponse',
'ExtendedAttributeResponse': 'ExtendedAttributeResponse',
'TimephasedDataResponse': 'TimephasedDataResponse',
'ExtendedAttributeItemsResponse': 'ExtendedAttributeItemsResponse',
'PageCountResponse': 'PageCountResponse',
'TaskResponse': 'TaskResponse',
'RecurringInfoResponse': 'RecurringInfoResponse',
'ExtendedAttributeItemResponse': 'ExtendedAttributeItemResponse',
'AssignmentResponse': 'AssignmentResponse',
'CalendarItemsResponse': 'CalendarItemsResponse',
'OutlineCodeItemsResponse': 'OutlineCodeItemsResponse',
'ResourceResponse': 'ResourceResponse',
'OutlineCodeResponse': 'OutlineCodeResponse',
'TaskLinksResponse': 'TaskLinksResponse',
'AssignmentItemResponse': 'AssignmentItemResponse',
'WBSDefinitionResponse': 'WBSDefinitionResponse',
'ProjectIdsResponse': 'ProjectIdsResponse',
'AssignmentItemsResponse': 'AssignmentItemsResponse',
'CalendarExceptionsResponse': 'CalendarExceptionsResponse',
'ProjectListResponse': 'ProjectListResponse',
'CalendarWorkWeeksResponse': 'CalendarWorkWeeksResponse',
'TaskItemsResponse': 'TaskItemsResponse',
'VbaProjectResponse': 'VbaProjectResponse',
'ProjectRecalculateResponse': 'ProjectRecalculateResponse',
'CalendarResponse': 'CalendarResponse',
'DocumentPropertyResponse': 'DocumentPropertyResponse',
'ResourceItemsResponse': 'ResourceItemsResponse',
'AssignmentsResponse': 'AssignmentsResponse',
'CalendarItemResponse': 'CalendarItemResponse',
'ResourceItemResponse': 'ResourceItemResponse',
'DocumentPropertiesResponse': 'DocumentPropertiesResponse',
'TaskItemResponse': 'TaskItemResponse'
}
def __init__(self, code=None, status=None): # noqa: E501
"""AsposeResponse - a model defined in Swagger""" # noqa: E501
self._code = None
self._status = None
self.discriminator = 'Type'
if code is not None:
self.code = code
if status is not None:
self.status = status
@property
def code(self):
"""Gets the code of this AsposeResponse. # noqa: E501
Response status code # noqa: E501
:return: The code of this AsposeResponse. # noqa: E501
:rtype: int
"""
return self._code
@code.setter
def code(self, code):
"""Sets the code of this AsposeResponse.
Response status code # noqa: E501
:param code: The code of this AsposeResponse. # noqa: E501
:type: int
"""
if code is None:
raise ValueError("Invalid value for `code`, must not be `None`") # noqa: E501
self._code = code
@property
def status(self):
"""Gets the status of this AsposeResponse. # noqa: E501
Response status # noqa: E501
:return: The status of this AsposeResponse. # noqa: E501
:rtype: str
"""
return self._status
@status.setter
def status(self, status):
"""Sets the status of this AsposeResponse.
Response status # noqa: E501
:param status: The status of this AsposeResponse. # noqa: E501
:type: str
"""
self._status = status
def to_dict(self):
"""Returns the model properties as a dict"""
result = {}
for attr, _ in six.iteritems(self.swagger_types):
value = getattr(self, attr)
if isinstance(value, list):
result[attr] = list(map(
lambda x: x.to_dict() if hasattr(x, "to_dict") else x,
value
))
elif hasattr(value, "to_dict"):
result[attr] = value.to_dict()
elif isinstance(value, dict):
result[attr] = dict(map(
lambda item: (item[0], item[1].to_dict())
if hasattr(item[1], "to_dict") else item,
value.items()
))
else:
result[attr] = value
return result
def to_str(self):
"""Returns the string representation of the model"""
return pprint.pformat(self.to_dict())
def __repr__(self):
"""For `print` and `pprint`"""
return self.to_str()
def __eq__(self, other):
"""Returns true if both objects are equal"""
if not isinstance(other, AsposeResponse):
return False
return self.__dict__ == other.__dict__
def __ne__(self, other):
"""Returns true if both objects are not equal"""
return not self == other
| 36.091398 | 90 | 0.614032 |
import pprint
import re
import six
class AsposeResponse(object):
swagger_types = {
'code': 'int',
'status': 'str'
}
attribute_map = {
'code': 'code',
'status': 'status'
}
discriminator_value_class_map = {
'TaskLinkResponse': 'TaskLinkResponse',
'ExtendedAttributeResponse': 'ExtendedAttributeResponse',
'TimephasedDataResponse': 'TimephasedDataResponse',
'ExtendedAttributeItemsResponse': 'ExtendedAttributeItemsResponse',
'PageCountResponse': 'PageCountResponse',
'TaskResponse': 'TaskResponse',
'RecurringInfoResponse': 'RecurringInfoResponse',
'ExtendedAttributeItemResponse': 'ExtendedAttributeItemResponse',
'AssignmentResponse': 'AssignmentResponse',
'CalendarItemsResponse': 'CalendarItemsResponse',
'OutlineCodeItemsResponse': 'OutlineCodeItemsResponse',
'ResourceResponse': 'ResourceResponse',
'OutlineCodeResponse': 'OutlineCodeResponse',
'TaskLinksResponse': 'TaskLinksResponse',
'AssignmentItemResponse': 'AssignmentItemResponse',
'WBSDefinitionResponse': 'WBSDefinitionResponse',
'ProjectIdsResponse': 'ProjectIdsResponse',
'AssignmentItemsResponse': 'AssignmentItemsResponse',
'CalendarExceptionsResponse': 'CalendarExceptionsResponse',
'ProjectListResponse': 'ProjectListResponse',
'CalendarWorkWeeksResponse': 'CalendarWorkWeeksResponse',
'TaskItemsResponse': 'TaskItemsResponse',
'VbaProjectResponse': 'VbaProjectResponse',
'ProjectRecalculateResponse': 'ProjectRecalculateResponse',
'CalendarResponse': 'CalendarResponse',
'DocumentPropertyResponse': 'DocumentPropertyResponse',
'ResourceItemsResponse': 'ResourceItemsResponse',
'AssignmentsResponse': 'AssignmentsResponse',
'CalendarItemResponse': 'CalendarItemResponse',
'ResourceItemResponse': 'ResourceItemResponse',
'DocumentPropertiesResponse': 'DocumentPropertiesResponse',
'TaskItemResponse': 'TaskItemResponse'
}
def __init__(self, code=None, status=None):
self._code = None
self._status = None
self.discriminator = 'Type'
if code is not None:
self.code = code
if status is not None:
self.status = status
@property
def code(self):
return self._code
@code.setter
def code(self, code):
if code is None:
raise ValueError("Invalid value for `code`, must not be `None`")
self._code = code
@property
def status(self):
return self._status
@status.setter
def status(self, status):
self._status = status
def to_dict(self):
result = {}
for attr, _ in six.iteritems(self.swagger_types):
value = getattr(self, attr)
if isinstance(value, list):
result[attr] = list(map(
lambda x: x.to_dict() if hasattr(x, "to_dict") else x,
value
))
elif hasattr(value, "to_dict"):
result[attr] = value.to_dict()
elif isinstance(value, dict):
result[attr] = dict(map(
lambda item: (item[0], item[1].to_dict())
if hasattr(item[1], "to_dict") else item,
value.items()
))
else:
result[attr] = value
return result
def to_str(self):
return pprint.pformat(self.to_dict())
def __repr__(self):
return self.to_str()
def __eq__(self, other):
if not isinstance(other, AsposeResponse):
return False
return self.__dict__ == other.__dict__
def __ne__(self, other):
return not self == other
| true | true |
f738bfa6b1263c0bb166fb26f531cc3af19dccd5 | 1,706 | py | Python | PYTHON/practice/lru_cache/doubly_linked_list.py | Web-Dev-Collaborative/DS-ALGO-OFFICIAL | 6d7195d33c28a0fe22f12231efffb39f4bf05c97 | [
"Apache-2.0"
] | 71 | 2019-03-05T04:44:48.000Z | 2022-03-24T09:47:48.000Z | PYTHON/practice/lru_cache/doubly_linked_list.py | Web-Dev-Collaborative/DS-ALGO-OFFICIAL | 6d7195d33c28a0fe22f12231efffb39f4bf05c97 | [
"Apache-2.0"
] | null | null | null | PYTHON/practice/lru_cache/doubly_linked_list.py | Web-Dev-Collaborative/DS-ALGO-OFFICIAL | 6d7195d33c28a0fe22f12231efffb39f4bf05c97 | [
"Apache-2.0"
] | 37 | 2019-03-07T05:08:03.000Z | 2022-01-05T11:32:51.000Z | """Each ListNode holds a reference to its previous node
as well as its next node in the List."""
class ListNode:
def __init__(self, value, prev=None, next=None):
self.value = value
self.prev = prev
self.next = next
"""Wrap the given value in a ListNode and insert it
after this node. Note that this node could already
have a next node it is point to."""
def insert_after(self, value):
current_next = self.next
self.next = ListNode(value, self, current_next)
if current_next:
current_next.prev = self.next
"""Wrap the given value in a ListNode and insert it
before this node. Note that this node could already
have a previous node it is point to."""
def insert_before(self, value):
current_prev = self.prev
self.prev = ListNode(value, current_prev, self)
if current_prev:
current_prev.next = self.prev
"""Rearranges this ListNode's previous and next pointers
accordingly, effectively deleting this ListNode."""
def delete(self):
if self.prev:
self.prev.next = self.next
if self.next:
self.next.prev = self.prev
"""Our doubly-linked list class. It holds references to
the list's head and tail nodes."""
class DoublyLinkedList:
def __init__(self, node=None):
self.head = node
self.tail = node
self.length = 1 if node is not None else 0
def __len__(self):
return self.length
def add_to_head(self, value):
pass
def remove_from_head(self):
pass
def add_to_tail(self, value):
pass
def remove_from_tail(self):
pass
def move_to_front(self, node):
pass
def move_to_end(self, node):
pass
def delete(self, node):
pass
def get_max(self):
pass | 25.088235 | 58 | 0.685229 | class ListNode:
def __init__(self, value, prev=None, next=None):
self.value = value
self.prev = prev
self.next = next
def insert_after(self, value):
current_next = self.next
self.next = ListNode(value, self, current_next)
if current_next:
current_next.prev = self.next
def insert_before(self, value):
current_prev = self.prev
self.prev = ListNode(value, current_prev, self)
if current_prev:
current_prev.next = self.prev
def delete(self):
if self.prev:
self.prev.next = self.next
if self.next:
self.next.prev = self.prev
class DoublyLinkedList:
def __init__(self, node=None):
self.head = node
self.tail = node
self.length = 1 if node is not None else 0
def __len__(self):
return self.length
def add_to_head(self, value):
pass
def remove_from_head(self):
pass
def add_to_tail(self, value):
pass
def remove_from_tail(self):
pass
def move_to_front(self, node):
pass
def move_to_end(self, node):
pass
def delete(self, node):
pass
def get_max(self):
pass | true | true |
f738c00af6e1c6e7598042f41faf3534b5051ca6 | 218 | py | Python | codeforces.com/202A/solution.py | zubtsov/competitive-programming | 919d63130144347d7f6eddcf8f5bc2afb85fddf3 | [
"MIT"
] | null | null | null | codeforces.com/202A/solution.py | zubtsov/competitive-programming | 919d63130144347d7f6eddcf8f5bc2afb85fddf3 | [
"MIT"
] | null | null | null | codeforces.com/202A/solution.py | zubtsov/competitive-programming | 919d63130144347d7f6eddcf8f5bc2afb85fddf3 | [
"MIT"
] | null | null | null | s = input()
count = 1
max_char = s[0]
for i in range(1, len(s)):
if ascii(s[i]) > ascii(max_char):
max_char = s[i]
count = 1
elif s[i] == max_char:
count += 1
print(max_char * count)
| 15.571429 | 37 | 0.522936 | s = input()
count = 1
max_char = s[0]
for i in range(1, len(s)):
if ascii(s[i]) > ascii(max_char):
max_char = s[i]
count = 1
elif s[i] == max_char:
count += 1
print(max_char * count)
| true | true |
f738c0be232c9371760bd3db4e9fc48ae12ce15a | 6,673 | py | Python | zerver/webhooks/splunk/tests.py | TylerPham2000/zulip | 2e7aaba0dde5517b4a55cb0bd782f009be45e3ba | [
"Apache-2.0"
] | 2 | 2020-11-12T12:28:46.000Z | 2020-11-16T11:17:46.000Z | zerver/webhooks/splunk/tests.py | TylerPham2000/zulip | 2e7aaba0dde5517b4a55cb0bd782f009be45e3ba | [
"Apache-2.0"
] | 1 | 2021-08-05T14:46:02.000Z | 2021-08-05T14:46:02.000Z | zerver/webhooks/splunk/tests.py | TylerPham2000/zulip | 2e7aaba0dde5517b4a55cb0bd782f009be45e3ba | [
"Apache-2.0"
] | 1 | 2021-08-05T14:27:13.000Z | 2021-08-05T14:27:13.000Z | from zerver.lib.test_classes import WebhookTestCase
class SplunkHookTests(WebhookTestCase):
STREAM_NAME = "splunk"
URL_TEMPLATE = "/api/v1/external/splunk?api_key={api_key}&stream={stream}"
FIXTURE_DIR_NAME = "splunk"
def test_splunk_search_one_result(self) -> None:
self.url = self.build_webhook_url(topic="New Search Alert")
# define the expected message contents
expected_topic = "New Search Alert"
expected_message = """
Splunk alert from saved search:
* **Search**: [sudo](http://example.com:8000/app/search/search?q=%7Cloadjob%20rt_scheduler__admin__search__sudo_at_1483557185_2.2%20%7C%20head%201%20%7C%20tail%201&earliest=0&latest=now)
* **Host**: myserver
* **Source**: `/var/log/auth.log`
* **Raw**: `Jan 4 11:14:32 myserver sudo: pam_unix(sudo:session): session closed for user root`
""".strip()
# using fixture named splunk_search_one_result, execute this test
self.check_webhook(
"search_one_result",
expected_topic,
expected_message,
content_type="application/x-www-form-urlencoded",
)
def test_splunk_short_search_name(self) -> None:
# don't provide a topic so the search name is used instead
expected_topic = "This search's name isn't that long"
expected_message = """
Splunk alert from saved search:
* **Search**: [This search's name isn't that long](http://example.com:8000/app/search/search?q=%7Cloadjob%20rt_scheduler__admin__search__sudo_at_1483557185_2.2%20%7C%20head%201%20%7C%20tail%201&earliest=0&latest=now)
* **Host**: myserver
* **Source**: `/var/log/auth.log`
* **Raw**: `Jan 4 11:14:32 myserver sudo: pam_unix(sudo:session): session closed for user root`
""".strip()
self.check_webhook(
"short_search_name",
expected_topic,
expected_message,
content_type="application/x-www-form-urlencoded",
)
def test_splunk_long_search_name(self) -> None:
# don't provide a topic so the search name is used instead
expected_topic = "this-search's-got-47-words-37-sentences-58-words-we-wanna..."
expected_message = """
Splunk alert from saved search:
* **Search**: [this-search's-got-47-words-37-sentences-58-words-we-wanna-know-details-of-the-search-time-of-the-search-and-any-other-kind-of-thing-you-gotta-say-pertaining-to-and-about-the-search-I-want-to-know-authenticated-user's-name-and-any-other-kind-of-thing-you-gotta-say](http://example.com:8000/app/search/search?q=%7Cloadjob%20rt_scheduler__admin__search__sudo_at_1483557185_2.2%20%7C%20head%201%20%7C%20tail%201&earliest=0&latest=now)
* **Host**: myserver
* **Source**: `/var/log/auth.log`
* **Raw**: `Jan 4 11:14:32 myserver sudo: pam_unix(sudo:session): session closed for user root`
""".strip()
self.check_webhook(
"long_search_name",
expected_topic,
expected_message,
content_type="application/x-www-form-urlencoded",
)
def test_splunk_missing_results_link(self) -> None:
self.url = self.build_webhook_url(topic="New Search Alert")
expected_topic = "New Search Alert"
expected_message = """
Splunk alert from saved search:
* **Search**: [sudo](Missing results_link)
* **Host**: myserver
* **Source**: `/var/log/auth.log`
* **Raw**: `Jan 4 11:14:32 myserver sudo: pam_unix(sudo:session): session closed for user root`
""".strip()
self.check_webhook(
"missing_results_link",
expected_topic,
expected_message,
content_type="application/x-www-form-urlencoded",
)
def test_splunk_missing_search_name(self) -> None:
self.url = self.build_webhook_url(topic="New Search Alert")
expected_topic = "New Search Alert"
expected_message = """
Splunk alert from saved search:
* **Search**: [Missing search_name](http://example.com:8000/app/search/search?q=%7Cloadjob%20rt_scheduler__admin__search__sudo_at_1483557185_2.2%20%7C%20head%201%20%7C%20tail%201&earliest=0&latest=now)
* **Host**: myserver
* **Source**: `/var/log/auth.log`
* **Raw**: `Jan 4 11:14:32 myserver sudo: pam_unix(sudo:session): session closed for user root`
""".strip()
self.check_webhook(
"missing_search_name",
expected_topic,
expected_message,
content_type="application/x-www-form-urlencoded",
)
def test_splunk_missing_host(self) -> None:
self.url = self.build_webhook_url(topic="New Search Alert")
expected_topic = "New Search Alert"
expected_message = """
Splunk alert from saved search:
* **Search**: [sudo](http://example.com:8000/app/search/search?q=%7Cloadjob%20rt_scheduler__admin__search__sudo_at_1483557185_2.2%20%7C%20head%201%20%7C%20tail%201&earliest=0&latest=now)
* **Host**: Missing host
* **Source**: `/var/log/auth.log`
* **Raw**: `Jan 4 11:14:32 myserver sudo: pam_unix(sudo:session): session closed for user root`
""".strip()
self.check_webhook(
"missing_host",
expected_topic,
expected_message,
content_type="application/x-www-form-urlencoded",
)
def test_splunk_missing_source(self) -> None:
self.url = self.build_webhook_url(topic="New Search Alert")
expected_topic = "New Search Alert"
expected_message = """
Splunk alert from saved search:
* **Search**: [sudo](http://example.com:8000/app/search/search?q=%7Cloadjob%20rt_scheduler__admin__search__sudo_at_1483557185_2.2%20%7C%20head%201%20%7C%20tail%201&earliest=0&latest=now)
* **Host**: myserver
* **Source**: `Missing source`
* **Raw**: `Jan 4 11:14:32 myserver sudo: pam_unix(sudo:session): session closed for user root`
""".strip()
self.check_webhook(
"missing_source",
expected_topic,
expected_message,
content_type="application/x-www-form-urlencoded",
)
def test_splunk_missing_raw(self) -> None:
self.url = self.build_webhook_url(topic="New Search Alert")
expected_topic = "New Search Alert"
expected_message = """
Splunk alert from saved search:
* **Search**: [sudo](http://example.com:8000/app/search/search?q=%7Cloadjob%20rt_scheduler__admin__search__sudo_at_1483557185_2.2%20%7C%20head%201%20%7C%20tail%201&earliest=0&latest=now)
* **Host**: myserver
* **Source**: `/var/log/auth.log`
* **Raw**: `Missing _raw`
""".strip()
self.check_webhook(
"missing_raw",
expected_topic,
expected_message,
content_type="application/x-www-form-urlencoded",
)
| 39.720238 | 445 | 0.671212 | from zerver.lib.test_classes import WebhookTestCase
class SplunkHookTests(WebhookTestCase):
STREAM_NAME = "splunk"
URL_TEMPLATE = "/api/v1/external/splunk?api_key={api_key}&stream={stream}"
FIXTURE_DIR_NAME = "splunk"
def test_splunk_search_one_result(self) -> None:
self.url = self.build_webhook_url(topic="New Search Alert")
expected_topic = "New Search Alert"
expected_message = """
Splunk alert from saved search:
* **Search**: [sudo](http://example.com:8000/app/search/search?q=%7Cloadjob%20rt_scheduler__admin__search__sudo_at_1483557185_2.2%20%7C%20head%201%20%7C%20tail%201&earliest=0&latest=now)
* **Host**: myserver
* **Source**: `/var/log/auth.log`
* **Raw**: `Jan 4 11:14:32 myserver sudo: pam_unix(sudo:session): session closed for user root`
""".strip()
self.check_webhook(
"search_one_result",
expected_topic,
expected_message,
content_type="application/x-www-form-urlencoded",
)
def test_splunk_short_search_name(self) -> None:
expected_topic = "This search's name isn't that long"
expected_message = """
Splunk alert from saved search:
* **Search**: [This search's name isn't that long](http://example.com:8000/app/search/search?q=%7Cloadjob%20rt_scheduler__admin__search__sudo_at_1483557185_2.2%20%7C%20head%201%20%7C%20tail%201&earliest=0&latest=now)
* **Host**: myserver
* **Source**: `/var/log/auth.log`
* **Raw**: `Jan 4 11:14:32 myserver sudo: pam_unix(sudo:session): session closed for user root`
""".strip()
self.check_webhook(
"short_search_name",
expected_topic,
expected_message,
content_type="application/x-www-form-urlencoded",
)
def test_splunk_long_search_name(self) -> None:
# don't provide a topic so the search name is used instead
expected_topic = "this-search's-got-47-words-37-sentences-58-words-we-wanna..."
expected_message = """
Splunk alert from saved search:
* **Search**: [this-search's-got-47-words-37-sentences-58-words-we-wanna-know-details-of-the-search-time-of-the-search-and-any-other-kind-of-thing-you-gotta-say-pertaining-to-and-about-the-search-I-want-to-know-authenticated-user's-name-and-any-other-kind-of-thing-you-gotta-say](http://example.com:8000/app/search/search?q=%7Cloadjob%20rt_scheduler__admin__search__sudo_at_1483557185_2.2%20%7C%20head%201%20%7C%20tail%201&earliest=0&latest=now)
* **Host**: myserver
* **Source**: `/var/log/auth.log`
* **Raw**: `Jan 4 11:14:32 myserver sudo: pam_unix(sudo:session): session closed for user root`
""".strip()
self.check_webhook(
"long_search_name",
expected_topic,
expected_message,
content_type="application/x-www-form-urlencoded",
)
def test_splunk_missing_results_link(self) -> None:
self.url = self.build_webhook_url(topic="New Search Alert")
expected_topic = "New Search Alert"
expected_message = """
Splunk alert from saved search:
* **Search**: [sudo](Missing results_link)
* **Host**: myserver
* **Source**: `/var/log/auth.log`
* **Raw**: `Jan 4 11:14:32 myserver sudo: pam_unix(sudo:session): session closed for user root`
""".strip()
self.check_webhook(
"missing_results_link",
expected_topic,
expected_message,
content_type="application/x-www-form-urlencoded",
)
def test_splunk_missing_search_name(self) -> None:
self.url = self.build_webhook_url(topic="New Search Alert")
expected_topic = "New Search Alert"
expected_message = """
Splunk alert from saved search:
* **Search**: [Missing search_name](http://example.com:8000/app/search/search?q=%7Cloadjob%20rt_scheduler__admin__search__sudo_at_1483557185_2.2%20%7C%20head%201%20%7C%20tail%201&earliest=0&latest=now)
* **Host**: myserver
* **Source**: `/var/log/auth.log`
* **Raw**: `Jan 4 11:14:32 myserver sudo: pam_unix(sudo:session): session closed for user root`
""".strip()
self.check_webhook(
"missing_search_name",
expected_topic,
expected_message,
content_type="application/x-www-form-urlencoded",
)
def test_splunk_missing_host(self) -> None:
self.url = self.build_webhook_url(topic="New Search Alert")
expected_topic = "New Search Alert"
expected_message = """
Splunk alert from saved search:
* **Search**: [sudo](http://example.com:8000/app/search/search?q=%7Cloadjob%20rt_scheduler__admin__search__sudo_at_1483557185_2.2%20%7C%20head%201%20%7C%20tail%201&earliest=0&latest=now)
* **Host**: Missing host
* **Source**: `/var/log/auth.log`
* **Raw**: `Jan 4 11:14:32 myserver sudo: pam_unix(sudo:session): session closed for user root`
""".strip()
self.check_webhook(
"missing_host",
expected_topic,
expected_message,
content_type="application/x-www-form-urlencoded",
)
def test_splunk_missing_source(self) -> None:
self.url = self.build_webhook_url(topic="New Search Alert")
expected_topic = "New Search Alert"
expected_message = """
Splunk alert from saved search:
* **Search**: [sudo](http://example.com:8000/app/search/search?q=%7Cloadjob%20rt_scheduler__admin__search__sudo_at_1483557185_2.2%20%7C%20head%201%20%7C%20tail%201&earliest=0&latest=now)
* **Host**: myserver
* **Source**: `Missing source`
* **Raw**: `Jan 4 11:14:32 myserver sudo: pam_unix(sudo:session): session closed for user root`
""".strip()
self.check_webhook(
"missing_source",
expected_topic,
expected_message,
content_type="application/x-www-form-urlencoded",
)
def test_splunk_missing_raw(self) -> None:
self.url = self.build_webhook_url(topic="New Search Alert")
expected_topic = "New Search Alert"
expected_message = """
Splunk alert from saved search:
* **Search**: [sudo](http://example.com:8000/app/search/search?q=%7Cloadjob%20rt_scheduler__admin__search__sudo_at_1483557185_2.2%20%7C%20head%201%20%7C%20tail%201&earliest=0&latest=now)
* **Host**: myserver
* **Source**: `/var/log/auth.log`
* **Raw**: `Missing _raw`
""".strip()
self.check_webhook(
"missing_raw",
expected_topic,
expected_message,
content_type="application/x-www-form-urlencoded",
)
| true | true |
f738c0cf5e85f6761c0e2399f730cb999ee7c2cc | 391 | py | Python | piawareradar/const_touch.py | hippyau/PiAwareRadar | 6cfe30aea8f7bb7d5977d19aeb01cfbe42788a2d | [
"MIT"
] | 24 | 2015-11-11T17:29:45.000Z | 2021-02-06T17:41:35.000Z | piawareradar/const_touch.py | hippyau/PiAwareRadar | 6cfe30aea8f7bb7d5977d19aeb01cfbe42788a2d | [
"MIT"
] | null | null | null | piawareradar/const_touch.py | hippyau/PiAwareRadar | 6cfe30aea8f7bb7d5977d19aeb01cfbe42788a2d | [
"MIT"
] | 3 | 2017-02-10T03:11:48.000Z | 2021-01-22T14:33:30.000Z | BLACK = (0, 0, 0)
GREEN = (0, 255, 0)
SIZE = (800, 480)
RADARRECT = (165, 5, 470, 470)
CLOSERECT = (760, 20, 20, 20)
SCALEPLUSRECT = (760, 50, 20, 20)
SCALENEGRECT = (730, 50, 20, 20)
TITLEPOS = (10, 10)
FLIGHTDATAPOS = (620, 330)
FLIGHTDATARECT = (FLIGHTDATAPOS[0], FLIGHTDATAPOS[1], 180, 150)
FLIGHTDATATIMEOUT = 30
FONT = "monospace"
FONTSIZE = 16
LINESPACE = 20
FLIGHTDATAREFRESH = 1000
| 24.4375 | 63 | 0.667519 | BLACK = (0, 0, 0)
GREEN = (0, 255, 0)
SIZE = (800, 480)
RADARRECT = (165, 5, 470, 470)
CLOSERECT = (760, 20, 20, 20)
SCALEPLUSRECT = (760, 50, 20, 20)
SCALENEGRECT = (730, 50, 20, 20)
TITLEPOS = (10, 10)
FLIGHTDATAPOS = (620, 330)
FLIGHTDATARECT = (FLIGHTDATAPOS[0], FLIGHTDATAPOS[1], 180, 150)
FLIGHTDATATIMEOUT = 30
FONT = "monospace"
FONTSIZE = 16
LINESPACE = 20
FLIGHTDATAREFRESH = 1000
| true | true |
f738c1c2441bd105783c6c401a65daa59455be64 | 1,861 | py | Python | tests/test_mean_std.py | liuzuxin/RL-Safety-Algorithms | 2575225b1ea8ce12e1e13f7a81f8dda7b4189708 | [
"MIT"
] | 4 | 2021-09-05T17:49:02.000Z | 2021-12-22T03:13:39.000Z | tests/test_mean_std.py | liuzuxin/RL-Safety-Algorithms | 2575225b1ea8ce12e1e13f7a81f8dda7b4189708 | [
"MIT"
] | null | null | null | tests/test_mean_std.py | liuzuxin/RL-Safety-Algorithms | 2575225b1ea8ce12e1e13f7a81f8dda7b4189708 | [
"MIT"
] | 2 | 2021-09-05T17:49:07.000Z | 2021-11-30T17:36:30.000Z | import unittest
import numpy as np
import torch
from rl_safety_algorithms.common.online_mean_std import OnlineMeanStd
import rl_safety_algorithms.common.mpi_tools as mpi_tools
class TestOnlineMeanStd(unittest.TestCase):
""" Testing the non-MPI version.
"""
@staticmethod
def perform_single_pass(rms, input_shape) -> bool:
x = torch.from_numpy(np.random.normal(size=input_shape))
rms(x) # perform one call
return True
@staticmethod
def get_data(M, N, epoch):
"""Returns data matrix of shape MxN."""
np.random.seed(epoch)
# start = 10000 + 4 * epoch
# stop = pid*10000 + M * N + 4 * epoch
data = np.random.normal(size=(M, N))
return data
def test_vector_updates(self):
""" OnlineMeanStd module is updated with a batch of vector inputs,
i.e. inputs of shape M x N.
Note that std dev might differ more than 1e-5 when epochs > 10.
"""
epochs = 20
T = 500
obs_shape = (1, )
# === calculation through online updates
rms = OnlineMeanStd(shape=obs_shape)
for ep in range(epochs):
# shape of batch: T x obs_shape
vector_input = self.get_data(T, obs_shape[0], ep).flatten()
rms.update(vector_input)
rms_mean = rms.mean.numpy()
rms_std = rms.std.numpy()
# ===== calculate ground truths
obs_list = [self.get_data(T, obs_shape[0], ep) for ep in range(epochs)]
obs = np.vstack(obs_list)
gt_mean = np.mean(obs, axis=0)
gt_std = np.std(obs, axis=0)
self.assertTrue(np.allclose(rms_mean, gt_mean))
self.assertTrue(np.allclose(rms_std, gt_std, rtol=1e-2))
self.assertTrue(self.perform_single_pass(rms, obs_shape))
if __name__ == '__main__':
unittest.main()
| 32.086207 | 79 | 0.617947 | import unittest
import numpy as np
import torch
from rl_safety_algorithms.common.online_mean_std import OnlineMeanStd
import rl_safety_algorithms.common.mpi_tools as mpi_tools
class TestOnlineMeanStd(unittest.TestCase):
@staticmethod
def perform_single_pass(rms, input_shape) -> bool:
x = torch.from_numpy(np.random.normal(size=input_shape))
rms(x)
return True
@staticmethod
def get_data(M, N, epoch):
np.random.seed(epoch)
data = np.random.normal(size=(M, N))
return data
def test_vector_updates(self):
epochs = 20
T = 500
obs_shape = (1, )
rms = OnlineMeanStd(shape=obs_shape)
for ep in range(epochs):
vector_input = self.get_data(T, obs_shape[0], ep).flatten()
rms.update(vector_input)
rms_mean = rms.mean.numpy()
rms_std = rms.std.numpy()
obs_list = [self.get_data(T, obs_shape[0], ep) for ep in range(epochs)]
obs = np.vstack(obs_list)
gt_mean = np.mean(obs, axis=0)
gt_std = np.std(obs, axis=0)
self.assertTrue(np.allclose(rms_mean, gt_mean))
self.assertTrue(np.allclose(rms_std, gt_std, rtol=1e-2))
self.assertTrue(self.perform_single_pass(rms, obs_shape))
if __name__ == '__main__':
unittest.main()
| true | true |
f738c2aca32bd7a909ae9cff8505b2039bef1b0d | 900 | py | Python | q2_composition/_impute.py | Jiung-Wen/q2-composition | c543f50173691c539036313a21985e4178e86b14 | [
"BSD-3-Clause"
] | null | null | null | q2_composition/_impute.py | Jiung-Wen/q2-composition | c543f50173691c539036313a21985e4178e86b14 | [
"BSD-3-Clause"
] | null | null | null | q2_composition/_impute.py | Jiung-Wen/q2-composition | c543f50173691c539036313a21985e4178e86b14 | [
"BSD-3-Clause"
] | null | null | null | # ----------------------------------------------------------------------------
# Copyright (c) 2016-2019, QIIME 2 development team.
#
# Distributed under the terms of the Modified BSD License.
#
# The full license is in the file LICENSE, distributed with this software.
# ----------------------------------------------------------------------------
import biom
def add_pseudocount(table: biom.Table,
pseudocount: int = 1) -> biom.Table:
# This is ugly, and it requires a sparse and dense representation to
# be in memory at the same time, but biom.Table.transform only operates
# on non-zero values, so it isn't useful here (as we need to operate on
# all values).
result = biom.Table(
[v + pseudocount
for v in table.iter_data(dense=True, axis='observation')],
table.ids(axis='observation'),
table.ids())
return result
| 37.5 | 78 | 0.551111 |
import biom
def add_pseudocount(table: biom.Table,
pseudocount: int = 1) -> biom.Table:
# all values).
result = biom.Table(
[v + pseudocount
for v in table.iter_data(dense=True, axis='observation')],
table.ids(axis='observation'),
table.ids())
return result
| true | true |
f738c3856f1b160339b9d8cc5f5b7be11c71155a | 3,135 | py | Python | driverapp/models.py | manisharmagarg/nessus-driver | b0aef1a9c6ec5a00036c3040e062a334b1a1ca7f | [
"Apache-2.0"
] | null | null | null | driverapp/models.py | manisharmagarg/nessus-driver | b0aef1a9c6ec5a00036c3040e062a334b1a1ca7f | [
"Apache-2.0"
] | null | null | null | driverapp/models.py | manisharmagarg/nessus-driver | b0aef1a9c6ec5a00036c3040e062a334b1a1ca7f | [
"Apache-2.0"
] | null | null | null | from driverapp import db
import uuid
import datetime
from sqlalchemy import Column, Integer, Text
from sqlalchemy import ForeignKey
# from sqlalchemy.dialects.postgresql import JSON, JSONB
def generate_uuid():
return str(uuid.uuid4())
class Events(db.Model):
id = db.Column(Integer, primary_key=True)
redtree_event_id = db.Column(Integer, nullable=True)
event_status = db.Column(db.String(80), nullable=True)
created = db.Column(db.DateTime, nullable=False,
default=datetime.datetime.utcnow)
def __str__(self):
return self.id
@property
def serialize(self):
"""Return object data in easily serializeable format"""
return {
'id': self.id,
'redtree_event_id': self.redtree_event_id,
'event_status': self.event_status
}
class Scan(db.Model):
id = db.Column(db.String(100), primary_key=True, default=generate_uuid)
targets = db.Column(db.Text(), nullable=True)
scan_name = db.Column(db.String(80), nullable=True)
scan_type = db.Column(db.String(80), nullable=True)
nessus_url = db.Column(db.String(80), nullable=True)
nessus_username = db.Column(db.String(80), nullable=True)
nessus_password = db.Column(db.String(80), nullable=True)
status = db.Column(db.String(80), nullable=True)
scan_message = db.Column(db.String(100), nullable=True)
result = db.Column(db.String(200), nullable=True, default="False")
event_id = db.Column(Integer, ForeignKey('events.id'))
process_started = db.Column(db.DateTime, nullable=True)
created = db.Column(db.DateTime, nullable=False,
default=datetime.datetime.utcnow)
def __str__(self):
return self.id
@property
def serialize(self):
"""Return object data in easily serializeable format"""
return {
'id': self.id,
'targets': self.targets,
'scan_name': self.scan_name,
'scan_type': self.scan_type,
'nessus_url': self.nessus_url,
'nessus_username': self.nessus_username,
'nessus_password': self.nessus_password,
'status': self.status,
'scan_message': self.scan_message,
'result': self.result,
'event_id': self.event_id,
'process_started': self.process_started,
'created': self.created
}
class ScanEvents(db.Model):
id = db.Column(Integer, primary_key=True)
scan_id = db.Column(db.String(100), ForeignKey('scan.id'))
event_id = db.Column(Integer, ForeignKey('events.id'))
scan_event_history = db.Column(db.Text(), nullable=True)
created = db.Column(db.DateTime, nullable=False,
default=datetime.datetime.utcnow)
def __str__(self):
return self.scan_id
@property
def serialize(self):
"""Return object data in easily serializeable format"""
return {
'id': self.id,
'scan_id': self.scan_id,
'event_id': self.event_id,
'scan_event_history': self.scan_event_history
}
| 34.076087 | 75 | 0.635088 | from driverapp import db
import uuid
import datetime
from sqlalchemy import Column, Integer, Text
from sqlalchemy import ForeignKey
def generate_uuid():
return str(uuid.uuid4())
class Events(db.Model):
id = db.Column(Integer, primary_key=True)
redtree_event_id = db.Column(Integer, nullable=True)
event_status = db.Column(db.String(80), nullable=True)
created = db.Column(db.DateTime, nullable=False,
default=datetime.datetime.utcnow)
def __str__(self):
return self.id
@property
def serialize(self):
return {
'id': self.id,
'redtree_event_id': self.redtree_event_id,
'event_status': self.event_status
}
class Scan(db.Model):
id = db.Column(db.String(100), primary_key=True, default=generate_uuid)
targets = db.Column(db.Text(), nullable=True)
scan_name = db.Column(db.String(80), nullable=True)
scan_type = db.Column(db.String(80), nullable=True)
nessus_url = db.Column(db.String(80), nullable=True)
nessus_username = db.Column(db.String(80), nullable=True)
nessus_password = db.Column(db.String(80), nullable=True)
status = db.Column(db.String(80), nullable=True)
scan_message = db.Column(db.String(100), nullable=True)
result = db.Column(db.String(200), nullable=True, default="False")
event_id = db.Column(Integer, ForeignKey('events.id'))
process_started = db.Column(db.DateTime, nullable=True)
created = db.Column(db.DateTime, nullable=False,
default=datetime.datetime.utcnow)
def __str__(self):
return self.id
@property
def serialize(self):
return {
'id': self.id,
'targets': self.targets,
'scan_name': self.scan_name,
'scan_type': self.scan_type,
'nessus_url': self.nessus_url,
'nessus_username': self.nessus_username,
'nessus_password': self.nessus_password,
'status': self.status,
'scan_message': self.scan_message,
'result': self.result,
'event_id': self.event_id,
'process_started': self.process_started,
'created': self.created
}
class ScanEvents(db.Model):
id = db.Column(Integer, primary_key=True)
scan_id = db.Column(db.String(100), ForeignKey('scan.id'))
event_id = db.Column(Integer, ForeignKey('events.id'))
scan_event_history = db.Column(db.Text(), nullable=True)
created = db.Column(db.DateTime, nullable=False,
default=datetime.datetime.utcnow)
def __str__(self):
return self.scan_id
@property
def serialize(self):
return {
'id': self.id,
'scan_id': self.scan_id,
'event_id': self.event_id,
'scan_event_history': self.scan_event_history
}
| true | true |
f738c45d1f5cf19acf9d47da5e8341ee1d71c692 | 570 | py | Python | common/util/updateAmpPhase.py | silky/Qlab | ce4085d3ad5bac7f6056c6b71e4cdfad2d70820c | [
"Apache-2.0"
] | 1 | 2019-06-27T11:33:05.000Z | 2019-06-27T11:33:05.000Z | common/util/updateAmpPhase.py | silky/Qlab | ce4085d3ad5bac7f6056c6b71e4cdfad2d70820c | [
"Apache-2.0"
] | null | null | null | common/util/updateAmpPhase.py | silky/Qlab | ce4085d3ad5bac7f6056c6b71e4cdfad2d70820c | [
"Apache-2.0"
] | null | null | null | import argparse
import sys, os
parser = argparse.ArgumentParser()
parser.add_argument('pyqlabpath', help='path to PyQLab directory')
parser.add_argument('physChan', help='physChan')
parser.add_argument('ampFactor', type=float, help='ampFactor')
parser.add_argument('phaseSkew', type=float, help='phaseSkew')
args = parser.parse_args()
sys.path.append(args.pyqlabpath)
from Libraries import channelLib
if args.physChan not in channelLib.channelDict:
sys.exit(1)
channelLib[args.physChan].ampFactor = args.ampFactor
channelLib[args.physChan].phaseSkew = args.phaseSkew | 33.529412 | 66 | 0.796491 | import argparse
import sys, os
parser = argparse.ArgumentParser()
parser.add_argument('pyqlabpath', help='path to PyQLab directory')
parser.add_argument('physChan', help='physChan')
parser.add_argument('ampFactor', type=float, help='ampFactor')
parser.add_argument('phaseSkew', type=float, help='phaseSkew')
args = parser.parse_args()
sys.path.append(args.pyqlabpath)
from Libraries import channelLib
if args.physChan not in channelLib.channelDict:
sys.exit(1)
channelLib[args.physChan].ampFactor = args.ampFactor
channelLib[args.physChan].phaseSkew = args.phaseSkew | true | true |
f738c485df6fda414acd48bfe683ecab9bc1709e | 8,005 | py | Python | desr/sympy_helper.py | tanbur/diffalg | cbebb5db36f4d112ebf6e09ace2bbf9c5cf0c2fb | [
"Apache-2.0"
] | 2 | 2019-04-01T06:43:27.000Z | 2022-01-14T06:38:21.000Z | desr/sympy_helper.py | tanbur/diffalg | cbebb5db36f4d112ebf6e09ace2bbf9c5cf0c2fb | [
"Apache-2.0"
] | 1 | 2021-06-01T22:17:47.000Z | 2021-06-01T22:17:47.000Z | desr/sympy_helper.py | tanbur/diffalg | cbebb5db36f4d112ebf6e09ace2bbf9c5cf0c2fb | [
"Apache-2.0"
] | 1 | 2020-04-06T23:44:22.000Z | 2020-04-06T23:44:22.000Z | """
Created on Fri Dec 26 12:35:16 2014
Helper functions to deal with sympy expressions and equations
Author: Richard Tanburn (richard.tanburn@gmail.com)
"""
import fractions
import re
import sympy
from __builtin__ import isinstance
def is_monomial(expr):
''' Determine whether expr is a monomial
>>> is_monomial(sympy.sympify('a*b**2/c'))
True
>>> is_monomial(sympy.sympify('a*b**2/c + d/e'))
False
>>> is_monomial(sympy.sympify('a*b**2/c + 1'))
False
>>> is_monomial(sympy.sympify('a*(b**2/c + 1)'))
False
'''
_const, _expr = expr.expand().as_coeff_add()
if (_const != 0 and len(_expr)) or (len(_expr) > 1):
return False
return True
def monomial_to_powers(monomial, variables):
''' Given a monomial, return powers wrt some variables
>>> variables = sympy.var('a b c d e')
>>> monomial_to_powers(sympy.sympify('a*b'), variables)
[1, 1, 0, 0, 0]
>>> monomial_to_powers(sympy.sympify('a*b**2/c'), variables)
[1, 2, -1, 0, 0]
>>> monomial_to_powers(sympy.sympify('a*b**2/c + d/e'), variables)
Traceback (most recent call last):
...
ValueError: a*b**2/c + d/e is not a monomial
>>> monomial_to_powers(sympy.sympify('a*b**2/c + 1'), variables)
Traceback (most recent call last):
...
ValueError: a*b**2/c + 1 is not a monomial
'''
# Check we have a monomial
if not is_monomial(monomial):
raise ValueError('{} is not a monomial'.format(monomial))
powers = []
power_dict = monomial.as_powers_dict()
for var in variables:
powers.append(power_dict.get(var, 0))
return powers
def unique_array_stable(array):
''' Given a list of things, return a new list with unique elements with
original order preserved (by first occurence)
>>> print unique_array_stable([1, 3, 5, 4, 7, 4, 2, 1, 9])
[1, 3, 5, 4, 7, 2, 9]
'''
seen = set()
seen_add = seen.add
return [x for x in array if not (x in seen or seen_add(x))]
## Helper functions
def degree(expr):
''' Return the degree of a sympy expression. I.e. the largest number of
variables multiplied together.
NOTE DOES take into account idempotency of binary variables
>>> str_eqns = ['x + y',
... 'x*y*z - 1',
... 'x ** 2 + a*b*c',
... 'x**2 + y',
... 'x',
... 'x*y',]
>>> eqns = str_exprs_to_sympy_eqns(str_eqns)
>>> for e in eqns: print degree(e.lhs - e.rhs)
1
3
3
1
1
2
Check we deal with constants correctly
>>> (degree(0), degree(1), degree(4),
... degree(sympy.S.Zero), degree(sympy.S.One), degree(sympy.sympify(4)))
(0, 0, 0, 0, 0, 0)
'''
if is_constant(expr):
return 0
degree = 0
for term in expr.as_coefficients_dict().keys():
degree = max(degree, len(term.atoms(sympy.Symbol)))
return degree
def is_constant(expr):
''' Determine whether an expression is constant
>>> expr = 'x + 2*y'
>>> is_constant(sympy.sympify(expr))
False
>>> expr = 'x + 5'
>>> is_constant(sympy.sympify(expr))
False
>>> expr = '3'
>>> is_constant(sympy.sympify(expr))
True
>>> expr = '2*x - 4'
>>> is_constant(sympy.sympify(expr))
False
'''
if isinstance(expr, (int, float)):
return True
return len(expr.atoms(sympy.Symbol)) == 0
def is_equation(eqn, check_true=True):
''' Return True if it is an equation rather than a boolean value.
If it is False, raise a ContradictionException. We never want anything
that might be False.
Optionally, we can turn the check off, but THE DEFAULT VALUE SHOULD
ALWAYS BE TRUE. Otherwise bad things will happen.
>>> x, y = sympy.symbols('x y')
>>> eq1 = sympy.Eq(x, y)
>>> eq2 = sympy.Eq(x, x)
>>> eq3 = sympy.Eq(x, y).subs(y, x)
>>> eq4 = sympy.Eq(2*x*y, 2)
>>> is_equation(eq1)
True
>>> is_equation(eq2)
False
>>> is_equation(eq3)
False
>>> is_equation(eq4)
True
Now check that it raises exceptions for the right things
>>> is_equation(0)
False
'''
if sympy.__version__ == '0.7.5':
return isinstance(eqn, sympy.Equality)
elif re.match('1\..*', sympy.__version__):
return isinstance(eqn, sympy.Equality)
else:
return eqn is True
def standardise_equation(eqn):
''' Remove binary squares etc '''
if not is_equation(eqn):
return eqn
eqn = remove_binary_squares_eqn(eqn.expand())
eqn = balance_terms(eqn)
eqn = cancel_constant_factor(eqn)
return eqn
def expressions_to_variables(exprs):
''' Take a list of equations or expressions and return a set of variables
>>> eqn = sympy.Eq(sympy.sympify('x*a + 1'))
>>> expr = sympy.sympify('x + y*z + 2*a^b')
>>> to_test = [expr, eqn]
>>> expressions_to_variables(to_test)
set([x, z, a, b, y])
'''
if len(exprs) == 0:
return set()
if sympy.__version__ == '0.7.5':
assert all(map(lambda x: isinstance(x, sympy.Basic), exprs))
return set.union(*[expr.atoms(sympy.Symbol) for expr in exprs])
def eqns_with_variables(eqns, variables, strict=False):
''' Given a set of atoms, return only equations that have something in
common
>>> x, y, z1, z2 = sympy.symbols('x y z1 z2')
>>> eqns = ['x + y == 1', '2*z1 + 1 == z2', 'x*z1 == 0']
>>> eqns = str_eqns_to_sympy_eqns(eqns)
>>> eqns_with_variables(eqns, [x])
[Eq(x + y - 1, 0), Eq(x*z1, 0)]
>>> eqns_with_variables(eqns, [z1])
[Eq(2*z1 - z2 + 1, 0), Eq(x*z1, 0)]
>>> eqns_with_variables(eqns, [y])
[Eq(x + y - 1, 0)]
>>> eqns_with_variables(eqns, [x], strict=True)
[]
>>> eqns_with_variables(eqns, [x, z1], strict=True)
[Eq(x*z1, 0)]
>>> eqns_with_variables(eqns, [x, y, z1], strict=True)
[Eq(x + y - 1, 0), Eq(x*z1, 0)]
'''
if strict:
return [eqn for eqn in eqns if eqn.atoms(sympy.Symbol).issubset(variables)]
else:
return [eqn for eqn in eqns if len(eqn.atoms(sympy.Symbol).intersection(variables))]
def dict_as_eqns(dict_):
''' Given a dictionary of lhs: rhs, return the sympy Equations in a list
>>> x, y, z = sympy.symbols('x y z')
>>> dict_as_eqns({x: 1, y: z, x*y: 1 - z})
[Eq(x*y, -z + 1), Eq(x, 1), Eq(y, z)]
'''
return [sympy.Eq(lhs, rhs) for lhs, rhs in dict_.iteritems()]
def str_eqns_to_sympy_eqns(str_eqns):
''' Take string equations and sympify
>>> str_eqns = ['x + y == 1', 'x*y*z - 3*a == -3']
>>> eqns = str_eqns_to_sympy_eqns(str_eqns)
>>> for e in eqns: print e
Eq(x + y - 1, 0)
Eq(-3*a + x*y*z + 3, 0)
'''
str_exprs = []
for str_eqn in str_eqns:
str_exprs.append('{} - ({})'.format(*str_eqn.split('==')))
return str_exprs_to_sympy_eqns(str_exprs)
def str_exprs_to_sympy_eqns(str_exprs):
''' Take some strings and return the sympy expressions
>>> str_eqns = ['x + y - 1', 'x*y*z - 3*a + 3', '2*a - 4*b']
>>> eqns = str_exprs_to_sympy_eqns(str_eqns)
>>> for e in eqns: print e
Eq(x + y - 1, 0)
Eq(-3*a + x*y*z + 3, 0)
Eq(2*a - 4*b, 0)
'''
exprs = map(sympy.sympify, str_exprs)
exprs = map(sympy.Eq, exprs)
return exprs
if __name__ == "__main__":
import doctest
doctest.testmod()
| 31.640316 | 93 | 0.540537 |
import fractions
import re
import sympy
from __builtin__ import isinstance
def is_monomial(expr):
_const, _expr = expr.expand().as_coeff_add()
if (_const != 0 and len(_expr)) or (len(_expr) > 1):
return False
return True
def monomial_to_powers(monomial, variables):
if not is_monomial(monomial):
raise ValueError('{} is not a monomial'.format(monomial))
powers = []
power_dict = monomial.as_powers_dict()
for var in variables:
powers.append(power_dict.get(var, 0))
return powers
def unique_array_stable(array):
seen = set()
seen_add = seen.add
return [x for x in array if not (x in seen or seen_add(x))]
:
if is_constant(expr):
return 0
degree = 0
for term in expr.as_coefficients_dict().keys():
degree = max(degree, len(term.atoms(sympy.Symbol)))
return degree
def is_constant(expr):
if isinstance(expr, (int, float)):
return True
return len(expr.atoms(sympy.Symbol)) == 0
def is_equation(eqn, check_true=True):
if sympy.__version__ == '0.7.5':
return isinstance(eqn, sympy.Equality)
elif re.match('1\..*', sympy.__version__):
return isinstance(eqn, sympy.Equality)
else:
return eqn is True
def standardise_equation(eqn):
if not is_equation(eqn):
return eqn
eqn = remove_binary_squares_eqn(eqn.expand())
eqn = balance_terms(eqn)
eqn = cancel_constant_factor(eqn)
return eqn
def expressions_to_variables(exprs):
if len(exprs) == 0:
return set()
if sympy.__version__ == '0.7.5':
assert all(map(lambda x: isinstance(x, sympy.Basic), exprs))
return set.union(*[expr.atoms(sympy.Symbol) for expr in exprs])
def eqns_with_variables(eqns, variables, strict=False):
if strict:
return [eqn for eqn in eqns if eqn.atoms(sympy.Symbol).issubset(variables)]
else:
return [eqn for eqn in eqns if len(eqn.atoms(sympy.Symbol).intersection(variables))]
def dict_as_eqns(dict_):
return [sympy.Eq(lhs, rhs) for lhs, rhs in dict_.iteritems()]
def str_eqns_to_sympy_eqns(str_eqns):
    """Convert 'lhs == rhs' strings into sympy equations.

    Each equation is rewritten as the expression 'lhs - (rhs)' and handed
    to str_exprs_to_sympy_eqns.
    """
    str_exprs = [
        '{} - ({})'.format(*str_eqn.split('=='))
        for str_eqn in str_eqns
    ]
    return str_exprs_to_sympy_eqns(str_exprs)
def str_exprs_to_sympy_eqns(str_exprs):
    # Parse each expression string, then wrap each parsed expression as an
    # equation.
    # NOTE(review): single-argument sympy.Eq treats the expression as
    # "expr == 0" on the sympy versions this module targets -- confirm
    # before upgrading sympy.
    # map() returns a list on Python 2, which this module targets (it uses
    # __builtin__ and iteritems elsewhere).
    exprs = map(sympy.sympify, str_exprs)
    exprs = map(sympy.Eq, exprs)
    return exprs
if __name__ == "__main__":
    # Run the module's doctests when executed directly.
    import doctest
    doctest.testmod()
| true | true |
f738c54d1b3e71e3312699678842c098ea8f797e | 3,481 | py | Python | src/frameworks/detrac_torch/feature_composer.py | abdelsamea/DeTraC | 2c94d55908285fc9cbb24086da63078ee917525a | [
"MIT"
] | 1 | 2020-09-17T14:17:50.000Z | 2020-09-17T14:17:50.000Z | src/frameworks/detrac_torch/feature_composer.py | arkkhanu/DeTraC_COVId19 | ab03719b49a1a048f74f08600a6670f6757bbe60 | [
"MIT"
] | null | null | null | src/frameworks/detrac_torch/feature_composer.py | arkkhanu/DeTraC_COVId19 | ab03719b49a1a048f74f08600a6670f6757bbe60 | [
"MIT"
] | 1 | 2021-04-21T15:04:49.000Z | 2021-04-21T15:04:49.000Z | import tensorflow as tf
from sklearn.metrics import confusion_matrix
import numpy as np
from tools.preprocessing import preprocess_images, preprocess_single_image
from tools.kfold import KFold_cross_validation_split
from tools.extraction_and_metrics import extract_features, compute_confusion_matrix
from .network import Net
import torchvision.models as models
import torch
import os
import cv2
# Feature composer training
def train_feature_composer(
    composed_dataset_path: str,
    epochs: int,
    batch_size: int,
    num_classes: int,
    folds: int,
    lr:float,
    cuda: bool,
    ckpt_dir: str
):
    """
    Feature composer training: fits a VGG16-backed ``Net`` on the composed
    dataset and reports a confusion matrix on the held-out split.
    params:
        <string> composed_dataset_path: Root folder of the composed dataset
        <int> epochs
        <int> batch_size
        <int> num_classes: Class count of the composed dataset
        <int> folds: Number of folds for KFold cross validation
        <float> lr: Learning rate
        <bool> cuda: Whether to use GPU or not
        <string> ckpt_dir: Model's location
    """
    # Preprocess images, returning the classes, features and labels.
    # 224x224 matches the VGG16 input resolution; imagenet=True presumably
    # selects ImageNet-style preprocessing -- confirm in tools.preprocessing.
    class_names, x, y = preprocess_images(
        dataset_path=composed_dataset_path,
        width=224,
        height=224,
        num_classes=num_classes,
        framework="torch",
        imagenet=True
    )
    # Split data
    X_train, X_test, Y_train, Y_test = KFold_cross_validation_split(
        features=x,
        labels=y,
        n_splits=folds
    )
    # Scale pixel values by 255.
    X_train /= 255
    X_test /= 255
    # Instantiate model around a pretrained VGG16 backbone.
    net = Net(
        models.vgg16(pretrained=True),
        num_classes=num_classes,
        lr=lr,
        cuda=cuda,
        mode="feature_composer",
        ckpt_dir=ckpt_dir,
        labels=class_names
    )
    # Train model
    net.fit(
        X_train,
        Y_train,
        X_test,
        Y_test,
        epochs,
        batch_size,
        resume=False
    )
    # Confusion matrix.
    # NOTE(review): reported over num_classes // 2 -- presumably the
    # pre-decomposition class count (each original class split in two);
    # confirm against compute_confusion_matrix.
    compute_confusion_matrix(
        y_true=Y_test,
        y_pred=net.infer(X_test),
        framework="torch",
        mode="feature_composer",
        num_classes = num_classes // 2
    )
# Inference
def infer(
    ckpt_dir: str,
    ckpt_name: str,
    input_image: str
) -> dict:
    """
    Main inference method.
    params:
        <string> ckpt_dir: Saved model's directory
        <string> ckpt_name: Saved model's name
        <string> input_image: Image path
    returns:
        <dict> Dictionary containing the predictions with their levels of confidence.
        E.g.: {
            COVID19_1:0.10
            COVID19_2:0.15
            ...
        }
    """
    # Resolve the checkpoint path once and reuse it (the original rebuilt
    # it with os.path.join three separate times).
    ckpt_path = os.path.join(ckpt_dir, ckpt_name)
    # The checkpoint stores how many classes the composer was trained with.
    num_classes = torch.load(ckpt_path, map_location=lambda storage, loc: storage)["num_classes"]
    # Instantiate model
    net = Net(
        models.vgg16(pretrained=True),
        num_classes=num_classes,
        mode="feature_composer",
        ckpt_dir=ckpt_dir
    )
    # Load model
    net.load_model_for_inference(ckpt_path)
    # Check if inputed file is an image (tuple form of endswith replaces
    # the chained `or` checks).
    assert input_image.lower().endswith(("png", "jpg", "jpeg"))
    # Preprocess
    img = preprocess_single_image(
        img=input_image,
        width=224,
        height=224,
        imagenet=True,
        framework="torch"
    )
    # Return prediction
    return net.infer(img, ckpt_path=ckpt_path, use_labels=True)
| 24.173611 | 125 | 0.625395 | import tensorflow as tf
from sklearn.metrics import confusion_matrix
import numpy as np
from tools.preprocessing import preprocess_images, preprocess_single_image
from tools.kfold import KFold_cross_validation_split
from tools.extraction_and_metrics import extract_features, compute_confusion_matrix
from .network import Net
import torchvision.models as models
import torch
import os
import cv2
def train_feature_composer(
composed_dataset_path: str,
epochs: int,
batch_size: int,
num_classes: int,
folds: int,
lr:float,
cuda: bool,
ckpt_dir: str
):
class_names, x, y = preprocess_images(
dataset_path=composed_dataset_path,
width=224,
height=224,
num_classes=num_classes,
framework="torch",
imagenet=True
)
X_train, X_test, Y_train, Y_test = KFold_cross_validation_split(
features=x,
labels=y,
n_splits=folds
)
X_train /= 255
X_test /= 255
net = Net(
models.vgg16(pretrained=True),
num_classes=num_classes,
lr=lr,
cuda=cuda,
mode="feature_composer",
ckpt_dir=ckpt_dir,
labels=class_names
)
net.fit(
X_train,
Y_train,
X_test,
Y_test,
epochs,
batch_size,
resume=False
)
compute_confusion_matrix(
y_true=Y_test,
y_pred=net.infer(X_test),
framework="torch",
mode="feature_composer",
num_classes = num_classes // 2
)
def infer(
ckpt_dir: str,
ckpt_name: str,
input_image: str
) -> dict:
ckpt_path = os.path.join(ckpt_dir, ckpt_name)
num_classes = torch.load(ckpt_path, map_location=lambda storage, loc: storage)["num_classes"]
net = Net(
models.vgg16(pretrained=True),
num_classes=num_classes,
mode="feature_composer",
ckpt_dir=ckpt_dir
)
net.load_model_for_inference(os.path.join(ckpt_dir, ckpt_name))
assert input_image.lower().endswith("png") or input_image.lower().endswith("jpg") or input_image.lower().endswith("jpeg")
img = preprocess_single_image(
img=input_image,
width=224,
height=224,
imagenet=True,
framework="torch"
)
return net.infer(img, ckpt_path = os.path.join(ckpt_dir, ckpt_name), use_labels=True)
| true | true |
f738c609edefe08595c13dc1da0d70448f547242 | 1,196 | py | Python | metric.py | rangsimanketkaew/learning-to-smell | 17021a82f7fcdda00536a906dd8dc64cb5663261 | [
"MIT"
] | 1 | 2021-01-01T13:11:39.000Z | 2021-01-01T13:11:39.000Z | metric.py | rangsimanketkaew/learning-to-smell | 17021a82f7fcdda00536a906dd8dc64cb5663261 | [
"MIT"
] | null | null | null | metric.py | rangsimanketkaew/learning-to-smell | 17021a82f7fcdda00536a906dd8dc64cb5663261 | [
"MIT"
] | null | null | null | import tensorflow as tf
# from tensorflow.python.framework.ops import disable_eager_execution
# disable_eager_execution()
from tensorflow.keras import backend as K
def jaccard_tensorflow(y_true, y_pred):
    """Jaccard score of Tensor in tensorflow for graph mode.

    Treats each 1-D tensor as a set of labels and returns
    |intersection| / |union| as a Python float.
    """
    # tf.sets.intersection/union require inputs of rank >= 2, so add a
    # leading batch axis. The original slice [None:] was a no-op
    # (slice(None) keeps the tensor unchanged); [None, :] is the intended
    # dimension expansion.
    intersection = tf.sets.intersection(y_true[None, :], y_pred[None, :])
    intersection = tf.sparse.to_dense(intersection)[0]
    union = tf.sets.union(y_true[None, :], y_pred[None, :])
    union = tf.sparse.to_dense(union)[0]
    return float(len(intersection) / len(union))
def jaccard_tensorflow_eager(y_true, y_pred):
    """Jaccard score of two eager tensors, treating each as a set of labels."""
    truth = set(y_true.numpy())
    guess = set(y_pred.numpy())
    return len(truth & guess) / len(truth | guess)
def jaccard_from_keras_cont(y_true, y_pred):
    """Soft Jaccard loss for keras tensors: 1 - |x*y| / (|x| + |y| - |x*y|).

    Taken directly from
    https://github.com/keras-team/keras-contrib/blob/master/keras_contrib/losses/jaccard.py
    """
    overlap = K.sum(K.abs(y_true * y_pred), axis=-1)
    total = K.sum(K.abs(y_true) + K.abs(y_pred), axis=-1)
    jac = overlap / (total - overlap)
    return 1 - jac
| 36.242424 | 111 | 0.704013 | import tensorflow as tf
from tensorflow.keras import backend as K
def jaccard_tensorflow(y_true, y_pred):
intersection = tf.sets.intersection(y_true[None:], y_pred[None:])
intersection = tf.sparse.to_dense(intersection)[0]
union = tf.sets.union(y_true[None:], y_pred[None:])
union = tf.sparse.to_dense(union)[0]
return float(len(intersection) / len(union))
def jaccard_tensorflow_eager(y_true, y_pred):
set1 = set(y_true.numpy())
set2 = set(y_pred.numpy())
return float((len(set1.intersection(set2))) / (len(set1.union(set2))))
def jaccard_from_keras_cont(y_true, y_pred):
intersection = K.sum(K.abs(y_true * y_pred), axis=-1)
sum_ = K.sum(K.abs(y_true) + K.abs(y_pred), axis=-1)
jac = (intersection) / (sum_ - intersection)
return (1 - jac)
| true | true |
f738c6a9f21e18499d42ab07ca78ccb41d88c010 | 1,673 | py | Python | code/plot_wind.py | faver2014/InertialNav_Learn | 58a0b6db95918e037ed6d08e5d2c8ba2ce388554 | [
"BSD-3-Clause"
] | 2 | 2019-09-01T13:12:32.000Z | 2019-10-06T13:02:31.000Z | code/plot_wind.py | faver2014/InertialNav_Learn | 58a0b6db95918e037ed6d08e5d2c8ba2ce388554 | [
"BSD-3-Clause"
] | null | null | null | code/plot_wind.py | faver2014/InertialNav_Learn | 58a0b6db95918e037ed6d08e5d2c8ba2ce388554 | [
"BSD-3-Clause"
] | 1 | 2020-03-08T18:30:10.000Z | 2020-03-08T18:30:10.000Z | #!/bin/python
import matplotlib as mpl
import matplotlib.pyplot as plt
import matplotlib.cbook as cbook
import numpy as np
import math
# State vector:
# 0-3: quaternions (q0, q1, q2, q3)
# 4-6: Velocity - m/sec (North, East, Down)
# 7-9: Position - m (North, East, Down)
# 10-12: Delta Angle bias - rad (X,Y,Z)
# 13: Accel offset
# 14-15: Wind Vector - m/sec (North,East)
# 16-18: Earth Magnetic Field Vector - milligauss (North, East, Down)
# 19-21: Body Magnetic Field Vector - milligauss (X,Y,Z)
# 22: Terrain
# Load the logged state history. Per the state-vector layout above, the log
# may include optional trailing columns (accel offset "Aoff", terrain
# distance "dist"), so retry with progressively shorter column lists when
# parsing fails.
try:
    data = np.genfromtxt('StateDataOut.txt', delimiter=' ', skip_header=1,
                         skip_footer=1, names=['time', 'q1', 'q2', 'q3', 'q4', 'Vn', 'Ve', 'Vd', 'Pn', 'Pe', 'Pd',
                         'Bx', 'By', 'Bz', 'Aoff', 'Wn', 'We', 'Mn', 'Me', 'Md', 'Mbn', 'Mbe', 'Mbd', 'dist'])
except ValueError:
    try:
        # No terrain-distance column.
        data = np.genfromtxt('StateDataOut.txt', delimiter=' ', skip_header=1,
                             skip_footer=1, names=['time', 'q1', 'q2', 'q3', 'q4', 'Vn', 'Ve', 'Vd', 'Pn', 'Pe', 'Pd',
                             'Bx', 'By', 'Bz', 'Aoff', 'Wn', 'We', 'Mn', 'Me', 'Md', 'Mbn', 'Mbe', 'Mbd'])
    except ValueError:
        # No accel-offset column either.
        data = np.genfromtxt('StateDataOut.txt', delimiter=' ', skip_header=1,
                             skip_footer=1, names=['time', 'q1', 'q2', 'q3', 'q4', 'Vn', 'Ve', 'Vd', 'Pn', 'Pe', 'Pd',
                             'Bx', 'By', 'Bz', 'Wn', 'We', 'Mn', 'Me', 'Md', 'Mbn', 'Mbe', 'Mbd'])
# Two stacked panels: north wind component on top, east wind below.
fig = plt.figure()
ax1 = fig.add_subplot(211)
ax1.set_title("Wind Velocity")
ax1.set_xlabel('time (s)')
ax1.set_ylabel('Wind North')
ax1.plot(data['time'], data['Wn'], color='r', label='Wind N')
ax2 = fig.add_subplot(212)
ax2.set_xlabel('time (s)')
ax2.set_ylabel('Wind East')
ax2.plot(data['time'], data['We'], color='g', label='Wind E')
plt.show() | 34.854167 | 92 | 0.595935 |
import matplotlib as mpl
import matplotlib.pyplot as plt
import matplotlib.cbook as cbook
import numpy as np
import math
try:
data = np.genfromtxt('StateDataOut.txt', delimiter=' ', skip_header=1,
skip_footer=1, names=['time', 'q1', 'q2', 'q3', 'q4', 'Vn', 'Ve', 'Vd', 'Pn', 'Pe', 'Pd',
'Bx', 'By', 'Bz', 'Aoff', 'Wn', 'We', 'Mn', 'Me', 'Md', 'Mbn', 'Mbe', 'Mbd', 'dist'])
except ValueError:
try:
data = np.genfromtxt('StateDataOut.txt', delimiter=' ', skip_header=1,
skip_footer=1, names=['time', 'q1', 'q2', 'q3', 'q4', 'Vn', 'Ve', 'Vd', 'Pn', 'Pe', 'Pd',
'Bx', 'By', 'Bz', 'Aoff', 'Wn', 'We', 'Mn', 'Me', 'Md', 'Mbn', 'Mbe', 'Mbd'])
except ValueError:
data = np.genfromtxt('StateDataOut.txt', delimiter=' ', skip_header=1,
skip_footer=1, names=['time', 'q1', 'q2', 'q3', 'q4', 'Vn', 'Ve', 'Vd', 'Pn', 'Pe', 'Pd',
'Bx', 'By', 'Bz', 'Wn', 'We', 'Mn', 'Me', 'Md', 'Mbn', 'Mbe', 'Mbd'])
fig = plt.figure()
ax1 = fig.add_subplot(211)
ax1.set_title("Wind Velocity")
ax1.set_xlabel('time (s)')
ax1.set_ylabel('Wind North')
ax1.plot(data['time'], data['Wn'], color='r', label='Wind N')
ax2 = fig.add_subplot(212)
ax2.set_xlabel('time (s)')
ax2.set_ylabel('Wind East')
ax2.plot(data['time'], data['We'], color='g', label='Wind E')
plt.show() | true | true |
f738c77366bc2eac6bc9b6f0231ea2a809e38aa3 | 1,595 | py | Python | 0221/solution.py | yuyan1991/leetcode | eec7874b513318a27af7517663473c6d985edf70 | [
"MIT"
] | null | null | null | 0221/solution.py | yuyan1991/leetcode | eec7874b513318a27af7517663473c6d985edf70 | [
"MIT"
] | null | null | null | 0221/solution.py | yuyan1991/leetcode | eec7874b513318a27af7517663473c6d985edf70 | [
"MIT"
] | null | null | null | class Solution:
    def maximalSquare(self, matrix: List[List[str]]) -> int:
        """Find the largest all-'1' square in a '0'/'1' character matrix.

        Builds a 2-D prefix-sum table so any k-by-k square can be tested
        for being all ones in O(1), then scans every cell, only trying
        square sizes larger than the best side found so far.
        """
        n = len(matrix)
        m = len(matrix[0])
        # NOTE: `sum` shadows the builtin inside this method.
        # sum[i][j] == number of '1's in the submatrix (0,0)..(i,j) inclusive.
        sum = [[0 for i in range(m)] for i in range(n)]
        if matrix[0][0] == '1':
            sum[0][0] = 1
        else:
            sum[0][0] = 0
        # First row of prefix sums.
        for i in range(1, m):
            num = 0
            if matrix[0][i] == '1':
                num = 1
            sum[0][i] = sum[0][i - 1] + num
        # First column of prefix sums.
        for i in range(1, n):
            num = 0
            if matrix[i][0] == '1':
                num = 1
            sum[i][0] = sum[i - 1][0] + num
        # Interior cells via inclusion-exclusion over left, top, top-left.
        for i in range(1, n):
            for j in range(1, m):
                num = 0
                if matrix[i][j] == '1':
                    num = 1
                sum[i][j] = sum[i][j - 1] + sum[i - 1][j] - sum [i - 1][j - 1] + num
        # foundSize tracks the best square side seen so far, so the inner
        # loop only ever tests strictly larger candidates.
        foundSize = 0
        for i in range(n):
            for j in range(m):
                maxSize = min(i + 1, j + 1)
                cur = 0
                for k in range(foundSize + 1, maxSize + 1):
                    # Prefix sums just outside the candidate square; the
                    # guards substitute 0 when the square touches an edge.
                    topSum = leftSum = leftTopSum = 0
                    if i >= k:
                        topSum = sum[i - k][j]
                    if j >= k:
                        leftSum = sum[i][j - k]
                    if i >= k and j >= k:
                        leftTopSum = sum[i - k][j - k]
                    # The k*k block ending at (i, j) is all ones iff its
                    # ones-count equals k*k.
                    if sum[i][j] - topSum - leftSum + leftTopSum == k * k:
                        cur = k
                    else:
                        break
                foundSize = max(foundSize, cur)
return foundSize * foundSize | 35.444444 | 84 | 0.344828 | class Solution:
def maximalSquare(self, matrix: List[List[str]]) -> int:
n = len(matrix)
m = len(matrix[0])
sum = [[0 for i in range(m)] for i in range(n)]
if matrix[0][0] == '1':
sum[0][0] = 1
else:
sum[0][0] = 0
for i in range(1, m):
num = 0
if matrix[0][i] == '1':
num = 1
sum[0][i] = sum[0][i - 1] + num
for i in range(1, n):
num = 0
if matrix[i][0] == '1':
num = 1
sum[i][0] = sum[i - 1][0] + num
for i in range(1, n):
for j in range(1, m):
num = 0
if matrix[i][j] == '1':
num = 1
sum[i][j] = sum[i][j - 1] + sum[i - 1][j] - sum [i - 1][j - 1] + num
foundSize = 0
for i in range(n):
for j in range(m):
maxSize = min(i + 1, j + 1)
cur = 0
for k in range(foundSize + 1, maxSize + 1):
topSum = leftSum = leftTopSum = 0
if i >= k:
topSum = sum[i - k][j]
if j >= k:
leftSum = sum[i][j - k]
if i >= k and j >= k:
leftTopSum = sum[i - k][j - k]
if sum[i][j] - topSum - leftSum + leftTopSum == k * k:
cur = k
else:
break
foundSize = max(foundSize, cur)
return foundSize * foundSize | true | true |
f738c775e314127c27326ce6ca14c2f665ce477e | 5,336 | py | Python | src/petronia/base/events/component_events.py | groboclown/petronia | 486338023d19cee989e92f0c5692680f1a37811f | [
"MIT"
] | 19 | 2017-06-21T10:28:24.000Z | 2021-12-31T11:49:28.000Z | src/petronia/base/events/component_events.py | groboclown/petronia | 486338023d19cee989e92f0c5692680f1a37811f | [
"MIT"
] | 10 | 2016-11-11T18:57:57.000Z | 2021-02-01T15:33:43.000Z | src/petronia/base/events/component_events.py | groboclown/petronia | 486338023d19cee989e92f0c5692680f1a37811f | [
"MIT"
] | 3 | 2017-09-17T03:29:35.000Z | 2019-06-03T10:43:08.000Z |
"""
The events around component lifecycle creation.
"""
from typing import Generic
from ..internal_.identity_types import (
ParticipantId, ComponentId,
)
from ..internal_.bus_types import (
EventBus, EventId, EventCallback,
ListenerSetup,
)
from ..util.memory import T
from ..util.messages import UserMessage
# ---------------------------------------------------------------------------
# Note: not "core".
EVENT_ID_REQUEST_NEW_COMPONENT = EventId('petronia.registrar/request-new-component')
class RequestNewComponentEvent(Generic[T]):
    """
    Request for the component factory to construct a new instance.

    The event targets the factory; the factory reports back to
    ``callback_target_id``, echoing ``request_id`` so the requester can
    match the response to this particular creation request.
    """
    __slots__ = ('_obj', '_target_id', '_request_id',)

    def __init__(self, construction_obj: T, callback_target_id: ParticipantId, request_id: int) -> None:
        self._request_id = request_id
        self._target_id = callback_target_id
        self._obj = construction_obj

    @property
    def construction_obj(self) -> T:
        """Payload the category's factory uses to build the component."""
        return self._obj

    @property
    def callback_target_id(self) -> ParticipantId:
        """Participant that will be told about the created component."""
        return self._target_id

    @property
    def request_id(self) -> int:
        """Caller-chosen identifier echoed back with the result."""
        return self._request_id
def as_request_new_component_listener(
        callback: EventCallback[RequestNewComponentEvent[T]]
) -> ListenerSetup[RequestNewComponentEvent[T]]:
    """Pair the new-component event id with *callback* for listener registration."""
    return EVENT_ID_REQUEST_NEW_COMPONENT, callback
def send_request_new_component(
        bus: EventBus, category_target_id: ParticipantId, construction_obj: T,
        callback_target_id: ParticipantId, request_id: int
) -> None:
    """
    Fire a creation request for a new instance of a category.
    """
    event = RequestNewComponentEvent(construction_obj, callback_target_id, request_id)
    bus.trigger(EVENT_ID_REQUEST_NEW_COMPONENT, category_target_id, event)
# ---------------------------------------------------------------------------
EVENT_ID_COMPONENT_CREATED = EventId('petronia.registrar/component-created')
class ComponentCreatedEvent:
    """
    Success response: a component was created for an earlier request.
    """
    __slots__ = ('__request_id', '__created_id')

    def __init__(self, created_id: ComponentId, request_id: int):
        self.__request_id = request_id
        self.__created_id = created_id

    @property
    def created_id(self) -> ComponentId:
        """Identifier of the newly created component."""
        return self.__created_id

    @property
    def request_id(self) -> int:
        """Identifier echoed from the originating request (creator-specific)."""
        return self.__request_id
def as_component_created_listener(
        callback: EventCallback[ComponentCreatedEvent]
) -> ListenerSetup[ComponentCreatedEvent]:
    """Pair the component-created event id with *callback* for registration."""
    return EVENT_ID_COMPONENT_CREATED, callback
def send_component_created_event(
        bus: EventBus,
        request_event: RequestNewComponentEvent[T],
        created_id: ComponentId,
) -> None:
    """Notify the original requester that its component now exists."""
    # Route the response to the participant named in the triggering
    # request, echoing its request_id so the caller can correlate.
    bus.trigger(
        EVENT_ID_COMPONENT_CREATED,
        request_event.callback_target_id,
        ComponentCreatedEvent(created_id, request_event.request_id)
    )
# ---------------------------------------------------------------------------
EVENT_ID_COMPONENT_CREATION_FAILED = EventId('petronia.registrar/component-create-failed')
class ComponentCreationFailedEvent:
    """
    Failure response: a component creation attempt did not succeed.
    """
    __slots__ = ('__request_id', '__category', '__error_msg',)

    def __init__(
            self, category: str, request_id: int,
            error_msg: UserMessage
    ):
        self.__error_msg = error_msg
        self.__category = category
        self.__request_id = request_id

    @property
    def request_id(self) -> int:
        """Identifier echoed from the originating request (creator-specific)."""
        return self.__request_id

    @property
    def category(self) -> str:
        """Category whose creation was requested."""
        return self.__category

    @property
    def error_message(self) -> UserMessage:
        """Description of what went wrong."""
        return self.__error_msg
def as_component_creation_failed_listener(
        callback: EventCallback[ComponentCreationFailedEvent]
) -> ListenerSetup[ComponentCreationFailedEvent]:
    """Pair the creation-failed event id with *callback* for registration."""
    return EVENT_ID_COMPONENT_CREATION_FAILED, callback
def send_component_creation_failed_event(
        bus: EventBus,
        request_event: RequestNewComponentEvent[T],
        category: str,
        error_message: UserMessage
) -> None:
    """Report back to the requester that the creation attempt failed."""
    failure = ComponentCreationFailedEvent(
        category, request_event.request_id, error_message
    )
    bus.trigger(
        EVENT_ID_COMPONENT_CREATION_FAILED,
        request_event.callback_target_id,
        failure
    )
| 29 | 90 | 0.656859 |
from typing import Generic
from ..internal_.identity_types import (
ParticipantId, ComponentId,
)
from ..internal_.bus_types import (
EventBus, EventId, EventCallback,
ListenerSetup,
)
from ..util.memory import T
from ..util.messages import UserMessage
EVENT_ID_REQUEST_NEW_COMPONENT = EventId('petronia.registrar/request-new-component')
class RequestNewComponentEvent(Generic[T]):
__slots__ = ('_obj', '_target_id', '_request_id',)
def __init__(
self, construction_obj: T,
callback_target_id: ParticipantId, request_id: int
) -> None:
self._obj = construction_obj
self._target_id = callback_target_id
self._request_id = request_id
@property
def construction_obj(self) -> T:
return self._obj
@property
def callback_target_id(self) -> ParticipantId:
return self._target_id
@property
def request_id(self) -> int:
return self._request_id
def as_request_new_component_listener(
callback: EventCallback[RequestNewComponentEvent[T]]
) -> ListenerSetup[RequestNewComponentEvent[T]]:
return (EVENT_ID_REQUEST_NEW_COMPONENT, callback,)
def send_request_new_component(
bus: EventBus, category_target_id: ParticipantId, construction_obj: T,
callback_target_id: ParticipantId, request_id: int
) -> None:
bus.trigger(
EVENT_ID_REQUEST_NEW_COMPONENT,
category_target_id,
RequestNewComponentEvent(
construction_obj, callback_target_id, request_id
)
)
EVENT_ID_COMPONENT_CREATED = EventId('petronia.registrar/component-created')
class ComponentCreatedEvent:
__slots__ = ('__request_id', '__created_id')
def __init__(self, created_id: ComponentId, request_id: int):
self.__created_id = created_id
self.__request_id = request_id
@property
def created_id(self) -> ComponentId:
return self.__created_id
@property
def request_id(self) -> int:
return self.__request_id
def as_component_created_listener(
callback: EventCallback[ComponentCreatedEvent]
) -> ListenerSetup[ComponentCreatedEvent]:
return (EVENT_ID_COMPONENT_CREATED, callback,)
def send_component_created_event(
bus: EventBus,
request_event: RequestNewComponentEvent[T],
created_id: ComponentId,
) -> None:
bus.trigger(
EVENT_ID_COMPONENT_CREATED,
request_event.callback_target_id,
ComponentCreatedEvent(created_id, request_event.request_id)
)
EVENT_ID_COMPONENT_CREATION_FAILED = EventId('petronia.registrar/component-create-failed')
class ComponentCreationFailedEvent:
__slots__ = ('__request_id', '__category', '__error_msg',)
def __init__(
self, category: str, request_id: int,
error_msg: UserMessage
):
self.__request_id = request_id
self.__category = category
self.__error_msg = error_msg
@property
def request_id(self) -> int:
return self.__request_id
@property
def category(self) -> str:
return self.__category
@property
def error_message(self) -> UserMessage:
return self.__error_msg
def as_component_creation_failed_listener(
callback: EventCallback[ComponentCreationFailedEvent]
) -> ListenerSetup[ComponentCreationFailedEvent]:
return (EVENT_ID_COMPONENT_CREATION_FAILED, callback,)
def send_component_creation_failed_event(
bus: EventBus,
request_event: RequestNewComponentEvent[T],
category: str,
error_message: UserMessage
) -> None:
bus.trigger(
EVENT_ID_COMPONENT_CREATION_FAILED,
request_event.callback_target_id,
ComponentCreationFailedEvent(
category, request_event.request_id,
error_message
)
)
| true | true |
f738c8435d3fbed95a6e9687a5bbe8f05cb05aab | 4,476 | py | Python | __init__.py | niekvdg/domoticz_skill | 8b6ee046350c6ec14337fb1dd6c880b19acb1adb | [
"MIT"
] | null | null | null | __init__.py | niekvdg/domoticz_skill | 8b6ee046350c6ec14337fb1dd6c880b19acb1adb | [
"MIT"
] | null | null | null | __init__.py | niekvdg/domoticz_skill | 8b6ee046350c6ec14337fb1dd6c880b19acb1adb | [
"MIT"
] | null | null | null | # Copyright 2016 Mycroft AI, Inc.
#
# This file is part of Mycroft Core.
#
# Mycroft Core is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Mycroft Core is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Mycroft Core. If not, see <http://www.gnu.org/licenses/>.
from adapt.intent import IntentBuilder
from mycroft.skills.core import MycroftSkill
from mycroft.util.log import getLogger
from os.path import dirname, abspath
from .Domoticz import Domoticz
import sys
import re
__author__ = 'mTreussart'
sys.path.append(abspath(dirname(__file__)))
LOGGER = getLogger(__name__)
class DomoticzSkill(MycroftSkill):
    """Mycroft skill that switches and queries Domoticz home-automation devices."""

    def __init__(self):
        super(DomoticzSkill, self).__init__(name="DomoticzSkill")

    def initialize(self):
        """Register the switch and info intents with Mycroft."""
        domoticz_switch_intent = IntentBuilder("SwitchIntent")\
            .optionally("TurnKeyword")\
            .require("StateKeyword")\
            .require("WhatKeyword")\
            .require("WhereKeyword").build()
        self.register_intent(domoticz_switch_intent,
                             self.handle_domoticz_switch_intent)
        domoticz_infos_intent = IntentBuilder("InfosIntent")\
            .require("InfosKeyword")\
            .require("WhatKeyword")\
            .optionally("WhereKeyword")\
            .optionally("StateKeyword").build()
        self.register_intent(domoticz_infos_intent,
                             self.handle_domoticz_infos_intent)

    def _domoticz_client(self):
        # Both intent handlers need an identically-configured client; the
        # original duplicated this constructor call in each handler.
        return Domoticz(
            self.settings.get("hostname"),
            self.settings.get("port"),
            self.settings.get("protocol"),
            self.settings.get("authentication"),
            self.settings.get("username"),
            self.settings.get("password"),
            self.settings.get("feedback"))

    def handle_domoticz_switch_intent(self, message):
        """Switch a device's state (e.g. on/off) through Domoticz."""
        domoticz = self._domoticz_client()
        state = message.data.get("StateKeyword")
        what = message.data.get("WhatKeyword")
        where = message.data.get("WhereKeyword")
        action = message.data.get("TurnKeyword")
        data = {
            'what': what,
            'where': where
        }
        LOGGER.debug("message : " + str(message.data))
        response = domoticz.switch(state, what, where, action)
        # "turned On"/"turned Off" read badly with an "ed" suffix, so only
        # append it for other states.
        edng = re.compile(str(state).title(), re.I)
        ending = "ed"
        if edng.search('on') or edng.search('off'):
            ending = ""
        if response is None:
            self.speak_dialog("NotFound", data)
        elif response == 0:
            # was "response is 0"/"is 1": identity comparison with int
            # literals is fragile and warns on modern Python.
            self.speak("The " + str(what) + " is already " + str(state).title() + ending)
        elif response == 1:
            self.speak("The " + str(what) + " can not be operated with " + str(state).title())
        elif self.settings.get("feedback"):
            self.speak("The " + str(where) + " " + str(what) + " is turned " + str(state).title())

    def handle_domoticz_infos_intent(self, message):
        """Read a device's current value from Domoticz and speak it."""
        what = message.data.get("WhatKeyword")
        where = message.data.get("WhereKeyword")
        domoticz = self._domoticz_client()
        data = {
            'what': what,
            'where': where
        }
        response = domoticz.get(what, where)
        # Guard BEFORE indexing: the original stringified response['Data']
        # first and then tested the string for None, which could never
        # trigger and crashed when the device was not found.
        if response is None:
            if where is None:
                self.speak_dialog("NotFoundShort", data)
            else:
                self.speak_dialog("NotFound", data)
            return
        data = str(response['Data'])
        # Raw strings fix the invalid '\d'/'\s' escapes of the original.
        if re.search(r'\d\s+C', data):
            data = data.replace(' C', ' degrees celsius')
        if re.search(r'\d\s+F', data):
            data = data.replace(' F', ' degrees fahrenheit')
        data = "It's " + data
        LOGGER.debug("result : " + str(data))
        self.speak(str(data))

    def stop(self):
        """Nothing to interrupt for this skill."""
        pass
def create_skill():
    """Entry point used by the skill loader to construct the skill."""
    skill = DomoticzSkill()
    return skill
| 36.390244 | 98 | 0.604781 |
from adapt.intent import IntentBuilder
from mycroft.skills.core import MycroftSkill
from mycroft.util.log import getLogger
from os.path import dirname, abspath
from .Domoticz import Domoticz
import sys
import re
__author__ = 'mTreussart'
sys.path.append(abspath(dirname(__file__)))
LOGGER = getLogger(__name__)
class DomoticzSkill(MycroftSkill):
def __init__(self):
super(DomoticzSkill, self).__init__(name="DomoticzSkill")
def initialize(self):
domoticz_switch_intent = IntentBuilder("SwitchIntent")\
.optionally("TurnKeyword")\
.require("StateKeyword")\
.require("WhatKeyword")\
.require("WhereKeyword").build()
self.register_intent(domoticz_switch_intent,
self.handle_domoticz_switch_intent)
domoticz_infos_intent = IntentBuilder("InfosIntent")\
.require("InfosKeyword")\
.require("WhatKeyword")\
.optionally("WhereKeyword")\
.optionally("StateKeyword").build()
self.register_intent(domoticz_infos_intent,
self.handle_domoticz_infos_intent)
def handle_domoticz_switch_intent(self, message):
domoticz = Domoticz(
self.settings.get("hostname"),
self.settings.get("port"),
self.settings.get("protocol"),
self.settings.get("authentication"),
self.settings.get("username"),
self.settings.get("password"),
self.settings.get("feedback"))
state = message.data.get("StateKeyword")
what = message.data.get("WhatKeyword")
where = message.data.get("WhereKeyword")
action = message.data.get("TurnKeyword")
data = {
'what': what,
'where': where
}
LOGGER.debug("message : " + str(message.data))
response = domoticz.switch(state, what, where, action)
edng = re.compile(str(state).title(), re.I)
ending = "ed"
if edng.search('on') or edng.search('off'):
ending = ""
if response is None:
self.speak_dialog("NotFound", data)
elif response is 0:
self.speak("The " + str(what) + " is already " + str(state).title() + ending)
elif response is 1:
self.speak("The " + str(what) + " can not be operated with " + str(state).title())
elif self.settings.get("feedback"):
self.speak("The " + str(where) + " " + str(what) + " is turned " + str(state).title())
def handle_domoticz_infos_intent(self, message):
what = message.data.get("WhatKeyword")
where = message.data.get("WhereKeyword")
domoticz = Domoticz(
self.settings.get("hostname"),
self.settings.get("port"),
self.settings.get("protocol"),
self.settings.get("authentication"),
self.settings.get("username"),
self.settings.get("password"),
self.settings.get("feedback"))
data = {
'what': what,
'where': where
}
response = domoticz.get(what, where)
data = str(response['Data'])
if data is None:
if where is None:
self.speak_dialog("NotFoundShort", data)
else:
self.speak_dialog("NotFound", data)
if re.search('\d\s+C', data):
data = data.replace(' C', ' degrees celsius')
if re.search('\d\s+F', data):
data = data.replace(' F', ' degrees fahrenheit')
data = "It's " + data
LOGGER.debug("result : " + str(data))
self.speak(str(data))
def stop(self):
pass
def create_skill():
return DomoticzSkill()
| true | true |
f738c8d7c592d77c0c81025d123c1cecf33946f4 | 154 | py | Python | pysensors/basis/__init__.py | Jimmy-INL/pysensors | 62b79a233a551ae01125e20e06fde0c96b4dffd2 | [
"MIT"
] | null | null | null | pysensors/basis/__init__.py | Jimmy-INL/pysensors | 62b79a233a551ae01125e20e06fde0c96b4dffd2 | [
"MIT"
] | null | null | null | pysensors/basis/__init__.py | Jimmy-INL/pysensors | 62b79a233a551ae01125e20e06fde0c96b4dffd2 | [
"MIT"
] | null | null | null | from ._identity import Identity
from ._random_projection import RandomProjection
from ._svd import SVD
__all__ = ["Identity", "SVD", "RandomProjection"]
| 25.666667 | 49 | 0.792208 | from ._identity import Identity
from ._random_projection import RandomProjection
from ._svd import SVD
__all__ = ["Identity", "SVD", "RandomProjection"]
| true | true |
f738c9a6762bcb2a35099b936401cb4af0906605 | 4,669 | py | Python | src/clients/ctm_api_client/models/raw_cms_xml_request.py | IceT-M/ctm-python-client | 0ef1d8a3c9a27a01c088be1cdf5d177d25912bac | [
"BSD-3-Clause"
] | 5 | 2021-12-01T18:40:00.000Z | 2022-03-04T10:51:44.000Z | src/clients/ctm_api_client/models/raw_cms_xml_request.py | IceT-M/ctm-python-client | 0ef1d8a3c9a27a01c088be1cdf5d177d25912bac | [
"BSD-3-Clause"
] | 3 | 2022-02-21T20:08:32.000Z | 2022-03-16T17:41:03.000Z | src/clients/ctm_api_client/models/raw_cms_xml_request.py | IceT-M/ctm-python-client | 0ef1d8a3c9a27a01c088be1cdf5d177d25912bac | [
"BSD-3-Clause"
] | 7 | 2021-12-01T11:59:16.000Z | 2022-03-01T18:16:40.000Z | # coding: utf-8
"""
Control-M Services
Provides access to BMC Control-M Services # noqa: E501
OpenAPI spec version: 9.20.215
Contact: customer_support@bmc.com
Generated by: https://github.com/swagger-api/swagger-codegen.git
"""
import pprint
import re # noqa: F401
import six
from clients.ctm_api_client.configuration import Configuration
class RawCmsXmlRequest(object):
    """Swagger model for a raw CMS XML request payload.

    Carries the raw XML request string plus optional encryption metadata
    and knows how to serialise itself to plain python data structures.
    Auto-generated-model conventions (swagger_types / attribute_map) are
    preserved so generated client code keeps working.
    """

    # attribute name -> declared swagger type
    swagger_types = {
        "raw_xml_request": "str",
        "encryption_metadata": "EncryptionMetadata",
    }

    # attribute name -> JSON key used on the wire
    attribute_map = {
        "raw_xml_request": "rawXmlRequest",
        "encryption_metadata": "encryptionMetadata",
    }

    def __init__(
        self, raw_xml_request=None, encryption_metadata=None, _configuration=None
    ):
        """Build the model, optionally seeding both attributes."""
        self._configuration = (
            Configuration() if _configuration is None else _configuration
        )

        self._raw_xml_request = None
        self._encryption_metadata = None
        self.discriminator = None

        if raw_xml_request is not None:
            self.raw_xml_request = raw_xml_request
        if encryption_metadata is not None:
            self.encryption_metadata = encryption_metadata

    @property
    def raw_xml_request(self):
        """str: the raw XML request string."""
        return self._raw_xml_request

    @raw_xml_request.setter
    def raw_xml_request(self, raw_xml_request):
        """Set the raw XML request string."""
        self._raw_xml_request = raw_xml_request

    @property
    def encryption_metadata(self):
        """EncryptionMetadata: encryption details attached to the request."""
        return self._encryption_metadata

    @encryption_metadata.setter
    def encryption_metadata(self, encryption_metadata):
        """Set the encryption metadata."""
        self._encryption_metadata = encryption_metadata

    @staticmethod
    def _plain(value):
        """Reduce one attribute value to plain python data (one level deep)."""
        if isinstance(value, list):
            return [item.to_dict() if hasattr(item, "to_dict") else item
                    for item in value]
        if hasattr(value, "to_dict"):
            return value.to_dict()
        if isinstance(value, dict):
            return {key: (val.to_dict() if hasattr(val, "to_dict") else val)
                    for key, val in value.items()}
        return value

    def to_dict(self):
        """Return the model properties as a dict."""
        result = {name: self._plain(getattr(self, name))
                  for name in self.swagger_types}
        if issubclass(RawCmsXmlRequest, dict):
            for key, value in self.items():
                result[key] = value
        return result

    def to_str(self):
        """Return the pretty-printed string form of the model."""
        return pprint.pformat(self.to_dict())

    def __repr__(self):
        """For `print` and `pprint`."""
        return self.to_str()

    def __eq__(self, other):
        """True when *other* is a RawCmsXmlRequest with an equal dict form."""
        return (isinstance(other, RawCmsXmlRequest)
                and self.to_dict() == other.to_dict())

    def __ne__(self, other):
        """Inverse of ``__eq__``."""
        return not self.__eq__(other)
| 29.929487 | 99 | 0.602484 |
import pprint
import re
import six
from clients.ctm_api_client.configuration import Configuration
class RawCmsXmlRequest(object):
swagger_types = {
"raw_xml_request": "str",
"encryption_metadata": "EncryptionMetadata",
}
attribute_map = {
"raw_xml_request": "rawXmlRequest",
"encryption_metadata": "encryptionMetadata",
}
def __init__(
self, raw_xml_request=None, encryption_metadata=None, _configuration=None
):
if _configuration is None:
_configuration = Configuration()
self._configuration = _configuration
self._raw_xml_request = None
self._encryption_metadata = None
self.discriminator = None
if raw_xml_request is not None:
self.raw_xml_request = raw_xml_request
if encryption_metadata is not None:
self.encryption_metadata = encryption_metadata
@property
def raw_xml_request(self):
return self._raw_xml_request
@raw_xml_request.setter
def raw_xml_request(self, raw_xml_request):
self._raw_xml_request = raw_xml_request
@property
def encryption_metadata(self):
return self._encryption_metadata
@encryption_metadata.setter
def encryption_metadata(self, encryption_metadata):
self._encryption_metadata = encryption_metadata
def to_dict(self):
result = {}
for attr, _ in six.iteritems(self.swagger_types):
value = getattr(self, attr)
if isinstance(value, list):
result[attr] = list(
map(lambda x: x.to_dict() if hasattr(x, "to_dict") else x, value)
)
elif hasattr(value, "to_dict"):
result[attr] = value.to_dict()
elif isinstance(value, dict):
result[attr] = dict(
map(
lambda item: (item[0], item[1].to_dict())
if hasattr(item[1], "to_dict")
else item,
value.items(),
)
)
else:
result[attr] = value
if issubclass(RawCmsXmlRequest, dict):
for key, value in self.items():
result[key] = value
return result
def to_str(self):
return pprint.pformat(self.to_dict())
def __repr__(self):
return self.to_str()
def __eq__(self, other):
if not isinstance(other, RawCmsXmlRequest):
return False
return self.to_dict() == other.to_dict()
def __ne__(self, other):
if not isinstance(other, RawCmsXmlRequest):
return True
return self.to_dict() != other.to_dict()
| true | true |
f738ca239bd2274d28c1686d0902a2152d62598d | 5,973 | py | Python | tests/test_foreignkey.py | chenmin1992/SchemaObject | e7eefb07d842a8ac026e2431c799430ba730fcfe | [
"Apache-2.0"
] | 18 | 2015-04-11T18:28:07.000Z | 2020-12-11T08:14:48.000Z | tests/test_foreignkey.py | chenmin1992/SchemaObject | e7eefb07d842a8ac026e2431c799430ba730fcfe | [
"Apache-2.0"
] | 8 | 2015-09-03T18:38:48.000Z | 2020-01-25T21:40:47.000Z | tests/test_foreignkey.py | chenmin1992/SchemaObject | e7eefb07d842a8ac026e2431c799430ba730fcfe | [
"Apache-2.0"
] | 27 | 2015-02-20T22:01:39.000Z | 2021-07-12T15:39:29.000Z | #!/usr/bin/python
import re
import unittest
import schemaobject
class TestForeignKeySchema(unittest.TestCase):
    """Exercise ForeignKeySchema attributes and SQL generation.

    Requires a local MySQL server with the ``sakila`` sample database;
    all assertions target the ``rental`` table's foreign keys.
    """

    def setUp(self):
        """Connect to sakila and keep the rental table's FK collection."""
        self.database_url = "mysql://root:root@localhost:3306/"
        self.schema = schemaobject.SchemaObject(self.database_url + 'sakila', charset='utf8')
        self.fk = self.schema.selected.tables['rental'].foreign_keys

    def test_fk_exists(self):
        self.assertIn("fk_rental_customer", list(self.fk.keys()))

    def test_fk_not_exists(self):
        self.assertNotIn("fk_foobar", list(self.fk.keys()))

    def test_fk_name(self):
        self.assertEqual(self.fk['fk_rental_customer'].name, "fk_rental_customer")

    def test_fk_symbol(self):
        self.assertEqual(self.fk['fk_rental_customer'].symbol, "fk_rental_customer")

    def test_fk_table_name(self):
        self.assertEqual(self.fk['fk_rental_customer'].table_name, "rental")

    def test_fk_table_schema(self):
        self.assertEqual(self.fk['fk_rental_customer'].table_schema, "sakila")

    def test_fk_columns(self):
        self.assertEqual(self.fk['fk_rental_customer'].columns, ['customer_id'])

    def test_fk_referenced_table_name(self):
        self.assertEqual(self.fk['fk_rental_customer'].referenced_table_name, "customer")

    def test_fk_referenced_table_schema(self):
        self.assertEqual(self.fk['fk_rental_customer'].referenced_table_schema, "sakila")

    def test_fk_referenced_columns(self):
        self.assertEqual(self.fk['fk_rental_customer'].referenced_columns, ['customer_id'])

    def test_fk_match_option(self):
        self.assertEqual(self.fk['fk_rental_customer'].match_option, None)

    def test_fk_update_rule(self):
        self.assertEqual(self.fk['fk_rental_customer'].update_rule, "CASCADE")

    def test_fk_delete_rule(self):
        self.assertEqual(self.fk['fk_rental_customer'].delete_rule, "RESTRICT")

    def test_format_referenced_col_with_length(self):
        fmt = schemaobject.foreignkey.ForeignKeySchema._format_referenced_col
        self.assertEqual(fmt('fk_rental_customer', 11), '`fk_rental_customer`(11)')

    def test_format_referenced_col_without_length(self):
        # Both 0 and None mean "no length suffix".
        fmt = schemaobject.foreignkey.ForeignKeySchema._format_referenced_col
        self.assertEqual(fmt('fk_rental_customer', 0), '`fk_rental_customer`')
        self.assertEqual(fmt('fk_rental_customer', None), '`fk_rental_customer`')

    def test_fk_drop(self):
        self.assertEqual(self.fk['fk_rental_customer'].drop(), "DROP FOREIGN KEY `fk_rental_customer`")

    def test_fk_create(self):
        self.assertEqual(self.fk['fk_rental_customer'].create(),
                         "ADD CONSTRAINT `fk_rental_customer` FOREIGN KEY `fk_rental_customer` (`customer_id`) REFERENCES `customer` (`customer_id`) ON DELETE RESTRICT ON UPDATE CASCADE")

    def test_fk_eq(self):
        self.assertEqual(self.fk['fk_rental_customer'], self.fk['fk_rental_customer'])

    def test_fk_neq(self):
        self.assertNotEqual(self.fk['fk_rental_customer'], self.fk['fk_rental_inventory'])
# def test_fk_reference_opts_update_and_delete(self):
# table_def = """CREATE TABLE `child` (
# `id` int(11) DEFAULT NULL,
# `parent_id` int(11) DEFAULT NULL,
# KEY `par_ind` (`parent_id`),
# CONSTRAINT `child_ibfk_1` FOREIGN KEY (`parent_id`)
# REFERENCES `parent` (`id`) ON DELETE SET NULL ON UPDATE CASCADE,
# CONSTRAINT `child_ibfk_2` FOREIGN KEY (`parent_id`)
# REFERENCES `parent` (`id`) ON DELETE RESTRICT ON UPDATE RESTRICT )
# ENGINE=InnoDB DEFAULT CHARSET=latin1 COLLATE=latin1_danish_ci COMMENT='hello world';"""
#
# matches = re.search(REGEX_FK_REFERENCE_OPTIONS % 'child_ibfk_1', table_def, re.X)
# self.assertTrue(matches)
# self.assertTrue(matches.group('on_delete'))
# self.assertTrue(matches.group('on_update'))
# self.assertEqual(matches.group('on_delete'), 'SET NULL')
# self.assertEqual(matches.group('on_update'), 'CASCADE')
#
# matches = re.search(REGEX_FK_REFERENCE_OPTIONS % 'child_ibfk_1', table_def, re.X)
# self.assertTrue(matches)
# self.assertTrue(matches.group('on_delete'))
# self.assertTrue(matches.group('on_update'))
# self.assertEqual(matches.group('on_delete'), 'RESTRICT')
# self.assertEqual(matches.group('on_update'), 'RESTRICT')
#
# def test_fk_reference_opts_delete(self):
# table_def = """CREATE TABLE `child` (
# `id` int(11) DEFAULT NULL,
# `parent_id` int(11) DEFAULT NULL,
# KEY `par_ind` (`parent_id`),
# CONSTRAINT `child_ibfk_1` FOREIGN KEY (`parent_id`) REFERENCES `parent` (`id`) ON DELETE SET NULL )
# ENGINE=InnoDB DEFAULT CHARSET=latin1 COLLATE=latin1_danish_ci COMMENT='hello world';"""
#
# matches = re.search(REGEX_FK_REFERENCE_OPTIONS % 'child_ibfk_1', table_def, re.X)
# self.assertTrue(matches)
# self.assertTrue(matches.group('on_delete'))
# self.assertTrue(not matches.group('on_update'))
# self.assertEqual(matches.group('on_delete'), 'SET NULL')
#
# def test_fk_reference_opts_update(self):
# table_def = """CREATE TABLE `child` (
# `id` int(11) DEFAULT NULL,
# `parent_id` int(11) DEFAULT NULL,
# KEY `par_ind` (`parent_id`),
# CONSTRAINT `child_ibfk_1` FOREIGN KEY (`parent_id`) REFERENCES `parent` (`id`) ON UPDATE CASCADE )
# ENGINE=InnoDB DEFAULT CHARSET=latin1 COLLATE=latin1_danish_ci COMMENT='hello world';"""
#
# matches = re.search(REGEX_FK_REFERENCE_OPTIONS % 'child_ibfk_1', table_def, re.X)
# self.assertTrue(matches)
# self.assertTrue(not matches.group('on_delete'))
# self.assertTrue(matches.group('on_update'))
# self.assertEqual(matches.group('on_update'), 'CASCADE') | 48.169355 | 186 | 0.680897 |
import re
import unittest
import schemaobject
class TestForeignKeySchema(unittest.TestCase):
def setUp(self):
self.database_url = "mysql://root:root@localhost:3306/"
self.schema = schemaobject.SchemaObject(self.database_url + 'sakila', charset='utf8')
self.fk = self.schema.selected.tables['rental'].foreign_keys
def test_fk_exists(self):
self.assertTrue("fk_rental_customer" in list(self.fk.keys()))
def test_fk_not_exists(self):
self.assertFalse("fk_foobar" in list(self.fk.keys()))
def test_fk_name(self):
self.assertEqual("fk_rental_customer", self.fk['fk_rental_customer'].name)
def test_fk_symbol(self):
self.assertEqual("fk_rental_customer", self.fk['fk_rental_customer'].symbol)
def test_fk_table_name(self):
self.assertEqual("rental", self.fk['fk_rental_customer'].table_name)
def test_fk_table_schema(self):
self.assertEqual("sakila", self.fk['fk_rental_customer'].table_schema)
def test_fk_columns(self):
self.assertEqual(['customer_id'], self.fk['fk_rental_customer'].columns)
def test_fk_referenced_table_name(self):
self.assertEqual("customer", self.fk['fk_rental_customer'].referenced_table_name)
def test_fk_referenced_table_schema(self):
self.assertEqual("sakila", self.fk['fk_rental_customer'].referenced_table_schema)
def test_fk_referenced_columns(self):
self.assertEqual(['customer_id'], self.fk['fk_rental_customer'].referenced_columns)
def test_fk_match_option(self):
self.assertEqual(None, self.fk['fk_rental_customer'].match_option)
def test_fk_update_rule(self):
self.assertEqual("CASCADE", self.fk['fk_rental_customer'].update_rule)
def test_fk_delete_rule(self):
self.assertEqual("RESTRICT", self.fk['fk_rental_customer'].delete_rule)
def test_format_referenced_col_with_length(self):
self.assertEqual('`fk_rental_customer`(11)', schemaobject.foreignkey.ForeignKeySchema._format_referenced_col('fk_rental_customer', 11))
def test_format_referenced_col_without_length(self):
self.assertEqual('`fk_rental_customer`', schemaobject.foreignkey.ForeignKeySchema._format_referenced_col('fk_rental_customer', 0))
self.assertEqual('`fk_rental_customer`', schemaobject.foreignkey.ForeignKeySchema._format_referenced_col('fk_rental_customer', None))
def test_fk_drop(self):
self.assertEqual(self.fk['fk_rental_customer'].drop(), "DROP FOREIGN KEY `fk_rental_customer`")
def test_fk_create(self):
self.assertEqual(self.fk['fk_rental_customer'].create(),
"ADD CONSTRAINT `fk_rental_customer` FOREIGN KEY `fk_rental_customer` (`customer_id`) REFERENCES `customer` (`customer_id`) ON DELETE RESTRICT ON UPDATE CASCADE")
def test_fk_eq(self):
self.assertEqual(self.fk['fk_rental_customer'], self.fk['fk_rental_customer'])
def test_fk_neq(self):
self.assertNotEqual(self.fk['fk_rental_customer'], self.fk['fk_rental_inventory'])
# `id` int(11) DEFAULT NULL,
# `parent_id` int(11) DEFAULT NULL,
# KEY `par_ind` (`parent_id`),
# CONSTRAINT `child_ibfk_1` FOREIGN KEY (`parent_id`)
# REFERENCES `parent` (`id`) ON DELETE SET NULL ON UPDATE CASCADE,
# CONSTRAINT `child_ibfk_2` FOREIGN KEY (`parent_id`)
# REFERENCES `parent` (`id`) ON DELETE RESTRICT ON UPDATE RESTRICT )
# ENGINE=InnoDB DEFAULT CHARSET=latin1 COLLATE=latin1_danish_ci COMMENT='hello world';"""
# `id` int(11) DEFAULT NULL,
# `parent_id` int(11) DEFAULT NULL,
# KEY `par_ind` (`parent_id`),
# CONSTRAINT `child_ibfk_1` FOREIGN KEY (`parent_id`) REFERENCES `parent` (`id`) ON DELETE SET NULL )
# ENGINE=InnoDB DEFAULT CHARSET=latin1 COLLATE=latin1_danish_ci COMMENT='hello world';"""
# `id` int(11) DEFAULT NULL,
# `parent_id` int(11) DEFAULT NULL,
# KEY `par_ind` (`parent_id`),
# CONSTRAINT `child_ibfk_1` FOREIGN KEY (`parent_id`) REFERENCES `parent` (`id`) ON UPDATE CASCADE )
# ENGINE=InnoDB DEFAULT CHARSET=latin1 COLLATE=latin1_danish_ci COMMENT='hello world';"""
| true | true |
f738ca4546da7ac1bfe5e0404bac34620c087ed6 | 10,454 | py | Python | xlson/xl_preparation.py | loven-doo/xlson | 2461db4c73bbdc5c84b4a11eb2241c7614ab84f7 | [
"BSD-3-Clause"
] | null | null | null | xlson/xl_preparation.py | loven-doo/xlson | 2461db4c73bbdc5c84b4a11eb2241c7614ab84f7 | [
"BSD-3-Clause"
] | null | null | null | xlson/xl_preparation.py | loven-doo/xlson | 2461db4c73bbdc5c84b4a11eb2241c7614ab84f7 | [
"BSD-3-Clause"
] | null | null | null | import datetime
from copy import deepcopy
from openpyxl import load_workbook
from openpyxl.utils import coordinate_to_tuple
from xlrd import open_workbook, xldate_as_tuple
from xlson.constants import xlson_logger
from xlson.formatting import cell_meta_to_dict
from xlson.handlers import XLSonHandler, XLSonSheetHandler
from xlson.formatting import CELL_DEFAULT_META
from xlson._lib.general_utils import digitalize_str
from xlson._lib.coords_tools import coords_from_range
def prepare_xl(xl_path, data_only=False, values_strip=None, digitalization=True, crop_empty=True, n_rows=None):
    """Convert an Excel workbook (new .xlsx or legacy .xls) to an XLSonHandler.

    Tries the new-format (openpyxl) reader first, then falls back to the
    legacy (xlrd) reader.  If both fail, a stub handler describing the error
    is returned so callers never have to deal with a raised exception.

    Args:
        xl_path: path to the workbook file.
        data_only: openpyxl only - read cached formula results instead of formulas.
        values_strip: characters stripped from string cell values (None = whitespace).
        digitalization: convert numeric-looking strings to numbers.
        crop_empty: trim each sheet to its last non-empty cell.
        n_rows: read at most this many rows per sheet (None = all).

    Returns:
        XLSonHandler: the converted workbook, or an error stub on failure.
    """
    try:
        return prepare_new_xl(new_xl_path=xl_path,
                              data_only=data_only,
                              values_strip=values_strip,
                              digitalization=digitalization,
                              crop_empty=crop_empty,
                              n_rows=n_rows)
    # Was a bare `except:`; keep the broad fallback but let
    # KeyboardInterrupt/SystemExit propagate.
    except Exception:
        xlson_logger.warning("%s cannot be prepared as new Excel format - trying old Excel format preparation" % xl_path)
        try:
            return prepare_old_xl(old_xl_path=xl_path,
                                  values_strip=values_strip,
                                  digitalization=digitalization,
                                  crop_empty=crop_empty,
                                  n_rows=n_rows)
        except Exception:
            xlson_logger.warning("cannot read '%s'" % xl_path)
            # Last resort: a 1x5 sheet spelling out that conversion failed.
            return XLSonHandler(
                main_sheet=XLSonSheetHandler.load_from_dict({
                    "data_df": [["an error", "occurred", "while", "processing", xl_path]],
                    "meta_df": [[None] * 5],
                    "entities": XLSonSheetHandler.entites_0,
                    "supp_sheets": XLSonSheetHandler.supp_sheets_0,
                    "cell_default_meta": CELL_DEFAULT_META,
                }, main_sheet=True),
                supp_sheets=list(),
                source_path=xl_path,
            )
def prepare_new_xl(new_xl_path, data_only=False, values_strip=None, digitalization=True, crop_empty=True, n_rows=None):
    """Convert a new-format (.xlsx) workbook into an XLSonHandler via openpyxl.

    The first sheet (workbook order) becomes the main sheet; every other sheet
    is collected as a supplementary sheet and its name recorded on the main one.

    Args:
        new_xl_path: path to the .xlsx workbook.
        data_only: read cached formula results instead of formula strings.
        values_strip: characters stripped from string cell values (None = whitespace).
        digitalization: convert numeric-looking strings to numbers.
        crop_empty: trim each sheet to its last non-empty cell.
        n_rows: read at most this many rows per sheet (None = all).

    Returns:
        XLSonHandler built from all sheets of the workbook.
    """
    xlson_logger.info("%s conversion to xlson started" % new_xl_path)
    main_sheet = dict()
    supp_sheets_list = list()
    wb = load_workbook(new_xl_path, data_only=data_only)
    n = 0
    for sheet_name in wb.sheetnames:
        # Merged-region lookup tables, consumed by cell_meta_to_dict below.
        merged_cells_dict = get_merged_cells(wb[sheet_name])
        sheet_dict = {
            'cell_default_meta': CELL_DEFAULT_META,
            'sheet_name': sheet_name,
            'data_df': iterate_sheet(wb[sheet_name],
                                     cell_func=get_cell_value,
                                     add_args_dict={'value_strip': values_strip,
                                                    'digitalization': digitalization},
                                     n_rows=n_rows),
            'entities': deepcopy(XLSonSheetHandler.entites_0),
        }
        if crop_empty:
            # Crop data_df to the last non-empty cell, then read meta only
            # for that cropped region.
            last_cell = get_last_cell(sheet_dict['data_df'])
            sheet_dict['data_df'] = [sheet_dict['data_df'][i][:last_cell[1]+1] for i in range(last_cell[0]+1)]
            sheet_dict['meta_df'] = iterate_sheet(wb[sheet_name],
                                                  cell_func=cell_meta_to_dict,
                                                  add_args_dict={'merged_cells_dict': merged_cells_dict},
                                                  last_cell=last_cell,
                                                  n_rows=n_rows)
        else:
            sheet_dict['meta_df'] = iterate_sheet(wb[sheet_name],
                                                  cell_func=cell_meta_to_dict,
                                                  add_args_dict={'merged_cells_dict': merged_cells_dict},
                                                  n_rows=n_rows)
        # First sheet is the main sheet; the rest are supplementary.
        if n == 0:
            main_sheet = sheet_dict
        else:
            supp_sheets_list.append(sheet_dict)
        n += 1
    main_sheet['supp_sheets'] = [supp_sheet['sheet_name'] for supp_sheet in supp_sheets_list]
    xlson_logger.info("%s conversion to xlson finished" % new_xl_path)
    return XLSonHandler(main_sheet=XLSonSheetHandler.load_from_dict(main_sheet, main_sheet=True),
                        supp_sheets=supp_sheets_list,
                        source_path=new_xl_path)
def get_last_cell(data_df):
    """Return the (row, col) of the bottom-most / right-most non-None cell.

    Scans the whole 2-D list; returns (0, 0) when every cell is None.
    """
    last_row, last_col = 0, 0
    for row_idx, row in enumerate(data_df):
        for col_idx, cell in enumerate(row):
            if cell is None:
                continue
            last_row = max(last_row, row_idx)
            last_col = max(last_col, col_idx)
    return last_row, last_col
def get_merged_cells(sheet):
    """Build lookup maps describing the sheet's merged-cell regions.

    Returns a dict with two maps (all coordinates are 0-based (row, col)):
      "merged_with": region's top-left coord -> the other coords in the region
      "merged_to":   each non-top-left coord -> its region's top-left coord
    """
    merged_with = {}
    merged_to = {}
    for cell_range in sheet.merged_cells.ranges:
        refs = cell_range.coord.split(":")
        # openpyxl coordinates are 1-based; shift to 0-based tuples.
        start = tuple(c - 1 for c in coordinate_to_tuple(refs[0]))
        end = tuple(c - 1 for c in coordinate_to_tuple(refs[1]))
        region = coords_from_range(start, end)
        merged_with[start] = region[1:]
        for coord in region[1:]:
            merged_to[coord] = start
    return {
        "merged_with": merged_with,
        "merged_to": merged_to,
    }
def iterate_sheet(sheet, cell_func=None, add_args_dict=None, last_cell=None, n_rows=None):
    """Walk a sheet row by row, building a 2-D list of (transformed) cells.

    When ``cell_func`` is callable it is applied to every cell;
    ``add_args_dict`` supplies its extra arguments (dict -> keyword args,
    list/tuple -> positional args).  ``last_cell`` (row, col) and ``n_rows``
    bound how much of the sheet is visited.
    """
    rows = []
    for row_idx, row in enumerate(sheet):
        if last_cell is not None and row_idx > last_cell[0]:
            break
        # Exact type check on purpose: matches original bool/subclass handling.
        if type(n_rows) is int and row_idx >= n_rows:
            break
        cells = []
        for col_idx, cell in enumerate(row):
            if last_cell is not None and col_idx > last_cell[1]:
                break
            if not callable(cell_func):
                cells.append(cell)
            elif type(add_args_dict) is dict:
                cells.append(cell_func(cell, **add_args_dict))
            elif type(add_args_dict) in (list, tuple):
                cells.append(cell_func(cell, *add_args_dict))
            else:
                cells.append(cell_func(cell))
        rows.append(cells)
    return rows
def get_cell_value(cell, value_strip=None, digitalization=True, special_formating=None, **kwargs):
    """Extract and normalise a single cell's value.

    datetimes become "dd.mm.YYYY" strings; empty strings become None; other
    strings are stripped (and, when ``digitalization`` is on, converted to
    numbers via digitalize_str).  ``special_formating``, when callable,
    replaces the plain ``cell.value`` read.
    """
    raw = special_formating(cell, **kwargs) if callable(special_formating) else cell.value
    if type(raw) is datetime.datetime:
        return raw.strftime("%d.%m.%Y")
    if type(raw) is not str:
        return raw
    if not raw:
        return None
    # Strip only when value_strip is a usable argument for str.strip.
    if value_strip is None or type(value_strip) is str:
        stripped = raw.strip(value_strip)
        return digitalize_str(stripped) if digitalization else stripped
    return raw
def prepare_old_xl(old_xl_path, values_strip=None, digitalization=True, crop_empty=True, n_rows=None):
    """Convert a legacy (.xls) workbook into an XLSonHandler via xlrd.

    Mirrors prepare_new_xl: the first sheet becomes the main sheet, the rest
    are supplementary.  Cell formatting/meta conversion is not implemented
    yet - meta_df is filled with None placeholders of the same shape as
    data_df (see the TODOs below).

    Args:
        old_xl_path: path to the .xls workbook.
        values_strip: characters stripped from string cell values (None = whitespace).
        digitalization: convert numeric-looking strings to numbers.
        crop_empty: trim each sheet to its last non-empty cell.
        n_rows: read at most this many rows per sheet (None = all).

    Returns:
        XLSonHandler built from all sheets of the workbook.
    """
    # TODO: implement formatting info conversion to meta_df
    xlson_logger.info("%s conversion to xlson started" % old_xl_path)
    main_sheet = dict()
    supp_sheets_list = list()
    wb = open_workbook(old_xl_path, formatting_info=True)  # data_only equiv has not been found
    n = 0
    for sheet_name in wb.sheet_names():
        # merged_cells_dict = get_merged_cells(wb.sheet_by_name(sheet_name))  # TODO: implement merged cells preparation
        sheet_dict = {
            'cell_default_meta': CELL_DEFAULT_META,
            'sheet_name': sheet_name,
            'data_df': iterate_sheet(wb.sheet_by_name(sheet_name).get_rows(),
                                     cell_func=get_cell_value,
                                     add_args_dict={'value_strip': values_strip,
                                                    'digitalization': digitalization,
                                                    'special_formating': _check_xlrd_types,
                                                    'datemode': wb.datemode},
                                     n_rows=n_rows),
            'entities': deepcopy(XLSonSheetHandler.entites_0),
        }
        if crop_empty:
            last_cell = get_last_cell(sheet_dict['data_df'])
            if last_cell == (0, 0):
                # Entirely empty sheet collapses to a single None cell.
                sheet_dict['data_df'] = [[None]]
            else:
                sheet_dict['data_df'] = [sheet_dict['data_df'][i][:last_cell[1]+1] for i in range(last_cell[0]+1)]
            # NOTE(review): [[None] * k] * m makes every row alias the same
            # list object - fine while meta_df stays a read-only placeholder,
            # but must change once real meta is written per cell.
            sheet_dict['meta_df'] = [[None] * (last_cell[1]+1)] * (last_cell[0]+1)
            # TODO: fill meta_df
            # sheet_dict['meta_df'] = iterate_sheet(wb[sheet_name],
            #                                       cell_func=cell_meta_to_dict,
            #                                       add_args_dict={'merged_cells_dict': merged_cells_dict},
            #                                       last_cell=last_cell,
            #                                       n_rows=n_rows)
        else:
            sheet_dict['meta_df'] = [[None] * wb.sheet_by_name(sheet_name).ncols] * wb.sheet_by_name(sheet_name).nrows
            # TODO: fill meta_df
            # sheet_dict['meta_df'] = iterate_sheet(wb[sheet_name],
            #                                       cell_func=cell_meta_to_dict,
            #                                       add_args_dict={'merged_cells_dict': merged_cells_dict},
            #                                       n_rows=n_rows)
        # First sheet is the main sheet; the rest are supplementary.
        if n == 0:
            main_sheet = sheet_dict
        else:
            supp_sheets_list.append(sheet_dict)
        n += 1
    main_sheet['supp_sheets'] = [supp_sheet['sheet_name'] for supp_sheet in supp_sheets_list]
    xlson_logger.info("%s conversion to xlson finished" % old_xl_path)
    return XLSonHandler(main_sheet=XLSonSheetHandler.load_from_dict(main_sheet, main_sheet=True),
                        supp_sheets=supp_sheets_list,
                        source_path=old_xl_path)
def _check_xlrd_types(cell, **kwargs):
v = cell.value
if cell.ctype == 0 or cell.ctype == 6:
return None
if cell.ctype == 2:
if v - float(int(v)) > 0.0:
return v
else:
return int(v)
if cell.ctype == 3:
return datetime.datetime(*xldate_as_tuple(v, kwargs.get("datemode", 0)))
# return datetime.datetime(1900, 1, 1) + datetime.timedelta(int(v)-2)
return v
| 43.92437 | 121 | 0.55816 | import datetime
from copy import deepcopy
from openpyxl import load_workbook
from openpyxl.utils import coordinate_to_tuple
from xlrd import open_workbook, xldate_as_tuple
from xlson.constants import xlson_logger
from xlson.formatting import cell_meta_to_dict
from xlson.handlers import XLSonHandler, XLSonSheetHandler
from xlson.formatting import CELL_DEFAULT_META
from xlson._lib.general_utils import digitalize_str
from xlson._lib.coords_tools import coords_from_range
def prepare_xl(xl_path, data_only=False, values_strip=None, digitalization=True, crop_empty=True, n_rows=None):
try:
return prepare_new_xl(new_xl_path=xl_path,
data_only=data_only,
values_strip=values_strip,
digitalization=digitalization,
crop_empty=crop_empty,
n_rows=n_rows)
except:
xlson_logger.warning("%s cannot be prepared as new Excel format - trying old Excel format preparation" % xl_path)
try:
return prepare_old_xl(old_xl_path=xl_path,
values_strip=values_strip,
digitalization=digitalization,
crop_empty=crop_empty,
n_rows=n_rows)
except:
xlson_logger.warning("cannot read '%s'" % xl_path)
return XLSonHandler(
main_sheet=XLSonSheetHandler.load_from_dict({
"data_df": [["an error", "occurred", "while", "processing", xl_path]],
"meta_df": [[None] * 5],
"entities": XLSonSheetHandler.entites_0,
"supp_sheets": XLSonSheetHandler.supp_sheets_0,
"cell_default_meta": CELL_DEFAULT_META,
}, main_sheet=True),
supp_sheets = list(),
source_path = xl_path,
)
def prepare_new_xl(new_xl_path, data_only=False, values_strip=None, digitalization=True, crop_empty=True, n_rows=None):
xlson_logger.info("%s conversion to xlson started" % new_xl_path)
main_sheet = dict()
supp_sheets_list = list()
wb = load_workbook(new_xl_path, data_only=data_only)
n = 0
for sheet_name in wb.sheetnames:
merged_cells_dict = get_merged_cells(wb[sheet_name])
sheet_dict = {
'cell_default_meta': CELL_DEFAULT_META,
'sheet_name': sheet_name,
'data_df': iterate_sheet(wb[sheet_name],
cell_func=get_cell_value,
add_args_dict={'value_strip': values_strip,
'digitalization': digitalization},
n_rows=n_rows),
'entities': deepcopy(XLSonSheetHandler.entites_0),
}
if crop_empty:
last_cell = get_last_cell(sheet_dict['data_df'])
sheet_dict['data_df'] = [sheet_dict['data_df'][i][:last_cell[1]+1] for i in range(last_cell[0]+1)]
sheet_dict['meta_df'] = iterate_sheet(wb[sheet_name],
cell_func=cell_meta_to_dict,
add_args_dict={'merged_cells_dict': merged_cells_dict},
last_cell=last_cell,
n_rows=n_rows)
else:
sheet_dict['meta_df'] = iterate_sheet(wb[sheet_name],
cell_func=cell_meta_to_dict,
add_args_dict={'merged_cells_dict': merged_cells_dict},
n_rows=n_rows)
if n == 0:
main_sheet = sheet_dict
else:
supp_sheets_list.append(sheet_dict)
n += 1
main_sheet['supp_sheets'] = [supp_sheet['sheet_name'] for supp_sheet in supp_sheets_list]
xlson_logger.info("%s conversion to xlson finished" % new_xl_path)
return XLSonHandler(main_sheet=XLSonSheetHandler.load_from_dict(main_sheet, main_sheet=True),
supp_sheets=supp_sheets_list,
source_path=new_xl_path)
def get_last_cell(data_df):
max_row = 0
max_col = 0
for i in range(len(data_df)):
for j in range(len(data_df[i])):
if data_df[i][j] is not None:
if i > max_row:
max_row = i
if j > max_col:
max_col = j
return max_row, max_col
def get_merged_cells(sheet):
merged_cells_dict = {
"merged_with": {},
"merged_to": {},
}
mc_ranges = sheet.merged_cells.ranges
for mc_range in mc_ranges:
c_list = mc_range.coord.split(":")
first_c = tuple(map(lambda c: c - 1, coordinate_to_tuple(c_list[0])))
last_c = tuple(map(lambda c: c - 1, coordinate_to_tuple(c_list[1])))
merged_coords_list = coords_from_range(first_c, last_c)
merged_cells_dict["merged_with"][first_c] = merged_coords_list[1:]
for merged_coord in merged_coords_list[1:]:
merged_cells_dict["merged_to"][merged_coord] = first_c
return merged_cells_dict
def iterate_sheet(sheet, cell_func=None, add_args_dict=None, last_cell=None, n_rows=None):
rows_list = list()
i = 0
for row in sheet:
if last_cell is not None and i > last_cell[0]:
break
if type(n_rows) is int and i >= n_rows:
break
curr_row_list = list()
j = 0
for cell in row:
if last_cell is not None and j > last_cell[1]:
break
if callable(cell_func):
if type(add_args_dict) is dict:
curr_row_list.append(cell_func(cell, **add_args_dict))
elif type(add_args_dict) is list or type(add_args_dict) is tuple:
curr_row_list.append(cell_func(cell, *add_args_dict))
else:
curr_row_list.append(cell_func(cell))
else:
curr_row_list.append(cell)
j += 1
rows_list.append(curr_row_list)
i += 1
return rows_list
def get_cell_value(cell, value_strip=None, digitalization=True, special_formating=None, **kwargs):
if callable(special_formating):
v = special_formating(cell, **kwargs)
else:
v = cell.value
if type(v) is datetime.datetime:
return v.strftime("%d.%m.%Y")
if type(v) is str:
if not v:
return None
if type(value_strip) is str or value_strip is None:
if digitalization:
return digitalize_str(v.strip(value_strip))
else:
return v.strip(value_strip)
return v
def prepare_old_xl(old_xl_path, values_strip=None, digitalization=True, crop_empty=True, n_rows=None):
xlson_logger.info("%s conversion to xlson started" % old_xl_path)
main_sheet = dict()
supp_sheets_list = list()
wb = open_workbook(old_xl_path, formatting_info=True)
n = 0
for sheet_name in wb.sheet_names():
_default_meta': CELL_DEFAULT_META,
'sheet_name': sheet_name,
'data_df': iterate_sheet(wb.sheet_by_name(sheet_name).get_rows(),
cell_func=get_cell_value,
add_args_dict={'value_strip': values_strip,
'digitalization': digitalization,
'special_formating': _check_xlrd_types,
'datemode': wb.datemode},
n_rows=n_rows),
'entities': deepcopy(XLSonSheetHandler.entites_0),
}
if crop_empty:
last_cell = get_last_cell(sheet_dict['data_df'])
if last_cell == (0, 0):
sheet_dict['data_df'] = [[None]]
else:
sheet_dict['data_df'] = [sheet_dict['data_df'][i][:last_cell[1]+1] for i in range(last_cell[0]+1)]
sheet_dict['meta_df'] = [[None] * (last_cell[1]+1)] * (last_cell[0]+1)
else:
sheet_dict['meta_df'] = [[None] * wb.sheet_by_name(sheet_name).ncols] * wb.sheet_by_name(sheet_name).nrows
if n == 0:
main_sheet = sheet_dict
else:
supp_sheets_list.append(sheet_dict)
n += 1
main_sheet['supp_sheets'] = [supp_sheet['sheet_name'] for supp_sheet in supp_sheets_list]
xlson_logger.info("%s conversion to xlson finished" % old_xl_path)
return XLSonHandler(main_sheet=XLSonSheetHandler.load_from_dict(main_sheet, main_sheet=True),
supp_sheets=supp_sheets_list,
source_path=old_xl_path)
def _check_xlrd_types(cell, **kwargs):
v = cell.value
if cell.ctype == 0 or cell.ctype == 6:
return None
if cell.ctype == 2:
if v - float(int(v)) > 0.0:
return v
else:
return int(v)
if cell.ctype == 3:
return datetime.datetime(*xldate_as_tuple(v, kwargs.get("datemode", 0)))
return v
| true | true |
f738ca9f58a637abb2c6f7fcd732220c4a38674a | 483 | py | Python | imagersite/imager_images/migrations/0002_auto_20171203_2331.py | famavott/django-imager | a9867656af7a665f81574c1be5d50a2a703b4af4 | [
"MIT"
] | null | null | null | imagersite/imager_images/migrations/0002_auto_20171203_2331.py | famavott/django-imager | a9867656af7a665f81574c1be5d50a2a703b4af4 | [
"MIT"
] | 1 | 2017-11-27T05:32:39.000Z | 2017-11-27T05:32:39.000Z | imagersite/imager_images/migrations/0002_auto_20171203_2331.py | famavott/django-imager | a9867656af7a665f81574c1be5d50a2a703b4af4 | [
"MIT"
] | null | null | null | # -*- coding: utf-8 -*-
# Generated by Django 1.11.7 on 2017-12-03 23:31
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
    """Alter Album.photo to a ManyToManyField with reverse accessor ``album``."""

    dependencies = [
        ('imager_images', '0001_initial'),
    ]

    operations = [
        migrations.AlterField(
            model_name='album',
            name='photo',
            # related_name='album' exposes photo.album as the reverse relation.
            field=models.ManyToManyField(related_name='album', to='imager_images.Photo'),
        ),
    ]
| 23 | 89 | 0.625259 |
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('imager_images', '0001_initial'),
]
operations = [
migrations.AlterField(
model_name='album',
name='photo',
field=models.ManyToManyField(related_name='album', to='imager_images.Photo'),
),
]
| true | true |
f738cbc0425789b9ea6b28462237198682a598ac | 2,823 | py | Python | mindspore/python/mindspore/train/callback/_loss_monitor.py | zimo-geek/mindspore | 665ec683d4af85c71b2a1f0d6829356f2bc0e1ff | [
"Apache-2.0"
] | 1 | 2021-12-27T13:42:29.000Z | 2021-12-27T13:42:29.000Z | mindspore/python/mindspore/train/callback/_loss_monitor.py | zimo-geek/mindspore | 665ec683d4af85c71b2a1f0d6829356f2bc0e1ff | [
"Apache-2.0"
] | null | null | null | mindspore/python/mindspore/train/callback/_loss_monitor.py | zimo-geek/mindspore | 665ec683d4af85c71b2a1f0d6829356f2bc0e1ff | [
"Apache-2.0"
] | null | null | null | # Copyright 2020 Huawei Technologies Co., Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ============================================================================
"""LossMonitor Callback class."""
import numpy as np
from mindspore.common.tensor import Tensor
from ._callback import Callback
class LossMonitor(Callback):
"""
Monitor the loss in training.
If the loss is NAN or INF, it will terminate training.
Note:
If per_print_times is 0, do not print loss.
Args:
per_print_times (int): How many steps to print once loss. During sink mode, it will print loss in the
nearest step. Default: 1.
Raises:
ValueError: If per_print_times is not an integer or less than zero.
"""
def __init__(self, per_print_times=1):
super(LossMonitor, self).__init__()
if not isinstance(per_print_times, int) or per_print_times < 0:
raise ValueError("The argument 'per_print_times' must be int and >= 0, "
"but got {}".format(per_print_times))
self._per_print_times = per_print_times
self._last_print_time = 0
def step_end(self, run_context):
"""
Print training loss at the end of step.
Args:
run_context (RunContext): Context of the train running.
"""
cb_params = run_context.original_args()
loss = cb_params.net_outputs
if isinstance(loss, (tuple, list)):
if isinstance(loss[0], Tensor) and isinstance(loss[0].asnumpy(), np.ndarray):
loss = loss[0]
if isinstance(loss, Tensor) and isinstance(loss.asnumpy(), np.ndarray):
loss = float(np.mean(loss.asnumpy()))
cur_step_in_epoch = (cb_params.cur_step_num - 1) % cb_params.batch_num + 1
if isinstance(loss, float) and (np.isnan(loss) or np.isinf(loss)):
raise ValueError("epoch: {} step: {}. Invalid loss, terminating training.".format(
cb_params.cur_epoch_num, cur_step_in_epoch))
if self._per_print_times != 0 and (cb_params.cur_step_num - self._last_print_time) >= self._per_print_times:
self._last_print_time = cb_params.cur_step_num
print("epoch: %s step: %s, loss is %s" % (cb_params.cur_epoch_num, cur_step_in_epoch, loss), flush=True)
| 38.671233 | 116 | 0.650372 |
import numpy as np
from mindspore.common.tensor import Tensor
from ._callback import Callback
class LossMonitor(Callback):
def __init__(self, per_print_times=1):
super(LossMonitor, self).__init__()
if not isinstance(per_print_times, int) or per_print_times < 0:
raise ValueError("The argument 'per_print_times' must be int and >= 0, "
"but got {}".format(per_print_times))
self._per_print_times = per_print_times
self._last_print_time = 0
def step_end(self, run_context):
cb_params = run_context.original_args()
loss = cb_params.net_outputs
if isinstance(loss, (tuple, list)):
if isinstance(loss[0], Tensor) and isinstance(loss[0].asnumpy(), np.ndarray):
loss = loss[0]
if isinstance(loss, Tensor) and isinstance(loss.asnumpy(), np.ndarray):
loss = float(np.mean(loss.asnumpy()))
cur_step_in_epoch = (cb_params.cur_step_num - 1) % cb_params.batch_num + 1
if isinstance(loss, float) and (np.isnan(loss) or np.isinf(loss)):
raise ValueError("epoch: {} step: {}. Invalid loss, terminating training.".format(
cb_params.cur_epoch_num, cur_step_in_epoch))
if self._per_print_times != 0 and (cb_params.cur_step_num - self._last_print_time) >= self._per_print_times:
self._last_print_time = cb_params.cur_step_num
print("epoch: %s step: %s, loss is %s" % (cb_params.cur_epoch_num, cur_step_in_epoch, loss), flush=True)
| true | true |
f738cc052adb066e7d7edc7e0e9e7e58caf06a76 | 1,459 | py | Python | sandbox/lib/jumpscale/JumpscaleLibsExtra/sal_zos/tfchain/TfChainFactory.py | threefoldtech/threebot_prebuilt | 1f0e1c65c14cef079cd80f73927d7c8318755c48 | [
"Apache-2.0"
] | 1 | 2020-10-05T08:53:57.000Z | 2020-10-05T08:53:57.000Z | sandbox/lib/jumpscale/JumpscaleLibsExtra/sal_zos/tfchain/TfChainFactory.py | threefoldtech/threebot_prebuilt | 1f0e1c65c14cef079cd80f73927d7c8318755c48 | [
"Apache-2.0"
] | 17 | 2019-11-14T08:41:37.000Z | 2020-05-27T09:23:51.000Z | sandbox/lib/jumpscale/JumpscaleLibsExtra/sal_zos/tfchain/TfChainFactory.py | threefoldtech/threebot_prebuilt | 1f0e1c65c14cef079cd80f73927d7c8318755c48 | [
"Apache-2.0"
] | null | null | null | from .TfChain import TfChainClient, TfChainExplorer, TfChainBridged, TfChainDaemon
from Jumpscale import j
JSBASE = j.baseclasses.object_config_collection
class TfChainFactory(JSBASE):
__jslocation__ = "j.sal_zos.tfchain"
def daemon(
self,
name,
container,
data_dir="/mnt/data",
rpc_addr="0.0.0.0:23112",
api_addr="localhost:23110",
network="standard",
):
return TfChainDaemon(
name=name, container=container, data_dir=data_dir, rpc_addr=rpc_addr, api_addr=api_addr, network=network
)
def explorer(
self,
name,
container,
domain,
data_dir="/mnt/data",
rpc_addr="0.0.0.0:23112",
api_addr="localhost:23110",
network="standard",
):
return TfChainExplorer(
name=name,
container=container,
data_dir=data_dir,
rpc_addr=rpc_addr,
api_addr=api_addr,
domain=domain,
network=network,
)
def client(self, name, container, wallet_passphrase, api_addr="localhost:23110"):
return TfChainClient(name=name, container=container, addr=api_addr, wallet_passphrase=wallet_passphrase)
def bridged(self, name, container, rpc_addr, network, eth_port, account_json, account_password):
return TfChainBridged(name, container, rpc_addr, network, eth_port, account_json, account_password)
| 30.395833 | 116 | 0.638794 | from .TfChain import TfChainClient, TfChainExplorer, TfChainBridged, TfChainDaemon
from Jumpscale import j
JSBASE = j.baseclasses.object_config_collection
class TfChainFactory(JSBASE):
__jslocation__ = "j.sal_zos.tfchain"
def daemon(
self,
name,
container,
data_dir="/mnt/data",
rpc_addr="0.0.0.0:23112",
api_addr="localhost:23110",
network="standard",
):
return TfChainDaemon(
name=name, container=container, data_dir=data_dir, rpc_addr=rpc_addr, api_addr=api_addr, network=network
)
def explorer(
self,
name,
container,
domain,
data_dir="/mnt/data",
rpc_addr="0.0.0.0:23112",
api_addr="localhost:23110",
network="standard",
):
return TfChainExplorer(
name=name,
container=container,
data_dir=data_dir,
rpc_addr=rpc_addr,
api_addr=api_addr,
domain=domain,
network=network,
)
def client(self, name, container, wallet_passphrase, api_addr="localhost:23110"):
return TfChainClient(name=name, container=container, addr=api_addr, wallet_passphrase=wallet_passphrase)
def bridged(self, name, container, rpc_addr, network, eth_port, account_json, account_password):
return TfChainBridged(name, container, rpc_addr, network, eth_port, account_json, account_password)
| true | true |
f738cd121013442359615340082f55926bc27b78 | 4,808 | py | Python | acme/adders/reverb/sequence.py | StepNeverStop/acme | 1966849118ac25a86dcbf9b8edf9c9615cde41ff | [
"Apache-2.0"
] | null | null | null | acme/adders/reverb/sequence.py | StepNeverStop/acme | 1966849118ac25a86dcbf9b8edf9c9615cde41ff | [
"Apache-2.0"
] | null | null | null | acme/adders/reverb/sequence.py | StepNeverStop/acme | 1966849118ac25a86dcbf9b8edf9c9615cde41ff | [
"Apache-2.0"
] | null | null | null | # python3
# Copyright 2018 DeepMind Technologies Limited. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Sequence adders.
This implements adders which add sequences or partial trajectories.
"""
from typing import Optional
from acme.adders.reverb import base
from acme.adders.reverb import utils
import reverb
import tree
class SequenceAdder(base.ReverbAdder):
"""An adder which adds sequences of fixed length."""
def __init__(
self,
client: reverb.Client,
sequence_length: int,
period: int,
delta_encoded: bool = False,
chunk_length: Optional[int] = None,
priority_fns: Optional[base.PriorityFnMapping] = None,
pad_end_of_episode: bool = True,
):
"""Makes a SequenceAdder instance.
Args:
client: See docstring for BaseAdder.
sequence_length: The fixed length of sequences we wish to add.
period: The period with which we add sequences. If less than
sequence_length, overlapping sequences are added. If equal to
sequence_length, sequences are exactly non-overlapping.
delta_encoded: If `True` (False by default) enables delta encoding, see
`Client` for more information.
chunk_length: Number of timesteps grouped together before delta encoding
and compression. See `Client` for more information.
priority_fns: See docstring for BaseAdder.
pad_end_of_episode: If True (default) then upon end of episode the current
sequence will be padded (with observations, actions, etc... whose values
are 0) until its length is `sequence_length`. If False then the last
sequence in the episode may have length less than `sequence_length`.
"""
super().__init__(
client=client,
buffer_size=sequence_length,
max_sequence_length=sequence_length,
delta_encoded=delta_encoded,
chunk_length=chunk_length,
priority_fns=priority_fns)
self._period = period
self._step = 0
self._pad_end_of_episode = pad_end_of_episode
def reset(self):
self._step = 0
super().reset()
def _write(self):
# Append the previous step and increment number of steps written.
self._writer.append(self._buffer[-1])
self._step += 1
self._maybe_add_priorities()
def _write_last(self):
# Create a final step.
final_step = utils.final_step_like(self._buffer[0], self._next_observation)
# Append the final step.
self._buffer.append(final_step)
self._writer.append(final_step)
self._step += 1
# Determine the delta to the next time we would write a sequence.
first_write = self._step <= self._max_sequence_length
if first_write:
delta = self._max_sequence_length - self._step
else:
delta = (self._period -
(self._step - self._max_sequence_length)) % self._period
# Bump up to the position where we will write a sequence.
self._step += delta
if self._pad_end_of_episode:
zero_step = tree.map_structure(utils.zeros_like, final_step)
# Pad with zeros to get a full sequence.
for _ in range(delta):
self._buffer.append(zero_step)
self._writer.append(zero_step)
elif not first_write:
# Pop items from the buffer to get a truncated sequence.
# Note: this is consistent with the padding loop above, since adding zero
# steps pops the left-most elements. Here we just pop without padding.
for _ in range(delta):
self._buffer.popleft()
# Write priorities for the sequence.
self._maybe_add_priorities()
def _maybe_add_priorities(self):
if not (
# Write the first time we hit the max sequence length...
self._step == self._max_sequence_length or
# ... or every `period`th time after hitting max length.
(self._step > self._max_sequence_length and
(self._step - self._max_sequence_length) % self._period == 0)):
return
# Compute priorities for the buffer.
steps = list(self._buffer)
num_steps = len(steps)
table_priorities = utils.calculate_priorities(self._priority_fns, steps)
# Create a prioritized item for each table.
for table_name, priority in table_priorities.items():
self._writer.create_item(table_name, num_steps, priority)
| 35.094891 | 80 | 0.703619 |
from typing import Optional
from acme.adders.reverb import base
from acme.adders.reverb import utils
import reverb
import tree
class SequenceAdder(base.ReverbAdder):
def __init__(
self,
client: reverb.Client,
sequence_length: int,
period: int,
delta_encoded: bool = False,
chunk_length: Optional[int] = None,
priority_fns: Optional[base.PriorityFnMapping] = None,
pad_end_of_episode: bool = True,
):
super().__init__(
client=client,
buffer_size=sequence_length,
max_sequence_length=sequence_length,
delta_encoded=delta_encoded,
chunk_length=chunk_length,
priority_fns=priority_fns)
self._period = period
self._step = 0
self._pad_end_of_episode = pad_end_of_episode
def reset(self):
self._step = 0
super().reset()
def _write(self):
self._writer.append(self._buffer[-1])
self._step += 1
self._maybe_add_priorities()
def _write_last(self):
final_step = utils.final_step_like(self._buffer[0], self._next_observation)
self._buffer.append(final_step)
self._writer.append(final_step)
self._step += 1
first_write = self._step <= self._max_sequence_length
if first_write:
delta = self._max_sequence_length - self._step
else:
delta = (self._period -
(self._step - self._max_sequence_length)) % self._period
self._step += delta
if self._pad_end_of_episode:
zero_step = tree.map_structure(utils.zeros_like, final_step)
for _ in range(delta):
self._buffer.append(zero_step)
self._writer.append(zero_step)
elif not first_write:
for _ in range(delta):
self._buffer.popleft()
self._maybe_add_priorities()
def _maybe_add_priorities(self):
if not (
self._step == self._max_sequence_length or
(self._step > self._max_sequence_length and
(self._step - self._max_sequence_length) % self._period == 0)):
return
steps = list(self._buffer)
num_steps = len(steps)
table_priorities = utils.calculate_priorities(self._priority_fns, steps)
for table_name, priority in table_priorities.items():
self._writer.create_item(table_name, num_steps, priority)
| true | true |
f738cd4aef654b46eaa7486f003ac40a4d71b295 | 4,097 | py | Python | onnx/backend/test/case/node/reducemin.py | How-Wang/onnx | c940fa3fea84948e46603cab2f86467291443beb | [
"Apache-2.0"
] | 1 | 2022-02-04T07:45:14.000Z | 2022-02-04T07:45:14.000Z | onnx/backend/test/case/node/reducemin.py | How-Wang/onnx | c940fa3fea84948e46603cab2f86467291443beb | [
"Apache-2.0"
] | null | null | null | onnx/backend/test/case/node/reducemin.py | How-Wang/onnx | c940fa3fea84948e46603cab2f86467291443beb | [
"Apache-2.0"
] | null | null | null | # SPDX-License-Identifier: Apache-2.0
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from __future__ import unicode_literals
import numpy as np # type: ignore
import onnx
from ..base import Base
from . import expect
class ReduceMin(Base):
@staticmethod
def export_do_not_keepdims() -> None:
shape = [3, 2, 2]
axes = [1]
keepdims = 0
node = onnx.helper.make_node(
'ReduceMin',
inputs=['data'],
outputs=['reduced'],
axes=axes,
keepdims=keepdims)
data = np.array([[[5, 1], [20, 2]], [[30, 1], [40, 2]], [[55, 1], [60, 2]]], dtype=np.float32)
reduced = np.minimum.reduce(data, axis=tuple(axes), keepdims=keepdims == 1)
#print(reduced)
#[[5., 1.]
# [30., 1.]
# [55., 1.]]
expect(node, inputs=[data], outputs=[reduced], name='test_reduce_min_do_not_keepdims_example')
np.random.seed(0)
data = np.random.uniform(-10, 10, shape).astype(np.float32)
reduced = np.minimum.reduce(data, axis=tuple(axes), keepdims=keepdims == 1)
expect(node, inputs=[data], outputs=[reduced], name='test_reduce_min_do_not_keepdims_random')
@staticmethod
def export_keepdims() -> None:
shape = [3, 2, 2]
axes = [1]
keepdims = 1
node = onnx.helper.make_node(
'ReduceMin', inputs=['data'],
outputs=['reduced'],
axes=axes,
keepdims=keepdims)
data = np.array([[[5, 1], [20, 2]], [[30, 1], [40, 2]], [[55, 1], [60, 2]]], dtype=np.float32)
reduced = np.minimum.reduce(data, axis=tuple(axes), keepdims=keepdims == 1)
#print(reduced)
#[[[5., 1.]]
# [[30., 1.]]
# [[55., 1.]]]
expect(node, inputs=[data], outputs=[reduced], name='test_reduce_min_keepdims_example')
np.random.seed(0)
data = np.random.uniform(-10, 10, shape).astype(np.float32)
reduced = np.minimum.reduce(data, axis=tuple(axes), keepdims=keepdims == 1)
expect(node, inputs=[data], outputs=[reduced], name='test_reduce_min_keepdims_random')
@staticmethod
def export_default_axes_keepdims() -> None:
shape = [3, 2, 2]
axes = None
keepdims = 1
node = onnx.helper.make_node(
'ReduceMin',
inputs=['data'],
outputs=['reduced'],
keepdims=keepdims)
data = np.array([[[5, 1], [20, 2]], [[30, 1], [40, 2]], [[55, 1], [60, 2]]], dtype=np.float32)
reduced = np.minimum.reduce(data, axis=axes, keepdims=keepdims == 1)
#print(reduced)
#[[[1.]]]
expect(node, inputs=[data], outputs=[reduced], name='test_reduce_min_default_axes_keepdims_example')
np.random.seed(0)
data = np.random.uniform(-10, 10, shape).astype(np.float32)
reduced = np.minimum.reduce(data, axis=axes, keepdims=keepdims == 1)
expect(node, inputs=[data], outputs=[reduced], name='test_reduce_min_default_axes_keepdims_random')
@staticmethod
def export_negative_axes_keepdims() -> None:
shape = [3, 2, 2]
axes = [-2]
keepdims = 1
node = onnx.helper.make_node(
'ReduceMin', inputs=['data'],
outputs=['reduced'],
axes=axes,
keepdims=keepdims)
data = np.array([[[5, 1], [20, 2]], [[30, 1], [40, 2]], [[55, 1], [60, 2]]], dtype=np.float32)
reduced = np.minimum.reduce(data, axis=tuple(axes), keepdims=keepdims == 1)
# print(reduced)
#[[[5., 1.]]
# [[30., 1.]]
# [[55., 1.]]]
expect(node, inputs=[data], outputs=[reduced], name='test_reduce_min_negative_axes_keepdims_example')
np.random.seed(0)
data = np.random.uniform(-10, 10, shape).astype(np.float32)
reduced = np.minimum.reduce(data, axis=tuple(axes), keepdims=keepdims == 1)
expect(node, inputs=[data], outputs=[reduced], name='test_reduce_min_negative_axes_keepdims_random')
| 33.308943 | 109 | 0.573346 |
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from __future__ import unicode_literals
import numpy as np
import onnx
from ..base import Base
from . import expect
class ReduceMin(Base):
@staticmethod
def export_do_not_keepdims() -> None:
shape = [3, 2, 2]
axes = [1]
keepdims = 0
node = onnx.helper.make_node(
'ReduceMin',
inputs=['data'],
outputs=['reduced'],
axes=axes,
keepdims=keepdims)
data = np.array([[[5, 1], [20, 2]], [[30, 1], [40, 2]], [[55, 1], [60, 2]]], dtype=np.float32)
reduced = np.minimum.reduce(data, axis=tuple(axes), keepdims=keepdims == 1)
expect(node, inputs=[data], outputs=[reduced], name='test_reduce_min_do_not_keepdims_example')
np.random.seed(0)
data = np.random.uniform(-10, 10, shape).astype(np.float32)
reduced = np.minimum.reduce(data, axis=tuple(axes), keepdims=keepdims == 1)
expect(node, inputs=[data], outputs=[reduced], name='test_reduce_min_do_not_keepdims_random')
@staticmethod
def export_keepdims() -> None:
shape = [3, 2, 2]
axes = [1]
keepdims = 1
node = onnx.helper.make_node(
'ReduceMin', inputs=['data'],
outputs=['reduced'],
axes=axes,
keepdims=keepdims)
data = np.array([[[5, 1], [20, 2]], [[30, 1], [40, 2]], [[55, 1], [60, 2]]], dtype=np.float32)
reduced = np.minimum.reduce(data, axis=tuple(axes), keepdims=keepdims == 1)
expect(node, inputs=[data], outputs=[reduced], name='test_reduce_min_keepdims_example')
np.random.seed(0)
data = np.random.uniform(-10, 10, shape).astype(np.float32)
reduced = np.minimum.reduce(data, axis=tuple(axes), keepdims=keepdims == 1)
expect(node, inputs=[data], outputs=[reduced], name='test_reduce_min_keepdims_random')
@staticmethod
def export_default_axes_keepdims() -> None:
shape = [3, 2, 2]
axes = None
keepdims = 1
node = onnx.helper.make_node(
'ReduceMin',
inputs=['data'],
outputs=['reduced'],
keepdims=keepdims)
data = np.array([[[5, 1], [20, 2]], [[30, 1], [40, 2]], [[55, 1], [60, 2]]], dtype=np.float32)
reduced = np.minimum.reduce(data, axis=axes, keepdims=keepdims == 1)
expect(node, inputs=[data], outputs=[reduced], name='test_reduce_min_default_axes_keepdims_example')
np.random.seed(0)
data = np.random.uniform(-10, 10, shape).astype(np.float32)
reduced = np.minimum.reduce(data, axis=axes, keepdims=keepdims == 1)
expect(node, inputs=[data], outputs=[reduced], name='test_reduce_min_default_axes_keepdims_random')
@staticmethod
def export_negative_axes_keepdims() -> None:
shape = [3, 2, 2]
axes = [-2]
keepdims = 1
node = onnx.helper.make_node(
'ReduceMin', inputs=['data'],
outputs=['reduced'],
axes=axes,
keepdims=keepdims)
data = np.array([[[5, 1], [20, 2]], [[30, 1], [40, 2]], [[55, 1], [60, 2]]], dtype=np.float32)
reduced = np.minimum.reduce(data, axis=tuple(axes), keepdims=keepdims == 1)
expect(node, inputs=[data], outputs=[reduced], name='test_reduce_min_negative_axes_keepdims_example')
np.random.seed(0)
data = np.random.uniform(-10, 10, shape).astype(np.float32)
reduced = np.minimum.reduce(data, axis=tuple(axes), keepdims=keepdims == 1)
expect(node, inputs=[data], outputs=[reduced], name='test_reduce_min_negative_axes_keepdims_random')
| true | true |
f738cdc91311c1bb24cd8c46ec6c3b5cb5d0cfe4 | 2,509 | py | Python | qcinfo/D.py | icharm/cninfo-sdk | dff3f7fe3044c203d7d9a8c758392053c8b26ee5 | [
"MIT"
] | 1 | 2019-04-14T05:37:56.000Z | 2019-04-14T05:37:56.000Z | qcinfo/D.py | icharm/quantc | dff3f7fe3044c203d7d9a8c758392053c8b26ee5 | [
"MIT"
] | null | null | null | qcinfo/D.py | icharm/quantc | dff3f7fe3044c203d7d9a8c758392053c8b26ee5 | [
"MIT"
] | 1 | 2018-10-18T06:43:43.000Z | 2018-10-18T06:43:43.000Z | # -*- coding: UTF-8 -*-
# Stock data fetch from website.
from qcinfo import xueqiu as XQ
from qcinfo import qcrepo, gtimg
from qcinfo.log import qcinfo_log
logger = qcinfo_log()
async def company_info_async(code):
'''
上市公司基本信息
:param code: Stock code
:return:
'''
return await XQ.company_info_async(code)
def quotes(code, type="d"):
'''
所有行情数据列表
:param code: Stock code
:param type: d:daily, w: weekly
:return: dateframe
'''
return qcrepo.quotes(code, type)
def quotes_lately(code, type="d"):
'''
最近的100条行情数据列表
:param code: Stock code
:param type: d:daily, w: weekly
:return: list
'''
if type == "d":
return gtimg.daily_lately(code)
else:
return gtimg.weekly_lately(code)
############################## Calendar ####################################
def is_trading(date):
'''
是否为交易日
:param date: %Y-%d-%m
:return: True : yes, False: no
'''
cal = qcrepo.calendar()
df = cal.loc[cal['date'] == date]
if df.iloc[0]["open"] == 1:
return True
else:
return False
def calendar():
'''
交易日历
:return: pandas.DataFrame(data=content, columns=["date", "open", "weekday"]) 日期 是否交易日 星期
'''
return qcrepo.calendar()
def last_trading_day_week(date):
'''
本周中最后一个交易日
:param date: String %Y-%m-%d
:return: date string or none(no trading day this week)
'''
cdf = calendar()
last = None
for index, row in cdf.iterrows():
if row["date"] == date:
week_start_index = index - row["weekday"]
for index1, row1 in cdf.iloc[week_start_index:week_start_index+7].iterrows():
if row1["open"] == 1:
last = row1["date"]
return last
def islast_trading_day_week(date):
'''
是否为本周最后一个交易日
:param date: String %Y-%m-%d
:return: true or false
'''
if last_trading_day_week(date) == date:
return True
else:
return False
def previous_trading_day(date, n=1):
'''
指定日期的前一个交易日
:param date: String %Y-%m-%d
:param n: Int 前n个交易日
:return: date string or none(no trading day this week)
'''
cdf = calendar()
previous = None
for index, row in cdf.iterrows():
if row['date'] == date:
for i in range(n+0, n+100):
tmp = cdf.iloc[index - i]
if tmp['open'] == 1:
previous = tmp['date']
break
return previous | 24.359223 | 92 | 0.555201 |
from qcinfo import xueqiu as XQ
from qcinfo import qcrepo, gtimg
from qcinfo.log import qcinfo_log
logger = qcinfo_log()
async def company_info_async(code):
return await XQ.company_info_async(code)
def quotes(code, type="d"):
return qcrepo.quotes(code, type)
def quotes_lately(code, type="d"):
if type == "d":
return gtimg.daily_lately(code)
else:
return gtimg.weekly_lately(code)
| true | true |
f738ce8023f6a2decafbfa571a6db38148ea9429 | 4,411 | py | Python | src/procedural_city_generation/polygons/getBlock.py | kritika-srivastava/The-Conurbation-Algorithm | d5d39d701b1e09c975dceca5445c4398fd5fd93b | [
"MIT"
] | 4 | 2020-09-22T14:28:24.000Z | 2022-02-02T00:15:27.000Z | src/procedural_city_generation/polygons/getBlock.py | kritika-srivastava/The-Conurbation-Algorithm | d5d39d701b1e09c975dceca5445c4398fd5fd93b | [
"MIT"
] | null | null | null | src/procedural_city_generation/polygons/getBlock.py | kritika-srivastava/The-Conurbation-Algorithm | d5d39d701b1e09c975dceca5445c4398fd5fd93b | [
"MIT"
] | 1 | 2021-06-25T15:25:13.000Z | 2021-06-25T15:25:13.000Z | from __future__ import division
import numpy as np
from procedural_city_generation.additional_stuff.Singleton import Singleton
from procedural_city_generation.polygons.Polygon2D import Polygon2D
singleton = Singleton("polygons")
def p_in_poly(poly, point):
x, y = point
n = len(poly)
inside = False
p1x, p1y = poly[0][0]
for i in range(n+1):
p2x, p2y = poly[i % n][0]
if y > min(p1y, p2y):
if y <= max(p1y, p2y):
if x <= max(p1x, p2x):
if p1y != p2y:
xinters = (y-p1y)*(p2x-p1x)/(p2y-p1y)+p1x
if p1x == p2x or x <= xinters:
inside = not inside
p1x, p1y = p2x, p2y
return inside
def getBlock(wedges, vertex_list):
'''Calculate block to be divided into lots, as well as street polygons'''
old_vertices = [vertex_list[wedge.b] for wedge in wedges]
old_poly = Polygon2D([v.coords for v in old_vertices])
new_vertices = []
polylist = []
last2 = []
for i in range(len(old_vertices)):
# Calculate position of new vertex
alpha = wedges[i-1].alpha
a, b, c = old_vertices[i-2], old_vertices[i-1], old_vertices[i]
v1 = a.coords - b.coords
v2 = c.coords - b.coords
n1 = np.array((-v1[1], v1[0]))/np.linalg.norm(v1)
n2 = np.array((v2[1], -v2[0]))/np.linalg.norm(v2)
# Change lengths of normal vectors depending on whether each
# edge is a minor road or a main road
if b.minor_road or a.minor_road:
n1 *= singleton.minor_factor
else:
n1 *= singleton.main_factor
if b.minor_road or c.minor_road:
n2 *= singleton.minor_factor
else:
n2 *= singleton.main_factor
# Check if current vertex is dead end
if not 0 - 0.001 < alpha < 0 + 0.001:
# Not a dead end: move edges which share this vertex
# inwards along their normal vectors, find intersection
try:
intersection = np.linalg.solve(
np.array(((v1), (v2))).T, (b.coords+n2)-(b.coords+n1))
except np.linalg.LinAlgError:
raise Exception(str(v1)+", "+str(v2),
"angle: "+str(wedges[i-1].alpha))
new = b.coords + n1 + intersection[0]*v1
# Check if new vertex is in old polygon
if p_in_poly(old_poly.edges, new):
# Append new vertex to lot polygon
new_vertices.append(new)
these2 = [b.coords, new]
if last2:
street_vertices = last2 + these2
polylist.append(
Polygon2D(street_vertices, poly_type="road"))
last2 = these2[::-1]
else:
# New vertex not in polygon, return old polygon as street polygon
return [old_poly]
else:
# Dead end: determine two new vertices by adding the two normals
# to current vector, then check if these are in old polygon
new1, new2 = b.coords + n1, b.coords + n2
if p_in_poly(old_poly.edges, new1) and p_in_poly(old_poly.edges, new2):
new_vertices += [new1, new2]
if last2:
street_vertices = last2 + [b.coords, new1]
polylist.append(
Polygon2D(street_vertices, poly_type="road"))
street_vertices = [b.coords, new2, new1]
polylist.append(
Polygon2D(street_vertices, poly_type="road"))
last2 = [new2, b.coords]
else:
old_poly.poly_type = "road"
return [old_poly]
street_vertices = last2 + [old_vertices[-1].coords, new_vertices[0]]
polylist.append(Polygon2D(street_vertices, poly_type="road"))
# All new vertices are in old polygon: append block polygon
block_poly = Polygon2D(new_vertices)
if block_poly.area < singleton.max_area:
block_poly.poly_type = "lot"
polylist.append(block_poly)
return polylist
if __name__ == "__main__":
import matplotlib.pyplot as plt
import construct_polygons as cp
polys, vertices = cp.main()
for p in getBlock(polys[1], vertices):
p.selfplot()
plt.show()
| 36.155738 | 83 | 0.557017 | from __future__ import division
import numpy as np
from procedural_city_generation.additional_stuff.Singleton import Singleton
from procedural_city_generation.polygons.Polygon2D import Polygon2D
singleton = Singleton("polygons")
def p_in_poly(poly, point):
x, y = point
n = len(poly)
inside = False
p1x, p1y = poly[0][0]
for i in range(n+1):
p2x, p2y = poly[i % n][0]
if y > min(p1y, p2y):
if y <= max(p1y, p2y):
if x <= max(p1x, p2x):
if p1y != p2y:
xinters = (y-p1y)*(p2x-p1x)/(p2y-p1y)+p1x
if p1x == p2x or x <= xinters:
inside = not inside
p1x, p1y = p2x, p2y
return inside
def getBlock(wedges, vertex_list):
old_vertices = [vertex_list[wedge.b] for wedge in wedges]
old_poly = Polygon2D([v.coords for v in old_vertices])
new_vertices = []
polylist = []
last2 = []
for i in range(len(old_vertices)):
alpha = wedges[i-1].alpha
a, b, c = old_vertices[i-2], old_vertices[i-1], old_vertices[i]
v1 = a.coords - b.coords
v2 = c.coords - b.coords
n1 = np.array((-v1[1], v1[0]))/np.linalg.norm(v1)
n2 = np.array((v2[1], -v2[0]))/np.linalg.norm(v2)
if b.minor_road or a.minor_road:
n1 *= singleton.minor_factor
else:
n1 *= singleton.main_factor
if b.minor_road or c.minor_road:
n2 *= singleton.minor_factor
else:
n2 *= singleton.main_factor
if not 0 - 0.001 < alpha < 0 + 0.001:
try:
intersection = np.linalg.solve(
np.array(((v1), (v2))).T, (b.coords+n2)-(b.coords+n1))
except np.linalg.LinAlgError:
raise Exception(str(v1)+", "+str(v2),
"angle: "+str(wedges[i-1].alpha))
new = b.coords + n1 + intersection[0]*v1
if p_in_poly(old_poly.edges, new):
new_vertices.append(new)
these2 = [b.coords, new]
if last2:
street_vertices = last2 + these2
polylist.append(
Polygon2D(street_vertices, poly_type="road"))
last2 = these2[::-1]
else:
return [old_poly]
else:
new1, new2 = b.coords + n1, b.coords + n2
if p_in_poly(old_poly.edges, new1) and p_in_poly(old_poly.edges, new2):
new_vertices += [new1, new2]
if last2:
street_vertices = last2 + [b.coords, new1]
polylist.append(
Polygon2D(street_vertices, poly_type="road"))
street_vertices = [b.coords, new2, new1]
polylist.append(
Polygon2D(street_vertices, poly_type="road"))
last2 = [new2, b.coords]
else:
old_poly.poly_type = "road"
return [old_poly]
street_vertices = last2 + [old_vertices[-1].coords, new_vertices[0]]
polylist.append(Polygon2D(street_vertices, poly_type="road"))
block_poly = Polygon2D(new_vertices)
if block_poly.area < singleton.max_area:
block_poly.poly_type = "lot"
polylist.append(block_poly)
return polylist
if __name__ == "__main__":
import matplotlib.pyplot as plt
import construct_polygons as cp
polys, vertices = cp.main()
for p in getBlock(polys[1], vertices):
p.selfplot()
plt.show()
| true | true |
f738ceaf874ab5508e04c45a60cbc61c1d376c10 | 3,610 | py | Python | extended_kalman_filter/extended_kalman_filter.py | ryan-dd/autonomous-systems | 39fa1394e6b9577600e52d9b7ecd9184a1c90ce1 | [
"MIT"
] | null | null | null | extended_kalman_filter/extended_kalman_filter.py | ryan-dd/autonomous-systems | 39fa1394e6b9577600e52d9b7ecd9184a1c90ce1 | [
"MIT"
] | null | null | null | extended_kalman_filter/extended_kalman_filter.py | ryan-dd/autonomous-systems | 39fa1394e6b9577600e52d9b7ecd9184a1c90ce1 | [
"MIT"
] | null | null | null | from math import cos, sin, atan2, exp
import numpy as np
from heading_range_robot.parameters import *
class EKF:
    """Extended Kalman filter localizing a unicycle-model robot from
    range/bearing measurements to known landmarks.

    Implements the EKF-localization algorithm of Thrun et al.,
    "Probabilistic Robotics" (Tables 7.2/7.3): a velocity motion model
    for the prediction step and a range-bearing landmark model for the
    correction step.  The belief over the state (x, y, theta) is kept as
    a (3, 1) column mean vector plus a 3x3 covariance matrix.
    """

    def __init__(self, sample_period):
        """Initialize the belief and the noise/landmark models.

        Args:
            sample_period: Time step (seconds) between successive
                prediction steps.
        """
        self._change_t = sample_period
        # Belief mean (x, y, theta) as a (3, 1) column vector.
        self.mean_belief = np.vstack((INITIAL_X, INITIAL_Y, INITIAL_THETA))
        self.covariance_belief = np.eye(3)
        # Measurement noise covariance: diag(range variance, bearing variance).
        self.Qt = np.eye(2)*np.vstack((STD_DEV_LOCATION_RANGE**2, STD_DEV_LOCATION_BEARING**2))
        # Known landmark map, one (x, y) row per landmark.
        self.all_features = np.vstack((LANDMARK_1_LOCATION, LANDMARK_2_LOCATION, LANDMARK_3_LOCATION))

    def prediction_step(self, theta_prev, vc, wc):
        """Propagate the belief through the velocity motion model.

        Args:
            theta_prev: Previous heading estimate (rad).
            vc: Commanded translational velocity.
            wc: Commanded angular velocity.  Must be nonzero — the
                constant-curvature motion model divides by it.
        """
        change_t = self._change_t
        theta = theta_prev
        # Gt: Jacobian of the motion model w.r.t. the state (x, y, theta).
        Gt = np.array([
            [1, 0, -vc/wc*cos(theta) + vc/wc*cos(theta + wc*change_t)],
            [0, 1, -vc/wc*sin(theta) + vc/wc*sin(theta + wc*change_t)],
            [0, 0, 1]])
        # Vt: Jacobian of the motion model w.r.t. the controls (v, w);
        # maps control-space noise into state space.  Second row follows
        # d/dv, d/dw of  y' = y + v/w*cos(theta) - v/w*cos(theta + w*dt)
        # (Probabilistic Robotics, Table 7.2); the previous implementation
        # had the trigonometric terms of this row negated.
        Vt = np.array([
            [(-sin(theta) + sin(theta + wc*change_t))/wc, vc*(sin(theta)-sin(theta + wc*change_t))/(wc**2) + (vc*cos(theta + wc*change_t)*change_t)/wc],
            [(cos(theta) - cos(theta + wc*change_t))/wc, -vc*(cos(theta)-cos(theta + wc*change_t))/(wc**2) + (vc*sin(theta + wc*change_t)*change_t)/wc],
            [0, change_t]])
        # Mt: control-noise covariance; grows with commanded speeds.
        Mt = np.array([
            [ALPHA1*vc**2 + ALPHA2*wc**2, 0],
            [0, ALPHA3*vc**2 + ALPHA4*wc**2]
        ])
        # Mean propagated through the noise-free motion model.
        self.mean_belief = self.mean_belief + np.array([
            [-vc/wc*sin(theta) + vc/wc*sin(theta + wc*change_t)],
            [vc/wc*cos(theta) - vc/wc*cos(theta + wc*change_t)],
            [wc*change_t]
        ])
        self.covariance_belief = Gt @ self.covariance_belief @ Gt.T + Vt @ Mt @ Vt.T

    def measurement_step(self, true_state):
        """Correct the belief with one simulated range/bearing
        measurement per landmark, applied sequentially (each update
        starts from the belief left by the previous landmark).

        Args:
            true_state: True pose (x, y, theta), used only to synthesize
                the noisy measurements via ``simulate_measurement``.
        """
        Qt = self.Qt
        for feature in self.all_features:
            f_x, f_y = feature[0], feature[1]
            mean_x, mean_y, mean_theta = self.mean_belief.flatten()
            # Expected measurement (range, bearing) from the current belief.
            q = (f_x - mean_x)**2 + (f_y - mean_y)**2
            zti = np.array([
                [np.sqrt(q)],
                [np.arctan2((f_y - mean_y), (f_x - mean_x)) - mean_theta]])
            measurement = simulate_measurement(true_state, f_x, f_y)
            # Ht: Jacobian of the measurement model w.r.t. the state.
            Ht = np.array([
                [-(f_x - mean_x)/np.sqrt(q), -(f_y - mean_y)/np.sqrt(q), 0.0],
                [(f_y - mean_y)/q, -(f_x - mean_x)/q, -1.0]])
            # Innovation covariance, Kalman gain, and belief update.
            # NOTE(review): the bearing innovation is not wrapped to
            # [-pi, pi]; large heading errors may update incorrectly.
            St = Ht @ self.covariance_belief @ Ht.T + Qt
            Kt = self.covariance_belief @ Ht.T @ np.linalg.inv(St)
            self.mean_belief = self.mean_belief + Kt @ (measurement - zti)
            self.covariance_belief = (np.eye(len(Kt)) - Kt @ Ht) @ self.covariance_belief
            # Expose the most recent Kalman gain (overwritten per landmark).
            self.kt = Kt
def simulate_measurement(true_state, f_x, f_y):
    """Return a noisy (range, bearing) measurement of the landmark at
    (f_x, f_y) as seen from the true robot pose.

    ``true_state`` is the (x, y, theta) ground-truth pose; the result is a
    2x1 column vector with independent Gaussian noise on each component.
    """
    true_x = true_state[0]
    true_y = true_state[1]
    true_theta = true_state[2]
    # Offsets from the robot to the landmark.
    dx = f_x - true_x
    dy = f_y - true_y
    # Noise-free range and bearing from the true pose.
    ideal = np.array([
        [np.sqrt(dx**2 + dy**2)],
        [np.arctan2(dy, dx) - true_theta]]).reshape((2, 1))
    noise = np.vstack((range_measurement_noise(), bearing_measurement_noise()))
    return ideal + noise
def range_measurement_noise():
    """Draw one sample of zero-mean Gaussian noise for the range channel."""
    return np.random.normal(loc=0, scale=STD_DEV_LOCATION_RANGE)
def bearing_measurement_noise():
    """Draw one sample of zero-mean Gaussian noise for the bearing channel."""
    return np.random.normal(loc=0, scale=STD_DEV_LOCATION_BEARING)
import numpy as np
from heading_range_robot.parameters import *
class EKF:
def __init__(self, sample_period):
self._change_t = sample_period
self.mean_belief = np.vstack((INITIAL_X, INITIAL_Y, INITIAL_THETA))
self.covariance_belief = np.eye(3)
self.Qt = np.eye(2)*np.vstack((STD_DEV_LOCATION_RANGE**2, STD_DEV_LOCATION_BEARING**2))
self.all_features = np.vstack((LANDMARK_1_LOCATION, LANDMARK_2_LOCATION, LANDMARK_3_LOCATION))
def prediction_step(self, theta_prev, vc, wc):
change_t = self._change_t
theta = theta_prev
Gt = np.array([
[1, 0, -vc/wc*cos(theta) + vc/wc*cos(theta + wc*change_t)],
[0, 1, -vc/wc*sin(theta) + vc/wc*sin(theta + wc*change_t)],
[0, 0, 1]])
Vt = np.array([
[(-sin(theta) + sin(theta + wc*change_t))/wc, vc*(sin(theta)-sin(theta + wc*change_t))/(wc**2) + (vc*cos(theta + wc*change_t)*change_t)/wc],
[(-cos(theta) + cos(theta + wc*change_t))/wc, vc*(cos(theta)-cos(theta + wc*change_t))/(wc**2) + (vc*sin(theta + wc*change_t)*change_t)/wc],
[0, change_t]])
Mt = np.array([
[ALPHA1*vc**2 + ALPHA2*wc**2, 0],
[0, ALPHA3*vc**2 + ALPHA4*wc**2]
])
self.mean_belief = self.mean_belief + np.array([
[-vc/wc*sin(theta) + vc/wc*sin(theta + wc*change_t)],
[vc/wc*cos(theta) - vc/wc*cos(theta + wc*change_t)],
[wc*change_t]
])
self.covariance_belief = Gt @ self.covariance_belief @ Gt.T + Vt @ Mt @ Vt.T
def measurement_step(self, true_state):
Qt = self.Qt
for feature in self.all_features:
f_x = feature[0]
f_y = feature[1]
mean_x = self.mean_belief[0]
mean_y = self.mean_belief[1]
mean_theta = self.mean_belief[2]
q = (f_x - mean_x)**2 + (f_y - mean_y)**2
zti = np.array([
[np.sqrt(q)],
[np.arctan2((f_y - mean_y), (f_x - mean_x)) - mean_theta]]).reshape((2,1))
measurement = simulate_measurement(true_state, f_x, f_y)
Ht = np.array([
[-(f_x - mean_x)/np.sqrt(q), -(f_y - mean_y)/np.sqrt(q), np.array([0])],
[(f_y - mean_y)/q, -(f_x - mean_x)/q, np.array([-1])]]).reshape((2,3))
covariance_belief = self.covariance_belief
mean_belief = self.mean_belief
St = Ht @ covariance_belief @ Ht.T + Qt
Kt = covariance_belief @ Ht.T @ np.linalg.inv(St)
self.mean_belief = mean_belief + Kt @ (measurement - zti)
self.covariance_belief = (np.eye(len(Kt)) - Kt @ Ht) @ covariance_belief
self.kt = Kt
def simulate_measurement(true_state, f_x, f_y):
true_x = true_state[0]
true_y = true_state[1]
true_theta = true_state[2]
q = (f_x - true_x)**2 + (f_y - true_y)**2
zt = np.array([
[np.sqrt(q)],
[np.arctan2((f_y - true_y), (f_x - true_x)) - true_theta]]).reshape((2,1))
return zt + np.vstack((range_measurement_noise(), bearing_measurement_noise()))
def range_measurement_noise():
return np.random.normal(0, STD_DEV_LOCATION_RANGE)
def bearing_measurement_noise():
return np.random.normal(0, STD_DEV_LOCATION_BEARING) | true | true |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.