hexsha
stringlengths 40
40
| size
int64 1
1.03M
| ext
stringclasses 10
values | lang
stringclasses 1
value | max_stars_repo_path
stringlengths 3
239
| max_stars_repo_name
stringlengths 5
130
| max_stars_repo_head_hexsha
stringlengths 40
78
| max_stars_repo_licenses
listlengths 1
10
| max_stars_count
int64 1
191k
⌀ | max_stars_repo_stars_event_min_datetime
stringlengths 24
24
⌀ | max_stars_repo_stars_event_max_datetime
stringlengths 24
24
⌀ | max_issues_repo_path
stringlengths 3
239
| max_issues_repo_name
stringlengths 5
130
| max_issues_repo_head_hexsha
stringlengths 40
78
| max_issues_repo_licenses
listlengths 1
10
| max_issues_count
int64 1
67k
⌀ | max_issues_repo_issues_event_min_datetime
stringlengths 24
24
⌀ | max_issues_repo_issues_event_max_datetime
stringlengths 24
24
⌀ | max_forks_repo_path
stringlengths 3
239
| max_forks_repo_name
stringlengths 5
130
| max_forks_repo_head_hexsha
stringlengths 40
78
| max_forks_repo_licenses
listlengths 1
10
| max_forks_count
int64 1
105k
⌀ | max_forks_repo_forks_event_min_datetime
stringlengths 24
24
⌀ | max_forks_repo_forks_event_max_datetime
stringlengths 24
24
⌀ | content
stringlengths 1
1.03M
| avg_line_length
float64 1
958k
| max_line_length
int64 1
1.03M
| alphanum_fraction
float64 0
1
|
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
4a0aa0758bfa595cd25f970e948e10a627041392
| 935
|
py
|
Python
|
tests/api_testing/test_default.py
|
ooclab/ga.service
|
894b4703628b2ce93790db31939009783e8e7b09
|
[
"MIT"
] | null | null | null |
tests/api_testing/test_default.py
|
ooclab/ga.service
|
894b4703628b2ce93790db31939009783e8e7b09
|
[
"MIT"
] | null | null | null |
tests/api_testing/test_default.py
|
ooclab/ga.service
|
894b4703628b2ce93790db31939009783e8e7b09
|
[
"MIT"
] | null | null | null |
# pylint: disable=R0201
import os
from yaml import safe_load
from swagger_spec_validator.util import get_validator
from .base import BaseTestCase
class HealthTestCase(BaseTestCase):
    """GET /_health - liveness probe endpoint."""

    def test_health(self):
        """The service answers 200 with a literal ``ok`` body."""
        response = self.fetch("/_health")
        self.assertEqual(response.code, 200)
        self.assertEqual(response.body, b"ok")
class SpecTestCase(BaseTestCase):
    """GET / - SwaggerUI documentation page and spec validity."""

    def test_spec(self):
        """The documentation page is served successfully."""
        resp = self.fetch("/")
        self.assertEqual(resp.code, 200)

    def test_validate_swaggerui(self):
        """The bundled schema.yml is a valid Swagger/OpenAPI document."""
        curdir = os.path.dirname(__file__)
        spec_path = os.path.join(curdir, "../../src/codebase/schema.yml")
        # Fix: the original called safe_load(open(spec_path)) and left the
        # file handle to the garbage collector; close it deterministically.
        with open(spec_path) as spec_file:
            spec_json = safe_load(spec_file)
        validator = get_validator(spec_json)
        validator.validate_spec(spec_json)
| 21.744186
| 73
| 0.617112
|
4a0aa1e7c3d7da93395bb8245c72fb4cb1cae055
| 5,062
|
py
|
Python
|
tests/test_rollingrank.py
|
contribu/rollingrank
|
33cdeadf5eda724f5d50438ae7b314b3670d3503
|
[
"MIT"
] | 9
|
2020-04-03T17:22:59.000Z
|
2021-11-19T01:09:54.000Z
|
tests/test_rollingrank.py
|
contribu/rollingrank
|
33cdeadf5eda724f5d50438ae7b314b3670d3503
|
[
"MIT"
] | 3
|
2020-12-17T13:18:06.000Z
|
2022-03-02T11:12:47.000Z
|
tests/test_rollingrank.py
|
contribu/rollingrank
|
33cdeadf5eda724f5d50438ae7b314b3670d3503
|
[
"MIT"
] | null | null | null |
from unittest import TestCase
import numpy as np
import pandas as pd
import rollingrank
import random
class TestRollingrank(TestCase):
    """Unit tests for ``rollingrank.rollingrank``.

    Conventions exercised below: positions with fewer than ``window``
    samples yield NaN; ties default to the 'average' rank method (see
    ``test_method_default``); ``pct=True`` scales ranks into [0, 1].
    """
    def test_normal_case(self):
        x = np.array([0.1, 0.2, 0.3, 0.25, 0.1, 0.2, 0.3])
        y = rollingrank.rollingrank(x, window=3)
        np.testing.assert_array_equal(y, [np.nan, np.nan, 3, 2, 1, 2, 3])
    def test_float16(self):
        x = np.array([-1, 0, 1, 3, 2]).astype(np.float16)
        y = rollingrank.rollingrank(x, window=3)
        np.testing.assert_array_equal(y, [np.nan, np.nan, 3, 3, 2])
    # --- tie-breaking rank methods -------------------------------------
    def test_method_default(self):
        # Two equal values rank 1.5 -> the default method is 'average'.
        x = np.array([0.1, 0.1])
        y = rollingrank.rollingrank(x, window=2)
        np.testing.assert_array_equal(y, [np.nan, 1.5])
    def test_method_average(self):
        x = np.array([0.1, 0.1])
        y = rollingrank.rollingrank(x, window=2, method='average')
        np.testing.assert_array_equal(y, [np.nan, 1.5])
    def test_method_min(self):
        x = np.array([0.1, 0.1])
        y = rollingrank.rollingrank(x, window=2, method='min')
        np.testing.assert_array_equal(y, [np.nan, 1])
    def test_method_max(self):
        x = np.array([0.1, 0.1])
        y = rollingrank.rollingrank(x, window=2, method='max')
        np.testing.assert_array_equal(y, [np.nan, 2])
    def test_method_first(self):
        x = np.array([0.1, 0.1])
        y = rollingrank.rollingrank(x, window=2, method='first')
        np.testing.assert_array_equal(y, [np.nan, 2])
    # --- window edge cases ---------------------------------------------
    def test_window1(self):
        x = np.array([0.1, 0.2, 0.3, 0.25, 0.1, 0.2, 0.3])
        y = rollingrank.rollingrank(x, window=1)
        np.testing.assert_array_equal(y, [1, 1, 1, 1, 1, 1, 1])
    def test_rollingrank_same_window(self):
        x = np.array([0.1, 0.2, 0.3, 0.25, 0.1, 0.2, 0.3])
        y = rollingrank.rollingrank(x, window=7)
        np.testing.assert_array_equal(y, [np.nan, np.nan, np.nan, np.nan, np.nan, np.nan, 6.5])
    def test_rollingrank_large_window(self):
        # window longer than the input -> all NaN.
        x = np.array([0.1, 0.2, 0.3, 0.25, 0.1, 0.2, 0.3])
        y = rollingrank.rollingrank(x, window=8)
        np.testing.assert_array_equal(y, [np.nan, np.nan, np.nan, np.nan, np.nan, np.nan, np.nan])
    # --- pct (percentile) output modes ---------------------------------
    def test_rollingrank_pct(self):
        x = np.array([0.1, 0.2, 0.3, 0.25, 0.1, 0.2, 0.3])
        y = rollingrank.rollingrank(x, window=3, pct=True)
        np.testing.assert_array_equal(y, [np.nan, np.nan, 1, 2.0 / 3, 1.0 / 3, 2.0 / 3, 1])
    def test_rollingrank_pct_pandas(self):
        # 'pandas' mode matches the plain pct=True default above.
        x = np.array([0.1, 0.2, 0.3, 0.25, 0.1, 0.2, 0.3])
        y = rollingrank.rollingrank(x, window=3, pct=True, pct_mode='pandas')
        np.testing.assert_array_equal(y, [np.nan, np.nan, 1, 2.0 / 3, 1.0 / 3, 2.0 / 3, 1])
    def test_rollingrank_pct_closed(self):
        # 'closed' mode maps ranks onto the closed interval [0, 1].
        x = np.array([0.1, 0.2, 0.3, 0.25, 0.1, 0.2, 0.3])
        y = rollingrank.rollingrank(x, window=3, pct=True, pct_mode='closed')
        np.testing.assert_array_equal(y, [np.nan, np.nan, 1, 0.5, 0, 0.5, 1])
    def test_rollingrank_pct_closed_window1(self):
        x = np.array([0.1, 0.2, 0.3, 0.25, 0.1, 0.2, 0.3])
        y = rollingrank.rollingrank(x, window=1, pct=True, pct_mode='closed')
        np.testing.assert_array_equal(y, [0.5, 0.5, 0.5, 0.5, 0.5, 0.5, 0.5])
    # --- NaN handling ---------------------------------------------------
    def test_nan(self):
        x = np.array([1, np.nan, 2, np.nan, 3])
        y = rollingrank.rollingrank(x, window=3)
        np.testing.assert_array_equal(y, [np.nan, np.nan, 2, np.nan, 2])
    def test_nan_window1(self):
        x = np.array([1, np.nan, 2])
        y = rollingrank.rollingrank(x, window=1)
        np.testing.assert_array_equal(y, [1, np.nan, 1])
    def test_nan_pct(self):
        x = np.array([1, np.nan, 2, np.nan, 3])
        y = rollingrank.rollingrank(x, window=3, pct=True)
        np.testing.assert_array_equal(y, [np.nan, np.nan, 1, np.nan, 1])
    def test_complex_case(self):
        x = np.array([0.1, 0.2, 0.3, 0.2, 0.1, 0.2, 0.3])
        y = rollingrank.rollingrank(x, window=3)
        np.testing.assert_array_equal(y, [np.nan, np.nan, 3, 1.5, 1, 2.5, 3])
    # --- input container types ------------------------------------------
    def test_list_input(self):
        x = [0.1, 0.2, 0.3, 0.2, 0.1, 0.2, 0.3]
        y = rollingrank.rollingrank(x, window=3)
        np.testing.assert_array_equal(y, [np.nan, np.nan, 3, 1.5, 1, 2.5, 3])
    def test_pandas_series_input(self):
        x = np.array([0.1, 0.2, 0.3, 0.2, 0.1, 0.2, 0.3])
        y = rollingrank.rollingrank(pd.Series(x), window=3)
        np.testing.assert_array_equal(y, [np.nan, np.nan, 3, 1.5, 1, 2.5, 3])
    # --- parallel execution must match serial ---------------------------
    def test_parallel(self):
        x = np.random.rand(2 ** 20)
        y = rollingrank.rollingrank(x, window=3, n_jobs=1)
        y_parallel = rollingrank.rollingrank(x, window=3)
        np.testing.assert_array_equal(y_parallel, y)
    def test_random_test(self):
        # NOTE(review): the RNG is unseeded, so this test is
        # nondeterministic across runs; consider seeding for reproducibility.
        for i in range(100):
            n = random.randint(1, 2 ** 20)
            w = random.randint(1, 2 ** 10)
            x = np.random.rand(n)
            y = rollingrank.rollingrank(x, window=w, n_jobs=1)
            y_parallel = rollingrank.rollingrank(x, window=w)
            np.testing.assert_array_equal(y_parallel, y)
| 41.491803
| 98
| 0.589885
|
4a0aa27fe4db45a9235e1b363dc0f3b4fd7a4b2a
| 7,987
|
py
|
Python
|
qa/rpc-tests/txn_clone.py
|
wmchain/wmc1
|
b993266ddb5c027e5e301a39669556b02581f854
|
[
"MIT"
] | null | null | null |
qa/rpc-tests/txn_clone.py
|
wmchain/wmc1
|
b993266ddb5c027e5e301a39669556b02581f854
|
[
"MIT"
] | null | null | null |
qa/rpc-tests/txn_clone.py
|
wmchain/wmc1
|
b993266ddb5c027e5e301a39669556b02581f854
|
[
"MIT"
] | null | null | null |
#!/usr/bin/env python2
# Copyright (c) 2014-2015 The Bitcoin Core developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
#
# Test proper accounting with an equivalent malleability clone
#
from test_framework.test_framework import BitcoinTestFramework
from test_framework.util import *
class TxnMallTest(BitcoinTestFramework):
    """Check wallet accounting when an equivalent (malleated) clone of a
    transaction is mined instead of the original.

    NOTE(review): this targets the legacy python2 test framework and the
    deprecated "accounts" wallet API (sendfrom / per-account getbalance).
    """
    def add_options(self, parser):
        # --mineblock additionally exercises the path where tx1 is already
        # 1-confirmed before the clone is broadcast.
        parser.add_option("--mineblock", dest="mine_block", default=False, action="store_true",
                          help="Test double-spend of 1-confirmed transaction")
    def setup_network(self):
        # Start with split network:
        return super(TxnMallTest, self).setup_network(True)
    def run_test(self):
        # All nodes should start with 12,500 WMC:
        starting_balance = 12500
        for i in range(4):
            assert_equal(self.nodes[i].getbalance(), starting_balance)
            self.nodes[i].getnewaddress("")  # bug workaround, coins generated assigned to first getnewaddress!
        # Assign coins to foo and bar accounts:
        self.nodes[0].settxfee(.001)
        node0_address_foo = self.nodes[0].getnewaddress("foo")
        fund_foo_txid = self.nodes[0].sendfrom("", node0_address_foo, 12190)
        fund_foo_tx = self.nodes[0].gettransaction(fund_foo_txid)
        node0_address_bar = self.nodes[0].getnewaddress("bar")
        fund_bar_txid = self.nodes[0].sendfrom("", node0_address_bar, 290)
        fund_bar_tx = self.nodes[0].gettransaction(fund_bar_txid)
        # Fees come back as negative numbers, hence the additions below.
        assert_equal(self.nodes[0].getbalance(""),
                     starting_balance - 12190 - 290 + fund_foo_tx["fee"] + fund_bar_tx["fee"])
        # Coins are sent to node1_address
        node1_address = self.nodes[1].getnewaddress("from0")
        # Send tx1, and another transaction tx2 that won't be cloned
        txid1 = self.nodes[0].sendfrom("foo", node1_address, 400, 0)
        txid2 = self.nodes[0].sendfrom("bar", node1_address, 200, 0)
        # Construct a clone of tx1, to be malleated
        rawtx1 = self.nodes[0].getrawtransaction(txid1,1)
        clone_inputs = [{"txid":rawtx1["vin"][0]["txid"],"vout":rawtx1["vin"][0]["vout"]}]
        clone_outputs = {rawtx1["vout"][0]["scriptPubKey"]["addresses"][0]:rawtx1["vout"][0]["value"],
                         rawtx1["vout"][1]["scriptPubKey"]["addresses"][0]:rawtx1["vout"][1]["value"]}
        clone_raw = self.nodes[0].createrawtransaction(clone_inputs, clone_outputs)
        # 3 hex manipulations on the clone are required
        # (all offsets below are in hex characters, i.e. 2 per byte)
        # manipulation 1. sequence is at version+#inputs+input+sigstub
        posseq = 2*(4+1+36+1)
        # Byte-swap the sequence into little-endian at position posseq.
        seqbe = '%08x' % rawtx1["vin"][0]["sequence"]
        clone_raw = clone_raw[:posseq] + seqbe[6:8] + seqbe[4:6] + seqbe[2:4] + seqbe[0:2] + clone_raw[posseq + 8:]
        # manipulation 2. createrawtransaction randomizes the order of its outputs, so swap them if necessary.
        # output 0 is at version+#inputs+input+sigstub+sequence+#outputs
        # 400 WMC serialized is 00902f5009000000
        pos0 = 2*(4+1+36+1+4+1)
        hex400 = "00902f5009000000"
        # output length = 8-byte value + 1-byte script length + script bytes
        output_len = 16 + 2 + 2 * int("0x" + clone_raw[pos0 + 16 : pos0 + 16 + 2], 0)
        # Swap outputs only when the clone's order disagrees with rawtx1's.
        if (rawtx1["vout"][0]["value"] == 400 and clone_raw[pos0 : pos0 + 16] != hex400 or
            rawtx1["vout"][0]["value"] != 400 and clone_raw[pos0 : pos0 + 16] == hex400):
            output0 = clone_raw[pos0 : pos0 + output_len]
            output1 = clone_raw[pos0 + output_len : pos0 + 2 * output_len]
            clone_raw = clone_raw[:pos0] + output1 + output0 + clone_raw[pos0 + 2 * output_len:]
        # manipulation 3. locktime is after outputs
        poslt = pos0 + 2 * output_len
        ltbe = '%08x' % rawtx1["locktime"]
        clone_raw = clone_raw[:poslt] + ltbe[6:8] + ltbe[4:6] + ltbe[2:4] + ltbe[0:2] + clone_raw[poslt + 8:]
        # Use a different signature hash type to sign. This creates an equivalent but malleated clone.
        # Don't send the clone anywhere yet
        tx1_clone = self.nodes[0].signrawtransaction(clone_raw, None, None, "ALL|ANYONECANPAY")
        assert_equal(tx1_clone["complete"], True)
        # Have node0 mine a block, if requested:
        if (self.options.mine_block):
            self.nodes[0].generate(1)
            sync_blocks(self.nodes[0:2])
        tx1 = self.nodes[0].gettransaction(txid1)
        tx2 = self.nodes[0].gettransaction(txid2)
        # Node0's balance should be starting balance, plus 500DASH for another
        # matured block, minus tx1 and tx2 amounts, and minus transaction fees:
        expected = starting_balance + fund_foo_tx["fee"] + fund_bar_tx["fee"]
        if self.options.mine_block: expected += 500
        expected += tx1["amount"] + tx1["fee"]
        expected += tx2["amount"] + tx2["fee"]
        assert_equal(self.nodes[0].getbalance(), expected)
        # foo and bar accounts should be debited:
        assert_equal(self.nodes[0].getbalance("foo", 0), 12190 + tx1["amount"] + tx1["fee"])
        assert_equal(self.nodes[0].getbalance("bar", 0), 290 + tx2["amount"] + tx2["fee"])
        if self.options.mine_block:
            assert_equal(tx1["confirmations"], 1)
            assert_equal(tx2["confirmations"], 1)
            # Node1's "from0" balance should be both transaction amounts:
            assert_equal(self.nodes[1].getbalance("from0"), -(tx1["amount"] + tx2["amount"]))
        else:
            assert_equal(tx1["confirmations"], 0)
            assert_equal(tx2["confirmations"], 0)
        # Send clone and its parent to miner
        self.nodes[2].sendrawtransaction(fund_foo_tx["hex"])
        txid1_clone = self.nodes[2].sendrawtransaction(tx1_clone["hex"])
        # ... mine a block...
        self.nodes[2].generate(1)
        # Reconnect the split network, and sync chain:
        connect_nodes(self.nodes[1], 2)
        self.nodes[2].sendrawtransaction(fund_bar_tx["hex"])
        self.nodes[2].sendrawtransaction(tx2["hex"])
        self.nodes[2].generate(1)  # Mine another block to make sure we sync
        sync_blocks(self.nodes)
        # Re-fetch transaction info:
        tx1 = self.nodes[0].gettransaction(txid1)
        tx1_clone = self.nodes[0].gettransaction(txid1_clone)
        tx2 = self.nodes[0].gettransaction(txid2)
        # Verify expected confirmations: tx1 is conflicted (-2) because its
        # clone spent the same input and got mined instead.
        assert_equal(tx1["confirmations"], -2)
        assert_equal(tx1_clone["confirmations"], 2)
        assert_equal(tx2["confirmations"], 1)
        # Check node0's total balance; should be same as before the clone, + 1000 WMC for 2 matured,
        # less possible orphaned matured subsidy
        expected += 1000
        if (self.options.mine_block):
            expected -= 500
        assert_equal(self.nodes[0].getbalance(), expected)
        assert_equal(self.nodes[0].getbalance("*", 0), expected)
        # Check node0's individual account balances.
        # "foo" should have been debited by the equivalent clone of tx1
        assert_equal(self.nodes[0].getbalance("foo"), 12190 + tx1["amount"] + tx1["fee"])
        # "bar" should have been debited by (possibly unconfirmed) tx2
        assert_equal(self.nodes[0].getbalance("bar", 0), 290 + tx2["amount"] + tx2["fee"])
        # "" should have starting balance, less funding txes, plus subsidies
        assert_equal(self.nodes[0].getbalance("", 0), starting_balance
                                                                - 12190
                                                                + fund_foo_tx["fee"]
                                                                -   290
                                                                + fund_bar_tx["fee"]
                                                                +  1000)
        # Node1's "from0" account balance
        assert_equal(self.nodes[1].getbalance("from0", 0), -(tx1["amount"] + tx2["amount"]))
# Script entry point: run the malleability-clone accounting test.
if __name__ == '__main__':
    TxnMallTest().main()
| 48.114458
| 115
| 0.60924
|
4a0aa465c6bdbbc5440de7a5515951e1d7118fcf
| 21,268
|
py
|
Python
|
espnet/nets/pytorch_backend/e2e_sid_transformer.py
|
creatorscan/espnet-asrtts
|
e516601bd550aeb5d75ee819749c743fc4777eee
|
[
"Apache-2.0"
] | 5
|
2021-04-17T13:12:20.000Z
|
2022-02-22T09:36:45.000Z
|
espnet/nets/pytorch_backend/e2e_sid_transformer.py
|
creatorscan/espnet-asrtts
|
e516601bd550aeb5d75ee819749c743fc4777eee
|
[
"Apache-2.0"
] | null | null | null |
espnet/nets/pytorch_backend/e2e_sid_transformer.py
|
creatorscan/espnet-asrtts
|
e516601bd550aeb5d75ee819749c743fc4777eee
|
[
"Apache-2.0"
] | 5
|
2020-02-24T08:13:54.000Z
|
2022-02-22T09:03:09.000Z
|
# Copyright 2019 Shigeki Karita
# Apache 2.0 (http://www.apache.org/licenses/LICENSE-2.0)
from argparse import Namespace
from distutils.util import strtobool
import logging
import math
import torch
from espnet.nets.asr_interface import ASRInterface
from espnet.nets.pytorch_backend.ctc import CTC
from espnet.nets.pytorch_backend.e2e_asr import CTC_LOSS_THRESHOLD
from espnet.nets.pytorch_backend.e2e_asr import Reporter
from espnet.nets.pytorch_backend.nets_utils import make_pad_mask
from espnet.nets.pytorch_backend.nets_utils import th_accuracy
from espnet.nets.pytorch_backend.transformer.attention import MultiHeadedAttention
from espnet.nets.pytorch_backend.transformer.decoder import Decoder
from espnet.nets.pytorch_backend.transformer.sid_decoder import SID_Decoder
from espnet.nets.pytorch_backend.transformer.encoder import Encoder
from espnet.nets.pytorch_backend.transformer.initializer import initialize
from espnet.nets.pytorch_backend.transformer.label_smoothing_loss import LabelSmoothingLoss
from espnet.nets.pytorch_backend.transformer.mask import subsequent_mask
from espnet.nets.pytorch_backend.transformer.plot import PlotAttentionReport
from espnet.nets.scorers.ctc import CTCPrefixScorer
class E2E(ASRInterface, torch.nn.Module):
    """Transformer encoder/decoder E2E model (speaker-ID variant).

    Derived from the ESPnet transformer ASR model: a standard ``Encoder``
    feeds a project-specific ``SID_Decoder``; the attention loss in
    ``forward`` is plain cross entropy over a single predicted segment.
    """
    @staticmethod
    def add_arguments(parser):
        """Register transformer-specific command-line options on *parser*
        and return it."""
        group = parser.add_argument_group("transformer model setting")
        group.add_argument("--transformer-init", type=str, default="pytorch",
                           choices=["pytorch", "xavier_uniform", "xavier_normal",
                                    "kaiming_uniform", "kaiming_normal"],
                           help='how to initialize transformer parameters')
        group.add_argument("--transformer-input-layer", type=str, default="conv2d",
                           choices=["conv2d", "linear", "embed"],
                           help='transformer input layer type')
        group.add_argument('--transformer-attn-dropout-rate', default=None, type=float,
                           help='dropout in transformer attention. use --dropout-rate if None is set')
        group.add_argument('--transformer-lr', default=10.0, type=float,
                           help='Initial value of learning rate')
        group.add_argument('--transformer-warmup-steps', default=25000, type=int,
                           help='optimizer warmup steps')
        group.add_argument('--transformer-length-normalized-loss', default=True, type=strtobool,
                           help='normalize loss by length')
        group.add_argument('--dropout-rate', default=0.0, type=float,
                           help='Dropout rate for the encoder')
        # Encoder
        group.add_argument('--elayers', default=4, type=int,
                           help='Number of encoder layers (for shared recognition part in multi-speaker asr mode)')
        group.add_argument('--eunits', '-u', default=300, type=int,
                           help='Number of encoder hidden units')
        # Attention
        group.add_argument('--adim', default=320, type=int,
                           help='Number of attention transformation dimensions')
        group.add_argument('--aheads', default=4, type=int,
                           help='Number of heads for multi head attention')
        # Decoder
        group.add_argument('--dlayers', default=1, type=int,
                           help='Number of decoder layers')
        group.add_argument('--dunits', default=320, type=int,
                           help='Number of decoder hidden units')
        return parser
    @property
    def attention_plot_class(self):
        """Plotting hook used by the trainer to dump attention weights."""
        return PlotAttentionReport
    def __init__(self, idim, odim, args, ignore_id=-1):
        """Build encoder, SID decoder, loss, and optional CTC branch.

        :param int idim: input feature dimension
        :param int odim: output (label) dimension; the last id doubles as sos/eos
        :param Namespace args: parsed options (see ``add_arguments``)
        :param int ignore_id: unused here — see NOTE below
        """
        torch.nn.Module.__init__(self)
        if args.transformer_attn_dropout_rate is None:
            args.transformer_attn_dropout_rate = args.dropout_rate
        self.encoder = Encoder(
            idim=idim,
            attention_dim=args.adim,
            attention_heads=args.aheads,
            linear_units=args.eunits,
            num_blocks=args.elayers,
            input_layer=args.transformer_input_layer,
            dropout_rate=args.dropout_rate,
            positional_dropout_rate=args.dropout_rate,
            attention_dropout_rate=args.transformer_attn_dropout_rate
        )
        self.decoder = SID_Decoder(
            odim=odim,
            attention_dim=args.adim,
            attention_heads=args.aheads,
            linear_units=args.dunits,
            num_blocks=args.dlayers,
            dropout_rate=args.dropout_rate,
            positional_dropout_rate=args.dropout_rate,
            self_attention_dropout_rate=args.transformer_attn_dropout_rate,
            src_attention_dropout_rate=args.transformer_attn_dropout_rate
        )
        self.sos = odim - 1
        self.eos = odim - 1
        self.odim = odim
        # NOTE(review): the ignore_id constructor argument is discarded and
        # padding id is forced to eos instead — confirm this is intentional.
        self.ignore_id = self.eos
        self.subsample = [1]
        self.reporter = Reporter()
        # self.lsm_weight = a
        self.criterion = LabelSmoothingLoss(self.odim, self.ignore_id, args.lsm_weight,
                                            args.transformer_length_normalized_loss)
        # self.verbose = args.verbose
        self.reset_parameters(args)
        self.adim = args.adim
        self.mtlalpha = args.mtlalpha
        self.char_list = args.char_list
        if args.mtlalpha > 0.0:
            self.ctc = CTC(odim, args.adim, args.dropout_rate, ctc_type=args.ctc_type, reduce=True)
        else:
            self.ctc = None
        if args.report_cer or args.report_wer:
            from espnet.nets.e2e_asr_common import ErrorCalculator
            self.error_calculator = ErrorCalculator(args.char_list,
                                                    args.sym_space, args.sym_blank,
                                                    args.report_cer, args.report_wer)
        else:
            self.error_calculator = None
        self.rnnlm = None
    def reset_parameters(self, args):
        # initialize parameters
        initialize(self, args.transformer_init)
    def add_sos_eos(self, ys_pad):
        """Prepend sos to each target sequence; return (padded input,
        padded output) pair for teacher forcing.

        NOTE(review): unlike the standard ASR recipe, eos is NOT appended
        to ys_out (the append is commented out below).
        """
        from espnet.nets.pytorch_backend.nets_utils import pad_list
        eos = ys_pad.new([self.eos])
        sos = ys_pad.new([self.sos])
        ys = [y[y != self.ignore_id] for y in ys_pad]  # parse padded ys
        ys_in = [torch.cat([sos, y], dim=0) for y in ys]
        #ys_out = [torch.cat([y, eos], dim=0) for y in ys]
        ys_out = ys
        return pad_list(ys_in, self.eos), pad_list(ys_out, self.ignore_id)
    def target_mask(self, ys_in_pad):
        """Combine the padding mask with a causal (subsequent) mask."""
        ys_mask = ys_in_pad != self.ignore_id
        m = subsequent_mask(ys_mask.size(-1), device=ys_mask.device).unsqueeze(0)
        return ys_mask.unsqueeze(-2) & m
    def forward(self, xs_pad, ilens, ys_pad):
        '''E2E forward
        :param torch.Tensor xs_pad: batch of padded source sequences (B, Tmax, idim)
        :param torch.Tensor ilens: batch of lengths of source sequences (B)
        :param torch.Tensor ys_pad: batch of padded target sequences (B, Lmax)
        :return: ctc loass value
        :rtype: torch.Tensor
        :return: attention loss value
        :rtype: torch.Tensor
        :return: accuracy in attention decoder
        :rtype: float
        '''
        # 1. forward encoder
        xs_pad = xs_pad[:, :max(ilens)]  # for data parallel
        src_mask = (~make_pad_mask(ilens.tolist())).to(xs_pad.device).unsqueeze(-2)
        hs_pad, hs_mask = self.encoder(xs_pad, src_mask)
        self.hs_pad = hs_pad
        # 2. forward decoder
        ys_in_pad, ys_out_pad = self.add_sos_eos(ys_pad)
        ys_mask = self.target_mask(ys_in_pad)
        pred_pad, _ = self.decoder(ys_in_pad, ys_mask, hs_pad, hs_mask)
        self.pred_pad = pred_pad.unsqueeze(1)  # fetching the last segment
        # 3. compute attenttion loss
        # NOTE(review): plain cross entropy is used instead of the
        # LabelSmoothingLoss built in __init__ (see commented line below).
        loss_att = torch.nn.functional.cross_entropy(self.pred_pad, ys_out_pad.view(-1),
                                                     reduction='mean')
        #loss_att = self.criterion(self.pred_pad, ys_out_pad)
        self.acc = th_accuracy(pred_pad.view(-1, self.odim), ys_out_pad,
                               ignore_label=self.ignore_id)
        # 3.1 compute predicted and groundtruth for 1 sample
        for i in range(0, 1):
            y_pred_a = self.char_list[int(torch.topk(pred_pad[i], 1)[1])]
            y_true_a = self.char_list[int(ys_out_pad[i, 0])]
            logging.info("ground truth: %s", str(y_true_a))
            logging.info("prediction: %s", str(y_pred_a))
        # TODO(karita) show predicted text
        # TODO(karita) calculate these stats
        cer_ctc = None
        if self.mtlalpha == 0.0:
            loss_ctc = None
        else:
            batch_size = xs_pad.size(0)
            hs_len = hs_mask.view(batch_size, -1).sum(1)
            loss_ctc = self.ctc(hs_pad.view(batch_size, -1, self.adim), hs_len, ys_pad)
            if self.error_calculator is not None:
                ys_hat = self.ctc.argmax(hs_pad.view(batch_size, -1, self.adim)).data
                cer_ctc = self.error_calculator(ys_hat.cpu(), ys_pad.cpu(), is_ctc=True)
        # 5. compute cer/wer
        if self.training or self.error_calculator is None:
            cer, wer = None, None
        else:
            ys_hat = pred_pad.argmax(dim=-1)
            cer, wer = self.error_calculator(ys_hat.cpu(), ys_pad.cpu())
        # copyied from e2e_asr
        # Interpolate CTC and attention losses by mtlalpha.
        alpha = self.mtlalpha
        if alpha == 0:
            self.loss = loss_att
            loss_att_data = float(loss_att)
            loss_ctc_data = None
        elif alpha == 1:
            self.loss = loss_ctc
            loss_att_data = None
            loss_ctc_data = float(loss_ctc)
        else:
            self.loss = alpha * loss_ctc + (1 - alpha) * loss_att
            loss_att_data = float(loss_att)
            loss_ctc_data = float(loss_ctc)
        loss_data = float(self.loss)
        if loss_data < CTC_LOSS_THRESHOLD and not math.isnan(loss_data):
            self.reporter.report(loss_ctc_data, loss_att_data, self.acc, cer_ctc, cer, wer, loss_data)
        else:
            logging.warning('loss (=%f) is not correct', loss_data)
        return self.loss
    def scorers(self):
        """Scorers used by the generic ESPnet beam search driver."""
        return dict(decoder=self.decoder, ctc=CTCPrefixScorer(self.ctc, self.eos))
    def encode(self, feat):
        """Encode a single utterance (T, D) -> encoder output (T', adim).

        Switches the module to eval mode as a side effect.
        """
        self.eval()
        feat = torch.as_tensor(feat).unsqueeze(0)
        enc_output, _ = self.encoder(feat, None)
        return enc_output.squeeze(0)
    def inference(self, feat, ys, recog_args, char_list=None):
        '''recognize feat
        :param ndnarray x: input acouctic feature (B, T, D) or (T, D)
        :param namespace recog_args: argment namespace contraining options
        :param list char_list: list of characters
        :param torch.nn.Module rnnlm: language model module
        :return: N-best decoding results
        :rtype: list
        TODO(karita): do not recompute previous attention for faster decoding
        '''
        # Single-pass scoring: the SID decoder directly emits class scores
        # and embeddings — no beam search here (contrast with recognize()).
        src_mask = (~make_pad_mask([len(feat)])).unsqueeze(-2)
        enc_output, enc_mask = self.encoder(torch.as_tensor(feat).unsqueeze(0), src_mask)
        logging.info('input lengths: ' + str(enc_output.size(0)))
        # search parms
        #beam = recog_args.beam_size
        #penalty = recog_args.penalty
        #ctc_weight = recog_args.ctc_weight
        ## preprare sos
        #y = self.sos
        #vy = h.new_zeros(1).long()
        #if recog_args.maxlenratio == 0:
        #    maxlen = h.shape[0]
        #else:
        #    # maxlen >= 1
        #    maxlen = max(1, int(recog_args.maxlenratio * h.size(0)))
        #minlen = int(recog_args.minlenratio * h.size(0))
        #logging.info('max output length: ' + str(maxlen))
        #logging.info('min output length: ' + str(minlen))
        # initialize hypothesis
        local_att_scores, embeddings = self.decoder.inference(enc_output, enc_mask)
        #from espnet.nets.pytorch_backend.nets_utils import pad_list
        #ys_pad = pad_list(ys, self.ignore_id)
        #self.acc = th_accuracy(local_att_scores, ys_pad,
        #                       ignore_label=self.ignore_id)
        return local_att_scores, embeddings
    def recognize(self, feat, recog_args, char_list=None, rnnlm=None, use_jit=False):
        '''recognize feat
        :param ndnarray x: input acouctic feature (B, T, D) or (T, D)
        :param namespace recog_args: argment namespace contraining options
        :param list char_list: list of characters
        :param torch.nn.Module rnnlm: language model module
        :return: N-best decoding results
        :rtype: list
        TODO(karita): do not recompute previous attention for faster decoding
        '''
        enc_output = self.encode(feat).unsqueeze(0)
        if recog_args.ctc_weight > 0.0:
            lpz = self.ctc.log_softmax(enc_output)
            lpz = lpz.squeeze(0)
        else:
            lpz = None
        h = enc_output.squeeze(0)
        logging.info('input lengths: ' + str(h.size(0)))
        # search parms
        beam = recog_args.beam_size
        penalty = recog_args.penalty
        ctc_weight = recog_args.ctc_weight
        # preprare sos
        y = self.sos
        vy = h.new_zeros(1).long()
        if recog_args.maxlenratio == 0:
            maxlen = h.shape[0]
        else:
            # maxlen >= 1
            maxlen = max(1, int(recog_args.maxlenratio * h.size(0)))
        minlen = int(recog_args.minlenratio * h.size(0))
        logging.info('max output length: ' + str(maxlen))
        logging.info('min output length: ' + str(minlen))
        # initialize hypothesis
        if rnnlm:
            hyp = {'score': 0.0, 'yseq': [y], 'rnnlm_prev': None}
        else:
            hyp = {'score': 0.0, 'yseq': [y]}
        if lpz is not None:
            import numpy
            from espnet.nets.ctc_prefix_score import CTCPrefixScore
            ctc_prefix_score = CTCPrefixScore(lpz.detach().numpy(), 0, self.eos, numpy)
            hyp['ctc_state_prev'] = ctc_prefix_score.initial_state()
            hyp['ctc_score_prev'] = 0.0
            if ctc_weight != 1.0:
                # pre-pruning based on attention scores
                from espnet.nets.pytorch_backend.rnn.decoders import CTC_SCORING_RATIO
                ctc_beam = min(lpz.shape[-1], int(beam * CTC_SCORING_RATIO))
            else:
                ctc_beam = lpz.shape[-1]
        hyps = [hyp]
        ended_hyps = []
        import six
        traced_decoder = None
        for i in six.moves.range(maxlen):
            logging.debug('position ' + str(i))
            hyps_best_kept = []
            for hyp in hyps:
                # NOTE(review): unsqueeze is not in-place; the next line has
                # no effect on vy.
                vy.unsqueeze(1)
                vy[0] = hyp['yseq'][i]
                # get nbest local scores and their ids
                ys_mask = subsequent_mask(i + 1).unsqueeze(0)
                ys = torch.tensor(hyp['yseq']).unsqueeze(0)
                # FIXME: jit does not match non-jit result
                if use_jit:
                    if traced_decoder is None:
                        traced_decoder = torch.jit.trace(self.decoder.recognize, (ys, ys_mask, enc_output))
                    local_att_scores = traced_decoder(ys, ys_mask, enc_output)
                else:
                    local_att_scores = self.decoder.recognize(ys, ys_mask, enc_output)
                if rnnlm:
                    rnnlm_state, local_lm_scores = rnnlm.predict(hyp['rnnlm_prev'], vy)
                    local_scores = local_att_scores + recog_args.lm_weight * local_lm_scores
                else:
                    local_scores = local_att_scores
                if lpz is not None:
                    # Joint CTC/attention scoring over the ctc_beam candidates.
                    local_best_scores, local_best_ids = torch.topk(
                        local_att_scores, ctc_beam, dim=1)
                    ctc_scores, ctc_states = ctc_prefix_score(
                        hyp['yseq'], local_best_ids[0], hyp['ctc_state_prev'])
                    local_scores = \
                        (1.0 - ctc_weight) * local_att_scores[:, local_best_ids[0]] \
                        + ctc_weight * torch.from_numpy(ctc_scores - hyp['ctc_score_prev'])
                    if rnnlm:
                        local_scores += recog_args.lm_weight * local_lm_scores[:, local_best_ids[0]]
                    local_best_scores, joint_best_ids = torch.topk(local_scores, beam, dim=1)
                    local_best_ids = local_best_ids[:, joint_best_ids[0]]
                else:
                    local_best_scores, local_best_ids = torch.topk(local_scores, beam, dim=1)
                for j in six.moves.range(beam):
                    new_hyp = {}
                    new_hyp['score'] = hyp['score'] + float(local_best_scores[0, j])
                    new_hyp['yseq'] = [0] * (1 + len(hyp['yseq']))
                    new_hyp['yseq'][:len(hyp['yseq'])] = hyp['yseq']
                    new_hyp['yseq'][len(hyp['yseq'])] = int(local_best_ids[0, j])
                    if rnnlm:
                        new_hyp['rnnlm_prev'] = rnnlm_state
                    if lpz is not None:
                        new_hyp['ctc_state_prev'] = ctc_states[joint_best_ids[0, j]]
                        new_hyp['ctc_score_prev'] = ctc_scores[joint_best_ids[0, j]]
                    # will be (2 x beam) hyps at most
                    hyps_best_kept.append(new_hyp)
                hyps_best_kept = sorted(
                    hyps_best_kept, key=lambda x: x['score'], reverse=True)[:beam]
            # sort and get nbest
            hyps = hyps_best_kept
            logging.debug('number of pruned hypothes: ' + str(len(hyps)))
            if char_list is not None:
                logging.debug(
                    'best hypo: ' + ''.join([char_list[int(x)] for x in hyps[0]['yseq'][1:]]))
            # add eos in the final loop to avoid that there are no ended hyps
            if i == maxlen - 1:
                logging.info('adding <eos> in the last postion in the loop')
                for hyp in hyps:
                    hyp['yseq'].append(self.eos)
            # add ended hypothes to a final list, and removed them from current hypothes
            # (this will be a probmlem, number of hyps < beam)
            remained_hyps = []
            for hyp in hyps:
                if hyp['yseq'][-1] == self.eos:
                    # only store the sequence that has more than minlen outputs
                    # also add penalty
                    if len(hyp['yseq']) > minlen:
                        hyp['score'] += (i + 1) * penalty
                        if rnnlm:  # Word LM needs to add final <eos> score
                            hyp['score'] += recog_args.lm_weight * rnnlm.final(
                                hyp['rnnlm_prev'])
                        ended_hyps.append(hyp)
                else:
                    remained_hyps.append(hyp)
            # end detection
            from espnet.nets.e2e_asr_common import end_detect
            if end_detect(ended_hyps, i) and recog_args.maxlenratio == 0.0:
                logging.info('end detected at %d', i)
                break
            hyps = remained_hyps
            if len(hyps) > 0:
                logging.debug('remeined hypothes: ' + str(len(hyps)))
            else:
                logging.info('no hypothesis. Finish decoding.')
                break
            if char_list is not None:
                for hyp in hyps:
                    logging.debug(
                        'hypo: ' + ''.join([char_list[int(x)] for x in hyp['yseq'][1:]]))
            logging.debug('number of ended hypothes: ' + str(len(ended_hyps)))
        nbest_hyps = sorted(
            ended_hyps, key=lambda x: x['score'], reverse=True)[:min(len(ended_hyps), recog_args.nbest)]
        # check number of hypotheis
        if len(nbest_hyps) == 0:
            logging.warning('there is no N-best results, perform recognition again with smaller minlenratio.')
            # should copy becasuse Namespace will be overwritten globally
            recog_args = Namespace(**vars(recog_args))
            recog_args.minlenratio = max(0.0, recog_args.minlenratio - 0.1)
            return self.recognize(feat, recog_args, char_list, rnnlm)
        logging.info('total log probability: ' + str(nbest_hyps[0]['score']))
        logging.info('normalized log probability: ' + str(nbest_hyps[0]['score'] / len(nbest_hyps[0]['yseq'])))
        return nbest_hyps
    def calculate_all_attentions(self, xs_pad, ilens, ys_pad):
        '''E2E attention calculation
        :param torch.Tensor xs_pad: batch of padded input sequences (B, Tmax, idim)
        :param torch.Tensor ilens: batch of lengths of input sequences (B)
        :param torch.Tensor ys_pad: batch of padded character id sequence tensor (B, Lmax)
        :return: attention weights with the following shape,
            1) multi-head case => attention weights (B, H, Lmax, Tmax),
            2) other case => attention weights (B, Lmax, Tmax).
        :rtype: float ndarray
        '''
        with torch.no_grad():
            self.forward(xs_pad, ilens, ys_pad)
        ret = dict()
        # Harvest attention maps cached by every MultiHeadedAttention module
        # during the forward pass above.
        for name, m in self.named_modules():
            if isinstance(m, MultiHeadedAttention):
                ret[name] = m.attn.cpu().numpy()
        return ret
| 44.124481
| 115
| 0.588772
|
4a0aa4825f75e8b41abd17783e211c64f5316d62
| 932
|
py
|
Python
|
0169.Majority Element/solution.py
|
zhlinh/leetcode
|
6dfa0a4df9ec07b2c746a13c8257780880ea04af
|
[
"Apache-2.0"
] | null | null | null |
0169.Majority Element/solution.py
|
zhlinh/leetcode
|
6dfa0a4df9ec07b2c746a13c8257780880ea04af
|
[
"Apache-2.0"
] | null | null | null |
0169.Majority Element/solution.py
|
zhlinh/leetcode
|
6dfa0a4df9ec07b2c746a13c8257780880ea04af
|
[
"Apache-2.0"
] | null | null | null |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
'''
*****************************************
Author: zhlinh
Email: zhlinhng@gmail.com
Version: 0.0.1
Created Time: 2016-03-22
Last_modify: 2016-03-22
******************************************
'''
'''
Given an array of size n, find the majority element.
The majority element is the element that appears more than ⌊ n/2 ⌋ times.
You may assume that the array is non-empty and the majority
element always exist in the array.
Credits:
Special thanks to @ts for adding this problem and creating all test cases.
'''
class Solution(object):
    def majorityElement(self, nums):
        """Return the majority element of *nums* (appears > n/2 times).

        Boyer-Moore voting: keep a single candidate with a vote counter.
        When the counter hits zero, adopt the current value as the new
        candidate; matching values add a vote, others cancel one. The
        guaranteed majority element always survives. O(n) time, O(1) space.

        :type nums: List[int]
        :rtype: int
        """
        candidate, votes = 0, 0
        for value in nums:
            if votes == 0:
                # Counter exhausted: restart with the current value.
                candidate, votes = value, 1
            elif value == candidate:
                votes += 1
            else:
                votes -= 1
        return candidate
| 25.189189
| 74
| 0.530043
|
4a0aa4ccadf6d0b04891719e11482d77d70b5b07
| 918
|
py
|
Python
|
wxPython/CommonControls/CommandLinkButton/CommandLinkButton.py
|
Python-clique/Examples_Python
|
1f86a50b43177a7ee03bef43cb37ceb657a952a4
|
[
"MIT"
] | 1
|
2020-09-08T05:36:32.000Z
|
2020-09-08T05:36:32.000Z
|
wxPython/CommonControls/CommandLinkButton/CommandLinkButton.py
|
Python-clique/Examples_Python
|
1f86a50b43177a7ee03bef43cb37ceb657a952a4
|
[
"MIT"
] | null | null | null |
wxPython/CommonControls/CommandLinkButton/CommandLinkButton.py
|
Python-clique/Examples_Python
|
1f86a50b43177a7ee03bef43cb37ceb657a952a4
|
[
"MIT"
] | 2
|
2021-09-21T09:57:38.000Z
|
2021-09-30T11:16:31.000Z
|
#!/usr/bin/env python3
# -*-coding:utf-8 -*
import wx
import wx.adv
class Frame1(wx.Frame):
    """Demo frame: a wx.adv.CommandLinkButton that counts its own clicks."""
    def __init__(self):
        super().__init__(None, wx.ID_ANY, 'Command link button example', wx.DefaultPosition, wx.Size(300, 300))
        # How many times commandLinkButton1 has been pressed so far.
        self.commandLinkButton1Clicked = 0
        self.panel = wx.Panel(self)
        # CommandLinkButton shows a main label ('Link') with a smaller
        # secondary note ('Information text') underneath it.
        self.commandLinkButton1 = wx.adv.CommandLinkButton(self.panel, wx.ID_ANY, 'Link', 'Information text', wx.Point(30, 30), wx.Size(200, 60))
        self.commandLinkButton1.Bind(wx.EVT_BUTTON, self.OnCommandLinkButton1Click)
        # Static label mirroring the current click count.
        self.staticText1 = wx.StaticText(self.panel, wx.ID_ANY, 'commandLinkButton1 clicked 0 times', wx.Point(30, 100), wx.Size(200, 50))
    def OnCommandLinkButton1Click(self, event):
        """Increment the click counter and refresh the label text."""
        self.commandLinkButton1Clicked += 1
        self.staticText1.SetLabel('commandLinkButton1 clicked {0} times'.format(self.commandLinkButton1Clicked))
application = wx.App()
Frame1().Show()
application.MainLoop()
| 39.913043
| 141
| 0.734205
|
4a0aa62af53e58172657e9e787ef6a6b0a67eccc
| 1,646
|
py
|
Python
|
lab/lab4 Regular Expressions.py
|
SilvesterHsu/COM6115
|
94acc02a844138915fce1afaf7169aab0b6b9d4d
|
[
"MIT"
] | null | null | null |
lab/lab4 Regular Expressions.py
|
SilvesterHsu/COM6115
|
94acc02a844138915fce1afaf7169aab0b6b9d4d
|
[
"MIT"
] | null | null | null |
lab/lab4 Regular Expressions.py
|
SilvesterHsu/COM6115
|
94acc02a844138915fce1afaf7169aab0b6b9d4d
|
[
"MIT"
] | 1
|
2021-02-04T00:23:55.000Z
|
2021-02-04T00:23:55.000Z
|
import re
PATH = 'lab/data/RGX_DATA.html'
def load(PATH):
    """Return the entire contents of the file at *PATH* as one string."""
    with open(PATH) as handle:
        return handle.read()
text = load(PATH)
# QUESTION: 1
def Q1(text):
    """Print every tag (the text between '<' and '>') found in *text*."""
    for tag in re.findall("<(.*?)>", text, re.I):
        print("TAG: {}".format(tag))
Q1(text)
# QUESTION: 2
def Q2(text):
    """Print tags like Q1, labelling <p> as OPENTAG and </p> as CLOSETAG."""
    labels = {'p': "OPENTAG", '/p': "CLOSETAG"}
    for tag in re.findall("<(.*?)>", text, re.I):
        # Unknown tags keep the generic "TAG" prefix.
        print(labels.get(tag, "TAG"), end='')
        print(': {}'.format(tag))
Q2(text)
# QUESTION: 3
def Q3(text):
    """Print tags like Q2, splitting attribute-carrying tags into a tag
    name line plus one indented PARAM line per attribute."""
    labels = {'p': "OPENTAG:", '/p': "CLOSETAG:"}
    for tag in re.findall("<(.*?)>", text, re.I):
        if tag in labels:
            print(labels[tag], tag)
        elif ' ' in tag:
            # First whitespace-separated piece is the tag name, the rest
            # are its attributes.
            pieces = tag.split()
            print("OPENTAG:", pieces[0])
            for attribute in pieces[1:]:
                print(' ' * 3 + "PARAM:", attribute)
        else:
            print("TAG:", tag)
Q3(text)
# QUESTION: 4
def Q4(text):
    """Print each matched open/close tag pair with the content between."""
    pattern = re.compile(r"<(.*?)>(.*)</\1>", re.I)
    for tag, body in pattern.findall(text):
        print("PAIR [{}]: {}".format(tag, body))
Q4(text)
# NOTE(review): exact duplicate of the `load` defined earlier in this file;
# this redefinition shadows the first. Consider deleting one of them.
def load(PATH):
    """Read the whole file at *PATH* and return its contents as a string."""
    with open(PATH) as handle:
        text = handle.read()
    return text
text = load(PATH)
# NOTE(review): exact duplicate of the `Q4` defined earlier in this file;
# this redefinition shadows the first. Consider deleting one of them.
def Q4(text):
    """Print each matched open/close tag pair with the content between."""
    rule = re.compile(r"<(.*?)>(.*)</\1>",re.I)
    for tag,content in rule.findall(text):
        print("PAIR [{}]: {}".format(tag,content))
Q4(text)
rule = re.compile(r"<(.*?)>\n?(.*)\n?<(/\1)>",re.M)
rule.findall(text)
str = "The fat\ncat sat\non the mat."
re.findall('(.at).?$',str,re.M)
| 21.102564
| 79
| 0.532199
|
4a0aa7c53739b0a4a112e280a2a8a2e7e3dee74d
| 1,177
|
py
|
Python
|
backend/api/authentication/migrations/0003_auto_20200601_2036.py
|
jacorea/ismp
|
81cf55559005753f3055165689889b18aec958ac
|
[
"CC0-1.0"
] | 3
|
2020-05-08T03:51:43.000Z
|
2020-06-13T23:12:26.000Z
|
backend/api/authentication/migrations/0003_auto_20200601_2036.py
|
jacorea/ismp
|
81cf55559005753f3055165689889b18aec958ac
|
[
"CC0-1.0"
] | 15
|
2020-05-04T05:49:17.000Z
|
2020-06-01T21:31:03.000Z
|
backend/api/authentication/migrations/0003_auto_20200601_2036.py
|
jacorea/ismp
|
81cf55559005753f3055165689889b18aec958ac
|
[
"CC0-1.0"
] | 11
|
2020-05-01T04:35:24.000Z
|
2020-05-28T17:17:21.000Z
|
from django.db import migrations, models
class Migration(migrations.Migration):
    """Alter metadata on User.groups and User.is_superuser.

    Both operations only redefine the field with (presumably) updated
    help_text -- no column/table change is implied by what is visible here.
    """

    dependencies = [
        ('auth', '0011_update_proxy_permissions'),
        ('authentication', '0002_auto_20200530_2129'),
    ]

    operations = [
        migrations.AlterField(
            model_name='user',
            name='groups',
            field=models.ManyToManyField(blank=True,
                                         help_text="""
The groups this user belongs to. A user will get all permissions granted to each of their groups.""",
                                         related_name='user_set',
                                         related_query_name='user',
                                         to='auth.Group',
                                         verbose_name='groups'),
        ),
        migrations.AlterField(
            model_name='user',
            name='is_superuser',
            field=models.BooleanField(default=False,
                                      help_text="""
Designates that this user has all permissions without explicitly assigning them.""",
                                      verbose_name='superuser status'),
        ),
    ]
| 36.78125
| 101
| 0.497876
|
4a0aaa43a3a276e862d7ac6a27a0072cae85026d
| 1,561
|
py
|
Python
|
core/migrations/0006_change_fields_as_not_null.py
|
profesormig/quimica3a
|
a453f0d7485ebc4b2d7b06a72b44c6c179a3bbd4
|
[
"BSD-3-Clause"
] | null | null | null |
core/migrations/0006_change_fields_as_not_null.py
|
profesormig/quimica3a
|
a453f0d7485ebc4b2d7b06a72b44c6c179a3bbd4
|
[
"BSD-3-Clause"
] | null | null | null |
core/migrations/0006_change_fields_as_not_null.py
|
profesormig/quimica3a
|
a453f0d7485ebc4b2d7b06a72b44c6c179a3bbd4
|
[
"BSD-3-Clause"
] | null | null | null |
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
def remove_null_entries(apps, schema_editor):
    """Delete rows whose soon-to-be-required FK is NULL.

    Run before the AlterField operations in this migration so the NOT NULL
    constraints can be applied cleanly.
    """
    membership_model = apps.get_model(
        "core",
        "ProviderMachineMembership")
    request_model = apps.get_model("core", "MachineRequest")
    # Memberships first, then requests -- same order as the original.
    membership_model.objects.filter(provider_machine=None).delete()
    request_model.objects.filter(parent_machine=None).delete()
class Migration(migrations.Migration):
    """Make several ForeignKeys NOT NULL after purging NULL rows."""

    dependencies = [
        ('core', '0005_delete_null_fields'),
    ]

    operations = [
        # Data cleanup must run first so the NOT NULL alterations below
        # do not fail on existing NULL rows.
        migrations.RunPython(remove_null_entries),
        migrations.AlterField(
            model_name='instance', name='source', field=models.ForeignKey(
                related_name='instances', to='core.InstanceSource'),
            preserve_default=True,),
        migrations.AlterField(
            model_name='providermachinemembership', name='provider_machine',
            field=models.ForeignKey(to='core.ProviderMachine'),
            preserve_default=True,),
        migrations.AlterField(
            model_name='machinerequest', name='parent_machine',
            field=models.ForeignKey(
                related_name='ancestor_machine', to='core.ProviderMachine'),
            preserve_default=True,),
        migrations.AlterField(
            model_name='volumestatushistory', name='volume',
            field=models.ForeignKey(to='core.Volume'),
            preserve_default=True,), ]
| 33.934783
| 76
| 0.663037
|
4a0aaa54aa3a1dab74512493ff458eb0b1eea5f6
| 3,979
|
py
|
Python
|
mmdet/apis/train.py
|
limbo0000/InstanceLoc
|
a419bb6d27d05f5224a734a91a9f366ed815c1e1
|
[
"Apache-2.0"
] | 120
|
2021-02-16T12:06:05.000Z
|
2022-03-30T03:38:37.000Z
|
mmdet/apis/train.py
|
limbo0000/InstanceLoc
|
a419bb6d27d05f5224a734a91a9f366ed815c1e1
|
[
"Apache-2.0"
] | 19
|
2021-02-22T12:52:31.000Z
|
2022-03-07T12:04:03.000Z
|
mmdet/apis/train.py
|
limbo0000/InstanceLoc
|
a419bb6d27d05f5224a734a91a9f366ed815c1e1
|
[
"Apache-2.0"
] | 9
|
2021-02-22T02:35:20.000Z
|
2022-02-25T05:38:52.000Z
|
import random
import numpy as np
import torch
from mmcv.parallel import MMDataParallel, MMDistributedDataParallel
from mmcv.runner import (DistSamplerSeedHook, EpochBasedRunner, OptimizerHook,
build_optimizer)
from mmdet.core import DistEvalHook, EvalHook
from mmdet.datasets import build_dataloader, build_dataset
from mmdet.utils import get_root_logger
def set_random_seed(seed, deterministic=False):
    """Seed every RNG used during training.

    Args:
        seed (int): Seed to be used.
        deterministic (bool): Whether to set the deterministic option for
            CUDNN backend, i.e., set `torch.backends.cudnn.deterministic`
            to True and `torch.backends.cudnn.benchmark` to False.
            Default: False.
    """
    # Same seeding order as before: python, numpy, torch CPU, torch CUDA.
    for seeder in (random.seed, np.random.seed, torch.manual_seed,
                   torch.cuda.manual_seed_all):
        seeder(seed)
    if deterministic:
        # Deterministic kernels trade speed for reproducibility.
        torch.backends.cudnn.deterministic = True
        torch.backends.cudnn.benchmark = False
def train_detector(model,
                   dataset,
                   cfg,
                   distributed=False,
                   validate=False,
                   timestamp=None,
                   meta=None):
    """Build dataloaders, wrap the model, and run epoch-based training.

    Args:
        model (nn.Module): Detector to train.
        dataset (Dataset | list[Dataset]): One training dataset per
            workflow stage.
        cfg (Config): Full training config (data, optimizer, hooks, ...).
        distributed (bool): Wrap in MMDistributedDataParallel when True,
            MMDataParallel otherwise. Default: False.
        validate (bool): Accepted for API compatibility but unused in this
            body -- no evaluation hook is registered here.
        timestamp (str | None): Run timestamp used to align .log and
            .log.json filenames.
        meta (dict | None): Extra metadata forwarded to the runner.
    """
    logger = get_root_logger(cfg.log_level)

    # prepare data loaders
    dataset = dataset if isinstance(dataset, (list, tuple)) else [dataset]
    # Back-compat shim: "imgs_per_gpu" was renamed "samples_per_gpu" in
    # MMDet v2.0; honour the legacy key but warn, and let it win.
    if 'imgs_per_gpu' in cfg.data:
        logger.warning('"imgs_per_gpu" is deprecated in MMDet V2.0. '
                       'Please use "samples_per_gpu" instead')
        if 'samples_per_gpu' in cfg.data:
            logger.warning(
                f'Got "imgs_per_gpu"={cfg.data.imgs_per_gpu} and '
                f'"samples_per_gpu"={cfg.data.samples_per_gpu}, "imgs_per_gpu"'
                f'={cfg.data.imgs_per_gpu} is used in this experiments')
        else:
            logger.warning(
                'Automatically set "samples_per_gpu"="imgs_per_gpu"='
                f'{cfg.data.imgs_per_gpu} in this experiments')
        cfg.data.samples_per_gpu = cfg.data.imgs_per_gpu

    data_loaders = [
        build_dataloader(
            ds,
            cfg.data.samples_per_gpu,
            cfg.data.workers_per_gpu,
            # cfg.gpus will be ignored if distributed
            len(cfg.gpu_ids),
            dist=distributed,
            nonoverlap_sampler=cfg.get('nonoverlap_sampler', False),
            seed=cfg.seed) for ds in dataset
    ]

    # put model on gpus
    if distributed:
        find_unused_parameters = cfg.get('find_unused_parameters', False)
        # Sets the `find_unused_parameters` parameter in
        # torch.nn.parallel.DistributedDataParallel
        model = MMDistributedDataParallel(
            model.cuda(),
            device_ids=[torch.cuda.current_device()],
            broadcast_buffers=False,
            find_unused_parameters=find_unused_parameters)
    else:
        model = MMDataParallel(
            model.cuda(cfg.gpu_ids[0]), device_ids=cfg.gpu_ids)

    # build runner
    optimizer = build_optimizer(model, cfg.optimizer)
    runner = EpochBasedRunner(
        model,
        optimizer=optimizer,
        work_dir=cfg.work_dir,
        logger=logger,
        meta=meta)
    # an ugly workaround to make .log and .log.json filenames the same
    runner.timestamp = timestamp

    # Plain OptimizerHook for distributed runs unless the config asks for a
    # custom hook type (e.g. Fp16OptimizerHook), in which case pass through.
    if distributed and 'type' not in cfg.optimizer_config:
        optimizer_config = OptimizerHook(**cfg.optimizer_config)
    else:
        optimizer_config = cfg.optimizer_config

    # register hooks
    runner.register_training_hooks(cfg.lr_config, optimizer_config,
                                   cfg.checkpoint_config, cfg.log_config,
                                   cfg.get('momentum_config', None))
    if distributed:
        # Re-seed the sampler each epoch so shuffling differs per epoch.
        runner.register_hook(DistSamplerSeedHook())

    # resume_from restores optimizer/epoch state; load_from only loads weights.
    if cfg.resume_from:
        runner.resume(cfg.resume_from)
    elif cfg.load_from:
        runner.load_checkpoint(cfg.load_from)
    runner.run(data_loaders, cfg.workflow, cfg.total_epochs)
| 35.526786
| 79
| 0.634079
|
4a0aac62403724e03620d85e424dcf4ae8885617
| 359
|
py
|
Python
|
accounts/models.py
|
michaelachrisco/djangorest
|
562446291d1cdbd82b68fd366bc65d0e1a5a6b7f
|
[
"MIT"
] | 1
|
2020-02-20T23:29:29.000Z
|
2020-02-20T23:29:29.000Z
|
accounts/models.py
|
michaelachrisco/djangorest
|
562446291d1cdbd82b68fd366bc65d0e1a5a6b7f
|
[
"MIT"
] | 8
|
2020-03-04T07:15:33.000Z
|
2021-09-22T18:41:10.000Z
|
accounts/models.py
|
michaelachrisco/djangorest
|
562446291d1cdbd82b68fd366bc65d0e1a5a6b7f
|
[
"MIT"
] | null | null | null |
from django.db import models
class Account(models.Model):
    """An account with a short name and an optional free-form description."""
    # Human-readable account name, max 30 characters.
    name = models.CharField(max_length=30)
    # Optional longer description; NULL and empty both allowed.
    description = models.TextField(null=True, blank=True)
class Transaction(models.Model):
    """A single monetary transaction with a date and a short note."""
    # Up to 99999.99 (7 digits, 2 decimal places); defaults to zero.
    amount = models.DecimalField(decimal_places=2, default=0.00, max_digits=7)
    # NOTE(review): despite the name, this is a DateField (date only, no time).
    action_datetime = models.DateField()
    note = models.CharField(max_length=30)
| 27.615385
| 78
| 0.749304
|
4a0aad8776b099ecf40a975a300b5e63d2c61392
| 4,034
|
py
|
Python
|
sublime_tower.py
|
dersimn/sublime_tower_plugin
|
7cf895426d94ff0fc5dcfa0aa1f89dce8d72367f
|
[
"MIT"
] | null | null | null |
sublime_tower.py
|
dersimn/sublime_tower_plugin
|
7cf895426d94ff0fc5dcfa0aa1f89dce8d72367f
|
[
"MIT"
] | null | null | null |
sublime_tower.py
|
dersimn/sublime_tower_plugin
|
7cf895426d94ff0fc5dcfa0aa1f89dce8d72367f
|
[
"MIT"
] | null | null | null |
"""
Open git repos from Sublime Text in Tower.
If you regularly open a shell to run `$ gittower .`, this is faster.
"""
import os.path
import subprocess
import sublime
import sublime_plugin
def build_cmd_is_in_repo(path):
    """Shell command that prints 'true' when *path* is in a git work tree."""
    return 'cd "%s" && git rev-parse --is-inside-work-tree' % (path,)
def build_cmd_get_repo_root(path):
    """Shell command that prints the repo root directory for *path*."""
    return 'cd "%s" && git rev-parse --show-toplevel' % (path,)
def build_cmd_open_in_tower(path):
    """Shell command that opens *path* with the Tower CLI."""
    return 'gittower "%s"' % (path,)
def is_in_repo(path):
    """
    Return True if *path* lies inside a git working tree.

    Runs `git rev-parse --is-inside-work-tree` in *path*; any failure --
    not a repo, git missing, or the 2s timeout expiring -- yields False.
    """
    cmd = build_cmd_is_in_repo(path)
    try:
        output = subprocess.check_output(cmd, stderr=subprocess.STDOUT,
                                         shell=True, universal_newlines=True,
                                         timeout=2)
    except (subprocess.CalledProcessError, subprocess.TimeoutExpired):
        # Bug fix: timeout=2 is passed above, but TimeoutExpired was not
        # caught, so a hung git call would crash the caller instead of
        # reporting "not in a repo".
        return False
    return output.strip() == 'true'
def get_repo_root(path):
    """
    Determine the repo root directory from a nested path.
    """
    # Propagates CalledProcessError / TimeoutExpired to the caller, same
    # as the original -- callers are expected to check is_in_repo() first.
    result = subprocess.check_output(build_cmd_get_repo_root(path),
                                     shell=True, universal_newlines=True,
                                     timeout=2)
    return result.strip()
def open_in_tower(path):
    """
    Open a repo in Tower.app [0], launching it if not running.
    [0]: https://www.git-tower.com/
    """
    try:
        subprocess.check_output(build_cmd_open_in_tower(path), shell=True,
                                timeout=5)
    except subprocess.CalledProcessError:
        # gittower exits non-zero when the CLI helper is not installed.
        sublime.error_message(
            'Error: Tower CLI is not installed.\n\nEnable it at: Tower > '
            'Preferences... > Integration > Tower Command Line Utility'
        )
class TowerOpenCommand(sublime_plugin.TextCommand):
    """
    Open the repo of the currently viewed file in Tower.
    """

    def run(self, edit):
        """
        Sublime entrypoint.
        """
        file_path = self.view.file_name()
        if not file_path:
            # Unsaved buffers have no path and therefore no repo.
            sublime.error_message('Error: Cannot open an unsaved file in Tower.')
            return
        containing_dir = os.path.dirname(file_path)
        if not is_in_repo(containing_dir):
            return
        open_in_tower(get_repo_root(containing_dir))

    def is_visible(self):
        file_path = self.view.file_name()
        if not file_path:
            return False
        return is_in_repo(os.path.dirname(file_path))
class TowerOpenFromSidebarCommand(sublime_plugin.WindowCommand):
    """
    Open the repo of the given paths[] in Tower.
    paths[] may contain multiple files/directories if the user selected multiple
    elements from the Side Bar, hide the menu entry.
    """

    @staticmethod
    def _containing_dir(path):
        """Directory for *path*: its parent when it is a file, else itself."""
        return os.path.dirname(path) if os.path.isfile(path) else path

    def run(self, paths):
        """Open the repo containing the single selected Side Bar item."""
        current_dir = self._containing_dir(paths[0])
        if is_in_repo(current_dir):
            open_in_tower(get_repo_root(current_dir))

    def is_visible(self, paths):
        """Show the menu entry only for a single selection inside a repo."""
        if len(paths) != 1:
            return False
        # Previously this file/dir resolution was duplicated verbatim in
        # run() and is_visible(); factored into _containing_dir().
        return is_in_repo(self._containing_dir(paths[0]))
class TowerCreateNewRepositoryFromSidebarCommand(sublime_plugin.WindowCommand):
    """
    If a single directory is given as argument, initialize Git repository
    with Tower.
    """

    def run(self, dirs):
        open_in_tower(dirs[0])

    def is_visible(self, dirs):
        # Offer "create repository" only for a single selection that is
        # NOT already inside a repo.
        if len(dirs) != 1:
            return False
        target = dirs[0]
        if os.path.isfile(target):
            target = os.path.dirname(target)
        return not is_in_repo(target)
| 26.194805
| 80
| 0.615766
|
4a0aae1572af5f9628d1c2b43041eee53a401027
| 1,984
|
py
|
Python
|
embedding/w2v/train.py
|
akkefa/Islam-360
|
2fa49872f92e1abcb9a31a893b4654f7485711ae
|
[
"MIT"
] | null | null | null |
embedding/w2v/train.py
|
akkefa/Islam-360
|
2fa49872f92e1abcb9a31a893b4654f7485711ae
|
[
"MIT"
] | null | null | null |
embedding/w2v/train.py
|
akkefa/Islam-360
|
2fa49872f92e1abcb9a31a893b4654f7485711ae
|
[
"MIT"
] | null | null | null |
# coding: utf8
"""Train"""
import csv
from multiprocessing import cpu_count
import gensim
from gensim.models.word2vec import LineSentence
from sklearn.metrics import accuracy_score
from sklearn.model_selection import ParameterGrid
# logging.basicConfig(format='%(asctime)s : %(levelname)s : %(message)s', level=logging.INFO)
# index: 0 - 0.9 - alpha=0.001|hs=0|iter=20|min_count=5|negative=5|sample=0|sg=1|size=25|window=5|workers=7| (Best)
# index: 1 - 0.9 - alpha=0.001|hs=0|iter=20|min_count=5|negative=5|sample=0|sg=1|size=30|window=5|workers=7|
PARAMS = {"alpha": [0.001],
"hs": [0],
"iter": [20],
"min_count": [5],
"negative": [5],
"sample": [0],
"sg": [1],
"size": [25],
"window": [5],
"workers": [cpu_count() - 1]
}
SENTENCES = "/home/ikram/workplace/projects/Islam-360/embedding/w2v/translation_sentences.txt"
for index, param in enumerate(ParameterGrid(PARAMS)):
file_name = ""
for key, value in param.items():
file_name += f"{key}={value}|"
print(f"Training: {file_name}")
file = LineSentence(SENTENCES)
model = gensim.models.Word2Vec(file, **param)
predication = []
with open('../urdu_similar_words.csv') as csv_file:
csv_reader = csv.reader(csv_file, delimiter=',')
for row in csv_reader:
try:
if model.wv.similarity(row[0], row[1]) > 0.7:
predication.append(1)
else:
predication.append(0)
except KeyError:
continue
data = [1] * len(predication)
accuracy = round(accuracy_score(data, predication), 2)
file_writer = open("results.txt", "a+", encoding="utf8")
file_writer.write(f"index: {index} - {accuracy} - {file_name} \n")
file_writer.close()
print(f"index: {index} - {accuracy} - {file_name}")
model_name = f"file-{accuracy}-{index}.w2v"
model.save(model_name)
| 32.52459
| 115
| 0.602823
|
4a0aae2692324cb26a284e3182d7e39beceec202
| 224
|
py
|
Python
|
pytrackmate/tests/test_trackmate.py
|
zindy/pytrackmate
|
a1eda3610b9822d71601c8a69c1b4de5abdc73b4
|
[
"BSD-3-Clause"
] | 5
|
2020-09-14T13:57:23.000Z
|
2021-11-30T22:02:32.000Z
|
pytrackmate/tests/test_trackmate.py
|
zindy/pytrackmate
|
a1eda3610b9822d71601c8a69c1b4de5abdc73b4
|
[
"BSD-3-Clause"
] | 3
|
2020-11-16T21:07:45.000Z
|
2021-07-02T12:26:28.000Z
|
pytrackmate/tests/test_trackmate.py
|
zindy/pytrackmate
|
a1eda3610b9822d71601c8a69c1b4de5abdc73b4
|
[
"BSD-3-Clause"
] | 5
|
2020-09-14T12:25:01.000Z
|
2022-02-14T10:36:19.000Z
|
import os
from pytrackmate import trackmate_peak_import
def test_import():
fname = os.path.join(os.path.dirname(__file__), "FakeTracks.xml")
spots = trackmate_peak_import(fname)
assert spots.shape == (12, 17)
| 22.4
| 69
| 0.732143
|
4a0aaf425d917b692acaf76690c5c3c69b964a08
| 740
|
py
|
Python
|
woid/apps/services/migrations/0018_tweet.py
|
gsdeeksha/makeiteasy
|
e05afc739dabf6fcd76ca1511b3f8a0395879d61
|
[
"Apache-2.0"
] | 2
|
2019-07-09T08:55:12.000Z
|
2019-10-16T10:24:42.000Z
|
woid/apps/services/migrations/0018_tweet.py
|
gsdeeksha/makeiteasy
|
e05afc739dabf6fcd76ca1511b3f8a0395879d61
|
[
"Apache-2.0"
] | 3
|
2020-02-12T00:54:26.000Z
|
2021-06-10T21:39:41.000Z
|
woid/apps/services/migrations/0018_tweet.py
|
gsdeeksha/makeiteasy
|
e05afc739dabf6fcd76ca1511b3f8a0395879d61
|
[
"Apache-2.0"
] | null | null | null |
# Generated by Django 2.1.5 on 2019-06-28 06:11
from django.db import migrations, models
class Migration(migrations.Migration):
    """Create the Tweet model (auto-generated by Django 2.1.5)."""

    dependencies = [
        ('services', '0017_auto_20190109_1956'),
    ]

    operations = [
        migrations.CreateModel(
            name='Tweet',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                # External tweet identifier; optional.
                ('tweet_id', models.CharField(blank=True, max_length=250, null=True)),
                # NOTE(review): 'twwet_text' looks like a typo for 'tweet_text';
                # renaming it would need a follow-up migration, so it is kept
                # as-is here to match the applied schema.
                ('twwet_text', models.TextField()),
                ('published_date', models.DateTimeField(blank=True, null=True)),
                # Soft-delete / visibility flag.
                ('is_active', models.BooleanField(default=True)),
            ],
        ),
    ]
| 30.833333
| 114
| 0.583784
|
4a0aafc844635a48e675337a8bed299d0e769f32
| 9,910
|
py
|
Python
|
accounts/views.py
|
chandra-prakash-code/editor
|
727b3523ca58a91afe91260850c8a0da6a2b37a0
|
[
"Apache-2.0"
] | 1
|
2020-10-15T08:21:13.000Z
|
2020-10-15T08:21:13.000Z
|
accounts/views.py
|
adad20/editor
|
2446a70dfb8cb0d2ac9bd75c5552418e2f413edd
|
[
"Apache-2.0"
] | null | null | null |
accounts/views.py
|
adad20/editor
|
2446a70dfb8cb0d2ac9bd75c5552418e2f413edd
|
[
"Apache-2.0"
] | null | null | null |
from zipfile import ZipFile
import json
try:
from cStringIO import StringIO
except ImportError:
from io import StringIO
from accounts.forms import NumbasRegistrationForm, DeactivateUserForm, ReassignContentForm
from accounts.forms import UserProfileForm, ChangePasswordForm
from accounts.models import RegistrationProfile
from accounts.util import find_users, user_json
from django import apps
from django.conf import settings
from django.views.generic import UpdateView, DetailView, ListView, TemplateView
from django.contrib.auth.models import User
from django.contrib.sites.requests import RequestSite
from django.contrib.sites.shortcuts import get_current_site
from django.shortcuts import redirect
from django.urls import reverse
from django.contrib import messages
from django.http import Http404, HttpResponse
from django.template.defaultfilters import slugify
from django.template.loader import get_template
from django.contrib.sites.models import Site
from editor.models import NewQuestion, NewExam
import editor.models
from editor.views import editoritem
from registration import signals
import registration.views
class RegistrationView(registration.views.RegistrationView):
    """Sign-up flow backed by NumbasRegistrationForm.

    Creates an *inactive* user (activation happens via the emailed link)
    and fires the user_registered signal with the mailing-list opt-in flag.
    """
    form_class = NumbasRegistrationForm

    def register(self, form, *args, **kwargs):
        """Create the inactive user from validated form data and signal it."""
        d = form.cleaned_data
        username, email, password = d['username'], d['email'], d['password1']
        first_name, last_name = d['first_name'], d['last_name']
        # Use the configured Site when the sites framework is installed,
        # otherwise derive one from the current request.
        if Site._meta.installed:
            site = Site.objects.get_current()
        else:
            site = RequestSite(self.request)
        new_user = RegistrationProfile.objects.create_inactive_user(username, first_name, last_name, email,
                                                                    password, site)
        signals.user_registered.send(sender=self.__class__,
                                     user=new_user,
                                     request=self.request,
                                     subscribe=form.cleaned_data.get('subscribe')
                                     )
        return new_user

    def get_success_url(self, *args, **kwargs):
        return reverse('registration_complete')

    def registration_allowed(self):
        # Site-wide toggle for opening/closing registration.
        return settings.ALLOW_REGISTRATION
class RegistrationCompleteView(TemplateView):
template_name='registration/registration_complete.html'
def get(self, request, *args, **kwargs):
if not self.request.user.is_anonymous:
return redirect(reverse('editor_index'))
return super().get(request,*args,**kwargs)
class ActivationView(registration.views.ActivationView):
template_name = 'registration/activation_complete.html'
def activate(self, activation_key):
activated_user = RegistrationProfile.objects.activate_user(activation_key, get_current_site(self.request))
if activated_user:
signals.user_activated.send(sender=self.__class__,
user=activated_user,
request=self.request)
return activated_user
def get_success_url(self, user):
return ('registration_activation_complete', (), {})
class CurrentUserUpdateView(UpdateView):
model = User
def get_object(self, queryset=None):
return self.request.user
class UserUpdateView(CurrentUserUpdateView):
template_name = 'registration/update.html'
form_class = UserProfileForm
def get_context_data(self, *args, **kwargs):
context = super(UserUpdateView, self).get_context_data(*args, **kwargs)
context['profile_page'] = 'bio'
context['view_user'] = self.get_object()
context['mailing_list_active'] = apps.registry.apps.is_installed('numbasmailing')
if context['mailing_list_active']:
context['unsubscribe_url'] = settings.MAILCHIMP.get('UNSUBSCRIBE_URL')
return context
def form_valid(self, form):
messages.success(self.request, 'Your profile has been updated.')
return super(UserUpdateView, self).form_valid(form)
def get_success_url(self):
user = self.get_object()
return reverse('view_profile', args=(user.pk,))
class ChangePasswordView(CurrentUserUpdateView):
template_name = 'registration/change_password.html'
form_class = ChangePasswordForm
def get_object(self, queryset=None):
return self.request.user
def form_valid(self, form):
messages.success(self.request, 'Your password has been changed.')
return super(ChangePasswordView, self).form_valid(form)
def get_success_url(self):
return reverse('edit_profile')
class UserProfileView(DetailView):
template_name = 'profile/view.html'
model = User
context_object_name = 'view_user'
profile_page = 'bio'
def get_context_data(self, *args, **kwargs):
context = super(UserProfileView, self).get_context_data(*args, **kwargs)
context['is_me'] = self.request.user == self.object
context['profile_page'] = self.profile_page
return context
class UserProjectsView(UserProfileView):
template_name = 'profile/projects.html'
profile_page = 'projects'
def get_context_data(self, *args, **kwargs):
context = super(UserProjectsView, self).get_context_data(*args, **kwargs)
context['projects'] = [p for p in self.object.userprofile.projects() if p.can_be_viewed_by(self.request.user)]
return context
class UserThemesView(UserProfileView):
template_name = 'profile/themes.html'
profile_page = 'themes'
class UserExtensionsView(UserProfileView):
template_name = 'profile/extensions.html'
profile_page = 'extensions'
class UserCustomPartTypesView(UserProfileView):
template_name = 'profile/custom_part_types.html'
profile_page = 'custom_part_types'
class ZipView(DetailView):
    """Base view that streams a set of files back as a zip download.

    Subclasses implement `get_zip(request, *args, **kwargs)` returning
    `(files, filename)`, where `files` is an iterable of `(name, data)`
    pairs to write into the archive.
    """
    def get(self, request, *args, **kwargs):
        from io import BytesIO  # zipfile needs a binary buffer, not text
        files, filename = self.get_zip(request, *args, **kwargs)
        # Bug fix: the module-level StringIO (io.StringIO on Python 3) is a
        # *text* buffer -- ZipFile raises TypeError on the first writestr().
        # A zip archive is binary, so build it in a BytesIO buffer.
        buf = BytesIO()
        z = ZipFile(buf, 'w')
        for fname, fbytes in files:
            z.writestr(fname, fbytes)
        z.close()
        rf = buf.getvalue()
        response = HttpResponse(rf, content_type='application/zip')
        response['Content-Disposition'] = 'attachment; filename=%s' % filename
        response['Content-Length'] = len(rf)
        # Force revalidation so stale archives are never served from cache.
        response['Cache-Control'] = 'max-age=0,no-cache,no-store'
        return response
class AllExamsView(ZipView):
def get_zip(self, request, *args, **kwargs):
user = request.user
exams = NewExam.objects.filter(author=user)
files = [('%s.exam' % e.slug, e.as_source()) for e in exams]
return files, '%s-exams.zip' % slugify(user.get_full_name())
class AllQuestionsView(ZipView):
def get_zip(self, request, *args, **kwargs):
user = request.user
questions = NewQuestion.objects.filter(author=user)
files = [('%s.exam' % q.slug, q.as_source()) for q in questions]
return files, '%s-questions.zip' % slugify(user.get_full_name())
class UserSearchView(ListView):
    """Search users."""
    model = User

    def render_to_response(self, context, **response_kwargs):
        # JSON is returned only for AJAX requests; anything else is a 404.
        # NOTE(review): request.is_ajax() is deprecated in Django 3.1 and
        # removed in 4.0 -- confirm this project's Django version.
        if self.request.is_ajax():
            return HttpResponse(json.dumps(context['object_list']),
                                content_type='application/json',
                                **response_kwargs)
        raise Http404

    def get_queryset(self):
        # With ?q=term: up to 5 matching users; without it: all active users.
        try:
            search_term = self.request.GET['q']
            users = find_users(name=search_term)[:5]
        except KeyError:
            users = User.objects.filter(is_active=True)
        return [user_json(u) for u in users]
class AfterFirstLoginView(TemplateView):
template_name = 'registration/after_first_login.html'
def get_context_data(self, *args, **kwargs):
context = super(AfterFirstLoginView, self).get_context_data(*args, **kwargs)
context['invitations'] = editor.models.ProjectInvitation.objects.filter(email=self.request.user.email)
return context
class UserEditorItemSearchView(editoritem.SearchView):
template_name = 'profile/editoritem_search.html'
def dispatch(self, request, pk, *args, **kwargs):
self.user = User.objects.get(pk=pk)
return super(UserEditorItemSearchView, self).dispatch(request, pk, *args, **kwargs)
def base_queryset(self):
return self.user.own_items.all()
def get_context_data(self, *args, **kwargs):
context = super(UserEditorItemSearchView, self).get_context_data(*args, **kwargs)
context['view_user'] = self.user
return context
class DeactivateUserView(CurrentUserUpdateView):
model = User
template_name = 'profile/deactivate.html'
form_class = DeactivateUserForm
def get_context_data(self, *args, **kwargs):
context = super(DeactivateUserView, self).get_context_data(*args, **kwargs)
context['mailing_list_active'] = apps.registry.apps.is_installed('numbasmailing')
return context
def get_success_url(self):
return reverse('logout')
class ReassignContentView(CurrentUserUpdateView):
model = User
template_name = 'profile/reassign_content.html'
form_class = ReassignContentForm
def form_valid(self,form):
res = super().form_valid(form)
template = get_template('profile/content-reassigned.html')
message= template.render({'to_user': form.cleaned_data['to_user']})
messages.success(self.request, message)
return res
def get_success_url(self):
return reverse('editor_index')
| 37.255639
| 119
| 0.662664
|
4a0aaffe64906551a0a17593b731546c5db01776
| 2,575
|
py
|
Python
|
model_history/compat.py
|
rsalmaso/django-model-history
|
41321acfc3c5db4dd29fe3e452c2603f578c59ca
|
[
"MIT"
] | null | null | null |
model_history/compat.py
|
rsalmaso/django-model-history
|
41321acfc3c5db4dd29fe3e452c2603f578c59ca
|
[
"MIT"
] | null | null | null |
model_history/compat.py
|
rsalmaso/django-model-history
|
41321acfc3c5db4dd29fe3e452c2603f578c59ca
|
[
"MIT"
] | null | null | null |
# Copyright (C) 2007-2017, Raffaele Salmaso <raffaele@salmaso.org>
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
from django.db import models
from django.utils import timezone
from django.utils.translation import gettext_lazy as _
__all__ = (
'CreationDateTimeField', 'ModificationDateTimeField', 'TimestampModel',
'DjangoJSONEncoder',
)
# Prefer fluo's field/model implementations when the optional `fluo`
# package is installed; otherwise define equivalent fallbacks locally.
try:
    from fluo.db.models import CreationDateTimeField, ModificationDateTimeField, TimestampModel
except ImportError:
    class CreationDateTimeField(models.DateTimeField):
        """DateTimeField that defaults to "now" and is hidden from forms."""
        def __init__(self, *args, **kwargs):
            # setdefault keeps any explicit caller-supplied values.
            kwargs.setdefault('editable', False)
            kwargs.setdefault('blank', True)
            kwargs.setdefault('default', timezone.now)
            super().__init__(*args, **kwargs)
        def get_internal_type(self):
            # Store as a plain DateTimeField at the database level.
            return "DateTimeField"
    class ModificationDateTimeField(CreationDateTimeField):
        """CreationDateTimeField that refreshes to "now" on every save."""
        def pre_save(self, model, add):
            value = timezone.now()
            setattr(model, self.attname, value)
            return value
        def get_internal_type(self):
            return "DateTimeField"
    class TimestampModel(models.Model):
        """Abstract base adding created/modified timestamps to a model."""
        # Set once at creation time.
        created_at = CreationDateTimeField(
            verbose_name=_('created'),
        )
        # Updated automatically on every save.
        last_modified_at = ModificationDateTimeField(
            verbose_name=_('modified'),
        )
        class Meta:
            abstract = True
# Same pattern for the JSON encoder: fluo's if available, Django's otherwise.
try:
    from fluo.utils.json import JSONEncoder as DjangoJSONEncoder
except ImportError:
    from django.core.serializers.json import DjangoJSONEncoder
| 37.318841
| 95
| 0.71767
|
4a0ab2ba9439879527110fdd2eebe0686097f08c
| 43,471
|
py
|
Python
|
models_all_solvable2/rsyn0805h.py
|
grossmann-group/pyomo-MINLP-benchmarking
|
714f0a0dffd61675649a805683c0627af6b4929e
|
[
"MIT"
] | 7
|
2019-05-08T19:14:34.000Z
|
2021-12-24T00:00:40.000Z
|
models_all_solvable2/rsyn0805h.py
|
grossmann-group/pyomo-MINLP-benchmarking
|
714f0a0dffd61675649a805683c0627af6b4929e
|
[
"MIT"
] | null | null | null |
models_all_solvable2/rsyn0805h.py
|
grossmann-group/pyomo-MINLP-benchmarking
|
714f0a0dffd61675649a805683c0627af6b4929e
|
[
"MIT"
] | 2
|
2020-05-21T22:15:51.000Z
|
2020-06-02T23:02:08.000Z
|
# MINLP written by GAMS Convert at 05/15/20 00:51:13
#
# Equation counts
# Total E G L N X C B
# 430 207 9 214 0 0 0 0
#
# Variable counts
# x b i s1s s2s sc si
# Total cont binary integer sos1 sos2 scont sint
# 309 272 37 0 0 0 0 0
# FX 0 0 0 0 0 0 0 0
#
# Nonzero counts
# Total const NL DLL
# 1059 1050 9 0
#
# Reformulation has removed 1 variable and 1 equation
from pyomo.environ import *
model = m = ConcreteModel()
m.x2 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x3 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x4 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x5 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x6 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x7 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x8 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x9 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x10 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x11 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x12 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x13 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x14 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x15 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x16 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x17 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x18 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x19 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x20 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x21 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x22 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x23 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x24 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x25 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x26 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x27 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x28 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x29 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x30 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x31 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x32 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x33 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x34 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x35 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x36 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x37 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x38 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x39 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x40 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x41 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x42 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x43 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x44 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x45 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x46 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x47 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x48 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x49 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x50 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x51 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x52 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x53 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x54 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x55 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x56 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x57 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x58 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x59 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x60 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x61 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x62 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x63 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x64 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x65 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x66 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x67 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x68 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x69 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x70 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x71 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x72 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x73 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x74 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x75 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x76 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x77 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x78 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x79 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x80 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x81 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x82 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x83 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x84 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x85 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x86 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x87 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x88 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x89 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x90 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x91 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x92 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x93 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x94 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x95 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x96 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x97 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x98 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x99 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x100 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x101 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x102 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x103 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x104 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x105 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x106 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x107 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x108 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x109 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x110 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x111 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x112 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x113 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x114 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x115 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x116 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x117 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x118 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x119 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x120 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x121 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x122 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x123 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x124 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x125 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x126 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x127 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x128 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x129 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x130 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x131 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x132 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x133 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x134 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x135 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x136 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x137 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x138 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x139 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x140 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x141 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x142 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x143 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x144 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x145 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x146 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x147 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x148 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x149 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x150 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x151 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x152 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x153 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x154 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x155 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x156 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x157 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x158 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x159 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x160 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x161 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x162 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x163 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x164 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x165 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x166 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x167 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x168 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x169 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x170 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x171 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x172 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x173 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x174 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x175 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x176 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x177 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x178 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x179 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x180 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x181 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x182 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x183 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x184 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x185 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x186 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x187 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x188 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x189 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x190 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x191 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x192 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x193 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x194 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x195 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x196 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x197 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x198 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x199 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x200 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x201 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x202 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x203 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x204 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x205 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x206 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x207 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x208 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x209 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x210 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x211 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x212 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x213 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x214 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x215 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x216 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x217 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x218 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x219 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x220 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x221 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x222 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x223 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x224 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x225 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x226 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x227 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x228 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x229 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x230 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x231 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x232 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x233 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x234 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x235 = Var(within=Reals,bounds=(0,None),initialize=0)
m.b236 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b237 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b238 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b239 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b240 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b241 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b242 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b243 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b244 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b245 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b246 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b247 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b248 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b249 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b250 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b251 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b252 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b253 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b254 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b255 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b256 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b257 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b258 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b259 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b260 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b261 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b262 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b263 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b264 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b265 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b266 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b267 = Var(within=Binary,bounds=(0,1),initialize=0)
m.x268 = Var(within=Reals,bounds=(0,10),initialize=0)
m.x269 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x270 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x271 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x272 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x273 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x274 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x275 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x276 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x277 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x278 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x279 = Var(within=Reals,bounds=(0,7),initialize=0)
m.x280 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x281 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x282 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x283 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x284 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x285 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x286 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x287 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x288 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x289 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x290 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x291 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x292 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x293 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x294 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x295 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x296 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x297 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x298 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x299 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x300 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x301 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x302 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x303 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x304 = Var(within=Reals,bounds=(0,None),initialize=0)
m.b305 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b306 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b307 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b308 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b309 = Var(within=Binary,bounds=(0,1),initialize=0)
m.obj = Objective(expr= - 4*m.x2 - 8*m.x7 - 5*m.x11 - 8*m.x23 + 23*m.x27 + 19*m.x29 - 10*m.x30 + 2*m.x33 + 3*m.x34
+ 25*m.x35 + 24*m.x36 - 6*m.b237 - 40*m.b238 - 46*m.b239 - 7*m.b241 - 30*m.b242 - 37*m.b243
- 7*m.b245 - 15*m.b246 - 22*m.b247 - 11*m.b249 - 13*m.b250 - 24*m.b251 - 10*m.b253 - 13*m.b254
- 23*m.b255 - 9*m.b257 - 30*m.b258 - 39*m.b259 - 8*m.b261 - 20*m.b262 - 28*m.b263 - 8*m.b265
- 15*m.b266 - 23*m.b267 + 5*m.x274 - 2*m.x279 + 200*m.x280 + 250*m.x281 + 300*m.x282 - 5*m.b305
- 8*m.b306 - 6*m.b307 - 10*m.b308 - 6*m.b309, sense=maximize)
m.c2 = Constraint(expr= m.x2 - 0.2*m.x37 == 0)
m.c3 = Constraint(expr= m.x3 - 0.2*m.x38 == 0)
m.c4 = Constraint(expr= m.x4 - 0.2*m.x39 == 0)
m.c5 = Constraint(expr= m.x5 - 0.2*m.x40 == 0)
m.c6 = Constraint(expr= m.x6 - 0.2*m.x41 == 0)
m.c7 = Constraint(expr= m.x7 - 0.5*m.x42 == 0)
m.c8 = Constraint(expr= m.x8 - 0.5*m.x43 == 0)
m.c9 = Constraint(expr= m.x9 - 0.7*m.x44 == 0)
m.c10 = Constraint(expr= m.x10 - 0.7*m.x45 == 0)
m.c11 = Constraint(expr= m.x11 - 1.2*m.x46 == 0)
m.c12 = Constraint(expr= m.x12 - 1.2*m.x47 == 0)
m.c13 = Constraint(expr= m.x13 - 0.5*m.x48 == 0)
m.c14 = Constraint(expr= m.x14 - 0.7*m.x49 == 0)
m.c15 = Constraint(expr= m.x15 - 1.2*m.x50 == 0)
m.c16 = Constraint(expr= m.x16 - 1.2*m.x51 == 0)
m.c17 = Constraint(expr= m.x17 - 1.2*m.x52 == 0)
m.c18 = Constraint(expr= m.x18 - 1.2*m.x53 == 0)
m.c19 = Constraint(expr= m.x19 - 0.3*m.x54 == 0)
m.c20 = Constraint(expr= m.x20 - 0.9*m.x55 == 0)
m.c21 = Constraint(expr= m.x21 - 0.3*m.x56 == 0)
m.c22 = Constraint(expr= m.x22 - 0.9*m.x57 == 0)
m.c23 = Constraint(expr= m.x23 - 0.4*m.x58 == 0)
m.c24 = Constraint(expr= m.x24 - 0.4*m.x59 == 0)
m.c25 = Constraint(expr= m.x25 - 0.4*m.x60 == 0)
m.c26 = Constraint(expr= m.x26 - 1.6*m.x61 == 0)
m.c27 = Constraint(expr= m.x27 - 1.6*m.x62 == 0)
m.c28 = Constraint(expr= m.x28 - 1.1*m.x63 == 0)
m.c29 = Constraint(expr= m.x29 - 1.1*m.x64 == 0)
m.c30 = Constraint(expr= m.x30 - 0.7*m.x65 == 0)
m.c31 = Constraint(expr= m.x31 - 0.7*m.x66 == 0)
m.c32 = Constraint(expr= m.x32 - 0.7*m.x67 == 0)
m.c33 = Constraint(expr= m.x33 - 0.2*m.x68 == 0)
m.c34 = Constraint(expr= m.x34 - 0.7*m.x69 == 0)
m.c35 = Constraint(expr= m.x35 - 0.3*m.x70 == 0)
m.c36 = Constraint(expr= m.x36 - 0.9*m.x71 == 0)
m.c37 = Constraint(expr= m.x27 >= 0.4)
m.c38 = Constraint(expr= m.x29 >= 0.3)
m.c39 = Constraint(expr= m.x33 >= 0.2)
m.c40 = Constraint(expr= m.x34 >= 0.5)
m.c41 = Constraint(expr= m.x35 >= 0.2)
m.c42 = Constraint(expr= m.x36 >= 0.3)
m.c43 = Constraint(expr= m.x2 <= 35)
m.c44 = Constraint(expr= m.x7 <= 36)
m.c45 = Constraint(expr= m.x11 <= 25)
m.c46 = Constraint(expr= m.x23 <= 24)
m.c47 = Constraint(expr= m.x30 <= 30)
m.c48 = Constraint(expr= m.x2 - m.x3 - m.x4 == 0)
m.c49 = Constraint(expr= m.x5 - m.x6 == 0)
m.c50 = Constraint(expr= m.x7 - m.x8 + m.x13 == 0)
m.c51 = Constraint(expr= m.x9 - m.x10 + m.x14 == 0)
m.c52 = Constraint(expr= m.x11 - m.x12 - m.x15 == 0)
m.c53 = Constraint(expr= m.x16 - m.x17 - m.x18 == 0)
m.c54 = Constraint(expr= m.x19 - m.x21 == 0)
m.c55 = Constraint(expr= m.x20 - m.x22 == 0)
m.c56 = Constraint(expr= m.x23 - m.x24 - m.x25 == 0)
m.c57 = Constraint(expr= m.x26 - m.x27 == 0)
m.c58 = Constraint(expr= m.x28 - m.x29 == 0)
m.c59 = Constraint(expr= m.x30 - m.x31 == 0)
m.c60 = Constraint(expr= m.x3 - m.x5 - m.x72 == 0)
m.c61 = Constraint(expr= m.x4 + m.x8 - m.x9 - m.x73 == 0)
m.c62 = Constraint(expr= m.x12 - m.x13 - m.x14 - m.x74 == 0)
m.c63 = Constraint(expr= m.x15 - m.x16 - m.x75 == 0)
m.c64 = Constraint(expr= m.x18 - m.x19 - m.x20 - m.x76 == 0)
m.c65 = Constraint(expr= m.x17 + m.x24 - m.x26 - m.x77 == 0)
m.c66 = Constraint(expr= m.x25 - m.x28 + m.x32 - m.x78 == 0)
m.c67 = Constraint(expr= m.x31 - m.x32 - m.x79 == 0)
m.c68 = Constraint(expr= m.x39 - m.x43 <= 0)
m.c69 = Constraint(expr= m.x52 - m.x59 <= 0)
m.c70 = Constraint(expr= m.x60 - m.x67 <= 0)
m.c71 = Constraint(expr= m.x40 - m.x140 - m.x141 - m.x142 - m.x143 == 0)
m.c72 = Constraint(expr= m.x38 - m.x132 - m.x133 - m.x134 - m.x135 == 0)
m.c73 = Constraint(expr= m.x44 - m.x144 - m.x145 - m.x146 - m.x147 == 0)
m.c74 = Constraint(expr= m.x39 - m.x136 - m.x137 - m.x138 - m.x139 == 0)
m.c75 = Constraint(expr= m.x48 - m.x152 - m.x153 - m.x154 - m.x155 == 0)
m.c76 = Constraint(expr= m.x49 - m.x156 - m.x157 - m.x158 - m.x159 == 0)
m.c77 = Constraint(expr= m.x47 - m.x148 - m.x149 - m.x150 - m.x151 == 0)
m.c78 = Constraint(expr= m.x51 - m.x164 - m.x165 - m.x166 - m.x167 == 0)
m.c79 = Constraint(expr= m.x50 - m.x160 - m.x161 - m.x162 - m.x163 == 0)
m.c80 = Constraint(expr= m.x54 - m.x176 - m.x177 - m.x178 - m.x179 == 0)
m.c81 = Constraint(expr= m.x55 - m.x180 - m.x181 - m.x182 - m.x183 == 0)
m.c82 = Constraint(expr= m.x53 - m.x172 - m.x173 - m.x174 - m.x175 == 0)
m.c83 = Constraint(expr= m.x61 - m.x188 - m.x189 - m.x190 - m.x191 == 0)
m.c84 = Constraint(expr= m.x52 - m.x168 - m.x169 - m.x170 - m.x171 == 0)
m.c85 = Constraint(expr= m.x63 - m.x192 - m.x193 - m.x194 - m.x195 == 0)
m.c86 = Constraint(expr= m.x60 - m.x184 - m.x185 - m.x186 - m.x187 == 0)
m.c87 = Constraint(expr= m.x67 - m.x200 - m.x201 - m.x202 - m.x203 == 0)
m.c88 = Constraint(expr= m.x66 - m.x196 - m.x197 - m.x198 - m.x199 == 0)
m.c89 = Constraint(expr= m.x140 - 148.75*m.b236 <= 0)
m.c90 = Constraint(expr= m.x141 - 148.75*m.b237 <= 0)
m.c91 = Constraint(expr= m.x142 - 148.75*m.b238 <= 0)
m.c92 = Constraint(expr= m.x143 - 148.75*m.b239 <= 0)
m.c93 = Constraint(expr= m.x144 - 254.045833333333*m.b240 <= 0)
m.c94 = Constraint(expr= m.x145 - 254.045833333333*m.b241 <= 0)
m.c95 = Constraint(expr= m.x146 - 254.045833333333*m.b242 <= 0)
m.c96 = Constraint(expr= m.x147 - 254.045833333333*m.b243 <= 0)
m.c97 = Constraint(expr= m.x152 - 20.4166666666667*m.b244 <= 0)
m.c98 = Constraint(expr= m.x153 - 20.4166666666667*m.b245 <= 0)
m.c99 = Constraint(expr= m.x154 - 20.4166666666667*m.b246 <= 0)
m.c100 = Constraint(expr= m.x155 - 20.4166666666667*m.b247 <= 0)
m.c101 = Constraint(expr= m.x156 - 20.4166666666667*m.b244 <= 0)
m.c102 = Constraint(expr= m.x157 - 20.4166666666667*m.b245 <= 0)
m.c103 = Constraint(expr= m.x158 - 20.4166666666667*m.b246 <= 0)
m.c104 = Constraint(expr= m.x159 - 20.4166666666667*m.b247 <= 0)
m.c105 = Constraint(expr= m.x164 - 18.75*m.b248 <= 0)
m.c106 = Constraint(expr= m.x165 - 18.75*m.b249 <= 0)
m.c107 = Constraint(expr= m.x166 - 18.75*m.b250 <= 0)
m.c108 = Constraint(expr= m.x167 - 18.75*m.b251 <= 0)
m.c109 = Constraint(expr= m.x176 - 17.8125*m.b252 <= 0)
m.c110 = Constraint(expr= m.x177 - 17.8125*m.b253 <= 0)
m.c111 = Constraint(expr= m.x178 - 17.8125*m.b254 <= 0)
m.c112 = Constraint(expr= m.x179 - 17.8125*m.b255 <= 0)
m.c113 = Constraint(expr= m.x180 - 17.8125*m.b252 <= 0)
m.c114 = Constraint(expr= m.x181 - 17.8125*m.b253 <= 0)
m.c115 = Constraint(expr= m.x182 - 17.8125*m.b254 <= 0)
m.c116 = Constraint(expr= m.x183 - 17.8125*m.b255 <= 0)
m.c117 = Constraint(expr= m.x188 - 66.9375*m.b256 <= 0)
m.c118 = Constraint(expr= m.x189 - 66.9375*m.b257 <= 0)
m.c119 = Constraint(expr= m.x190 - 66.9375*m.b258 <= 0)
m.c120 = Constraint(expr= m.x191 - 66.9375*m.b259 <= 0)
m.c121 = Constraint(expr= m.x192 - 94.4571428571429*m.b260 <= 0)
m.c122 = Constraint(expr= m.x193 - 94.4571428571429*m.b261 <= 0)
m.c123 = Constraint(expr= m.x194 - 94.4571428571429*m.b262 <= 0)
m.c124 = Constraint(expr= m.x195 - 94.4571428571429*m.b263 <= 0)
m.c125 = Constraint(expr= m.x200 - 39.4285714285714*m.b264 <= 0)
m.c126 = Constraint(expr= m.x201 - 39.4285714285714*m.b265 <= 0)
m.c127 = Constraint(expr= m.x202 - 39.4285714285714*m.b266 <= 0)
m.c128 = Constraint(expr= m.x203 - 39.4285714285714*m.b267 <= 0)
m.c129 = Constraint(expr= m.x132 - 175*m.b236 <= 0)
m.c130 = Constraint(expr= m.x133 - 175*m.b237 <= 0)
m.c131 = Constraint(expr= m.x134 - 175*m.b238 <= 0)
m.c132 = Constraint(expr= m.x135 - 175*m.b239 <= 0)
m.c133 = Constraint(expr= m.x136 - 175*m.b240 <= 0)
m.c134 = Constraint(expr= m.x137 - 175*m.b241 <= 0)
m.c135 = Constraint(expr= m.x138 - 175*m.b242 <= 0)
m.c136 = Constraint(expr= m.x139 - 175*m.b243 <= 0)
m.c137 = Constraint(expr= m.x148 - 20.8333333333333*m.b244 <= 0)
m.c138 = Constraint(expr= m.x149 - 20.8333333333333*m.b245 <= 0)
m.c139 = Constraint(expr= m.x150 - 20.8333333333333*m.b246 <= 0)
m.c140 = Constraint(expr= m.x151 - 20.8333333333333*m.b247 <= 0)
m.c141 = Constraint(expr= m.x160 - 20.8333333333333*m.b248 <= 0)
m.c142 = Constraint(expr= m.x161 - 20.8333333333333*m.b249 <= 0)
m.c143 = Constraint(expr= m.x162 - 20.8333333333333*m.b250 <= 0)
m.c144 = Constraint(expr= m.x163 - 20.8333333333333*m.b251 <= 0)
m.c145 = Constraint(expr= m.x172 - 18.75*m.b252 <= 0)
m.c146 = Constraint(expr= m.x173 - 18.75*m.b253 <= 0)
m.c147 = Constraint(expr= m.x174 - 18.75*m.b254 <= 0)
m.c148 = Constraint(expr= m.x175 - 18.75*m.b255 <= 0)
m.c149 = Constraint(expr= m.x168 - 18.75*m.b256 <= 0)
m.c150 = Constraint(expr= m.x169 - 18.75*m.b257 <= 0)
m.c151 = Constraint(expr= m.x170 - 18.75*m.b258 <= 0)
m.c152 = Constraint(expr= m.x171 - 18.75*m.b259 <= 0)
m.c153 = Constraint(expr= m.x184 - 60*m.b260 <= 0)
m.c154 = Constraint(expr= m.x185 - 60*m.b261 <= 0)
m.c155 = Constraint(expr= m.x186 - 60*m.b262 <= 0)
m.c156 = Constraint(expr= m.x187 - 60*m.b263 <= 0)
m.c157 = Constraint(expr= m.x196 - 42.8571428571429*m.b264 <= 0)
m.c158 = Constraint(expr= m.x197 - 42.8571428571429*m.b265 <= 0)
m.c159 = Constraint(expr= m.x198 - 42.8571428571429*m.b266 <= 0)
m.c160 = Constraint(expr= m.x199 - 42.8571428571429*m.b267 <= 0)
m.c161 = Constraint(expr= - 0.8*m.x132 + m.x140 == 0)
m.c162 = Constraint(expr= - 0.85*m.x133 + m.x141 == 0)
m.c163 = Constraint(expr= - 0.8*m.x134 + m.x142 == 0)
m.c164 = Constraint(expr= - 0.85*m.x135 + m.x143 == 0)
m.c165 = Constraint(expr= - 0.9*m.x136 + m.x144 == 0)
m.c166 = Constraint(expr= - 0.95*m.x137 + m.x145 == 0)
m.c167 = Constraint(expr= - 0.9*m.x138 + m.x146 == 0)
m.c168 = Constraint(expr= - 0.95*m.x139 + m.x147 == 0)
m.c169 = Constraint(expr= - 0.85*m.x148 + m.x152 == 0)
m.c170 = Constraint(expr= - 0.98*m.x149 + m.x153 == 0)
m.c171 = Constraint(expr= - 0.85*m.x150 + m.x154 == 0)
m.c172 = Constraint(expr= - 0.98*m.x151 + m.x155 == 0)
m.c173 = Constraint(expr= - 0.85*m.x148 + m.x156 == 0)
m.c174 = Constraint(expr= - 0.98*m.x149 + m.x157 == 0)
m.c175 = Constraint(expr= - 0.85*m.x150 + m.x158 == 0)
m.c176 = Constraint(expr= - 0.98*m.x151 + m.x159 == 0)
m.c177 = Constraint(expr= - 0.85*m.x160 + m.x164 == 0)
m.c178 = Constraint(expr= - 0.9*m.x161 + m.x165 == 0)
m.c179 = Constraint(expr= - 0.85*m.x162 + m.x166 == 0)
m.c180 = Constraint(expr= - 0.9*m.x163 + m.x167 == 0)
m.c181 = Constraint(expr= - 0.75*m.x172 + m.x176 == 0)
m.c182 = Constraint(expr= - 0.95*m.x173 + m.x177 == 0)
m.c183 = Constraint(expr= - 0.9*m.x174 + m.x178 == 0)
m.c184 = Constraint(expr= - 0.95*m.x175 + m.x179 == 0)
m.c185 = Constraint(expr= - 0.75*m.x172 + m.x180 == 0)
m.c186 = Constraint(expr= - 0.95*m.x173 + m.x181 == 0)
m.c187 = Constraint(expr= - 0.9*m.x174 + m.x182 == 0)
m.c188 = Constraint(expr= - 0.95*m.x175 + m.x183 == 0)
m.c189 = Constraint(expr= - 0.8*m.x168 + m.x188 == 0)
m.c190 = Constraint(expr= - 0.85*m.x169 + m.x189 == 0)
m.c191 = Constraint(expr= - 0.8*m.x170 + m.x190 == 0)
m.c192 = Constraint(expr= - 0.85*m.x171 + m.x191 == 0)
m.c193 = Constraint(expr= - 0.85*m.x184 + m.x192 == 0)
m.c194 = Constraint(expr= - 0.95*m.x185 + m.x193 == 0)
m.c195 = Constraint(expr= - 0.85*m.x186 + m.x194 == 0)
m.c196 = Constraint(expr= - 0.95*m.x187 + m.x195 == 0)
m.c197 = Constraint(expr= - 0.8*m.x196 + m.x200 == 0)
m.c198 = Constraint(expr= - 0.92*m.x197 + m.x201 == 0)
m.c199 = Constraint(expr= - 0.8*m.x198 + m.x202 == 0)
m.c200 = Constraint(expr= - 0.92*m.x199 + m.x203 == 0)
m.c201 = Constraint(expr= m.x3 - m.x88 - m.x89 - m.x90 - m.x91 == 0)
m.c202 = Constraint(expr= m.x4 - m.x92 - m.x93 - m.x94 - m.x95 == 0)
m.c203 = Constraint(expr= m.x8 - m.x96 - m.x97 - m.x98 - m.x99 == 0)
m.c204 = Constraint(expr= m.x12 - m.x100 - m.x101 - m.x102 - m.x103 == 0)
m.c205 = Constraint(expr= m.x15 - m.x104 - m.x105 - m.x106 - m.x107 == 0)
m.c206 = Constraint(expr= m.x18 - m.x112 - m.x113 - m.x114 - m.x115 == 0)
m.c207 = Constraint(expr= m.x17 - m.x108 - m.x109 - m.x110 - m.x111 == 0)
m.c208 = Constraint(expr= m.x24 - m.x116 - m.x117 - m.x118 - m.x119 == 0)
m.c209 = Constraint(expr= m.x25 - m.x120 - m.x121 - m.x122 - m.x123 == 0)
m.c210 = Constraint(expr= m.x32 - m.x128 - m.x129 - m.x130 - m.x131 == 0)
m.c211 = Constraint(expr= m.x31 - m.x124 - m.x125 - m.x126 - m.x127 == 0)
m.c212 = Constraint(expr= m.x88 - 35*m.b236 <= 0)
m.c213 = Constraint(expr= m.x89 - 35*m.b237 <= 0)
m.c214 = Constraint(expr= m.x90 - 35*m.b238 <= 0)
m.c215 = Constraint(expr= m.x91 - 35*m.b239 <= 0)
m.c216 = Constraint(expr= m.x92 - 35*m.b240 <= 0)
m.c217 = Constraint(expr= m.x93 - 35*m.b241 <= 0)
m.c218 = Constraint(expr= m.x94 - 35*m.b242 <= 0)
m.c219 = Constraint(expr= m.x95 - 35*m.b243 <= 0)
m.c220 = Constraint(expr= m.x96 - 61*m.b240 <= 0)
m.c221 = Constraint(expr= m.x97 - 61*m.b241 <= 0)
m.c222 = Constraint(expr= m.x98 - 61*m.b242 <= 0)
m.c223 = Constraint(expr= m.x99 - 61*m.b243 <= 0)
m.c224 = Constraint(expr= m.x100 - 25*m.b244 <= 0)
m.c225 = Constraint(expr= m.x101 - 25*m.b245 <= 0)
m.c226 = Constraint(expr= m.x102 - 25*m.b246 <= 0)
m.c227 = Constraint(expr= m.x103 - 25*m.b247 <= 0)
m.c228 = Constraint(expr= m.x104 - 25*m.b248 <= 0)
m.c229 = Constraint(expr= m.x105 - 25*m.b249 <= 0)
m.c230 = Constraint(expr= m.x106 - 25*m.b250 <= 0)
m.c231 = Constraint(expr= m.x107 - 25*m.b251 <= 0)
m.c232 = Constraint(expr= m.x112 - 25*m.b252 <= 0)
m.c233 = Constraint(expr= m.x113 - 25*m.b253 <= 0)
m.c234 = Constraint(expr= m.x114 - 25*m.b254 <= 0)
m.c235 = Constraint(expr= m.x115 - 25*m.b255 <= 0)
m.c236 = Constraint(expr= m.x108 - 25*m.b256 <= 0)
m.c237 = Constraint(expr= m.x109 - 25*m.b257 <= 0)
m.c238 = Constraint(expr= m.x110 - 25*m.b258 <= 0)
m.c239 = Constraint(expr= m.x111 - 25*m.b259 <= 0)
m.c240 = Constraint(expr= m.x116 - 24*m.b256 <= 0)
m.c241 = Constraint(expr= m.x117 - 24*m.b257 <= 0)
m.c242 = Constraint(expr= m.x118 - 24*m.b258 <= 0)
m.c243 = Constraint(expr= m.x119 - 24*m.b259 <= 0)
m.c244 = Constraint(expr= m.x120 - 24*m.b260 <= 0)
m.c245 = Constraint(expr= m.x121 - 24*m.b261 <= 0)
m.c246 = Constraint(expr= m.x122 - 24*m.b262 <= 0)
m.c247 = Constraint(expr= m.x123 - 24*m.b263 <= 0)
m.c248 = Constraint(expr= m.x128 - 30*m.b260 <= 0)
m.c249 = Constraint(expr= m.x129 - 30*m.b261 <= 0)
m.c250 = Constraint(expr= m.x130 - 30*m.b262 <= 0)
m.c251 = Constraint(expr= m.x131 - 30*m.b263 <= 0)
m.c252 = Constraint(expr= m.x124 - 30*m.b264 <= 0)
m.c253 = Constraint(expr= m.x125 - 30*m.b265 <= 0)
m.c254 = Constraint(expr= m.x126 - 30*m.b266 <= 0)
m.c255 = Constraint(expr= m.x127 - 30*m.b267 <= 0)
m.c256 = Constraint(expr= m.x88 - 10*m.b236 <= 0)
m.c257 = Constraint(expr= m.x89 - 10*m.b237 <= 0)
m.c258 = Constraint(expr= m.x90 - 50*m.b238 <= 0)
m.c259 = Constraint(expr= m.x91 - 50*m.b239 <= 0)
m.c260 = Constraint(expr= m.x92 + m.x96 - 40*m.b240 <= 0)
m.c261 = Constraint(expr= m.x93 + m.x97 - 40*m.b241 <= 0)
m.c262 = Constraint(expr= m.x94 + m.x98 - 60*m.b242 <= 0)
m.c263 = Constraint(expr= m.x95 + m.x99 - 60*m.b243 <= 0)
m.c264 = Constraint(expr= m.x100 - 15*m.b244 <= 0)
m.c265 = Constraint(expr= m.x101 - 15*m.b245 <= 0)
m.c266 = Constraint(expr= m.x102 - 25*m.b246 <= 0)
m.c267 = Constraint(expr= m.x103 - 25*m.b247 <= 0)
m.c268 = Constraint(expr= m.x104 - 15*m.b248 <= 0)
m.c269 = Constraint(expr= m.x105 - 15*m.b249 <= 0)
m.c270 = Constraint(expr= m.x106 - 20*m.b250 <= 0)
m.c271 = Constraint(expr= m.x107 - 20*m.b251 <= 0)
m.c272 = Constraint(expr= m.x112 - 10*m.b252 <= 0)
m.c273 = Constraint(expr= m.x113 - 10*m.b253 <= 0)
m.c274 = Constraint(expr= m.x114 - 20*m.b254 <= 0)
m.c275 = Constraint(expr= m.x115 - 20*m.b255 <= 0)
m.c276 = Constraint(expr= m.x108 + m.x116 - 20*m.b256 <= 0)
m.c277 = Constraint(expr= m.x109 + m.x117 - 20*m.b257 <= 0)
m.c278 = Constraint(expr= m.x110 + m.x118 - 55*m.b258 <= 0)
m.c279 = Constraint(expr= m.x111 + m.x119 - 55*m.b259 <= 0)
m.c280 = Constraint(expr= m.x120 + m.x128 - 25*m.b260 <= 0)
m.c281 = Constraint(expr= m.x121 + m.x129 - 25*m.b261 <= 0)
m.c282 = Constraint(expr= m.x122 + m.x130 - 50*m.b262 <= 0)
m.c283 = Constraint(expr= m.x123 + m.x131 - 50*m.b263 <= 0)
m.c284 = Constraint(expr= m.x124 - 15*m.b264 <= 0)
m.c285 = Constraint(expr= m.x125 - 15*m.b265 <= 0)
m.c286 = Constraint(expr= m.x126 - 35*m.b266 <= 0)
m.c287 = Constraint(expr= m.x127 - 35*m.b267 <= 0)
m.c288 = Constraint(expr= m.x80 - m.x204 - m.x205 - m.x206 - m.x207 == 0)
m.c289 = Constraint(expr= m.x81 - m.x208 - m.x209 - m.x210 - m.x211 == 0)
m.c290 = Constraint(expr= m.x82 - m.x212 - m.x213 - m.x214 - m.x215 == 0)
m.c291 = Constraint(expr= m.x83 - m.x216 - m.x217 - m.x218 - m.x219 == 0)
m.c292 = Constraint(expr= m.x84 - m.x220 - m.x221 - m.x222 - m.x223 == 0)
m.c293 = Constraint(expr= m.x85 - m.x224 - m.x225 - m.x226 - m.x227 == 0)
m.c294 = Constraint(expr= m.x86 - m.x228 - m.x229 - m.x230 - m.x231 == 0)
m.c295 = Constraint(expr= m.x87 - m.x232 - m.x233 - m.x234 - m.x235 == 0)
m.c296 = Constraint(expr= m.x204 <= 0)
m.c297 = Constraint(expr= m.x205 - 6*m.b237 <= 0)
m.c298 = Constraint(expr= m.x206 - 40*m.b238 <= 0)
m.c299 = Constraint(expr= m.x207 - 46*m.b239 <= 0)
m.c300 = Constraint(expr= m.x208 <= 0)
m.c301 = Constraint(expr= m.x209 - 7*m.b241 <= 0)
m.c302 = Constraint(expr= m.x210 - 30*m.b242 <= 0)
m.c303 = Constraint(expr= m.x211 - 37*m.b243 <= 0)
m.c304 = Constraint(expr= m.x212 <= 0)
m.c305 = Constraint(expr= m.x213 - 7*m.b245 <= 0)
m.c306 = Constraint(expr= m.x214 - 15*m.b246 <= 0)
m.c307 = Constraint(expr= m.x215 - 22*m.b247 <= 0)
m.c308 = Constraint(expr= m.x216 <= 0)
m.c309 = Constraint(expr= m.x217 - 11*m.b249 <= 0)
m.c310 = Constraint(expr= m.x218 - 13*m.b250 <= 0)
m.c311 = Constraint(expr= m.x219 - 24*m.b251 <= 0)
m.c312 = Constraint(expr= m.x220 <= 0)
m.c313 = Constraint(expr= m.x221 - 10*m.b253 <= 0)
m.c314 = Constraint(expr= m.x222 - 13*m.b254 <= 0)
m.c315 = Constraint(expr= m.x223 - 23*m.b255 <= 0)
m.c316 = Constraint(expr= m.x224 <= 0)
m.c317 = Constraint(expr= m.x225 - 9*m.b257 <= 0)
m.c318 = Constraint(expr= m.x226 - 30*m.b258 <= 0)
m.c319 = Constraint(expr= m.x227 - 39*m.b259 <= 0)
m.c320 = Constraint(expr= m.x228 <= 0)
m.c321 = Constraint(expr= m.x229 - 8*m.b261 <= 0)
m.c322 = Constraint(expr= m.x230 - 20*m.b262 <= 0)
m.c323 = Constraint(expr= m.x231 - 28*m.b263 <= 0)
m.c324 = Constraint(expr= m.x232 <= 0)
m.c325 = Constraint(expr= m.x233 - 8*m.b265 <= 0)
m.c326 = Constraint(expr= m.x234 - 15*m.b266 <= 0)
m.c327 = Constraint(expr= m.x235 - 23*m.b267 <= 0)
m.c328 = Constraint(expr= m.x204 == 0)
m.c329 = Constraint(expr= m.x205 - 6*m.b237 == 0)
m.c330 = Constraint(expr= m.x206 - 40*m.b238 == 0)
m.c331 = Constraint(expr= m.x207 - 46*m.b239 == 0)
m.c332 = Constraint(expr= m.x208 == 0)
m.c333 = Constraint(expr= m.x209 - 7*m.b241 == 0)
m.c334 = Constraint(expr= m.x210 - 30*m.b242 == 0)
m.c335 = Constraint(expr= m.x211 - 37*m.b243 == 0)
m.c336 = Constraint(expr= m.x212 == 0)
m.c337 = Constraint(expr= m.x213 - 7*m.b245 == 0)
m.c338 = Constraint(expr= m.x214 - 15*m.b246 == 0)
m.c339 = Constraint(expr= m.x215 - 22*m.b247 == 0)
m.c340 = Constraint(expr= m.x216 == 0)
m.c341 = Constraint(expr= m.x217 - 11*m.b249 == 0)
m.c342 = Constraint(expr= m.x218 - 13*m.b250 == 0)
m.c343 = Constraint(expr= m.x219 - 24*m.b251 == 0)
m.c344 = Constraint(expr= m.x220 == 0)
m.c345 = Constraint(expr= m.x221 - 10*m.b253 == 0)
m.c346 = Constraint(expr= m.x222 - 13*m.b254 == 0)
m.c347 = Constraint(expr= m.x223 - 23*m.b255 == 0)
m.c348 = Constraint(expr= m.x224 == 0)
m.c349 = Constraint(expr= m.x225 - 9*m.b257 == 0)
m.c350 = Constraint(expr= m.x226 - 30*m.b258 == 0)
m.c351 = Constraint(expr= m.x227 - 39*m.b259 == 0)
m.c352 = Constraint(expr= m.x228 == 0)
m.c353 = Constraint(expr= m.x229 - 8*m.b261 == 0)
m.c354 = Constraint(expr= m.x230 - 20*m.b262 == 0)
m.c355 = Constraint(expr= m.x231 - 28*m.b263 == 0)
m.c356 = Constraint(expr= m.x232 == 0)
m.c357 = Constraint(expr= m.x233 - 8*m.b265 == 0)
m.c358 = Constraint(expr= m.x234 - 15*m.b266 == 0)
m.c359 = Constraint(expr= m.x235 - 23*m.b267 == 0)
m.c360 = Constraint(expr= 4*m.x2 + 8*m.x7 + 5*m.x11 + 8*m.x23 + 10*m.x30 + m.x80 + m.x81 + m.x82 + m.x83 + m.x84
+ m.x85 + m.x86 + m.x87 <= 4000)
m.c361 = Constraint(expr= m.b236 + m.b237 + m.b238 + m.b239 == 1)
m.c362 = Constraint(expr= m.b240 + m.b241 + m.b242 + m.b243 == 1)
m.c363 = Constraint(expr= m.b244 + m.b245 + m.b246 + m.b247 == 1)
m.c364 = Constraint(expr= m.b248 + m.b249 + m.b250 + m.b251 == 1)
m.c365 = Constraint(expr= m.b252 + m.b253 + m.b254 + m.b255 == 1)
m.c366 = Constraint(expr= m.b256 + m.b257 + m.b258 + m.b259 == 1)
m.c367 = Constraint(expr= m.b260 + m.b261 + m.b262 + m.b263 == 1)
m.c368 = Constraint(expr= m.b264 + m.b265 + m.b266 + m.b267 == 1)
m.c369 = Constraint(expr= m.x6 - m.x33 - m.x268 == 0)
m.c370 = Constraint(expr= m.x10 - m.x34 - m.x279 == 0)
m.c371 = Constraint(expr= m.x21 - m.x35 == 0)
m.c372 = Constraint(expr= m.x22 - m.x36 == 0)
m.c373 = Constraint(expr= m.x268 - m.x269 - m.x270 == 0)
m.c374 = Constraint(expr= - m.x271 - m.x272 + m.x273 == 0)
m.c375 = Constraint(expr= m.x273 - m.x274 - m.x275 == 0)
m.c376 = Constraint(expr= m.x275 - m.x276 - m.x277 - m.x278 == 0)
m.c377 = Constraint(expr=(m.x287/(1e-6 + m.b305) - log(1 + m.x283/(1e-6 + m.b305)))*(1e-6 + m.b305) <= 0)
m.c378 = Constraint(expr= m.x284 == 0)
m.c379 = Constraint(expr= m.x288 == 0)
m.c380 = Constraint(expr= m.x269 - m.x283 - m.x284 == 0)
m.c381 = Constraint(expr= m.x271 - m.x287 - m.x288 == 0)
m.c382 = Constraint(expr= m.x283 - 10*m.b305 <= 0)
m.c383 = Constraint(expr= m.x284 + 10*m.b305 <= 10)
m.c384 = Constraint(expr= m.x287 - 2.39789527279837*m.b305 <= 0)
m.c385 = Constraint(expr= m.x288 + 2.39789527279837*m.b305 <= 2.39789527279837)
m.c386 = Constraint(expr=(m.x289/(1e-6 + m.b306) - 1.2*log(1 + m.x285/(1e-6 + m.b306)))*(1e-6 + m.b306) <= 0)
m.c387 = Constraint(expr= m.x286 == 0)
m.c388 = Constraint(expr= m.x290 == 0)
m.c389 = Constraint(expr= m.x270 - m.x285 - m.x286 == 0)
m.c390 = Constraint(expr= m.x272 - m.x289 - m.x290 == 0)
m.c391 = Constraint(expr= m.x285 - 10*m.b306 <= 0)
m.c392 = Constraint(expr= m.x286 + 10*m.b306 <= 10)
m.c393 = Constraint(expr= m.x289 - 2.87747432735804*m.b306 <= 0)
m.c394 = Constraint(expr= m.x290 + 2.87747432735804*m.b306 <= 2.87747432735804)
m.c395 = Constraint(expr= - 0.75*m.x291 + m.x299 == 0)
m.c396 = Constraint(expr= m.x292 == 0)
m.c397 = Constraint(expr= m.x300 == 0)
m.c398 = Constraint(expr= m.x276 - m.x291 - m.x292 == 0)
m.c399 = Constraint(expr= m.x280 - m.x299 - m.x300 == 0)
m.c400 = Constraint(expr= m.x291 - 2.87747432735804*m.b307 <= 0)
m.c401 = Constraint(expr= m.x292 + 2.87747432735804*m.b307 <= 2.87747432735804)
m.c402 = Constraint(expr= m.x299 - 2.15810574551853*m.b307 <= 0)
m.c403 = Constraint(expr= m.x300 + 2.15810574551853*m.b307 <= 2.15810574551853)
m.c404 = Constraint(expr=(m.x301/(1e-6 + m.b308) - 1.5*log(1 + m.x293/(1e-6 + m.b308)))*(1e-6 + m.b308) <= 0)
m.c405 = Constraint(expr= m.x294 == 0)
m.c406 = Constraint(expr= m.x302 == 0)
m.c407 = Constraint(expr= m.x277 - m.x293 - m.x294 == 0)
m.c408 = Constraint(expr= m.x281 - m.x301 - m.x302 == 0)
m.c409 = Constraint(expr= m.x293 - 2.87747432735804*m.b308 <= 0)
m.c410 = Constraint(expr= m.x294 + 2.87747432735804*m.b308 <= 2.87747432735804)
m.c411 = Constraint(expr= m.x301 - 2.03277599268042*m.b308 <= 0)
m.c412 = Constraint(expr= m.x302 + 2.03277599268042*m.b308 <= 2.03277599268042)
m.c413 = Constraint(expr= - m.x295 + m.x303 == 0)
m.c414 = Constraint(expr= - 0.5*m.x297 + m.x303 == 0)
m.c415 = Constraint(expr= m.x296 == 0)
m.c416 = Constraint(expr= m.x298 == 0)
m.c417 = Constraint(expr= m.x304 == 0)
m.c418 = Constraint(expr= m.x278 - m.x295 - m.x296 == 0)
m.c419 = Constraint(expr= m.x279 - m.x297 - m.x298 == 0)
m.c420 = Constraint(expr= m.x282 - m.x303 - m.x304 == 0)
m.c421 = Constraint(expr= m.x295 - 2.87747432735804*m.b309 <= 0)
m.c422 = Constraint(expr= m.x296 + 2.87747432735804*m.b309 <= 2.87747432735804)
m.c423 = Constraint(expr= m.x297 - 7*m.b309 <= 0)
m.c424 = Constraint(expr= m.x298 + 7*m.b309 <= 7)
m.c425 = Constraint(expr= m.x303 - 3.5*m.b309 <= 0)
m.c426 = Constraint(expr= m.x304 + 3.5*m.b309 <= 3.5)
m.c427 = Constraint(expr= m.b305 + m.b306 == 1)
m.c428 = Constraint(expr= m.b305 + m.b306 - m.b307 >= 0)
m.c429 = Constraint(expr= m.b305 + m.b306 - m.b308 >= 0)
m.c430 = Constraint(expr= m.b305 + m.b306 - m.b309 >= 0)
| 36.256047
| 119
| 0.639737
|
4a0ab2fbbb9894365a04cf6252833961e5fc303a
| 9,657
|
py
|
Python
|
asposeslidescloud/models/image_export_options.py
|
rizwanniazigroupdocs/aspose-slides-cloud-python
|
f692a7082387350f80f0b389c1914e33b800a76f
|
[
"MIT"
] | null | null | null |
asposeslidescloud/models/image_export_options.py
|
rizwanniazigroupdocs/aspose-slides-cloud-python
|
f692a7082387350f80f0b389c1914e33b800a76f
|
[
"MIT"
] | null | null | null |
asposeslidescloud/models/image_export_options.py
|
rizwanniazigroupdocs/aspose-slides-cloud-python
|
f692a7082387350f80f0b389c1914e33b800a76f
|
[
"MIT"
] | null | null | null |
# coding: utf-8
# -----------------------------------------------------------------------------------
# <copyright company="Aspose">
# Copyright (c) 2018 Aspose.Slides for Cloud
# </copyright>
# <summary>
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in all
# copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
# </summary>
# -----------------------------------------------------------------------------------
import pprint
import re # noqa: F401
import six
from asposeslidescloud.models.export_options import ExportOptions
class ImageExportOptions(ExportOptions):
    """Provides options that control how a presentation is exported as an image.

    Extends ExportOptions with image-specific settings: the position of the
    slide notes and the position/size/color of the comments area.

    Attributes:
        swagger_types (dict): The key is attribute name
            and the value is attribute type.
        attribute_map (dict): The key is attribute name
            and the value is json key in definition.
    """
    swagger_types = {
        'default_regular_font': 'str',
        'format': 'str',
        'notes_position': 'str',
        'comments_position': 'str',
        'comments_area_width': 'int',
        'comments_area_color': 'str'
    }

    attribute_map = {
        'default_regular_font': 'defaultRegularFont',
        'format': 'format',
        'notes_position': 'notesPosition',
        'comments_position': 'commentsPosition',
        'comments_area_width': 'commentsAreaWidth',
        'comments_area_color': 'commentsAreaColor'
    }

    # Used by the deserializer to select this subclass when format == 'image'.
    type_determiners = {
        'format': 'image',
    }

    def __init__(self, default_regular_font=None, format='image', notes_position=None, comments_position=None, comments_area_width=None, comments_area_color=None):  # noqa: E501
        """ImageExportOptions - a model defined in Swagger"""  # noqa: E501
        super(ImageExportOptions, self).__init__(default_regular_font, format)
        self._notes_position = None
        self._comments_position = None
        self._comments_area_width = None
        self._comments_area_color = None
        # BUG FIX: the original line read `self.format: 'image'`, a bare
        # annotation statement with no runtime effect.  The format for this
        # options class must always be 'image' (see type_determiners), so it
        # is assigned explicitly here.
        self.format = 'image'
        self.notes_position = notes_position
        self.comments_position = comments_position
        self.comments_area_width = comments_area_width
        if comments_area_color is not None:
            self.comments_area_color = comments_area_color

    @property
    def notes_position(self):
        """Gets the notes_position of this ImageExportOptions.  # noqa: E501

        Gets or sets the position of the notes on the page.  # noqa: E501

        :return: The notes_position of this ImageExportOptions.  # noqa: E501
        :rtype: str
        """
        return self._notes_position

    @notes_position.setter
    def notes_position(self, notes_position):
        """Sets the notes_position of this ImageExportOptions.

        Gets or sets the position of the notes on the page.  # noqa: E501
        Accepts either one of the allowed string values or the numeric index
        of that value (as a digit string).

        :param notes_position: The notes_position of this ImageExportOptions.  # noqa: E501
        :type: str
        """
        if notes_position is not None:
            allowed_values = ["None", "BottomFull", "BottomTruncated"]  # noqa: E501
            if notes_position.isdigit():
                int_notes_position = int(notes_position)
                if int_notes_position < 0 or int_notes_position >= len(allowed_values):
                    raise ValueError(
                        "Invalid value for `notes_position` ({0}), must be one of {1}"  # noqa: E501
                        .format(notes_position, allowed_values)
                    )
                self._notes_position = allowed_values[int_notes_position]
                return
            if notes_position not in allowed_values:
                raise ValueError(
                    "Invalid value for `notes_position` ({0}), must be one of {1}"  # noqa: E501
                    .format(notes_position, allowed_values)
                )
        self._notes_position = notes_position

    @property
    def comments_position(self):
        """Gets the comments_position of this ImageExportOptions.  # noqa: E501

        Gets or sets the position of the comments on the page.  # noqa: E501

        :return: The comments_position of this ImageExportOptions.  # noqa: E501
        :rtype: str
        """
        return self._comments_position

    @comments_position.setter
    def comments_position(self, comments_position):
        """Sets the comments_position of this ImageExportOptions.

        Gets or sets the position of the comments on the page.  # noqa: E501
        Accepts either one of the allowed string values or the numeric index
        of that value (as a digit string).

        :param comments_position: The comments_position of this ImageExportOptions.  # noqa: E501
        :type: str
        """
        if comments_position is not None:
            allowed_values = ["None", "Bottom", "Right"]  # noqa: E501
            if comments_position.isdigit():
                int_comments_position = int(comments_position)
                if int_comments_position < 0 or int_comments_position >= len(allowed_values):
                    raise ValueError(
                        "Invalid value for `comments_position` ({0}), must be one of {1}"  # noqa: E501
                        .format(comments_position, allowed_values)
                    )
                self._comments_position = allowed_values[int_comments_position]
                return
            if comments_position not in allowed_values:
                raise ValueError(
                    "Invalid value for `comments_position` ({0}), must be one of {1}"  # noqa: E501
                    .format(comments_position, allowed_values)
                )
        self._comments_position = comments_position

    @property
    def comments_area_width(self):
        """Gets the comments_area_width of this ImageExportOptions.  # noqa: E501

        Gets or sets the width of the comment output area in pixels (Applies only if comments are displayed on the right).  # noqa: E501

        :return: The comments_area_width of this ImageExportOptions.  # noqa: E501
        :rtype: int
        """
        return self._comments_area_width

    @comments_area_width.setter
    def comments_area_width(self, comments_area_width):
        """Sets the comments_area_width of this ImageExportOptions.

        Gets or sets the width of the comment output area in pixels (Applies only if comments are displayed on the right).  # noqa: E501

        :param comments_area_width: The comments_area_width of this ImageExportOptions.  # noqa: E501
        :type: int
        """
        self._comments_area_width = comments_area_width

    @property
    def comments_area_color(self):
        """Gets the comments_area_color of this ImageExportOptions.  # noqa: E501

        Gets or sets the color of comments area (Applies only if comments are displayed on the right).  # noqa: E501

        :return: The comments_area_color of this ImageExportOptions.  # noqa: E501
        :rtype: str
        """
        return self._comments_area_color

    @comments_area_color.setter
    def comments_area_color(self, comments_area_color):
        """Sets the comments_area_color of this ImageExportOptions.

        Gets or sets the color of comments area (Applies only if comments are displayed on the right).  # noqa: E501

        :param comments_area_color: The comments_area_color of this ImageExportOptions.  # noqa: E501
        :type: str
        """
        self._comments_area_color = comments_area_color

    def to_dict(self):
        """Returns the model properties as a dict"""
        result = {}
        for attr, _ in six.iteritems(self.swagger_types):
            value = getattr(self, attr)
            if isinstance(value, list):
                result[attr] = list(map(
                    lambda x: x.to_dict() if hasattr(x, "to_dict") else x,
                    value
                ))
            elif hasattr(value, "to_dict"):
                result[attr] = value.to_dict()
            elif isinstance(value, dict):
                result[attr] = dict(map(
                    lambda item: (item[0], item[1].to_dict())
                    if hasattr(item[1], "to_dict") else item,
                    value.items()
                ))
            else:
                result[attr] = value
        return result

    def to_str(self):
        """Returns the string representation of the model"""
        return pprint.pformat(self.to_dict())

    def __repr__(self):
        """For `print` and `pprint`"""
        return self.to_str()

    def __eq__(self, other):
        """Returns true if both objects are equal"""
        if not isinstance(other, ImageExportOptions):
            return False
        return self.__dict__ == other.__dict__

    def __ne__(self, other):
        """Returns true if both objects are not equal"""
        return not self == other
| 39.416327
| 177
| 0.623382
|
4a0ab3467b0e8d79f90d067c5362b24cc643f7bf
| 3,855
|
py
|
Python
|
src/passthrough/label_tools.py
|
ExoMars-PanCam/passthrough
|
7ff9f82e4c85c40a4f2dab20bbee1c46d79d61a5
|
[
"MIT"
] | 2
|
2021-05-04T04:30:37.000Z
|
2021-05-04T12:17:22.000Z
|
src/passthrough/label_tools.py
|
ExoMars-PanCam/passthrough
|
7ff9f82e4c85c40a4f2dab20bbee1c46d79d61a5
|
[
"MIT"
] | 4
|
2021-05-04T16:56:49.000Z
|
2021-05-12T17:00:07.000Z
|
src/passthrough/label_tools.py
|
ExoMars-PanCam/passthrough
|
7ff9f82e4c85c40a4f2dab20bbee1c46d79d61a5
|
[
"MIT"
] | null | null | null |
"""PDS4 label interrogation and manipulation functionality"""
__all__ = [
"LabelLike",
"PDS_NS_PREFIX",
"ATTR_PATHS",
"labellike_to_etree",
"add_default_ns",
"is_populated",
"PathManipulator",
]
from pathlib import Path
from typing import Dict, Optional, Union
from lxml import etree
try:
from pds4_tools.reader.general_objects import StructureList
from pds4_tools.reader.label_objects import Label
except ModuleNotFoundError:
StructureList = None
Label = None
if None not in (StructureList, Label):
LabelLike = Union[etree._ElementTree, StructureList, Label, Path, str]
else:
LabelLike = Union[etree._ElementTree, Path, str]
PDS_NS_PREFIX = "pds"
# Common PDS4 attribute XPath shorthands
ATTR_PATHS = {
"lid": "//pds:Identification_Area/pds:logical_identifier",
"start": "//pds:Time_Coordinates/pds:start_date_time",
"stop": "//pds:Time_Coordinates/pds:stop_date_time",
# "type": "//msn:Mission_Information/msn:product_type_name",
# "sub_instrument": "//psa:Sub-Instrument/psa:identifier",
# "exposure_duration": "//img:Exposure/img:exposure_duration",
}
def labellike_to_etree(labellike: LabelLike) -> etree._ElementTree:
    """Coerce any supported label representation into an lxml ElementTree.

    Accepts an ElementTree (returned unchanged), a Path or path string
    (parsed from disk), or a pds4_tools StructureList/Label (serialized and
    re-parsed, preserving the source path as base_url when recoverable).
    """
    if isinstance(labellike, etree._ElementTree):
        return labellike
    if isinstance(labellike, Path):
        # Normalize to an absolute path string and fall through to str handling.
        labellike = str(labellike.expanduser().resolve())
    if isinstance(labellike, str):
        return etree.parse(labellike)
    base_url = None
    if StructureList is not None and isinstance(labellike, StructureList):
        marker = "Processing label: "
        first_log_line = labellike.read_in_log.split("\n")[0]
        if first_log_line.startswith(marker):
            # *should* always resolve to the abs path of the XML label
            base_url = first_log_line[len(marker):]
        labellike = labellike.label
        # fall through to handling of Label
    if Label is not None and isinstance(labellike, Label):
        serialized = labellike.to_string(unmodified=True)
        return etree.fromstring(serialized, base_url=base_url).getroottree()
    raise TypeError(
        f"unknown label format {type(labellike)}, expected one of {LabelLike}"
    )
def add_default_ns(nsmap: Dict[Optional[str], str]) -> Dict[str, str]:
    """Rename the default-namespace entry (keyed by None) to the PDS prefix.

    The mapping is modified in place and also returned for convenience.
    Raises KeyError if no default namespace entry is present.
    """
    nsmap[PDS_NS_PREFIX] = nsmap.pop(None)
    return nsmap
def is_populated(elem: etree._Element):
    """Return True if the element carries non-whitespace text, or is
    explicitly marked empty via xsi:nil="true" (a deliberate nil counts
    as populated)."""
    text = elem.text
    if text and text.strip():
        return True
    xsi_uri = elem.nsmap.get("xsi")
    if xsi_uri is not None and elem.attrib.get(f"{{{xsi_uri}}}nil", False) == "true":
        return True
    return False
class PathManipulator:
    """Converts element paths between Clark and prefix notation for XPath use."""

    def __init__(self, nsmap: dict, default_prefix: str = PDS_NS_PREFIX):
        """
        :param nsmap: mapping of namespace prefixes to namespace URIs
        :param default_prefix: prefix applied to un-prefixed path segments
        """
        self._nsmap = nsmap
        self._default_prefix = default_prefix

    def clark_to_prefix(self, path: str):
        """
        Transforms paths provided in Clark notation (`{nsURI}tag`) to XPath-valid prefix
        notation (`nsPrefix:tag`).
        :param path: path string in Clark notation (e.g. ElementPath)
        :return: path string in prefix notation
        """
        for prefix, uri in self._nsmap.items():
            path = path.replace(f"{{{uri}}}", f"{prefix}:")
        return path

    def prefix_default_ns(self, path: str):
        """
        Prepends the default prefix to every path segment that lacks an
        explicit namespace prefix.

        BUG FIX: the previous implementation raised IndexError on any path
        with a *leading* separator (e.g. the absolute "//pds:..." paths in
        ATTR_PATHS), because it popped a trailing "/" off a still-empty list,
        and it also left a spurious trailing "/" on separator-free relative
        paths. Empty segments (produced by a leading "/", "//", or a trailing
        "/") are now preserved verbatim so separators survive the round trip.

        :param path: path string, absolute or relative
        :return: path string with un-prefixed segments prefixed
        """
        prefixed = []
        for segment in path.split("/"):
            if segment.startswith("*"):
                raise RuntimeError(f"path segment not yet supported: '{segment}'")
            if ":" in segment or not segment:
                # Already prefixed, or an empty segment marking a separator.
                prefixed.append(segment)
            else:
                prefixed.append(f"{self._default_prefix}:{segment}")
        return "/".join(prefixed)
| 33.232759
| 88
| 0.649027
|
4a0ab36a012b894aa17aaef3cec1dc4dc70cc32e
| 546
|
py
|
Python
|
tsaotun/lib/Docker/Addon/disable.py
|
qazbnm456/tsaotun
|
70186faebd5303961d5996c758f7c9147c4439ba
|
[
"Apache-2.0"
] | 47
|
2017-01-15T08:33:46.000Z
|
2022-02-11T22:37:48.000Z
|
tsaotun/lib/Docker/Addon/disable.py
|
qazbnm456/dokcer
|
70186faebd5303961d5996c758f7c9147c4439ba
|
[
"Apache-2.0"
] | null | null | null |
tsaotun/lib/Docker/Addon/disable.py
|
qazbnm456/dokcer
|
70186faebd5303961d5996c758f7c9147c4439ba
|
[
"Apache-2.0"
] | 2
|
2017-01-16T13:10:22.000Z
|
2019-03-28T17:05:04.000Z
|
"""This module contains `tsaotun addon disable` class"""
from .command import Command
from ....lib.Addon.loader import Loader
class Disable(Command):
    """This class implements `tsaotun addon disable` command"""

    name = "addon disable"
    require = []

    def __init__(self):
        # Initialize the base command, then reserve a settings slot for
        # this command's result.
        Command.__init__(self)
        self.settings[self.name] = None

    def eval_command(self, args):
        # Delegate to the addon loader and stash its return value.
        self.settings[self.name] = Loader().disable(args["addon"])

    def final(self):
        # Hand back whatever eval_command produced.
        return self.settings[self.name]
| 23.73913
| 64
| 0.652015
|
4a0ab4810aa683c3902507c036516293dfbd8701
| 4,517
|
py
|
Python
|
converter/cli.py
|
OasisLMF/OasisDataConverter
|
c1cad71431558db8eab065e7b085603452300210
|
[
"BSD-3-Clause"
] | 2
|
2021-06-11T13:18:35.000Z
|
2021-07-14T16:25:04.000Z
|
converter/cli.py
|
OasisLMF/OasisDataConverter
|
c1cad71431558db8eab065e7b085603452300210
|
[
"BSD-3-Clause"
] | 17
|
2020-08-11T11:35:06.000Z
|
2021-07-22T15:15:34.000Z
|
converter/cli.py
|
OasisLMF/OasisDataConverter
|
c1cad71431558db8eab065e7b085603452300210
|
[
"BSD-3-Clause"
] | 2
|
2020-11-11T12:02:04.000Z
|
2021-03-29T13:56:32.000Z
|
import logging
import os
import sys
from datetime import datetime
from logging.config import dictConfig as loggingDictConfig
import click
import yaml
from converter.config import Config
from converter.controller import Controller
class ColorFormatter(logging.Formatter):
    """
    Formatter that colorizes messages by severity: errors and criticals in
    red, warnings in yellow, debug in blue. Levels without a mapping (e.g.
    INFO) are left uncolored.

    :param colors: Mapping of log level to colors
    """

    colors = {
        logging.ERROR: "red",
        logging.CRITICAL: "red",
        logging.DEBUG: "blue",
        logging.WARNING: "yellow",
    }

    def format(self, record) -> str:
        """
        Render the record via the base formatter, then wrap it in the
        level's color (no-op when the level has no color assigned).

        :param record: The record to format
        :return: The formatted message
        """
        message = super().format(record)
        return click.style(message, fg=self.colors.get(record.levelno))
class ClickEchoHandler(logging.Handler):
    """
    Forwards formatted log records to `click.echo`, routing warnings and
    above to stderr.
    """

    def emit(self, record):
        goes_to_stderr = record.levelno >= logging.WARNING
        click.echo(self.format(record), err=goes_to_stderr)
def init_logging(verbosity, no_color):
    """
    Sets up the logging config for the console and files

    :param verbosity: The verbosity level
        0 - errors and warnings only
        1 - info
        2 - debug
    :param no_color: Don't add the color to the output
    """
    if not os.path.exists('log'):
        os.mkdir('log')
    timestamp = datetime.now().strftime("%Y-%m-%d_%H-%M-%S")
    log_prefix = os.path.join('log', timestamp)
    # Map verbosity count to a console level, capping at DEBUG.
    console_log_level = (logging.WARNING, logging.INFO, logging.DEBUG)[
        min(2, verbosity)
    ]
    console_formatter = (
        "logging.Formatter" if no_color else "converter.cli.ColorFormatter"
    )
    loggingDictConfig(
        {
            "version": 1,
            "formatters": {
                "console": {"class": console_formatter},
                "file": {"format": "%(asctime)s %(levelname)-7s: %(message)s"},
            },
            "filters": {"info_only": {"class": "converter.cli.InfoFilter"}},
            "handlers": {
                # Console output honors the requested verbosity; the log file
                # always captures everything at DEBUG.
                "console": {
                    "class": "converter.cli.ClickEchoHandler",
                    "formatter": "console",
                    "level": console_log_level,
                },
                "log-file": {
                    "class": "logging.FileHandler",
                    "formatter": "file",
                    "filename": f"{log_prefix}-converter.log",
                    "level": logging.DEBUG,
                    "mode": "w",
                },
            },
            "root": {"level": "DEBUG", "handlers": ["console", "log-file"]},
        }
    )
    logging.captureWarnings(True)
@click.group()
@click.option(
    "--option",
    "-o",
    nargs=2,
    multiple=True,
    help=(
        "Sets a configuration option, a path and value are required "
        "eg -o extractor.options.foo.bar bash"
    ),
)
@click.option(
    "--config",
    "-c",
    default="./config.yml",
    envvar="CONVERTER_CONFIG",
    help="Path to the configuration file.",
)
@click.option(
    "--verbose",
    "-v",
    count=True,
    help=(
        "Specifies the verbosity level, if used multiple "
        "times the verbosity is increased further"
    ),
)
@click.option(
    "--no-color",
    help="Disables colorised output.",
    # BUG FIX: this flag was declared with `is_flag=True, flag_value=False`,
    # so passing --no-color set no_color to False and the option could never
    # take effect. A plain boolean flag defaults to False and becomes True
    # when supplied, which is what init_logging expects.
    is_flag=True,
)
@click.pass_context
def cli(ctx, config, verbose, no_color, option):
    """
    Initialises the cli grouping with default options.

    :param ctx: click context used to share the resolved Config with subcommands
    :param config: path to the YAML configuration file
    :param verbose: verbosity counter (0 = warnings, 1 = info, 2+ = debug)
    :param no_color: disable colorised console output
    :param option: (path, value) pairs overriding individual config entries
    """
    ctx.ensure_object(dict)
    init_logging(verbose, no_color)
    options = dict(option)
    # Command-line override values are parsed as YAML so that numbers,
    # booleans and lists round-trip correctly.
    ctx.obj["config"] = Config(
        config_path=config,
        argv={k: yaml.load(v, yaml.SafeLoader) for k, v in options.items()},
        env=os.environ,
    )
@cli.command()
@click.pass_context
def show_config(ctx):
    """
    Prints the resolved config to the console
    """
    resolved = ctx.obj["config"].to_yaml()
    click.echo(resolved)
@cli.command()
@click.pass_context
def run(ctx):
    """
    Runs the data conversion
    """
    try:
        cfg = ctx.obj["config"]
        logging.debug(f"Running with config:\n{cfg.to_yaml()}")
        Controller(cfg).run()
    except Exception as exc:
        # Log the full traceback and signal failure to the shell.
        logging.exception(exc)
        sys.exit(1)
    else:
        logging.info("Transformation Complete")
| 24.818681
| 79
| 0.556121
|
4a0ab5404f67a5b2a3a0df23367083d11fc772e3
| 5,570
|
py
|
Python
|
sailfish/setups/envelope_shock.py
|
macfadyen/sailfish
|
44752a6769a2a7566a90dd9c8df21d4e2c49d720
|
[
"MIT"
] | 9
|
2021-06-29T15:43:58.000Z
|
2022-03-20T10:13:26.000Z
|
sailfish/setups/envelope_shock.py
|
macfadyen/sailfish
|
44752a6769a2a7566a90dd9c8df21d4e2c49d720
|
[
"MIT"
] | 1
|
2021-12-23T00:00:46.000Z
|
2021-12-23T00:00:46.000Z
|
sailfish/setups/envelope_shock.py
|
macfadyen/sailfish
|
44752a6769a2a7566a90dd9c8df21d4e2c49d720
|
[
"MIT"
] | 8
|
2021-06-09T09:11:15.000Z
|
2021-11-02T20:25:27.000Z
|
"""
Contains a setup for studying a relativistic type-II shockwave.
"""
from math import pi, exp, log10
from sailfish.setup import Setup, SetupError, param
from sailfish.mesh import LogSphericalMesh
__all__ = ["EnvelopeShock"]
class EnvelopeShock(Setup):
    """
    A relativistic shell launched into a homologous, relativistic envelope.
    """
    u_shell = param(30.0, "gamma-beta of the launched shell")
    m_shell = param(1.0, "mass coordinate of the launched shell")
    w_shell = param(1.0, "width of the shell in dm/m")
    q_shell = param(0.1, "opening angle of the shell")
    t_start = param(1.0, "time when the simulation starts")
    r_inner = param(0.1, "inner radius (comoving if expand=True)")
    r_outer = param(1.0, "outer radius (comoving if expand=True)")
    expand = param(True, "whether to expand the mesh homologously")
    polar_extent = param(0.0, "polar domain extent over pi (equator is 0.5, 1D is 0.0)")
    @property
    def polar(self):
        # 2D (r, theta) mode whenever a non-zero polar extent was requested.
        return self.polar_extent > 0.0
    def primitive(self, t, coord, primitive):
        """Fill `primitive` in place with the initial state at time t.

        `coord` is a scalar radius in 1D, or an (r, theta) pair in 2D (see
        the `self.polar` branch). The slot layout differs between the two
        solvers: slot 3 is a passive scalar in 1D but holds the pressure in
        2D — presumably slot 2 is the polar velocity component there
        (NOTE(review): confirm against the srhd_2d solver).
        """
        envelope_fastest_beta = 0.999  # cap on the envelope's homologous speed v/c
        psi = 0.25  # index in u(m) ~ m^-psi
        m1 = 1.0  # mass coordinate of the u=1 shell
        r = coord[0] if self.polar else coord
        # Homologous flow: v = r / t, clamped below the fastest envelope shell.
        s = min(r / t, envelope_fastest_beta)
        g = (1.0 - s * s) ** -0.5  # Lorentz factor
        u = s * g  # gamma-beta (four-velocity)
        m = m1 * u ** (-1.0 / psi)  # mass coordinate of this shell
        d = m * g / (4 * pi * r ** 3 * psi)  # comoving mass density
        p = 1e-6 * d  # cold envelope: pressure is a tiny fraction of density
        def u_prof(m):
            # Extra boost from the launched shell: zero interior to the
            # shell's mass coordinate, exponentially decaying outside it.
            if m < self.m_shell:
                return 0.0
            else:
                return exp(-(m / self.m_shell - 1.0) / self.w_shell)
        if not self.polar:
            primitive[0] = d
            primitive[1] = u + u_prof(m) * self.u_shell
            primitive[2] = p
            # Passive scalar tags the shell material (mass range [m_shell,
            # m_shell * (1 + w_shell)]) so it can be tracked downstream.
            if m > self.m_shell and m < self.m_shell * (1.0 + self.w_shell):
                primitive[3] = 1.0
            else:
                primitive[3] = 0.0
        else:
            # Taper the shell boost with a Gaussian in polar angle, scaled by
            # the shell's opening angle q_shell.
            q_bar = coord[1] / self.q_shell
            primitive[0] = d
            primitive[1] = u + u_prof(m) * self.u_shell * exp(-(q_bar ** 2.0))
            primitive[2] = 0.0
            primitive[3] = p
    def mesh(self, num_zones_per_decade):
        """Build the log-spherical mesh, expanding homologously if requested."""
        return LogSphericalMesh(
            r0=self.r_inner,
            r1=self.r_outer,
            num_zones_per_decade=num_zones_per_decade,
            # a-dot/a = 1/t_start makes the grid co-expand with the flow.
            scale_factor_derivative=(1.0 / self.t_start) if self.expand else None,
            polar_grid=self.polar,
            polar_extent=self.polar_extent * pi,
        )
    @property
    def solver(self):
        # Pick the 1D or 2D special-relativistic hydro solver.
        if not self.polar:
            return "srhd_1d"
        else:
            return "srhd_2d"
    @property
    def start_time(self):
        return self.t_start
    @property
    def boundary_condition(self):
        return "outflow"
    @property
    def default_end_time(self):
        return 1.0
    @property
    def default_resolution(self):
        # NOTE(review): the 2D default (800) is presumably zones per decade
        # while the 1D default (20000) is a total zone count — confirm
        # against how the driver interprets resolution per solver.
        if self.polar:
            return 800
        else:
            return 20000
# ---------------------------------------------------------
# Code below can probably be removed
# ---------------------------------------------------------
#
# from typing import NamedTuple
# try:
# from functools import cached_property
# except ImportError:
# # revert to ordinary property on Python < 3.8
# cached_property = property
# def r_shell(self) -> float:
# u = self.m_shell ** -0.25
# s = u / (1.0 + u * u) ** 0.5
# return self.t_start * s
# @cached_property
# def ambient(self):
# return RelativisticEnvelope(
# envelope_m1=1.0,
# envelope_fastest_beta=0.999,
# envelope_slowest_beta=0.00,
# envelope_psi=0.25,
# wind_mdot=100.0,
# )
# def gamma_shell(self) -> float:
# return (1.0 + self.u_shell ** 2) ** 0.5
# def shell_energy(self) -> float:
# return self.w_shell * self.m_shell * (self.gamma_shell() - 1.0)
# ZONE_ENVELOPE = 0
# ZONE_WIND = 1
# class RelativisticEnvelope(NamedTuple):
# """
# Describes a homologous expanding medium with power-law mass coordinate.
# """
# envelope_m1: float
# """ Mass coordinate of the u=1 shell """
# envelope_slowest_beta: float
# """ Beta (v/c) of the slowest envelope shell """
# envelope_fastest_beta: float
# """ Beta (v/c) of the outer shell """
# envelope_psi: float
# """ Index psi in u(m) ~ m^-psi """
# wind_mdot: float
# """ The mass loss rate for the wind """
# def zone(self, r: float, t: float) -> int:
# v_min = self.envelope_slowest_beta
# r_wind_envelop_interface = v_min * t
# if r > r_wind_envelop_interface:
# return ZONE_ENVELOPE
# else:
# return ZONE_WIND
# def gamma_beta(self, r: float, t: float) -> float:
# if self.zone(r, t) == ZONE_WIND:
# return self.envelope_slowest_u()
# if self.zone(r, t) == ZONE_ENVELOPE:
# b = min(r / t, self.envelope_fastest_beta)
# u = b / (1.0 - b * b) ** 0.5
# return u
# def mass_rate_per_steradian(self, r: float, t: float) -> float:
# if self.zone(r, t) == ZONE_WIND:
# return self.wind_mdot
# if self.zone(r, t) == ZONE_ENVELOPE:
# y = self.envelope_psi
# s = min(r / t, self.envelope_fastest_beta)
# f = s ** (-1.0 / y) * (1.0 - s * s) ** (0.5 / y - 1.0)
# return self.envelope_m1 / (4.0 * pi * y * t) * f
# def comoving_mass_density(self, r: float, t: float) -> float:
# return self.mass_rate_per_steradian(r, t) / (self.gamma_beta(r, t) * r * r)
| 29.162304
| 88
| 0.551885
|
4a0ab5f1a06e21dc55e3453a873f644435367846
| 9,754
|
py
|
Python
|
pysster/Motif.py
|
sproft/pysster
|
b2634262f5bdfffc46461ec0f753e9fc743107ec
|
[
"MIT"
] | null | null | null |
pysster/Motif.py
|
sproft/pysster
|
b2634262f5bdfffc46461ec0f753e9fc743107ec
|
[
"MIT"
] | null | null | null |
pysster/Motif.py
|
sproft/pysster
|
b2634262f5bdfffc46461ec0f753e9fc743107ec
|
[
"MIT"
] | null | null | null |
import numpy as np
from os.path import dirname
from collections import Counter
from copy import deepcopy
from math import log
from PIL import Image, ImageDraw, ImageFont, ImageChops, ImageColor
class Motif:
    """
    The Motif class is a convenience class to compute and plot a position-weight matrix (PWM).

    The only functionality is the plot function. The PWM and corresponding entropy values
    can be accessed using the self.pwm and self.entropies members, if so desired. All uppercase
    alphanumeric characters and the following additional characters can be part of the
    alphabet: "()[]{}<>,.|*".
    """

    def __init__(self, alphabet, sequences = None, pwm = None):
        """ Initialize a motif by providing sequences or a PWM.

        Either a list of sequences or a PWM with shape (sequence length, alphabet length)
        must be provided. If both are provided, the sequences take precedence.

        Parameters
        ----------
        alphabet : str
            The alphabet of the sequences.

        sequences : [str]
            A list of strings. All strings must have the same length.

        pwm : numpy.ndarray
            A matrix of shape (sequence length, alphabet length) containing probabilities.
        """
        self.alphabet = alphabet
        # Identity check instead of '!= None': the correct idiom for None
        # comparisons, and it avoids invoking __eq__ on the argument.
        if sequences is not None:
            self._compute_counts(sequences)
        else:
            self.pwm = deepcopy(pwm)
        self._add_pseudocounts()
        self._compute_entropies()

    def _compute_counts(self, sequences):
        # Count characters column-wise. A Counter returns 0 for characters that
        # never occur at a position, so absent characters are handled for free.
        # NOTE(review): assumes 'sequences' is non-empty and all entries have
        # equal length, as documented in __init__.
        counts_per_pos = map(Counter, zip(*sequences))
        self.pwm = np.empty((len(sequences[0]), len(self.alphabet)))
        for i, pos in enumerate(counts_per_pos):
            for j, char in enumerate(self.alphabet):
                self.pwm[i, j] = pos[char]

    def _add_pseudocounts(self):
        # Normalize every row to probabilities and mix in a small uniform
        # pseudocount so that no entry is exactly zero (log2(0) would otherwise
        # fail in the entropy computation). Plain numpy arithmetic replaces the
        # original np.vectorize call, which is only a slow Python-level loop.
        uniform = 1. / len(self.alphabet)
        for i, pos in enumerate(self.pwm):
            self.pwm[i] = 0.999 * (pos / pos.sum()) + 0.001 * uniform

    def _compute_entropies(self):
        # Shannon entropy (base 2) of every PWM row, fully vectorized.
        self.entropies = -np.sum(self.pwm * np.log2(self.pwm), axis=1)

    def plot(self, colors={}, scale=1):
        """ Plot the motif.

        The color of individual letters can be defined via the colors dict using RGB values, e.g.
        {'A': '#FF0000', 'C': '#0000FF'} will result in red A's and blue C's. Non-defined characters
        will be plotted black.

        The alphabets 'ACGT', 'ACGU', and 'HIMS' have predefined colors (that can be overwritten):

        '"ACGT" -> {'A': '#00CC00', 'C': '#0000CC', 'G': '#FFB300', 'T': '#CC0000'}
        '"ACGU" -> {'A': '#00CC00', 'C': '#0000CC', 'G': '#FFB300', 'U': '#CC0000'}
        '"HIMS" -> {'H': '#CC0000', 'I': '#FFB300', 'M': '#00CC00', 'S': '#CC00FF'}

        Using, for instance, a scale parameter of 0.5 halves both height and width of the plot.

        Parameters
        ----------
        colors : dict of char->str
            A dict with individual alphabet characters as keys and hexadecimal RGB specifiers as values.

        scale : float
            Adjust the size of the plot (should be > 0).

        Returns
        -------
        image : PIL.image.image
            A Pillow image object.
        """
        # prepare colors (the mutable default {} is safe here: it is only read
        # and a deepcopy is taken before any modification)
        self.colors = deepcopy(colors)
        if self.colors == {}:
            if self.alphabet == 'ACGT':
                self.colors = {'A': '#00CC00', 'C': '#0000CC', 'G': '#FFB300', 'T': '#CC0000'}
            elif self.alphabet == 'ACGU':
                self.colors = {'A': '#00CC00', 'C': '#0000CC', 'G': '#FFB300', 'U': '#CC0000'}
            elif self.alphabet == 'HIMS':
                self.colors = {'H': '#CC0000', 'I': '#FFB300', 'M': '#00CC00', 'S': '#CC00FF'}
        # translate hex to decimal ("#RRGGBB" -> (r, g, b) tuple)
        for char in self.colors:
            if len(self.colors[char]) != 7 or not self.colors[char].startswith("#"):
                raise RuntimeError("Error: '{}' is not a valid color specifier.".format(self.colors[char]))
            self.colors[char] = ImageColor.getrgb(self.colors[char])
        # cache all alphabet character images
        img_chars = self._load_characters()
        # prepare image dimensions (one column per PWM position plus axis margins)
        w_char, h_char = img_chars[self.alphabet[0]].size
        w_col, h_col = w_char, h_char*3
        h_top, h_bottom = 40, 60
        w_total, h_total = w_col + w_col*len(self.pwm) + 40, h_top + h_col + h_bottom
        img_motif = Image.new("RGB", (w_total, h_total), "#ffffff")
        img_draw = ImageDraw.Draw(img_motif)
        # plot axes
        self._add_y_axis(img_motif, img_draw, w_col, h_col, h_top)
        self._add_x_axis(img_motif, img_draw, w_col, h_col, h_top)
        # plot sequence motif
        self._add_motif(img_motif, w_col, h_col, h_top, img_chars)
        # default height is 754 pixels
        if scale != 1:
            w_scaled, h_scaled = int(w_total*scale), int(h_total*scale)
            img_motif = img_motif.resize((w_scaled, h_scaled), Image.BICUBIC)
        for x in img_chars:
            img_chars[x].close()
        return img_motif

    def _load_characters(self):
        # Load one template image per alphabet character and recolor it if a
        # color was requested for that character.
        folder = dirname(__file__)
        img_chars = {}
        for char in self.alphabet:
            # these characters can not be used in filenames
            if char == '|':
                img_chars[char] = Image.open("{}/resources/motif/char_except0.png".format(folder))
            elif char == '<':
                img_chars[char] = Image.open("{}/resources/motif/char_except1.png".format(folder))
            elif char == '>':
                img_chars[char] = Image.open("{}/resources/motif/char_except2.png".format(folder))
            elif char == '*':
                img_chars[char] = Image.open("{}/resources/motif/char_except3.png".format(folder))
            else:
                img_chars[char] = Image.open("{}/resources/motif/char{}.png".format(folder, char))
            # change the color if needed: every non-white pixel is replaced by
            # the requested RGB value while the alpha channel is kept
            if char in self.colors:
                data = np.array(img_chars[char])
                red, green, blue, _alpha = data.T
                not_white = (red != 255) & (blue != 255) & (green != 255)
                data[..., :-1][not_white.T] = self.colors[char]
                img_chars[char] = Image.fromarray(data)
        return img_chars

    def _trim(self, img):
        # Crop away the uniform background border around the drawn content.
        bg = Image.new(img.mode, img.size, img.getpixel((0,0)))
        diff = ImageChops.difference(img, bg)
        diff = ImageChops.add(diff, diff, 2.0, -100)
        return img.crop(diff.getbbox())

    def _get_and_rotate_bits(self):
        # Render the y-axis label "bits" and rotate it by 90 degrees.
        folder = dirname(__file__)
        font_bits = ImageFont.truetype("{}/resources/motif/LiberationSans-Regular.ttf".format(folder), 70)
        img_bits = Image.new("RGB", (500, 500), "#ffffff")
        draw_bits = ImageDraw.Draw(img_bits)
        draw_bits.text((250,250), "bits", (0, 0, 0), font = font_bits)
        img_bits = img_bits.rotate(90)
        return self._trim(img_bits)

    def _add_y_axis(self, img_motif, img_draw, w_col, h_col, h_top):
        # draw the rotated "bits" label
        img_bits = self._get_and_rotate_bits()
        w_bits, h_bits = img_bits.size
        img_motif.paste(img_bits, (w_col//2 - int(1.5*w_bits), h_col//2 - h_bits//2 + h_top))
        img_bits.close()
        # draw y axis
        img_draw.line((w_col, 0+h_top, w_col, h_col+h_top), fill = "#000000", width = 5)
        # draw y ticks and labels in steps of 0.5 up to the maximum possible
        # information content log2(|alphabet|)
        folder = dirname(__file__)
        font = ImageFont.truetype("{}/resources/motif/LiberationSans-Regular.ttf".format(folder), 50)
        info_content = log(len(self.alphabet), 2)
        ticks = np.arange(0.0, info_content + 0.5, 0.5)
        for x in ticks:
            if x > info_content: break
            y_tick = h_top + h_col - h_col*(x/info_content)
            img_draw.line((w_col-20, y_tick, w_col, y_tick), fill = "#000000", width = 5)
            # NOTE(review): ImageDraw.textsize was removed in Pillow 10; if the
            # project upgrades Pillow, switch to textbbox/textlength.
            textwidth, textheight = img_draw.textsize(str(x), font)
            img_draw.text((w_col-25-textwidth, y_tick - textheight//2 - 3),
                          str(x), (0, 0, 0), font = font)

    def _add_x_axis(self, img_motif, img_draw, w_col, h_col, h_top):
        # Label each PWM column with its (1-based) sequence position.
        x_tick = w_col + 10
        folder = dirname(__file__)
        font = ImageFont.truetype("{}/resources/motif/LiberationSans-Regular.ttf".format(folder), 50)
        for i, _ in enumerate(self.pwm):
            textwidth, _ = img_draw.textsize(str(i+1), font)
            img_draw.text((x_tick + w_col//2 - textwidth//2, h_col + h_top),
                          str(i+1), (0, 0, 0), font = font)
            x_tick += w_col

    def _add_motif(self, img_motif, w_col, h_col, h_top, img_chars):
        # Draw the stacked characters: each column's total height is scaled by
        # the information content (max entropy minus observed entropy), and each
        # character's share is proportional to its probability.
        x_tick = w_col + 10
        info_content = log(len(self.alphabet), 2)
        for i, pos in enumerate(self.pwm):
            total = h_col - self.entropies[i] * (h_col/info_content)
            size_chars = [(char, int(pos[x]*total)) for x, char in enumerate(self.alphabet)]
            size_chars.sort(key = lambda x: x[1])
            y_tick = h_col + h_top
            for x in size_chars:
                y_tick = y_tick - x[1]
                scaled = img_chars[x[0]].resize((w_col, max(1, x[1])), Image.BICUBIC)
                img_motif.paste(scaled, (x_tick, y_tick), mask = scaled)
            x_tick += w_col
| 43.936937
| 108
| 0.565409
|
4a0ab6383b70a4b343e4c77257f7c13826aafbc8
| 172,875
|
py
|
Python
|
src/sage/graphs/generators/smallgraphs.py
|
Ivo-Maffei/DistanceRegular
|
d4dedd5c3e7da73111168fcce60d1f180fe24019
|
[
"BSL-1.0"
] | 1
|
2020-05-19T22:34:03.000Z
|
2020-05-19T22:34:03.000Z
|
src/sage/graphs/generators/smallgraphs.py
|
Ivo-Maffei/DistanceRegular
|
d4dedd5c3e7da73111168fcce60d1f180fe24019
|
[
"BSL-1.0"
] | null | null | null |
src/sage/graphs/generators/smallgraphs.py
|
Ivo-Maffei/DistanceRegular
|
d4dedd5c3e7da73111168fcce60d1f180fe24019
|
[
"BSL-1.0"
] | 3
|
2020-03-29T17:13:36.000Z
|
2021-05-03T18:11:28.000Z
|
# -*- coding: utf-8 -*-
r"""
Small graphs
The methods defined here appear in :mod:`sage.graphs.graph_generators`.
"""
#*****************************************************************************
# Copyright (C) 2006 Robert L. Miller <rlmillster@gmail.com>
# and Emily A. Kirkman
# Copyright (C) 2009 Michael C. Yurko <myurko@gmail.com>
#
# Distributed under the terms of the GNU General Public License (GPL)
# as published by the Free Software Foundation; either version 2 of
# the License, or (at your option) any later version.
# https://www.gnu.org/licenses/
#*****************************************************************************
from __future__ import print_function, absolute_import, division
import six
from six.moves import range
# import from Sage library
from sage.graphs.graph import Graph
from sage.rings.rational_field import QQ
from sage.functions.other import sqrt
from math import sin, cos, pi
#######################################################################
# Named Graphs
#######################################################################
def HarborthGraph():
    r"""
    Return the Harborth Graph.
    The Harborth graph has 104 edges and 52 vertices, and is the smallest known
    example of a 4-regular matchstick graph. For more information, see the
    :wikipedia:`Harborth_graph`.
    EXAMPLES::
        sage: g = graphs.HarborthGraph(); g
        Harborth Graph: Graph on 52 vertices
        sage: g.is_regular(4)
        True
    """
    # The edge set is encoded as a sparse6 string; do not edit it by hand.
    g = Graph(':s_OGKI?@_?g[QABAo__YEFCp@?iIEbqHWuWLbbh?}[OfcXpGhNHdYPY_SgdYX]'+
              'pZkfJPuo[lfZHys^mFcDs}`pG{UNNgoHC}DIgrI[qjMhTyDQrQlVydrBYmWkn',
              loops=False, multiedges=False)
    # Hand-tuned coordinates reproducing the classic unit-distance
    # (matchstick) drawing of the graph.
    g.set_pos({ 0: ( 51.5, 400.0), 1: ( 90.6, 308.0), 2: ( 90.6, 492.0),
                3: (129.8, 216.0), 4: (129.8, 584.0), 5: (150.7, 387.9),
                6: (150.7, 412.1), 7: (169.0, 124.0), 8: (169.0, 676.0),
                9: (189.9, 295.9), 10: (189.9, 504.1), 11: (229.1, 203.9),
               12: (229.1, 596.1), 13: (250.0, 400.0), 14: (251.4, 180.6),
               15: (251.4, 619.4), 16: (256.1, 300.2), 17: (256.1, 499.8),
               18: (259.3, 080.9), 19: (259.3, 719.1), 20: (333.8, 237.2),
               21: (333.8, 562.8), 22: (341.7, 137.5), 23: (341.7, 662.5),
               24: (350.0, 037.9), 25: (350.0, 336.0), 26: (350.0, 464.0),
               27: (350.0, 762.1), 28: (358.3, 137.5), 29: (358.3, 662.5),
               30: (366.2, 237.2), 31: (366.2, 562.8), 32: (440.7, 080.9),
               33: (440.7, 719.1), 34: (443.9, 300.2), 35: (443.9, 499.8),
               36: (448.6, 180.6), 37: (448.6, 619.4), 38: (450.0, 400.0),
               39: (470.9, 203.9), 40: (470.9, 596.1), 41: (510.1, 295.9),
               42: (510.1, 504.1), 43: (531.0, 124.0), 44: (531.0, 676.0),
               45: (549.3, 387.9), 46: (549.3, 412.1), 47: (570.2, 216.0),
               48: (570.2, 584.0), 49: (609.4, 308.0), 50: (609.4, 492.0),
               51: (648.5, 400.0)})
    g.name("Harborth Graph")
    return g
def HarriesGraph(embedding=1):
    r"""
    Return the Harries Graph.
    The Harries graph is a Hamiltonian 3-regular graph on 70
    vertices. See the :wikipedia:`Harries_graph`.
    The default embedding here is to emphasize the graph's 4 orbits. This graph
    actually has a funny construction. The following procedure gives an idea of
    it, though not all the adjacencies are being properly defined.
    #. Take two disjoint copies of a :meth:`Petersen graph
       <PetersenGraph>`. Their vertices will form an orbit of the final graph.
    #. Subdivide all the edges once, to create 15+15=30 new vertices, which
       together form another orbit.
    #. Create 15 vertices, each of them linked to 2 corresponding vertices of
       the previous orbit, one in each of the two subdivided Petersen graphs. At
       the end of this step all vertices from the previous orbit have degree 3,
       and the only vertices of degree 2 in the graph are those that were just
       created.
    #. Create 5 vertices connected only to the ones from the previous orbit so
       that the graph becomes 3-regular.
    INPUT:
    - ``embedding`` -- two embeddings are available, and can be selected by
      setting ``embedding`` to 1 or 2.
    EXAMPLES::
        sage: g = graphs.HarriesGraph()
        sage: g.order()
        70
        sage: g.size()
        105
        sage: g.girth()
        10
        sage: g.diameter()
        6
        sage: g.show(figsize=[10, 10])  # long time
        sage: graphs.HarriesGraph(embedding=2).show(figsize=[10, 10])  # long time
    TESTS::
        sage: graphs.HarriesGraph(embedding=3)
        Traceback (most recent call last):
        ...
        ValueError: the value of embedding must be 1 or 2
    """
    from sage.graphs.generators.families import LCFGraph
    g = LCFGraph(70, [-29, -19, -13, 13, 21, -27, 27, 33, -13, 13,
                      19, -21, -33, 29], 5)
    g.name("Harries Graph")
    # The default LCF embedding is kept as-is for embedding 2; embedding 1
    # rearranges the vertices to exhibit the graph's four orbits.
    if embedding == 2:
        return g
    if embedding != 1:
        raise ValueError("the value of embedding must be 1 or 2")
    pos = g.get_pos()
    petersen_pos = PetersenGraph().get_pos()
    # The graph's four vertex orbits.
    orbits = [
        [0, 2, 6, 8, 14, 16, 20, 22, 28, 30, 34, 36, 42, 44, 48, 50,
         56, 58, 62, 64],
        [1, 3, 5, 7, 9, 13, 15, 17, 19, 21, 23, 27, 29, 31, 33, 35,
         37, 41, 43, 45, 47, 49, 51, 55, 57, 59, 61, 63, 65, 69],
        [60, 10, 12, 4, 24, 26, 18, 38, 40, 32, 52, 54, 46, 66, 68],
        [11, 25, 39, 53, 67],
    ]
    # Vertices of one Petersen copy inside orbits[0], mapped onto the
    # vertices of a standard Petersen graph object.
    first_copy = {0: 0, 2: 1, 42: 5, 44: 8, 14: 7, 16: 2, 56: 9, 58: 6,
                  28: 4, 30: 3}
    # Vertices of the other Petersen copy, mapped onto the first one.
    second_copy = {64: 44, 34: 0, 36: 28, 6: 2, 8: 58, 48: 16, 50: 30,
                   20: 14, 22: 56, 62: 42}
    # Lay out the first copy like a standard Petersen graph.
    for v, w in first_copy.items():
        pos[v] = petersen_pos[w]
    # Lay out the second copy by shifting the first copy's coordinates to the
    # right; the matching first-copy vertex is moved left by the same amount.
    half_offset = 3.5 * 0.5
    for v, w in second_copy.items():
        x, y = pos[w]
        pos[v] = (x + half_offset, y)
        pos[w] = (x - half_offset, y)
    # Orbit 1 contains the subdivision vertices (the "edges" of the Petersen
    # copies); each one sits at the midpoint of its two orbit-0 neighbors.
    for v in orbits[1]:
        (x1, y1), (x2, y2) = [pos[u] for u in g.neighbors(v) if u in orbits[0]]
        pos[v] = ((x1 + x2) / 2, (y1 + y2) / 2)
    # The 15 vertices of orbit 2, on a horizontal line.
    for i, v in enumerate(orbits[2]):
        pos[v] = (-1.75 + i * .25, 2)
    # The 5 vertices of orbit 3, on a higher horizontal line.
    for i, v in enumerate(orbits[3]):
        pos[v] = (-1 + i * .5, 2.5)
    return g
def HarriesWongGraph(embedding=1):
    r"""
    Return the Harries-Wong Graph.
    See the :wikipedia:`Harries-Wong_graph`.
    *About the default embedding:*
    The default embedding is an attempt to emphasize the graph's 8 (!!!)
    different orbits. In order to understand this better, one can picture the
    graph as being built in the following way:
    #. One first creates a 3-dimensional cube (8 vertices, 12 edges), whose
       vertices define the first orbit of the final graph.
    #. The edges of this graph are subdivided once, to create 12 new
       vertices which define a second orbit.
    #. The edges of the graph are subdivided once more, to create 24 new
       vertices giving a third orbit.
    #. 4 vertices are created and made adjacent to the vertices of the
       second orbit so that they have degree 3. These 4 vertices also define
       a new orbit.
    #. In order to make the vertices from the third orbit 3-regular (they
       all miss one edge), one creates a binary tree on 1 + 3 + 6 + 12
       vertices. The leaves of this new tree are made adjacent to the 12
       vertices of the third orbit, and the graph is now 3-regular. This
       binary tree contributes 4 new orbits to the Harries-Wong graph.
    INPUT:
    - ``embedding`` -- two embeddings are available, and can be selected by
      setting ``embedding`` to 1 or 2.
    EXAMPLES::
        sage: g = graphs.HarriesWongGraph()
        sage: g.order()
        70
        sage: g.size()
        105
        sage: g.girth()
        10
        sage: g.diameter()
        6
        sage: orbits = g.automorphism_group(orbits=True)[-1]  # long time
        sage: g.show(figsize=[15, 15], partition=orbits)  # long time
    Alternative embedding::
        sage: graphs.HarriesWongGraph(embedding=2).show()
    TESTS::
        sage: graphs.HarriesWongGraph(embedding=3)
        Traceback (most recent call last):
        ...
        ValueError: the value of embedding must be 1 or 2
    """
    # LCF notation for the Harries-Wong graph (exponent 1: the whole sequence
    # is applied once around the 70-cycle).
    L = [9, 25, 31, -17, 17, 33, 9, -29, -15, -9, 9, 25, -25, 29, 17, -9,
         9, -27, 35, -9, 9, -17, 21, 27, -29, -9, -25, 13, 19, -9, -33,
         -17, 19, -31, 27, 11, -25, 29, -33, 13, -13, 21, -29, -21, 25,
         9, -11, -19, 29, 9, -27, -19, -13, -35, -9, 9, 17, 25, -9, 9, 27,
         -27, -21, 15, -9, 29, -29, 33, -9, -25]
    from sage.graphs.generators.families import LCFGraph
    g = LCFGraph(70, L, 1)
    g.name("Harries-Wong graph")
    if embedding == 1:
        d = g.get_pos()
        # Binary tree (left side): root, then the three tree levels laid out
        # on vertical lines of increasing length.
        d[66] = (-9.5, 0)
        g._line_embedding([37, 65, 67], first=(-8, 2.25),
                last=(-8, -2.25))
        g._line_embedding([36, 38, 64, 24, 68, 30], first=(-7, 3),
                last=(-7, -3))
        g._line_embedding([35, 39, 63, 25, 59, 29, 11, 5, 55, 23, 69, 31],
                first=(-6, 3.5), last=(-6, -3.5))
        # Cube, corners: [9, 15, 21, 27, 45, 51, 57, 61]
        g._circle_embedding([61, 9], center=(0, -1.5), shift=.2,
                radius=4)
        g._circle_embedding([27, 15], center=(0, -1.5), shift=.7,
                radius=4*.707)
        g._circle_embedding([51, 21], center=(0, 2.5), shift=.2,
                radius=4)
        g._circle_embedding([45, 57], center=(0, 2.5), shift=.7,
                radius=4*.707)
        # Cube, subdivision: each cube edge together with its three
        # subdivision vertices is laid out on a straight segment between the
        # two corners it connects.
        g._line_embedding([21, 22, 43, 44, 45], first=d[21], last=d[45])
        g._line_embedding([21, 4, 3, 56, 57], first=d[21], last=d[57])
        g._line_embedding([57, 12, 13, 14, 15], first=d[57], last=d[15])
        g._line_embedding([15, 6, 7, 8, 9], first=d[15], last=d[9])
        g._line_embedding([9, 10, 19, 20, 21], first=d[9], last=d[21])
        g._line_embedding([45, 54, 53, 52, 51], first=d[45], last=d[51])
        g._line_embedding([51, 50, 49, 58, 57], first=d[51], last=d[57])
        g._line_embedding([51, 32, 33, 34, 61], first=d[51], last=d[61])
        g._line_embedding([61, 62, 41, 40, 27], first=d[61], last=d[27])
        g._line_embedding([9, 0, 1, 26, 27], first=d[9], last=d[27])
        g._line_embedding([27, 28, 47, 46, 45], first=d[27], last=d[45])
        g._line_embedding([15, 16, 17, 60, 61], first=d[15], last=d[61])
        # Top vertices: the 4 degree-3 vertices attached to the second orbit.
        g._line_embedding([2, 18, 42, 48], first=(-1, 7), last=(3, 7))
        return g
    elif embedding == 2:
        return g
    else:
        raise ValueError("the value of embedding must be 1 or 2")
def WellsGraph():
    r"""
    Return the Wells graph.
    For more information on the Wells graph (also called Armanios-Wells graph),
    see `this page <https://www.win.tue.nl/~aeb/graphs/Wells.html>`_.
    The implementation follows the construction given on page 266 of
    [BCN1989]_. This requires to create intermediate graphs and run a small
    isomorphism test, while everything could be replaced by a pre-computed list
    of edges : I believe that it is better to keep "the recipe" in the code,
    however, as it is quite unlikely that this could become the most
    time-consuming operation in any sensible algorithm, and .... "preserves
    knowledge", which is what open-source software is meant to do.
    EXAMPLES::
        sage: g = graphs.WellsGraph(); g
        Wells graph: Graph on 32 vertices
        sage: g.order()
        32
        sage: g.size()
        80
        sage: g.girth()
        5
        sage: g.diameter()
        4
        sage: g.chromatic_number()
        4
        sage: g.is_regular(k=5)
        True
    """
    from .platonic_solids import DodecahedralGraph
    from .basic import CompleteBipartiteGraph
    # Following the construction from the book "Distance-regular graphs"
    dodecahedron = DodecahedralGraph()
    # Vertices at distance 3 in the Dodecahedron
    distance3 = dodecahedron.distance_graph([3])
    # Building the graph whose line graph is the dodecahedron: K_{5,5} minus a
    # perfect matching (the Petersen graph's bipartite double).
    b = CompleteBipartiteGraph(5,5)
    b.delete_edges([(0,5), (1,6), (2,7), (3,8), (4,9)])
    # Computing the isomorphism between the two
    b = b.line_graph(labels = False)
    _, labels = distance3.is_isomorphic(b, certificate=True)
    # The relabeling that the books claims to exist: each dodecahedron vertex
    # becomes a pair (x, y) with x, y in Z/5Z.
    for v,new_name in labels.items():
        x,y = new_name
        labels[v] = (x%5,y%5)
    dodecahedron.relabel(labels)
    # Checking that the above computations indeed produces a good labeling:
    # two distinct vertices sharing a coordinate must be at distance 3.
    for u in dodecahedron:
        for v in dodecahedron:
            if u == v:
                continue
            if (u[0] != v[0]) and (u[1] != v[1]):
                continue
            if dodecahedron.distance(u,v) != 3:
                raise ValueError("there is something wrong going on !")
    # The graph we will return, starting from the dodecahedron
    g = dodecahedron
    # Good ! Now adding 12 new vertices: (i, '+') and (i, '-') for i in Z/5Z,
    # plus the two apex vertices ('inf', '+') and ('inf', '-').
    for i in range(5):
        g.add_edge((i,'+'),('inf','+'))
        g.add_edge((i,'-'),('inf','-'))
        for k in range(5):
            if k == i:
                continue
            g.add_edge((i,'+'),(i,k))
            g.add_edge((i,'-'),(k,i))
    g.name("Wells graph")
    # Giving our graph a "not-so-bad" layout
    g.relabel({
        (1, 3): 8, (3, 0): 18, (3, '+'): 22, (2, 1): 13,
        (1, '+'): 10, (0, 3): 2, (2, '+'): 16, ('inf', '-'): 31,
        (4, 0): 24, (1, 2): 7, (4, '+'): 28, (0, '-'): 5,
        (0, 4): 3, (4, 1): 25, (2, '-'): 17, (3, 2): 20,
        (3, '-'): 23, (1, '-'): 11, (1, 4): 9, (2, 3): 14,
        ('inf', '+'): 30, (4, 2): 26, (1, 0): 6, (0, 1): 0,
        (3, 1): 19, (0, 2): 1, (2, 0): 12, (4, '-'): 29,
        (0, '+'): 4, (4, 3): 27, (3, 4): 21, (2, 4): 15})
    # Four concentric circles of 8 vertices each.
    p = [(1, 29, 20, 13, 12, 28, 14, 7),
         (2, 5, 30, 23, 18, 4, 31, 22),
         (3, 17, 21, 9, 24, 16, 27, 25),
         (6, 10, 8, 15, 0, 11, 19, 26)]
    g._circle_embedding(p[0], radius = 1)
    g._circle_embedding(p[1], radius = .9)
    g._circle_embedding(p[2], radius = .8)
    g._circle_embedding(p[3], radius = .7)
    return g
def Cell600(embedding=1):
    r"""
    Return the 600-Cell graph.
    This is the adjacency graph of the 600-cell. It has 120 vertices and 720
    edges. For more information, see the :wikipedia:`600-cell`.
    INPUT:
    - ``embedding`` (1 (default) or 2) -- two different embeddings for a plot.
    EXAMPLES::
        sage: g = graphs.Cell600()      # long time
        sage: g.size()                  # long time
        720
        sage: g.is_regular(12)          # long time
        True
        sage: g.is_vertex_transitive()  # long time
        True
    """
    from sage.rings.polynomial.polynomial_ring import polygen
    from sage.rings.number_field.number_field import NumberField
    from sage.modules.free_module import VectorSpace
    from sage.groups.perm_gps.permgroup_named import AlternatingGroup
    # Work in Q(f) where f is the golden ratio (root of x^2 - x - 1), the
    # natural coordinate field of the 600-cell.
    x = polygen(QQ, 'x')
    K = NumberField(x ** 2 - x - 1, 'f')
    f = K.gen()
    K4 = VectorSpace(K, 4)
    # first 96 vertices: even permutations of (+-f/2, +-1/2, +-(f-1)/2, 0)
    step = [[a * f / 2, b * K(1) / 2, c * (f - 1) / 2, 0]
            for a in [-1, 1] for b in [-1, 1] for c in [-1, 1]]
    vert96 = [K4([v[s(1) - 1], v[s(2) - 1], v[s(3) - 1], v[s(4) - 1]])
              for v in step for s in AlternatingGroup(4)]
    # 16 more vertices: (+-1/2, +-1/2, +-1/2, +-1/2)
    vert16 = [K4([K(a) / 2, K(b) / 2, K(c) / 2, K(d) / 2])
              for a in [-1, 1] for b in [-1, 1]
              for c in [-1, 1] for d in [-1, 1]]
    # 8 last vertices: the unit vectors and their negatives
    vert8 = [K4([1, 0, 0, 0]), K4([-1, 0, 0, 0]),
             K4([0, 1, 0, 0]), K4([0, -1, 0, 0]),
             K4([0, 0, 1, 0]), K4([0, 0, -1, 0]),
             K4([0, 0, 0, 1]), K4([0, 0, 0, -1])]
    # all vertices together; two vertices are adjacent exactly when the inner
    # product of their coordinate vectors is f/2
    U = vert96 + vert16 + vert8
    g = Graph([list(range(120)),
               lambda i, j: U[i].inner_product(U[j]) == f / 2])
    # Embedding: two pre-computed vertex orderings placed on a circle.
    if embedding == 1:
        pos = [0, 1, 3, 13, 78, 90, 93, 110, 29, 104, 11, 48, 107, 83, 92, 55,
               32, 16, 117, 24, 26, 56, 52, 47, 75, 72, 66, 112, 27, 115, 21,
               33, 118, 79, 91, 37, 2, 5, 96, 31, 82, 88, 94, 74, 50, 28, 20,
               105, 45, 99, 70, 25, 101, 54, 46, 51, 17, 35, 98, 41, 84, 85,
               87, 73, 18, 6, 9, 97, 65, 103, 95, 36, 100, 23, 8, 43, 68, 76,
               116, 60, 62, 44, 40, 59, 15, 12, 30, 113, 63, 114, 81, 69, 119,
               19, 7, 49, 86, 89, 111, 67, 22, 4, 10, 14, 38, 64, 80, 102, 57,
               108, 34, 61, 106, 42, 58, 39, 77, 71, 109, 53]
    else:
        pos = [0, 1, 2, 3, 4, 6, 7, 8, 10, 13, 14, 21, 37, 103, 36, 65, 113,
               25, 80, 26, 12, 78, 24, 83, 54, 66, 114, 46, 63, 101, 109, 93,
               79, 75, 51, 44, 31, 119, 43, 5, 57, 100, 11, 108, 34, 41, 69,
               96, 82, 116, 68, 64, 47, 102, 52, 35, 17, 76, 110, 38, 84, 85,
               86, 87, 88, 90, 91, 92, 94, 73, 74, 81, 49, 104, 48, 29, 112,
               61, 20, 62, 72, 18, 60, 23, 42, 30, 115, 58, 27, 106, 98, 9, 19,
               15, 39, 56, 67, 118, 55, 89, 45, 107, 95, 99, 70, 53, 33, 111,
               22, 117, 32, 28, 59, 105, 40, 71, 77, 16, 97, 50]
    g._circle_embedding(pos)
    return g
def Cell120():
    r"""
    Return the 120-Cell graph.
    This is the adjacency graph of the 120-cell. It has 600 vertices and 1200
    edges. For more information, see the :wikipedia:`120-cell`.
    EXAMPLES::
        sage: g = graphs.Cell120()      # long time
        sage: g.size()                  # long time
        1200
        sage: g.is_regular(4)           # long time
        True
        sage: g.is_vertex_transitive()  # long time
        True
    """
    from sage.rings.polynomial.polynomial_ring import polygen
    from sage.rings.number_field.number_field import NumberField
    from sage.modules.free_module import VectorSpace
    from sage.groups.perm_gps.permgroup_named import AlternatingGroup
    from sage.combinat.permutation import Permutations
    # Work in Q(f) where f is the golden ratio (root of x^2 - x - 1), the
    # natural coordinate field of the 120-cell.
    x = polygen(QQ, 'x')
    K = NumberField(x ** 2 - x - 1, 'f')
    f = K.gen()
    K4 = VectorSpace(K, 4)
    # first 216 vertices: all permutations of four sign-pattern families
    step = [(0, 0, K(a) * 2, K(b) * 2)
            for a in [-1, 1] for b in [-1, 1]]
    step += [(a * K(1), b * K(1), c * K(1), d * (2 * f - 1))
             for a in [-1, 1] for b in [-1, 1]
             for c in [-1, 1] for d in [-1, 1]]
    step += [(a * (2 - f), b * f, c * f, d * f)
             for a in [-1, 1] for b in [-1, 1]
             for c in [-1, 1] for d in [-1, 1]]
    step += [(a * (f - 1), b * (f - 1), c * (f - 1), d * (f + 1))
             for a in [-1, 1] for b in [-1, 1]
             for c in [-1, 1] for d in [-1, 1]]
    # deduplicate via frozenset: different sign patterns can give the same
    # coordinate vector after permutation
    ens1 = frozenset([(v[s(1) - 1], v[s(2) - 1], v[s(3) - 1], v[s(4) - 1])
                      for v in step for s in Permutations(4)])
    vert1 = [K4(w) for w in ens1]
    # 384 more vertices: even permutations of three further families
    step = [(0, a * (2 - f), b * K(1), c * (f + 1))
            for a in [-1, 1] for b in [-1, 1] for c in [-1, 1]]
    step += [(0, a * (f - 1), b * f, c * (2 * f - 1))
             for a in [-1, 1] for b in [-1, 1] for c in [-1, 1]]
    step += [(a * (f - 1), b * K(1), c * f, d * K(2))
             for a in [-1, 1] for b in [-1, 1]
             for c in [-1, 1] for d in [-1, 1]]
    vert2 = [K4([v[s(1) - 1], v[s(2) - 1], v[s(3) - 1], v[s(4) - 1]])
             for v in step for s in AlternatingGroup(4)]
    # all vertices together; two vertices are adjacent exactly when the inner
    # product of their coordinate vectors is 6f - 2
    U = vert1 + vert2
    g = Graph([list(range(600)),
               lambda i, j: U[i].inner_product(U[j]) == 6*f-2])
    # Pre-computed circular embedding order for the 600 vertices.
    pos = [0, 1, 3, 5, 6, 7, 8, 9, 11, 12, 14, 15, 16, 17, 20, 21, 23, 24, 25,
           27, 33, 40, 47, 49, 76, 77, 216, 217, 218, 219, 220, 222, 224, 225,
           226, 230, 231, 232, 233, 235, 238, 241, 242, 245, 247, 249, 251, 253,
           260, 261, 211, 66, 26, 307, 598, 305, 187, 374, 311, 205, 296, 108,
           366, 172, 255, 89, 229, 81, 529, 548, 439, 382, 166, 496, 313, 484,
           402, 234, 530, 256, 358, 406, 553, 577, 583, 401, 334, 417, 257, 438,
           373, 544, 509, 365, 378, 487, 377, 390, 349, 325, 65, 78, 184, 13,
           185, 18, 210, 84, 145, 83, 180, 158, 118, 109, 103, 130, 105, 51,
           178, 155, 110, 85, 206, 95, 204, 190, 514, 513, 515, 466, 467, 441,
           442, 587, 585, 576, 565, 564, 566, 540, 506, 436, 435, 424, 507, 543,
           545, 547, 582, 440, 169, 63, 29, 575, 237, 549, 37, 375, 430, 159,
           457, 61, 331, 208, 498, 39, 578, 48, 244, 486, 411, 364, 73, 455,
           321, 240, 381, 542, 243, 500, 343, 333, 271, 518, 552, 357, 314, 299,
           499, 412, 376, 596, 561, 319, 400, 264, 388, 362, 355, 386, 87, 186,
           52, 99, 125, 113, 36, 121, 41, 127, 149, 100, 31, 137, 177, 43, 32,
           45, 62, 191, 188, 106, 195, 141, 142, 96, 489, 491, 490, 475, 474,
           447, 448, 589, 588, 517, 472, 473, 471, 450, 419, 519, 521, 468, 562,
           594, 595, 488, 554, 413, 167, 116, 4, 557, 504, 536, 170, 389, 410,
           128, 559, 203, 348, 147, 477, 22, 516, 162, 423, 266, 274, 320, 144,
           246, 395, 437, 363, 452, 425, 478, 315, 312, 428, 288, 270, 344, 323,
           493, 479, 275, 387, 286, 284, 347, 359, 462, 336, 368, 392, 324, 44,
           75, 69, 46, 57, 138, 35, 80, 88, 199, 70, 152, 161, 181, 34, 207,
           164, 71, 115, 55, 163, 72, 171, 93, 165, 124, 300, 301, 302, 303,
           304, 306, 308, 309, 310, 290, 291, 292, 293, 295, 298, 277, 278, 281,
           283, 285, 287, 265, 272, 273, 19, 10, 107, 223, 418, 221, 67, 338,
           227, 196, 236, 91, 354, 154, 267, 30, 289, 215, 469, 464, 571, 346,
           151, 508, 397, 520, 318, 294, 470, 268, 370, 322, 445, 421, 427, 317,
           394, 597, 269, 570, 337, 460, 497, 353, 342, 523, 341, 330, 361, 385,
           126, 92, 94, 176, 135, 117, 114, 197, 214, 179, 60, 42, 198, 202,
           102, 101, 174, 104, 146, 90, 38, 111, 122, 157, 153, 133, 502, 501,
           503, 550, 551, 573, 574, 431, 429, 420, 433, 432, 434, 456, 494, 568,
           567, 580, 495, 459, 461, 463, 426, 572, 182, 58, 82, 443, 297, 465,
           86, 339, 586, 209, 541, 140, 391, 143, 510, 28, 422, 213, 280, 522,
           591, 352, 120, 563, 405, 276, 345, 458, 279, 512, 379, 393, 259, 482,
           444, 369, 398, 239, 511, 592, 340, 416, 453, 403, 316, 252, 328, 350,
           367, 326, 2, 175, 97, 139, 74, 131, 173, 134, 193, 192, 132, 79, 50,
           200, 64, 150, 201, 194, 212, 183, 54, 56, 98, 123, 112, 156, 525,
           527, 526, 535, 534, 555, 556, 409, 408, 481, 532, 533, 531, 558, 599,
           483, 485, 528, 454, 414, 415, 524, 446, 593, 160, 59, 68, 449, 492,
           476, 148, 329, 590, 119, 451, 189, 360, 53, 537, 129, 480, 136, 579,
           254, 262, 404, 168, 282, 335, 569, 351, 560, 581, 538, 399, 396, 584,
           228, 258, 380, 407, 505, 539, 263, 327, 250, 248, 383, 371, 546, 372,
           356, 332, 384]
    g._circle_embedding(pos)
    return g
def SuzukiGraph():
    r"""
    Return the Suzuki Graph.
    The Suzuki graph has 1782 vertices, and is strongly regular with parameters
    `(1782,416,100,96)`. Known as S.15 in [Hub1975]_.
    .. NOTE::
        It takes approximately 50 seconds to build this graph. Do not be too
        impatient.
    EXAMPLES::
        sage: g = graphs.SuzukiGraph(); g  # optional internet # not tested
        Suzuki graph: Graph on 1782 vertices
        sage: g.is_strongly_regular(parameters=True)  # optional internet # not tested
        (1782, 416, 100, 96)
    """
    from sage.groups.perm_gps.permgroup_named import SuzukiSporadicGroup
    # The edge set is the orbit of the pair {1, 2} under the action of the
    # sporadic Suzuki group on 2-element sets.
    edge_orbit = SuzukiSporadicGroup().orbit((1,2),"OnSets")
    graph = Graph()
    graph.add_edges(edge_orbit)
    graph.relabel()
    graph.name("Suzuki graph")
    return graph
def HallJankoGraph(from_string=True):
    r"""
    Return the Hall-Janko graph.
    For more information on the Hall-Janko graph, see the
    :wikipedia:`Hall-Janko_graph`.
    The construction used to generate this graph in Sage is by a 100-point
    permutation representation of the Janko group `J_2`, as described in version
    3 of the ATLAS of Finite Group representations, in particular on the page
    `ATLAS: J2 -- Permutation representation on 100 points
    <http://brauer.maths.qmul.ac.uk/Atlas/v3/permrep/J2G1-p100B0>`_.
    INPUT:
    - ``from_string`` (boolean) -- whether to build the graph from its sparse6
      string or through GAP. The two methods return the same graph though doing
      it through GAP takes more time. It is set to ``True`` by default.
    EXAMPLES::
        sage: g = graphs.HallJankoGraph()
        sage: g.is_regular(36)
        True
        sage: g.is_vertex_transitive()
        True
    Is it really strongly regular with parameters 14, 12? ::
        sage: nu = set(g.neighbors(0))
        sage: for v in range(1, 100):
        ....:     if v in nu:
        ....:         expected = 14
        ....:     else:
        ....:         expected = 12
        ....:     nv = set(g.neighbors(v))
        ....:     nv.discard(0)
        ....:     if len(nu & nv) != expected:
        ....:         print("Something is wrong here!!!")
        ....:         break
    Some other properties that we know how to check::
        sage: g.diameter()
        2
        sage: g.girth()
        3
        sage: factor(g.characteristic_polynomial())
        (x - 36) * (x - 6)^36 * (x + 4)^63
    TESTS::
        sage: gg = graphs.HallJankoGraph(from_string=False)  # long time
        sage: g == gg  # long time
        True
    """
    # Pre-computed sparse6 encoding of the graph; do not edit by hand.
    string = (":~?@c__E@?g?A?w?A@GCA_?CA`OWF`W?EAW?@?_OD@_[GAgcIaGGB@OcIA"
              "wCE@o_K_?GB@?WGAouC@OsN_?GB@O[GB`A@@_e?@OgLB_{Q_?GC@O[GAOs"
              "OCWGBA?kKBPA@?_[KB_{OCPKT`o_RD`]A?o[HBOwODW?DA?cIB?wRDP[X`"
              "ogKB_{QD@]B@o_KBPWXE`mC@o_JB?{PDPq@?oWGA_{OCPKTDp_YEwCA@_c"
              "IBOwOC`OX_OGB@?WPDPcYFg?C@_gKBp?SE@cYF`{_`?SGAOoOC`_\\FwCE"
              "A?gKBO{QD@k[FqI??_OFA_oQE@k\\Fq?`GgCB@pGRD@_XFP{a_?SE@ocIA"
              "ooNCPOUEqU@?oODA?cJB_{UEqYC@_kLC@CREPk]GAGbHgCA@?SMBpCSD`["
              "YFq?`Ga]BA?gPC`KSD`_\\Fa?cHWGB@?[IAooPD`[WF@s^HASeIg?@@OcP"
              "C`KYF@w^GQ[h`O[HAooMC@CQCpSVEPk\\GaSeIG?FA?kLB_{OC`OVE@cYG"
              "QUA@?WLBp?PC`KVEqKgJg?DA?sMBpCSDP[WEQKfIay@?_KD@_[GC`SUE@k"
              "[FaKdHa[k_?OLC@CRD@WVEpo^HAWfIAciIqoo_?CB@?kMCpOUE`o\\GAKg"
              "IQgq_?GD@_[GB?{OCpWVE@cYFACaHAWhJR?q_?CC@_kKBpC\\GACdHa[kJ"
              "a{o_?CA?oOFBpGRD@o\\GaKdIQonKrOt_?WHA`?PC`KTD`k]FqSeIaolJr"
              "CqLWCA@OkKCPGRDpcYGAKdIAgjJAsmJr?t__OE@ogJB_{XEps`HA[gIQwn"
              "KWKGAOoMBpGUE`k[Fa?aHqckJbSuLw?@?_SHA_kLC@OTFPw^GaOkLg?B@?"
              "[HA_{PDP_XFaCbHa[gIqooKRWx_?CFBpOTE@cZFPw^GACcHQgoKrSvMwWG"
              "BOwQCp_YFP{`HASfJAwnKRSx_OSSDP[WEq?aGqSfIQsoKR_zNWCE@o_HA_"
              "sREPg^GAGcHQWfIAciKbOxNg?A@__IAooMC`KTD`g\\GAKcIasoKrOtLb["
              "wMbyCA?cKBp?TD`[WE`s^GQGbHqcjJrK{NRw~_oODA?sNC@CQCpOZF@s]G"
              "QOfIaolJrGsLbk}_?OFA_sRD@SVE`k[HQcjJa{qLb[xMb|?_OOFA?cIAos"
              "RDP_ZFa?aGqOfIAsuMbk{Ns@@OsQAA_sPDPWXE`o\\FqKdIQkkJrCuLr_x"
              "Mro}NsDAPG?@@OWFApKUE@o`IQolKRKsLrc|NsQC@OWGAOgJCpOWE`o_GQ"
              "KiIqwnKr_~OcLCPS]A?oWHA_oMBpKSDP[\\FagjKBWxMbk{OSQ@@O_IAoo"
              "LBpCSD`g\\FaGbHQWgIQgmKRKwMRl?PgGC@OWHB@KSE@c[FqCaGqSeIAkk"
              "KBCqLBSuMBpGQWCA@?cKBOwRDPWVE@k^GqOfJr?pKbKtLrs}OSHDQwKIBO"
              "wPD@WWEQ?`HQWfIQglKBOtLbo}Ns@@OsTE_?kLCpWWHA[gIqomKBGwMRgz"
              "NBw~OSPDPc\\H_?CFAOoLCPSVE`o\\GAOeJAwpKbKtMrx?Qcq??OKFA?gJ"
              "B`?QDpcYEpo]FqKfIAgjJB?qKr_{NS@A__SE@o_HBO{PC`OTD`{_HaciIq"
              "{vMbt?OcPFQCeB@?SKBOwRD@SXE`k[FPw`HQ_lKRKxNRxBPC\\HQclK_?K"
              "EB?sOC`OTDa?`GqWgJRCrNBw~OSHFQStMRtDQ_?KC@OoQE`k_GaOdHa[gI"
              "q{tMBg|Nb|?OcPMSDDQSwCB@_cJB_{OCpOVFP{dHa[jJQwqKrk}NsHBQCd"
              "MRtMA?oSEA_wPDp_YEpo]GAOeIq{pLBk}NsLEQCtNTDU??OKEA_oLC@[[G"
              "aKnKBOtLbk~OCPFQStNSDLSTgGKC@GSD`[WEpw_GQGcIAciJAwpKb_xMbk"
              "~QShJRc|R`_wNCPcZF@s^GAGbHA_hJR?qKrOvMRg|NsDEPsxTTgCB@?gJB"
              "?sMC@CUDp_]FqCaHQcjJQwtLrhCPS\\IRCtQTw?B@?SHA_wPC`_aGqOiJa"
              "{oKRKvMRpFQChKRtXVUTi??ocNC@KUE@cYFaGdHa_mJrKsLb[yMro|OcXI"
              "RdPTTddZaOgJB@?UEPk[FQCfIaolJrSvMBczNR|AOsXFQCtOTtaB@?WGAP"
              "?TEPo\\GAGdHqgmKBCqLR[xMb|?PC`HQs|TTt`XUtu@?o[HB?sNCPGXF@{"
              "_GQKcIqolJb_yNCLDPs`MRtDRTTdYUwSEA?kLB`CWF@s]FqGgIqooLRgzN"
              "RxFQSlMSDDQTDXVUTi@?_KDAOoLBpKUEQOfIa{oLB_xMrt?Os\\HQcpMST"
              "HSTtl[VT}A@ocJBOwSD`_XEpo_Ha_mJrKtLbgzNSTGQspLRtDUUDp\\WG["
              "HB`CQCp[WFQGgIQgkJQ{rLbc{Nc@APsdLRt@PSt\\WUtt_Wn")
    if from_string:
        g = Graph(string, loops = False, multiedges = False)
    else:
        # The following construction is due to version 3 of the ATLAS of
        # Finite Group Representations, specifically the page at
        # http://brauer.maths.qmul.ac.uk/Atlas/v3/permrep/J2G1-p100B0 .
        # g1 and g2 are the two standard generators of J_2 in its 100-point
        # permutation representation; the graph is the orbit of {1, 5}.
        from sage.interfaces.gap import gap
        gap.eval("g1 := (1,84)(2,20)(3,48)(4,56)(5,82)(6,67)(7,55)(8,41)"
                 "(9,35)(10,40)(11,78)(12,100)(13,49)(14,37)(15,94)(16,76)"
                 "(17,19)(18,44)(21,34)(22,85)(23,92)(24,57)(25,75)(26,28)"
                 "(27,64)(29,90)(30,97)(31,38)(32,68)(33,69)(36,53)(39,61)"
                 "(42,73)(43,91)(45,86)(46,81)(47,89)(50,93)(51,96)(52,72)"
                 "(54,74)(58,99)(59,95)(60,63)(62,83)(65,70)(66,88)(71,87)"
                 "(77,98)(79,80);")
        gap.eval("g2 := (1,80,22)(2,9,11)(3,53,87)(4,23,78)(5,51,18)"
                 "(6,37,24)(8,27,60)(10,62,47)(12,65,31)(13,64,19)"
                 "(14,61,52)(15,98,25)(16,73,32)(17,39,33)(20,97,58)"
                 "(21,96,67)(26,93,99)(28,57,35)(29,71,55)(30,69,45)"
                 "(34,86,82)(38,59,94)(40,43,91)(42,68,44)(46,85,89)"
                 "(48,76,90)(49,92,77)(50,66,88)(54,95,56)(63,74,72)"
                 "(70,81,75)(79,100,83);")
        gap.eval("G := Group([g1,g2]);")
        edges = gap('Orbit(G,[1,5],OnSets)').sage()
        g = Graph([(int(u), int(v)) for u,v in edges])
        # GAP's points are 1..100; relabel to 0..99 to match the sparse6 form.
        g.relabel(range(100))
    g._circle_embedding(list(range(100)))
    g.name("Hall-Janko graph")
    return g
def Balaban10Cage(embedding=1):
    r"""
    Return the Balaban 10-cage.

    The Balaban 10-cage is a 3-regular graph with 70 vertices and 105 edges. See
    the :wikipedia:`Balaban_10-cage`.

    The default embedding gives a deeper understanding of the graph's
    automorphism group. It is divided into 4 layers (each layer being a set of
    points at equal distance from the drawing's center). From outside to inside:

    - L1: The outer layer (vertices which are the furthest from the origin) is
      actually the disjoint union of two cycles of length 10.

    - L2: The second layer is an independent set of 20 vertices.

    - L3: The third layer is a matching on 10 vertices.

    - L4: The inner layer (vertices which are the closest from the origin) is
      also the disjoint union of two cycles of length 10.

    This graph is not vertex-transitive, and its vertices are partitioned into 3
    orbits: L2, L3, and the union of L1 of L4 whose elements are equivalent.

    INPUT:

    - ``embedding`` -- two embeddings are available, and can be selected by
      setting ``embedding`` to be either 1 or 2.

    EXAMPLES::

        sage: g = graphs.Balaban10Cage()
        sage: g.girth()
        10
        sage: g.chromatic_number()
        2
        sage: g.diameter()
        6
        sage: g.is_hamiltonian()
        True
        sage: g.show(figsize=[10,10])  # long time

    TESTS::

        sage: graphs.Balaban10Cage(embedding='foo')
        Traceback (most recent call last):
        ...
        ValueError: the value of embedding must be 1 or 2
    """
    # LCF (Lederberg-Coxeter-Frucht) code of the graph: each entry is the
    # chord offset attached to successive vertices of a 70-cycle; the final
    # argument 1 of LCFGraph means the sequence is applied exactly once.
    L = [-9, -25, -19, 29, 13, 35, -13, -29, 19, 25, 9, -29, 29, 17, 33,
         21, 9,-13, -31, -9, 25, 17, 9, -31, 27, -9, 17, -19, -29, 27,
         -17, -9, -29, 33, -25,25, -21, 17, -17, 29, 35, -29, 17, -17,
         21, -25, 25, -33, 29, 9, 17, -27, 29, 19, -17, 9, -27, 31, -9,
         -17, -25, 9, 31, 13, -9, -21, -33, -17, -29, 29]
    from sage.graphs.generators.families import LCFGraph
    g = LCFGraph(70, L, 1)
    g.name("Balaban 10-cage")

    # embedding 2 is the plain LCF circular layout produced above; any other
    # value except 1 is rejected before the layered layout is computed.
    if embedding == 2:
        return g
    elif embedding != 1:
        raise ValueError("the value of embedding must be 1 or 2")

    # Layered embedding (see the docstring): L3 innermost matching, L2 the
    # independent set, L1a/L1b the two outer 10-cycles, L4a/L4b the two inner
    # 10-cycles.  The shift arguments rotate each ring so edges do not cross
    # more than necessary.
    L3 = [5, 24, 35, 46, 29, 40, 51, 34, 45, 56]
    g._circle_embedding(L3, center=(0, 0), radius = 4.3)

    L2 = [6, 4, 23, 25, 60, 36, 1, 47, 28, 30, 39, 41, 50, 52, 33, 9, 44,
          20, 55, 57]
    g._circle_embedding(L2, center=(0, 0), radius = 5, shift=-.5)

    L1a = [69, 68, 67, 66, 65, 64, 63, 62, 61, 0]
    L1b = [19, 18, 17, 16, 15, 14, 13, 12, 11, 10]
    g._circle_embedding(L1a, center=(0, 0), radius = 6, shift = 3.25)
    g._circle_embedding(L1b, center=(0, 0), radius = 6, shift = -1.25)

    L4a = [37, 2, 31, 38, 53, 32, 21, 54, 3, 22]
    g._circle_embedding(L4a, center=(0, 0), radius = 3, shift = 1.9)

    L4b = [26, 59, 48, 27, 42, 49, 8, 43, 58, 7]
    g._circle_embedding(L4b, center=(0, 0), radius = 3, shift = 1.1)

    return g
def Balaban11Cage(embedding = 1):
    r"""
    Return the Balaban 11-cage.

    For more information, see the :wikipedia:`Balaban_11-cage`.

    INPUT:

    - ``embedding`` -- three embeddings are available, and can be selected by
      setting ``embedding`` to be 1, 2, or 3.

      - The first embedding is the one appearing on page 9 of the Fifth Annual
        Graph Drawing Contest report [EMMN1998]_. It separates vertices based on
        their eccentricity (see :meth:`eccentricity()
        <sage.graphs.generic_graph.GenericGraph.eccentricity>`).

      - The second embedding has been produced just for Sage and is meant to
        emphasize the automorphism group's 6 orbits.

      - The last embedding is the default one produced by the :meth:`LCFGraph`
        constructor.

    .. NOTE::

        With ``embedding=1`` the graph is built on string vertex labels, so
        the vertex labeling differs from that of the other two embeddings
        (which use integers ``0..111``).

    EXAMPLES:

    Basic properties::

        sage: g = graphs.Balaban11Cage()
        sage: g.order()
        112
        sage: g.size()
        168
        sage: g.girth()
        11
        sage: g.diameter()
        8
        sage: g.automorphism_group().cardinality()
        64

    Our many embeddings::

        sage: g1 = graphs.Balaban11Cage(embedding=1)
        sage: g2 = graphs.Balaban11Cage(embedding=2)
        sage: g3 = graphs.Balaban11Cage(embedding=3)
        sage: g1.show(figsize=[10,10])  # long time
        sage: g2.show(figsize=[10,10])  # long time
        sage: g3.show(figsize=[10,10])  # long time

    Proof that the embeddings are the same graph::

        sage: g1.is_isomorphic(g2)  # g2 and g3 are obviously isomorphic
        True

    TESTS::

        sage: graphs.Balaban11Cage(embedding='xyzzy')
        Traceback (most recent call last):
        ...
        ValueError: the value of embedding must be 1, 2, or 3
    """
    if embedding == 1:
        # Vertices are string labels: "ji" for the 64 outermost vertices and
        # "1ji" for the 48 remaining ones.  Each of the three loops below
        # places one concentric ring of vertices (radii 0.8, 1.1 and 1.4).
        pos_dict = {}
        for j in range(8):
            for i in range(8):
                pos_dict[str(j) + str(i)]= [
                        0.8 * float(cos(2*((8*j + i)*pi/64 + pi/128))),
                        0.8 * float(sin(2*((8*j + i)*pi/64 + pi/128)))
                    ]
            for i in range(4):
                pos_dict['1' + str(j) + str(i)] = [
                        1.1 * float(cos(2*((4*j + i)*pi/32 + pi/64))),
                        1.1 * float(sin(2*((4*j + i)*pi/32 + pi/64)))
                    ]
            for i in range(2):
                pos_dict['1' + str(j) + str(i + 4)] = [
                        1.4 * float(cos(2*((2*j + i)*pi/16 + pi/32))),
                        1.4 * float(sin(2*((2*j + i)*pi/16 + pi/32)))
                    ]

        # Explicit adjacency lists for the string-labeled version; each edge
        # is listed once (from one endpoint only).
        edge_dict = {
            "00": ["11"], "01": ["10"], "02": ["53"], "03": ["52"],
            "11": ["20"], "10": ["21"], "53": ["22"], "52": ["23"],
            "20": ["31"], "21": ["30"], "22": ["33"], "23": ["32"],
            "31": ["40"], "30": ["41"], "33": ["43"], "32": ["42"],
            "40": ["50"], "41": ["51"], "43": ["12"], "42": ["13"],
            "50": ["61"], "51": ["60"], "12": ["63"], "13": ["62"],
            "61": ["70"], "60": ["71"], "63": ["72"], "62": ["73"],
            "70": ["01"], "71": ["00"], "72": ["03"], "73": ["02"],

            "04": ["35"], "05": ["34"], "06": ["37"], "07": ["36"],
            "35": ["64"], "34": ["65"], "37": ["66"], "36": ["67"],
            "64": ["55"], "65": ["54"], "66": ["17"], "67": ["16"],
            "55": ["45"], "54": ["44"], "17": ["46"], "16": ["47"],
            "45": ["74"], "44": ["75"], "46": ["76"], "47": ["77"],
            "74": ["25"], "75": ["24"], "76": ["27"], "77": ["26"],
            "25": ["14"], "24": ["15"], "27": ["56"], "26": ["57"],
            "14": ["05"], "15": ["04"], "56": ["07"], "57": ["06"],

            "100": ["03", "04"], "110": ["10", "12"],
            "101": ["01", "06"], "111": ["11", "13"],
            "102": ["00", "07"], "112": ["14", "16"],
            "103": ["02", "05"], "113": ["15", "17"],

            "120": ["22", "24"], "130": ["33", "36"],
            "121": ["20", "26"], "131": ["32", "37"],
            "122": ["21", "27"], "132": ["31", "34"],
            "123": ["23", "25"], "133": ["30", "35"],

            "140": ["43", "45"], "150": ["50", "52"],
            "141": ["40", "46"], "151": ["51", "53"],
            "142": ["41", "47"], "152": ["54", "56"],
            "143": ["42", "44"], "153": ["55", "57"],

            "160": ["60", "66"], "170": ["73", "76"],
            "161": ["63", "65"], "171": ["72", "77"],
            "162": ["62", "64"], "172": ["71", "74"],
            "163": ["61", "67"], "173": ["70", "75"],

            "104": ["100", "102", "105"], "114": ["110", "111", "115"],
            "105": ["101", "103", "104"], "115": ["112", "113", "114"],
            "124": ["120", "121", "125"], "134": ["130", "131", "135"],
            "125": ["122", "123", "124"], "135": ["132", "133", "134"],
            "144": ["140", "141", "145"], "154": ["150", "151", "155"],
            "145": ["142", "143", "144"], "155": ["152", "153", "154"],
            "164": ["160", "161", "165"], "174": ["170", "171", "175"],
            "165": ["162", "163", "164"], "175": ["172", "173", "174"]
        }

        return Graph(edge_dict, pos=pos_dict, name="Balaban 11-cage")

    elif embedding == 2 or embedding == 3:
        # LCF code applied once around a 112-cycle; embeddings 2 and 3 share
        # this construction (and hence the integer vertex labels).
        L = [44, 26, -47, -15, 35, -39, 11, -27, 38, -37, 43, 14, 28, 51,
             -29, -16, 41, -11, -26, 15, 22, -51, -35, 36, 52, -14, -33,
             -26, -46, 52, 26, 16, 43, 33, -15, 17, -53, 23, -42, -35, -28,
             30, -22, 45, -44, 16, -38, -16, 50, -55, 20, 28, -17, -43,
             47, 34, -26, -41, 11, -36, -23, -16, 41, 17, -51, 26, -33,
             47, 17, -11, -20, -30, 21, 29, 36, -43, -52, 10, 39, -28, -17,
             -52, 51, 26, 37, -17, 10, -10, -45, -34, 17, -26, 27, -21,
             46, 53, -10, 29, -50, 35, 15, -47, -29, -41, 26, 33, 55, -17,
             42, -26, -36, 16]

        from sage.graphs.generators.families import LCFGraph
        g = LCFGraph(112, L, 1)
        g.name("Balaban 11-cage")

        if embedding == 3:
            # keep the circular LCF layout untouched
            return g

        # embedding 2: overwrite the positions so that the 6 orbits of the
        # automorphism group appear as vertical columns (v1, l1, l2, v2) and
        # two outer columns of 8 vertices each.
        v1 = [34, 2, 54, 43, 66, 20, 89, 100, 72, 76, 6, 58, 16, 78, 74,
              70, 36, 94, 27, 25, 10, 8, 45, 60, 14, 64, 80, 82, 109, 107,
              49, 98]
        v2 = [88, 3, 19, 55, 67, 42, 101, 33, 77, 5, 17, 57, 69, 71, 73,
              75, 11, 61, 28, 9, 37, 26, 46, 95, 13, 63, 81, 83, 108, 106,
              48, 97]
        l1 = [35, 93, 1, 24, 53, 7, 44, 59, 15, 65, 79, 21, 110, 90, 50,
              99]
        l2 = [87, 4, 18, 56, 68, 41, 102, 32, 12, 62, 29, 84, 38, 105, 47,
              96]

        d = g.get_pos()
        for i,v in enumerate(v1):
            d[v] = (-2, 16.5-i)

        for i,v in enumerate(l1):
            d[v] = (-10, 8-i)

        for i,v in enumerate(l2):
            d[v] = (10, 8.5-i)

        for i,v in enumerate(v2):
            d[v] = (2, 16.5-i)

        for i,v in enumerate([0, 111, 92, 91, 52, 51, 23, 22]):
            d[v] = (-20, 14.5-4*i)

        for i,v in enumerate([104, 103, 86, 85, 40, 39, 31, 30]):
            d[v] = (20, 14.5-4*i)

        return g

    else:
        raise ValueError("the value of embedding must be 1, 2, or 3")
def BidiakisCube():
    r"""
    Return the Bidiakis cube.

    For more information, see the :wikipedia:`Bidiakis_cube`.

    EXAMPLES:

    The Bidiakis cube is a 3-regular graph having 12 vertices and 18 edges. This
    means that each vertex has a degree of 3. ::

        sage: g = graphs.BidiakisCube(); g
        Bidiakis cube: Graph on 12 vertices
        sage: g.show()  # long time
        sage: g.order()
        12
        sage: g.size()
        18
        sage: g.is_regular(3)
        True

    It is a Hamiltonian graph with diameter 3 and girth 4::

        sage: g.is_hamiltonian()
        True
        sage: g.diameter()
        3
        sage: g.girth()
        4

    It is a planar graph with characteristic polynomial
    `(x - 3) (x - 2) (x^4) (x + 1) (x + 2) (x^2 + x - 4)^2` and
    chromatic number 3::

        sage: g.is_planar()
        True
        sage: bool(g.characteristic_polynomial() == expand((x - 3) * (x - 2) * (x^4) * (x + 1) * (x + 2) * (x^2 + x - 4)^2))
        True
        sage: g.chromatic_number()
        3
    """
    # Adjacency lists: the 12-cycle 0-1-...-11-0 plus the six "cube" chords
    # (0,6), (1,5), (2,10), (3,9), (4,8), (7,11); each edge appears once.
    edge_dict = {
        0:[1,6,11], 1:[2,5], 2:[3,10], 3:[4,9], 4:[5,8],
        5:[6], 6:[7], 7:[8,11], 8:[9], 9:[10], 10:[11]}
    # Fixed positions on the unit circle (12th roots of unity), giving the
    # standard circular drawing of the graph.
    pos_dict = {
        0: [0, 1],
        1: [0.5, 0.866025403784439],
        2: [0.866025403784439, 0.500000000000000],
        3: [1, 0],
        4: [0.866025403784439, -0.5],
        5: [0.5, -0.866025403784439],
        6: [0, -1],
        7: [-0.5, -0.866025403784439],
        8: [-0.866025403784439, -0.5],
        9: [-1, 0],
        10: [-0.866025403784439, 0.5],
        11: [-0.5, 0.866025403784439]}
    return Graph(edge_dict, pos=pos_dict, name="Bidiakis cube")
def BiggsSmithGraph(embedding=1):
    r"""
    Return the Biggs-Smith graph.

    For more information, see the :wikipedia:`Biggs-Smith_graph`.

    INPUT:

    - ``embedding`` -- two embeddings are available, and can be selected by
      setting ``embedding`` to be 1 or 2.

    EXAMPLES:

    Basic properties::

        sage: g = graphs.BiggsSmithGraph()
        sage: g.order()
        102
        sage: g.size()
        153
        sage: g.girth()
        9
        sage: g.diameter()
        7
        sage: g.automorphism_group().cardinality()  # long time
        2448
        sage: g.show(figsize=[10, 10])  # long time

    The other embedding::

        sage: graphs.BiggsSmithGraph(embedding=2).show()  # long time

    TESTS::

        sage: graphs.BiggsSmithGraph(embedding='xyzzy')
        Traceback (most recent call last):
        ...
        ValueError: the value of embedding must be 1 or 2
    """
    # LCF code of the Biggs-Smith graph, applied once around a 102-cycle.
    L = [16, 24, -38, 17, 34, 48, -19, 41, -35, 47, -20, 34, -36,
         21, 14, 48, -16, -36, -43, 28, -17, 21, 29, -43, 46, -24,
         28, -38, -14, -50, -45, 21, 8, 27, -21, 20, -37, 39, -34,
         -44, -8, 38, -21, 25, 15, -34, 18, -28, -41, 36, 8, -29,
         -21, -48, -28, -20, -47, 14, -8, -15, -27, 38, 24, -48, -18,
         25, 38, 31, -25, 24, -46, -14, 28, 11, 21, 35, -39, 43, 36,
         -38, 14, 50, 43, 36, -11, -36, -24, 45, 8, 19, -25, 38, 20,
         -24, -14, -21, -8, 44, -31, -38, -28, 37]

    from sage.graphs.generators.families import LCFGraph
    g = LCFGraph(102, L, 1)
    g.name("Biggs-Smith graph")

    if embedding == 1:
        # The six orbits of the automorphism group, each drawn on its own
        # circle below (embedding 2 keeps the plain LCF circular layout).
        orbs = [[1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 0],
                [17, 101, 25, 66, 20, 38, 53, 89, 48, 75, 56, 92, 45, 78,
                 34, 28, 63],
                [18, 36, 26, 65, 19, 37, 54, 90, 47, 76, 55, 91, 46, 77,
                 35, 27, 64],
                [21, 39, 52, 88, 49, 74, 57, 93, 44, 79, 33, 29, 62, 83,
                 100, 24, 67],
                [22, 97, 51, 96, 50, 95, 58, 94, 59, 80, 60, 81, 61, 82,
                 99, 23, 98],
                [30, 86, 84, 72, 70, 68, 42, 40, 31, 87, 85, 73, 71, 69,
                 43, 41, 32]]

        # central orbits
        g._circle_embedding(orbs[1], center=(-.4, 0), radius=.2)
        g._circle_embedding(orbs[3], center=(.4, 0), radius=.2, shift=4)

        # lower orbits
        g._circle_embedding(orbs[0], center=(-.9, -.5), radius=.3, shift=2)
        g._circle_embedding(orbs[2], center=(-.9, .5), radius=.3)

        # upper orbits
        g._circle_embedding(orbs[4], center=(.9, -.5), radius=.3, shift=4)
        g._circle_embedding(orbs[5], center=(.9, .5), radius=.3, shift=-2)
    elif embedding == 2:
        # embedding 2: keep LCFGraph's default circular layout as-is
        pass
    else:
        raise ValueError("the value of embedding must be 1 or 2")

    return g
def BlanusaFirstSnarkGraph():
    r"""
    Return the first Blanusa Snark Graph.

    The Blanusa graphs are two snarks on 18 vertices and 27 edges. For more
    information on them, see the :wikipedia:`Blanusa_snarks`.

    .. SEEALSO::

        * :meth:`~sage.graphs.graph_generators.GraphGenerators.BlanusaSecondSnarkGraph`.

    EXAMPLES::

        sage: g = graphs.BlanusaFirstSnarkGraph()
        sage: g.order()
        18
        sage: g.size()
        27
        sage: g.diameter()
        4
        sage: g.girth()
        5
        sage: g.automorphism_group().cardinality()
        8
    """
    # Chords of the graph: vertex 17 sits at the center, the remaining
    # vertices 0..16 lie on an outer cycle added below.
    chords = {17: [4, 7, 1], 0: [5], 3: [8], 13: [9],
              12: [16], 10: [15], 11: [6], 14: [2]}
    g = Graph(chords, name="Blanusa First Snark Graph")

    rim = list(range(17))
    g.add_cycle(rim)

    # Layout: the 17-cycle on a circle, the hub at the origin.
    g._circle_embedding(rim, shift=0.25)
    g.get_pos()[17] = (0,0)
    return g
def BlanusaSecondSnarkGraph():
    r"""
    Return the second Blanusa Snark Graph.

    The Blanusa graphs are two snarks on 18 vertices and 27 edges. For more
    information on them, see the :wikipedia:`Blanusa_snarks`.

    .. SEEALSO::

        * :meth:`~sage.graphs.graph_generators.GraphGenerators.BlanusaFirstSnarkGraph`.

    EXAMPLES::

        sage: g = graphs.BlanusaSecondSnarkGraph()
        sage: g.order()
        18
        sage: g.size()
        27
        sage: g.diameter()
        4
        sage: g.girth()
        5
        sage: g.automorphism_group().cardinality()
        4
    """
    # Vertices are pairs (side, index); c0 and c1 are the two "connecting"
    # vertices between the two halves of the snark.
    c0 = (-1, 0)
    c1 = (-1, 1)
    g = Graph({c0: [(0, 0), (1, 4), c1], c1: [(0, 3), (1, 1)],
               (0, 2): [(0, 5)], (0, 6): [(0, 4)],
               (0, 7): [(0, 1)], (1, 7): [(1, 2)],
               (1, 0): [(1, 6)], (1, 3): [(1, 5)]},
              name="Blanusa Second Snark Graph")

    # Two pentagons (one per half) and a 6-cycle joining the "extra" vertices
    # of both halves.
    g.add_cycle([(0, i) for i in range(5)])
    g.add_cycle([(1, i) for i in range(5)])
    g.add_cycle([(0, 5), (0, 6), (0, 7), (1, 5), (1, 6), (1, 7)])

    # Pentagons drawn as pentagrams (indices taken in 2*i % 5 order).
    g._circle_embedding([(0, (2 * i) % 5) for i in range(5)],
                        center=(-1.5, 0),
                        shift=.5)
    g._circle_embedding([(1, (2 * i) % 5) for i in range(5)],
                        center=(1.5, 0))

    # The `[c0] * 4` padding inserts unused slots so only part of each circle
    # is occupied by the three vertices being placed (c0 is re-positioned at
    # the very end, so these intermediate placements are harmless).
    g._circle_embedding([(0, i) for i in range(5, 8)] + [c0] * 4,
                        center=(-1.2, 0),
                        shift=2.5,
                        radius=2.2)
    g._circle_embedding([(1, i) for i in range(5, 8)] + [c0] * 4,
                        center=(1.2, 0),
                        shift=-1,
                        radius=2.2)
    g._circle_embedding([c0, c1], shift=.5)
    # Replace the tuple labels by integers 0..17.
    g.relabel()
    return g
def BrinkmannGraph():
    r"""
    Return the Brinkmann graph.

    For more information, see the :wikipedia:`Brinkmann_graph`.

    EXAMPLES:

    The Brinkmann graph is a 4-regular graph having 21 vertices and 42
    edges. This means that each vertex has degree 4. ::

        sage: G = graphs.BrinkmannGraph(); G
        Brinkmann graph: Graph on 21 vertices
        sage: G.show()  # long time
        sage: G.order()
        21
        sage: G.size()
        42
        sage: G.is_regular(4)
        True

    It is an Eulerian graph with radius 3, diameter 3, and girth 5. ::

        sage: G.is_eulerian()
        True
        sage: G.radius()
        3
        sage: G.diameter()
        3
        sage: G.girth()
        5

    The Brinkmann graph is also Hamiltonian with chromatic number 4::

        sage: G.is_hamiltonian()
        True
        sage: G.chromatic_number()
        4

    Its automorphism group is isomorphic to `D_7`::

        sage: ag = G.automorphism_group()
        sage: ag.is_isomorphic(DihedralGroup(7))
        True
    """
    # Adjacency lists (each edge listed once): vertices 0-6 form the outer
    # ring, 7-13 the middle ring, 14-20 the inner ring of the drawing below.
    edge_dict = {
        0: [2,5,7,13],
        1: [3,6,7,8],
        2: [4,8,9],
        3: [5,9,10],
        4: [6,10,11],
        5: [11,12],
        6: [12,13],
        7: [15,20],
        8: [14,16],
        9: [15,17],
        10: [16,18],
        11: [17,19],
        12: [18,20],
        13: [14,19],
        14: [17,18],
        15: [18,19],
        16: [19,20],
        17: [20]}
    # Pre-computed positions: three concentric rings of 7 vertices each
    # (radii 4, 2 and 1), exhibiting the dihedral D_7 symmetry.
    pos_dict = {
        0: [0, 4],
        1: [3.12732592987212, 2.49395920743493],
        2: [3.89971164872729, -0.890083735825258],
        3: [1.73553495647023, -3.60387547160968],
        4: [-1.73553495647023, -3.60387547160968],
        5: [-3.89971164872729, -0.890083735825258],
        6: [-3.12732592987212, 2.49395920743493],
        7: [0.867767478235116, 1.80193773580484],
        8: [1.94985582436365, 0.445041867912629],
        9: [1.56366296493606, -1.24697960371747],
        10: [0, -2],
        11: [-1.56366296493606, -1.24697960371747],
        12: [-1.94985582436365, 0.445041867912629],
        13: [-0.867767478235116, 1.80193773580484],
        14: [0.433883739117558, 0.900968867902419],
        15: [0.974927912181824, 0.222520933956314],
        16: [0.781831482468030, -0.623489801858733],
        17: [0, -1],
        18: [-0.781831482468030, -0.623489801858733],
        19: [-0.974927912181824, 0.222520933956315],
        20: [-0.433883739117558, 0.900968867902419]}
    return Graph(edge_dict, pos=pos_dict, name="Brinkmann graph")
def BrouwerHaemersGraph():
    r"""
    Return the Brouwer-Haemers Graph.

    The Brouwer-Haemers is the only strongly regular graph of parameters
    `(81,20,1,6)`. It is build in Sage as the Affine Orthogonal graph
    `VO^-(6,3)`. For more information on this graph, see its `corresponding page
    on Andries Brouwer's website
    <https://www.win.tue.nl/~aeb/graphs/Brouwer-Haemers.html>`_.

    EXAMPLES::

        sage: g = graphs.BrouwerHaemersGraph()
        sage: g
        Brouwer-Haemers: Graph on 81 vertices

    It is indeed strongly regular with parameters `(81,20,1,6)`::

        sage: g.is_strongly_regular(parameters = True)  # long time
        (81, 20, 1, 6)

    Its has as eigenvalues `20,2` and `-7`::

        sage: set(g.spectrum()) == {20,2,-7}
        True
    """
    from sage.rings.finite_rings.finite_field_constructor import FiniteField
    from sage.modules.free_module import VectorSpace
    from sage.matrix.constructor import Matrix
    from sage.matrix.constructor import identity_matrix

    # Vertices are the 3^4 = 81 vectors of GF(3)^4.
    d = 4
    q = 3
    F = FiniteField(q,"x")
    V = VectorSpace(F,d)
    # M is the Gram matrix of the quadratic form used for adjacency
    # (identity with the (1,1) entry negated).
    M = Matrix(F,identity_matrix(d))
    M[1,1]=-1

    # Two vectors are adjacent when their difference is isotropic for M.
    # x == y also satisfies the test, producing loops, hence loops=False.
    G = Graph([[tuple(_) for _ in V], lambda x,y:(V(x)-V(y))*(M*(V(x)-V(y))) == 0], loops = False)
    G.relabel()
    # Hand-picked vertex ordering giving a pleasant circular drawing.
    ordering = [0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17,
                18, 19, 20, 21, 22, 23, 24, 25, 26, 48, 49, 50, 51, 52, 53,
                45, 46, 47, 30, 31, 32, 33, 34, 35, 27, 28, 29, 39, 40, 41,
                42, 43, 44, 36, 37, 38, 69, 70, 71, 63, 64, 65, 66, 67, 68,
                78, 79, 80, 72, 73, 74, 75, 76, 77, 60, 61, 62, 54, 55, 56,
                57, 58, 59]
    G._circle_embedding(ordering)
    G.name("Brouwer-Haemers")
    return G
def BuckyBall():
    r"""
    Create the Bucky Ball graph.

    This graph is a 3-regular 60-vertex planar graph. Its vertices and edges
    correspond precisely to the carbon atoms and bonds in buckminsterfullerene.
    When embedded on a sphere, its 12 pentagon and 20 hexagon faces are arranged
    exactly as the sections of a soccer ball.

    EXAMPLES:

    The Bucky Ball is planar. ::

        sage: g = graphs.BuckyBall()
        sage: g.is_planar()
        True

    The Bucky Ball can also be created by extracting the 1-skeleton of the Bucky
    Ball polyhedron, but this is much slower. ::

        sage: g = polytopes.buckyball().vertex_graph()
        sage: g.remove_loops()
        sage: h = graphs.BuckyBall()
        sage: g.is_isomorphic(h)
        True

    The graph is returned along with an attractive embedding. ::

        sage: g = graphs.BuckyBall()
        sage: g.plot(vertex_labels=False, vertex_size=10).show()  # long time
    """
    # Explicit list of the 90 edges of the truncated icosahedron.
    edges = [(0, 2), (0, 48), (0, 59), (1, 3), (1, 9), (1, 58),
             (2, 3), (2, 36), (3, 17), (4, 6), (4, 8), (4, 12),
             (5, 7), (5, 9), (5, 16), (6, 7), (6, 20), (7, 21),
             (8, 9), (8, 56), (10, 11), (10, 12), (10, 20), (11, 27),
             (11, 47), (12, 13), (13, 46), (13, 54), (14, 15), (14, 16),
             (14, 21), (15, 25), (15, 41), (16, 17), (17, 40), (18, 19),
             (18, 20), (18, 26), (19, 21), (19, 24), (22, 23), (22, 31),
             (22, 34), (23, 25), (23, 38), (24, 25), (24, 30), (26, 27),
             (26, 30), (27, 29), (28, 29), (28, 31), (28, 35), (29, 44),
             (30, 31), (32, 34), (32, 39), (32, 50), (33, 35), (33, 45),
             (33, 51), (34, 35), (36, 37), (36, 40), (37, 39), (37, 52),
             (38, 39), (38, 41), (40, 41), (42, 43), (42, 46), (42, 55),
             (43, 45), (43, 53), (44, 45), (44, 47), (46, 47), (48, 49),
             (48, 52), (49, 53), (49, 57), (50, 51), (50, 52), (51, 53),
             (54, 55), (54, 56), (55, 57), (56, 58), (57, 59), (58, 59)
             ]
    g = Graph()
    g.add_edges(edges)
    g.name("Bucky Ball")

    # Pre-computed planar embedding (a Schlegel-like projection of the
    # soccer-ball layout); coordinates are fixed floating point constants.
    pos = {
        0 :  (1.00000000000000, 0.000000000000000),
        1 :  (-1.00000000000000, 0.000000000000000),
        2 :  (0.500000000000000, 0.866025403784439),
        3 :  (-0.500000000000000, 0.866025403784439),
        4 :  (-0.252886764483159, -0.146004241548845),
        5 :  (-0.368953972399043, 0.0928336233191176),
        6 :  (-0.217853192651371, -0.0480798425451855),
        7 :  (-0.255589950938772, 0.0495517623332213),
        8 :  (-0.390242139418333, -0.225306404242310),
        9 :  (-0.586398703939125, -0.0441575936410641),
        10:  (-0.113926229169631, -0.101751920396670),
        11:  (-0.0461308635969359, -0.0928422349110366),
        12:  (-0.150564961379772, -0.164626477859040),
        13:  (-0.0848818904865275, -0.246123271631605),
        14:  (-0.170708060452244, 0.196571509298384),
        15:  (-0.0672882312715990, 0.212706320404226),
        16:  (-0.264873262319233, 0.273106701265196),
        17:  (-0.254957754106411, 0.529914971178085),
        18:  (-0.103469165775548, 0.00647061768205703),
        19:  (-0.113590051906687, 0.0655812470455896),
        20:  (-0.145082862532183, -0.0477870484199328),
        21:  (-0.179962687765901, 0.103901506225732),
        22:  (0.0573383021786124, 0.0863716172289798),
        23:  (0.0311566333625530, 0.149538968816603),
        24:  (-0.0573383021786121, 0.0863716172289799),
        25:  (-0.0311566333625527, 0.149538968816603),
        26:  (-0.0517345828877740, 0.00161765442051429),
        27:  (-0.0244663616211774, -0.0456122902452611),
        28:  (0.0517345828877743, 0.00161765442051431),
        29:  (0.0244663616211777, -0.0456122902452611),
        30:  (-0.0272682212665964, 0.0439946358247470),
        31:  (0.0272682212665968, 0.0439946358247470),
        32:  (0.179962687765901, 0.103901506225732),
        33:  (0.145082862532184, -0.0477870484199329),
        34:  (0.113590051906687, 0.0655812470455895),
        35:  (0.103469165775548, 0.00647061768205698),
        36:  (0.254957754106411, 0.529914971178085),
        37:  (0.264873262319233, 0.273106701265196),
        38:  (0.0672882312715993, 0.212706320404226),
        39:  (0.170708060452245, 0.196571509298384),
        40:  (1.59594559789866e-16, 0.450612808484620),
        41:  (2.01227923213310e-16, 0.292008483097691),
        42:  (0.0848818904865278, -0.246123271631605),
        43:  (0.150564961379773, -0.164626477859040),
        44:  (0.0461308635969362, -0.0928422349110366),
        45:  (0.113926229169631, -0.101751920396670),
        46:  (1.66533453693773e-16, -0.207803012451463),
        47:  (1.80411241501588e-16, -0.131162494091179),
        48:  (0.586398703939126, -0.0441575936410641),
        49:  (0.390242139418333, -0.225306404242310),
        50:  (0.255589950938772, 0.0495517623332212),
        51:  (0.217853192651372, -0.0480798425451855),
        52:  (0.368953972399044, 0.0928336233191175),
        53:  (0.252886764483159, -0.146004241548845),
        54:  (-0.104080710079810, -0.365940324584313),
        55:  (0.104080710079811, -0.365940324584313),
        56:  (-0.331440949832714, -0.485757377537020),
        57:  (0.331440949832715, -0.485757377537021),
        58:  (-0.500000000000000, -0.866025403784438),
        59:  (0.500000000000000, -0.866025403784439)
    }
    g.set_pos(pos)
    return g
def GossetGraph():
    r"""
    Return the Gosset graph.

    The Gosset graph is the skeleton of the
    :meth:`~sage.geometry.polyhedron.library.Polytopes.Gosset_3_21` polytope. It
    has 56 vertices and is 27-regular. For more information, see the
    :wikipedia:`Gosset_graph`.

    EXAMPLES::

        sage: g = graphs.GossetGraph(); g
        Gosset Graph: Graph on 56 vertices
        sage: g.order(), g.size()
        (56, 756)

    TESTS::

        sage: g.is_isomorphic(polytopes.Gosset_3_21().graph())  # not tested (~16s)
        True
    """
    # graph6 encoding of the 56-vertex graph (note the escaped backslashes).
    string = ('w~~~~rt{~Z\\ZxnvYZYmlfrb}|hDuhLlcmmMNf_^zzQGNYcP\\kcRZbaJjoNBx{'+
              '?N~o^}?A`}F_Kbbm_[QZ\\_]Cj\\oN_dm{BzB{?]WIMM@tPQRYBYRPIuAyJgQv?'+
              '|Bxb_M[kWIR@jTQcciDjShXCkFMgpwqBKxeKoS`TYqdTCcKtkdKwWQXrbEZ@OdU'+
              'mITZ@_e[{KXn?YPABzvY?IcO`zvYg@caC\\zlf?BaGR]zb{?@wOjv`~w??N_n_~'+
              '~w???^_^~~{')

    G = Graph(string,name="Gosset Graph")

    # Circular layout with a hand-picked vertex order.
    ordering = [0, 2, 4, 6, 43, 23, 50, 18, 28, 9, 8, 7, 44, 3, 26, 35, 16, 14,
                33, 15, 54, 30, 17, 21, 10, 13, 36, 31, 55, 53, 51, 49, 12, 32,
                5, 37, 27, 46, 47, 48, 11, 52, 29, 20, 39, 41, 22, 40, 1, 25, 38,
                34, 45, 42, 19, 24]

    G._circle_embedding(ordering)

    return G
def DoubleStarSnark():
    r"""
    Return the double star snark.

    The double star snark is a 3-regular graph on 30 vertices. See the
    :wikipedia:`Double-star_snark`.

    EXAMPLES::

        sage: g = graphs.DoubleStarSnark()
        sage: g.order()
        30
        sage: g.size()
        45
        sage: g.chromatic_number()
        3
        sage: g.is_hamiltonian()
        False
        sage: g.automorphism_group().cardinality()
        80
        sage: g.show()
    """
    # Full adjacency lists (each edge appears from both endpoints): vertices
    # 0..14 form the outer ring of the drawing, 15..29 the inner ring.
    d = { 0: [1, 14, 15]
        , 1: [0, 2, 11]
        , 2: [1, 3, 7]
        , 3: [2, 4, 18]
        , 4: [3, 5, 14]
        , 5: [10, 4, 6]
        , 6: [5, 21, 7]
        , 7: [8, 2, 6]
        , 8: [9, 13, 7]
        , 9: [24, 8, 10]
        , 10: [9, 11, 5]
        , 11: [1, 10, 12]
        , 12: [11, 27, 13]
        , 13: [8, 12, 14]
        , 14: [0, 4, 13]
        , 15: [0, 16, 29]
        , 16: [15, 20, 23]
        , 17: [25, 18, 28]
        , 18: [3, 17, 19]
        , 19: [18, 26, 23]
        , 20: [16, 28, 21]
        , 21: [20, 6, 22]
        , 22: [26, 21, 29]
        , 23: [16, 24, 19]
        , 24: [25, 9, 23]
        , 25: [24, 17, 29]
        , 26: [27, 19, 22]
        , 27: [12, 26, 28]
        , 28: [17, 27, 20]
        , 29: [25, 22, 15]
        }

    g = Graph(d, pos={}, name="Double star snark")
    # Two concentric circles: 0..14 outside (radius 2), 15..29 inside.
    g._circle_embedding(list(range(15)), radius=2)
    g._circle_embedding(list(range(15, 30)), radius=1.4)

    return g
def MeredithGraph():
    r"""
    Return the Meredith Graph.

    The Meredith Graph is a 4-regular 4-connected non-hamiltonian graph. For
    more information on the Meredith Graph, see the :wikipedia:`Meredith_graph`.

    EXAMPLES::

        sage: g = graphs.MeredithGraph()
        sage: g.is_regular(4)
        True
        sage: g.order()
        70
        sage: g.size()
        140
        sage: g.radius()
        7
        sage: g.diameter()
        8
        sage: g.girth()
        4
        sage: g.chromatic_number()
        3
        sage: g.is_hamiltonian()  # long time
        False
    """
    g = Graph(name="Meredith Graph")
    # Vertex 0 is a dummy, used only as padding in the circular layouts below
    # and deleted before returning.
    g.add_vertex(0)

    # Edges between the ten copies of K_{4,3} (five "outer" and five "inner")
    for c in range(5):
        g.add_edges([(('outer', c, 3), ('outer', (c + 1) % 5, 0)),
                     (('inner', c, 3), ('inner', (c + 2) % 5, 0)),
                     (('outer', c, 1), ('inner', c, 1)),
                     (('outer', c, 2), ('inner', c, 2))])

    # Internal edges of each K_{4,3}: side {0,...,3} fully joined to {4,5,6}
    for c in range(5):
        g.add_edges((('inner', c, a), ('inner', c, b + 4))
                    for a in range(4) for b in range(3))
        g.add_edges((('outer', c, a), ('outer', c, b + 4))
                    for a in range(4) for b in range(3))

    # One ring of vertices per (part, index-range), padded with copies of the
    # dummy vertex 0 so that each copy of K_{4,3} occupies only an arc.
    def _ring(part, rng, pad):
        return sum([[(part, c, j) for j in rng] + pad * [0]
                    for c in range(5)], [])

    g._circle_embedding(_ring('outer', range(4), 10), radius = 1, shift = 2)
    g._circle_embedding(_ring('outer', range(4, 7), 10), radius = 1.2, shift = 2.2)
    g._circle_embedding(_ring('inner', range(4), 7), radius = .6, shift = 1.24)
    g._circle_embedding(_ring('inner', range(4, 7), 5), radius = .4, shift = 1.05)

    g.delete_vertex(0)
    g.relabel()
    return g
def KittellGraph():
    r"""
    Return the Kittell Graph.

    For more information, see the `Wolfram page about the Kittel Graph
    <http://mathworld.wolfram.com/KittellGraph.html>`_.

    EXAMPLES::

        sage: g = graphs.KittellGraph()
        sage: g.order()
        23
        sage: g.size()
        63
        sage: g.radius()
        3
        sage: g.diameter()
        4
        sage: g.girth()
        3
        sage: g.chromatic_number()
        4
    """
    # Full adjacency lists of the 23-vertex planar graph.
    g = Graph({0: [1, 2, 4, 5, 6, 7], 1: [0, 2, 7, 10, 11, 13],
               2: [0, 1, 11, 4, 14], 3: [16, 12, 4, 5, 14], 4: [0, 2, 3, 5, 14],
               5: [0, 16, 3, 4, 6], 6: [0, 5, 7, 15, 16, 17, 18],
               7: [0, 1, 6, 8, 13, 18], 8: [9, 18, 19, 13, 7],
               9: [8, 10, 19, 20, 13], 10: [1, 9, 11, 13, 20, 21],
               11: [1, 2, 10, 12, 14, 15, 21], 12: [11, 16, 3, 14, 15],
               13: [8, 1, 10, 9, 7], 14: [11, 12, 2, 3, 4],
               15: [6, 11, 12, 16, 17, 21, 22],
               16: [3, 12, 5, 6, 15], 17: [18, 19, 22, 6, 15],
               18: [8, 17, 19, 6, 7], 19: [8, 9, 17, 18, 20, 22],
               20: [9, 10, 19, 21, 22], 21: [10, 11, 20, 22, 15],
               22: [17, 19, 20, 21, 15]},
              name = "Kittell Graph")

    # Layout: triangle 0-1-2 outermost, vertices 3..12 on a middle circle,
    # 15..21 on an inner circle, and 13, 14, 22 placed by hand below.
    g._circle_embedding(list(range(3)), shift=.75)
    g._circle_embedding(list(range(3, 13)), radius = .4)
    g._circle_embedding(list(range(15, 22)), radius = .2, shift=-.15)

    pos = g.get_pos()
    pos[13] = (-.65,-.35)
    pos[14] = (.65,-.35)
    pos[22] = (0,0)

    return g
def CameronGraph():
    r"""
    Return the Cameron graph.

    The Cameron graph is strongly regular with parameters `v = 231, k = 30,
    \lambda = 9, \mu = 3`.

    For more information on the Cameron graph, see
    `<https://www.win.tue.nl/~aeb/graphs/Cameron.html>`_.

    EXAMPLES::

        sage: g = graphs.CameronGraph()
        sage: g.order()
        231
        sage: g.size()
        3465
        sage: g.is_strongly_regular(parameters = True)  # long time
        (231, 30, 9, 3)
    """
    from sage.groups.perm_gps.permgroup_named import MathieuGroup
    from itertools import combinations
    g = Graph(name="Cameron Graph")
    # The 77 hexads: the orbit of a 6-set under the Mathieu group M_22.
    sets = MathieuGroup(22).orbit((1,2,3,7,10,20), action = "OnSets")
    for s in sets:
        # Vertices are (unordered) pairs of points; two pairs are adjacent
        # when they are disjoint and their union lies inside a hexad, so
        # every 4-subset {a,b,c,d} of a hexad contributes its 3 pairings.
        for a,b,c,d in combinations(set(s),4):
            g.add_edges([((a,b),(c,d)),((a,c),(b,d)), ((a,d),(b,c))])

    # Replace the pair labels by integers 0..230.
    g.relabel()
    # Hand-picked vertex order for the circular drawing.
    ordering = [0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 14, 15, 18, 19, 20,
                21, 24, 25, 26, 27, 29, 31, 34, 35, 38, 39, 96, 97, 101, 105,
                51, 117, 198, 32, 196, 201, 131, 167, 199, 197, 86, 102, 195,
                200, 186, 144, 202, 177, 44, 53, 58, 45, 48, 54, 43, 57, 50,
                46, 59, 133, 169, 104, 188, 118, 208, 157, 52, 207, 209, 132,
                204, 13, 187, 33, 203, 70, 145, 103, 168, 178, 87, 124, 123,
                125, 111, 120, 116, 119, 112, 95, 114, 115, 137, 218, 213, 108,
                76, 77, 74, 62, 64, 67, 63, 68, 69, 61, 41, 75, 73, 66, 71, 72,
                60, 22, 230, 151, 184, 138, 193, 109, 228, 174, 214, 219, 93,
                126, 143, 150, 146, 224, 181, 16, 223, 171, 90, 135, 106, 205,
                211, 121, 148, 160, 216, 222, 190, 36, 55, 185, 175, 94, 139,
                110, 215, 152, 220, 229, 194, 40, 128, 99, 141, 173, 154, 82,
                156, 164, 159, 28, 127, 158, 65, 162, 163, 153, 161, 155, 140,
                98, 47, 113, 84, 180, 30, 129, 179, 183, 165, 176, 142, 100,
                49, 134, 210, 170, 147, 91, 37, 206, 182, 191, 56, 136, 225,
                221, 149, 227, 217, 17, 107, 172, 212, 122, 226, 23, 85, 42,
                80, 92, 81, 89, 78, 83, 88, 79, 130, 192, 189, 166]

    g._circle_embedding(ordering)
    return g
def ChvatalGraph():
    r"""
    Return the Chvatal graph.

    Chvatal graph is one of the few known graphs to satisfy Grunbaum's
    conjecture that for every m, n, there is an m-regular, m-chromatic graph of
    girth at least n. For more information, see the
    :wikipedia:`Chv%C3%A1tal_graph`.

    EXAMPLES:

    The Chvatal graph has 12 vertices and 24 edges. It is a 4-regular,
    4-chromatic graph with radius 2, diameter 2, and girth 4. ::

        sage: G = graphs.ChvatalGraph(); G
        Chvatal graph: Graph on 12 vertices
        sage: G.order(); G.size()
        12
        24
        sage: G.degree()
        [4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4]
        sage: G.chromatic_number()
        4
        sage: G.radius(); G.diameter(); G.girth()
        2
        2
        4

    TESTS::

        sage: import networkx
        sage: G = graphs.ChvatalGraph()
        sage: G.is_isomorphic(Graph(networkx.chvatal_graph()))
        True
    """
    # Adjacency lists, each edge listed once.
    adjacency = {0: [1, 4, 6, 9], 1: [2, 5, 7], 2: [3, 6, 8], 3: [4, 7, 9],
                 4: [5, 8], 5: [10, 11], 6: [10, 11], 7: [8, 11], 8: [10],
                 9: [10, 11]}
    layout = {}

    # Vertices 5..9 on the unit circle...
    for v in range(5, 10):
        theta = (pi / 2) + ((2 * pi) / 5) * v
        layout[v] = (float(cos(theta)), float(sin(theta)))
    # ...vertices 0..4 on a circle of twice the radius...
    for v in range(5):
        theta = (pi / 2) + ((2 * pi) / 5) * (v - 5)
        layout[v] = (float(2 * (cos(theta))), float(2 * (sin(theta))))
    # ...and the two remaining vertices near the center.
    layout[10] = (0.5, 0)
    layout[11] = (-0.5, 0)

    return Graph(adjacency, pos=layout, name="Chvatal graph")
def ClebschGraph():
    r"""
    Return the Clebsch graph.

    See the :wikipedia:`Clebsch_graph` for more information.

    EXAMPLES::

        sage: g = graphs.ClebschGraph()
        sage: g.automorphism_group().cardinality()
        1920
        sage: g.girth()
        4
        sage: g.chromatic_number()
        4
        sage: g.diameter()
        2
        sage: g.show(figsize=[10, 10])  # long time
    """
    g = Graph(pos={})
    # Vertices are 0..15; even vertices connect at offsets 1, 6 and 8
    # (mod 16), odd vertices at offsets 3, 2 and 8.
    for v in range(0, 16, 2):
        g.add_edges([(v, (v + offset) % 16) for offset in (1, 6, 8)])
        g.add_edges([(v + 1, (v + 1 + offset) % 16) for offset in (3, 2, 8)])

    g._circle_embedding(list(range(16)), shift=.5)
    g.name("Clebsch graph")

    return g
def CoxeterGraph():
    r"""
    Return the Coxeter graph.

    See the :wikipedia:`Coxeter_graph`.

    EXAMPLES::

        sage: g = graphs.CoxeterGraph()
        sage: g.automorphism_group().cardinality()
        336
        sage: g.girth()
        7
        sage: g.chromatic_number()
        3
        sage: g.diameter()
        4
        sage: g.show(figsize=[10, 10])  # long time
    """
    # Start from the three hub vertices 24-26 and the center vertex 27,
    # each attached to three rim vertices.
    spokes = {27: [6, 22, 14], 24: [0, 7, 18],
              25: [8, 15, 2], 26: [10, 16, 23]}
    g = Graph(spokes, pos={})

    # 24-vertex outer rim plus its six chords.
    rim = list(range(24))
    g.add_cycle(rim)
    g.add_edges([(5, 11), (9, 20), (12, 1), (13, 19), (17, 4), (3, 21)])

    # Layout: rim on the unit circle, hubs on a small circle, center at 0.
    g._circle_embedding(rim)
    g._circle_embedding([24, 25, 26], radius=.5)
    g.get_pos()[27] = (0, 0)

    g.name("Coxeter Graph")

    return g
def DejterGraph():
    r"""
    Return the Dejter graph.

    The Dejter graph is obtained from the binary 7-cube by deleting a copy of
    the Hamming code of length 7. It is 6-regular, with 112 vertices and 336
    edges. For more information, see the :wikipedia:`Dejter_graph`.

    EXAMPLES::

        sage: g = graphs.DejterGraph(); g
        Dejter Graph: Graph on 112 vertices
        sage: g.is_regular(k=6)
        True
        sage: g.girth()
        4
    """
    from sage.graphs.generators.families import CubeGraph
    from sage.coding.hamming_code import HammingCode
    from sage.rings.finite_rings.finite_field_constructor import FiniteField

    dejter = CubeGraph(7)
    # Cube vertices are labeled by binary strings; remove the labels that
    # correspond to codewords of the [7,4] Hamming code.
    codewords = ["".join(map(str, word))
                 for word in HammingCode(FiniteField(2), 3)]
    dejter.delete_vertices(codewords)
    dejter.name("Dejter Graph")
    return dejter
def DesarguesGraph():
    """
    Return the Desargues graph.

    PLOTTING: The layout chosen is the same as on the cover of [Har1994]_.

    EXAMPLES::

        sage: D = graphs.DesarguesGraph()
        sage: L = graphs.LCFGraph(20,[5,-5,9,-9],5)
        sage: D.is_isomorphic(L)
        True
        sage: D.show()  # long time
    """
    from sage.graphs.generators.families import GeneralizedPetersenGraph
    # The Desargues graph is the generalized Petersen graph GP(10, 3).
    desargues = GeneralizedPetersenGraph(10, 3)
    desargues.name("Desargues Graph")
    return desargues
def DurerGraph():
    r"""
    Return the Dürer graph.

    For more information, see the :wikipedia:`D%C3%BCrer_graph`.

    EXAMPLES:

    The Dürer graph is named after Albrecht Dürer. It is a planar graph
    with 12 vertices and 18 edges. ::

        sage: G = graphs.DurerGraph(); G
        Durer graph: Graph on 12 vertices
        sage: G.is_planar()
        True
        sage: G.order()
        12
        sage: G.size()
        18

    The Dürer graph has chromatic number 3, diameter 4, and girth 3. ::

        sage: G.chromatic_number()
        3
        sage: G.diameter()
        4
        sage: G.girth()
        3

    Its automorphism group is isomorphic to `D_6`. ::

        sage: ag = G.automorphism_group()
        sage: ag.is_isomorphic(DihedralGroup(6))
        True
    """
    # Outer hexagon on 0-5, inner vertices 6-11.
    adjacencies = {
        0: [1, 5, 6],
        1: [2, 7],
        2: [3, 8],
        3: [4, 9],
        4: [5, 10],
        5: [11],
        6: [8, 10],
        7: [9, 11],
        8: [10],
        9: [11]}
    # Fixed planar coordinates (1.732... = sqrt(3), 0.866... = sqrt(3)/2).
    coordinates = {
        0: [2, 0],
        1: [1, 1.73205080756888],
        2: [-1, 1.73205080756888],
        3: [-2, 0],
        4: [-1, -1.73205080756888],
        5: [1, -1.73205080756888],
        6: [1, 0],
        7: [0.5, 0.866025403784439],
        8: [-0.5, 0.866025403784439],
        9: [-1, 0],
        10: [-0.5, -0.866025403784439],
        11: [0.5, -0.866025403784439]}
    durer = Graph(adjacencies, name="Durer graph")
    durer.set_pos(coordinates)
    return durer
def DyckGraph():
    """
    Return the Dyck graph.

    For more information, see the `MathWorld article on the Dyck graph
    <http://mathworld.wolfram.com/DyckGraph.html>`_ or the
    :wikipedia:`Dyck_graph`.

    EXAMPLES:

    The Dyck graph was defined by Walther von Dyck in 1881. It has `32` vertices
    and `48` edges, and is a cubic graph (regular of degree `3`)::

        sage: G = graphs.DyckGraph(); G
        Dyck graph: Graph on 32 vertices
        sage: G.order()
        32
        sage: G.size()
        48
        sage: G.is_regular()
        True
        sage: G.is_regular(3)
        True

    It is non-planar and Hamiltonian, as well as bipartite (making it a bicubic
    graph)::

        sage: G.is_planar()
        False
        sage: G.is_hamiltonian()
        True
        sage: G.is_bipartite()
        True

    It has radius `5`, diameter `5`, and girth `6`::

        sage: G.radius()
        5
        sage: G.diameter()
        5
        sage: G.girth()
        6

    Its chromatic number is `2` and its automorphism group is of order `192`::

        sage: G.chromatic_number()
        2
        sage: G.automorphism_group().cardinality()
        192

    It is a non-integral graph as it has irrational eigenvalues::

        sage: G.characteristic_polynomial().factor()
        (x - 3) * (x + 3) * (x - 1)^9 * (x + 1)^9 * (x^2 - 5)^6

    It is a toroidal graph, and its embedding on a torus is dual to an embedding
    of the Shrikhande graph (:meth:`ShrikhandeGraph
    <GraphGenerators.ShrikhandeGraph>`).
    """
    # Layout: four concentric circles of 8 vertices each; ring k (k = 0..3)
    # holds vertices 8*k .. 8*k+7 at radius 1, 0.75, 0.50, 0.25 respectively.
    pos_dict = {}
    for i in range(8):
        pos_dict[i] = [float(cos((2*i) * pi/8)),
                       float(sin((2*i) * pi/8))]
        pos_dict[8 + i] = [0.75 * pos_dict[i][0],
                           0.75 * pos_dict[i][1]]
        pos_dict[16 + i] = [0.50 * pos_dict[i][0],
                            0.50 * pos_dict[i][1]]
        pos_dict[24 + i] = [0.25 * pos_dict[i][0],
                            0.25 * pos_dict[i][1]]
    # Adjacencies are written in octal on purpose: in 0Okp the high digit k
    # is the ring (0-3) and the low digit p is the position (0-7) on that
    # ring, e.g. 0O27 == 23 is position 7 on ring 2.
    edge_dict = {
        0O00: [0O07, 0O01, 0O10], 0O10: [0O00, 0O27, 0O21],
        0O01: [0O00, 0O02, 0O11], 0O11: [0O01, 0O20, 0O22],
        0O02: [0O01, 0O03, 0O12], 0O12: [0O02, 0O21, 0O23],
        0O03: [0O02, 0O04, 0O13], 0O13: [0O03, 0O22, 0O24],
        0O04: [0O03, 0O05, 0O14], 0O14: [0O04, 0O23, 0O25],
        0O05: [0O04, 0O06, 0O15], 0O15: [0O05, 0O24, 0O26],
        0O06: [0O05, 0O07, 0O16], 0O16: [0O06, 0O25, 0O27],
        0O07: [0O06, 0O00, 0O17], 0O17: [0O07, 0O26, 0O20],
        0O20: [0O17, 0O11, 0O30], 0O30: [0O20, 0O35, 0O33],
        0O21: [0O10, 0O12, 0O31], 0O31: [0O21, 0O36, 0O34],
        0O22: [0O11, 0O13, 0O32], 0O32: [0O22, 0O37, 0O35],
        0O23: [0O12, 0O14, 0O33], 0O33: [0O23, 0O30, 0O36],
        0O24: [0O13, 0O15, 0O34], 0O34: [0O24, 0O31, 0O37],
        0O25: [0O14, 0O16, 0O35], 0O35: [0O25, 0O32, 0O30],
        0O26: [0O15, 0O17, 0O36], 0O36: [0O26, 0O33, 0O31],
        0O27: [0O16, 0O10, 0O37], 0O37: [0O27, 0O34, 0O32],
        }
    return Graph(edge_dict, pos=pos_dict, name="Dyck graph")
def HortonGraph():
    r"""
    Return the Horton Graph.

    The Horton graph is a cubic 3-connected non-hamiltonian graph. For more
    information, see the :wikipedia:`Horton_graph`.

    EXAMPLES::

        sage: g = graphs.HortonGraph()
        sage: g.order()
        96
        sage: g.size()
        144
        sage: g.radius()
        10
        sage: g.diameter()
        10
        sage: g.girth()
        6
        sage: g.automorphism_group().cardinality()
        96
        sage: g.chromatic_number()
        2
        sage: g.is_hamiltonian()  # not tested -- veeeery long
        False
    """
    g = Graph(name = "Horton Graph")
    # Each group of the 6 groups of vertices is based on the same 3-regular
    # graph.
    from sage.graphs.generators.families import LCFGraph
    lcf = LCFGraph(16, [5, -5], 8)
    # Remove two edges so each copy becomes an open "gadget" with loose ends
    # at vertices 0, 7, 8, 15 that will be wired to the other copies below.
    lcf.delete_edge(15, 0)
    lcf.delete_edge(7, 8)
    # Six labeled copies: vertex (i, u) is vertex u of copy i.
    for i in range(6):
        for u,v in lcf.edge_iterator(labels=False):
            g.add_edge((i, u), (i, v))
    # Modifying the groups and linking them together
    # c0, c1, c2 are three extra hub vertices shared by the three copy pairs.
    c0 = (-1, 0)
    c1 = (-1, 1)
    c2 = (-1, 2)
    for i in range(3):
        # Stitch copy 2i to copy 2i+1 through their loose ends ...
        g.add_edge((2 * i, 0), (2 * i + 1, 7))
        g.add_edge((2 * i + 1, 8), (2 * i, 7))
        g.add_edge((2 * i, 15), (2 * i + 1, 0))
        # ... and attach both copies to the central hubs.
        g.add_edge((2 * i, 8), c1)
        g.add_edge((2 * i + 1, 14), c2)
        g.add_edge((2 * i + 1, 10), c0)
    # Embedding
    # Each copy is drawn on a small circle placed on a larger circle.
    for i in range(6):
        g._circle_embedding([(i, j) for j in range(16)], center=(cos(2 * i * pi / 6), sin(2 * i * pi / 6)), radius=.3)
    # Vertex 15 of each odd copy is never connected above; drop it so the
    # final graph has 6*16 - 3 + 3 = 96 vertices.
    for i in range(3):
        g.delete_vertex((2 * i + 1, 15))
    g._circle_embedding([c0, c1, c2], radius=.2, shift=-0.75)
    # Replace the tuple labels with consecutive integers.
    g.relabel()
    return g
def EllinghamHorton54Graph():
    r"""
    Return the Ellingham-Horton 54-graph.

    For more information, see the :wikipedia:`Ellingham-Horton_graph`.

    EXAMPLES:

    This graph is 3-regular::

        sage: g = graphs.EllinghamHorton54Graph()
        sage: g.is_regular(k=3)
        True

    It is 3-connected and bipartite::

        sage: g.vertex_connectivity()  # not tested - too long
        3
        sage: g.is_bipartite()
        True

    It is not Hamiltonian::

        sage: g.is_hamiltonian()  # not tested - too long
        False

    ... and it has a nice drawing ::

        sage: g.show(figsize=[10, 10])  # not tested - too long

    TESTS::

        sage: g.show()  # long time
    """
    # Hard-coded adjacency lists; vertex groups are documented below.
    edge_dict = {
        0: [1, 11, 15], 1: [2, 47], 2: [3, 13], 3: [4, 8], 4: [5, 15],
        5: [6, 10], 6: [7, 30], 7: [8, 12], 8: [9], 9: [10, 29], 10: [11],
        11: [12], 12: [13], 13: [14], 14: [48, 15], 16: [17, 21, 28],
        17: [24, 29], 18: [19, 23, 30], 19: [20, 31], 20: [32, 21], 21: [33],
        22: [23, 27, 28], 23: [29], 24: [25, 30], 25: [26, 31], 26: [32, 27],
        27: [33], 28: [31], 32: [52], 33: [53], 34: [35, 39, 46], 35: [42, 47],
        36: [48, 37, 41], 37: [49, 38], 38: [50, 39], 39: [51],
        40: [41, 45, 46], 41: [47], 42: [48, 43], 43: [49, 44], 44: [50, 45],
        45: [51], 46: [49], 50: [52], 51: [53], 52: [53]}
    g = Graph(data=edge_dict, format="dict_of_lists",
              name="Ellingham-Horton 54-graph")
    # The set of vertices on top is 0..15
    # Bottom left is 16..33
    # Bottom right is 34..51
    # The two other vertices are 52, 53
    # Top
    g._circle_embedding(list(range(16)), center=(0, .5), shift=.5, radius=.5)
    # Bottom-left: three concentric circles of six vertices each.
    g._circle_embedding(list(range(16, 22)), center=(-1.5, -1))
    g._circle_embedding(list(range(22, 28)), center=(-1.5, -1), radius=.5)
    g._circle_embedding(list(range(28, 34)), center=(-1.5, -1), radius=.7)
    # Bottom right: same three-ring layout, mirrored.
    g._circle_embedding(list(range(34, 40)), center=(1.5, -1))
    g._circle_embedding(list(range(40, 46)), center=(1.5, -1), radius=.5)
    g._circle_embedding(list(range(46, 52)), center=(1.5, -1), radius=.7)
    # Manual tweaks to a few positions for a cleaner drawing.
    d = g.get_pos()
    d[52] = (-.3, -2.5)
    d[53] = (.3, -2.5)
    d[31] = (-2.2, -.9)
    d[28] = (-.8, -.9)
    d[46] = (2.2, -.9)
    d[49] = (.8, -.9)
    return g
def EllinghamHorton78Graph():
    r"""
    Return the Ellingham-Horton 78-graph.

    For more information, see the :wikipedia:`Ellingham%E2%80%93Horton_graph`

    EXAMPLES:

    This graph is 3-regular::

        sage: g = graphs.EllinghamHorton78Graph()
        sage: g.is_regular(k=3)
        True

    It is 3-connected and bipartite::

        sage: g.vertex_connectivity()  # not tested - too long
        3
        sage: g.is_bipartite()
        True

    It is not Hamiltonian::

        sage: g.is_hamiltonian()  # not tested - too long
        False

    ... and it has a nice drawing ::

        sage: g.show(figsize=[10,10])  # not tested - too long

    TESTS::

        sage: g.show(figsize=[10, 10])  # not tested - too long
    """
    # Hard-coded adjacency lists on vertices 0..77.
    g = Graph({
            0: [1, 5, 60], 1: [2, 12], 2: [3, 7], 3: [4, 14], 4: [5, 9],
            5: [6], 6: [7, 11], 7: [15], 8: [9, 13, 22], 9: [10],
            10: [11, 72], 11: [12], 12: [13], 13: [14], 14: [72],
            15: [16, 20], 16: [17, 27], 17: [18, 22], 18: [19, 29],
            19: [20, 24], 20: [21], 21: [22, 26], 23: [24, 28, 72],
            24: [25], 25: [26, 71], 26: [27], 27: [28], 28: [29],
            29: [69], 30: [31, 35, 52], 31: [32, 42], 32: [33, 37],
            33: [34, 43], 34: [35, 39], 35: [36], 36: [41, 63],
            37: [65, 66], 38: [39, 59, 74], 39: [40], 40: [41, 44],
            41: [42], 42: [74], 43: [44, 74], 44: [45], 45: [46, 50],
            46: [47, 57], 47: [48, 52], 48: [49, 75], 49: [50, 54],
            50: [51], 51: [52, 56], 53: [54, 58, 73], 54: [55],
            55: [56, 59], 56: [57], 57: [58], 58: [75], 59: [75],
            60: [61, 64], 61: [62, 71], 62: [63, 77], 63: [67],
            64: [65, 69], 65: [77], 66: [70, 73], 67: [68, 73],
            68: [69, 76], 70: [71, 76], 76: [77]}, pos={})
    # Four circular clusters of 15-16 vertices each.
    g._circle_embedding(list(range(15)), center=(-2.5, 1.5))
    g._circle_embedding(list(range(15, 30)), center=(-2.5, -1.5))
    g._circle_embedding([30, 31, 32, 33, 34, 35, 36, 37, 38, 39, 40, 41,
        42, 74, 43, 44], center=(2.5, 1.5))
    g._circle_embedding([45, 46, 47, 48, 49, 50, 51, 52, 53, 54, 55, 56,
        57, 58, 75, 59], center=(2.5, -1.5))
    # Manual tweaks to a few positions for a cleaner drawing.
    d = g.get_pos()
    d[76] = (-.2, -.1)
    d[77] = (.2, .1)
    d[38] = (2.2, .1)
    d[52] = (2.3, -.1)
    d[15] = (-2.1, -.1)
    d[72] = (-2.1, .1)
    # Connector vertices laid out on straight segments between the clusters.
    g._line_embedding([60, 61, 62, 63], first=(-1, 2), last=(1, 2))
    g._line_embedding([64, 65, 37], first=(-.5, 1.5), last=(1.2, 1.5))
    g._line_embedding([66, 73, 67, 68, 69], first=(1.2, -2),
            last=(-.8, -2))
    g._line_embedding([66, 70, 71], first=(.7, -1.5), last=(-1, -1.5))
    g.name("Ellingham-Horton 78-graph")
    return g
def ErreraGraph():
    r"""
    Return the Errera graph.

    For more information, see the :wikipedia:`Errera_graph`.

    EXAMPLES:

    The Errera graph is named after Alfred Errera. It is a planar graph on 17
    vertices and having 45 edges. ::

        sage: G = graphs.ErreraGraph(); G
        Errera graph: Graph on 17 vertices
        sage: G.is_planar()
        True
        sage: G.order()
        17
        sage: G.size()
        45

    The Errera graph is Hamiltonian with radius 3, diameter 4, girth 3, and
    chromatic number 4. ::

        sage: G.is_hamiltonian()
        True
        sage: G.radius()
        3
        sage: G.diameter()
        4
        sage: G.girth()
        3
        sage: G.chromatic_number()
        4

    Each vertex degree is either 5 or 6. That is, if `f` counts the number of
    vertices of degree 5 and `s` counts the number of vertices of degree 6, then
    `f + s` is equal to the order of the Errera graph. ::

        sage: D = G.degree_sequence()
        sage: D.count(5) + D.count(6) == G.order()
        True

    The automorphism group of the Errera graph is isomorphic to the dihedral
    group of order 20. ::

        sage: ag = G.automorphism_group()
        sage: ag.is_isomorphic(DihedralGroup(10))
        True
    """
    # Hard-coded adjacency lists (each edge listed once).
    adjacencies = {
        0: [1, 7, 14, 15, 16],
        1: [2, 9, 14, 15],
        2: [3, 8, 9, 10, 14],
        3: [4, 9, 10, 11],
        4: [5, 10, 11, 12],
        5: [6, 11, 12, 13],
        6: [7, 8, 12, 13, 16],
        7: [13, 15, 16],
        8: [10, 12, 14, 16],
        9: [11, 13, 15],
        10: [12],
        11: [13],
        13: [15],
        14: [16]}
    errera = Graph(adjacencies)
    errera.name("Errera graph")
    return errera
def F26AGraph():
    r"""
    Return the F26A graph.

    The F26A graph is a symmetric bipartite cubic graph with 26 vertices and 39
    edges. For more information, see the :wikipedia:`F26A_graph`.

    EXAMPLES::

        sage: g = graphs.F26AGraph(); g
        F26A Graph: Graph on 26 vertices
        sage: g.order(),g.size()
        (26, 39)
        sage: g.automorphism_group().cardinality()
        78
        sage: g.girth()
        6
        sage: g.is_bipartite()
        True
        sage: g.characteristic_polynomial().factor()
        (x - 3) * (x + 3) * (x^4 - 5*x^2 + 3)^6
    """
    from sage.graphs.generators.families import LCFGraph
    # LCF notation [7, -7]^13 on 26 vertices.
    f26a = LCFGraph(26, [7, -7], 13)
    f26a.name("F26A Graph")
    return f26a
def FlowerSnark():
    """
    Return a Flower Snark.

    A flower snark has 20 vertices. It is part of the class of biconnected cubic
    graphs with edge chromatic number = 4, known as snarks. (i.e.: the Petersen
    graph). All snarks are not Hamiltonian, non-planar and have Petersen graph
    graph minors. See the :wikipedia:`Flower_snark`.

    PLOTTING: Upon construction, the position dictionary is filled to override
    the spring-layout algorithm. By convention, the nodes are drawn 0-14 on the
    outer circle, and 15-19 in an inner pentagon.

    EXAMPLES: Inspect a flower snark::

        sage: F = graphs.FlowerSnark()
        sage: F
        Flower Snark: Graph on 20 vertices
        sage: F.graph6_string()
        'ShCGHC@?GGg@?@?Gp?K??C?CA?G?_G?Cc'

    Now show it::

        sage: F.show()  # long time
    """
    positions = {}
    # Outer 15-cycle at radius 2.5, inner pentagon at radius 1.
    for v in range(15):
        angle = (pi / 2) + ((2 * pi) / 15) * v
        positions[v] = (float(2.5 * cos(angle)), float(2.5 * sin(angle)))
    for v in range(15, 20):
        angle = (pi / 2) + ((2 * pi) / 5) * v
        positions[v] = (float(cos(angle)), float(sin(angle)))
    adjacencies = {0: [1, 14, 15], 1: [2, 11], 2: [3, 7], 3: [2, 4, 16],
                   4: [5, 14], 5: [6, 10], 6: [5, 7, 17], 8: [7, 9, 13],
                   9: [10, 18], 11: [10, 12], 12: [13, 19], 13: [14],
                   15: [19], 16: [15, 17], 18: [17, 19]}
    return Graph(adjacencies, pos=positions, name="Flower Snark")
def FolkmanGraph():
    """
    Return the Folkman graph.

    See the :wikipedia:`Folkman_graph`.

    EXAMPLES::

        sage: g = graphs.FolkmanGraph()
        sage: g.order()
        20
        sage: g.size()
        40
        sage: g.diameter()
        4
        sage: g.girth()
        4
        sage: g.charpoly().factor()
        (x - 4) * (x + 4) * x^10 * (x^2 - 6)^4
        sage: g.chromatic_number()
        2
        sage: g.is_eulerian()
        True
        sage: g.is_hamiltonian()
        True
        sage: g.is_vertex_transitive()
        False
        sage: g.is_bipartite()
        True
    """
    from sage.graphs.generators.families import LCFGraph
    # LCF notation [5, -7, -7, 5]^5 on 20 vertices.
    folkman = LCFGraph(20, [5, -7, -7, 5], 5)
    folkman.name("Folkman Graph")
    return folkman
def FosterGraph():
    """
    Return the Foster graph.

    See the :wikipedia:`Foster_graph`.

    EXAMPLES::

        sage: g = graphs.FosterGraph()
        sage: g.order()
        90
        sage: g.size()
        135
        sage: g.diameter()
        8
        sage: g.girth()
        10
        sage: g.automorphism_group().cardinality()
        4320
        sage: g.is_hamiltonian()
        True
    """
    from sage.graphs.generators.families import LCFGraph
    # LCF notation [17, -9, 37, -37, 9, -17]^15 on 90 vertices.
    foster = LCFGraph(90, [17, -9, 37, -37, 9, -17], 15)
    foster.name("Foster Graph")
    return foster
def FranklinGraph():
    r"""
    Return the Franklin graph.

    For more information, see the :wikipedia:`Franklin_graph`.

    EXAMPLES:

    The Franklin graph is named after Philip Franklin. It is a 3-regular graph
    on 12 vertices and having 18 edges. ::

        sage: G = graphs.FranklinGraph(); G
        Franklin graph: Graph on 12 vertices
        sage: G.is_regular(3)
        True
        sage: G.order()
        12
        sage: G.size()
        18

    The Franklin graph is a Hamiltonian, bipartite graph with radius 3, diameter
    3, and girth 4. ::

        sage: G.is_hamiltonian()
        True
        sage: G.is_bipartite()
        True
        sage: G.radius()
        3
        sage: G.diameter()
        3
        sage: G.girth()
        4

    It is a perfect, triangle-free graph having chromatic number 2. ::

        sage: G.is_perfect()
        True
        sage: G.is_triangle_free()
        True
        sage: G.chromatic_number()
        2
    """
    # Outer hexagon on 0-5, inner hexagon on 6-11.
    adjacencies = {
        0: [1, 5, 6],
        1: [2, 7],
        2: [3, 8],
        3: [4, 9],
        4: [5, 10],
        5: [11],
        6: [7, 9],
        7: [10],
        8: [9, 11],
        10: [11]}
    # Fixed planar coordinates (1.732... = sqrt(3), 0.866... = sqrt(3)/2).
    coordinates = {
        0: [2, 0],
        1: [1, 1.73205080756888],
        2: [-1, 1.73205080756888],
        3: [-2, 0],
        4: [-1, -1.73205080756888],
        5: [1, -1.73205080756888],
        6: [1, 0],
        7: [0.5, 0.866025403784439],
        8: [-0.5, 0.866025403784439],
        9: [-1, 0],
        10: [-0.5, -0.866025403784439],
        11: [0.5, -0.866025403784439]}
    franklin = Graph(adjacencies, name="Franklin graph")
    franklin.set_pos(coordinates)
    return franklin
def FruchtGraph():
    """
    Return a Frucht Graph.

    A Frucht graph has 12 nodes and 18 edges. It is the smallest cubic identity
    graph. It is planar and it is Hamiltonian. See the :wikipedia:`Frucht_graph`.

    PLOTTING: Upon construction, the position dictionary is filled to override
    the spring-layout algorithm. By convention, the first seven nodes are on the
    outer circle, with the next four on an inner circle and the last in the
    center.

    EXAMPLES::

        sage: FRUCHT = graphs.FruchtGraph()
        sage: FRUCHT
        Frucht graph: Graph on 12 vertices
        sage: FRUCHT.graph6_string()
        'KhCKM?_EGK?L'
        sage: (graphs.FruchtGraph()).show()  # long time

    TESTS::

        sage: import networkx
        sage: G = graphs.FruchtGraph()
        sage: G.is_isomorphic(Graph(networkx.frucht_graph()))
        True
    """
    adjacencies = {0: [1, 6, 7], 1: [2, 7], 2: [3, 8], 3: [4, 9], 4: [5, 9],
                   5: [6, 10], 6: [10], 7: [11], 8: [9, 11], 10: [11]}
    # Inner square (7-10) and center (11) at fixed positions ...
    positions = {7: (0, 1), 8: (-1, 0), 9: (0, -1), 10: (1, 0), 11: (0, 0)}
    # ... with the outer heptagon (0-6) on a circle of radius 2.
    for v in range(7):
        angle = (pi / 2) + ((2 * pi) / 7) * v
        positions[v] = (float(2 * cos(angle)), float(2 * sin(angle)))
    return Graph(adjacencies, pos=positions, name="Frucht graph")
def GoldnerHararyGraph():
    r"""
    Return the Goldner-Harary graph.

    For more information, see the :wikipedia:`Goldner%E2%80%93Harary_graph`.

    EXAMPLES:

    The Goldner-Harary graph is named after A. Goldner and Frank Harary. It is
    a planar graph having 11 vertices and 27 edges. ::

        sage: G = graphs.GoldnerHararyGraph(); G
        Goldner-Harary graph: Graph on 11 vertices
        sage: G.is_planar()
        True
        sage: G.order()
        11
        sage: G.size()
        27

    The Goldner-Harary graph is chordal with radius 2, diameter 2, and girth
    3. ::

        sage: G.is_chordal()
        True
        sage: G.radius()
        2
        sage: G.diameter()
        2
        sage: G.girth()
        3

    Its chromatic number is 4 and its automorphism group is isomorphic to the
    dihedral group `D_6`. ::

        sage: G.chromatic_number()
        4
        sage: ag = G.automorphism_group()
        sage: ag.is_isomorphic(DihedralGroup(6))
        True
    """
    # Hard-coded adjacency lists (each edge listed once).
    adjacencies = {
        0: [1, 3, 4],
        1: [2, 3, 4, 5, 6, 7, 10],
        2: [3, 7],
        3: [7, 8, 9, 10],
        4: [3, 5, 9, 10],
        5: [10],
        6: [7, 10],
        7: [8, 10],
        8: [10],
        9: [10]}
    # Fixed planar coordinates.
    coordinates = {
        0: (-2, 0),
        1: (0, 1.5),
        2: (2, 0),
        3: (0, -1.5),
        4: (-1.5, 0),
        5: (-0.5, 0.5),
        6: (0.5, 0.5),
        7: (1.5, 0),
        8: (0.5, -0.5),
        9: (-0.5, -0.5),
        10: (0, 0)}
    goldner_harary = Graph(adjacencies, name="Goldner-Harary graph")
    goldner_harary.set_pos(coordinates)
    return goldner_harary
def GolombGraph():
    r"""
    Return the Golomb graph.

    See the :wikipedia:`Golomb_graph` for more information.

    EXAMPLES:

    The Golomb graph is a planar and Hamiltonian graph with 10 vertices
    and 18 edges. It has chromatic number 4, diameter 3, radius 2 and
    girth 3. It can be drawn in the plane as a unit distance graph::

        sage: G = graphs.GolombGraph(); G
        Golomb graph: Graph on 10 vertices
        sage: pos = G.get_pos()
        sage: dist2 = lambda u,v:(u[0]-v[0])**2 + (u[1]-v[1])**2
        sage: all(dist2(pos[u], pos[v]) == 1 for u, v in G.edge_iterator(labels=None))
        True
    """
    # Hard-coded adjacency lists (each edge listed once).
    adjacencies = {
        0: [1, 2, 3],
        1: [2, 5],
        2: [7],
        3: [4, 8, 9],
        4: [5, 9],
        5: [6, 9],
        6: [7, 9],
        7: [8, 9],
        8: [9]}
    # Exact symbolic unit-distance coordinates, so each edge has length
    # exactly 1 (checked by the doctest above).
    coordinates = {
        0: [QQ('1/6'), QQ('1/6') * sqrt(11)],
        1: [QQ('1/12') * sqrt(33) - QQ('1/12'), - sqrt(QQ('1/72') * sqrt(33) + QQ('7/72'))],
        2: [- QQ('1/12') * sqrt(33) - QQ('1/12'), - sqrt(- QQ('1/72') * sqrt(33) + QQ('7/72'))],
        3: [1, 0],
        4: [QQ('1/2'), - QQ('1/2') * sqrt(3)],
        5: [- QQ('1/2'), - QQ('1/2') * sqrt(3)],
        6: [-1, 0],
        7: [- QQ('1/2'), QQ('1/2') * sqrt(3)],
        8: [QQ('1/2'), QQ('1/2') * sqrt(3)],
        9: [0, 0]}
    golomb = Graph(adjacencies, name="Golomb graph")
    golomb.set_pos(coordinates)
    return golomb
def GrayGraph(embedding=1):
    r"""
    Return the Gray graph.

    See the :wikipedia:`Gray_graph`.

    INPUT:

    - ``embedding`` -- integer (default: ``1``); two embeddings are available,
      and can be selected by setting ``embedding`` to 1 or 2

    EXAMPLES::

        sage: g = graphs.GrayGraph()
        sage: g.order()
        54
        sage: g.size()
        81
        sage: g.girth()
        8
        sage: g.diameter()
        6
        sage: g.show(figsize=[10, 10])  # long time
        sage: graphs.GrayGraph(embedding=2).show(figsize=[10, 10])  # long time

    TESTS::

        sage: graphs.GrayGraph(embedding=3)
        Traceback (most recent call last):
        ...
        ValueError: the value of embedding must be 1 or 2
    """
    from sage.graphs.generators.families import LCFGraph
    # LCF notation [-25, 7, -7, 13, -13, 25]^9 on 54 vertices.
    g = LCFGraph(54, [-25, 7, -7, 13, -13, 25], 9)
    g.name("Gray graph")

    if embedding == 1:
        # Split the vertices into the two automorphism-group orbits and draw
        # them as two concentric circles.
        o = g.automorphism_group(orbits=True)[-1]
        g._circle_embedding(o[0], center=(0, 0), radius=1)
        g._circle_embedding(o[1], center=(0, 0), radius=.6, shift=-.5)
    elif embedding != 2:
        # Only embeddings 1 and 2 are implemented, so the error message must
        # not advertise 3 as a valid value.
        raise ValueError("the value of embedding must be 1 or 2")
    return g
def GrotzschGraph():
    r"""
    Return the Grötzsch graph.

    The Grötzsch graph is an example of a triangle-free graph with chromatic
    number equal to 4. For more information, see the
    :wikipedia:`Gr%C3%B6tzsch_graph`.

    EXAMPLES:

    The Grötzsch graph is named after Herbert Grötzsch. It is a Hamiltonian
    graph with 11 vertices and 20 edges. ::

        sage: G = graphs.GrotzschGraph(); G
        Grotzsch graph: Graph on 11 vertices
        sage: G.is_hamiltonian()
        True
        sage: G.order()
        11
        sage: G.size()
        20

    The Grötzsch graph is triangle-free and having radius 2, diameter 2, and
    girth 4. ::

        sage: G.is_triangle_free()
        True
        sage: G.radius()
        2
        sage: G.diameter()
        2
        sage: G.girth()
        4

    Its chromatic number is 4 and its automorphism group is isomorphic to the
    dihedral group `D_5`. ::

        sage: G.chromatic_number()
        4
        sage: ag = G.automorphism_group()
        sage: ag.is_isomorphic(DihedralGroup(5))
        True
    """
    g = Graph()
    g.add_vertices(range(11))
    # Center vertex 0 joined to the inner cycle 1..5.
    edge_list = [(0, u) for u in range(1, 6)]
    edge_list.append((10, 6))
    # Outer vertices 6..10 joined to each other and to the inner cycle.
    for u in range(6, 10):
        edge_list.extend([(u, u + 1), (u, u - 4)])
    edge_list.append((10, 1))
    edge_list.extend((u, u - 6) for u in range(7, 11))
    edge_list.append((6, 5))
    g.add_edges(edge_list)
    # Layout: center at the origin, inner cycle at radius 5, outer at 10.
    pos = {0: (0, 0)}
    for u in range(1, 6):
        theta = (u - 1) * 2 * pi / 5
        inner = (float(5 * sin(theta)), float(5 * cos(theta)))
        pos[u] = inner
        pos[u + 5] = (2 * inner[0], 2 * inner[1])
    g.set_pos(pos)
    g.name("Grotzsch graph")
    return g
def HeawoodGraph():
    """
    Return a Heawood graph.

    The Heawood graph is a cage graph that has 14 nodes. It is a cubic symmetric
    graph. (See also the Möbius-Kantor graph). It is nonplanar and
    Hamiltonian. It has diameter = 3, radius = 3, girth = 6, chromatic number =
    2. It is 4-transitive but not 5-transitive. See the
    :wikipedia:`Heawood_graph`.

    PLOTTING: Upon construction, the position dictionary is filled to override
    the spring-layout algorithm. By convention, the nodes are positioned in a
    circular layout with the first node appearing at the top, and then
    continuing counterclockwise.

    EXAMPLES::

        sage: H = graphs.HeawoodGraph()
        sage: H
        Heawood graph: Graph on 14 vertices
        sage: H.graph6_string()
        'MhEGHC@AI?_PC@_G_'
        sage: (graphs.HeawoodGraph()).show()  # long time

    TESTS::

        sage: import networkx
        sage: G = graphs.HeawoodGraph()
        sage: G.is_isomorphic(Graph(networkx.heawood_graph()))
        True
    """
    adjacencies = {0: [1, 5, 13], 1: [2, 10], 2: [3, 7], 3: [4, 12],
                   4: [5, 9], 5: [6], 6: [7, 11], 7: [8], 8: [9, 13],
                   9: [10], 10: [11], 11: [12], 12: [13]}
    # All 14 vertices on the unit circle, starting at the top.
    positions = {}
    for v in range(14):
        angle = (pi / 2) + (pi / 7) * v
        positions[v] = (float(cos(angle)), float(sin(angle)))
    return Graph(adjacencies, pos=positions, name="Heawood graph")
def HerschelGraph():
    r"""
    Return the Herschel graph.

    For more information, see the :wikipedia:`Herschel_graph`.

    EXAMPLES:

    The Herschel graph is named after Alexander Stewart Herschel. It is a
    planar, bipartite graph with 11 vertices and 18 edges. ::

        sage: G = graphs.HerschelGraph(); G
        Herschel graph: Graph on 11 vertices
        sage: G.is_planar()
        True
        sage: G.is_bipartite()
        True
        sage: G.order()
        11
        sage: G.size()
        18

    The Herschel graph is a perfect graph with radius 3, diameter 4, and girth
    4. ::

        sage: G.is_perfect()
        True
        sage: G.radius()
        3
        sage: G.diameter()
        4
        sage: G.girth()
        4

    Its chromatic number is 2 and its automorphism group is isomorphic to the
    dihedral group `D_6`. ::

        sage: G.chromatic_number()
        2
        sage: ag = G.automorphism_group()
        sage: ag.is_isomorphic(DihedralGroup(6))
        True
    """
    # Outer square on 0-3, inner hexagon on 4-9, center 10.
    adjacencies = {
        0: [1, 3, 4],
        1: [2, 5, 6],
        2: [3, 7],
        3: [8, 9],
        4: [5, 9],
        5: [10],
        6: [7, 10],
        7: [8],
        8: [10],
        9: [10]}
    # Fixed planar coordinates (0.866... = sqrt(3)/2).
    coordinates = {
        0: [2, 0],
        1: [0, 2],
        2: [-2, 0],
        3: [0, -2],
        4: [1, 0],
        5: [0.5, 0.866025403784439],
        6: [-0.5, 0.866025403784439],
        7: [-1, 0],
        8: [-0.5, -0.866025403784439],
        9: [0.5, -0.866025403784439],
        10: [0, 0]}
    herschel = Graph(adjacencies, name="Herschel graph")
    herschel.set_pos(coordinates)
    return herschel
def HigmanSimsGraph(relabel=True):
    r"""
    Return the Higman-Sims graph.

    The Higman-Sims graph is a remarkable strongly regular graph of degree 22 on
    100 vertices. For example, it can be split into two sets of 50 vertices
    each, so that each half induces a subgraph isomorphic to the
    Hoffman-Singleton graph (:meth:`~HoffmanSingletonGraph`). This can be done
    in 352 ways (see `Higman-Sims graph
    <https://www.win.tue.nl/~aeb/graphs/Higman-Sims.html>`_ by Andries
    E. Brouwer, accessed 24 October 2009.)

    Its most famous property is that the automorphism group has an index 2
    subgroup which is one of the 26 sporadic groups. [HS1968]_

    The construction used here follows [Haf2004]_.

    See also the :wikipedia:`Higman–Sims_graph`.

    INPUT:

    - ``relabel`` - default: ``True``.  If ``True`` the vertices will be labeled
      with consecutive integers.  If ``False`` the labels are strings that are
      three digits long. "xyz" means the vertex is in group x (zero through
      three), pentagon or pentagram y (zero through four), and is vertex z (zero
      through four) of that pentagon or pentagram. See [Haf2004]_ for more.

    OUTPUT:

    The Higman-Sims graph.

    EXAMPLES:

    A split into the first 50 and last 50 vertices will induce two copies of the
    Hoffman-Singleton graph, and we illustrate another such split, which is
    obvious based on the construction used. ::

        sage: H = graphs.HigmanSimsGraph()
        sage: A = H.subgraph(range(0,50))
        sage: B = H.subgraph(range(50,100))
        sage: K = graphs.HoffmanSingletonGraph()
        sage: K.is_isomorphic(A) and K.is_isomorphic(B)
        True
        sage: C = H.subgraph(range(25,75))
        sage: D = H.subgraph(list(range(0,25))+list(range(75,100)))
        sage: K.is_isomorphic(C) and K.is_isomorphic(D)
        True

    The automorphism group contains only one nontrivial proper normal subgroup,
    which is of index 2 and is simple.  It is known as the Higman-Sims group. ::

        sage: H = graphs.HigmanSimsGraph()
        sage: G = H.automorphism_group()
        sage: g=G.order(); g
        88704000
        sage: K = G.normal_subgroups()[1]
        sage: K.is_simple()
        True
        sage: g//K.order()
        2

    AUTHOR:

    - Rob Beezer (2009-10-24)
    """
    HS = Graph()
    HS.name('Higman-Sims graph')
    # Four groups of either five pentagons, or five pentagrams 4 x 5 x 5 = 100
    # vertices
    # First digit is "group", second is "penta{gon|gram}", third is "vertex"
    vlist = ['%d%d%d'%(g,p,v)
             for g in range(4) for p in range(5) for v in range(5)]
    for avertex in vlist:
        HS.add_vertex(avertex)
    # Edges: Within groups 0 and 2, joined as pentagons
    # Edges: Within groups 1 and 3, joined as pentagrams
    # (shift 1 joins consecutive vertices -> pentagon; shift 2 -> pentagram)
    for g in range(4):
        shift = 1
        if g in [1,3]:
            shift += 1
        for p in range(5):
            for v in range(5):
                HS.add_edge(('%d%d%d'%(g,p,v), '%d%d%d'%(g,p,(v+shift)%5)))
    # The remaining edges join vertices of different groups following the
    # mod-5 polynomial incidence rules of [Haf2004]_.
    # Edges: group 0 to group 1
    for x in range(5):
        for m in range(5):
            for c in range(5):
                y = (m*x+c)%5
                HS.add_edge(('0%d%d'%(x,y), '1%d%d'%(m,c)))
    # Edges: group 1 to group 2
    for m in range(5):
        for A in range(5):
            for B in range(5):
                c = (2*(m-A)*(m-A)+B)%5
                HS.add_edge(('1%d%d'%(m,c), '2%d%d'%(A,B)))
    # Edges: group 2 to group 3
    for A in range(5):
        for a in range(5):
            for b in range(5):
                B = (2*A*A+3*a*A-a*a+b)%5
                HS.add_edge(('2%d%d'%(A,B), '3%d%d'%(a,b)))
    # Edges: group 3 to group 0
    for a in range(5):
        for b in range(5):
            for x in range(5):
                y = ((x-a)*(x-a)+b)%5
                HS.add_edge(('3%d%d'%(a,b), '0%d%d'%(x,y)))
    # Edges: group 0 to group 2
    for x in range(5):
        for A in range(5):
            for B in range(5):
                y = (3*x*x+A*x+B+1)%5
                HS.add_edge(('0%d%d'%(x,y), '2%d%d'%(A,B)))
                y = (3*x*x+A*x+B-1)%5
                HS.add_edge(('0%d%d'%(x,y), '2%d%d'%(A,B)))
    # Edges: group 1 to group 3
    for m in range(5):
        for a in range(5):
            for b in range(5):
                c = (m*(m-a)+b+2)%5
                HS.add_edge(('1%d%d'%(m,c), '3%d%d'%(a,b)))
                c = (m*(m-a)+b-2)%5
                HS.add_edge(('1%d%d'%(m,c), '3%d%d'%(a,b)))
    # Rename to integer vertex labels, creating dictionary
    # Or not, and create identity mapping
    if relabel:
        vmap = HS.relabel(range(100), return_map=True)
    else:
        vmap = {v: v for v in vlist}
    # Layout vertices in a circle
    # In the order given in vlist
    # Using labels from vmap
    pos_dict = {}
    for i in range(100):
        x = float(cos((pi/2) + ((2*pi)/100)*i))
        y = float(sin((pi/2) + ((2*pi)/100)*i))
        pos_dict[vmap[vlist[i]]] = (x,y)
    HS.set_pos(pos_dict)
    return HS
def HoffmanSingletonGraph():
    r"""
    Return the Hoffman-Singleton graph.

    The Hoffman-Singleton graph is the Moore graph of degree 7, diameter 2 and
    girth 5. The Hoffman-Singleton theorem states that any Moore graph with
    girth 5 must have degree 2, 3, 7 or 57. The first three respectively are the
    pentagon, the Petersen graph, and the Hoffman-Singleton graph. The existence
    of a Moore graph with girth 5 and degree 57 is still open.

    A Moore graph is a graph with diameter `d` and girth `2d + 1`. This implies
    that the graph is regular, and distance regular.

    For more details, see [GR2001]_ and the
    :wikipedia:`Hoffman–Singleton_graph`.

    PLOTTING: Upon construction, the position dictionary is filled to override
    the spring-layout algorithm. A novel algorithm written by Tom Boothby gives
    a random layout which is pleasing to the eye.

    EXAMPLES::

        sage: HS = graphs.HoffmanSingletonGraph()
        sage: Set(HS.degree())
        {7}
        sage: HS.girth()
        5
        sage: HS.diameter()
        2
        sage: HS.num_verts()
        50

    Note that you get a different layout each time you create the graph. ::

        sage: HS.layout()[1]
        (-0.844..., 0.535...)
        sage: HS = graphs.HoffmanSingletonGraph()
        sage: HS.layout()[1]
        (-0.904..., 0.425...)
    """
    # Five pentagons 'q*' (cycled 0-1-2-3-4) and five pentagrams 'p*'
    # (cycled 0-2-4-1-3); vertex 'qij' is vertex j of pentagon i.
    H = Graph({
    'q00':['q01'], 'q01':['q02'], 'q02':['q03'], 'q03':['q04'], 'q04':['q00'],
    'q10':['q11'], 'q11':['q12'], 'q12':['q13'], 'q13':['q14'], 'q14':['q10'],
    'q20':['q21'], 'q21':['q22'], 'q22':['q23'], 'q23':['q24'], 'q24':['q20'],
    'q30':['q31'], 'q31':['q32'], 'q32':['q33'], 'q33':['q34'], 'q34':['q30'],
    'q40':['q41'], 'q41':['q42'], 'q42':['q43'], 'q43':['q44'], 'q44':['q40'],
    'p00':['p02'], 'p02':['p04'], 'p04':['p01'], 'p01':['p03'], 'p03':['p00'],
    'p10':['p12'], 'p12':['p14'], 'p14':['p11'], 'p11':['p13'], 'p13':['p10'],
    'p20':['p22'], 'p22':['p24'], 'p24':['p21'], 'p21':['p23'], 'p23':['p20'],
    'p30':['p32'], 'p32':['p34'], 'p34':['p31'], 'p31':['p33'], 'p33':['p30'],
    'p40':['p42'], 'p42':['p44'], 'p44':['p41'], 'p41':['p43'], 'p43':['p40']})
    # Join vertex i of pentagram j to vertex (i + j*k) mod 5 of pentagon k.
    for j in range(5):
        for i in range(5):
            for k in range(5):
                con = (i+j*k)%5
                H.add_edge(('q%d%d'%(k,con),'p%d%d'%(j,i)))
    H.name('Hoffman-Singleton graph')
    # Boothby's random layout: pick random orderings of the pentagon /
    # pentagram vertices, then chain them together by following edges
    # between the two families, collecting the 50 vertices in list D.
    from sage.combinat.permutation import Permutations
    from sage.misc.prandom import randint
    P = Permutations([1,2,3,4])
    qpp = [0] + list(P[randint(0,23)])
    ppp = [0] + list(P[randint(0,23)])
    qcycle = lambda i,s : ['q%s%s'%(i,(j+s)%5) for j in qpp]
    pcycle = lambda i,s : ['p%s%s'%(i,(j+s)%5) for j in ppp]
    # NOTE(review): renamed locals only -- `l` (ambiguous, PEP 8 E741) to
    # `grp` and `map` (shadows the builtin) to `vmap`; logic is unchanged.
    grp = 0
    s = 0
    D = []
    while grp < 5:
        for q in qcycle(grp,s):
            D.append(q)
        vv = 'p%s'%q[1]
        s = int([v[-1] for v in H.neighbors(q) if v[:2] == vv][0])
        for p in pcycle(grp,s):
            D.append(p)
        vv = 'q%s'%(int(p[1])+1)
        v = [v[-1] for v in H.neighbors(p) if v[:2] == vv]
        if len(v):
            s = int(v[0])
        grp+=1
    # Place the vertices on the unit circle in the order computed above.
    vmap = H.relabel(range(50), return_map=True)
    pos_dict = {}
    for i in range(50):
        x = float(cos((pi/2) + ((2*pi)/50)*i))
        y = float(sin((pi/2) + ((2*pi)/50)*i))
        pos_dict[vmap[D[i]]] = (x,y)
    H.set_pos(pos_dict)
    return H
def HoffmanGraph():
    r"""
    Return the Hoffman Graph.

    See the :wikipedia:`Hoffman_graph`.

    EXAMPLES::

        sage: g = graphs.HoffmanGraph()
        sage: g.is_bipartite()
        True
        sage: g.is_hamiltonian()  # long time
        True
        sage: g.radius()
        3
        sage: g.diameter()
        4
        sage: g.automorphism_group().cardinality()
        48
    """
    # Hard-coded adjacency lists on 16 vertices (each edge listed once).
    hoffman = Graph({
        0: [1, 7, 8, 13],
        1: [2, 9, 14],
        2: [3, 8, 10],
        3: [4, 9, 15],
        4: [5, 10, 11],
        5: [6, 12, 14],
        6: [7, 11, 13],
        7: [12, 15],
        8: [12, 14],
        9: [11, 13],
        10: [12, 15],
        11: [14],
        13: [15]})
    # Layout: outer circle 0-7, inner circle 8-13, pair 14-15 near center.
    hoffman._circle_embedding(list(range(8)))
    hoffman._circle_embedding(list(range(8, 14)), radius=.7, shift=.5)
    hoffman._circle_embedding([14, 15], radius=.1)
    hoffman.name("Hoffman Graph")
    return hoffman
def HoltGraph():
    r"""
    Return the Holt graph (also called the Doyle graph).

    See the :wikipedia:`Holt_graph`.

    EXAMPLES::

        sage: g = graphs.HoltGraph();g
        Holt graph: Graph on 27 vertices
        sage: g.is_regular()
        True
        sage: g.is_vertex_transitive()
        True
        sage: g.chromatic_number()
        3
        sage: g.is_hamiltonian()  # long time
        True
        sage: g.radius()
        3
        sage: g.diameter()
        3
        sage: g.girth()
        5
        sage: g.automorphism_group().cardinality()
        54
    """
    g = Graph(loops=False, name = "Holt graph", pos={})
    # Vertices are pairs (x, y) in Z_9 x Z_3; each vertex is joined to four
    # neighbors obtained by the modular rules below.
    for x in range(9):
        for y in range(3):
            g.add_edge((x,y),((4*x+1)%9,(y-1)%3))
            g.add_edge((x,y),((4*x-1)%9,(y-1)%3))
            g.add_edge((x,y),((7*x+7)%9,(y+1)%3))
            g.add_edge((x,y),((7*x-7)%9,(y+1)%3))
    # Layout: place the nine vertices of each layer y = 0, 1, 2 on a line
    # segment along the unit circle.  Use floor division j//2 so the vertex
    # labels are the int tuples (x, y) created above; the old j/2 (a Python 2
    # leftover) produced float keys like (x, 1.0) that only matched through
    # int/float hash equality.
    for j in range(0,6,2):
        g._line_embedding([(x,j//2) for x in range(9)],
                first=(cos(2*j*pi/6),sin(2*j*pi/6)),
                last=(cos(2*(j+1)*pi/6),sin(2*(j+1)*pi/6)))
    return g
def KrackhardtKiteGraph():
    """
    Return a Krackhardt kite graph with 10 nodes.
    The Krackhardt kite graph was originally developed by David Krackhardt for
    the purpose of studying social networks (see [Kre2002]_ and
    the :wikipedia:`Krackhardt_kite_graph`). It is used to show the distinction
    between: degree centrality, betweeness centrality, and closeness
    centrality. For more information read the plotting section below in
    conjunction with the example.
    PLOTTING: Upon construction, the position dictionary is filled to override
    the spring-layout algorithm. By convention, the graph is drawn left to
    right, in top to bottom row sequence of [2, 3, 2, 1, 1, 1] nodes on each
    row. This places the fourth node (3) in the center of the kite, with the
    highest degree. But the fourth node only connects nodes that are otherwise
    connected, or those in its clique (i.e.: Degree Centrality). The eighth (7)
    node is where the kite meets the tail. It has degree = 3, less than the
    average, but is the only connection between the kite and tail (i.e.:
    Betweenness Centrality). The sixth and seventh nodes (5 and 6) are drawn in
    the third row and have degree = 5. These nodes have the shortest path to all
    other nodes in the graph (i.e.: Closeness Centrality). Please execute the
    example for visualization.
    EXAMPLES:
    Construct and show a Krackhardt kite graph ::
        sage: g = graphs.KrackhardtKiteGraph()
        sage: g.show() # long time
    TESTS::
        sage: import networkx
        sage: G = graphs.KrackhardtKiteGraph()
        sage: G.is_isomorphic(Graph(networkx.krackhardt_kite_graph()))
        True
    """
    # The 18 edges of the kite: a dense "kite" on vertices 0-6 plus the tail
    # 7 - 8 - 9 hanging off vertex 5/6 through vertex 7.
    adjacency = [
        (0, 1), (0, 2), (0, 3), (0, 5),
        (1, 3), (1, 4), (1, 6),
        (2, 3), (2, 5),
        (3, 4), (3, 5), (3, 6),
        (4, 6),
        (5, 6), (5, 7),
        (6, 7),
        (7, 8),
        (8, 9),
    ]
    # Fixed coordinates drawing the kite above its vertical tail.
    coords = [(-1, 4), (1, 4), (-2, 3), (0, 3), (2, 3),
              (-1, 2), (1, 2), (0, 1), (0, 0), (0, -1)]
    positions = {vertex: xy for vertex, xy in enumerate(coords)}
    return Graph(adjacency, pos=positions, name="Krackhardt Kite Graph")
def Klein3RegularGraph():
    r"""
    Return the Klein 3-regular graph.
    The cubic Klein graph has 56 vertices and can be embedded on a surface of
    genus 3. It is the dual of
    :meth:`~sage.graphs.graph_generators.GraphGenerators.Klein7RegularGraph`. For
    more information, see the :wikipedia:`Klein_graphs`.
    EXAMPLES::
        sage: g = graphs.Klein3RegularGraph(); g
        Klein 3-regular Graph: Graph on 56 vertices
        sage: g.order(), g.size()
        (56, 84)
        sage: g.girth()
        7
        sage: g.automorphism_group().cardinality()
        336
        sage: g.chromatic_number()
        3
    """
    # The graph is stored as a sparse6 string (leading ':'); loops and
    # multiedges are explicitly disabled.
    g3 = Graph(':w`_GKWDBap`CMWFCpWsQUNdBwwuXPHrg`U`RIqypehVLqgHupYcFJyAv^Prk]'+
               'EcarHwIVHAKh|\\tLVUxT]`ZDTJ{Af[o_AuKs{r_?ef',
               loops=False, multiedges=False)
    # Hand-tuned circular ordering of the 56 vertices for a readable drawing.
    g3._circle_embedding([0, 2, 3, 4, 6, 8, 14, 1, 37, 30, 34, 48, 55, 43, 40,
                          45, 18, 20, 47, 42, 23, 17, 16, 10, 41, 11, 49, 25,
                          51, 26, 54, 9, 22, 15, 21, 12, 24, 7, 52, 31, 32, 36,
                          46, 35, 29, 50, 27, 19, 28, 5, 33, 13, 53, 39, 38, 44])
    g3.name("Klein 3-regular Graph")
    return g3
def Klein7RegularGraph():
    r"""
    Return the Klein 7-regular graph.
    The 7-valent Klein graph has 24 vertices and can be embedded on a surface of
    genus 3. It is the dual of
    :meth:`~sage.graphs.graph_generators.GraphGenerators.Klein3RegularGraph`. For
    more information, see the :wikipedia:`Klein_graphs`.
    EXAMPLES::
        sage: g = graphs.Klein7RegularGraph(); g
        Klein 7-regular Graph: Graph on 24 vertices
        sage: g.order(), g.size()
        (24, 84)
        sage: g.girth()
        3
        sage: g.automorphism_group().cardinality()
        336
        sage: g.chromatic_number()
        4
    """
    # The graph is stored as a sparse6 string (leading ':'); loops and
    # multiedges are explicitly disabled.
    g7 = Graph(':W__@`AaBbC_CDbDcE`F_AG_@DEH_IgHIJbFGIKaFHILeFGHMdFKN_EKOPaCNP'+
               'Q`HOQRcGLRS`BKMSTdJKLPTU',loops=False,multiedges=False)
    # Hand-tuned circular ordering of the 24 vertices for a readable drawing.
    g7._circle_embedding([0, 2, 3, 1, 9, 16, 20, 21, 4, 19, 17, 7, 15,
                          10, 8, 13, 11, 5, 23, 22, 14, 12, 18, 6])
    g7.name("Klein 7-regular Graph")
    return g7
def LocalMcLaughlinGraph():
    r"""
    Return the local McLaughlin graph.
    The local McLaughlin graph is a strongly regular graph with parameters
    `(162,56,10,24)`. It can be obtained from
    :meth:`~sage.graphs.graph_generators.GraphGenerators.McLaughlinGraph` by
    considering the stabilizer of a point: one of its orbits has cardinality
    162.
    EXAMPLES::
        sage: g = graphs.LocalMcLaughlinGraph(); g # long time # optional - gap_packages
        Local McLaughlin Graph: Graph on 162 vertices
        sage: g.is_strongly_regular(parameters=True) # long time # optional - gap_packages
        (162, 56, 10, 24)
    """
    mclaughlin = McLaughlinGraph()
    # Stabilize one vertex and pick the orbit of cardinality 162; the graph
    # induced on that orbit is the local McLaughlin graph.
    stabilizer_orbits = mclaughlin.automorphism_group().stabilizer(1).orbits()
    big_orbit = next(o for o in stabilizer_orbits if len(o) == 162)
    local = mclaughlin.subgraph(vertices=big_orbit)
    local.relabel()
    local.name("Local McLaughlin Graph")
    return local
def LjubljanaGraph(embedding=1):
    r"""
    Return the Ljubljana Graph.
    The Ljubljana graph is a bipartite 3-regular graph on 112 vertices and 168
    edges. It is not vertex-transitive as it has two orbits which are also
    independent sets of size 56. See the :wikipedia:`Ljubljana_graph`.
    The default embedding is obtained from the Heawood graph.
    INPUT:
    - ``embedding`` -- two embeddings are available, and can be selected by
      setting ``embedding`` to 1 or 2.
    EXAMPLES::
        sage: g = graphs.LjubljanaGraph()
        sage: g.order()
        112
        sage: g.size()
        168
        sage: g.girth()
        10
        sage: g.diameter()
        8
        sage: g.show(figsize=[10, 10]) # long time
        sage: graphs.LjubljanaGraph(embedding=2).show(figsize=[10, 10]) # long time
    TESTS::
        sage: graphs.LjubljanaGraph(embedding=3)
        Traceback (most recent call last):
        ...
        ValueError: the value of embedding must be 1 or 2
    """
    # LCF notation for the Ljubljana graph (112 vertices, pattern repeated
    # twice).
    L = [47, -23, -31, 39, 25, -21, -31, -41, 25, 15, 29, -41, -19, 15,
         -49, 33, 39, -35, -21, 17, -33, 49, 41, 31, -15, -29, 41, 31,
         -15, -25, 21, 31, -51, -25, 23, 9, -17, 51, 35, -29, 21, -51,
         -39, 33, -9, -51, 51, -47, -33, 19, 51, -21, 29, 21, -31, -39]
    from sage.graphs.generators.families import LCFGraph
    g = LCFGraph(112, L, 2)
    g.name("Ljubljana graph")
    if embedding == 1:
        dh = HeawoodGraph().get_pos()
        # Correspondence between the vertices of the Heawood Graph and 8-sets of
        # the Ljubljana Graph.
        d = {
            0: [1, 21, 39, 57, 51, 77, 95, 107],
            1: [2, 22, 38, 58, 50, 78, 94, 106],
            2: [3, 23, 37, 59, 49, 79, 93, 105],
            3: [4, 24, 36, 60, 48, 80, 92, 104],
            4: [5, 25, 35, 61, 15, 81, 91, 71],
            9: [6, 26, 44, 62, 16, 82, 100, 72],
            10: [7, 27, 45, 63, 17, 83, 101, 73],
            11: [8, 28, 46, 64, 18, 84, 102, 74],
            12: [9, 29, 47, 65, 19, 85, 103, 75],
            13: [10, 30, 0, 66, 20, 86, 56, 76],
            8: [11, 31, 111, 67, 99, 87, 55, 43],
            7: [12, 32, 110, 68, 98, 88, 54, 42],
            6: [13, 33, 109, 69, 97, 89, 53, 41],
            5: [14, 34, 108, 70, 96, 90, 52, 40]
            }
        # The vertices of each 8-set are plotted on a circle, and the
        # circles are slowly shifted to obtain a symmetric drawing.
        # Python-3 native iteration (replaces the former six.iteritems();
        # dicts preserve insertion order, so the drawing is unchanged).
        for i, (u, vertices) in enumerate(d.items()):
            g._circle_embedding(vertices, center=dh[u], radius=.1,
                                shift=8.*i/14)
        return g
    elif embedding == 2:
        return g
    else:
        raise ValueError("the value of embedding must be 1 or 2")
def LivingstoneGraph():
    r"""
    Return the Livingstone Graph.
    The Livingstone graph is a distance-transitive graph on 266 vertices whose
    automorphism group is the :class:`J1 group
    <sage.groups.perm_gps.permgroup_named.JankoGroup>`. For more information,
    see the :wikipedia:`Livingstone_graph`.
    EXAMPLES::
        sage: g = graphs.LivingstoneGraph() # optional - gap_packages internet
        sage: g.order() # optional - gap_packages internet
        266
        sage: g.size() # optional - gap_packages internet
        1463
        sage: g.girth() # optional - gap_packages internet
        5
        sage: g.is_vertex_transitive() # optional - gap_packages internet
        True
        sage: g.is_distance_regular() # optional - gap_packages internet
        True
    """
    from sage.groups.perm_gps.permgroup_named import JankoGroup
    from sage.graphs.graph import Graph
    # The edge set is the J1-orbit of one pair of vertices under the
    # set-wise action.
    group = JankoGroup(1)
    edges = [tuple(pair) for pair in group.orbit((1, 24), action="OnSets")]
    return Graph(edges, name="Livingstone Graph")
def M22Graph():
    r"""
    Return the M22 graph.
    The `M_{22}` graph is the unique strongly regular graph with parameters
    `v = 77, k = 16, \lambda = 0, \mu = 4`.
    For more information on the `M_{22}` graph, see
    `<https://www.win.tue.nl/~aeb/graphs/M22.html>`_.
    EXAMPLES::
        sage: g = graphs.M22Graph()
        sage: g.order()
        77
        sage: g.size()
        616
        sage: g.is_strongly_regular(parameters = True)
        (77, 16, 0, 4)
    """
    from sage.groups.perm_gps.permgroup_named import MathieuGroup
    # Vertices are the 77 blocks of S(3, 6, 22) (the orbit of one hexad under
    # M22); two blocks are adjacent when they are disjoint.
    sets = [tuple(_) for _ in MathieuGroup(22).orbit((1,2,3,7,10,20), action = "OnSets")]
    g = Graph([sets, lambda x,y : not any(xx in y for xx in x)], name="M22 Graph")
    g.relabel()
    # Hand-tuned circular ordering of the 77 vertices for a readable drawing.
    ordering = [0, 1, 3, 4, 5, 6, 7, 10, 12, 19, 20, 31, 2, 24, 35, 34, 22, 32,
                36, 23, 27, 25, 40, 26, 16, 71, 61, 63, 50, 68, 39, 52, 48, 44,
                69, 28, 9, 64, 60, 17, 38, 49, 45, 65, 14, 70, 72, 21, 43, 56,
                33, 73, 58, 55, 41, 29, 66, 54, 76, 46, 67, 11, 51, 47, 62, 53,
                15, 8, 18, 13, 59, 37, 30, 57, 75, 74, 42]
    g._circle_embedding(ordering)
    return g
def MarkstroemGraph():
    r"""
    Return the Markström Graph.
    The Markström Graph is a cubic planar graph with no cycles of length 4 nor
    8, but containing cycles of length 16. For more information, see the
    `Wolfram page about the Markström Graph
    <http://mathworld.wolfram.com/MarkstroemGraph.html>`_.
    EXAMPLES::
        sage: g = graphs.MarkstroemGraph()
        sage: g.order()
        24
        sage: g.size()
        36
        sage: g.is_planar()
        True
        sage: g.is_regular(3)
        True
        sage: g.subgraph_search(graphs.CycleGraph(4)) is None
        True
        sage: g.subgraph_search(graphs.CycleGraph(8)) is None
        True
        sage: g.subgraph_search(graphs.CycleGraph(16))
        Subgraph of (Markstroem Graph): Graph on 16 vertices
    """
    # Outer 9-cycle (vertices 0-8), three attached gadgets (9-17), three
    # triangle vertices (18-20) and a central triangle (21-23).
    g = Graph(name="Markstroem Graph")
    g.add_cycle(list(range(9)))
    g.add_path([0,9,10,11,2,1,11])
    g.add_path([3,12,13,14,5,4,14])
    g.add_path([6,15,16,17,8,7,17])
    g.add_cycle([10,9,18])
    g.add_cycle([12,13,19])
    g.add_cycle([15,16,20])
    g.add_cycle([21,22,23])
    g.add_edges([(19,22),(18,21),(20,23)])
    # Layout: concentric circles; the [0]*2 entries leave gaps on the middle
    # circle so the three gadget triples line up with the outer cycle.
    g._circle_embedding(sum([[9+3*i+j for j in range(3)]+[0]*2 for i in range(3)],[]), radius=.6, shift=.7)
    g._circle_embedding([18,19,20], radius=.35, shift=.25)
    g._circle_embedding([21,22,23], radius=.15, shift=.25)
    g._circle_embedding(list(range(9)))
    return g
def McGeeGraph(embedding=2):
    r"""
    Return the McGee Graph.
    See the :wikipedia:`McGee_graph`.
    INPUT:
    - ``embedding`` -- two embeddings are available, and can be selected by
      setting ``embedding`` to 1 or 2.
    EXAMPLES::
        sage: g = graphs.McGeeGraph()
        sage: g.order()
        24
        sage: g.size()
        36
        sage: g.girth()
        7
        sage: g.diameter()
        4
        sage: g.show()
        sage: graphs.McGeeGraph(embedding=1).show()
    TESTS::
        sage: graphs.McGeeGraph(embedding=3)
        Traceback (most recent call last):
        ...
        ValueError: the value of embedding must be 1 or 2
    """
    from sage.graphs.generators.families import LCFGraph
    # Validate the embedding choice up front (same error as before).
    if embedding not in (1, 2):
        raise ValueError("the value of embedding must be 1 or 2")
    # The McGee graph in LCF notation: [12, 7, -7] repeated 8 times.
    g = LCFGraph(24, [12, 7, -7], 8)
    g.name('McGee graph')
    if embedding == 2:
        # Alternative drawing: three rings of 8 vertices each.
        rings = [[7, 2, 13, 8, 19, 14, 1, 20],
                 [5, 4, 11, 10, 17, 16, 23, 22],
                 [3, 12, 9, 18, 15, 0, 21, 6]]
        g._circle_embedding(rings[0], radius=1.5)
        g._circle_embedding(rings[1], radius=3, shift=-.5)
        g._circle_embedding(rings[2], radius=2.25, shift=.5)
    return g
def McLaughlinGraph():
    r"""
    Return the McLaughlin Graph.
    The McLaughlin Graph is the unique strongly regular graph of parameters
    `(275, 112, 30, 56)`.
    For more information on the McLaughlin Graph, see its web page on `Andries
    Brouwer's website <https://www.win.tue.nl/~aeb/graphs/McL.html>`_ which
    gives the definition that this method implements.
    .. NOTE::
        To create this graph you must have the gap_packages spkg installed.
    EXAMPLES::
        sage: g = graphs.McLaughlinGraph()           # optional gap_packages
        sage: g.is_strongly_regular(parameters=True) # optional gap_packages
        (275, 112, 30, 56)
        sage: set(g.spectrum()) == {112, 2, -28}     # optional gap_packages
        True
    """
    from sage.combinat.designs.block_design import WittDesign
    from itertools import combinations
    from sage.sets.set import Set
    # Vertices: the 22 points 1..22 plus the 253 blocks of the Witt design
    # S(4, 7, 23), split into blocks containing 0 (B) and not containing 0 (C).
    blocks = [Set(_) for _ in WittDesign(23).blocks()]
    B = [b for b in blocks if 0 in b]
    C = [b for b in blocks if 0 not in b]
    g = Graph()
    # point -- block adjacency rules:
    for b in B:
        for x in range(1,23):
            if not x in b:
                g.add_edge(b, x)
    for b in C:
        for x in b:
            g.add_edge(b, x)
    # block -- block adjacency rules (intersection-size conditions):
    for b, bb in combinations(B, 2):
        if len(b & bb) == 1:
            g.add_edge(b, bb)
    for c, cc in combinations(C, 2):
        if len(c & cc) == 1:
            g.add_edge(c, cc)
    for b in B:
        for c in C:
            if len(b & c) == 3:
                g.add_edge(b, c)
    # Here we relabel the elements of g in an architecture-independent way
    g.relabel({v: i for i, v in enumerate(list(range(1, 23)) +
                                          sorted(blocks, key=sorted))})
    g.name("McLaughlin")
    return g
def MoebiusKantorGraph():
    """
    Return a Möbius-Kantor Graph.
    A Möbius-Kantor graph is a cubic symmetric graph. (See also the Heawood
    graph). It has 16 nodes and 24 edges. It is nonplanar and Hamiltonian. It
    has diameter = 4, girth = 6, and chromatic number = 2. It is identical to
    the Generalized Petersen graph, P[8,3].
    For more details, see `Möbius-Kantor Graph - from Wolfram MathWorld
    <http://mathworld.wolfram.com/Moebius-KantorGraph.html>`_.
    PLOTTING: See the plotting section for the generalized Petersen graphs.
    EXAMPLES::
        sage: MK = graphs.MoebiusKantorGraph()
        sage: MK
        Moebius-Kantor Graph: Graph on 16 vertices
        sage: MK.graph6_string()
        'OhCGKE?O@?ACAC@I?Q_AS'
        sage: (graphs.MoebiusKantorGraph()).show() # long time
    """
    from sage.graphs.generators.families import GeneralizedPetersenGraph
    # The Möbius-Kantor graph is exactly the generalized Petersen graph P(8, 3).
    graph = GeneralizedPetersenGraph(8, 3)
    graph.name("Moebius-Kantor Graph")
    return graph
def MoserSpindle():
    r"""
    Return the Moser spindle.
    For more information, see the :wikipedia:`Moser_spindle`.
    EXAMPLES:
    The Moser spindle is a planar graph having 7 vertices and 11 edges::
        sage: G = graphs.MoserSpindle(); G
        Moser spindle: Graph on 7 vertices
        sage: G.is_planar()
        True
        sage: G.order()
        7
        sage: G.size()
        11
    It is a Hamiltonian graph with radius 2, diameter 2, and girth 3::
        sage: G.is_hamiltonian()
        True
        sage: G.radius()
        2
        sage: G.diameter()
        2
        sage: G.girth()
        3
    The Moser spindle can be drawn in the plane as a unit distance graph,
    has chromatic number 4, and its automorphism group is isomorphic to
    the dihedral group `D_4`::
        sage: pos = G.get_pos()
        sage: all(sum((ui-vi)**2 for ui, vi in zip(pos[u], pos[v])) == 1
        ....:     for u, v in G.edge_iterator(labels=None))
        True
        sage: G.chromatic_number()
        4
        sage: ag = G.automorphism_group()
        sage: ag.is_isomorphic(DihedralGroup(4))
        True
    """
    # The 11 edges of the spindle (one direction each).
    edge_dict = {
        0: [1, 4, 6],
        1: [2, 5],
        2: [3, 5],
        3: [4, 5, 6],
        4: [6]}
    # Exact algebraic coordinates (rationals and sqrt expressions) that
    # realize the spindle as a unit-distance graph, so that the doctest
    # above checks squared edge lengths exactly equal to 1.
    pos_dict = {
        0: [QQ('1/2'), 0],
        1: [- QQ('1/2'), 0],
        2: [- QQ('1/12') * sqrt(33) - QQ('1/4'),
            QQ('1/2') * sqrt( QQ('1/6') * sqrt(33) + QQ('17/6'))],
        3: [0, QQ('1/2') * sqrt(11)],
        4: [QQ('1/12') * sqrt(33) + QQ('1/4'),
            QQ('1/2') * sqrt( QQ('1/6') * sqrt(33) + QQ('17/6'))],
        5: [QQ('1/12') * sqrt(33) - QQ('1/4'),
            QQ('1/2') * sqrt(- QQ('1/6') * sqrt(33) + QQ('17/6'))],
        6: [- QQ('1/12') * sqrt(33) + QQ('1/4'),
            QQ('1/2') * sqrt(- QQ('1/6') * sqrt(33) + QQ('17/6'))]}
    return Graph(edge_dict, pos=pos_dict, name="Moser spindle")
def NauruGraph(embedding=2):
    """
    Return the Nauru Graph.
    See the :wikipedia:`Nauru_graph`.
    INPUT:
    - ``embedding`` -- two embeddings are available, and can be selected by
      setting ``embedding`` to 1 or 2.
    EXAMPLES::
        sage: g = graphs.NauruGraph()
        sage: g.order()
        24
        sage: g.size()
        36
        sage: g.girth()
        6
        sage: g.diameter()
        4
        sage: g.show()
        sage: graphs.NauruGraph(embedding=1).show()
    TESTS::
        sage: graphs.NauruGraph(embedding=3)
        Traceback (most recent call last):
        ...
        ValueError: the value of embedding must be 1 or 2
        sage: graphs.NauruGraph(embedding=1).is_isomorphic(g)
        True
    """
    # Validate the embedding choice up front (same error as before).
    if embedding not in (1, 2):
        raise ValueError("the value of embedding must be 1 or 2")
    if embedding == 1:
        # LCF-notation construction.
        from sage.graphs.generators.families import LCFGraph
        g = LCFGraph(24, [5, -9, 7, -7, 9, -5], 4)
    else:
        # Generalized-Petersen construction P(12, 5).
        from sage.graphs.generators.families import GeneralizedPetersenGraph
        g = GeneralizedPetersenGraph(12, 5)
    g.name("Nauru Graph")
    return g
def PappusGraph():
    """
    Return the Pappus graph, a graph on 18 vertices.
    The Pappus graph is cubic, symmetric, and distance-regular.
    EXAMPLES::
        sage: G = graphs.PappusGraph()
        sage: G.show()  # long time
        sage: L = graphs.LCFGraph(18, [5,7,-7,7,-7,-5], 3)
        sage: L.show()  # long time
        sage: G.is_isomorphic(L)
        True
    """
    # Layout: three concentric rings of 6 vertices each (0-5 outer, 6-11
    # middle at 2/3 radius, 12-17 inner at 1/3 radius).
    pos_dict = {}
    for i in range(6):
        pos_dict[i] = [float(cos(pi/2 + ((2*pi)/6)*i)),\
                       float(sin(pi/2 + ((2*pi)/6)*i))]
        pos_dict[6 + i] = [(2/3.0)*float(cos(pi/2 + ((2*pi)/6)*i)),\
                           (2/3.0)*float(sin(pi/2 + ((2*pi)/6)*i))]
        pos_dict[12 + i] = [(1/3.0)*float(cos(pi/2 + ((2*pi)/6)*i)),\
                            (1/3.0)*float(sin(pi/2 + ((2*pi)/6)*i))]
    # Explicit adjacency list of the 18-vertex Pappus graph.
    return Graph({0:[1,5,6],1:[2,7],2:[3,8],3:[4,9],4:[5,10],\
                  5:[11],6:[13,17],7:[12,14],8:[13,15],9:[14,16],\
                  10:[15,17],11:[12,16],12:[15],13:[16],14:[17]},\
                 pos=pos_dict, name="Pappus Graph")
def PoussinGraph():
    r"""
    Return the Poussin Graph.
    For more information on the Poussin Graph, see its corresponding `Wolfram
    page <http://mathworld.wolfram.com/PoussinGraph.html>`_.
    EXAMPLES::
        sage: g = graphs.PoussinGraph()
        sage: g.order()
        15
        sage: g.is_planar()
        True
    """
    # Start from a few chords, then add three nested cycles (0-2, 3-8, 9-13),
    # connecting paths, and a central hub vertex 14.
    g = Graph({2:[7,8,3,4],1:[7,6],0:[6,5,4],3:[5]},name="Poussin Graph")
    g.add_cycle(list(range(3)))
    g.add_cycle(list(range(3, 9)))
    g.add_cycle(list(range(9, 14)))
    g.add_path([8,12,7,11,6,10,5,9,3,13,8,12])
    g.add_edges([(14,i) for i in range(9,14)])
    # Layout: three concentric circles with the hub at the origin.
    g._circle_embedding(list(range(3)), shift=.75)
    g._circle_embedding(list(range(3, 9)), radius=.4, shift=0)
    g._circle_embedding(list(range(9, 14)), radius=.2, shift=.4)
    g.get_pos()[14] = (0,0)
    return g
def PetersenGraph():
    """
    Return the Petersen Graph.
    The Petersen Graph is a named graph that consists of 10 vertices and 15
    edges, usually drawn as a five-point star embedded in a pentagon.
    The Petersen Graph is a common counterexample. For example, it is not
    Hamiltonian.
    PLOTTING: See the plotting section for the generalized Petersen graphs.
    EXAMPLES: We compare below the Petersen graph with the default spring-layout
    versus a planned position dictionary of [x,y] tuples::
        sage: petersen_spring = Graph({0:[1,4,5], 1:[0,2,6], 2:[1,3,7], 3:[2,4,8], 4:[0,3,9], 5:[0,7,8], 6:[1,8,9], 7:[2,5,9], 8:[3,5,6], 9:[4,6,7]})
        sage: petersen_spring.show() # long time
        sage: petersen_database = graphs.PetersenGraph()
        sage: petersen_database.show() # long time
    """
    from sage.graphs.generators.families import GeneralizedPetersenGraph
    # The Petersen graph is the generalized Petersen graph P(5, 2).
    petersen = GeneralizedPetersenGraph(5, 2)
    petersen.name("Petersen graph")
    return petersen
def PerkelGraph():
    r"""
    Return the Perkel Graph.
    The Perkel Graph is a 6-regular graph with `57` vertices and `171` edges. It
    is the unique distance-regular graph with intersection array
    `(6,5,2;1,1,3)`. For more information, see the :wikipedia:`Perkel_graph` or
    https://www.win.tue.nl/~aeb/graphs/Perkel.html.
    EXAMPLES::
        sage: g = graphs.PerkelGraph(); g
        Perkel Graph: Graph on 57 vertices
        sage: g.is_distance_regular(parameters=True)
        ([6, 5, 2, None], [None, 1, 1, 3])
    """
    g = Graph(name="Perkel Graph")
    # Vertices are pairs (layer, i) with layer in {0,1,2} and i mod 19;
    # edges join layers through the fixed offset sets below.
    for i in range(19):
        g.add_edges(((0, i), (1, (i + j) % 19)) for j in [2, 5, 7])
        g.add_edges(((0, i), (2, (i + j) % 19)) for j in [5, -4, -8])
        g.add_edges(((1, i), (2, (i + j) % 19)) for j in [7, -4, -5])
    g.relabel()
    # Hand-tuned circular ordering of the 57 vertices for a readable drawing.
    g._circle_embedding([0, 2, 3, 35, 8, 33, 45, 5, 53, 51, 18, 50, 29, 46, 30,
                         48, 40, 17, 20, 27, 43, 16, 7, 14, 6, 4, 15, 41, 24, 37,
                         28, 9, 55, 38, 19, 34, 39, 36, 54, 52, 44, 23, 12, 22,
                         32, 10, 13, 26, 1, 21, 42, 56, 49, 31, 47, 11, 25])
    return g
def RobertsonGraph():
    """
    Return the Robertson graph.
    See the :wikipedia:`Robertson_graph`.
    EXAMPLES::
        sage: g = graphs.RobertsonGraph()
        sage: g.order()
        19
        sage: g.size()
        38
        sage: g.diameter()
        3
        sage: g.girth()
        5
        sage: g.charpoly().factor()
        (x - 4) * (x - 1)^2 * (x^2 + x - 5) * (x^2 + x - 1) * (x^2 - 3)^2 * (x^2 + x - 4)^2 * (x^2 + x - 3)^2
        sage: g.chromatic_number()
        3
        sage: g.is_hamiltonian()
        True
        sage: g.is_vertex_transitive()
        False
    """
    from sage.graphs.generators.families import LCFGraph
    # LCF notation of the Robertson graph (pattern applied once over 19
    # vertices).
    lcf_pattern = [8, 4, 7, 4, 8, 5, 7, 4, 7, 8, 4, 5, 7, 8, 4, 8, 4, 8, 4]
    graph = LCFGraph(19, lcf_pattern, 1)
    graph.name("Robertson Graph")
    return graph
def SchlaefliGraph():
    r"""
    Return the Schläfli graph.
    The Schläfli graph is the only strongly regular graphs of parameters
    `(27,16,10,8)` (see [GR2001]_).
    For more information, see the :wikipedia:`Schläfli_graph`.
    .. SEEALSO::
        :meth:`Graph.is_strongly_regular` -- tests whether a graph is strongly
        regular and/or returns its parameters.
    .. TODO::
        Find a beautiful layout for this beautiful graph.
    EXAMPLES:
    Checking that the method actually returns the Schläfli graph::
        sage: S = graphs.SchlaefliGraph()
        sage: S.is_strongly_regular(parameters = True)
        (27, 16, 10, 8)
    The graph is vertex-transitive::
        sage: S.is_vertex_transitive()
        True
    The neighborhood of each vertex is isomorphic to the complement of the
    Clebsch graph::
        sage: neighborhood = S.subgraph(vertices = S.neighbors(0))
        sage: graphs.ClebschGraph().complement().is_isomorphic(neighborhood)
        True
    """
    from sage.graphs.graph import Graph
    # The graph is stored as a graph6 string.
    schlaefli = Graph('ZBXzr|}^z~TTitjLth|dmkrmsl|if}TmbJMhrJX]YfFyTbmsseztKTvyhDvw')
    # Hand-tuned circular ordering of the 27 vertices for the drawing.
    layout_order = [1, 8, 5, 10, 2, 6, 11, 15, 17, 13, 18, 12, 9, 24, 25, 3,
                    26, 7, 16, 20, 23, 0, 21, 14, 22, 4, 19]
    schlaefli._circle_embedding(layout_order)
    schlaefli.name("Schläfli graph")
    return schlaefli
def ShrikhandeGraph():
    """
    Return the Shrikhande graph.
    For more information, see the `MathWorld article on the Shrikhande graph
    <http://mathworld.wolfram.com/ShrikhandeGraph.html>`_ or the
    :wikipedia:`Shrikhande_graph`.
    .. SEEALSO::
        :meth:`Graph.is_strongly_regular` -- tests whether a graph is strongly
        regular and/or returns its parameters.
    EXAMPLES:
    The Shrikhande graph was defined by S. S. Shrikhande in 1959. It has `16`
    vertices and `48` edges, and is strongly regular of degree `6` with
    parameters `(2,2)`::
        sage: G = graphs.ShrikhandeGraph(); G
        Shrikhande graph: Graph on 16 vertices
        sage: G.order()
        16
        sage: G.size()
        48
        sage: G.is_regular(6)
        True
        sage: set([ len([x for x in G.neighbors(i) if x in G.neighbors(j)])
        ....:       for i in range(G.order())
        ....:       for j in range(i) ])
        {2}
    It is non-planar, and both Hamiltonian and Eulerian::
        sage: G.is_planar()
        False
        sage: G.is_hamiltonian()
        True
        sage: G.is_eulerian()
        True
    It has radius `2`, diameter `2`, and girth `3`::
        sage: G.radius()
        2
        sage: G.diameter()
        2
        sage: G.girth()
        3
    Its chromatic number is `4` and its automorphism group is of order `192`::
        sage: G.chromatic_number()
        4
        sage: G.automorphism_group().cardinality()
        192
    It is an integral graph since it has only integral eigenvalues::
        sage: G.characteristic_polynomial().factor()
        (x - 6) * (x - 2)^6 * (x + 2)^9
    It is a toroidal graph, and its embedding on a torus is dual to an
    embedding of the Dyck graph (:meth:`DyckGraph <GraphGenerators.DyckGraph>`).
    """
    # Layout: vertices 0-7 on a unit circle, vertices 8-15 on a half-radius
    # circle directly inside them.
    pos_dict = {}
    for i in range(8):
        pos_dict[i] = [float(cos((2*i) * pi/8)),
                       float(sin((2*i) * pi/8))]
        pos_dict[8 + i] = [0.5 * pos_dict[i][0],
                           0.5 * pos_dict[i][1]]
    # Adjacency table written in octal: 0O00-0O07 are the outer vertices
    # 0-7 and 0O10-0O17 are the inner vertices 8-15, so the digits expose
    # the (ring, index) structure of each edge.
    edge_dict = {
        0O00: [0O06, 0O07, 0O01, 0O02,   0O11, 0O17],
        0O01: [0O07, 0O00, 0O02, 0O03,   0O12, 0O10],
        0O02: [0O00, 0O01, 0O03, 0O04,   0O13, 0O11],
        0O03: [0O01, 0O02, 0O04, 0O05,   0O14, 0O12],
        0O04: [0O02, 0O03, 0O05, 0O06,   0O15, 0O13],
        0O05: [0O03, 0O04, 0O06, 0O07,   0O16, 0O14],
        0O06: [0O04, 0O05, 0O07, 0O00,   0O17, 0O15],
        0O07: [0O05, 0O06, 0O00, 0O01,   0O10, 0O16],
        0O10: [0O12, 0O13, 0O15, 0O16,   0O07, 0O01],
        0O11: [0O13, 0O14, 0O16, 0O17,   0O00, 0O02],
        0O12: [0O14, 0O15, 0O17, 0O10,   0O01, 0O03],
        0O13: [0O15, 0O16, 0O10, 0O11,   0O02, 0O04],
        0O14: [0O16, 0O17, 0O11, 0O12,   0O03, 0O05],
        0O15: [0O17, 0O10, 0O12, 0O13,   0O04, 0O06],
        0O16: [0O10, 0O11, 0O13, 0O14,   0O05, 0O07],
        0O17: [0O11, 0O12, 0O14, 0O15,   0O06, 0O00]
    }
    return Graph(edge_dict, pos=pos_dict, name="Shrikhande graph")
def SylvesterGraph():
    """
    Return the Sylvester Graph.
    This graph is obtained from the Hoffman Singleton graph by considering the
    graph induced by the vertices at distance two from the vertices of an (any)
    edge.
    For more information on the Sylvester graph, see
    `<https://www.win.tue.nl/~aeb/graphs/Sylvester.html>`_.
    .. SEEALSO::
        * :meth:`~sage.graphs.graph_generators.GraphGenerators.HoffmanSingletonGraph`.
    EXAMPLES::
        sage: g = graphs.SylvesterGraph(); g
        Sylvester Graph: Graph on 36 vertices
        sage: g.order()
        36
        sage: g.size()
        90
        sage: g.is_regular(k=5)
        True
    """
    # Delete the closed neighborhoods of both endpoints of an arbitrary edge
    # of the Hoffman-Singleton graph; what remains induces the Sylvester graph.
    g = HoffmanSingletonGraph()
    e = next(g.edge_iterator(labels = False))
    g.delete_vertices(g.neighbors(e[0]) + g.neighbors(e[1]))
    g.relabel()
    # Hand-tuned circular ordering of the 36 vertices for a readable drawing.
    ordering = [0, 1, 2, 4, 5, 9, 16, 35, 15, 18, 20, 30, 22, 6, 33, 32, 14,
                10, 28, 29, 7, 24, 23, 26, 19, 12, 13, 21, 11, 31, 3, 27, 25,
                17, 8, 34]
    g._circle_embedding(ordering, shift=.5)
    g.name("Sylvester Graph")
    return g
def SimsGewirtzGraph():
    r"""
    Return the Sims-Gewirtz Graph.
    This graph is obtained from the Higman Sims graph by considering the graph
    induced by the vertices at distance two from the vertices of an (any)
    edge. It is the only strongly regular graph with parameters `v = 56`,
    `k = 10`, `\lambda = 0`, `\mu = 2`
    For more information on the Sylvester graph, see
    `<https://www.win.tue.nl/~aeb/graphs/Sims-Gewirtz.html>`_ or its
    :wikipedia:`Gewirtz_graph`.
    .. SEEALSO::
        * :meth:`~sage.graphs.graph_generators.GraphGenerators.HigmanSimsGraph`.
    EXAMPLES::
        sage: g = graphs.SimsGewirtzGraph(); g
        Sims-Gewirtz Graph: Graph on 56 vertices
        sage: g.order()
        56
        sage: g.size()
        280
        sage: g.is_strongly_regular(parameters = True)
        (56, 10, 0, 2)
    """
    # Same construction pattern as SylvesterGraph, starting from the
    # Higman-Sims graph instead of Hoffman-Singleton.
    g = HigmanSimsGraph()
    e = next(g.edge_iterator(labels = False))
    g.delete_vertices(g.neighbors(e[0]) + g.neighbors(e[1]))
    g.relabel()
    # Hand-tuned circular ordering of the 56 vertices for a readable drawing.
    ordering = [0, 2, 3, 4, 6, 7, 8, 17, 1, 41, 49, 5, 22, 26, 11, 27, 15, 47,
                53, 52, 38, 43, 44, 18, 20, 32, 19, 42, 54, 36, 51, 30, 33, 35,
                37, 28, 34, 12, 29, 23, 55, 25, 40, 24, 9, 14, 48, 39, 45, 16,
                13, 21, 31, 50, 10, 46]
    g._circle_embedding(ordering)
    g.name("Sims-Gewirtz Graph")
    return g
def SousselierGraph():
    r"""
    Return the Sousselier Graph.
    The Sousselier graph is a hypohamiltonian graph on 16 vertices and 27
    edges. For more information, see :wikipedia:`Sousselier_graph` or
    the corresponding French
    `Wikipedia page <https://fr.wikipedia.org/wiki/Graphe_de_Sousselier>`_.
    EXAMPLES::
        sage: g = graphs.SousselierGraph()
        sage: g.order()
        16
        sage: g.size()
        27
        sage: g.radius()
        2
        sage: g.diameter()
        3
        sage: g.automorphism_group().cardinality()
        2
        sage: g.is_hamiltonian()
        False
        sage: g.delete_vertex(g.random_vertex())
        sage: g.is_hamiltonian()
        True
    """
    # Outer 15-cycle (0-14) plus chord paths, one extra chord, and a central
    # hub vertex 15 joined to every third cycle vertex (i % 3 == 1).
    g = Graph(name="Sousselier Graph")
    g.add_cycle(list(range(15)))
    g.add_path([12,8,3,14])
    g.add_path([9,5,0,11])
    g.add_edge(6,2)
    g.add_edges([(15,i) for i in range(15) if i%3==1])
    # Layout: the 15-cycle on a circle, hub at the origin.
    g._circle_embedding(list(range(15)), shift=-.25)
    g.get_pos()[15] = (0,0)
    return g
def SzekeresSnarkGraph():
    r"""
    Return the Szekeres Snark Graph.
    The Szekeres graph is a snark with 50 vertices and 75 edges. For more
    information on this graph, see the :wikipedia:`Szekeres_snark`.
    EXAMPLES::
        sage: g = graphs.SzekeresSnarkGraph()
        sage: g.order()
        50
        sage: g.size()
        75
        sage: g.chromatic_number()
        3
    """
    g = Graph(name="Szekeres Snark Graph")
    # Five 9-vertex blocks (i, 0)..(i, 8) plus five center vertices (-1, i);
    # within each block the 9-cycle is opened (edge (i,0)-(i,8) removed) and
    # rewired with chords, then blocks are linked cyclically.
    c = [(-1, i) for i in range(5)]
    for i in range(5):
        g.add_cycle([(i, j) for j in range(9)])
        g.delete_edge((i, 0), (i, 8))
        g.add_edge((i, 1), c[i])
        g.add_edge((i, 4), c[i])
        g.add_edge((i, 7), c[i])
        g.add_edge((i, 0), (i, 5))
        g.add_edge((i, 8), (i, 3))
        # Inter-block edges: next block and the block two steps ahead.
        g.add_edge((i, 0), ((i + 1) % 5, 8))
        g.add_edge((i, 6), ((i + 2) % 5, 2))
        # Each block drawn on a small circle around a point of a pentagon;
        # the shift constants are tuned for a symmetric picture.
        g._circle_embedding([(i, j) for j in range(9)],
                            radius=.3,
                            center=(cos(2 * (i + .25) * pi / 5), sin( 2 * (i +.25) * pi / 5)),
                            shift=5.45 + 1.8 * i)
    g._circle_embedding(c, radius=1, shift=.25)
    g.relabel()
    return g
def ThomsenGraph():
    """
    Return the Thomsen Graph.
    The Thomsen Graph is actually a complete bipartite graph with `(n1, n2) =
    (3, 3)`. It is also called the Utility graph.
    PLOTTING: See CompleteBipartiteGraph.
    EXAMPLES::
        sage: T = graphs.ThomsenGraph()
        sage: T
        Thomsen graph: Graph on 6 vertices
        sage: T.graph6_string()
        'EFz_'
        sage: (graphs.ThomsenGraph()).show() # long time
    """
    # K_{3,3}: every top vertex (0-2) is adjacent to every bottom vertex (3-5).
    edges = {top: [3, 4, 5] for top in range(3)}
    # Two rows of three vertices: 0-2 at y=1, 3-5 at y=0, x in {-1, 0, 1}.
    pos_dict = {v: (v % 3 - 1, 1 - v // 3) for v in range(6)}
    return Graph(edges, pos=pos_dict, name="Thomsen graph")
def TietzeGraph():
    r"""
    Return the Tietze Graph.
    For more information on the Tietze Graph, see the
    :wikipedia:`Tietze's_graph`.
    EXAMPLES::
        sage: g = graphs.TietzeGraph()
        sage: g.order()
        12
        sage: g.size()
        18
        sage: g.diameter()
        3
        sage: g.girth()
        3
        sage: g.automorphism_group().cardinality()
        12
        sage: g.automorphism_group().is_isomorphic(groups.permutation.Dihedral(6))
        True
    """
    # Outer 9-cycle (0-8), inner triangle (9-11), plus six spoke edges.
    g = Graph([(0,9),(3,10),(6,11),(1,5),(2,7),(4,8)], name="Tietze Graph")
    g.add_cycle(list(range(9)))
    g.add_cycle([9,10,11])
    # Layout: 9-cycle on the unit circle, triangle on a half-radius circle.
    g._circle_embedding(list(range(9)))
    g._circle_embedding([9, 10, 11], radius=.5)
    return g
def TruncatedIcosidodecahedralGraph():
    r"""
    Return the truncated icosidodecahedron.
    The truncated icosidodecahedron is an Archimedean solid with 30 square
    faces, 20 regular hexagonal faces, 12 regular decagonal faces, 120 vertices
    and 180 edges. For more information, see the
    :wikipedia:`Truncated_icosidodecahedron`.
    EXAMPLES:
    Unfortunately, this graph can not be constructed currently, due to numerical issues::
        sage: g = graphs.TruncatedIcosidodecahedralGraph(); g
        Traceback (most recent call last):
        ...
        ValueError: *Error: Numerical inconsistency is found.  Use the GMP exact arithmetic.
        sage: g.order(), g.size() # not tested
        (120, 180)
    """
    from sage.geometry.polyhedron.library import polytopes
    # exact=False is required: the exact-arithmetic construction takes
    # essentially forever.
    solid = polytopes.icosidodecahedron(exact=False)
    graph = solid.truncation().graph()
    graph.name("Truncated Icosidodecahedron")
    return graph
def TruncatedTetrahedralGraph():
    r"""
    Return the truncated tetrahedron.
    The truncated tetrahedron is an Archimedean solid with 12 vertices and 18
    edges. For more information, see the :wikipedia:`Truncated_tetrahedron`.
    EXAMPLES::
        sage: g = graphs.TruncatedTetrahedralGraph(); g
        Truncated Tetrahedron: Graph on 12 vertices
        sage: g.order(), g.size()
        (12, 18)
        sage: g.is_isomorphic(polytopes.simplex(3).truncation().graph())
        True
    """
    # The graph is stored as a sparse6 string (leading ':'); note the
    # doubled backslash is a Python escape for a single '\' in the string.
    g = Graph(':K`ESwC_EOyDl\\MCi', loops=False, multiedges=False)
    # Layout: three concentric circles of 6, 3, and 3 vertices.
    g._circle_embedding(list(range(6)), radius=1)
    g._circle_embedding(list(range(6, 9)), radius=.6, shift=.25)
    g._circle_embedding(list(range(9, 12)), radius=.2, shift=.25)
    g.name("Truncated Tetrahedron")
    return g
def Tutte12Cage():
    r"""
    Return the Tutte 12-Cage.
    See the :wikipedia:`Tutte_12-cage`.
    EXAMPLES::
        sage: g = graphs.Tutte12Cage()
        sage: g.order()
        126
        sage: g.size()
        189
        sage: g.girth()
        12
        sage: g.diameter()
        6
        sage: g.show()
    """
    from sage.graphs.generators.families import LCFGraph
    # LCF notation of the Tutte 12-cage: this pattern is repeated 7 times
    # over 126 vertices.
    lcf_pattern = [17, 27, -13, -59, -35, 35, -11, 13, -53, 53, -27, 21, 57,
                   11, -21, -57, 59, -17]
    cage = LCFGraph(126, lcf_pattern, 7)
    cage.name("Tutte 12-Cage")
    return cage
def TutteCoxeterGraph(embedding=2):
    r"""
    Return the Tutte-Coxeter graph.
    See the :wikipedia:`Tutte-Coxeter_graph`.
    INPUT:
    - ``embedding`` -- two embeddings are available, and can be selected by
      setting ``embedding`` to 1 or 2.
    EXAMPLES::
        sage: g = graphs.TutteCoxeterGraph()
        sage: g.order()
        30
        sage: g.size()
        45
        sage: g.girth()
        8
        sage: g.diameter()
        4
        sage: g.show()
        sage: graphs.TutteCoxeterGraph(embedding=1).show()
    TESTS::
        sage: graphs.TutteCoxeterGraph(embedding=3)
        Traceback (most recent call last):
        ...
        ValueError: the value of embedding must be 1 or 2
    """
    from sage.graphs.generators.families import LCFGraph
    # LCF notation of the Tutte-Coxeter graph (pattern repeated 5 times).
    g = LCFGraph(30, [-13, -9, 7, -7, 9, 13], 5)
    g.name("Tutte-Coxeter graph")
    if embedding == 1:
        # Hand-tuned partition of the 30 vertices into six clusters, each
        # drawn on its own small circle (centers/radii/shifts are layout
        # constants).
        d = {
            0: [1, 3, 5, 7, 29],
            1: [2, 4, 6, 28, 0],
            2: [8, 18, 26, 22, 12],
            3: [9, 13, 23, 27, 17],
            4: [11, 15, 21, 25, 19],
            5: [10, 14, 24, 20, 16]
            }
        g._circle_embedding(d[0], center=(-1, 1), radius=.25)
        g._circle_embedding(d[1], center=(1, 1), radius=.25)
        g._circle_embedding(d[2], center=(-.8, 0), radius=.25, shift=2.5)
        g._circle_embedding(d[3], center=(1.2, 0), radius=.25)
        g._circle_embedding(d[4], center=(-1, -1), radius=.25, shift=2)
        g._circle_embedding(d[5], center=(1, -1), radius=.25)
        return g
    elif embedding == 2:
        return g
    else:
        raise ValueError("the value of embedding must be 1 or 2")
def TutteGraph():
    r"""
    Return the Tutte Graph.
    The Tutte graph is a 3-regular, 3-connected, and planar non-hamiltonian
    graph. For more information on the Tutte Graph, see the
    :wikipedia:`Tutte_graph`.
    EXAMPLES::
        sage: g = graphs.TutteGraph()
        sage: g.order()
        46
        sage: g.size()
        69
        sage: g.is_planar()
        True
        sage: g.vertex_connectivity() # long time
        3
        sage: g.girth()
        4
        sage: g.automorphism_group().cardinality()
        3
        sage: g.is_hamiltonian()
        False
    """
    g = Graph(name="Tutte Graph")
    # Vertices: the integer 0 (center) plus pairs (i, j) for three copies
    # (i in 0..2) of a 15-vertex "Tutte fragment" (j in 0..14).
    g.add_cycle([(i,j) for i in range(3) for j in range(3) ])
    for i in range(3):
        g.add_cycle([(i,j) for j in range(9)])
        g.add_cycle([(i,j) for j in range(9,14)])
        g.add_edge((i,5),0)
        g.add_edge((i,13),(i,3))
        g.add_edge((i,12),(i,1))
        g.add_edge((i,11),(i,8))
        g.add_edge((i,10),(i,7))
        g.add_edge((i,6),(i,14))
        g.add_edge((i,4),(i,14))
        g.add_edge((i,9),(i,14))
    # Layout: outer ring, small ring of the three (i, 14) apex vertices,
    # then per-fragment circles; [0]*5 padding leaves gaps on the circle so
    # each fragment's arc faces outwards.  All constants are hand-tuned.
    g._circle_embedding([(i, j) for i in range(3)  for j in range(6)], shift=.5)
    g._circle_embedding([(i, 14) for i in range(3) ], radius=.3, shift=.25)
    for i in range(3):
        g._circle_embedding([(i, j) for j in range(3, 9)] + [0]*5,
                            shift=3.7*(i-2)+.75,
                            radius=.4,
                            center=(.6*cos(2*(i+.25)*pi/3), .6*sin(2*(i+.25)*pi/3)))
        g._circle_embedding([(i, j) for j in range(9, 14)],
                            shift=1.7*(i-2)+1,
                            radius=.2,
                            center=(.6*cos(2*(i+.25)*pi/3), .6*sin(2*(i+.25)*pi/3)))
    g.get_pos()[0] = (0,0)
    return g
def WagnerGraph():
    """
    Return the Wagner Graph.

    See the :wikipedia:`Wagner_graph`.

    EXAMPLES::

        sage: g = graphs.WagnerGraph()
        sage: g.order()
        8
        sage: g.size()
        12
        sage: g.girth()
        4
        sage: g.diameter()
        2
        sage: g.show()
    """
    from sage.graphs.generators.families import LCFGraph
    # The Wagner graph has LCF notation [4]^8: an 8-cycle together with
    # the four chords joining antipodal vertices (a Moebius ladder).
    wagner = LCFGraph(8, [4], 8)
    wagner.name("Wagner Graph")
    return wagner
def WatkinsSnarkGraph():
    r"""
    Return the Watkins Snark Graph.

    The Watkins Graph is a snark with 50 vertices and 75 edges. For more
    information, see the :wikipedia:`Watkins_snark`.

    EXAMPLES::

        sage: g = graphs.WatkinsSnarkGraph()
        sage: g.order()
        50
        sage: g.size()
        75
        sage: g.chromatic_number()
        3
    """
    g = Graph(name="Watkins Snark Graph")
    for i in range(5):
        # Five 9-cycles; vertex (i,j) is the j-th vertex of the i-th cycle,
        # plain integers 0..4 are five additional vertices (one per cycle).
        g.add_cycle([(i,j) for j in range(9)])
        # Lay the i-th cycle out on a small circle; the [0]*2 entries pad
        # unused slots so the listed vertices land at the intended angles
        # (they only set positions, they add no edges).
        g._circle_embedding([(i,j) for j in range(4)]+[0]*2+[(i,4)]+[0]*2+[(i,j) for j in range(5,9)],
                            radius=.3,
                            center=(cos(2*(i+.25)*pi/5), sin(2*(i+.25)*pi/5)),
                            shift=2.7*i+7.55)
        # Edges to the next and next-but-one cycles (indices mod 5).
        g.add_edge((i,5),((i+1)%5,0))
        g.add_edge((i,8),((i+2)%5,3))
        # Each integer vertex i attaches to three vertices of cycle i.
        g.add_edge((i,1),i)
        g.add_edge((i,7),i)
        g.add_edge((i,4),i)
        g.add_edge((i,6),(i,2))
    # The five integer vertices sit on their own circle.
    g._circle_embedding(list(range(5)), shift=.25, radius=1.1)
    return g
def WienerArayaGraph():
    r"""
    Return the Wiener-Araya Graph.

    The Wiener-Araya Graph is a planar hypohamiltonian graph on 42 vertices and
    67 edges. For more information, see the `Wolfram Page on the Wiener-Araya
    Graph <http://mathworld.wolfram.com/Wiener-ArayaGraph.html>`_ or
    :wikipedia:`Wiener-Araya_graph`.

    EXAMPLES::

        sage: g = graphs.WienerArayaGraph()
        sage: g.order()
        42
        sage: g.size()
        67
        sage: g.girth()
        4
        sage: g.is_planar()
        True
        sage: g.is_hamiltonian() # not tested -- around 30s long
        False
        sage: g.delete_vertex(g.random_vertex())
        sage: g.is_hamiltonian()
        True
    """
    g = Graph(name="Wiener-Araya Graph")
    # Four layers of cycles (lengths 4, 12, 20, 6); vertex (k,i) is the
    # i-th vertex of layer k.
    g.add_cycle([(0,i) for i in range(4)])
    g.add_cycle([(1,i) for i in range(12)])
    g.add_cycle([(2,i) for i in range(20)])
    g.add_cycle([(3,i) for i in range(6)])
    # Concentric-circle layout; the [0]*3 entries pad empty slots so the
    # listed vertices get the intended angles (positions only, no edges).
    g._circle_embedding([(0, i) for i in range(4)], shift=.5)
    g._circle_embedding(sum([[(1,3*i),(1,3*i+1)]+[0]*3+[(1,3*i+2)]+[0]*3 for i in range(4)],[]),
                        shift=4,
                        radius=.65)
    g._circle_embedding([(2, i) for i in range(20)], radius=.5)
    g._circle_embedding([(3, i) for i in range(6)], radius=.3, shift=.5)
    for i in range(4):
        # Reroute one edge of layer 1 through vertex (0,i) of layer 0.
        g.delete_edge((1,3*i),(1,3*i+1))
        g.add_edge((1,3*i),(0,i))
        g.add_edge((1,3*i+1),(0,i))
        # Spokes between layers 1 and 2 (layer-2 indices mod 20).
        g.add_edge((2,5*i+2),(1,3*i))
        g.add_edge((2,5*i+3),(1,3*i+1))
        g.add_edge((2,(5*i+5)%20),(1,3*i+2))
        # Spokes from layer 2 to layer 3; the (i>=1)+(i>=3) offset skips
        # the layer-3 vertices that are rewired specially below.
        g.add_edge((2,(5*i+1)%20),(3,i+(i>=1)+(i>=3)))
        g.add_edge((2,(5*i+4)%20),(3,i+(i>=1)+(i>=3)))
    # Local corrections on layer 3.
    g.delete_edge((3,1),(3,0))
    g.add_edge((3,1),(2,4))
    g.delete_edge((3,4),(3,3))
    g.add_edge((3,4),(2,14))
    g.add_edge((3,1),(3,4))
    # Drop the position entry created for the padding label 0, then
    # relabel all vertices to consecutive integers.
    g.get_pos().pop(0)
    g.relabel()
    return g
def _EllipticLinesProjectivePlaneScheme(k):
    r"""
    Pseudo-cyclic association scheme for action of `O(3,2^k)` on elliptic lines

    The group `O(3,2^k)` acts naturally on the `q(q-1)/2` lines of `PG(2,2^k)`
    skew to the conic preserved by it, see Sect. 12.7.B of [BCN1989]_ and
    Sect. 6.D in [BL1984]_. Compute the orbitals of this action and return them.

    This is a helper for
    :func:`sage.graphs.generators.smallgraphs.MathonStronglyRegularGraph`.

    INPUT:

    - ``k`` (integer) -- the exponent of 2 to get the field size

    TESTS::

        sage: from sage.graphs.generators.smallgraphs import _EllipticLinesProjectivePlaneScheme
        sage: _EllipticLinesProjectivePlaneScheme(2)
        [
        [1 0 0 0 0 0]  [0 1 1 1 1 0]  [0 0 0 0 0 1]
        [0 1 0 0 0 0]  [1 0 1 1 0 1]  [0 0 0 0 1 0]
        [0 0 1 0 0 0]  [1 1 0 0 1 1]  [0 0 0 1 0 0]
        [0 0 0 1 0 0]  [1 1 0 0 1 1]  [0 0 1 0 0 0]
        [0 0 0 0 1 0]  [1 0 1 1 0 1]  [0 1 0 0 0 0]
        [0 0 0 0 0 1], [0 1 1 1 1 0], [1 0 0 0 0 0]
        ]
    """
    from sage.libs.gap.libgap import libgap
    from sage.matrix.constructor import matrix
    from itertools import product
    q = 2**k
    g0 = libgap.GeneralOrthogonalGroup(3, q)  # invariant form x0^2+x1*x2
    # Transpose the generators so the group acts on row vectors.
    g = libgap.Group(libgap.List(g0.GeneratorsOfGroup(), libgap.TransposedMat))
    W = libgap.FullRowSpace(libgap.GF(q), 3)
    l = sum(libgap.Elements(libgap.Basis(W)))
    gp = libgap.Action(g, libgap.Orbit(g, l, libgap.OnLines), libgap.OnLines)
    # Orbits of the induced action on ordered pairs of points: the orbitals.
    orbitals = gp.Orbits(list(product(gp.Orbit(1), gp.Orbit(1))),
                         libgap.OnTuples)
    # Store each orbital as a frozenset of 0-based index pairs so that the
    # membership test in the entry function below is O(1) instead of a
    # linear scan over the orbital for each of the n^2 matrix entries.
    mats = [frozenset((int(x[0]) - 1, int(x[1]) - 1) for x in o)
            for o in orbitals]
    n = (q * (q - 1)) // 2
    # One 0/1 adjacency matrix per orbital.
    return [matrix(n, lambda i, j: 1 if (i, j) in x else 0)
            for x in mats]
def MathonStronglyRegularGraph(t):
    r"""
    Return one of Mathon's graphs on 784 vertices.

    INPUT:

    - ``t`` (integer) -- the number of the graph, from 0 to 2.

    EXAMPLES::

        sage: from sage.graphs.generators.smallgraphs import MathonStronglyRegularGraph
        sage: G = MathonStronglyRegularGraph(0) # long time
        sage: G.is_strongly_regular(parameters=True) # long time
        (784, 243, 82, 72)

    TESTS::

        sage: G = graphs.MathonStronglyRegularGraph(1) # long time
        sage: G.is_strongly_regular(parameters=True) # long time
        (784, 270, 98, 90)
        sage: G = graphs.MathonStronglyRegularGraph(2) # long time
        sage: G.is_strongly_regular(parameters=True) # long time
        (784, 297, 116, 110)
    """
    from sage.graphs.generators.families import MathonPseudocyclicMergingGraph
    # Build the association scheme for O(3,8) acting on elliptic lines
    # (k=3, i.e. field size 2^3) and merge its classes as selected by t.
    scheme = _EllipticLinesProjectivePlaneScheme(3)
    return MathonPseudocyclicMergingGraph(scheme, t)
def JankoKharaghaniGraph(v):
    r"""
    Return a (936, 375, 150, 150)-srg or a (1800, 1029, 588, 588)-srg.

    This functions returns a strongly regular graph for the two sets of
    parameters shown to be realizable in [JK2002]_. The paper also uses a
    construction from [GM1987]_.

    INPUT:

    - ``v`` (integer) -- one of 936 or 1800.

    EXAMPLES::

        sage: g = graphs.JankoKharaghaniGraph(936)   # long time
        sage: g.is_strongly_regular(parameters=True) # long time
        (936, 375, 150, 150)
        sage: g = graphs.JankoKharaghaniGraph(1800)  # not tested (30s)
        sage: g.is_strongly_regular(parameters=True) # not tested (30s)
        (1800, 1029, 588, 588)
    """
    from sage.rings.finite_rings.finite_field_constructor import FiniteField as GF
    from sage.matrix.constructor import matrix

    # The notations of [JK02] are rather tricky, and so this code attempts to
    # stick as much as possible to the paper's variable names.
    assert v in [1800, 936]
    J = matrix.ones
    I = matrix.identity

    # Definition of the 36x36 matrix H ([JK02], section 2).  The five blocks
    # B..F are given as strings of '1'/'-' encoding +1/-1 entries.
    A = J(6)
    B = ("111---","1---11","1--1-1","--111-","-1-11-","-11--1")
    C = ("-1-1-1","1---11","--11-1","1-1-1-","-1-11-","111---")
    D = ("--1-11","-11-1-","11-1--","--11-1","11---1","1--11-")
    E = ("-1--11","1-1--1","-11-1-","---111","1-11--","11-1--")
    F = ("-1-1-1","11--1-","--111-","1-11--","-11--1","1---11")
    # Materialize the rows as lists (a map object is a one-shot iterator in
    # Python 3 and must not be handed to the matrix constructor as a row).
    B, C, D, E, F = [matrix([[{'1': 1, '-': -1}.get(x) for x in r] for r in m])
                     for m in [B, C, D, E, F]]
    H = [A, B, C, D, E, F]
    # Block-circulant arrangement of the six blocks, with sign flips.
    H = [[-x for x in H[6-i:]] + H[:6-i] for i in range(6)]
    H = matrix.block(H)

    # Definition of the BGW matrix W with the cyclotomic method
    # ([JK02] Lemma 1, and [GM87] Construction 1)
    m = 12
    t = (2 if v == 936 else 4)
    k = m
    q = m*t + 1
    K = GF(q, 'alpha')
    a = K.primitive_element()
    # Cyclotomic classes: C_0 = {0}, C_i = {a^(k*j+i) : j}.
    Ci = [[K(0)]] + [set(a**(k*j+i) for j in range(t)) for i in range(m)]
    Kelem_to_Ci = {v: i for i, s in enumerate(Ci) for v in s}  # maps v to [0,...,12]
    W = ([[0] + [1]*(len(K))] +
         [[1]+[Kelem_to_Ci[aj-ai] for aj in K] for ai in K])

    # The nonzero elements of W are considered as elements of C_12, generated
    # by a matrix Omega of order 12
    n = 18
    U = matrix.circulant([int(i == 1) for i in range(2*n)])
    N = matrix.diagonal([1 if i else -1 for i in range(2*n)])
    Omega = (U*N)**6
    assert Omega**12 == I(36)

    # The value w_{ij} is understood in the paper as matrix generated by Omega
    # acting on the left of a matrix L, which we now define.
    M = H - I(6).tensor_product(J(6))
    L = matrix(list(reversed(I(6).rows()))).tensor_product(I(6))

    # w_ij represents in the paper the matrix w_{ij}*L. We perform this action
    # while computing what is noted '[ M w_{ij} ]' in the paper.
    D = [[M*0 if w == 0 else M*(Omega**w)*L for w in R]
         for R in W]
    D = matrix.block(D)

    # for v=1800 the construction is slightly different, and we must add to D
    # a matrix which we now compute.
    if v == 1800:
        abs = lambda M: matrix([[1 if x else 0 for x in R] for R in M.rows()])
        M = (J(6)+I(6)).tensor_product(J(6))  # we define M = (J(6)+I(6)) x J(6)
        D2 = [[M*0 if w == 0 else M*abs((Omega**w)*L) for w in R]  # '[ (J(6)+I(6)) x J(6) |w_{ij}| ]'
              for R in W]
        D = (D + matrix.block(D2))/2

    # Entries equal to 1 in D are the edges.  Plain dict.items() replaces
    # the former six.iteritems() call (`six` is not imported in this file
    # and was removed from modern Python 3 codebases).
    return Graph([e for e, v in D.dict().items() if v == 1],
                 multiedges=False,
                 name="Janko-Kharaghani")
def JankoKharaghaniTonchevGraph():
    r"""
    Return a (324,153,72,72)-strongly regular graph from [JKT2001]_.

    Build the graph using the description given in [JKT2001]_, taking sets B1
    and B163 in the text as adjacencies of vertices 1 and 163, respectively, and
    taking the edge orbits of the group `G` provided.

    EXAMPLES::

        sage: Gamma=graphs.JankoKharaghaniTonchevGraph()  # long time
        sage: Gamma.is_strongly_regular(parameters=True)  # long time
        (324, 153, 72, 72)
    """
    from sage.misc.misc_c import prod
    from sage.combinat.permutation import Permutation as P
    from sage.libs.gap.libgap import libgap

    # Generators of the automorphism group G, given in [JKT2001]_ as
    # products of explicit cycles on the 324 points (1-based labels).
    m1=prod(P((9*x+k,9*x+k+3,9*x+k+6)) for k in range(1, 4) for x in range(36))
    m2=prod(P((3*x+1,3*x+2,3*x+3)) for x in range(108))
    t=prod(prod(map(P,[(9*x+2,9*x+3),(9*x+4,9*x+7),(9*x+5,9*x+9),(9*x+6,9*x+8)])) for
           x in range(36))
    n1=prod(prod(map(P,[(1+x,19+x,37+x),(55+x,73+x,91+x),(109+x,127+x,145+x),
                        (163+x,181+x,199+x),(217+x,235+x,253+x),(271+x,289+x,307+x)]))
            for x in range(18))
    n2=prod(prod(map(P,[(1+x,55+x,109+x),(19+x,73+x,127+x),(37+x,91+x,145+x),
                        (163+x,217+x,271+x),(181+x,235+x,289+x),(199+x,253+x,307+x)]))
            for x in range(18))
    s=prod(prod(map(P,[(19+x,37+x),(55+x,109+x),(73+x,145+x),(91+x,127+x),
                       (181+x,199+x),(217+x,271+x),(235+x,307+x),(253+x,289+x)]))
           for x in range(18))
    k=prod(prod(map(P,[(18*x+1,18*x+10),(18*x+2,18*x+11),(18*x+3,18*x+12),
                       (18*x+4,18*x+13),(18*x+5,18*x+14),(18*x+6,18*x+15),(18*x+7,18*x+16),
                       (18*x+8,18*x+17),(18*x+9,18*x+18)]))
           for x in range(18))
    # G is the full group; st=<t,s> is used to pick orbit representatives.
    G = libgap.Group([libgap.PermList(p) for p in [m1, m2, t, n1, n2, s, k]])
    st = libgap.Group([libgap.PermList(p) for p in [t, s]])
    # Adjacency sets of vertices 1 and 163, copied verbatim from [JKT2001]_.
    B1=(19,22,25,29,30,31,33,34,35,37,40,43,47,48,49,51,52,53,55,56,57,65,
        66,67,68,70,72,76,77,78,79,80,81,82,86,90,92,93,95,96,98,99,100,105,107,
        109,110,111,119,120,121,122,124,126,128,129,131,132,134,135,136,141,143,
        148,149,150,151,152,153,154,158,162,167,168,170,171,172,176,177,179,180,
        184,186,187,188,190,191,192,193,196,202,204,205,206,208,209,210,211,214,
        218,219,221,225,226,227,228,229,232,236,237,238,241,244,245,246,249,251,
        254,255,256,259,262,265,266,268,270,272,273,275,279,280,281,282,283,286,
        290,291,292,295,298,301,302,304,306,308,309,310,313,316,317,318,321,323)
    B163=(5,6,8,9,10,14,15,17,18,22,24,25,26,28,29,30,31,34,40,42,43,44,46,
          47,48,49,52,56,57,59,63,64,65,66,67,70,74,75,76,79,82,83,84,87,89,92,93,
          94,97,100,103,104,106,108,110,111,113,117,118,119,120,121,124,128,129,
          130,133,136,139,140,142,144,146,147,148,151,154,155,156,159,161,181,185,
          189,191,192,194,195,197,198,199,203,207,209,210,212,213,215,216,217,222,
          224,229,230,231,232,233,234,236,237,238,240,241,242,244,245,246,254,255,
          256,257,259,261,262,265,268,271,276,278,283,284,285,286,287,288,290,291,
          292,293,295,297,298,301,304,308,309,310,312,313,314,316,317,318)
    Gamma=Graph(multiedges=False,name='Janko-Kharaghani-Tonchev')
    # For each base vertex i, add the G-orbit of every edge {i,j}, where j
    # runs over representatives of the <t,s>-orbits on the adjacency set.
    for i,b in ((1,B1),(163,B163)):
        for j in map(lambda x: x[0], st.OrbitsDomain(b)):
            Gamma.add_edges(map(tuple,G.Orbit(libgap.Set([i,j]), libgap.OnSets)))
    # Relabel the 1-based GAP labels to 0..323.
    Gamma.relabel(range(Gamma.order()))
    return Gamma
def IoninKharaghani765Graph():
    r"""
    Return a `(765, 192, 48, 48)`-strongly regular graph.

    Existence of a strongly regular graph with these parameters was claimed in
    [IK2003]_. Implementing the construction in the latter did not work,
    however. This function implements the following instructions, shared by Yury
    Ionin and Hadi Kharaghani.

    Let `A` be the affine plane over the field `GF(3)=\{-1,0,1\}`. Let

    .. MATH::

        \phi_1(x,y) &= x\\
        \phi_2(x,y) &= y\\
        \phi_3(x,y) &= x+y\\
        \phi_4(x,y) &= x-y\\

    For `i=1,2,3,4` and `j\in GF(3)`, let `L_{i,j}` be the line in `A`
    defined by `\phi_i(x,y)=j`. Let `\mathcal M` be the set of all 12 lines
    `L_{i,j}`, plus the empty set. Let `\pi` be the permutation defined on
    `\mathcal M` by `\pi(L_{i,j}) = L_{i,j+1}` and `\pi(\emptyset) =
    \emptyset`, so that `\pi` has three orbits of cardinality 3 and one of
    cardinality 1.

    Let `A=(p_1,...,p_9)` with `p_1=(-1,-1)`, `p_2=(-1,0)`, `p_3=(-1,1)`,
    `p_4=(0,-1)`, `p_5=(0,0)`, `p_6=(0,1)`, `p_7=(1,-1)`, `p_8=(1,0)`,
    `p_9=(1,1)`. Note that `p_i+p_{10-i}=(0,0)`. For any subset `X` of `A`,
    let `M(X)` be the `(0,1)`-matrix of order 9 whose `(i,j)`-entry equals 1
    if and only if `p_{10-i}-p_j\in X`. Note that `M` is a symmetric matrix.

    An `MF`-tuple is an ordered quintuple `(X_1, X_2, X_3, X_4, X_5)` of
    subsets of `A`, of which one is the empty set and the other four are
    pairwise non-parallel lines. Such a quintuple generates the following
    block matrix:

    .. MATH::

        N(X_1, X_2, X_3, X_4, X_5) = \left( \begin{array}{ccccc}
        M(X_1) & M(X_2) & M(X_3) & M(X_4) & M(X_5)\\
        M(X_2) & M(X_3) & M(X_4) & M(X_5) & M(X_1)\\
        M(X_3) & M(X_4) & M(X_5) & M(X_1) & M(X_2)\\
        M(X_4) & M(X_5) & M(X_1) & M(X_2) & M(X_3)\\
        M(X_5) & M(X_1) & M(X_2) & M(X_3) & M(X_4)
        \end{array}\right)

    Observe that if `(X_1, X_2, X_3, X_4, X_5)` is an `MF`-tuple, then
    `N(X_1, X_2, X_3, X_4, X_5)` is the symmetric incidence matrix of a
    symmetric `(45, 12, 3)`-design.

    Let `\mathcal F` be the set of all `MF`-tuples and let `\sigma` be the
    following permutation of `\mathcal F`:

    .. MATH::

        \sigma(X_1, X_2, X_3, X_4, X_5) & = (X_2, X_3, X_4, X_5, X_1)\\
        \pi(X_1, X_2, X_3, X_4, X_5) & = (\pi(X_1), \pi(X_2), \pi(X_3), \pi(X_4), \pi(X_5))\\

    Observe that `\sigma` and `\pi` commute, and generate a (cyclic) group
    `G` of order 15. We will from now on identify `G` with the (cyclic)
    multiplicative group of the field `GF(16)` equal to
    `\{\omega^0,...,\omega^{14}\}`. Let `W=[w_{ij}]` be the following matrix
    of order 17 over `GF(16)=\{a_1,...,a_{16}\}`:

    .. MATH::

        w_{ij}=\left\{\begin{array}{ll}
        a_i+a_j & \text{if }1\leq i\leq 16, 1\leq j\leq 16,\\
        1 & \text{if }i=17, j\neq 17,\\
        1 & \text{if }i\neq 17, j= 17,\\
        0 & \text{if }i=j=17
        \end{array}\right.

    The diagonal entries of `W` are equal to 0, each off-diagonal entry can
    be represented as `\omega^k` with `0\leq k\leq 14`. Matrix `W` is a
    symmetric `BGW(17,16,15; G)`.

    Fix an `MF`-tuple `(X_1, X_2, X_3, X_4, X_5)` and let `S` be the block
    matrix obtained from `W` by replacing every diagonal entry of `W` by the
    zero matrix of order 45, and every off-diagonal entry `\omega^k` by the
    matrix `N(\sigma^k(X_1, X_2, X_3, X_4, X_5))` (through the association
    of `\omega^k` with an element of `G`). Then `S` is a symmetric incidence
    matrix of a symmetric `(765, 192, 48)`-design with zero diagonal, and
    therefore `S` is an adjacency matrix of a strongly regular graph with
    parameters `(765, 192, 48, 48)`.

    EXAMPLES::

        sage: g = graphs.IoninKharaghani765Graph(); g
        Ionin-Kharaghani: Graph on 765 vertices

    TESTS::

        sage: graphs.strongly_regular_graph(765, 192, 48, 48)
        Ionin-Kharaghani: Graph on 765 vertices

    .. TODO::

        An update to [IK2003]_ meant to fix the problem encountered became available
        2016/02/24, see http://www.cs.uleth.ca/~hadi/research/IoninKharaghani.pdf
    """
    from sage.rings.finite_rings.finite_field_constructor import FiniteField as GF
    K = GF(3)

    # The four phi functions phi_1..phi_4 from the docstring.
    phi = [lambda xy: 1*xy[0]+0*xy[1],
           lambda xy: 0*xy[0]+1*xy[1],
           lambda xy: 1*xy[0]+1*xy[1],
           lambda xy: 1*xy[0]-1*xy[1]]

    # Defining the lines L_{i,j} = {p : phi_i(p) = j}.
    L = {(i,j): set() for i in range(4) for j in K}
    from itertools import product
    for p in product(K, K):
        for i in range(4):
            L[i, phi[i](p)].add(p)
    # Plain dict.items() replaces the former six.iteritems() call (`six` is
    # not imported in this file and is gone from modern Python 3 codebases).
    L = {k: frozenset(v) for k, v in L.items()}

    # Defining pi: cycles each line family, fixes the empty set.
    pi = {L[i,j]: L[i,(j+1)%3] for (i,j) in L}
    pi[frozenset()] = frozenset()

    # Defining A (the nine points p_1..p_9; note p_i + p_{10-i} = (0,0)).
    A = [(-1,-1), (-1,0), (-1,1), (0,-1), (0,0), (0,1), (1,-1), (1,0), (1,1)]

    def M(S):
        """Return the 9x9 (0,1)-matrix M(S) from the docstring."""
        S = set((K(x), K(y)) for x, y in S)

        def difference(xy, xxyy):
            return (K(xy[0] - xxyy[0]), K(xy[1] - xxyy[1]))
        return matrix([[1 if difference(A[8-i], A[j]) in S else 0
                        for i in range(9)]
                       for j in range(9)])

    def N(Xi):
        """Return the 45x45 block-circulant matrix N(X_1,...,X_5)."""
        Xi = [M(x) for x in Xi]
        return matrix.block([Xi[i:] + Xi[:i]
                             for i in range(len(Xi))])

    # sigma = lambda Xi: Xi[1:] + [pi[Xi[0]]]
    # f_pow(f, i, X) applies f to X i times; sigma2 is the cyclic shift and
    # pi_vec applies pi componentwise (together they generate G of order 15).
    f_pow = lambda f, i, X: f_pow(f, i-1, f(X)) if i else X
    sigma2 = lambda Xi: Xi[1:] + [Xi[0]]
    pi_vec = lambda x: [pi.get(_) for _ in x]

    # The matrix W, with off-diagonal entries equal to integers 1,...,15
    # (instead of x^1,...,x^15)
    from sage.matrix.constructor import matrix
    GF16 = GF(16, 'x')
    W = matrix([[x+y for x in GF16] + [1] for y in GF16] +
               [[1]*16+[0]])
    x = GF16.primitive_element()
    log_x = {x**i: i for i in range(15)}
    W = W.apply_map(lambda x: log_x[x]+1 if x else 0)

    # Associate a matrix to every entry of W: entry k stands for the group
    # element pi^(k mod 3) * sigma^(k mod 5) applied to the base MF-tuple.
    int_to_matrix = {0: matrix.zero(45)}
    for i in range(15):
        vec = [frozenset([]), L[0,0], L[1,0], L[2,0], L[3,0]]
        vec = f_pow(pi_vec, i % 3, vec)
        vec = f_pow(sigma2, i % 5, vec)
        int_to_matrix[i+1] = N(vec)

    M2 = matrix.block([[int_to_matrix[x] for x in R] for R in W.rows()])
    return Graph(M2, name="Ionin-Kharaghani")
def U42Graph216():
    r"""
    Return a (216,40,4,8)-strongly regular graph from [CRS2016]_.

    Build the graph, interpreting the `U_4(2)`-action considered in [CRS2016]_
    as the one on the hyperbolic lines of the corresponding unitary polar space,
    and then doing the unique merging of the orbitals leading to a graph with
    the parameters in question.

    EXAMPLES::

        sage: G=graphs.U42Graph216()                  # optional - gap_packages (grape)
        sage: G.is_strongly_regular(parameters=True)  # optional - gap_packages (grape)
        (216, 40, 4, 8)
    """
    from sage.libs.gap.libgap import libgap
    from sage.features.gap import GapPackage

    GapPackage("grape", spkg="gap_packages").require()

    # All group-theoretic work happens on the GAP side (GRAPE package): the
    # GAP function returns, for every vertex, its list of neighbours.
    adj_list = libgap.function_factory("""function()
        local gg, hl, o216, a216, x, h, re, G;
        LoadPackage("grape");
        gg:=SpecialUnitaryGroup(4,2);
        hl:=Z(2)*[
        [0,0,1,0],
        [1,1,0,0],
        [0,1,0,1],
        [0,1,1,0],
        [1,1,0,1]];
        o216:=Orbit(gg,Set(hl),OnSets);
        a216:=Action(gg,o216,OnSets);
        h:=Stabilizer(a216,1);
        re:=Filtered(Orbits(h,[1..216]),x->Length(x)=20);
        G:=EdgeOrbitsGraph(a216, [[1,re[1][1]], [1,re[2][1]]]);
        return List([1..216],x->Adjacency(G,x));
        end;""")
    adjacencies = adj_list()  # for each vertex, the list of vertices it is adjacent to
    # GAP labels vertices 1..216; shift the neighbour labels back to 0-based.
    edges = ((v, int(w - 1)) for v, nbrs in enumerate(adjacencies) for w in nbrs)
    G = Graph(edges, format='list_of_edges', multiedges=False)
    G.name('U42Graph216')
    return G
def U42Graph540():
    r"""
    Return a (540,187,58,68)-strongly regular graph from [CRS2016]_.

    Build the graph, interpreting the `U_4(2)`-action considered in [CRS2016]_
    as the action of `U_4(2)=Sp_4(3)<U_4(3)` on the nonsingular, w.r.t. to the
    Hermitean form stabilised by `U_4(3)`, points of the 3-dimensional
    projective space over `GF(9)`. There are several possible mergings of
    orbitals, some leading to non-isomorphic graphs with the same parameters. We
    found the merging here using [FK1991]_.

    EXAMPLES::

        sage: G=graphs.U42Graph540()                  # optional - gap_packages (grape)
        sage: G.is_strongly_regular(parameters=True)  # optional - gap_packages (grape)
        (540, 187, 58, 68)
    """
    from sage.libs.gap.libgap import libgap
    from sage.features.gap import GapPackage

    GapPackage("grape", spkg="gap_packages").require()

    # All group-theoretic work happens on the GAP side (GRAPE package): the
    # GAP function returns, for every vertex, its list of neighbours.
    adj_list = libgap.function_factory("""function()
        local f, o540, a540, x, oh, h, lo, G;
        LoadPackage("grape");
        f:=Sp(4,3);
        o540:=Orbit(f,Z(3)^0*[1,0,0,Z(9)],OnLines);
        a540:=Action(f,o540,OnLines);
        h:=Stabilizer(a540,1);
        oh:=Orbits(h,[1..540]);
        lo:=List([8,9,10,11,12,16,19,22,23,24],x->[1,oh[x+1][1]]);
        G:=EdgeOrbitsGraph(a540,lo);
        return List([1..540],x->Adjacency(G,x));
        end;""")
    adjacencies = adj_list()  # for each vertex, the list of vertices it is adjacent to
    # GAP labels vertices 1..540; shift the neighbour labels back to 0-based.
    edges = ((v, int(w - 1)) for v, nbrs in enumerate(adjacencies) for w in nbrs)
    G = Graph(edges, format='list_of_edges', multiedges=False)
    G.name('U42Graph540')
    return G
| 33.451045
| 149
| 0.533744
|
4a0ab6af83691bca10ba35c6cc51026c88cc4540
| 9,739
|
py
|
Python
|
django/core/management/commands/squashmigrations.py
|
QinMing/django
|
162ae9c9143aa85eb27ea69b446a28973eea4854
|
[
"PSF-2.0",
"BSD-3-Clause"
] | 19
|
2015-07-07T02:08:59.000Z
|
2021-11-08T11:05:40.000Z
|
django/core/management/commands/squashmigrations.py
|
QinMing/django
|
162ae9c9143aa85eb27ea69b446a28973eea4854
|
[
"PSF-2.0",
"BSD-3-Clause"
] | 1
|
2020-01-31T11:30:21.000Z
|
2020-01-31T11:30:21.000Z
|
django/core/management/commands/squashmigrations.py
|
QinMing/django
|
162ae9c9143aa85eb27ea69b446a28973eea4854
|
[
"PSF-2.0",
"BSD-3-Clause"
] | 145
|
2019-03-14T18:54:45.000Z
|
2022-03-04T20:25:31.000Z
|
from django.apps import apps
from django.conf import settings
from django.core.management.base import BaseCommand, CommandError
from django.db import DEFAULT_DB_ALIAS, connections, migrations
from django.db.migrations.loader import AmbiguityError, MigrationLoader
from django.db.migrations.migration import SwappableTuple
from django.db.migrations.optimizer import MigrationOptimizer
from django.db.migrations.writer import MigrationWriter
from django.utils.version import get_docs_version
class Command(BaseCommand):
    """Management command squashing a range of migrations into a single one."""

    help = "Squashes an existing set of migrations (from first until specified) into a single new one."

    def add_arguments(self, parser):
        """Register the command's positional arguments and options."""
        parser.add_argument(
            'app_label',
            help='App label of the application to squash migrations for.',
        )
        parser.add_argument(
            'start_migration_name', nargs='?',
            help='Migrations will be squashed starting from and including this migration.',
        )
        parser.add_argument(
            'migration_name',
            help='Migrations will be squashed until and including this migration.',
        )
        parser.add_argument(
            '--no-optimize', action='store_true',
            help='Do not try to optimize the squashed operations.',
        )
        parser.add_argument(
            '--noinput', '--no-input', action='store_false', dest='interactive',
            help='Tells Django to NOT prompt the user for input of any kind.',
        )
        parser.add_argument(
            '--squashed-name',
            help='Sets the name of the new squashed migration.',
        )
        parser.add_argument(
            '--no-header', action='store_false', dest='include_header',
            help='Do not add a header comment to the new squashed migration.',
        )

    def handle(self, **options):
        """Collect the migration range, optionally optimize it, and write the
        squashed migration file to disk."""
        self.verbosity = options['verbosity']
        self.interactive = options['interactive']
        app_label = options['app_label']
        start_migration_name = options['start_migration_name']
        migration_name = options['migration_name']
        no_optimize = options['no_optimize']
        squashed_name = options['squashed_name']
        include_header = options['include_header']
        # Validate app_label.
        try:
            apps.get_app_config(app_label)
        except LookupError as err:
            raise CommandError(str(err))
        # Load the current graph state, check the app and migration they asked for exists
        loader = MigrationLoader(connections[DEFAULT_DB_ALIAS])
        if app_label not in loader.migrated_apps:
            raise CommandError(
                "App '%s' does not have migrations (so squashmigrations on "
                "it makes no sense)" % app_label
            )

        migration = self.find_migration(loader, app_label, migration_name)

        # Work out the list of predecessor migrations
        # (same-app ancestors of the target migration, in dependency order).
        migrations_to_squash = [
            loader.get_migration(al, mn)
            for al, mn in loader.graph.forwards_plan((migration.app_label, migration.name))
            if al == migration.app_label
        ]

        if start_migration_name:
            # Trim the plan so it starts at (and includes) the start migration.
            start_migration = self.find_migration(loader, app_label, start_migration_name)
            start = loader.get_migration(start_migration.app_label, start_migration.name)
            try:
                start_index = migrations_to_squash.index(start)
                migrations_to_squash = migrations_to_squash[start_index:]
            except ValueError:
                raise CommandError(
                    "The migration '%s' cannot be found. Maybe it comes after "
                    "the migration '%s'?\n"
                    "Have a look at:\n"
                    "  python manage.py showmigrations %s\n"
                    "to debug this issue." % (start_migration, migration, app_label)
                )

        # Tell them what we're doing and optionally ask if we should proceed
        if self.verbosity > 0 or self.interactive:
            self.stdout.write(self.style.MIGRATE_HEADING("Will squash the following migrations:"))
            for migration in migrations_to_squash:
                self.stdout.write(" - %s" % migration.name)

            if self.interactive:
                answer = None
                while not answer or answer not in "yn":
                    answer = input("Do you wish to proceed? [yN] ")
                    if not answer:
                        # Empty input defaults to "no".
                        answer = "n"
                        break
                    else:
                        answer = answer[0].lower()
                if answer != "y":
                    return

        # Load the operations from all those migrations and concat together,
        # along with collecting external dependencies and detecting
        # double-squashing
        operations = []
        dependencies = set()
        # We need to take all dependencies from the first migration in the list
        # as it may be 0002 depending on 0001
        first_migration = True
        for smigration in migrations_to_squash:
            if smigration.replaces:
                raise CommandError(
                    "You cannot squash squashed migrations! Please transition "
                    "it to a normal migration first: "
                    "https://docs.djangoproject.com/en/%s/topics/migrations/#squashing-migrations" % get_docs_version()
                )
            operations.extend(smigration.operations)
            for dependency in smigration.dependencies:
                if isinstance(dependency, SwappableTuple):
                    # Re-express a swappable dependency as a setting
                    # dependency when it targets AUTH_USER_MODEL.
                    if settings.AUTH_USER_MODEL == dependency.setting:
                        dependencies.add(("__setting__", "AUTH_USER_MODEL"))
                    else:
                        dependencies.add(dependency)
                elif dependency[0] != smigration.app_label or first_migration:
                    # Keep cross-app dependencies, and (for the first
                    # migration only) same-app ones too.
                    dependencies.add(dependency)
            first_migration = False

        if no_optimize:
            if self.verbosity > 0:
                self.stdout.write(self.style.MIGRATE_HEADING("(Skipping optimization.)"))
            new_operations = operations
        else:
            if self.verbosity > 0:
                self.stdout.write(self.style.MIGRATE_HEADING("Optimizing…"))

            optimizer = MigrationOptimizer()
            new_operations = optimizer.optimize(operations, migration.app_label)

            if self.verbosity > 0:
                if len(new_operations) == len(operations):
                    self.stdout.write("  No optimizations possible.")
                else:
                    self.stdout.write(
                        "  Optimized from %s operations to %s operations." %
                        (len(operations), len(new_operations))
                    )

        # Work out the value of replaces (any squashed ones we're re-squashing)
        # need to feed their replaces into ours
        replaces = []
        for migration in migrations_to_squash:
            if migration.replaces:
                replaces.extend(migration.replaces)
            else:
                replaces.append((migration.app_label, migration.name))

        # Make a new migration with those operations
        subclass = type("Migration", (migrations.Migration,), {
            "dependencies": dependencies,
            "operations": new_operations,
            "replaces": replaces,
        })
        if start_migration_name:
            if squashed_name:
                # Use the name from --squashed-name.
                prefix, _ = start_migration.name.split('_', 1)
                name = '%s_%s' % (prefix, squashed_name)
            else:
                # Generate a name.
                name = '%s_squashed_%s' % (start_migration.name, migration.name)
            new_migration = subclass(name, app_label)
        else:
            name = '0001_%s' % (squashed_name or 'squashed_%s' % migration.name)
            new_migration = subclass(name, app_label)
            # Squashing from the very first migration yields an initial one.
            new_migration.initial = True

        # Write out the new migration file
        writer = MigrationWriter(new_migration, include_header)
        with open(writer.path, "w", encoding='utf-8') as fh:
            fh.write(writer.as_string())

        if self.verbosity > 0:
            self.stdout.write(self.style.MIGRATE_HEADING("Created new squashed migration %s" % writer.path))
            self.stdout.write("  You should commit this migration but leave the old ones in place;")
            self.stdout.write("  the new migration will be used for new installs. Once you are sure")
            self.stdout.write("  all instances of the codebase have applied the migrations you squashed,")
            self.stdout.write("  you can delete them.")
            if writer.needs_manual_porting:
                self.stdout.write(self.style.MIGRATE_HEADING("Manual porting required"))
                self.stdout.write("  Your migrations contained functions that must be manually copied over,")
                self.stdout.write("  as we could not safely copy their implementation.")
                self.stdout.write("  See the comment at the top of the squashed migration for details.")

    def find_migration(self, loader, app_label, name):
        """Resolve a (possibly abbreviated) migration name to a Migration,
        raising CommandError when it is ambiguous or missing."""
        try:
            return loader.get_migration_by_prefix(app_label, name)
        except AmbiguityError:
            raise CommandError(
                "More than one migration matches '%s' in app '%s'. Please be "
                "more specific." % (name, app_label)
            )
        except KeyError:
            raise CommandError(
                "Cannot find a migration matching '%s' from app '%s'." %
                (name, app_label)
            )
| 45.297674
| 119
| 0.595852
|
4a0ab6c4067066fa4c265954ee71cddb0e20cb13
| 1,342
|
py
|
Python
|
bsuite/bsuite/experiments/cartpole_swingup/cartpole_swingup_test.py
|
hbutsuak95/iv_rl
|
0f72a8f077a238237027ea96b7d1160c35ac9959
|
[
"MIT"
] | 1,337
|
2019-08-13T09:25:31.000Z
|
2022-03-28T14:30:04.000Z
|
bsuite/bsuite/experiments/cartpole_swingup/cartpole_swingup_test.py
|
hbutsuak95/iv_rl
|
0f72a8f077a238237027ea96b7d1160c35ac9959
|
[
"MIT"
] | 37
|
2019-08-13T19:08:01.000Z
|
2021-05-20T15:32:06.000Z
|
bsuite/bsuite/experiments/cartpole_swingup/cartpole_swingup_test.py
|
hbutsuak95/iv_rl
|
0f72a8f077a238237027ea96b7d1160c35ac9959
|
[
"MIT"
] | 194
|
2019-08-13T18:06:07.000Z
|
2022-03-24T05:50:10.000Z
|
# python3
# pylint: disable=g-bad-file-header
# Copyright 2019 DeepMind Technologies Limited. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ============================================================================
"""Tests for bsuite.experiments.cartpole_swingup."""
from absl.testing import absltest
from bsuite.experiments.cartpole_swingup import cartpole_swingup
from dm_env import test_utils
import numpy as np
class InterfaceTest(test_utils.EnvironmentTestMixin, absltest.TestCase):
  """dm_env interface-conformance test for the CartpoleSwingup environment."""

  def make_object_under_test(self):
    # Fixed seed and height threshold keep the checks deterministic.
    return cartpole_swingup.CartpoleSwingup(seed=42, height_threshold=0.8)

  def make_action_sequence(self):
    # Yield 100 reproducibly-seeded actions drawn from the valid action set.
    valid_actions = [0, 1, 2]
    rng = np.random.RandomState(42)
    for _ in range(100):
      yield rng.choice(valid_actions)
# Run the absltest runner when this module is executed as a script.
if __name__ == '__main__':
  absltest.main()
| 32.731707
| 78
| 0.722057
|
4a0ab75dd4ab4bc4c9acd0e482a5ca7420ab3c4f
| 2,320
|
py
|
Python
|
test/Fortran/F90COMSTR.py
|
moroten/scons
|
20927b42ed4f0cb87f51287fa3b4b6cf915afcf8
|
[
"MIT"
] | 1,403
|
2017-11-23T14:24:01.000Z
|
2022-03-30T20:59:39.000Z
|
test/Fortran/F90COMSTR.py
|
moroten/scons
|
20927b42ed4f0cb87f51287fa3b4b6cf915afcf8
|
[
"MIT"
] | 3,708
|
2017-11-27T13:47:12.000Z
|
2022-03-29T17:21:17.000Z
|
test/Fortran/F90COMSTR.py
|
moroten/scons
|
20927b42ed4f0cb87f51287fa3b4b6cf915afcf8
|
[
"MIT"
] | 281
|
2017-12-01T23:48:38.000Z
|
2022-03-31T15:25:44.000Z
|
#!/usr/bin/env python
#
# __COPYRIGHT__
#
# Permission is hereby granted, free of charge, to any person obtaining
# a copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish,
# distribute, sublicense, and/or sell copies of the Software, and to
# permit persons to whom the Software is furnished to do so, subject to
# the following conditions:
#
# The above copyright notice and this permission notice shall be included
# in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY
# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE
# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
#
# Test that $F90COMSTR / $F90PPCOMSTR replace the command lines in the
# displayed build output for .f90 (plain) and .F90 (preprocessed) sources.
__revision__ = "__FILE__ __REVISION__ __DATE__ __DEVELOPER__"

import TestSCons

_python_ = TestSCons._python_

test = TestSCons.TestSCons()

test.file_fixture('mycompile.py')

# On case-insensitive filesystems .f90 and .F90 hit the same builder
# ('f90'); otherwise .F90 goes through the preprocessing builder ('f90pp').
if not TestSCons.case_sensitive_suffixes('.f','.F'):
    f90pp = 'f90'
else:
    f90pp = 'f90pp'

test.write('SConstruct', """
env = Environment(F90COM = r'%(_python_)s mycompile.py f90 $TARGET $SOURCES',
                  F90COMSTR = 'Building f90 $TARGET from $SOURCES',
                  F90PPCOM = r'%(_python_)s mycompile.py f90pp $TARGET $SOURCES',
                  F90PPCOMSTR = 'Building f90pp $TARGET from $SOURCES',
                  OBJSUFFIX='.obj')
env.Object(source = 'test01.f90')
env.Object(source = 'test02.F90')
""" % locals())

test.write('test01.f90', "A .f90 file.\n/*f90*/\n")
test.write('test02.F90', "A .F90 file.\n/*%s*/\n" % f90pp)

# The *COMSTR strings -- not the underlying commands -- must appear in
# the build output.
test.run(stdout = test.wrap_stdout("""\
Building f90 test01.obj from test01.f90
Building %(f90pp)s test02.obj from test02.F90
""" % locals()))

test.must_match('test01.obj', "A .f90 file.\n")
test.must_match('test02.obj', "A .F90 file.\n")

test.pass_test()

# Local Variables:
# tab-width:4
# indent-tabs-mode:nil
# End:
# vim: set expandtab tabstop=4 shiftwidth=4:
| 34.117647
| 81
| 0.709483
|
4a0ab890b6d0d040ee3318ebb299bb01b4d3e646
| 6,792
|
py
|
Python
|
python/GafferSceneTest/IECoreGLPreviewTest/RendererTest.py
|
sebaDesmet/gaffer
|
47b2d093c40452bd77947e3b5bd0722a366c8d59
|
[
"BSD-3-Clause"
] | null | null | null |
python/GafferSceneTest/IECoreGLPreviewTest/RendererTest.py
|
sebaDesmet/gaffer
|
47b2d093c40452bd77947e3b5bd0722a366c8d59
|
[
"BSD-3-Clause"
] | null | null | null |
python/GafferSceneTest/IECoreGLPreviewTest/RendererTest.py
|
sebaDesmet/gaffer
|
47b2d093c40452bd77947e3b5bd0722a366c8d59
|
[
"BSD-3-Clause"
] | null | null | null |
##########################################################################
#
# Copyright (c) 2017, Image Engine Design Inc. All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# * Redistributions of source code must retain the above
# copyright notice, this list of conditions and the following
# disclaimer.
#
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following
# disclaimer in the documentation and/or other materials provided with
# the distribution.
#
# * Neither the name of John Haddon nor the names of
# any other contributors to this software may be used to endorse or
# promote products derived from this software without specific prior
# written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS
# IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO,
# THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
# PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR
# CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
# EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
# PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
# PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
# LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
# NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
# SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#
##########################################################################
import os
import unittest
import imath
import IECore
import IECoreScene
import IECoreGL
import GafferTest
import GafferScene
@unittest.skipIf( "TRAVIS" in os.environ or "TF_BUILD" in os.environ, "OpenGL not set up" )
class RendererTest( GafferTest.TestCase ) :

	"""Tests for the "OpenGL" (IECoreGL) preview renderer backend."""

	def setUp( self ) :

		GafferTest.TestCase.setUp( self )

		# NOTE(review): presumably False skips standalone GL context/window
		# setup — confirm against the IECoreGL.init() documentation.
		IECoreGL.init( False )

	def testFactory( self ) :

		# The renderer must be discoverable by name via the factory.
		self.assertTrue( "OpenGL" in GafferScene.Private.IECoreScenePreview.Renderer.types() )

		r = GafferScene.Private.IECoreScenePreview.Renderer.create( "OpenGL" )
		self.assertTrue( isinstance( r, GafferScene.Private.IECoreScenePreview.Renderer ) )
		self.assertEqual( r.name(), "OpenGL" )

	def testOtherRendererAttributes( self ) :

		# Attributes destined for other renderers should be silently ignored
		renderer = GafferScene.Private.IECoreScenePreview.Renderer.create( "OpenGL" )

		with IECore.CapturingMessageHandler() as handler :

			renderer.attributes(
				IECore.CompoundObject( {
					"ai:visibility:camera" : IECore.IntData( 0 )
				} )
			)

		# No warnings or errors may be emitted for the "ai:"-prefixed attribute.
		self.assertEqual( len( handler.messages ), 0 )

	def testPrimVars( self ) :

		renderer = GafferScene.Private.IECoreScenePreview.Renderer.create( "OpenGL" )
		renderer.output( "test", IECoreScene.Output( self.temporaryDirectory() + "/testPrimVars.exr", "exr", "rgba", {} ) )

		# Fragment shader that reads its colour from three constant primvars.
		fragmentSource = """
		uniform float red;
		uniform float green;
		uniform float blue;

		void main()
		{
			gl_FragColor = vec4( red, green, blue, 1 );
		}
		"""

		attributes = renderer.attributes(
			IECore.CompoundObject( {
				"gl:surface" : IECoreScene.ShaderNetwork(
					{
						"output" : IECoreScene.Shader( "rgbColor", "surface", { "gl:fragmentSource" : fragmentSource } )
					},
					output = "output"
				)
			} )
		)

		def sphere( red, green, blue ) :

			# Sphere carrying constant "red"/"green"/"blue" primvars for the shader.
			s = IECoreScene.SpherePrimitive()
			s["red"] = IECoreScene.PrimitiveVariable( IECoreScene.PrimitiveVariable.Interpolation.Constant, IECore.FloatData( red ) )
			s["green"] = IECoreScene.PrimitiveVariable( IECoreScene.PrimitiveVariable.Interpolation.Constant, IECore.FloatData( green ) )
			s["blue"] = IECoreScene.PrimitiveVariable( IECoreScene.PrimitiveVariable.Interpolation.Constant, IECore.FloatData( blue ) )
			return s

		# Red sphere at the centre, green to the left, blue to the right.
		renderer.object(
			"redSphere",
			sphere( 1, 0, 0 ),
			attributes
		).transform(
			imath.M44f().translate( imath.V3f( 0, 0, -5 ) )
		)

		renderer.object(
			"greenSphere",
			sphere( 0, 1, 0 ),
			attributes
		).transform(
			imath.M44f().translate( imath.V3f( -1, 0, -5 ) )
		)

		renderer.object(
			"blueSphere",
			sphere( 0, 0, 1 ),
			attributes
		).transform(
			imath.M44f().translate( imath.V3f( 1, 0, -5 ) )
		)

		renderer.render()

		image = IECore.Reader.create( self.temporaryDirectory() + "/testPrimVars.exr" ).read()
		dimensions = image.dataWindow.size() + imath.V2i( 1 )

		# Left edge of the middle row -> green sphere.
		index = dimensions.x * int( dimensions.y * 0.5 )
		self.assertEqual( image["R"][index], 0 )
		self.assertEqual( image["G"][index], 1 )
		self.assertEqual( image["B"][index], 0 )

		# Centre of the middle row -> red sphere.
		index = dimensions.x * int(dimensions.y * 0.5) + int( dimensions.x * 0.5 )
		self.assertEqual( image["R"][index], 1 )
		self.assertEqual( image["G"][index], 0 )
		self.assertEqual( image["B"][index], 0 )

		# Right edge of the middle row -> blue sphere.
		index = dimensions.x * int(dimensions.y * 0.5) + int( dimensions.x * 1 ) - 1
		self.assertEqual( image["R"][index], 0 )
		self.assertEqual( image["G"][index], 0 )
		self.assertEqual( image["B"][index], 1 )

	def testShaderParameters( self ) :

		renderer = GafferScene.Private.IECoreScenePreview.Renderer.create( "OpenGL" )
		renderer.output( "test", IECoreScene.Output( self.temporaryDirectory() + "/testShaderParameters.exr", "exr", "rgba", {} ) )

		fragmentSource = """
		uniform vec3 colorValue;

		void main()
		{
			gl_FragColor = vec4( colorValue, 1 );
		}
		"""

		# Here the colour comes from a shader parameter, not a primvar.
		attributes = renderer.attributes(
			IECore.CompoundObject( {
				"gl:surface" : IECoreScene.ShaderNetwork(
					shaders = {
						"output" : IECoreScene.Shader(
							"color",
							"surface",
							{
								"gl:fragmentSource" : fragmentSource,
								"colorValue" : imath.Color3f( 1, 0, 0 )
							}
						)
					},
					output = "output"
				)
			} )
		)

		renderer.object(
			"sphere",
			IECoreScene.SpherePrimitive(),
			attributes
		).transform(
			imath.M44f().translate( imath.V3f( 0, 0, -5 ) )
		)

		# Smoke test only : rendering must complete without throwing.
		renderer.render()

	def testQueryBound( self ) :

		renderer = GafferScene.Private.IECoreScenePreview.Renderer.create(
			"OpenGL",
			GafferScene.Private.IECoreScenePreview.Renderer.RenderType.Interactive
		)

		cube = IECoreScene.MeshPrimitive.createBox( imath.Box3f( imath.V3f( -1 ), imath.V3f( 9 ) ) )

		o = renderer.object(
			"/cube",
			cube,
			renderer.attributes( IECore.CompoundObject() )
		)
		o.transform(
			imath.M44f().translate( imath.V3f( 1 ) )
		)

		# The queried bound must account for the object's transform.
		self.assertEqual(
			renderer.command( "gl:queryBound", {} ),
			imath.Box3f(
				cube.bound().min() + imath.V3f( 1 ),
				cube.bound().max() + imath.V3f( 1 )
			)
		)

		del o
if __name__ == "__main__":
	# Allow running this test module directly.
	unittest.main()
| 29.402597
| 128
| 0.66755
|
4a0ab9510b9828ec238beba30bcffcac7ec8675f
| 3,061
|
py
|
Python
|
lamlight/aws_resources/ec2_utils.py
|
Rohit25negi/lamlight
|
5ae455f8018efef8aa0049825d73777311a806a7
|
[
"MIT"
] | 40
|
2017-12-03T13:12:51.000Z
|
2018-05-24T10:03:46.000Z
|
lamlight/aws_resources/ec2_utils.py
|
Rohit25negi/lamlight
|
5ae455f8018efef8aa0049825d73777311a806a7
|
[
"MIT"
] | 10
|
2018-05-19T13:37:57.000Z
|
2018-06-15T10:09:41.000Z
|
lamlight/aws_resources/ec2_utils.py
|
Rohit25negi/Lamlight
|
5ae455f8018efef8aa0049825d73777311a806a7
|
[
"MIT"
] | 1
|
2018-04-25T05:12:17.000Z
|
2018-04-25T05:12:17.000Z
|
"""
Module provides the following utilities:
1) List the available subnets and let user select one.
2) List the available IAM roles and let user selecrt one.
3) List the available Security Groups and let user select one.
"""
import os
import boto3
def get_subnet_id():
    """
    Prompt the user to pick the subnet for the new lambda function.

    Lists every subnet visible in the configured AWS region on stdout
    and reads the chosen subnet id from stdin.

    Returns
    --------
    subnet_id: str
        subnet id entered by the user
    """
    client = boto3.client('ec2', region_name=os.getenv('AWS_REGION'))
    subnets = client.describe_subnets()
    # Fix: untagged subnets carry no "Tags" key in the API response, so the
    # original subnet["Tags"] lookup raised KeyError; default to [].
    trimmed_subnets_list = [{"SubnetId": subnet.get("SubnetId"),
                             "VpcId": subnet["VpcId"],
                             "Tags": subnet.get("Tags", [])} for subnet in subnets.get("Subnets")
                            ]

    # Single-argument print(...) behaves identically under Python 2 and 3.
    print("-----------------------------SELECT THE SUBNET---------------------------------")
    for subnet in trimmed_subnets_list:
        print("SUBNET ID = {}".format(subnet["SubnetId"]))
        print("VPC ID = {}".format(subnet["VpcId"]))
        print("TAGS = {}".format(subnet["Tags"]))
        print("")
    print("-------------------------------------------------------------------------------")
    subnet_id = raw_input('enter the subnet id : ').strip()
    return subnet_id
def get_role():
    """
    Prompt the user to pick the IAM role for the lambda function.

    Returns
    --------
    role_arn: str
        IAM Role ARN entered by the user
    """
    client = boto3.client('iam', region_name=os.getenv('AWS_REGION'))
    listing = client.list_roles()
    # Keep only the fields we display.
    summaries = []
    for entry in listing.get('Roles'):
        summaries.append({'RoleName': entry['RoleName'], 'Arn': entry['Arn']})

    print("-----------------------------SELECT THE ROLE----------------------------------")
    for summary in summaries:
        print("RoleName = {}".format(summary.get("RoleName")))
        print("Arn = {}".format(summary.get("Arn")))
        print("")
    print("------------------------------------------------------------------------------")
    return raw_input('give the role Arn : ').strip()
def get_security_group():
    """
    Prompt the user to pick the security group for the lambda function.

    Returns
    --------
    security_group: str
        security group id entered by the user
    """
    ec2 = boto3.client('ec2', region_name=os.getenv('AWS_REGION'))
    response = ec2.describe_security_groups()
    # Keep only the fields we display.
    summaries = [
        {"GroupName": group["GroupName"], "GroupId": group["GroupId"]}
        for group in response['SecurityGroups']
    ]

    print("-----------------------------SELECT THE SECURITY GROUP-------------------------")
    for group in summaries:
        print("GROUP NAME = {}".format(group['GroupName']))
        print("GROUP ID = {}".format(group["GroupId"]))
        print("")
    print("--------------------------------------------------------------------------------")
    return raw_input("security group id : ").strip()
| 34.784091
| 92
| 0.504084
|
4a0ab977d2bfc18f74500f65c190f6a2342e80b2
| 485
|
py
|
Python
|
tests/atest/transformers/NormalizeSettingName/test_transformer.py
|
Liambeguin/robotframework-tidy
|
c67875f9d40dacff974256dda16501190b5faa2d
|
[
"Apache-2.0"
] | 37
|
2020-12-02T19:55:16.000Z
|
2022-03-27T09:47:27.000Z
|
tests/atest/transformers/NormalizeSettingName/test_transformer.py
|
Liambeguin/robotframework-tidy
|
c67875f9d40dacff974256dda16501190b5faa2d
|
[
"Apache-2.0"
] | 223
|
2020-12-03T15:44:48.000Z
|
2022-03-29T07:10:41.000Z
|
tests/atest/transformers/NormalizeSettingName/test_transformer.py
|
Liambeguin/robotframework-tidy
|
c67875f9d40dacff974256dda16501190b5faa2d
|
[
"Apache-2.0"
] | 7
|
2021-01-24T21:38:53.000Z
|
2022-03-24T17:11:11.000Z
|
from .. import run_tidy_and_compare
class TestNormalizeSettingName:
    """Acceptance tests for the NormalizeSettingName transformer."""

    # Transformer under test, passed through to the tidy runner.
    TRANSFORMER_NAME = 'NormalizeSettingName'

    def test_normalize_setting_name(self):
        # Whole-file run: output must match the tests.robot expected fixture.
        run_tidy_and_compare(self.TRANSFORMER_NAME, source='tests.robot')

    def test_normalize_setting_name_selected(self):
        # Line-restricted run (12-15) must leave the rest of the file untouched.
        run_tidy_and_compare(
            self.TRANSFORMER_NAME,
            source='tests.robot',
            expected='selected.robot',
            config=' --startline 12 --endline 15'
        )
| 28.529412
| 73
| 0.682474
|
4a0ab9eacf08de65b3f21afb1a5be61b8bae8d39
| 3,757
|
py
|
Python
|
fastreid/data/datasets/vehicleid.py
|
tycallen/fast-reid
|
66683fa95bc7d7222659e8db3ac04e5b8e366190
|
[
"Apache-2.0"
] | 4
|
2021-04-26T05:35:37.000Z
|
2021-08-29T23:36:44.000Z
|
fastreid/data/datasets/vehicleid.py
|
tycallen/fast-reid
|
66683fa95bc7d7222659e8db3ac04e5b8e366190
|
[
"Apache-2.0"
] | null | null | null |
fastreid/data/datasets/vehicleid.py
|
tycallen/fast-reid
|
66683fa95bc7d7222659e8db3ac04e5b8e366190
|
[
"Apache-2.0"
] | 1
|
2021-04-26T05:35:38.000Z
|
2021-04-26T05:35:38.000Z
|
# encoding: utf-8
"""
@author: Jinkai Zheng
@contact: 1315673509@qq.com
"""
import os.path as osp
import random
from .bases import ImageDataset
from ..datasets import DATASET_REGISTRY
@DATASET_REGISTRY.register()
class VehicleID(ImageDataset):
    """VehicleID.

    Reference:
        Liu et al. Deep relative distance learning: Tell the difference between similar vehicles. CVPR 2016.

    URL: `<https://pkuml.org/resources/pku-vehicleid.html>`_

    Train dataset statistics:
        - identities: 13164.
        - images: 113346.
    """
    dataset_dir = "vehicleid"
    dataset_name = "vehicleid"

    def __init__(self, root='datasets', test_list='', **kwargs):
        self.dataset_dir = osp.join(root, self.dataset_dir)

        self.image_dir = osp.join(self.dataset_dir, 'image')
        self.train_list = osp.join(self.dataset_dir, 'train_test_split/train_list.txt')
        if test_list:
            self.test_list = test_list
        else:
            # Default protocol: the full 13164-identity test split.
            self.test_list = osp.join(self.dataset_dir, 'train_test_split/test_list_13164.txt')

        required_files = [
            self.dataset_dir,
            self.image_dir,
            self.train_list,
            self.test_list,
        ]
        self.check_before_run(required_files)

        train = self.process_dir(self.train_list, is_train=True)
        query, gallery = self.process_dir(self.test_list, is_train=False)

        super(VehicleID, self).__init__(train, query, gallery, **kwargs)

    def process_dir(self, list_file, is_train=True):
        """Parse a ``<image id> <vehicle id>`` listing file.

        Returns the full sample list when ``is_train`` is True; otherwise
        splits samples into ``(query, gallery)``: one random image per
        vehicle id goes to the gallery, the rest to the query set.
        """
        # Fix: close the listing file deterministically instead of leaking
        # the handle (original used open(...).readlines() without close).
        with open(list_file, 'r') as f:
            img_list_lines = f.readlines()

        dataset = []
        for idx, line in enumerate(img_list_lines):
            line = line.strip()
            vid = int(line.split(' ')[1])
            imgid = line.split(' ')[0]
            img_path = osp.join(self.image_dir, imgid + '.jpg')
            if is_train:
                # Prefix ids so they remain unique when datasets are combined.
                vid = self.dataset_name + '_' + str(vid)
            dataset.append((img_path, vid, imgid))

        if is_train:
            return dataset
        else:
            # NOTE: no fixed seed here, so the query/gallery split varies
            # between runs (matches the original behaviour).
            random.shuffle(dataset)
            vid_container = set()
            query = []
            gallery = []
            for sample in dataset:
                if sample[1] not in vid_container:
                    vid_container.add(sample[1])
                    gallery.append(sample)
                else:
                    query.append(sample)

            return query, gallery
@DATASET_REGISTRY.register()
class SmallVehicleID(VehicleID):
    """VehicleID small test protocol (800 identities, 6493 images)."""

    def __init__(self, root='datasets', **kwargs):
        # Point the parent at the 800-identity test split.
        base_dir = osp.join(root, self.dataset_dir)
        self.test_list = osp.join(base_dir, 'train_test_split/test_list_800.txt')

        super(SmallVehicleID, self).__init__(root, self.test_list, **kwargs)
@DATASET_REGISTRY.register()
class MediumVehicleID(VehicleID):
    """VehicleID medium test protocol (1600 identities, 13377 images)."""

    def __init__(self, root='datasets', **kwargs):
        # Point the parent at the 1600-identity test split.
        base_dir = osp.join(root, self.dataset_dir)
        self.test_list = osp.join(base_dir, 'train_test_split/test_list_1600.txt')

        super(MediumVehicleID, self).__init__(root, self.test_list, **kwargs)
@DATASET_REGISTRY.register()
class LargeVehicleID(VehicleID):
    """VehicleID large test protocol (2400 identities, 19777 images)."""

    def __init__(self, root='datasets', **kwargs):
        # Point the parent at the 2400-identity test split.
        base_dir = osp.join(root, self.dataset_dir)
        self.test_list = osp.join(base_dir, 'train_test_split/test_list_2400.txt')

        super(LargeVehicleID, self).__init__(root, self.test_list, **kwargs)
| 30.056
| 108
| 0.620708
|
4a0aba1b020862032f87786e7f35d81670da9d0c
| 151
|
py
|
Python
|
datasets/__init__.py
|
ml-research/Do-Not-Trust-Prediction-Scores-for-Membership-Inference-Attacks
|
396a4d903a97e10c3a1ecfec131a660aaf9a384e
|
[
"MIT"
] | 3
|
2021-11-17T14:14:52.000Z
|
2021-12-13T09:35:11.000Z
|
datasets/__init__.py
|
ml-research/Do-Not-Trust-Prediction-Scores-for-Membership-Inference-Attacks
|
396a4d903a97e10c3a1ecfec131a660aaf9a384e
|
[
"MIT"
] | null | null | null |
datasets/__init__.py
|
ml-research/Do-Not-Trust-Prediction-Scores-for-Membership-Inference-Attacks
|
396a4d903a97e10c3a1ecfec131a660aaf9a384e
|
[
"MIT"
] | 1
|
2021-12-10T07:37:16.000Z
|
2021-12-10T07:37:16.000Z
|
from .fake_cifar10 import FakeCIFAR10
from .stanford_dogs import StanfordDogs
from .afhq import AFHQ
__all__ = ['FakeCIFAR10', 'StanfordDogs', 'AFHQ']
| 30.2
| 49
| 0.794702
|
4a0abbe9a67f18568113435048d75483eb4a5cd4
| 5,237
|
py
|
Python
|
src/grammar_learner/kmeans.py
|
vsbogd/language-learning
|
601e7bc7f97a0b6c1f713f8108fc6e81d492e921
|
[
"MIT"
] | 21
|
2018-01-30T08:18:20.000Z
|
2020-02-18T08:15:53.000Z
|
src/grammar_learner/kmeans.py
|
vsbogd/language-learning
|
601e7bc7f97a0b6c1f713f8108fc6e81d492e921
|
[
"MIT"
] | 3
|
2018-02-09T18:22:45.000Z
|
2018-07-10T10:31:25.000Z
|
src/grammar_learner/kmeans.py
|
vsbogd/language-learning
|
601e7bc7f97a0b6c1f713f8108fc6e81d492e921
|
[
"MIT"
] | 10
|
2018-01-30T10:31:43.000Z
|
2021-01-26T07:54:24.000Z
|
#language-learning/src/grammar_learner/kmeans.py POC.0.5 80725 as-was tmp
import logging
import numpy as np
import pandas as pd
from sklearn.cluster import KMeans
from sklearn.metrics import pairwise_distances, silhouette_score
from statistics import mode
from .utl import round1, round2, round3
# -from kmeans import cluster_words_kmeans #this module
def cluster_words_kmeans(words_df, n_clusters):
    """Cluster word vectors with k-means.

    Args:
        words_df: DataFrame with a 'word' column plus numeric vector columns.
        n_clusters: number of clusters to fit.

    Returns:
        (cdf, silhouette, inertia): cdf has one row per cluster with a
        'C01'-style id, the member word list, and centroid coordinates.
    """
    words_list = words_df['word'].tolist()
    df = words_df.copy()
    del df['word']  # fit on the numeric vector columns only
    kmeans_model = KMeans(init='k-means++', n_clusters=n_clusters, n_init=10)
    kmeans_model.fit(df)
    labels = kmeans_model.labels_
    inertia = kmeans_model.inertia_
    # Keep only centroids for labels that were actually assigned.
    centroids = np.asarray(kmeans_model.cluster_centers_[:(max(labels)+1)])
    silhouette = silhouette_score(df, labels, metric ='euclidean')

    cdf = pd.DataFrame(centroids)
    # Flush numerical noise to exact zero.
    cdf = cdf.applymap(lambda x: x if abs(x) > 1e-12 else 0.)
    # Shift integer column labels to start at 1, matching the input frame.
    cdf.columns = [x+1 if type(x)==int else x for x in cdf.columns]
    cols = cdf.columns.tolist()

    def cluster_word_list(i):
        # Words whose k-means label equals cluster index i.
        return [words_list[j] for j,x in enumerate(labels) if x==i]

    cdf['cluster'] = cdf.index
    cdf['cluster_words'] = cdf['cluster'].apply(cluster_word_list)
    #+cdf = cdf.sort_values(by=[1,2,3], ascending=[True,True,True])
    # Order clusters by their first two centroid coordinates, then re-id.
    cdf = cdf.sort_values(by=[1,2], ascending=[True,True])
    cdf.index = range(1, len(cdf)+1)

    def cluster_id(row): return 'C' + str(row.name).zfill(2)
    cdf['cluster'] = cdf.apply(cluster_id, axis=1)
    cols = ['cluster', 'cluster_words'] + cols
    cdf = cdf[cols]

    return cdf, silhouette, inertia
def number_of_clusters(vdf, cluster_range, algorithm='kmeans', \
        criteria='silhouette', level=0.9, verbose='none'):
    """Heuristically choose a cluster count for the word vectors in ``vdf``.

    Runs k-means over candidate counts from ``cluster_range`` (min, max,
    step) and scores each with the silhouette index:
      - level ~1.0: count with the best silhouette;
      - level ~0.0: largest candidate count;
      - otherwise : smallest count whose silhouette exceeds
                    level * best_silhouette.
    """
    logger = logging.getLogger(__name__ + ".number_of_clusters")
    # Degenerate range: nothing to search over.
    if(len(cluster_range) < 2 or cluster_range[2] < 1):
        return cluster_range[0]
    sil_range = pd.DataFrame(columns=['Np','Nc','Silhouette','Inertia'])
    # if verbose == 'debug':
    #     print('clustering/poc.py/number_of_clusters: vdf:\n', \
    #         vdf.applymap(round2).sort_values(by=[1,2], ascending=[True,True]))
    logger.debug('clustering/poc.py/number_of_clusters: vdf:\n{}'.format(
        vdf.applymap(round2).sort_values(by=[1,2], ascending=[True,True])))

    # Check number of clusters <= word vector dimensionality
    max_clusters = min(cluster_range[1], len(vdf), \
        max([x for x in list(vdf) if isinstance(x,int)]))
    #?if max([x for x in list(vdf) if isinstance(x,int)]) < cluster_range[0]+1:
    #?    max_clusters = min(cluster_range[1], len(vdf))  #FIXME: hack 80420!
    if max([x for x in list(vdf) if isinstance(x,int)]) == 2:
        # if verbose in ['max','debug']: print('2 dim word space -- 4 clusters')
        logger.info('2 dim word space -- 4 clusters')
        return 4  #FIXME: hack 80420!
    # if verbose in ['max', 'debug']:
    #     print('number_of_clusters: max_clusters =', max_clusters)
    logger.info('number_of_clusters: max_clusters = {}'.format(max_clusters))
    n_clusters = max_clusters  #80623: cure case max < range.min
    #FIXME: unstable number of clusters #80422
    lst = []
    attempts = 1  #12
    for k in range(attempts):
        for i,j in enumerate(range(cluster_range[0], max_clusters, cluster_range[2])):
            cdf, silhouette, inertia = cluster_words_kmeans(vdf, j)
            # if verbose in ['debug']:
            #     print(j, 'clusters ⇒ silhouette =', silhouette)
            logger.debug(f'{j} clusters ⇒ silhouette = {silhouette}')
            sil_range.loc[i] = [j, len(cdf), round(silhouette,4), round(inertia,2)]
        if level > 0.9999:  # 1 - max Silhouette index
            n_clusters = sil_range.loc[sil_range['Silhouette'].idxmax()]['Nc']
        elif level < 0.0001:  # 0 - max number pf clusters
            n_clusters = sil_range.loc[sil_range['Nc'].idxmax()]['Nc']
        else:
            # Smallest count whose silhouette clears the relative threshold.
            thresh = level * sil_range.loc[sil_range['Silhouette'].idxmax()]['Silhouette']
            n_clusters = min(sil_range.loc[sil_range['Silhouette'] > thresh]['Nc'].tolist())
        lst.append(int(n_clusters))

    # Reconcile per-attempt choices: mean vs most-frequent vs statistical mode.
    dct = dict()
    for n in lst:
        if n in dct:
            dct[n] += 1
        else: dct[n] = 1
    n_clusters = int(round(np.mean(lst),0))
    n2 = list(dct.keys())[list(dct.values()).index(max(list(dct.values())))]
    if n2 != n_clusters:
        if len(list(dct.values())) == len(set(list(dct.values()))):
            n3 = mode(lst)  # Might get error
        else: n3 = n_clusters
        n_clusters = int(round((n_clusters + n2 + n3)/3.0, 0))
    # if verbose in ['max', 'debug']:
    #     if len(dct) > 1:
    #         print('Clusters:', sorted(lst), '⇒', n_clusters)
    if len(dct) > 1:
        logger.info(f'Clusters: {sorted(lst)} ⇒ {n_clusters}')

    return int(n_clusters)
#80219 update cluster_kmeans 80209 ⇒ cluster_words_kmeans: DataFrames, in and out
#80617 kmeans_model = KMeans(init='random', n_clusters=n_clusters, n_init=30) #fails?
#80725 POC 0.1-0.4 deleted, 0.5 restructured
#80802 cluster_words_kmeans ⇒ clustering.py for further dev,
#number_of_clusters copied here from clustering.py,
#this file left for POC.0.5 legacy FIXME:DEL (wait)
| 43.641667
| 96
| 0.638152
|
4a0abcb18de9ee16b7edff3bb436b2225c04f2b5
| 192
|
py
|
Python
|
button/base_btn.py
|
PitPietro/gpiozero-pyqt5
|
0384d34348841d193c025a1909d909d1bf772a7d
|
[
"MIT"
] | null | null | null |
button/base_btn.py
|
PitPietro/gpiozero-pyqt5
|
0384d34348841d193c025a1909d909d1bf772a7d
|
[
"MIT"
] | null | null | null |
button/base_btn.py
|
PitPietro/gpiozero-pyqt5
|
0384d34348841d193c025a1909d909d1bf772a7d
|
[
"MIT"
] | null | null | null |
from gpiozero import Button

# Poll a push button wired to GPIO pin 2 and report its state forever.
button = Button(2)

while True:
    if button.is_pressed:
        message = "Button is pressed - LOW"
    else:
        message = "Button is not pressed - HIGH"
    print(message)
| 19.2
| 45
| 0.614583
|
4a0abce08077970a4b73fb0b39738f1c555aa2a4
| 676
|
py
|
Python
|
WEEKS/CD_Sata-Structures/_MISC/misc-examples/python3-book-examples/time/time_get_clock_info.py
|
webdevhub42/Lambda
|
b04b84fb5b82fe7c8b12680149e25ae0d27a0960
|
[
"MIT"
] | null | null | null |
WEEKS/CD_Sata-Structures/_MISC/misc-examples/python3-book-examples/time/time_get_clock_info.py
|
webdevhub42/Lambda
|
b04b84fb5b82fe7c8b12680149e25ae0d27a0960
|
[
"MIT"
] | null | null | null |
WEEKS/CD_Sata-Structures/_MISC/misc-examples/python3-book-examples/time/time_get_clock_info.py
|
webdevhub42/Lambda
|
b04b84fb5b82fe7c8b12680149e25ae0d27a0960
|
[
"MIT"
] | null | null | null |
"""The difference between clock and time.
"""
# end_pymotw_header
import textwrap
import time
# The four stdlib clocks worth comparing, in display order.
available_clocks = [
    ("monotonic", time.monotonic),
    ("perf_counter", time.perf_counter),
    ("process_time", time.process_time),
    ("time", time.time),
]

# Shared report template, dedented once up front instead of per iteration.
_template = textwrap.dedent(
    """\
    {name}:
        adjustable    : {info.adjustable}
        implementation: {info.implementation}
        monotonic     : {info.monotonic}
        resolution    : {info.resolution}
        current       : {current}
    """
)

for _name, _func in available_clocks:
    _info = time.get_clock_info(_name)
    print(_template.format(name=_name, info=_info, current=_func()))
| 24.142857
| 87
| 0.615385
|
4a0abdb3269da3835943a0f144f3b4cc4938b15c
| 75,134
|
py
|
Python
|
rawdata.py
|
icogg/psl
|
d53329fc4b72ceb55fff1f4c73f6d13666a2caf1
|
[
"MIT"
] | null | null | null |
rawdata.py
|
icogg/psl
|
d53329fc4b72ceb55fff1f4c73f6d13666a2caf1
|
[
"MIT"
] | null | null | null |
rawdata.py
|
icogg/psl
|
d53329fc4b72ceb55fff1f4c73f6d13666a2caf1
|
[
"MIT"
] | null | null | null |
blankpsl = """
A P P : C o u r i e r
T Y P E : S c h e m e L o g i c E d i t o r
F O R M A T : 1 . 0
V E R S I O N : 4 . 0 0
D O M A I N : 0 0 S e t t i n g s
S U B D O M A I N : 0 P S L S e t t i n g G r p 1
M O D E L : P 1 4 2 1 1 7 B 4 M 0 4 3 0 J
R E F E R E N C E :
D D B D E S C R I P T I O N F I L E :
F i l e c r e a t e d f r o m t e m p l a t e o n T h u r s d a y , D e c e m b e r 2 0 , 2 0 1 8 Â X @ +++++++++++++++++!!!!!!!!!!!!!!!!++++++++++++++++""""""""EEEEEEEEEEEEEEE+++++++++++++++++FFFFFFFFGGGGGGGGGGGGGGGG((((((((((((((((I$$$$$$$$$$$$$$$$$+$$$+$CCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCC+CCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCC$$$$$$$$$$$$$$$$+$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$+$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$,,,,,,,,,,,,,,,,,+C$,$$,,$$CCCCCC$$$$$$$$$$$$CCC$$$$$$CCCC$$++++++++CCC$CC$$C+$CCC$$$$++++C+++++++++++++++++++++++CCCCCCCCCC$$$$$$$$CC++++++CCCCCC$C$$$C$$$$$CCC+++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$CCCC$$$$+++++++$$$$$$$$$$$$$$$$$++++++++++++++--------------------------------////////////////////////////////////////////////////////////////11111111RRRRRRRR+++++++++++JJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJ+PPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPP$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++C$$$$$$$C++$$++$$$$$$$C++$$++$$$$$$$C++$$++$$$$$$$C++$$++$$$$$$$C++$$++$$$$$$$C++$$++$$$$$$$C++$$++$$$$$$$C++$$++$$$$$$$C++$$CC+++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++Output R1 Output R2 Output R3 Output R4 Output R5 Output R6 Output R7 Output R8 Output R9 Output R10 Output R11 Output R12 Output R13 Output R14 Output R15 Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Input L1 Input L2 Input L3 Input L4 Input L5 Input L6 Input L7 Input L8 Input L9 Input L10 Input 
L11 Input L12 Input L13 Input L14 Input L15 Input L16 Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused LED 1 LED 2 LED 3 LED 4 LED 5 LED 6 LED 7 LED 8 Relay Cond 1 Relay Cond 2 Relay Cond 3 Relay Cond 4 Relay Cond 5 Relay Cond 6 Relay Cond 7 Relay Cond 8 Relay Cond 9 Relay Cond 10 Relay Cond 11 Relay Cond 12 Relay Cond 13 Relay Cond 14 Relay Cond 15 Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused LED Cond IN 1 LED Cond IN 2 LED Cond IN 3 LED Cond IN 4 LED Cond IN 5 LED Cond IN 6 LED Cond IN 7 LED Cond IN 8 Timer in 1 Timer in 2 Timer in 3 Timer in 4 Timer in 5 Timer in 6 Timer in 7 Timer in 8 Timer in 9 Timer in 10 Timer in 11 Timer in 12 Timer in 13 Timer in 14 Timer in 15 Timer in 16 Timer out 1 Timer out 2 Timer out 3 Timer out 4 Timer out 5 Timer out 6 Timer out 7 Timer out 8 Timer out 9 Timer out 10 Timer out 11 Timer out 12 Timer out 13 Timer out 14 Timer out 15 Timer out 16 Fault REC TRIG SG-opto Invalid Prot'n Disabled F out of Range VT Fail Alarm CT Fail Alarm CB Fail Alarm I^ Maint Alarm I^ Lockout AlarmCB Ops Maint CB Ops Lockout CB Op Time MaintCB Op Time Lock Fault Freq Lock CB Status Alarm Man CB Trip FailMan CB Cls Fail Man CB UnhealthyUnused AR Lockout AR CB Unhealthy AR No Sys Check Unused UV Block SR User Alarm 1 SR User Alarm 2 SR User Alarm 3 SR User Alarm 4 SR User Alarm 5 SR User Alarm 6 SR User Alarm 7 SR User Alarm 8 SR User Alarm 9 SR User Alarm 10SR User Alarm 11SR User Alarm 12SR User Alarm 13SR User Alarm 14SR User Alarm 15SR User Alarm 16SR User Alarm 17MR User Alarm 18MR User Alarm 19MR User Alarm 20MR User Alarm 21MR User Alarm 22MR User Alarm 23MR User Alarm 24MR User Alarm 25MR User Alarm 26MR User Alarm 27MR User Alarm 28MR User Alarm 29MR User Alarm 30MR User Alarm 31MR User Alarm 32MR User Alarm 33MR User Alarm 34MR User Alarm 35I>1 Timer Block I>2 Timer Block I>3 Timer Block I>4 Timer Block Unused 
IN1>1 Timer Blk IN1>2 Timer Blk IN1>3 Timer Blk IN1>4 Timer Blk IN2>1 Timer Blk IN2>2 Timer Blk IN2>3 Timer Blk IN2>4 Timer Blk ISEF>1 Timer BlkISEF>2 Timer BlkISEF>3 Timer BlkISEF>4 Timer BlkVN>1 Timer Blk VN>2 Timer Blk V<1 Timer Block V<2 Timer Block V>1 Timer Block V>2 Timer Block CLP Initiate Ext. Trip 3ph CB Aux 3ph(52-A)CB Aux 3ph(52-B)CB Healthy MCB/VTS Init Trip CB Init Close CB Reset Close Dly Reset Relays/LEDReset Thermal Reset Lockout Reset CB Data Block AR Live Line Mode Auto Mode Telecontrol ModeI>1 Trip I>1 Trip A I>1 Trip B I>1 Trip C I>2 Trip I>2 Trip A I>2 Trip B I>2 Trip C I>3 Trip I>3 Trip A I>3 Trip B I>3 Trip C I>4 Trip I>4 Trip A I>4 Trip B I>4 Trip C Unused Broken Line TripIN1>1 Trip IN1>2 Trip IN1>3 Trip IN1>4 Trip IN2>1 Trip IN2>2 Trip IN2>3 Trip IN2>4 Trip ISEF>1 Trip ISEF>2 Trip ISEF>3 Trip ISEF>4 Trip IREF> Trip VN>1 Trip VN>2 Trip Thermal Trip V2> Trip V<1 Trip V<1 Trip A/AB V<1 Trip B/BC V<1 Trip C/CA V<2 Trip V<2 Trip A/AB V<2 Trip B/BC V<2 Trip C/CA V>1 Trip V>1 Trip A/AB V>1 Trip B/BC V>1 Trip C/CA V>2 Trip V>2 Trip A/AB V>2 Trip B/BC V>2 Trip C/CA Any Start I>1 Start I>1 Start A I>1 Start B I>1 Start C I>2 Start I>2 Start A I>2 Start B I>2 Start C I>3 Start I>3 Start A I>3 Start B I>3 Start C I>4 Start I>4 Start A I>4 Start B I>4 Start C VCO Start AB VCO Start BC VCO Start CA Unused IN1>1 Start IN1>2 Start IN1>3 Start IN1>4 Start IN2>1 Start IN2>2 Start IN2>3 Start IN2>4 Start ISEF>1 Start ISEF>2 Start ISEF>3 Start ISEF>4 Start VN>1 Start VN>2 Start Thermal Alarm V2> Start V<1 Start V<1 Start A/AB V<1 Start B/BC V<1 Start C/CA V<2 Start V<2 Start A/AB V<2 Start B/BC V<2 Start C/CA V>1 Start V>1 Start A/AB V>1 Start B/BC V>1 Start C/CA V>2 Start V>2 Start A/AB V>2 Start B/BC V>2 Start C/CA CLP Operation I> BlockStart IN/SEF>Blk StartVTS Fast Block VTS Slow Block CTS Block Bfail1 Trip 3ph Bfail2 Trip 3ph Control Trip Control Close Close in Prog Block Main Prot Block SEF Prot AR In Progress AR In Service Seq Counter = 0 Seq Counter = 
1 Seq Counter = 2 Seq Counter = 3 Seq Counter = 4 Successful CloseDead T in Prog Protection LocktReset Lckout AlmAuto Close AR Trip Test IA< Start IB< Start IC< Start IN< Start ISEF< Start CB Open 3 ph CB Closed 3 ph All Poles Dead Any Pole Dead Pole Dead A Pole Dead B Pole Dead C VTS Acc Ind VTS Volt Dep VTS IA> VTS IB> VTS IC> VTS VA> VTS VB> VTS VC> VTS I2> VTS V2> VTS IA delta> VTS IB delta> VTS IC delta> CBF SEF Trip CBF Non I Trip CBF SEF Trip-1 CBF Non I Trip-1Unused AR Sys Checks OKLockout Alarm Pre-Lockout Freq High Freq Low Stop Freq Track Start N Field volts failFreq Not Found F<1 Timer Block F<2 Timer Block F<3 Timer Block F<4 Timer Block F>1 Timer Block F>2 Timer Block F<1 Start F<2 Start F<3 Start F<4 Start F>1 Start F>2 Start F<1 Trip F<2 Trip F<3 Trip F<4 Trip F>1 Trip F>2 Trip YN> Timer Block GN> Timer Block BN> Timer Block YN> Start GN> Start BN> Start YN> Trip GN> Trip BN> Trip Ext AR Prot TripExt AR Prot StrtTest Mode Inhibit SEF Live Line Dead Line Unused Unused Unused Unused Unused Unused Unused Unused DAR Complete CB in Service AR Restart AR In Progress 1DeadTime EnabledDT OK To Start DT Complete Reclose Checks Circuits OK Unused AR SysChecks OK AR Init TripTest103 MonitorBlock103 CommandBlockISEF>1 Start 2 ISEF>2 Start 2 ISEF>3 Start 2 ISEF>4 Start 2 Unused Unused Unused Unused Time Synch Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused External Trip A External Trip B External Trip C External Trip EFExternal TripSEFI2> Inhibit I2>1 Tmr Blk I2>2 Tmr Blk I2>3 Tmr Blk I2>4 Tmr Blk I2>1 Start I2>2 Start I2>3 Start I2>4 Start I2>1 Trip I2>2 Trip I2>3 Trip I2>4 Trip V2> Accelerate Trip LED TriggerUnused Unused Unused Unused Unused Unused Blk Rmt. CB Ops SG Select x1 SG Select 1x IN1> Inhibit IN2> Inhibit AR Skip Shot 1 Logic 0 Ref. 
Inh Reclaim TimeReclaim In Prog Reclaim CompleteBrokenLine StartTrip Command In Trip Command OutIA2H Start IB2H Start IC2H Start I2H Any Start RP1 Read Only RP2 Read Only NIC Read Only Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Power>1 3PhStartPower>1 A Start Power>1 B Start Power>1 C Start Power>2 3PhStartPower>2 A Start Power>2 B Start Power>2 C Start Power<1 3PhStartPower<1 A Start Power<1 B Start Power<1 C Start Power<2 3PhStartPower<2 A Start Power<2 B Start Power<2 C Start Power>1 3Ph TripPower>1 A Trip Power>1 B Trip Power>1 C Trip Power>2 3Ph TripPower>2 A Trip Power>2 B Trip Power>2 C Trip Power<1 3Ph TripPower<1 A Trip Power<1 B Trip Power<1 C Trip Power<2 3Ph TripPower<2 A Trip Power<2 B Trip Power<2 C Trip Power>1 Block Power>2 Block Power<1 Block Power<2 Block SensP1 Start A 
SensP2 Start A SensP1 Trip A SensP2 Trip A Unused Unused Unused Unused Unused Unused Unused Battery Fail Rear Comm 2 FailGOOSE IED AbsentNIC Not Fitted NIC No Response NIC Fatal Error NIC Soft. ReloadBad TCP/IP Cfg. Bad OSI Config. NIC Link Fail NIC SW Mis-MatchIP Addr ConflictIM Loopback IM Msg Fail IM DCD Fail IM Chan Fail Backup Setting Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Control Input 1 Control Input 2 Control Input 3 Control Input 4 Control Input 5 Control Input 6 Control Input 7 Control Input 8 Control Input 9 Control Input 10Control Input 11Control Input 12Control Input 13Control Input 14Control Input 15Control Input 16Control Input 17Control Input 18Control Input 19Control Input 20Control Input 21Control Input 22Control Input 23Control Input 24Control Input 25Control Input 26Control Input 27Control Input 28Control Input 29Control Input 30Control Input 31Control Input 32Virtual Input 1 Virtual Input 2 Virtual Input 3 Virtual Input 4 Virtual Input 5 Virtual Input 6 Virtual Input 7 Virtual Input 8 Virtual Input 9 Virtual Input 10Virtual Input 11Virtual Input 12Virtual Input 13Virtual Input 14Virtual Input 15Virtual Input 16Virtual Input 17Virtual Input 18Virtual Input 19Virtual Input 20Virtual Input 21Virtual Input 22Virtual Input 23Virtual Input 24Virtual Input 25Virtual Input 26Virtual Input 27Virtual Input 28Virtual Input 29Virtual Input 30Virtual Input 31Virtual Input 32Virtual Input 33Virtual Input 34Virtual Input 35Virtual Input 36Virtual Input 37Virtual Input 38Virtual Input 39Virtual Input 40Virtual Input 41Virtual Input 42Virtual Input 43Virtual Input 44Virtual Input 45Virtual Input 46Virtual Input 47Virtual Input 48Virtual Input 49Virtual Input 50Virtual Input 51Virtual Input 52Virtual Input 53Virtual Input 54Virtual Input 55Virtual Input 56Virtual Input 57Virtual Input 58Virtual Input 59Virtual Input 60Virtual Input 61Virtual Input 62Virtual Input 63Virtual Input 64InterMiCOM in 1 
InterMiCOM in 2 InterMiCOM in 3 InterMiCOM in 4 InterMiCOM in 5 InterMiCOM in 6 InterMiCOM in 7 InterMiCOM in 8 InterMiCOM out 1InterMiCOM out 2InterMiCOM out 3InterMiCOM out 4InterMiCOM out 5InterMiCOM out 6InterMiCOM out 7InterMiCOM out 8Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused PSL Internal 001PSL Internal 002PSL Internal 003PSL Internal 004PSL Internal 005PSL Internal 006PSL Internal 007PSL Internal 008PSL Internal 009PSL Internal 010PSL Internal 011PSL Internal 012PSL Internal 013PSL Internal 014PSL Internal 015PSL Internal 016PSL Internal 017PSL Internal 018PSL Internal 019PSL Internal 020PSL Internal 021PSL Internal 022PSL Internal 023PSL Internal 024PSL Internal 025PSL Internal 026PSL Internal 027PSL Internal 028PSL Internal 029PSL Internal 030PSL Internal 031PSL Internal 032PSL Internal 033PSL Internal 034PSL Internal 035PSL Internal 036PSL Internal 037PSL Internal 038PSL Internal 039PSL Internal 040PSL Internal 041PSL Internal 042PSL Internal 043PSL Internal 044PSL Internal 045PSL Internal 046PSL Internal 047PSL Internal 048PSL Internal 049PSL Internal 050PSL Internal 051PSL Internal 052PSL Internal 053PSL Internal 054PSL Internal 055PSL Internal 056PSL Internal 057PSL Internal 058PSL Internal 059PSL Internal 060PSL Internal 061PSL Internal 062PSL Internal 063PSL Internal 064PSL Internal 065PSL Internal 066PSL Internal 067PSL Internal 068PSL Internal 069PSL Internal 070PSL Internal 071PSL Internal 072PSL Internal 073PSL Internal 074PSL Internal 075PSL Internal 076PSL Internal 077PSL Internal 078PSL Internal 079PSL Internal 080PSL Internal 081PSL Internal 082PSL Internal 083PSL Internal 084PSL Internal 085PSL Internal 086PSL Internal 087PSL Internal 088PSL Internal 089PSL Internal 090PSL Internal 091PSL Internal 092PSL Internal 093PSL Internal 094PSL Internal 095PSL Internal 096PSL Internal 097PSL Internal 098PSL Internal 099PSL Internal 100Unused Virtual Output 1Virtual Output 2Virtual Output 3Virtual Output 4Virtual 
Output 5Virtual Output 6Virtual Output 7Virtual Output 8Virtual Output 9Virtual Output10Virtual Output11Virtual Output12Virtual Output13Virtual Output14Virtual Output15Virtual Output16Virtual Output17Virtual Output18Virtual Output19Virtual Output20Virtual Output21Virtual Output22Virtual Output23Virtual Output24Virtual Output25Virtual Output26Virtual Output27Virtual Output28Virtual Output29Virtual Output30Virtual Output31Virtual Output32Quality VIP 1 Quality VIP 2 Quality VIP 3 Quality VIP 4 Quality VIP 5 Quality VIP 6 Quality VIP 7 Quality VIP 8 Quality VIP 9 Quality VIP 10 Quality VIP 11 Quality VIP 12 Quality VIP 13 Quality VIP 14 Quality VIP 15 Quality VIP 16 Quality VIP 17 Quality VIP 18 Quality VIP 19 Quality VIP 20 Quality VIP 21 Quality VIP 22 Quality VIP 23 Quality VIP 24 Quality VIP 25 Quality VIP 26 Quality VIP 27 Quality VIP 28 Quality VIP 29 Quality VIP 30 Quality VIP 31 Quality VIP 32 Quality VIP 33 Quality VIP 34 Quality VIP 35 Quality VIP 36 Quality VIP 37 Quality VIP 38 Quality VIP 39 Quality VIP 40 Quality VIP 41 Quality VIP 42 Quality VIP 43 Quality VIP 44 Quality VIP 45 Quality VIP 46 Quality VIP 47 Quality VIP 48 Quality VIP 49 Quality VIP 50 Quality VIP 51 Quality VIP 52 Quality VIP 53 Quality VIP 54 Quality VIP 55 Quality VIP 56 Quality VIP 57 Quality VIP 58 Quality VIP 59 Quality VIP 60 Quality VIP 61 Quality VIP 62 Quality VIP 63 Quality VIP 64 PubPres VIP 1 PubPres VIP 2 PubPres VIP 3 PubPres VIP 4 PubPres VIP 5 PubPres VIP 6 PubPres VIP 7 PubPres VIP 8 PubPres VIP 9 PubPres VIP 10 PubPres VIP 11 PubPres VIP 12 PubPres VIP 13 PubPres VIP 14 PubPres VIP 15 PubPres VIP 16 PubPres VIP 17 PubPres VIP 18 PubPres VIP 19 PubPres VIP 20 PubPres VIP 21 PubPres VIP 22 PubPres VIP 23 PubPres VIP 24 PubPres VIP 25 PubPres VIP 26 PubPres VIP 27 PubPres VIP 28 PubPres VIP 29 PubPres VIP 30 PubPres VIP 31 PubPres VIP 32 PubPres VIP 33 PubPres VIP 34 PubPres VIP 35 PubPres VIP 36 PubPres VIP 37 PubPres VIP 38 PubPres VIP 39 PubPres VIP 40 PubPres VIP 41 
PubPres VIP 42 PubPres VIP 43 PubPres VIP 44 PubPres VIP 45 PubPres VIP 46 PubPres VIP 47 PubPres VIP 48 PubPres VIP 49 PubPres VIP 50 PubPres VIP 51 PubPres VIP 52 PubPres VIP 53 PubPres VIP 54 PubPres VIP 55 PubPres VIP 56 PubPres VIP 57 PubPres VIP 58 PubPres VIP 59 PubPres VIP 60 PubPres VIP 61 PubPres VIP 62 PubPres VIP 63 PubPres VIP 64 Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Adv Freq Inh Stg1 f+t Sta Stg1 f+t Trp Stg1 f+df/dt TrpStg1 df/dt+t StaStg1 df/dt+t TrpStg1 f+Df/Dt StaStg1 f+Df/Dt TrpStg1 Block Unused Unused Stg1 Restore ClsStg1 Restore StaUnused Unused Stg2 f+t Sta Stg2 f+t Trp Stg2 f+df/dt TrpStg2 df/dt+t StaStg2 df/dt+t TrpStg2 f+Df/Dt StaStg2 f+Df/Dt TrpStg2 Block Unused Unused Stg2 Restore ClsStg2 Restore StaUnused Unused Stg3 f+t Sta Stg3 f+t Trp Stg3 f+df/dt TrpStg3 df/dt+t StaStg3 df/dt+t TrpStg3 f+Df/Dt StaStg3 f+Df/Dt TrpStg3 Block Unused Unused Stg3 Restore ClsStg3 Restore StaUnused Unused Stg4 f+t Sta Stg4 f+t Trp Stg4 f+df/dt TrpStg4 df/dt+t StaStg4 df/dt+t TrpStg4 f+Df/Dt StaStg4 f+Df/Dt TrpStg4 Block Unused Unused Stg4 Restore ClsStg4 Restore StaUnused Unused Stg5 f+t Sta Stg5 f+t Trp Stg5 f+df/dt TrpStg5 df/dt+t StaStg5 df/dt+t TrpStg5 f+Df/Dt StaStg5 f+Df/Dt TrpStg5 Block Unused Unused Stg5 Restore ClsStg5 Restore StaUnused Unused Stg6 f+t Sta Stg6 f+t Trp Stg6 f+df/dt TrpStg6 df/dt+t StaStg6 df/dt+t TrpStg6 
f+Df/Dt StaStg6 f+Df/Dt TrpStg6 Block Unused Unused Stg6 Restore ClsStg6 Restore StaUnused Unused Stg7 f+t Sta Stg7 f+t Trp Stg7 f+df/dt TrpStg7 df/dt+t StaStg7 df/dt+t TrpStg7 f+Df/Dt StaStg7 f+Df/Dt TrpStg7 Block Unused Unused Stg7 Restore ClsStg7 Restore StaUnused Unused Stg8 f+t Sta Stg8 f+t Trp Stg8 f+df/dt TrpStg8 df/dt+t StaStg8 df/dt+t TrpStg8 f+Df/Dt StaStg8 f+Df/Dt TrpStg8 Block Unused Unused Stg8 Restore ClsStg8 Restore StaUnused Unused Stg9 f+t Sta Stg9 f+t Trp Stg9 f+df/dt TrpStg9 df/dt+t StaStg9 df/dt+t TrpStg9 f+Df/Dt StaStg9 f+Df/Dt TrpStg9 Block Unused Unused Stg9 Restore ClsStg9 Restore StaRestore Reset Reset Stats Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused ÿÿ CODStringProperty T ÿþÿ € ^ ÿþÿ ÿÿ CODBoolProperty | ÿÿ CODEditProperties 2 € ( ÿÿ CODDWordProperty r ÿÿÿ € h € r ÿÿÿ ”
) w i n s p o o l \ \ g b s r d 0 1 p s 0 4 \ g b s r d 0 1 p 7 2 2 6 4 I P _ 1 0 . 3 2 . 1 2 9 . 1 2 9 Ü ¸\ \ g b s r d 0 1 p s 0 4 \ g b s r d 0 1 p 7 2 2 6 4 Ü ¸Sÿ€ š4d X X A 4 PRIVâ0 ''' ' (ü ¼ P4 (ˆ þr”
ÿ ÿ ( SMTJ X e r o x W o r k C e n t r e 7 6 6 5 r e v 2 P S InputSlot *UseFormTrayTable PageSize A4 PageRegion LeadingEdge Resolution 600x600dpi Duplex DuplexNoTumble Collate True StapleLocation None XrxInputSlot True Rotation True ¼ 9XRX MOCX ^ l „ 4 š x l ³ e wœ ‘ i j o p q r ’ “ ” ȶ ÉØ ñ ò ß à û f — ¡ ˜c ¢c ™ £ – Ño Òê
Û áo âê
ã ê´ ëö Ö8 ×z º¼ »þ Î Ð Í Ï ƒ † ‡
‰ Œ Š ‹ Ê z@ | } Ë X YZ / Z [ \ ] % ( & ' ! 3 0 0 1 3 1 2 ú MSCF à , 90 R T P9e //Uncompressed-Data// yòói TCKãbb``ìsdHaÈeÈdÈâb††"†D ™¤GÁH ŒŒŒm6ö¹9
e©EÅ™ùy¶J†zJ
©yÉù)™yé¶J¡!nºJöv¼\6Å™i
@ÅV9‰ ©Ô<ÝÐ`%Œ>XÊ ÿÿÿÿ TCOM5 > > > > > > b R t ÿÿÿ
"""
oneconnection = """
A P P : C o u r i e r
T Y P E : S c h e m e L o g i c E d i t o r
F O R M A T : 1 . 0
V E R S I O N : 4 . 0 0
D O M A I N : 0 0 S e t t i n g s
S U B D O M A I N : 0 P S L S e t t i n g G r p 1
M O D E L : P 1 4 2 1 1 7 B 4 M 0 4 3 0 J
R E F E R E N C E :
D D B D E S C R I P T I O N F I L E :
F i l e c r e a t e d f r o m t e m p l a t e o n T h u r s d a y , D e c e m b e r 2 0 , 2 0 1 8 Â X @ +++++++++++++++++!!!!!!!!!!!!!!!!++++++++++++++++""""""""EEEEEEEEEEEEEEE+++++++++++++++++FFFFFFFFGGGGGGGGGGGGGGGG((((((((((((((((I$$$$$$$$$$$$$$$$$+$$$+$CCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCC+CCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCC$$$$$$$$$$$$$$$$+$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$+$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$,,,,,,,,,,,,,,,,,+C$,$$,,$$CCCCCC$$$$$$$$$$$$CCC$$$$$$CCCC$$++++++++CCC$CC$$C+$CCC$$$$++++C+++++++++++++++++++++++CCCCCCCCCC$$$$$$$$CC++++++CCCCCC$C$$$C$$$$$CCC+++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$CCCC$$$$+++++++$$$$$$$$$$$$$$$$$++++++++++++++--------------------------------////////////////////////////////////////////////////////////////11111111RRRRRRRR+++++++++++JJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJJ+PPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPP$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++C$$$$$$$C++$$++$$$$$$$C++$$++$$$$$$$C++$$++$$$$$$$C++$$++$$$$$$$C++$$++$$$$$$$C++$$++$$$$$$$C++$$++$$$$$$$C++$$++$$$$$$$C++$$CC+++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++Output R1 Output R2 Output R3 Output R4 Output R5 Output R6 Output R7 Output R8 Output R9 Output R10 Output R11 Output R12 Output R13 Output R14 Output R15 Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Input L1 Input L2 Input L3 Input L4 Input L5 Input L6 Input L7 Input L8 Input L9 Input L10 Input 
L11 Input L12 Input L13 Input L14 Input L15 Input L16 Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused LED 1 LED 2 LED 3 LED 4 LED 5 LED 6 LED 7 LED 8 Relay Cond 1 Relay Cond 2 Relay Cond 3 Relay Cond 4 Relay Cond 5 Relay Cond 6 Relay Cond 7 Relay Cond 8 Relay Cond 9 Relay Cond 10 Relay Cond 11 Relay Cond 12 Relay Cond 13 Relay Cond 14 Relay Cond 15 Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused LED Cond IN 1 LED Cond IN 2 LED Cond IN 3 LED Cond IN 4 LED Cond IN 5 LED Cond IN 6 LED Cond IN 7 LED Cond IN 8 Timer in 1 Timer in 2 Timer in 3 Timer in 4 Timer in 5 Timer in 6 Timer in 7 Timer in 8 Timer in 9 Timer in 10 Timer in 11 Timer in 12 Timer in 13 Timer in 14 Timer in 15 Timer in 16 Timer out 1 Timer out 2 Timer out 3 Timer out 4 Timer out 5 Timer out 6 Timer out 7 Timer out 8 Timer out 9 Timer out 10 Timer out 11 Timer out 12 Timer out 13 Timer out 14 Timer out 15 Timer out 16 Fault REC TRIG SG-opto Invalid Prot'n Disabled F out of Range VT Fail Alarm CT Fail Alarm CB Fail Alarm I^ Maint Alarm I^ Lockout AlarmCB Ops Maint CB Ops Lockout CB Op Time MaintCB Op Time Lock Fault Freq Lock CB Status Alarm Man CB Trip FailMan CB Cls Fail Man CB UnhealthyUnused AR Lockout AR CB Unhealthy AR No Sys Check Unused UV Block SR User Alarm 1 SR User Alarm 2 SR User Alarm 3 SR User Alarm 4 SR User Alarm 5 SR User Alarm 6 SR User Alarm 7 SR User Alarm 8 SR User Alarm 9 SR User Alarm 10SR User Alarm 11SR User Alarm 12SR User Alarm 13SR User Alarm 14SR User Alarm 15SR User Alarm 16SR User Alarm 17MR User Alarm 18MR User Alarm 19MR User Alarm 20MR User Alarm 21MR User Alarm 22MR User Alarm 23MR User Alarm 24MR User Alarm 25MR User Alarm 26MR User Alarm 27MR User Alarm 28MR User Alarm 29MR User Alarm 30MR User Alarm 31MR User Alarm 32MR User Alarm 33MR User Alarm 34MR User Alarm 35I>1 Timer Block I>2 Timer Block I>3 Timer Block I>4 Timer Block Unused 
IN1>1 Timer Blk IN1>2 Timer Blk IN1>3 Timer Blk IN1>4 Timer Blk IN2>1 Timer Blk IN2>2 Timer Blk IN2>3 Timer Blk IN2>4 Timer Blk ISEF>1 Timer BlkISEF>2 Timer BlkISEF>3 Timer BlkISEF>4 Timer BlkVN>1 Timer Blk VN>2 Timer Blk V<1 Timer Block V<2 Timer Block V>1 Timer Block V>2 Timer Block CLP Initiate Ext. Trip 3ph CB Aux 3ph(52-A)CB Aux 3ph(52-B)CB Healthy MCB/VTS Init Trip CB Init Close CB Reset Close Dly Reset Relays/LEDReset Thermal Reset Lockout Reset CB Data Block AR Live Line Mode Auto Mode Telecontrol ModeI>1 Trip I>1 Trip A I>1 Trip B I>1 Trip C I>2 Trip I>2 Trip A I>2 Trip B I>2 Trip C I>3 Trip I>3 Trip A I>3 Trip B I>3 Trip C I>4 Trip I>4 Trip A I>4 Trip B I>4 Trip C Unused Broken Line TripIN1>1 Trip IN1>2 Trip IN1>3 Trip IN1>4 Trip IN2>1 Trip IN2>2 Trip IN2>3 Trip IN2>4 Trip ISEF>1 Trip ISEF>2 Trip ISEF>3 Trip ISEF>4 Trip IREF> Trip VN>1 Trip VN>2 Trip Thermal Trip V2> Trip V<1 Trip V<1 Trip A/AB V<1 Trip B/BC V<1 Trip C/CA V<2 Trip V<2 Trip A/AB V<2 Trip B/BC V<2 Trip C/CA V>1 Trip V>1 Trip A/AB V>1 Trip B/BC V>1 Trip C/CA V>2 Trip V>2 Trip A/AB V>2 Trip B/BC V>2 Trip C/CA Any Start I>1 Start I>1 Start A I>1 Start B I>1 Start C I>2 Start I>2 Start A I>2 Start B I>2 Start C I>3 Start I>3 Start A I>3 Start B I>3 Start C I>4 Start I>4 Start A I>4 Start B I>4 Start C VCO Start AB VCO Start BC VCO Start CA Unused IN1>1 Start IN1>2 Start IN1>3 Start IN1>4 Start IN2>1 Start IN2>2 Start IN2>3 Start IN2>4 Start ISEF>1 Start ISEF>2 Start ISEF>3 Start ISEF>4 Start VN>1 Start VN>2 Start Thermal Alarm V2> Start V<1 Start V<1 Start A/AB V<1 Start B/BC V<1 Start C/CA V<2 Start V<2 Start A/AB V<2 Start B/BC V<2 Start C/CA V>1 Start V>1 Start A/AB V>1 Start B/BC V>1 Start C/CA V>2 Start V>2 Start A/AB V>2 Start B/BC V>2 Start C/CA CLP Operation I> BlockStart IN/SEF>Blk StartVTS Fast Block VTS Slow Block CTS Block Bfail1 Trip 3ph Bfail2 Trip 3ph Control Trip Control Close Close in Prog Block Main Prot Block SEF Prot AR In Progress AR In Service Seq Counter = 0 Seq Counter = 
1 Seq Counter = 2 Seq Counter = 3 Seq Counter = 4 Successful CloseDead T in Prog Protection LocktReset Lckout AlmAuto Close AR Trip Test IA< Start IB< Start IC< Start IN< Start ISEF< Start CB Open 3 ph CB Closed 3 ph All Poles Dead Any Pole Dead Pole Dead A Pole Dead B Pole Dead C VTS Acc Ind VTS Volt Dep VTS IA> VTS IB> VTS IC> VTS VA> VTS VB> VTS VC> VTS I2> VTS V2> VTS IA delta> VTS IB delta> VTS IC delta> CBF SEF Trip CBF Non I Trip CBF SEF Trip-1 CBF Non I Trip-1Unused AR Sys Checks OKLockout Alarm Pre-Lockout Freq High Freq Low Stop Freq Track Start N Field volts failFreq Not Found F<1 Timer Block F<2 Timer Block F<3 Timer Block F<4 Timer Block F>1 Timer Block F>2 Timer Block F<1 Start F<2 Start F<3 Start F<4 Start F>1 Start F>2 Start F<1 Trip F<2 Trip F<3 Trip F<4 Trip F>1 Trip F>2 Trip YN> Timer Block GN> Timer Block BN> Timer Block YN> Start GN> Start BN> Start YN> Trip GN> Trip BN> Trip Ext AR Prot TripExt AR Prot StrtTest Mode Inhibit SEF Live Line Dead Line Unused Unused Unused Unused Unused Unused Unused Unused DAR Complete CB in Service AR Restart AR In Progress 1DeadTime EnabledDT OK To Start DT Complete Reclose Checks Circuits OK Unused AR SysChecks OK AR Init TripTest103 MonitorBlock103 CommandBlockISEF>1 Start 2 ISEF>2 Start 2 ISEF>3 Start 2 ISEF>4 Start 2 Unused Unused Unused Unused Time Synch Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused External Trip A External Trip B External Trip C External Trip EFExternal TripSEFI2> Inhibit I2>1 Tmr Blk I2>2 Tmr Blk I2>3 Tmr Blk I2>4 Tmr Blk I2>1 Start I2>2 Start I2>3 Start I2>4 Start I2>1 Trip I2>2 Trip I2>3 Trip I2>4 Trip V2> Accelerate Trip LED TriggerUnused Unused Unused Unused Unused Unused Blk Rmt. CB Ops SG Select x1 SG Select 1x IN1> Inhibit IN2> Inhibit AR Skip Shot 1 Logic 0 Ref. 
Inh Reclaim TimeReclaim In Prog Reclaim CompleteBrokenLine StartTrip Command In Trip Command OutIA2H Start IB2H Start IC2H Start I2H Any Start RP1 Read Only RP2 Read Only NIC Read Only Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Power>1 3PhStartPower>1 A Start Power>1 B Start Power>1 C Start Power>2 3PhStartPower>2 A Start Power>2 B Start Power>2 C Start Power<1 3PhStartPower<1 A Start Power<1 B Start Power<1 C Start Power<2 3PhStartPower<2 A Start Power<2 B Start Power<2 C Start Power>1 3Ph TripPower>1 A Trip Power>1 B Trip Power>1 C Trip Power>2 3Ph TripPower>2 A Trip Power>2 B Trip Power>2 C Trip Power<1 3Ph TripPower<1 A Trip Power<1 B Trip Power<1 C Trip Power<2 3Ph TripPower<2 A Trip Power<2 B Trip Power<2 C Trip Power>1 Block Power>2 Block Power<1 Block Power<2 Block SensP1 Start A 
SensP2 Start A SensP1 Trip A SensP2 Trip A Unused Unused Unused Unused Unused Unused Unused Battery Fail Rear Comm 2 FailGOOSE IED AbsentNIC Not Fitted NIC No Response NIC Fatal Error NIC Soft. ReloadBad TCP/IP Cfg. Bad OSI Config. NIC Link Fail NIC SW Mis-MatchIP Addr ConflictIM Loopback IM Msg Fail IM DCD Fail IM Chan Fail Backup Setting Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Control Input 1 Control Input 2 Control Input 3 Control Input 4 Control Input 5 Control Input 6 Control Input 7 Control Input 8 Control Input 9 Control Input 10Control Input 11Control Input 12Control Input 13Control Input 14Control Input 15Control Input 16Control Input 17Control Input 18Control Input 19Control Input 20Control Input 21Control Input 22Control Input 23Control Input 24Control Input 25Control Input 26Control Input 27Control Input 28Control Input 29Control Input 30Control Input 31Control Input 32Virtual Input 1 Virtual Input 2 Virtual Input 3 Virtual Input 4 Virtual Input 5 Virtual Input 6 Virtual Input 7 Virtual Input 8 Virtual Input 9 Virtual Input 10Virtual Input 11Virtual Input 12Virtual Input 13Virtual Input 14Virtual Input 15Virtual Input 16Virtual Input 17Virtual Input 18Virtual Input 19Virtual Input 20Virtual Input 21Virtual Input 22Virtual Input 23Virtual Input 24Virtual Input 25Virtual Input 26Virtual Input 27Virtual Input 28Virtual Input 29Virtual Input 30Virtual Input 31Virtual Input 32Virtual Input 33Virtual Input 34Virtual Input 35Virtual Input 36Virtual Input 37Virtual Input 38Virtual Input 39Virtual Input 40Virtual Input 41Virtual Input 42Virtual Input 43Virtual Input 44Virtual Input 45Virtual Input 46Virtual Input 47Virtual Input 48Virtual Input 49Virtual Input 50Virtual Input 51Virtual Input 52Virtual Input 53Virtual Input 54Virtual Input 55Virtual Input 56Virtual Input 57Virtual Input 58Virtual Input 59Virtual Input 60Virtual Input 61Virtual Input 62Virtual Input 63Virtual Input 64InterMiCOM in 1 
InterMiCOM in 2 InterMiCOM in 3 InterMiCOM in 4 InterMiCOM in 5 InterMiCOM in 6 InterMiCOM in 7 InterMiCOM in 8 InterMiCOM out 1InterMiCOM out 2InterMiCOM out 3InterMiCOM out 4InterMiCOM out 5InterMiCOM out 6InterMiCOM out 7InterMiCOM out 8Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused PSL Internal 001PSL Internal 002PSL Internal 003PSL Internal 004PSL Internal 005PSL Internal 006PSL Internal 007PSL Internal 008PSL Internal 009PSL Internal 010PSL Internal 011PSL Internal 012PSL Internal 013PSL Internal 014PSL Internal 015PSL Internal 016PSL Internal 017PSL Internal 018PSL Internal 019PSL Internal 020PSL Internal 021PSL Internal 022PSL Internal 023PSL Internal 024PSL Internal 025PSL Internal 026PSL Internal 027PSL Internal 028PSL Internal 029PSL Internal 030PSL Internal 031PSL Internal 032PSL Internal 033PSL Internal 034PSL Internal 035PSL Internal 036PSL Internal 037PSL Internal 038PSL Internal 039PSL Internal 040PSL Internal 041PSL Internal 042PSL Internal 043PSL Internal 044PSL Internal 045PSL Internal 046PSL Internal 047PSL Internal 048PSL Internal 049PSL Internal 050PSL Internal 051PSL Internal 052PSL Internal 053PSL Internal 054PSL Internal 055PSL Internal 056PSL Internal 057PSL Internal 058PSL Internal 059PSL Internal 060PSL Internal 061PSL Internal 062PSL Internal 063PSL Internal 064PSL Internal 065PSL Internal 066PSL Internal 067PSL Internal 068PSL Internal 069PSL Internal 070PSL Internal 071PSL Internal 072PSL Internal 073PSL Internal 074PSL Internal 075PSL Internal 076PSL Internal 077PSL Internal 078PSL Internal 079PSL Internal 080PSL Internal 081PSL Internal 082PSL Internal 083PSL Internal 084PSL Internal 085PSL Internal 086PSL Internal 087PSL Internal 088PSL Internal 089PSL Internal 090PSL Internal 091PSL Internal 092PSL Internal 093PSL Internal 094PSL Internal 095PSL Internal 096PSL Internal 097PSL Internal 098PSL Internal 099PSL Internal 100Unused Virtual Output 1Virtual Output 2Virtual Output 3Virtual Output 4Virtual 
Output 5Virtual Output 6Virtual Output 7Virtual Output 8Virtual Output 9Virtual Output10Virtual Output11Virtual Output12Virtual Output13Virtual Output14Virtual Output15Virtual Output16Virtual Output17Virtual Output18Virtual Output19Virtual Output20Virtual Output21Virtual Output22Virtual Output23Virtual Output24Virtual Output25Virtual Output26Virtual Output27Virtual Output28Virtual Output29Virtual Output30Virtual Output31Virtual Output32Quality VIP 1 Quality VIP 2 Quality VIP 3 Quality VIP 4 Quality VIP 5 Quality VIP 6 Quality VIP 7 Quality VIP 8 Quality VIP 9 Quality VIP 10 Quality VIP 11 Quality VIP 12 Quality VIP 13 Quality VIP 14 Quality VIP 15 Quality VIP 16 Quality VIP 17 Quality VIP 18 Quality VIP 19 Quality VIP 20 Quality VIP 21 Quality VIP 22 Quality VIP 23 Quality VIP 24 Quality VIP 25 Quality VIP 26 Quality VIP 27 Quality VIP 28 Quality VIP 29 Quality VIP 30 Quality VIP 31 Quality VIP 32 Quality VIP 33 Quality VIP 34 Quality VIP 35 Quality VIP 36 Quality VIP 37 Quality VIP 38 Quality VIP 39 Quality VIP 40 Quality VIP 41 Quality VIP 42 Quality VIP 43 Quality VIP 44 Quality VIP 45 Quality VIP 46 Quality VIP 47 Quality VIP 48 Quality VIP 49 Quality VIP 50 Quality VIP 51 Quality VIP 52 Quality VIP 53 Quality VIP 54 Quality VIP 55 Quality VIP 56 Quality VIP 57 Quality VIP 58 Quality VIP 59 Quality VIP 60 Quality VIP 61 Quality VIP 62 Quality VIP 63 Quality VIP 64 PubPres VIP 1 PubPres VIP 2 PubPres VIP 3 PubPres VIP 4 PubPres VIP 5 PubPres VIP 6 PubPres VIP 7 PubPres VIP 8 PubPres VIP 9 PubPres VIP 10 PubPres VIP 11 PubPres VIP 12 PubPres VIP 13 PubPres VIP 14 PubPres VIP 15 PubPres VIP 16 PubPres VIP 17 PubPres VIP 18 PubPres VIP 19 PubPres VIP 20 PubPres VIP 21 PubPres VIP 22 PubPres VIP 23 PubPres VIP 24 PubPres VIP 25 PubPres VIP 26 PubPres VIP 27 PubPres VIP 28 PubPres VIP 29 PubPres VIP 30 PubPres VIP 31 PubPres VIP 32 PubPres VIP 33 PubPres VIP 34 PubPres VIP 35 PubPres VIP 36 PubPres VIP 37 PubPres VIP 38 PubPres VIP 39 PubPres VIP 40 PubPres VIP 41 
PubPres VIP 42 PubPres VIP 43 PubPres VIP 44 PubPres VIP 45 PubPres VIP 46 PubPres VIP 47 PubPres VIP 48 PubPres VIP 49 PubPres VIP 50 PubPres VIP 51 PubPres VIP 52 PubPres VIP 53 PubPres VIP 54 PubPres VIP 55 PubPres VIP 56 PubPres VIP 57 PubPres VIP 58 PubPres VIP 59 PubPres VIP 60 PubPres VIP 61 PubPres VIP 62 PubPres VIP 63 PubPres VIP 64 Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Adv Freq Inh Stg1 f+t Sta Stg1 f+t Trp Stg1 f+df/dt TrpStg1 df/dt+t StaStg1 df/dt+t TrpStg1 f+Df/Dt StaStg1 f+Df/Dt TrpStg1 Block Unused Unused Stg1 Restore ClsStg1 Restore StaUnused Unused Stg2 f+t Sta Stg2 f+t Trp Stg2 f+df/dt TrpStg2 df/dt+t StaStg2 df/dt+t TrpStg2 f+Df/Dt StaStg2 f+Df/Dt TrpStg2 Block Unused Unused Stg2 Restore ClsStg2 Restore StaUnused Unused Stg3 f+t Sta Stg3 f+t Trp Stg3 f+df/dt TrpStg3 df/dt+t StaStg3 df/dt+t TrpStg3 f+Df/Dt StaStg3 f+Df/Dt TrpStg3 Block Unused Unused Stg3 Restore ClsStg3 Restore StaUnused Unused Stg4 f+t Sta Stg4 f+t Trp Stg4 f+df/dt TrpStg4 df/dt+t StaStg4 df/dt+t TrpStg4 f+Df/Dt StaStg4 f+Df/Dt TrpStg4 Block Unused Unused Stg4 Restore ClsStg4 Restore StaUnused Unused Stg5 f+t Sta Stg5 f+t Trp Stg5 f+df/dt TrpStg5 df/dt+t StaStg5 df/dt+t TrpStg5 f+Df/Dt StaStg5 f+Df/Dt TrpStg5 Block Unused Unused Stg5 Restore ClsStg5 Restore StaUnused Unused Stg6 f+t Sta Stg6 f+t Trp Stg6 f+df/dt TrpStg6 df/dt+t StaStg6 df/dt+t TrpStg6 
f+Df/Dt StaStg6 f+Df/Dt TrpStg6 Block Unused Unused Stg6 Restore ClsStg6 Restore StaUnused Unused Stg7 f+t Sta Stg7 f+t Trp Stg7 f+df/dt TrpStg7 df/dt+t StaStg7 df/dt+t TrpStg7 f+Df/Dt StaStg7 f+Df/Dt TrpStg7 Block Unused Unused Stg7 Restore ClsStg7 Restore StaUnused Unused Stg8 f+t Sta Stg8 f+t Trp Stg8 f+df/dt TrpStg8 df/dt+t StaStg8 df/dt+t TrpStg8 f+Df/Dt StaStg8 f+Df/Dt TrpStg8 Block Unused Unused Stg8 Restore ClsStg8 Restore StaUnused Unused Stg9 f+t Sta Stg9 f+t Trp Stg9 f+df/dt TrpStg9 df/dt+t StaStg9 df/dt+t TrpStg9 f+Df/Dt StaStg9 f+Df/Dt TrpStg9 Block Unused Unused Stg9 Restore ClsStg9 Restore StaRestore Reset Reset Stats Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused Unused ÿÿ CODStringProperty T ÿþÿ € ^ ÿþÿ ÿÿ CODBoolProperty | ÿÿ CODEditProperties 2 € ( ÿÿ CODDWordProperty r ÿÿÿ € h € r ÿÿÿ ÿÿ CODSymbolComponentÿÿÿÿ ÿÿ
CODCirclePort ˜A‰™ ! Ÿ ' ÿÿ CODTransform €? €? C @Á € T ÿþÿP o r t € ^ ÿþÿ
O u t p u t P o r t € | ÿÿ CODFillProperties ÿÿÿ ÿÿÿ € ( € ÑƒÌ ! ' € €? €? *à @Á € T ÿþÿP o r t € ^ ÿþÿP o r t € ÿÿÿ ÿÿÿ € I R ! X ' € T ÿþÿP o r t € ^ ÿþÿP o r t € | € P € ( ÿÿ
CODConnection€ œ $ ™ ! Ÿ ' € P € ÿÿÿ ÿÿÿ € ^ ÿþÿP o r t € T ÿþÿP o r t € ( 8 Ÿ 0 € €? €? @A ÀA ÿÿ CPSLSignalProperties õ + ; ÿþÿI n p u t L 1 2 € T ÿþÿO p t o S i g n a l € ^ ÿþÿS i g n a l 1 € | € ( ÿÿ CODRectComponent ¾ { - € T ÿþÿ R e c t a n g l e € ^ ÿþÿS i g n a l C o l o u r R e c t 1 € ÿÿ ÿÿÿ ÿÿ CODLineProperties
+ ÿÿ CODPolygonComponent $ v $ ‚ - v 6 6 ‹6L. ˜ - € €? €? A  € T ÿþÿP o l y g o n € ^ ÿþÿS i g n a l B o d y P o l y g o n 1 € ÿÿÿ ÿÿÿ 9 + ÿÿ CODLineComponent »s„˜ $ œ $ € €? €? C @Á € T ÿþÿL i n e € ^ ÿþÿP o r t L i n e 1 9 + ÿÿ CODTextComponent ÿþÿD D B # 0 4 3 îÿÿÿfÿÿÿ* fÿÿÿ* îÿÿÿ Iü. ’ 0 €ÐÕ? VI=}~BÀe^A € T ÿþÿT e x t € ^ ÿþÿ
D D B N u m b e r T e x t 1 8€
€ ÿÿÿ ÿÿÿ ÿÿ CODFontProperties ÿþÿA r i a l
€ æ € ð ÿÿ CODIntProperty Ò P€ Ü + E€ ÿþÿ I n p u t L 1 2 òÿÿÿþÿÿ- þÿÿ- ¢ òÿÿÿ¢ àTË . “ ( €"ëÚ? Þ<(lfBqå@ H € ^ ÿþÿS i g n a l N a m e T e x t 1 J K M N O Q R + ÿÿ CODImageComponent ÿþÿ+U : \ 5 0 3 2 0 \ D E V \ s c h e m e o v \ r e s \ s i g n a l - o p t o . b m p BMv v ( € € €€ € € € €€ ÀÀÀ €€€ ÿ ÿ ÿÿ ÿ ÿ ÿ ÿÿ ÿÿÿ ÿÿÿÿÿÿÿðÿÿÿÿÿÿÿðÿÿÿÿÿÿÿðÿÿÿÿÿ ð ÿÿÿÿÿ ðÿÿÿÿÿ ÿÿÿÿ ÿÿÿÿÿð ÿÿÿÿð ÿ ÿÿððÿ ðÿð ÿÿÿ ÿ ÿð ÿÿ ÿðÿ ÿÿÿÿÿð ÿÿÿÿÿÿðÿÿÿÿÿÿÿðÿÿÿÿÿÿÿð ú7 - , € €? €? ˆA €@ € T ÿþÿI m a g e € ^ ÿþÿI m a g e 1 + @€ " ¨Z .
C € ^ ÿþÿL i n e 1 9 P€ M 2 P€ L 2 P€ K 2 P€ J 2 € + @€ ! Æì - . - € €? €? €?
C \ 1 9 ] ^ _ ` a +
€ÿÿÿÿ € ºj+‘ ! — ' € €? €? èÁ PÁ € T ÿþÿP o r t € ^ ÿþÿ
O u t p u t P o r t € ÿÿÿ ÿÿÿ € ( € ‹â !
' € €? €? Á PÁ € T ÿþÿP o r t € ^ ÿþÿP o r t € ÿÿÿ ÿÿÿ j € I J ! P ' € T ÿþÿP o r t € ^ ÿþÿP o r t € | € P o € ( $€€ $ !
' ' ( ) * + k ÒQ$ — 0 € €? €? D ÀA -€ õ 6 ÿþÿT r i p C o m m a n d I n € T ÿþÿ
O u t p u t S i g n a l € ^ ÿþÿO u t p u t S i g n a l 2 1 2 3€ ƒ ƒ D¿Õ‡ ” - 5 6 1 9 € ÿ ÿÿÿ + :€ m
m N- u - € €? €? €@ €¿ = > 1 9 ? + E€ ÿþÿD D B # 5 3 6 îÿÿÿ€ÿÿÿ* €ÿÿÿ* ! îÿÿÿ! Á×½ u 0 €ÐÕ? VI=}*BÀeNA H I 1 J K M N O Q R + @€ © ÃŽ $ $ € €? €? &à PÁ C € ^ ÿþÿO u t p u t P o r t L i n e 1 9 + E€ ÿþÿT r i p C o m m a n d I n òÿÿÿÜþÿÿ- Üþÿÿ- Ñ òÿÿÿÑ úÚ u ( €"ëÚ? Þ<(lBqÅ@ H U 1 J K M N O Q R + V€ ÿþÿ-U : \ 5 0 3 2 0 \ D E V \ s c h e m e o v \ r e s \ s i g n a l - o u t p u t . b m p BMv v ( € € €€ € € € €€ ÀÀÀ €€€ ÿ ÿ ÿÿ ÿ ÿ ÿ ÿÿ ÿÿÿ ÿÿÿÿÿÿÿÿÿÿÿÿð ÿÿÿÿ ÿÿÿð ÿÿ ÿÿÿ ÿÿ ÿÿð ÿÿÿ ÿÿ ÿÿÿ ÿÿÿÿ ÿÿÿÿ ÿÿ ÿÿÿ ÿÿð ÿÿÿ ÿÿÿ ÿÿ ÿÿÿð ÿÿ ÿÿÿÿ ÿÿÿÿð ÿÿÿÿÿÿÿÿ ¨V16v † , € €? €? äB @@ Y Z 1 + @€ ƒ q Ãùu ‡
C \ 1 9 ] ^ _ ` a + @€ ƒ q íu - ‡ - € €? €? €?
C \ 1 9 ] ^ _ ` a + ÿÿ CPSLLinkComponent ÿÿÿÿ & w € Q $ N ! T ' ' 1 € ^ ÿþÿ € T ÿþÿ € ( % v dáW0œ $ $ € d € F P€ ¤ 1 € ^ ÿþÿL i n k € T ÿþÿL i n k ‘ ÿÿ CODLineLinkShape œ $ Q $ Q $ $ ƽ~!œ $ $
9 ] ^ _ ` a 1 \ C + ”
) w i n s p o o l \ \ g b s r d 0 1 p s 0 4 \ g b s r d 0 1 p 7 2 2 6 4 I P _ 1 0 . 3 2 . 1 2 9 . 1 2 9 Ü ¸\ \ g b s r d 0 1 p s 0 4 \ g b s r d 0 1 p 7 2 2 6 4 Ü ¸Sÿ€ š4d X X A 4 PRIVâ0 ''' ' (ü ¼ P4 (ˆ þr”
ÿ ÿ ( SMTJ X e r o x W o r k C e n t r e 7 6 6 5 r e v 2 P S InputSlot *UseFormTrayTable PageSize A4 PageRegion LeadingEdge Resolution 600x600dpi Duplex DuplexNoTumble Collate True StapleLocation None XrxInputSlot True Rotation True ¼ 9XRX MOCX ^ l „ 4 š x l ³ e wœ ‘ i j o p q r ’ “ ” ȶ ÉØ ñ ò ß à û f — ¡ ˜c ¢c ™ £ – Ño Òê
Û áo âê
ã ê´ ëö Ö8 ×z º¼ »þ Î Ð Í Ï ƒ † ‡
‰ Œ Š ‹ Ê z@ | } Ë X YZ / Z [ \ ] % ( & ' ! 3 0 0 1 3 1 2 ú MSCF à , 90 R T P9e //Uncompressed-Data// yòói TCKãbb``ìsdHaÈeÈdÈâb††"†D ™¤GÁH ŒŒŒm6ö¹9
e©EÅ™ùy¶J†zJ
©yÉù)™yé¶J¡!nºJöv¼\6Å™i
@ÅV9‰ ©Ô<ÝÐ`%Œ>XÊ ÿÿÿÿ TCOM5 > > > > > > b R t ÿÿÿ
"""
| 853.795455
| 26,608
| 0.431789
|
4a0abf9b2303e770e78f0ea7211973988c7c3267
| 111,605
|
py
|
Python
|
mne/source_space.py
|
jdammers/mne-python
|
1dc1502215a53385cda15c6c336fcc4341dc4d3b
|
[
"BSD-3-Clause"
] | null | null | null |
mne/source_space.py
|
jdammers/mne-python
|
1dc1502215a53385cda15c6c336fcc4341dc4d3b
|
[
"BSD-3-Clause"
] | null | null | null |
mne/source_space.py
|
jdammers/mne-python
|
1dc1502215a53385cda15c6c336fcc4341dc4d3b
|
[
"BSD-3-Clause"
] | null | null | null |
# Authors: Alexandre Gramfort <alexandre.gramfort@telecom-paristech.fr>
# Matti Hamalainen <msh@nmr.mgh.harvard.edu>
#
# License: BSD (3-clause)
from copy import deepcopy
from functools import partial
from gzip import GzipFile
import os
import os.path as op
import numpy as np
from scipy import sparse, linalg
from .io.constants import FIFF
from .io.meas_info import create_info
from .io.tree import dir_tree_find
from .io.tag import find_tag, read_tag
from .io.open import fiff_open
from .io.write import (start_block, end_block, write_int,
write_float_sparse_rcs, write_string,
write_float_matrix, write_int_matrix,
write_coord_trans, start_file, end_file, write_id)
from .bem import read_bem_surfaces, ConductorModel
from .surface import (read_surface, _create_surf_spacing, _get_ico_surface,
_tessellate_sphere_surf, _get_surf_neighbors,
_normalize_vectors, _get_solids, _triangle_neighbors,
complete_surface_info, _compute_nearest, fast_cross_3d,
mesh_dist)
from .utils import (get_subjects_dir, run_subprocess, has_freesurfer,
has_nibabel, check_fname, logger, verbose,
check_version, _get_call_line, warn, _check_fname)
from .parallel import parallel_func, check_n_jobs
from .transforms import (invert_transform, apply_trans, _print_coord_trans,
combine_transforms, _get_trans,
_coord_frame_name, Transform, _str_to_frame,
_ensure_trans)
from .externals.six import string_types
def _get_lut():
    """Load the bundled FreeSurfer color lookup table.

    Returns
    -------
    lut : numpy structured array
        One row per FreeSurfer label, with fields ``id``, ``name`` and the
        RGBA color components ``R``, ``G``, ``B``, ``A``.
    """
    lut_fname = op.join(op.dirname(__file__), 'data',
                        'FreeSurferColorLUT.txt')
    # 'U47' matches the longest label name in the bundled LUT file.
    lut_dtype = [('id', '<i8'), ('name', 'U47'),
                 ('R', '<i8'), ('G', '<i8'), ('B', '<i8'), ('A', '<i8')]
    return np.genfromtxt(lut_fname, dtype=lut_dtype)
def _get_lut_id(lut, label, use_lut):
"""Convert a label to a LUT ID number."""
if not use_lut:
return 1
assert isinstance(label, string_types)
mask = (lut['name'] == label)
assert mask.sum() == 1
return lut['id'][mask]
# Map the internal source-space 'type' tag (stored in each source space
# dict) to the human-readable kind name used by SourceSpaces.kind and
# SourceSpaces.__repr__.
_src_kind_dict = {
    'vol': 'volume',
    'surf': 'surface',
    'discrete': 'discrete',
}
class SourceSpaces(list):
"""Represent a list of source space.
Currently implemented as a list of dictionaries containing the source
space information
Parameters
----------
source_spaces : list
A list of dictionaries containing the source space information.
info : dict
Dictionary with information about the creation of the source space
file. Has keys 'working_dir' and 'command_line'.
Attributes
----------
info : dict
Dictionary with information about the creation of the source space
file. Has keys 'working_dir' and 'command_line'.
"""
def __init__(self, source_spaces, info=None): # noqa: D102
super(SourceSpaces, self).__init__(source_spaces)
if info is None:
self.info = dict()
else:
self.info = dict(info)
@verbose
def plot(self, head=False, brain=None, skull=None, subjects_dir=None,
trans=None, verbose=None):
"""Plot the source space.
Parameters
----------
head : bool
If True, show head surface.
brain : bool | str
If True, show the brain surfaces. Can also be a str for
surface type (e.g., 'pial', same as True). Default is None,
which means 'white' for surface source spaces and False otherwise.
skull : bool | str | list of str | list of dict | None
Whether to plot skull surface. If string, common choices would be
'inner_skull', or 'outer_skull'. Can also be a list to plot
multiple skull surfaces. If a list of dicts, each dict must
contain the complete surface info (such as you get from
:func:`mne.make_bem_model`). True is an alias of 'outer_skull'.
The subjects bem and bem/flash folders are searched for the 'surf'
files. Defaults to None, which is False for surface source spaces,
and True otherwise.
subjects_dir : string, or None
Path to SUBJECTS_DIR if it is not set in the environment.
trans : str | 'auto' | dict | None
The full path to the head<->MRI transform ``*-trans.fif`` file
produced during coregistration. If trans is None, an identity
matrix is assumed. This is only needed when the source space is in
head coordinates.
verbose : bool, str, int, or None
If not None, override default verbose level (see
:func:`mne.verbose` and :ref:`Logging documentation <tut_logging>`
for more).
Returns
-------
fig : instance of mlab Figure
The figure.
"""
from .viz import plot_alignment
surfaces = list()
bem = None
if brain is None:
brain = 'white' if any(ss['type'] == 'surf'
for ss in self) else False
if isinstance(brain, string_types):
surfaces.append(brain)
elif brain:
surfaces.append('brain')
if skull is None:
skull = False if self.kind == 'surface' else True
if isinstance(skull, string_types):
surfaces.append(skull)
elif skull is True:
surfaces.append('outer_skull')
elif skull is not False: # list
if isinstance(skull[0], dict): # bem
skull_map = {FIFF.FIFFV_BEM_SURF_ID_BRAIN: 'inner_skull',
FIFF.FIFFV_BEM_SURF_ID_SKULL: 'outer_skull',
FIFF.FIFFV_BEM_SURF_ID_HEAD: 'outer_skin'}
for this_skull in skull:
surfaces.append(skull_map[this_skull['id']])
bem = skull
else: # list of str
for surf in skull:
surfaces.append(surf)
if head:
surfaces.append('head')
if self[0]['coord_frame'] == FIFF.FIFFV_COORD_HEAD:
coord_frame = 'head'
if trans is None:
raise ValueError('Source space is in head coordinates, but no '
'head<->MRI transform was given. Please '
'specify the full path to the appropriate '
'*-trans.fif file as the "trans" parameter.')
else:
coord_frame = 'mri'
info = create_info(0, 1000., 'eeg')
return plot_alignment(
info, trans=trans, subject=self[0]['subject_his_id'],
subjects_dir=subjects_dir, surfaces=surfaces,
coord_frame=coord_frame, meg=(), eeg=False, dig=False, ecog=False,
bem=bem, src=self
)
def __repr__(self): # noqa: D105
ss_repr = []
for ss in self:
ss_type = ss['type']
r = _src_kind_dict[ss_type]
if ss_type == 'vol':
if 'seg_name' in ss:
r += " (%s)" % (ss['seg_name'],)
else:
r += ", shape=%s" % (ss['shape'],)
elif ss_type == 'surf':
r += (" (%s), n_vertices=%i" % (_get_hemi(ss)[0], ss['np']))
r += (', n_used=%i, coordinate_frame=%s'
% (ss['nuse'], _coord_frame_name(int(ss['coord_frame']))))
ss_repr.append('<%s>' % r)
return "<SourceSpaces: [%s]>" % ', '.join(ss_repr)
@property
def kind(self):
"""The kind of source space (surface, volume, discrete)."""
ss_types = list(set([ss['type'] for ss in self]))
if len(ss_types) != 1:
return 'combined'
return _src_kind_dict[ss_types[0]]
def __add__(self, other):
"""Combine source spaces."""
return SourceSpaces(list.__add__(self, other))
def copy(self):
"""Make a copy of the source spaces.
Returns
-------
src : instance of SourceSpaces
The copied source spaces.
"""
src = deepcopy(self)
return src
    def save(self, fname, overwrite=False):
        """Save the source spaces to a fif file.

        Parameters
        ----------
        fname : str
            File to write.
        overwrite : bool
            If True, the destination file (if it exists) will be overwritten.
            If False (default), an error will be raised if the file exists.
        """
        # Delegate to the module-level writer so all FIF serialization
        # logic lives in one place.
        write_source_spaces(fname, self, overwrite)
    @verbose
    def export_volume(self, fname, include_surfaces=True,
                      include_discrete=True, dest='mri', trans=None,
                      mri_resolution=False, use_lut=True, verbose=None):
        """Export source spaces to nifti or mgz file.

        Parameters
        ----------
        fname : str
            Name of nifti or mgz file to write.
        include_surfaces : bool
            If True, include surface source spaces.
        include_discrete : bool
            If True, include discrete source spaces.
        dest : 'mri' | 'surf'
            If 'mri' the volume is defined in the coordinate system of the
            original T1 image. If 'surf' the coordinate system of the
            FreeSurfer surface is used (Surface RAS).
        trans : dict, str, or None
            Either a transformation filename (usually made using mne_analyze)
            or an info dict (usually opened using read_trans()).
            If string, an ending of `.fif` or `.fif.gz` will be assumed to be
            in FIF format, any other ending will be assumed to be a text file
            with a 4x4 transformation matrix (like the `--trans` MNE-C option.
            Must be provided if source spaces are in head coordinates and
            include_surfaces and mri_resolution are True.
        mri_resolution : bool
            If True, the image is saved in MRI resolution
            (e.g. 256 x 256 x 256).
        use_lut : bool
            If True, assigns a numeric value to each source space that
            corresponds to a color on the freesurfer lookup table.
        verbose : bool, str, int, or None
            If not None, override default verbose level (see
            :func:`mne.verbose` and :ref:`Logging documentation <tut_logging>`
            for more).

        Notes
        -----
        This method requires nibabel.
        """
        # import nibabel or raise error
        try:
            import nibabel as nib
        except ImportError:
            raise ImportError('This function requires nibabel.')
        # Check coordinate frames of each source space
        coord_frames = np.array([s['coord_frame'] for s in self])
        # Raise error if trans is not provided when head coordinates are used
        # and mri_resolution and include_surfaces are true
        if (coord_frames == FIFF.FIFFV_COORD_HEAD).all():
            coords = 'head'  # all sources in head coordinates
            if mri_resolution and include_surfaces:
                if trans is None:
                    raise ValueError('trans containing mri to head transform '
                                     'must be provided if mri_resolution and '
                                     'include_surfaces are true and surfaces '
                                     'are in head coordinates')
            elif trans is not None:
                logger.info('trans is not needed and will not be used unless '
                            'include_surfaces and mri_resolution are True.')
        elif (coord_frames == FIFF.FIFFV_COORD_MRI).all():
            coords = 'mri'  # all sources in mri coordinates
            if trans is not None:
                logger.info('trans is not needed and will not be used unless '
                            'sources are in head coordinates.')
        # Raise error if all sources are not in the same space, or sources are
        # not in mri or head coordinates
        else:
            raise ValueError('All sources must be in head coordinates or all '
                             'sources must be in mri coordinates.')
        # use lookup table to assign values to source spaces
        logger.info('Reading FreeSurfer lookup table')
        # read the lookup table
        lut = _get_lut()
        # Setup a dictionary of source types
        src_types = dict(volume=[], surface=[], discrete=[])
        # Populate dictionary of source types
        for src in self:
            # volume sources
            if src['type'] == 'vol':
                src_types['volume'].append(src)
            # surface sources
            elif src['type'] == 'surf':
                src_types['surface'].append(src)
            # discrete sources
            elif src['type'] == 'discrete':
                src_types['discrete'].append(src)
            # raise an error if dealing with source type other than volume
            # surface or discrete
            else:
                raise ValueError('Unrecognized source type: %s.' % src['type'])
        # Get shape, inuse array and interpolation matrix from volume sources
        inuse = 0
        for ii, vs in enumerate(src_types['volume']):
            # read the lookup table value for segmented volume
            if 'seg_name' not in vs:
                raise ValueError('Volume sources should be segments, '
                                 'not the entire volume.')
            # find the color value for this volume
            id_ = _get_lut_id(lut, vs['seg_name'], use_lut)
            if ii == 0:
                # get the inuse array
                if mri_resolution:
                    # read the mri file used to generate volumes
                    # NOTE(review): get_data() is deprecated in newer nibabel
                    # (get_fdata is the replacement) — confirm pinned version
                    aseg_data = nib.load(vs['mri_file']).get_data()
                    # get the voxel space shape
                    shape3d = (vs['mri_height'], vs['mri_depth'],
                               vs['mri_width'])
                else:
                    # get the volume source space shape
                    # read the shape in reverse order
                    # (otherwise results are scrambled)
                    shape3d = vs['shape'][2::-1]
            if mri_resolution:
                # get the values for this volume
                use = id_ * (aseg_data == id_).astype(int).ravel('F')
            else:
                use = id_ * vs['inuse']
            inuse += use
        # Raise error if there are no volume source spaces
        if np.array(inuse).ndim == 0:
            raise ValueError('Source spaces must contain at least one volume.')
        # create 3d grid in the MRI_VOXEL coordinate frame
        # len of inuse array should match shape regardless of mri_resolution
        assert len(inuse) == np.prod(shape3d)
        # setup the image in 3d space
        img = inuse.reshape(shape3d).T
        # include surface and/or discrete source spaces
        if include_surfaces or include_discrete:
            # setup affine transform for source spaces
            # NOTE: 'vs' here is the last volume source space from the loop
            # above — all volumes are assumed to share these transforms
            if mri_resolution:
                # get the MRI to MRI_VOXEL transform
                affine = invert_transform(vs['vox_mri_t'])
            else:
                # get the MRI to SOURCE (MRI_VOXEL) transform
                affine = invert_transform(vs['src_mri_t'])
            # modify affine if in head coordinates
            if coords == 'head':
                # read mri -> head transformation
                mri_head_t = _get_trans(trans)[0]
                # get the HEAD to MRI transform
                head_mri_t = invert_transform(mri_head_t)
                # combine transforms, from HEAD to MRI_VOXEL
                affine = combine_transforms(head_mri_t, affine,
                                            'head', 'mri_voxel')
            # loop through the surface source spaces
            if include_surfaces:
                # get the surface names (assumes left, right order. may want
                # to add these names during source space generation
                surf_names = ['Left-Cerebral-Cortex', 'Right-Cerebral-Cortex']
                for i, surf in enumerate(src_types['surface']):
                    # convert vertex positions from their native space
                    # (either HEAD or MRI) to MRI_VOXEL space
                    srf_rr = apply_trans(affine['trans'], surf['rr'])
                    # convert to numeric indices
                    ix_orig, iy_orig, iz_orig = srf_rr.T.round().astype(int)
                    # clip indices outside of volume space
                    ix_clip = np.maximum(np.minimum(ix_orig, shape3d[2] - 1),
                                         0)
                    iy_clip = np.maximum(np.minimum(iy_orig, shape3d[1] - 1),
                                         0)
                    iz_clip = np.maximum(np.minimum(iz_orig, shape3d[0] - 1),
                                         0)
                    # compare original and clipped indices
                    n_diff = np.array((ix_orig != ix_clip, iy_orig != iy_clip,
                                       iz_orig != iz_clip)).any(0).sum()
                    # generate use warnings for clipping
                    if n_diff > 0:
                        warn('%s surface vertices lay outside of volume space.'
                             ' Consider using a larger volume space.' % n_diff)
                    # get surface id or use default value
                    i = _get_lut_id(lut, surf_names[i], use_lut)
                    # update image to include surface voxels
                    img[ix_clip, iy_clip, iz_clip] = i
            # loop through discrete source spaces
            if include_discrete:
                for i, disc in enumerate(src_types['discrete']):
                    # convert vertex positions from their native space
                    # (either HEAD or MRI) to MRI_VOXEL space
                    disc_rr = apply_trans(affine['trans'], disc['rr'])
                    # convert to numeric indices
                    ix_orig, iy_orig, iz_orig = disc_rr.T.astype(int)
                    # clip indices outside of volume space
                    ix_clip = np.maximum(np.minimum(ix_orig, shape3d[2] - 1),
                                         0)
                    iy_clip = np.maximum(np.minimum(iy_orig, shape3d[1] - 1),
                                         0)
                    iz_clip = np.maximum(np.minimum(iz_orig, shape3d[0] - 1),
                                         0)
                    # compare original and clipped indices
                    n_diff = np.array((ix_orig != ix_clip, iy_orig != iy_clip,
                                       iz_orig != iz_clip)).any(0).sum()
                    # generate use warnings for clipping
                    if n_diff > 0:
                        warn('%s discrete vertices lay outside of volume '
                             'space. Consider using a larger volume space.'
                             % n_diff)
                    # set default value
                    img[ix_clip, iy_clip, iz_clip] = 1
                    if use_lut:
                        logger.info('Discrete sources do not have values on '
                                    'the lookup table. Defaulting to 1.')
        # calculate affine transform for image (MRI_VOXEL to RAS)
        if mri_resolution:
            # MRI_VOXEL to MRI transform
            transform = vs['vox_mri_t'].copy()
        else:
            # MRI_VOXEL to MRI transform
            # NOTE: 'src' indicates downsampled version of MRI_VOXEL
            transform = vs['src_mri_t'].copy()
        if dest == 'mri':
            # combine with MRI to RAS transform
            transform = combine_transforms(transform, vs['mri_ras_t'],
                                           transform['from'],
                                           vs['mri_ras_t']['to'])
        # now setup the affine for volume image
        affine = transform['trans']
        # make sure affine converts from m to mm
        affine[:3] *= 1e3
        # save volume data
        # setup image for file
        if fname.endswith(('.nii', '.nii.gz')):  # save as nifit
            # setup the nifti header
            hdr = nib.Nifti1Header()
            hdr.set_xyzt_units('mm')
            # save the nifti image
            img = nib.Nifti1Image(img, affine, header=hdr)
        elif fname.endswith('.mgz'):  # save as mgh
            # convert to float32 (float64 not currently supported)
            img = img.astype('float32')
            # save the mgh image
            img = nib.freesurfer.mghformat.MGHImage(img, affine)
        else:
            # NOTE(review): redundant parentheses around the raised value
            # (works, but unidiomatic)
            raise(ValueError('Unrecognized file extension'))
        # write image to file
        nib.save(img, fname)
def _add_patch_info(s):
"""Patch information in a source space.
Generate the patch information from the 'nearest' vector in
a source space. For vertex in the source space it provides
the list of neighboring vertices in the high resolution
triangulation.
Parameters
----------
s : dict
The source space.
"""
nearest = s['nearest']
if nearest is None:
s['pinfo'] = None
s['patch_inds'] = None
return
logger.info(' Computing patch statistics...')
indn = np.argsort(nearest)
nearest_sorted = nearest[indn]
steps = np.where(nearest_sorted[1:] != nearest_sorted[:-1])[0] + 1
starti = np.r_[[0], steps]
stopi = np.r_[steps, [len(nearest)]]
pinfo = list()
for start, stop in zip(starti, stopi):
pinfo.append(np.sort(indn[start:stop]))
s['pinfo'] = pinfo
# compute patch indices of the in-use source space vertices
patch_verts = nearest_sorted[steps - 1]
s['patch_inds'] = np.searchsorted(patch_verts, s['vertno'])
logger.info(' Patch information added...')
@verbose
def _read_source_spaces_from_tree(fid, tree, patch_stats=False,
                                  verbose=None):
    """Read the source spaces from a FIF file.

    Parameters
    ----------
    fid : file descriptor
        An open file descriptor.
    tree : dict
        The FIF tree structure if source is a file id.
    patch_stats : bool, optional (default False)
        Calculate and add cortical patch statistics to the surfaces.
    verbose : bool, str, int, or None
        If not None, override default verbose level (see :func:`mne.verbose`
        and :ref:`Logging documentation <tut_logging>` for more).

    Returns
    -------
    src : SourceSpaces
        The source spaces.
    """
    # Locate every source-space block in the tree
    spaces = dir_tree_find(tree, FIFF.FIFFB_MNE_SOURCE_SPACE)
    if not spaces:
        raise ValueError('No source spaces found')
    out = list()
    for node in spaces:
        logger.info('    Reading a source space...')
        space = _read_one_source_space(fid, node)
        logger.info('    [done]')
        if patch_stats:
            _complete_source_space_info(space)
        out.append(space)
    logger.info('    %d source spaces read' % len(spaces))
    return SourceSpaces(out)
@verbose
def read_source_spaces(fname, patch_stats=False, verbose=None):
    """Read the source spaces from a FIF file.

    Parameters
    ----------
    fname : str
        The name of the file, which should end with -src.fif or
        -src.fif.gz.
    patch_stats : bool, optional (default False)
        Calculate and add cortical patch statistics to the surfaces.
    verbose : bool, str, int, or None
        If not None, override default verbose level (see :func:`mne.verbose`
        and :ref:`Logging documentation <tut_logging>` for more).

    Returns
    -------
    src : SourceSpaces
        The source spaces.

    See Also
    --------
    write_source_spaces, setup_source_space, setup_volume_source_space
    """
    # be more permissive on read than write (fwd/inv can contain src)
    check_fname(fname, 'source space', ('-src.fif', '-src.fif.gz',
                                        '-fwd.fif', '-fwd.fif.gz',
                                        '-inv.fif', '-inv.fif.gz'))
    ff, tree, _ = fiff_open(fname)
    with ff as fid:
        src = _read_source_spaces_from_tree(fid, tree,
                                            patch_stats=patch_stats,
                                            verbose=verbose)
        src.info['fname'] = fname
        # pick up environment info (working dir / command line) if present
        env = dir_tree_find(tree, FIFF.FIFFB_MNE_ENV)
        if env:
            env = env[0]
            for idx in range(env['nent']):
                entry = env['directory'][idx]
                tag = read_tag(fid, entry.pos)
                if entry.kind == FIFF.FIFF_MNE_ENV_WORKING_DIR:
                    src.info['working_dir'] = tag.data
                elif entry.kind == FIFF.FIFF_MNE_ENV_COMMAND_LINE:
                    src.info['command_line'] = tag.data
    return src
@verbose
def _read_one_source_space(fid, this, verbose=None):
    """Read one source space from an open FIF file.

    Parameters
    ----------
    fid : file descriptor
        An open file descriptor.
    this : dict
        The FIF tree node corresponding to one source-space block.
    verbose : bool, str, int, or None
        If not None, override default verbose level.

    Returns
    -------
    res : dict
        The source space.
    """
    # Old BEM tag numbers, kept for reading early files
    FIFF_BEM_SURF_NTRI = 3104
    FIFF_BEM_SURF_TRIANGLES = 3106
    res = dict()
    tag = find_tag(fid, this, FIFF.FIFF_MNE_SOURCE_SPACE_ID)
    if tag is None:
        res['id'] = int(FIFF.FIFFV_MNE_SURF_UNKNOWN)
    else:
        res['id'] = int(tag.data)
    tag = find_tag(fid, this, FIFF.FIFF_MNE_SOURCE_SPACE_TYPE)
    if tag is None:
        raise ValueError('Unknown source space type')
    else:
        src_type = int(tag.data)
        if src_type == FIFF.FIFFV_MNE_SPACE_SURFACE:
            res['type'] = 'surf'
        elif src_type == FIFF.FIFFV_MNE_SPACE_VOLUME:
            res['type'] = 'vol'
        elif src_type == FIFF.FIFFV_MNE_SPACE_DISCRETE:
            res['type'] = 'discrete'
        else:
            raise ValueError('Unknown source space type (%d)' % src_type)
    if res['type'] == 'vol':
        # Volume source spaces carry extra shape/transform/MRI metadata
        tag = find_tag(fid, this, FIFF.FIFF_MNE_SOURCE_SPACE_VOXEL_DIMS)
        if tag is not None:
            res['shape'] = tuple(tag.data)
        tag = find_tag(fid, this, FIFF.FIFF_COORD_TRANS)
        if tag is not None:
            res['src_mri_t'] = tag.data
        parent_mri = dir_tree_find(this, FIFF.FIFFB_MNE_PARENT_MRI_FILE)
        if len(parent_mri) == 0:
            # MNE 2.7.3 (and earlier) didn't store necessary information
            # about volume coordinate translations. Although there is a
            # FFIF_COORD_TRANS in the higher level of the FIFF file, this
            # doesn't contain all the info we need. Safer to return an
            # error unless a user really wants us to add backward compat.
            raise ValueError('Can not find parent MRI location. The volume '
                             'source space may have been made with an MNE '
                             'version that is too old (<= 2.7.3). Consider '
                             'updating and regenerating the inverse.')
        mri = parent_mri[0]
        for d in mri['directory']:
            if d.kind == FIFF.FIFF_COORD_TRANS:
                tag = read_tag(fid, d.pos)
                trans = tag.data
                if trans['from'] == FIFF.FIFFV_MNE_COORD_MRI_VOXEL:
                    res['vox_mri_t'] = tag.data
                if trans['to'] == FIFF.FIFFV_MNE_COORD_RAS:
                    res['mri_ras_t'] = tag.data
        tag = find_tag(fid, mri, FIFF.FIFF_MNE_SOURCE_SPACE_INTERPOLATOR)
        if tag is not None:
            res['interpolator'] = tag.data
        else:
            logger.info("Interpolation matrix for MRI not found.")
        tag = find_tag(fid, mri, FIFF.FIFF_MNE_SOURCE_SPACE_MRI_FILE)
        if tag is not None:
            res['mri_file'] = tag.data
        tag = find_tag(fid, mri, FIFF.FIFF_MRI_WIDTH)
        if tag is not None:
            res['mri_width'] = int(tag.data)
        tag = find_tag(fid, mri, FIFF.FIFF_MRI_HEIGHT)
        if tag is not None:
            res['mri_height'] = int(tag.data)
        tag = find_tag(fid, mri, FIFF.FIFF_MRI_DEPTH)
        if tag is not None:
            res['mri_depth'] = int(tag.data)
        tag = find_tag(fid, mri, FIFF.FIFF_MNE_FILE_NAME)
        if tag is not None:
            res['mri_volume_name'] = tag.data
        tag = find_tag(fid, this, FIFF.FIFF_MNE_SOURCE_SPACE_NNEIGHBORS)
        if tag is not None:
            nneighbors = tag.data
            tag = find_tag(fid, this, FIFF.FIFF_MNE_SOURCE_SPACE_NEIGHBORS)
            offset = 0
            neighbors = []
            for n in nneighbors:
                neighbors.append(tag.data[offset:offset + n])
                offset += n
            res['neighbor_vert'] = neighbors
        tag = find_tag(fid, this, FIFF.FIFF_COMMENT)
        if tag is not None:
            res['seg_name'] = tag.data
    tag = find_tag(fid, this, FIFF.FIFF_MNE_SOURCE_SPACE_NPOINTS)
    if tag is None:
        raise ValueError('Number of vertices not found')
    res['np'] = int(tag.data)
    tag = find_tag(fid, this, FIFF_BEM_SURF_NTRI)
    if tag is None:
        tag = find_tag(fid, this, FIFF.FIFF_MNE_SOURCE_SPACE_NTRI)
        if tag is None:
            res['ntri'] = 0
        else:
            res['ntri'] = int(tag.data)
    else:
        res['ntri'] = tag.data
    tag = find_tag(fid, this, FIFF.FIFF_MNE_COORD_FRAME)
    if tag is None:
        raise ValueError('Coordinate frame information not found')
    res['coord_frame'] = tag.data[0]
    # Vertices, normals, and triangles
    tag = find_tag(fid, this, FIFF.FIFF_MNE_SOURCE_SPACE_POINTS)
    if tag is None:
        raise ValueError('Vertex data not found')
    # use builtin float, not np.float (alias removed in NumPy >= 1.24);
    # double precision is needed for mayavi
    res['rr'] = tag.data.astype(float)
    if res['rr'].shape[0] != res['np']:
        raise ValueError('Vertex information is incorrect')
    tag = find_tag(fid, this, FIFF.FIFF_MNE_SOURCE_SPACE_NORMALS)
    if tag is None:
        raise ValueError('Vertex normals not found')
    res['nn'] = tag.data.copy()
    if res['nn'].shape[0] != res['np']:
        raise ValueError('Vertex normal information is incorrect')
    if res['ntri'] > 0:
        tag = find_tag(fid, this, FIFF_BEM_SURF_TRIANGLES)
        if tag is None:
            tag = find_tag(fid, this, FIFF.FIFF_MNE_SOURCE_SPACE_TRIANGLES)
            if tag is None:
                raise ValueError('Triangulation not found')
            else:
                res['tris'] = tag.data - 1  # index start at 0 in Python
        else:
            res['tris'] = tag.data - 1  # index start at 0 in Python
        if res['tris'].shape[0] != res['ntri']:
            raise ValueError('Triangulation information is incorrect')
    else:
        res['tris'] = None
    # Which vertices are active
    tag = find_tag(fid, this, FIFF.FIFF_MNE_SOURCE_SPACE_NUSE)
    if tag is None:
        res['nuse'] = 0
        # builtin int, not np.int (alias removed in NumPy >= 1.24)
        res['inuse'] = np.zeros(res['nuse'], dtype=int)
        res['vertno'] = None
    else:
        res['nuse'] = int(tag.data)
        tag = find_tag(fid, this, FIFF.FIFF_MNE_SOURCE_SPACE_SELECTION)
        if tag is None:
            raise ValueError('Source selection information missing')
        res['inuse'] = tag.data.astype(int).T
        if len(res['inuse']) != res['np']:
            raise ValueError('Incorrect number of entries in source space '
                             'selection')
        res['vertno'] = np.where(res['inuse'])[0]
    # Use triangulation
    tag1 = find_tag(fid, this, FIFF.FIFF_MNE_SOURCE_SPACE_NUSE_TRI)
    tag2 = find_tag(fid, this, FIFF.FIFF_MNE_SOURCE_SPACE_USE_TRIANGLES)
    if tag1 is None or tag2 is None:
        res['nuse_tri'] = 0
        res['use_tris'] = None
    else:
        res['nuse_tri'] = tag1.data
        res['use_tris'] = tag2.data - 1  # index start at 0 in Python
    # Patch-related information
    tag1 = find_tag(fid, this, FIFF.FIFF_MNE_SOURCE_SPACE_NEAREST)
    tag2 = find_tag(fid, this, FIFF.FIFF_MNE_SOURCE_SPACE_NEAREST_DIST)
    if tag1 is None or tag2 is None:
        res['nearest'] = None
        res['nearest_dist'] = None
    else:
        res['nearest'] = tag1.data
        res['nearest_dist'] = tag2.data.T
    _add_patch_info(res)
    # Distances
    tag1 = find_tag(fid, this, FIFF.FIFF_MNE_SOURCE_SPACE_DIST)
    tag2 = find_tag(fid, this, FIFF.FIFF_MNE_SOURCE_SPACE_DIST_LIMIT)
    if tag1 is None or tag2 is None:
        res['dist'] = None
        res['dist_limit'] = None
    else:
        res['dist'] = tag1.data
        res['dist_limit'] = tag2.data
        # Add the upper triangle
        res['dist'] = res['dist'] + res['dist'].T
    if res['dist'] is not None:
        logger.info('    Distance information added...')
    tag = find_tag(fid, this, FIFF.FIFF_SUBJ_HIS_ID)
    if tag is None:
        res['subject_his_id'] = None
    else:
        res['subject_his_id'] = tag.data
    return res
@verbose
def _complete_source_space_info(this, verbose=None):
    """Add more info on surface.

    Computes per-triangle centroids, normals, and areas for both the full
    triangulation ('tris') and the in-use subset ('use_tris'), storing
    everything in-place in ``this``.

    Parameters
    ----------
    this : dict
        A single (surface) source space; modified in-place.
    verbose : bool, str, int, or None
        If not None, override default verbose level.
    """
    # Main triangulation
    logger.info('    Completing triangulation info...')
    # pre-allocated, then overwritten below
    this['tri_area'] = np.zeros(this['ntri'])
    r1 = this['rr'][this['tris'][:, 0], :]
    r2 = this['rr'][this['tris'][:, 1], :]
    r3 = this['rr'][this['tris'][:, 2], :]
    this['tri_cent'] = (r1 + r2 + r3) / 3.0
    this['tri_nn'] = fast_cross_3d((r2 - r1), (r3 - r1))
    # NOTE(review): _normalize_vectors appears to normalize 'tri_nn'
    # in-place and return the original norms (twice the triangle areas)
    # — confirm against its definition
    this['tri_area'] = _normalize_vectors(this['tri_nn']) / 2.0
    logger.info('[done]')
    # Selected triangles
    logger.info('    Completing selection triangulation info...')
    if this['nuse_tri'] > 0:
        r1 = this['rr'][this['use_tris'][:, 0], :]
        r2 = this['rr'][this['use_tris'][:, 1], :]
        r3 = this['rr'][this['use_tris'][:, 2], :]
        this['use_tri_cent'] = (r1 + r2 + r3) / 3.0
        this['use_tri_nn'] = fast_cross_3d((r2 - r1), (r3 - r1))
        # here 'use_tri_nn' is left un-normalized; area from its norm
        this['use_tri_area'] = np.linalg.norm(this['use_tri_nn'], axis=1) / 2.
    logger.info('[done]')
def find_source_space_hemi(src):
    """Return the hemisphere id for a source space.

    Parameters
    ----------
    src : dict
        The source space to investigate.

    Returns
    -------
    hemi : int
        Deduced hemisphere id.
    """
    # The sign of the summed x coordinates identifies the hemisphere:
    # negative -> left, non-negative -> right.
    if src['rr'][:, 0].sum() < 0:
        return int(FIFF.FIFFV_MNE_SURF_LEFT_HEMI)
    return int(FIFF.FIFFV_MNE_SURF_RIGHT_HEMI)
def label_src_vertno_sel(label, src):
    """Find vertex numbers and indices from label.

    Parameters
    ----------
    label : Label
        Source space label.
    src : dict
        Source space.

    Returns
    -------
    vertices : list of length 2
        Vertex numbers for lh and rh.
    src_sel : array of int (len(idx) = len(vertices[0]) + len(vertices[1]))
        Indices of the selected vertices in source space.

    Raises
    ------
    ValueError
        If the source space is not surface-based.
    Exception
        If the label hemisphere is not recognized.
    """
    if src[0]['type'] != 'surf':
        # BUG FIX: this previously *returned* the exception object instead
        # of raising it, so non-surface source spaces slipped through
        raise ValueError('Labels are only supported with surface source '
                         'spaces')
    vertno = [src[0]['vertno'], src[1]['vertno']]
    if label.hemi == 'lh':
        vertno_sel = np.intersect1d(vertno[0], label.vertices)
        src_sel = np.searchsorted(vertno[0], vertno_sel)
        vertno[0] = vertno_sel
        vertno[1] = np.array([], int)
    elif label.hemi == 'rh':
        vertno_sel = np.intersect1d(vertno[1], label.vertices)
        # rh indices come after all lh vertices in the source space
        src_sel = np.searchsorted(vertno[1], vertno_sel) + len(vertno[0])
        vertno[0] = np.array([], int)
        vertno[1] = vertno_sel
    elif label.hemi == 'both':
        vertno_sel_lh = np.intersect1d(vertno[0], label.lh.vertices)
        src_sel_lh = np.searchsorted(vertno[0], vertno_sel_lh)
        vertno_sel_rh = np.intersect1d(vertno[1], label.rh.vertices)
        src_sel_rh = np.searchsorted(vertno[1], vertno_sel_rh) + len(vertno[0])
        src_sel = np.hstack((src_sel_lh, src_sel_rh))
        vertno = [vertno_sel_lh, vertno_sel_rh]
    else:
        raise Exception("Unknown hemisphere type")
    return vertno, src_sel
def _get_vertno(src):
return [s['vertno'] for s in src]
###############################################################################
# Write routines
@verbose
def _write_source_spaces_to_fid(fid, src, verbose=None):
    """Write the source spaces to a FIF file.

    Parameters
    ----------
    fid : file descriptor
        An open file descriptor.
    src : list
        The list of source spaces.
    verbose : bool, str, int, or None
        If not None, override default verbose level (see :func:`mne.verbose`
        and :ref:`Logging documentation <tut_logging>` for more).
    """
    # one FIF block per source space
    for space in src:
        logger.info('    Write a source space...')
        start_block(fid, FIFF.FIFFB_MNE_SOURCE_SPACE)
        _write_one_source_space(fid, space, verbose)
        end_block(fid, FIFF.FIFFB_MNE_SOURCE_SPACE)
        logger.info('    [done]')
    logger.info('    %d source spaces written' % len(src))
@verbose
def write_source_spaces(fname, src, overwrite=False, verbose=None):
    """Write source spaces to a file.

    Parameters
    ----------
    fname : str
        The name of the file, which should end with -src.fif or
        -src.fif.gz.
    src : SourceSpaces
        The source spaces (as returned by read_source_spaces).
    overwrite : bool
        If True, the destination file (if it exists) will be overwritten.
        If False (default), an error will be raised if the file exists.
    verbose : bool, str, int, or None
        If not None, override default verbose level (see :func:`mne.verbose`
        and :ref:`Logging documentation <tut_logging>` for more).

    See Also
    --------
    read_source_spaces
    """
    check_fname(fname, 'source space', ('-src.fif', '-src.fif.gz'))
    _check_fname(fname, overwrite=overwrite)
    fid = start_file(fname)
    start_block(fid, FIFF.FIFFB_MNE)
    if src.info:
        # record provenance (working dir / command line) when available
        start_block(fid, FIFF.FIFFB_MNE_ENV)
        write_id(fid, FIFF.FIFF_BLOCK_ID)
        for key, kind in (('working_dir', FIFF.FIFF_MNE_ENV_WORKING_DIR),
                          ('command_line', FIFF.FIFF_MNE_ENV_COMMAND_LINE)):
            value = src.info.get(key, None)
            if value:
                write_string(fid, kind, value)
        end_block(fid, FIFF.FIFFB_MNE_ENV)
    _write_source_spaces_to_fid(fid, src, verbose)
    end_block(fid, FIFF.FIFFB_MNE)
    end_file(fid)
def _write_one_source_space(fid, this, verbose=None):
    """Write one source space.

    The tag write order below defines the on-disk FIF layout — do not
    reorder these calls.

    Parameters
    ----------
    fid : file descriptor
        An open file descriptor.
    this : dict
        The source space to write.
    verbose : bool, str, int, or None
        Unused in the body; accepted for signature compatibility with the
        caller.
    """
    if this['type'] == 'surf':
        src_type = FIFF.FIFFV_MNE_SPACE_SURFACE
    elif this['type'] == 'vol':
        src_type = FIFF.FIFFV_MNE_SPACE_VOLUME
    elif this['type'] == 'discrete':
        src_type = FIFF.FIFFV_MNE_SPACE_DISCRETE
    else:
        raise ValueError('Unknown source space type (%s)' % this['type'])
    write_int(fid, FIFF.FIFF_MNE_SOURCE_SPACE_TYPE, src_type)
    if this['id'] >= 0:
        write_int(fid, FIFF.FIFF_MNE_SOURCE_SPACE_ID, this['id'])
    data = this.get('subject_his_id', None)
    if data:
        write_string(fid, FIFF.FIFF_SUBJ_HIS_ID, data)
    write_int(fid, FIFF.FIFF_MNE_COORD_FRAME, this['coord_frame'])
    write_int(fid, FIFF.FIFF_MNE_SOURCE_SPACE_NPOINTS, this['np'])
    write_float_matrix(fid, FIFF.FIFF_MNE_SOURCE_SPACE_POINTS, this['rr'])
    write_float_matrix(fid, FIFF.FIFF_MNE_SOURCE_SPACE_NORMALS, this['nn'])
    # Which vertices are active
    write_int(fid, FIFF.FIFF_MNE_SOURCE_SPACE_SELECTION, this['inuse'])
    write_int(fid, FIFF.FIFF_MNE_SOURCE_SPACE_NUSE, this['nuse'])
    if this['ntri'] > 0:
        write_int(fid, FIFF.FIFF_MNE_SOURCE_SPACE_NTRI, this['ntri'])
        # triangles are stored 1-based on disk
        write_int_matrix(fid, FIFF.FIFF_MNE_SOURCE_SPACE_TRIANGLES,
                         this['tris'] + 1)
    if this['type'] != 'vol' and this['use_tris'] is not None:
        # Use triangulation
        write_int(fid, FIFF.FIFF_MNE_SOURCE_SPACE_NUSE_TRI, this['nuse_tri'])
        write_int_matrix(fid, FIFF.FIFF_MNE_SOURCE_SPACE_USE_TRIANGLES,
                         this['use_tris'] + 1)
    if this['type'] == 'vol':
        # neighbor lists are flattened with a parallel per-vertex count array
        neighbor_vert = this.get('neighbor_vert', None)
        if neighbor_vert is not None:
            nneighbors = np.array([len(n) for n in neighbor_vert])
            neighbors = np.concatenate(neighbor_vert)
            write_int(fid, FIFF.FIFF_MNE_SOURCE_SPACE_NNEIGHBORS, nneighbors)
            write_int(fid, FIFF.FIFF_MNE_SOURCE_SPACE_NEIGHBORS, neighbors)
        write_coord_trans(fid, this['src_mri_t'])
        write_int(fid, FIFF.FIFF_MNE_SOURCE_SPACE_VOXEL_DIMS, this['shape'])
        # parent MRI metadata lives in its own nested block
        start_block(fid, FIFF.FIFFB_MNE_PARENT_MRI_FILE)
        write_coord_trans(fid, this['mri_ras_t'])
        write_coord_trans(fid, this['vox_mri_t'])
        mri_volume_name = this.get('mri_volume_name', None)
        if mri_volume_name is not None:
            write_string(fid, FIFF.FIFF_MNE_FILE_NAME, mri_volume_name)
        write_float_sparse_rcs(fid, FIFF.FIFF_MNE_SOURCE_SPACE_INTERPOLATOR,
                               this['interpolator'])
        if 'mri_file' in this and this['mri_file'] is not None:
            write_string(fid, FIFF.FIFF_MNE_SOURCE_SPACE_MRI_FILE,
                         this['mri_file'])
        write_int(fid, FIFF.FIFF_MRI_WIDTH, this['mri_width'])
        write_int(fid, FIFF.FIFF_MRI_HEIGHT, this['mri_height'])
        write_int(fid, FIFF.FIFF_MRI_DEPTH, this['mri_depth'])
        end_block(fid, FIFF.FIFFB_MNE_PARENT_MRI_FILE)
    # Patch-related information
    if this['nearest'] is not None:
        write_int(fid, FIFF.FIFF_MNE_SOURCE_SPACE_NEAREST, this['nearest'])
        write_float_matrix(fid, FIFF.FIFF_MNE_SOURCE_SPACE_NEAREST_DIST,
                           this['nearest_dist'])
    # Distances
    if this['dist'] is not None:
        # Save only upper triangular portion of the matrix
        dists = this['dist'].copy()
        dists = sparse.triu(dists, format=dists.format)
        write_float_sparse_rcs(fid, FIFF.FIFF_MNE_SOURCE_SPACE_DIST, dists)
        write_float_matrix(fid, FIFF.FIFF_MNE_SOURCE_SPACE_DIST_LIMIT,
                           this['dist_limit'])
    # Segmentation data
    if this['type'] == 'vol' and ('seg_name' in this):
        # Save the name of the segment
        write_string(fid, FIFF.FIFF_COMMENT, this['seg_name'])
##############################################################################
# Surface to MNI conversion
@verbose
def vertex_to_mni(vertices, hemis, subject, subjects_dir=None, mode=None,
                  verbose=None):
    """Convert the array of vertices for a hemisphere to MNI coordinates.

    Parameters
    ----------
    vertices : int, or list of int
        Vertex number(s) to convert.
    hemis : int, or list of int
        Hemisphere(s) the vertices belong to.
    subject : string
        Name of the subject to load surfaces from.
    subjects_dir : string, or None
        Path to SUBJECTS_DIR if it is not set in the environment.
    mode : string | None
        Either 'nibabel' or 'freesurfer' for the software to use to
        obtain the transforms. If None, 'nibabel' is tried first, falling
        back to 'freesurfer' if it fails. Results should be equivalent with
        either option, but nibabel may be quicker (and more pythonic).
    verbose : bool, str, int, or None
        If not None, override default verbose level (see :func:`mne.verbose`
        and :ref:`Logging documentation <tut_logging>` for more).

    Returns
    -------
    coordinates : n_vertices x 3 array of float
        The MNI coordinates (in mm) of the vertices.

    Notes
    -----
    This function requires either nibabel (in Python) or Freesurfer
    (with utility "mri_info") to be correctly installed.
    """
    if not has_freesurfer() and not has_nibabel():
        raise RuntimeError('NiBabel (Python) or Freesurfer (Unix) must be '
                           'correctly installed and accessible from Python')
    # normalize scalar inputs to sequences
    if not isinstance(vertices, (list, np.ndarray)):
        vertices = [vertices]
    if not isinstance(hemis, (list, np.ndarray)):
        hemis = [hemis] * len(vertices)
    if len(hemis) != len(vertices):
        raise ValueError('hemi and vertices must match in length')
    subjects_dir = get_subjects_dir(subjects_dir, raise_error=True)
    # read both white-surface vertex locations (MRI space)
    rr = [read_surface(op.join(subjects_dir, subject, 'surf',
                               '%s.white' % h))[0]
          for h in ['lh', 'rh']]
    # gather the requested locations and map them through MRI -> MNI
    data = np.array([rr[h][v, :] for h, v in zip(hemis, vertices)])
    xfm = _read_talxfm(subject, subjects_dir, mode)
    return apply_trans(xfm['trans'], data)
##############################################################################
# Volume to MNI conversion
@verbose
def head_to_mni(pos, subject, mri_head_t, subjects_dir=None,
                verbose=None):
    """Convert pos from head coordinate system to MNI ones.

    Parameters
    ----------
    pos : array, shape (n_pos, 3)
        The coordinates (in m) in head coordinate system.
    subject : string
        Name of the subject.
    mri_head_t: instance of Transform
        MRI<->Head coordinate transformation.
    subjects_dir : string, or None
        Path to SUBJECTS_DIR if it is not set in the environment.
    verbose : bool, str, int, or None
        If not None, override default verbose level (see :func:`mne.verbose`
        and :ref:`Logging documentation <tut_logging>` for more).

    Returns
    -------
    coordinates : array, shape (n_pos, 3)
        The MNI coordinates (in mm) of pos.

    Notes
    -----
    This function requires either nibabel (in Python) or Freesurfer
    (with utility "mri_info") to be correctly installed.
    """
    subjects_dir = get_subjects_dir(subjects_dir, raise_error=True)
    # first go head -> MRI (surface RAS), still in meters
    head_mri_t = _ensure_trans(mri_head_t, 'head', 'mri')
    pos_mri = apply_trans(head_mri_t, pos)
    # then MRI (scaled to mm) -> MNI
    xfm = _read_talxfm(subject, subjects_dir)
    return apply_trans(xfm['trans'], pos_mri * 1000)
@verbose
def _read_talxfm(subject, subjects_dir, mode=None, verbose=None):
    """Read MNI transform from FreeSurfer talairach.xfm file.

    Adapted from freesurfer m-files. Altered to deal with Norig
    and Torig correctly.

    Parameters
    ----------
    subject : str
        Subject name (directory under ``subjects_dir``).
    mode : str | None
        'nibabel', 'freesurfer', or None (try nibabel first).
    verbose : bool, str, int, or None
        If not None, override default verbose level.

    Returns
    -------
    mri_mni_t : dict
        The MRI (surface RAS) -> MNI Talairach transform.
    """
    if mode is not None and mode not in ['nibabel', 'freesurfer']:
        raise ValueError('mode must be "nibabel" or "freesurfer"')
    fname = op.join(subjects_dir, subject, 'mri', 'transforms',
                    'talairach.xfm')
    # read the RAS to MNI transform from talairach.xfm
    with open(fname, 'r') as fid:
        logger.debug('Reading FreeSurfer talairach.xfm file:\n%s' % fname)
        # read lines until we get the string 'Linear_Transform', which precedes
        # the data transformation matrix
        got_it = False
        comp = 'Linear_Transform'
        for line in fid:
            if line[:len(comp)] == comp:
                # we have the right line, so don't read any more
                got_it = True
                break
        if got_it:
            xfm = list()
            # read the transformation matrix (3x4)
            for ii, line in enumerate(fid):
                digs = [float(s) for s in line.strip('\n;').split()]
                xfm.append(digs)
                if ii == 2:
                    break
            # append homogeneous bottom row to make a 4x4 matrix
            xfm.append([0., 0., 0., 1.])
            xfm = np.array(xfm, dtype=float)
        else:
            raise ValueError('failed to find \'Linear_Transform\' string in '
                             'xfm file:\n%s' % fname)
    # Setup the RAS to MNI transform
    ras_mni_t = {'from': FIFF.FIFFV_MNE_COORD_RAS,
                 'to': FIFF.FIFFV_MNE_COORD_MNI_TAL, 'trans': xfm}
    # now get Norig and Torig
    # (i.e. vox_ras_t and vox_mri_t, respectively)
    path = op.join(subjects_dir, subject, 'mri', 'orig.mgz')
    if not op.isfile(path):
        # fall back to T1.mgz if orig.mgz is absent
        path = op.join(subjects_dir, subject, 'mri', 'T1.mgz')
        if not op.isfile(path):
            raise IOError('mri not found: %s' % path)
    if has_nibabel():
        use_nibabel = True
    else:
        use_nibabel = False
        if mode == 'nibabel':
            raise ImportError('Tried to import nibabel but failed, try using '
                              'mode=None or mode=Freesurfer')
    # note that if mode == None, then we default to using nibabel
    if use_nibabel is True and mode == 'freesurfer':
        use_nibabel = False
    if use_nibabel:
        hdr = _get_mri_header(path)
        # read the MRI_VOXEL to RAS transform
        n_orig = hdr.get_vox2ras()
        # read the MRI_VOXEL to MRI transform
        ds = np.array(hdr.get_zooms())
        ns = (np.array(hdr.get_data_shape()[:3]) * ds) / 2.0
        t_orig = np.array([[-ds[0], 0, 0, ns[0]],
                           [0, 0, ds[2], -ns[2]],
                           [0, -ds[1], 0, ns[1]],
                           [0, 0, 0, 1]], dtype=float)
        nt_orig = [n_orig, t_orig]
    else:
        # shell out to FreeSurfer's mri_info for the two transforms
        nt_orig = list()
        for conv in ['--vox2ras', '--vox2ras-tkr']:
            stdout, stderr = run_subprocess(['mri_info', conv, path])
            # NOTE(review): np.fromstring with sep is deprecated in modern
            # NumPy — consider np.fromiter/np.array over split() tokens
            stdout = np.fromstring(stdout, sep=' ').astype(float)
            if not stdout.size == 16:
                raise ValueError('Could not parse Freesurfer mri_info output')
            nt_orig.append(stdout.reshape(4, 4))
    # extract the MRI_VOXEL to RAS transform
    n_orig = nt_orig[0]
    vox_ras_t = {'from': FIFF.FIFFV_MNE_COORD_MRI_VOXEL,
                 'to': FIFF.FIFFV_MNE_COORD_RAS,
                 'trans': n_orig}
    # extract the MRI_VOXEL to MRI transform
    t_orig = nt_orig[1]
    vox_mri_t = Transform('mri_voxel', 'mri', t_orig)
    # invert MRI_VOXEL to MRI to get the MRI to MRI_VOXEL transform
    mri_vox_t = invert_transform(vox_mri_t)
    # construct an MRI to RAS transform
    mri_ras_t = combine_transforms(mri_vox_t, vox_ras_t, 'mri', 'ras')
    # construct the MRI to MNI transform
    mri_mni_t = combine_transforms(mri_ras_t, ras_mni_t, 'mri', 'mni_tal')
    return mri_mni_t
###############################################################################
# Creation and decimation
@verbose
def _check_spacing(spacing, verbose=None):
    """Check and parse the ``spacing`` parameter.

    Parameters
    ----------
    spacing : str
        One of ``'ico#'``, ``'oct#'`` (``#`` an integer subdivision grade),
        or ``'all'``.
    verbose : bool, str, int, or None
        If not None, override default verbose level.

    Returns
    -------
    stype : str
        The spacing type: 'all', 'ico', or 'oct'.
    sval : int | str
        The subdivision grade (int), or '' for 'all'.
    ico_surf : dict | None
        The subdivided sphere surface used for subsampling (None for 'all').
    src_type_str : str
        Human-readable description of the spacing.
    """
    # check to make sure our parameters are good, parse 'spacing'
    # (error message fixed: there was a missing space before "numbers")
    space_err = ('"spacing" must be a string with values '
                 '"ico#", "oct#", or "all", and "ico" and "oct" '
                 'numbers must be integers')
    if not isinstance(spacing, string_types) or len(spacing) < 3:
        raise ValueError(space_err)
    if spacing == 'all':
        stype = 'all'
        sval = ''
    elif spacing[:3] == 'ico':
        stype = 'ico'
        sval = spacing[3:]
    elif spacing[:3] == 'oct':
        stype = 'oct'
        sval = spacing[3:]
    else:
        raise ValueError(space_err)
    # a dead "elif stype == 'spacing'" branch was removed here: at this
    # point stype can only be 'all', 'ico', or 'oct'
    try:
        if stype in ['ico', 'oct']:
            sval = int(sval)
    except Exception:
        raise ValueError(space_err)
    if stype == 'all':
        logger.info('Include all vertices')
        ico_surf = None
        src_type_str = 'all'
    else:
        src_type_str = '%s = %s' % (stype, sval)
        if stype == 'ico':
            logger.info('Icosahedron subdivision grade %s' % sval)
            ico_surf = _get_ico_surface(sval)
        elif stype == 'oct':
            logger.info('Octahedron subdivision grade %s' % sval)
            ico_surf = _tessellate_sphere_surf(sval)
    return stype, sval, ico_surf, src_type_str
@verbose
def setup_source_space(subject, spacing='oct6', surface='white',
                       subjects_dir=None, add_dist=True, n_jobs=1,
                       verbose=None):
    """Set up bilateral hemisphere surface-based source space with subsampling.
    Parameters
    ----------
    subject : str
        Subject to process.
    spacing : str
        The spacing to use. Can be ``'ico#'`` for a recursively subdivided
        icosahedron, ``'oct#'`` for a recursively subdivided octahedron,
        or ``'all'`` for all points.
    surface : str
        The surface to use.
    subjects_dir : string, or None
        Path to SUBJECTS_DIR if it is not set in the environment.
    add_dist : bool
        Add distance and patch information to the source space. This takes some
        time so precomputing it is recommended.
    n_jobs : int
        Number of jobs to run in parallel. Will use at most 2 jobs
        (one for each hemisphere).
    verbose : bool, str, int, or None
        If not None, override default verbose level (see :func:`mne.verbose`
        and :ref:`Logging documentation <tut_logging>` for more).
    Returns
    -------
    src : SourceSpaces
        The source space for each hemisphere.
    See Also
    --------
    setup_volume_source_space
    """
    # Record the invocation for provenance; stored in the SourceSpaces info
    cmd = ('setup_source_space(%s, spacing=%s, surface=%s, '
           'subjects_dir=%s, add_dist=%s, verbose=%s)'
           % (subject, spacing, surface, subjects_dir, add_dist, verbose))
    subjects_dir = get_subjects_dir(subjects_dir, raise_error=True)
    # Build the per-hemisphere FreeSurfer surface paths (e.g. lh.white)
    surfs = [op.join(subjects_dir, subject, 'surf', hemi + surface)
             for hemi in ['lh.', 'rh.']]
    for surf, hemi in zip(surfs, ['LH', 'RH']):
        if surf is not None and not op.isfile(surf):
            raise IOError('Could not find the %s surface %s'
                          % (hemi, surf))
    logger.info('Setting up the source space with the following parameters:\n')
    logger.info('SUBJECTS_DIR = %s' % subjects_dir)
    logger.info('Subject = %s' % subject)
    logger.info('Surface = %s' % surface)
    # Parse/validate the spacing string ('ico#'/'oct#'/'all')
    stype, sval, ico_surf, src_type_str = _check_spacing(spacing)
    logger.info('')
    del spacing
    logger.info('>>> 1. Creating the source space...\n')
    # mne_make_source_space ... actually make the source spaces
    src = []
    # pre-load ico/oct surf (once) for speed, if necessary
    if stype != 'all':
        logger.info('Doing the %shedral vertex picking...'
                    % (dict(ico='icosa', oct='octa')[stype],))
    for hemi, surf in zip(['lh', 'rh'], surfs):
        logger.info('Loading %s...' % surf)
        # Setup the surface spacing in the MRI coord frame
        if stype != 'all':
            logger.info('Mapping %s %s -> %s (%d) ...'
                        % (hemi, subject, stype, sval))
        s = _create_surf_spacing(surf, hemi, subject, stype, ico_surf,
                                 subjects_dir)
        logger.info('loaded %s %d/%d selected to source space (%s)'
                    % (op.split(surf)[1], s['nuse'], s['np'], src_type_str))
        src.append(s)
        logger.info('')  # newline after both subject types are run
    # Fill in source space info
    hemi_ids = [FIFF.FIFFV_MNE_SURF_LEFT_HEMI, FIFF.FIFFV_MNE_SURF_RIGHT_HEMI]
    for s, s_id in zip(src, hemi_ids):
        # Add missing fields
        s.update(dict(dist=None, dist_limit=None, nearest=None, type='surf',
                      nearest_dist=None, pinfo=None, patch_inds=None, id=s_id,
                      coord_frame=FIFF.FIFFV_COORD_MRI))
        # FreeSurfer surfaces are in mm; convert positions to meters
        s['rr'] /= 1000.0
        # drop per-triangle helper fields not stored in a source space
        del s['tri_area']
        del s['tri_cent']
        del s['tri_nn']
        del s['neighbor_tri']
    # upconvert to object format from lists
    src = SourceSpaces(src, dict(working_dir=os.getcwd(), command_line=cmd))
    if add_dist:
        add_source_space_distances(src, n_jobs=n_jobs, verbose=verbose)
    # write out if requested, then return the data
    logger.info('You are now one step closer to computing the gain matrix')
    return src
@verbose
def setup_volume_source_space(subject=None, pos=5.0, mri=None,
                              sphere=(0.0, 0.0, 0.0, 90.0), bem=None,
                              surface=None, mindist=5.0, exclude=0.0,
                              subjects_dir=None, volume_label=None,
                              add_interpolator=True, verbose=None):
    """Set up a volume source space with grid spacing or discrete source space.
    Parameters
    ----------
    subject : str | None
        Subject to process. If None, the path to the mri volume must be
        absolute. Defaults to None.
    pos : float | dict
        Positions to use for sources. If float, a grid will be constructed
        with the spacing given by `pos` in mm, generating a volume source
        space. If dict, pos['rr'] and pos['nn'] will be used as the source
        space locations (in meters) and normals, respectively, creating a
        discrete source space. NOTE: For a discrete source space (`pos` is
        a dict), `mri` must be None.
    mri : str | None
        The filename of an MRI volume (mgh or mgz) to create the
        interpolation matrix over. Source estimates obtained in the
        volume source space can then be morphed onto the MRI volume
        using this interpolator. If pos is a dict, this can be None.
    sphere : ndarray, shape (4,) | ConductorModel
        Define spherical source space bounds using origin and radius given
        by (ox, oy, oz, rad) in mm. Only used if ``bem`` and ``surface``
        are both None. Can also be a spherical ConductorModel, which will
        use the origin and radius.
    bem : str | None
        Define source space bounds using a BEM file (specifically the inner
        skull surface).
    surface : str | dict | None
        Define source space bounds using a FreeSurfer surface file. Can
        also be a dictionary with entries `'rr'` and `'tris'`, such as
        those returned by :func:`mne.read_surface`.
    mindist : float
        Exclude points closer than this distance (mm) to the bounding surface.
    exclude : float
        Exclude points closer than this distance (mm) from the center of mass
        of the bounding surface.
    subjects_dir : string, or None
        Path to SUBJECTS_DIR if it is not set in the environment.
    volume_label : str | list | None
        Region of interest corresponding with freesurfer lookup table.
    add_interpolator : bool
        If True and ``mri`` is not None, then an interpolation matrix
        will be produced.
    verbose : bool, str, int, or None
        If not None, override default verbose level (see :func:`mne.verbose`
        and :ref:`Logging documentation <tut_logging>` for more).
    Returns
    -------
    src : SourceSpaces
        A :class:`SourceSpaces` object containing one source space for each
        entry of ``volume_labels``, or a single source space if
        ``volume_labels`` was not specified.
    See Also
    --------
    setup_source_space
    Notes
    -----
    To create a discrete source space, `pos` must be a dict, 'mri' must be
    None, and 'volume_label' must be None. To create a whole brain volume
    source space, `pos` must be a float and 'mri' must be provided. To create
    a volume source space from label, 'pos' must be a float, 'volume_label'
    must be provided, and 'mri' must refer to a .mgh or .mgz file with values
    corresponding to the freesurfer lookup-table (typically aseg.mgz).
    """
    subjects_dir = get_subjects_dir(subjects_dir)
    # --- Argument validation ---
    if bem is not None and surface is not None:
        raise ValueError('Only one of "bem" and "surface" should be '
                         'specified')
    if mri is not None:
        # Allow either an absolute path or a path relative to the
        # subject's mri/ directory
        if not op.isfile(mri):
            if subject is None:
                raise IOError('mri file "%s" not found' % mri)
            mri = op.join(subjects_dir, subject, 'mri', mri)
            if not op.isfile(mri):
                raise IOError('mri file "%s" not found' % mri)
        if isinstance(pos, dict):
            raise ValueError('Cannot create interpolation matrix for '
                             'discrete source space, mri must be None if '
                             'pos is a dict')
    if volume_label is not None:
        if mri is None:
            raise RuntimeError('"mri" must be provided if "volume_label" is '
                               'not None')
        if not isinstance(volume_label, list):
            volume_label = [volume_label]
        # Check that volume label is found in .mgz file
        volume_labels = get_volume_labels_from_aseg(mri)
        for label in volume_label:
            if label not in volume_labels:
                raise ValueError('Volume %s not found in file %s. Double '
                                 'check freesurfer lookup table.'
                                 % (label, mri))
    if isinstance(sphere, ConductorModel):
        # NOTE(review): the check only requires >= 1 layer even though the
        # message mentions "multiple layers" -- confirm intended contract
        if not sphere['is_sphere'] or len(sphere['layers']) == 0:
            raise ValueError('sphere, if a ConductorModel, must be spherical '
                             'with multiple layers, not a BEM or single-layer '
                             'sphere (got %s)' % (sphere,))
        # ConductorModel stores meters; the code below expects mm
        sphere = tuple(1000 * sphere['r0']) + (1000 *
                                               sphere['layers'][0]['rad'],)
    sphere = np.asarray(sphere, dtype=float)
    if sphere.size != 4:
        raise ValueError('"sphere" must be array_like with 4 elements, got: %s'
                         % (sphere,))
    # triage bounding argument
    if bem is not None:
        logger.info('BEM file : %s', bem)
    elif surface is not None:
        if isinstance(surface, dict):
            if not all(key in surface for key in ['rr', 'tris']):
                raise KeyError('surface, if dict, must have entries "rr" '
                               'and "tris"')
            # let's make sure we have geom info
            complete_surface_info(surface, copy=False, verbose=False)
            surf_extra = 'dict()'
        elif isinstance(surface, string_types):
            if not op.isfile(surface):
                raise IOError('surface file "%s" not found' % surface)
            surf_extra = surface
        logger.info('Boundary surface file : %s', surf_extra)
    else:
        logger.info('Sphere : origin at (%.1f %.1f %.1f) mm'
                    % (sphere[0], sphere[1], sphere[2]))
        logger.info(' radius : %.1f mm' % sphere[3])
    # triage pos argument
    if isinstance(pos, dict):
        if not all(key in pos for key in ['rr', 'nn']):
            raise KeyError('pos, if dict, must contain "rr" and "nn"')
        pos_extra = 'dict()'
    else:  # pos should be float-like
        try:
            pos = float(pos)
        except (TypeError, ValueError):
            raise ValueError('pos must be a dict, or something that can be '
                             'cast to float()')
    if not isinstance(pos, float):
        logger.info('Source location file : %s', pos_extra)
        logger.info('Assuming input in millimeters')
        logger.info('Assuming input in MRI coordinates')
    if isinstance(pos, float):
        logger.info('grid : %.1f mm' % pos)
        logger.info('mindist : %.1f mm' % mindist)
        pos /= 1000.0  # convert pos from mm to m
    if exclude > 0.0:
        logger.info('Exclude : %.1f mm' % exclude)
    if mri is not None:
        logger.info('MRI volume : %s' % mri)
    exclude /= 1000.0  # convert exclude from mm to m
    logger.info('')
    # Explicit list of points
    if not isinstance(pos, float):
        # Make the grid of sources
        sp = _make_discrete_source_space(pos)
    else:
        # Load the brain surface as a template
        if bem is not None:
            # read bem surface in the MRI coordinate frame
            surf = read_bem_surfaces(bem, s_id=FIFF.FIFFV_BEM_SURF_ID_BRAIN,
                                     verbose=False)
            logger.info('Loaded inner skull from %s (%d nodes)'
                        % (bem, surf['np']))
        elif surface is not None:
            if isinstance(surface, string_types):
                # read the surface in the MRI coordinate frame
                surf = read_surface(surface, return_dict=True)[-1]
            else:
                surf = surface
            logger.info('Loaded bounding surface from %s (%d nodes)'
                        % (surface, surf['np']))
            surf = deepcopy(surf)
            surf['rr'] *= 1e-3  # must be converted to meters
        else:  # Load an icosahedron and use that as the surface
            logger.info('Setting up the sphere...')
            # sphere is in mm; _make_volume_source_space expects meters
            surf = dict(R=sphere[3] / 1000., r0=sphere[:3] / 1000.)
        # Make the grid of sources in MRI space
        if volume_label is not None:
            # One source space per requested label
            sp = []
            for label in volume_label:
                vol_sp = _make_volume_source_space(surf, pos, exclude, mindist,
                                                   mri, label)
                sp.append(vol_sp)
        else:
            sp = _make_volume_source_space(surf, pos, exclude, mindist, mri,
                                           volume_label)
    # Compute an interpolation matrix to show data in MRI_VOXEL coord frame
    if not isinstance(sp, list):
        sp = [sp]
    if mri is not None:
        for s in sp:
            _add_interpolator(s, mri, add_interpolator)
    elif sp[0]['type'] == 'vol':
        # If there is no interpolator, it's actually a discrete source space
        sp[0]['type'] = 'discrete'
    for s in sp:
        if 'vol_dims' in s:
            del s['vol_dims']
    # Save it
    for s in sp:
        s.update(dict(nearest=None, dist=None, use_tris=None, patch_inds=None,
                      dist_limit=None, pinfo=None, ntri=0, nearest_dist=None,
                      nuse_tri=0, tris=None, subject_his_id=subject))
    sp = SourceSpaces(sp, dict(working_dir=os.getcwd(), command_line='None'))
    return sp
def _make_voxel_ras_trans(move, ras, voxel_size):
    """Build the MRI_VOXEL -> MRI surface RAS affine transform.

    Parameters
    ----------
    move : array-like, shape (3,)
        Translation (origin of the voxel grid in RAS, meters).
    ras : ndarray, shape (3, 3)
        Direction cosines (rows map to RAS axes).
    voxel_size : ndarray, shape (3,)
        Voxel edge lengths along each axis.

    Returns
    -------
    t : Transform
        The 'mri_voxel' -> 'mri' transform.
    """
    assert voxel_size.ndim == 1
    assert voxel_size.size == 3
    # scale each direction-cosine column by the corresponding voxel size
    scaled = ras.T * voxel_size[np.newaxis, :]
    assert scaled.ndim == 2
    assert scaled.shape == (3, 3)
    # assemble the homogeneous 4x4: [R t; 0 1]
    affine = np.empty((4, 4))
    affine[:3, :3] = scaled
    affine[:3, 3] = move
    affine[3, :3] = 0.0
    affine[3, 3] = 1.0
    return Transform('mri_voxel', 'mri', affine)
def _make_discrete_source_space(pos, coord_frame='mri'):
    """Use a discrete set of source locs/oris to make src space.

    Parameters
    ----------
    pos : dict
        Must have entries "rr" and "nn". Data should be in meters.
    coord_frame : str
        The coordinate frame in which the positions are given; default: 'mri'.
        The frame must be one defined in transforms.py:_str_to_frame

    Returns
    -------
    src : dict
        The source space.
    """
    # Check that coordinate frame is valid
    if coord_frame not in _str_to_frame:  # will fail if coord_frame not string
        raise KeyError('coord_frame must be one of %s, not "%s"'
                       % (list(_str_to_frame.keys()), coord_frame))
    coord_frame = _str_to_frame[coord_frame]  # now an int
    # process points (copy and cast)
    rr = np.array(pos['rr'], float)
    nn = np.array(pos['nn'], float)
    # Validate that both are (n_sources, 3) with matching row counts.
    # (Fixed: the original compared nn.shape[0] to itself, so mismatched
    # rr/nn row counts slipped through; also enforce 3 columns as the
    # error message already promised.)
    if not (rr.ndim == nn.ndim == 2 and rr.shape[0] == nn.shape[0] and
            rr.shape[1] == nn.shape[1] == 3):
        raise RuntimeError('"rr" and "nn" must both be 2D arrays with '
                           'the same number of rows and 3 columns')
    npts = rr.shape[0]
    _normalize_vectors(nn)
    # reject any orientations that had zero length (presumably left at
    # zero by _normalize_vectors -- cannot define a source orientation)
    nz = np.sum(np.sum(nn * nn, axis=1) == 0)
    if nz != 0:
        raise RuntimeError('%d sources have zero length normal' % nz)
    logger.info('Positions (in meters) and orientations')
    logger.info('%d sources' % npts)
    # Ready to make the source space
    sp = dict(coord_frame=coord_frame, type='discrete', nuse=npts, np=npts,
              inuse=np.ones(npts, int), vertno=np.arange(npts), rr=rr, nn=nn,
              id=-1)
    return sp
def _make_volume_source_space(surf, grid, exclude, mindist, mri=None,
                              volume_label=None, do_neighbors=True, n_jobs=1):
    """Make a source space which covers the volume bounded by surf.

    Parameters
    ----------
    surf : dict
        Bounding geometry in meters: either a surface dict with 'rr'
        (vertex positions), or a sphere dict with 'r0' (center) and
        'R' (radius).
    grid : float
        Grid spacing in meters.
    exclude : float
        Exclude grid points closer than this (m) to the surface center
        of mass.
    mindist : float
        Exclude grid points closer than this (mm) to the bounding surface.
    mri : str | None
        Segmentation volume (e.g. aseg.mgz); required when restricting
        the grid with ``volume_label``.
    volume_label : str | None
        FreeSurfer label to restrict the grid to.
    do_neighbors : bool
        If True, compute the 26-neighborhood info (needed later for the
        interpolation matrix).
    n_jobs : int
        Number of jobs used when filtering points against the surface.

    Returns
    -------
    sp : dict
        The volume source space.
    """
    # Figure out the grid size in the MRI coordinate frame
    if 'rr' in surf:
        mins = np.min(surf['rr'], axis=0)
        maxs = np.max(surf['rr'], axis=0)
        cm = np.mean(surf['rr'], axis=0)  # center of mass
        maxdist = np.linalg.norm(surf['rr'] - cm, axis=1).max()
    else:
        # sphere: bounds follow directly from center and radius
        mins = surf['r0'] - surf['R']
        maxs = surf['r0'] + surf['R']
        cm = surf['r0'].copy()
        maxdist = surf['R']
    # Define the sphere which fits the surface
    logger.info('Surface CM = (%6.1f %6.1f %6.1f) mm'
                % (1000 * cm[0], 1000 * cm[1], 1000 * cm[2]))
    logger.info('Surface fits inside a sphere with radius %6.1f mm'
                % (1000 * maxdist))
    logger.info('Surface extent:')
    for c, mi, ma in zip('xyz', mins, maxs):
        logger.info(' %s = %6.1f ... %6.1f mm' % (c, 1000 * mi, 1000 * ma))
    # integer grid index bounds per axis (symmetric around the origin)
    maxn = np.array([np.floor(np.abs(m) / grid) + 1 if m > 0 else -
                     np.floor(np.abs(m) / grid) - 1 for m in maxs], int)
    minn = np.array([np.floor(np.abs(m) / grid) + 1 if m > 0 else -
                     np.floor(np.abs(m) / grid) - 1 for m in mins], int)
    logger.info('Grid extent:')
    for c, mi, ma in zip('xyz', minn, maxn):
        logger.info(' %s = %6.1f ... %6.1f mm'
                    % (c, 1000 * mi * grid, 1000 * ma * grid))
    # Now make the initial grid
    ns = maxn - minn + 1
    npts = np.prod(ns)
    nrow = ns[0]
    ncol = ns[1]
    nplane = nrow * ncol
    # x varies fastest, then y, then z (can use unravel to do this)
    rr = np.meshgrid(np.arange(minn[2], maxn[2] + 1),
                     np.arange(minn[1], maxn[1] + 1),
                     np.arange(minn[0], maxn[0] + 1), indexing='ij')
    x, y, z = rr[2].ravel(), rr[1].ravel(), rr[0].ravel()
    rr = np.array([x * grid, y * grid, z * grid]).T
    sp = dict(np=npts, nn=np.zeros((npts, 3)), rr=rr,
              inuse=np.ones(npts, int), type='vol', nuse=npts,
              coord_frame=FIFF.FIFFV_COORD_MRI, id=-1, shape=ns)
    sp['nn'][:, 2] = 1.0  # default orientation: +z
    assert sp['rr'].shape[0] == npts
    logger.info('%d sources before omitting any.', sp['nuse'])
    # Exclude infeasible points (too close to center, or outside the
    # bounding sphere of the surface)
    dists = np.linalg.norm(sp['rr'] - cm, axis=1)
    bads = np.where(np.logical_or(dists < exclude, dists > maxdist))[0]
    sp['inuse'][bads] = False
    sp['nuse'] -= len(bads)
    logger.info('%d sources after omitting infeasible sources.', sp['nuse'])
    if 'rr' in surf:
        _filter_source_spaces(surf, mindist, None, [sp], n_jobs)
    else:  # sphere
        vertno = np.where(sp['inuse'])[0]
        bads = (np.linalg.norm(sp['rr'][vertno] - surf['r0'], axis=-1) >=
                surf['R'] - mindist / 1000.)
        sp['nuse'] -= bads.sum()
        sp['inuse'][vertno[bads]] = False
        sp['vertno'] = np.where(sp['inuse'])[0]
        del vertno
    del surf
    logger.info('%d sources remaining after excluding the sources outside '
                'the surface and less than %6.1f mm inside.'
                % (sp['nuse'], mindist))
    if not do_neighbors:
        if volume_label is not None:
            # fixed message: this fires when volume_label IS set but
            # do_neighbors is False (the old text said "cannot be None")
            raise RuntimeError('volume_label cannot be used unless '
                               'do_neighbors is True')
        return sp
    k = np.arange(npts)
    neigh = np.empty((26, npts), int)
    neigh.fill(-1)
    # Figure out each neighborhood:
    # 6-neighborhood first (the axial +/-x, +/-y, +/-z neighbors)
    idxs = [z > minn[2], x < maxn[0], y < maxn[1],
            x > minn[0], y > minn[1], z < maxn[2]]
    offsets = [-nplane, 1, nrow, -1, -nrow, nplane]
    for n, idx, offset in zip(neigh[:6], idxs, offsets):
        n[idx] = k[idx] + offset
    # Then the rest to complete the 26-neighborhood
    # First the plane below
    idx1 = z > minn[2]
    idx2 = np.logical_and(idx1, x < maxn[0])
    neigh[6, idx2] = k[idx2] + 1 - nplane
    idx3 = np.logical_and(idx2, y < maxn[1])
    neigh[7, idx3] = k[idx3] + 1 + nrow - nplane
    idx2 = np.logical_and(idx1, y < maxn[1])
    neigh[8, idx2] = k[idx2] + nrow - nplane
    idx2 = np.logical_and(idx1, x > minn[0])
    idx3 = np.logical_and(idx2, y < maxn[1])
    neigh[9, idx3] = k[idx3] - 1 + nrow - nplane
    neigh[10, idx2] = k[idx2] - 1 - nplane
    idx3 = np.logical_and(idx2, y > minn[1])
    neigh[11, idx3] = k[idx3] - 1 - nrow - nplane
    idx2 = np.logical_and(idx1, y > minn[1])
    neigh[12, idx2] = k[idx2] - nrow - nplane
    idx3 = np.logical_and(idx2, x < maxn[0])
    neigh[13, idx3] = k[idx3] + 1 - nrow - nplane
    # Then the same plane
    idx1 = np.logical_and(x < maxn[0], y < maxn[1])
    neigh[14, idx1] = k[idx1] + 1 + nrow
    idx1 = x > minn[0]
    idx2 = np.logical_and(idx1, y < maxn[1])
    neigh[15, idx2] = k[idx2] - 1 + nrow
    idx2 = np.logical_and(idx1, y > minn[1])
    neigh[16, idx2] = k[idx2] - 1 - nrow
    idx1 = np.logical_and(y > minn[1], x < maxn[0])
    # fixed: the (+1, -nrow) diagonal stays on the SAME plane, so no
    # -nplane offset belongs here (it previously pointed one plane down)
    neigh[17, idx1] = k[idx1] + 1 - nrow
    # Finally one plane above
    idx1 = z < maxn[2]
    idx2 = np.logical_and(idx1, x < maxn[0])
    neigh[18, idx2] = k[idx2] + 1 + nplane
    idx3 = np.logical_and(idx2, y < maxn[1])
    neigh[19, idx3] = k[idx3] + 1 + nrow + nplane
    idx2 = np.logical_and(idx1, y < maxn[1])
    neigh[20, idx2] = k[idx2] + nrow + nplane
    idx2 = np.logical_and(idx1, x > minn[0])
    idx3 = np.logical_and(idx2, y < maxn[1])
    neigh[21, idx3] = k[idx3] - 1 + nrow + nplane
    neigh[22, idx2] = k[idx2] - 1 + nplane
    idx3 = np.logical_and(idx2, y > minn[1])
    neigh[23, idx3] = k[idx3] - 1 - nrow + nplane
    idx2 = np.logical_and(idx1, y > minn[1])
    neigh[24, idx2] = k[idx2] - nrow + nplane
    idx3 = np.logical_and(idx2, x < maxn[0])
    neigh[25, idx3] = k[idx3] + 1 - nrow + nplane
    # Restrict sources to volume of interest
    if volume_label is not None:
        try:
            import nibabel as nib
        except ImportError:
            raise ImportError("nibabel is required to read segmentation file.")
        logger.info('Selecting voxels from %s' % volume_label)
        # Read the segmentation data using nibabel
        mgz = nib.load(mri)
        mgz_data = mgz.get_data()
        # Get the numeric index for this volume label
        lut = _get_lut()
        vol_id = _get_lut_id(lut, volume_label, True)
        # Get indices for this volume label in voxel space
        vox_bool = mgz_data == vol_id
        # Get the 3 dimensional indices in voxel space
        vox_xyz = np.array(np.where(vox_bool)).T
        # Transform to RAS coordinates
        # (use tkr normalization or volume won't align with surface sources)
        trans = _get_mgz_header(mri)['vox2ras_tkr']
        # Convert transform from mm to m
        trans[:3] /= 1000.
        rr_voi = apply_trans(trans, vox_xyz)  # positions of VOI in RAS space
        # Filter out points too far from volume region voxels
        dists = _compute_nearest(rr_voi, sp['rr'], return_dists=True)[1]
        # Maximum distance from center of mass of a voxel to any of its corners
        maxdist = linalg.norm(trans[:3, :3].sum(0) / 2.)
        bads = np.where(dists > maxdist)[0]
        # Update source info
        sp['inuse'][bads] = False
        sp['vertno'] = np.where(sp['inuse'] > 0)[0]
        sp['nuse'] = len(sp['vertno'])
        sp['seg_name'] = volume_label
        sp['mri_file'] = mri
        # Update log
        logger.info('%d sources remaining after excluding sources too far '
                    'from VOI voxels', sp['nuse'])
    # Omit unused vertices from the neighborhoods
    logger.info('Adjusting the neighborhood info...')
    # remove non source-space points
    log_inuse = sp['inuse'] > 0
    neigh[:, np.logical_not(log_inuse)] = -1
    # remove these points from neigh
    vertno = np.where(log_inuse)[0]
    sp['vertno'] = vertno
    old_shape = neigh.shape
    neigh = neigh.ravel()
    checks = np.where(neigh >= 0)[0]
    removes = np.logical_not(np.in1d(checks, vertno))
    neigh[checks[removes]] = -1
    neigh.shape = old_shape
    neigh = neigh.T
    # Thought we would need this, but C code keeps -1 vertices, so we will:
    # neigh = [n[n >= 0] for n in enumerate(neigh[vertno])]
    sp['neighbor_vert'] = neigh
    # Set up the volume data (needed for creating the interpolation matrix)
    r0 = minn * grid
    voxel_size = grid * np.ones(3)
    ras = np.eye(3)
    sp['src_mri_t'] = _make_voxel_ras_trans(r0, ras, voxel_size)
    sp['vol_dims'] = maxn - minn + 1
    return sp
def _vol_vertex(width, height, jj, kk, pp):
return jj + width * kk + pp * (width * height)
def _get_mri_header(fname):
    """Read the header of an MRI volume via nibabel.

    Newer nibabel exposes the header as an attribute; very old releases
    only provide ``get_header()``, so fall back when the attribute is
    missing.
    """
    import nibabel as nib
    image = nib.load(fname)
    try:
        hdr = image.header
    except AttributeError:  # old nibabel
        hdr = image.get_header()
    return hdr
def _get_mgz_header(fname):
    """Adapted from nibabel to quickly extract header info.

    Reads only the leading fields of a FreeSurfer .mgz header and returns
    a dict with 'dims' (volume shape), 'vox2ras_tkr' (the tkregister
    voxel-to-RAS transform) and 'ras2vox' (scanner RAS to voxel).
    """
    if not fname.endswith('.mgz'):
        raise IOError('Filename must end with .mgz')
    # Leading fields of the big-endian MGH header: enough to recover the
    # dimensions, voxel sizes (delta), direction cosines (Mdc), and the
    # RAS coordinates of the volume center (Pxyz_c)
    header_dtd = [('version', '>i4'), ('dims', '>i4', (4,)),
                  ('type', '>i4'), ('dof', '>i4'), ('goodRASFlag', '>i2'),
                  ('delta', '>f4', (3,)), ('Mdc', '>f4', (3, 3)),
                  ('Pxyz_c', '>f4', (3,))]
    header_dtype = np.dtype(header_dtd)
    # .mgz is a gzip-compressed MGH file; read just the header bytes
    with GzipFile(fname, 'rb') as fid:
        hdr_str = fid.read(header_dtype.itemsize)
    header = np.ndarray(shape=(), dtype=header_dtype,
                        buffer=hdr_str)
    # dims: first three entries are the spatial shape (4th is n_frames)
    dims = header['dims'].astype(int)
    dims = dims[:3] if len(dims) == 4 else dims
    # vox2ras_tkr: the "tkregister" transform, which places the RAS origin
    # at the volume center
    delta = header['delta']
    ds = np.array(delta, float)
    ns = np.array(dims * ds) / 2.0
    v2rtkr = np.array([[-ds[0], 0, 0, ns[0]],
                       [0, 0, ds[2], -ns[2]],
                       [0, -ds[1], 0, ns[1]],
                       [0, 0, 0, 1]], dtype=np.float32)
    # ras2vox: invert the scanner voxel-to-RAS affine assembled from the
    # direction cosines (Mdc), voxel sizes, and RAS center (Pxyz_c)
    d = np.diag(delta)
    pcrs_c = dims / 2.0
    Mdc = header['Mdc'].T
    pxyz_0 = header['Pxyz_c'] - np.dot(Mdc, np.dot(d, pcrs_c))
    M = np.eye(4, 4)
    M[0:3, 0:3] = np.dot(Mdc, d)
    M[0:3, 3] = pxyz_0.T
    M = linalg.inv(M)
    header = dict(dims=dims, vox2ras_tkr=v2rtkr, ras2vox=M)
    return header
def _add_interpolator(s, mri_name, add_interpolator):
    """Compute a sparse matrix to interpolate the data into an MRI volume.

    Operates in place on source space ``s``: stores the MRI geometry
    transforms and, if ``add_interpolator`` is True, a CSR matrix of
    trilinear interpolation weights mapping source-space values onto the
    MRI voxel grid.
    """
    # extract transformation information from mri
    logger.info('Reading %s...' % mri_name)
    header = _get_mgz_header(mri_name)
    mri_width, mri_height, mri_depth = header['dims']
    s.update(dict(mri_width=mri_width, mri_height=mri_height,
                  mri_depth=mri_depth))
    # header transforms are in mm; source spaces use meters
    trans = header['vox2ras_tkr'].copy()
    trans[:3, :] /= 1000.0
    s['vox_mri_t'] = Transform('mri_voxel', 'mri', trans)  # ras_tkr
    trans = linalg.inv(np.dot(header['vox2ras_tkr'], header['ras2vox']))
    trans[:3, 3] /= 1000.0
    s['mri_ras_t'] = Transform('mri', 'ras', trans)  # ras
    s['mri_volume_name'] = mri_name
    nvox = mri_width * mri_height * mri_depth
    if not add_interpolator:
        # keep the expected shape but with no nonzero entries
        s['interpolator'] = sparse.csr_matrix((nvox, s['np']))
        return
    _print_coord_trans(s['src_mri_t'], 'Source space : ')
    _print_coord_trans(s['vox_mri_t'], 'MRI volume : ')
    _print_coord_trans(s['mri_ras_t'], 'MRI volume : ')
    #
    # Convert MRI voxels from destination (MRI volume) to source (volume
    # source space subset) coordinates
    #
    combo_trans = combine_transforms(s['vox_mri_t'],
                                     invert_transform(s['src_mri_t']),
                                     'mri_voxel', 'mri_voxel')
    combo_trans['trans'] = combo_trans['trans'].astype(np.float32)
    logger.info('Setting up interpolation...')
    # Loop over slices to save (lots of) memory
    # Note that it is the slowest incrementing index
    # This is equivalent to using mgrid and reshaping, but faster
    data = []
    indices = []
    # indptr will first hold per-row entry counts (8 per used voxel),
    # then be cumsum'ed into the CSR row-pointer array below
    indptr = np.zeros(nvox + 1, np.int32)
    for p in range(mri_depth):
        # voxel (j, k) coordinates of every voxel in this slice
        js = np.arange(mri_width, dtype=np.float32)
        js = np.tile(js[np.newaxis, :],
                     (mri_height, 1)).ravel()
        ks = np.arange(mri_height, dtype=np.float32)
        ks = np.tile(ks[:, np.newaxis],
                     (1, mri_width)).ravel()
        ps = np.empty((mri_height, mri_width), np.float32).ravel()
        ps.fill(p)
        r0 = np.c_[js, ks, ps]
        del js, ks, ps
        # Transform our vertices from their MRI space into our source space's
        # frame (this is labeled as FIFFV_MNE_COORD_MRI_VOXEL, but it's
        # really a subset of the entire volume!)
        r0 = apply_trans(combo_trans['trans'], r0)
        rn = np.floor(r0).astype(int)
        # keep only voxels whose 8-corner cell lies inside the source grid
        maxs = (s['vol_dims'] - 1)[np.newaxis, :]
        good = np.where(np.logical_and(np.all(rn >= 0, axis=1),
                                       np.all(rn < maxs, axis=1)))[0]
        rn = rn[good]
        r0 = r0[good]
        # now we take each MRI voxel *in this space*, and figure out how
        # to make its value the weighted sum of voxels in the volume source
        # space. This is a 3D weighting scheme based (presumably) on the
        # fact that we know we're interpolating from one volumetric grid
        # into another.
        jj = rn[:, 0]
        kk = rn[:, 1]
        pp = rn[:, 2]
        # the 8 source-grid corner vertices surrounding each voxel
        vss = np.empty((len(jj), 8), np.int32)
        width = s['vol_dims'][0]
        height = s['vol_dims'][1]
        jjp1 = jj + 1
        kkp1 = kk + 1
        ppp1 = pp + 1
        vss[:, 0] = _vol_vertex(width, height, jj, kk, pp)
        vss[:, 1] = _vol_vertex(width, height, jjp1, kk, pp)
        vss[:, 2] = _vol_vertex(width, height, jjp1, kkp1, pp)
        vss[:, 3] = _vol_vertex(width, height, jj, kkp1, pp)
        vss[:, 4] = _vol_vertex(width, height, jj, kk, ppp1)
        vss[:, 5] = _vol_vertex(width, height, jjp1, kk, ppp1)
        vss[:, 6] = _vol_vertex(width, height, jjp1, kkp1, ppp1)
        vss[:, 7] = _vol_vertex(width, height, jj, kkp1, ppp1)
        del jj, kk, pp, jjp1, kkp1, ppp1
        # keep voxels with at least one in-use source-grid corner
        uses = np.any(s['inuse'][vss], axis=1)
        # NOTE(review): this tests .size (empty array) rather than
        # uses.any(); an all-False `uses` still proceeds, harmlessly
        # appending empty arrays -- confirm whether .any() was intended
        if uses.size == 0:
            continue
        vss = vss[uses].ravel()  # vertex (col) numbers in csr matrix
        indices.append(vss)
        # each used MRI voxel row gets exactly 8 entries
        indptr[good[uses] + p * mri_height * mri_width + 1] = 8
        del vss
        # figure out weights for each vertex
        r0 = r0[uses]
        rn = rn[uses]
        del uses, good
        # fractional position of the voxel inside its source-grid cell
        xf = r0[:, 0] - rn[:, 0].astype(np.float32)
        yf = r0[:, 1] - rn[:, 1].astype(np.float32)
        zf = r0[:, 2] - rn[:, 2].astype(np.float32)
        omxf = 1.0 - xf
        omyf = 1.0 - yf
        omzf = 1.0 - zf
        # each entry in the concatenation corresponds to a row of vss
        # (standard trilinear weights, one per corner)
        data.append(np.array([omxf * omyf * omzf,
                              xf * omyf * omzf,
                              xf * yf * omzf,
                              omxf * yf * omzf,
                              omxf * omyf * zf,
                              xf * omyf * zf,
                              xf * yf * zf,
                              omxf * yf * zf], order='F').T.ravel())
        del xf, yf, zf, omxf, omyf, omzf
    # Compose the sparse matrix
    indptr = np.cumsum(indptr, out=indptr)
    indices = np.concatenate(indices)
    data = np.concatenate(data)
    s['interpolator'] = sparse.csr_matrix((data, indices, indptr),
                                          shape=(nvox, s['np']))
    logger.info(' %d/%d nonzero values [done]' % (len(data), nvox))
@verbose
def _filter_source_spaces(surf, limit, mri_head_t, src, n_jobs=1,
                          verbose=None):
    """Remove all source space points closer than a given limit (in mm).

    Operates in place on each source space in ``src``: points outside
    ``surf`` (typically the inner skull), and -- when ``limit > 0`` --
    points closer than ``limit`` mm to it, are marked unused.
    """
    if src[0]['coord_frame'] == FIFF.FIFFV_COORD_HEAD and mri_head_t is None:
        raise RuntimeError('Source spaces are in head coordinates and no '
                           'coordinate transform was provided!')
    # How close are the source points to the surface?
    out_str = 'Source spaces are in '
    if src[0]['coord_frame'] == FIFF.FIFFV_COORD_HEAD:
        inv_trans = invert_transform(mri_head_t)
        out_str += 'head coordinates.'
    elif src[0]['coord_frame'] == FIFF.FIFFV_COORD_MRI:
        out_str += 'MRI coordinates.'
    else:
        out_str += 'unknown (%d) coordinates.' % src[0]['coord_frame']
    logger.info(out_str)
    out_str = 'Checking that the sources are inside the bounding surface'
    if limit > 0.0:
        out_str += ' and at least %6.1f mm away' % (limit)
    logger.info(out_str + ' (will take a few...)')
    for s in src:
        vertno = np.where(s['inuse'])[0]  # can't trust s['vertno'] this deep
        # Convert all points here first to save time
        r1s = s['rr'][vertno]
        if s['coord_frame'] == FIFF.FIFFV_COORD_HEAD:
            # surface is in MRI coords, so bring the points to MRI first
            r1s = apply_trans(inv_trans['trans'], r1s)
        # Check that the source is inside surface (often the inner skull)
        outside = _points_outside_surface(r1s, surf, n_jobs)
        omit_outside = np.sum(outside)
        # vectorized nearest using BallTree (or cdist)
        omit = 0
        if limit > 0.0:
            # also drop inside points that hug the surface too closely
            dists = _compute_nearest(surf['rr'], r1s, return_dists=True)[1]
            close = np.logical_and(dists < limit / 1000.0,
                                   np.logical_not(outside))
            omit = np.sum(close)
            outside = np.logical_or(outside, close)
        s['inuse'][vertno[outside]] = False
        s['nuse'] -= (omit + omit_outside)
        s['vertno'] = np.where(s['inuse'])[0]
        if omit_outside > 0:
            extras = [omit_outside]
            extras += ['s', 'they are'] if omit_outside > 1 else ['', 'it is']
            logger.info('%d source space point%s omitted because %s '
                        'outside the inner skull surface.' % tuple(extras))
        if omit > 0:
            extras = [omit]
            # fixed: pluralization depends on `omit`, not `omit_outside`
            extras += ['s'] if omit > 1 else ['']
            extras += [limit]
            logger.info('%d source space point%s omitted because of the '
                        '%6.1f-mm distance limit.' % tuple(extras))
        # Adjust the patch inds as well if necessary
        if omit + omit_outside > 0:
            _adjust_patch_info(s)
    logger.info('Thank you for waiting.')
@verbose
def _adjust_patch_info(s, verbose=None):
    """Recompute patch information in place after vertices were omitted."""
    if s.get('patch_inds') is None:
        # no patch information present -- nothing to adjust
        return
    if s['nearest'] is None:
        # This shouldn't happen, but if it does, we can probably come
        # up with a more clever solution
        raise RuntimeError('Cannot adjust patch information properly, '
                           'please contact the mne-python developers')
    _add_patch_info(s)
@verbose
def _points_outside_surface(rr, surf, n_jobs=1, verbose=None):
    """Check whether points are outside a surface.

    Parameters
    ----------
    rr : ndarray
        Nx3 array of points to check.
    surf : dict
        Surface with entries "rr" and "tris".

    Returns
    -------
    outside : ndarray
        1D logical array of size N for which points are outside the surface.
    """
    points = np.atleast_2d(rr)
    assert points.shape[1] == 3
    assert n_jobs > 0
    # Sum the solid angles subtended by the surface triangles at each
    # point; the triangles are split across jobs and summed afterwards.
    parallel, run_solids, _ = parallel_func(_get_solids, n_jobs)
    tri_chunks = np.array_split(surf['tris'], n_jobs)
    angle_sums = parallel(run_solids(surf['rr'][chunk], points)
                          for chunk in tri_chunks)
    total = np.sum(angle_sums, axis=0)
    # a point inside a closed surface sees a winding number of 1
    winding = total / (2 * np.pi)
    return np.abs(winding - 1.0) > 1e-5
@verbose
def _ensure_src(src, kind=None, verbose=None):
    """Coerce ``src`` into a SourceSpaces instance, reading it if needed."""
    if isinstance(src, string_types):
        # treat strings as a path to a saved source space
        if not op.isfile(src):
            raise IOError('Source space file "%s" not found' % src)
        logger.info('Reading %s...' % src)
        src = read_source_spaces(src, verbose=False)
    if not isinstance(src, SourceSpaces):
        raise ValueError('src must be a string or instance of SourceSpaces')
    if kind == 'surf':
        # restrict to the two hemispheric surface source spaces
        surf_spaces = [s for s in src if s['type'] == 'surf']
        if len(surf_spaces) != 2 or len(src) != 2:
            raise ValueError('Source space must contain exactly two '
                             'surfaces.')
        src = surf_spaces
    return src
def _ensure_src_subject(src, subject):
src_subject = src[0].get('subject_his_id', None)
if subject is None:
subject = src_subject
if subject is None:
raise ValueError('source space is too old, subject must be '
'provided')
elif src_subject is not None and subject != src_subject:
raise ValueError('Mismatch between provided subject "%s" and subject '
'name "%s" in the source space'
% (subject, src_subject))
return subject
@verbose
def add_source_space_distances(src, dist_limit=np.inf, n_jobs=1, verbose=None):
    """Compute inter-source distances along the cortical surface.

    This function will also try to add patch info for the source space.
    It will only occur if the ``dist_limit`` is sufficiently high that all
    points on the surface are within ``dist_limit`` of a point in the
    source space.

    Parameters
    ----------
    src : instance of SourceSpaces
        The source spaces to compute distances for.
    dist_limit : float
        The upper limit of distances to include (in meters).
        Note: if limit < np.inf, scipy > 0.13 (bleeding edge as of
        10/2013) must be installed.
    n_jobs : int
        Number of jobs to run in parallel. Will only use (up to) as many
        cores as there are source spaces.
    verbose : bool, str, int, or None
        If not None, override default verbose level (see :func:`mne.verbose`
        and :ref:`Logging documentation <tut_logging>` for more).

    Returns
    -------
    src : instance of SourceSpaces
        The original source spaces, with distance information added.
        The distances are stored in src[n]['dist'].
        Note: this function operates in-place.

    Notes
    -----
    Requires scipy >= 0.11 (> 0.13 for `dist_limit < np.inf`).
    This function can be memory- and CPU-intensive. We recommend computing
    distances once per source space and then saving the source space to
    disk, as the computed distances will automatically be stored along with
    the source space data for future use.
    """
    from scipy.sparse.csgraph import dijkstra
    n_jobs = check_n_jobs(n_jobs)
    src = _ensure_src(src)
    if not np.isscalar(dist_limit):
        raise ValueError('limit must be a scalar, got %s' % repr(dist_limit))
    if not check_version('scipy', '0.11'):
        # NOTE(review): this message is missing a closing ')'.
        raise RuntimeError('scipy >= 0.11 must be installed (or > 0.13 '
                           'if dist_limit < np.inf')
    if not all(s['type'] == 'surf' for s in src):
        raise RuntimeError('Currently all source spaces must be of surface '
                           'type')
    if dist_limit < np.inf:
        # can't do introspection on dijkstra function because it's Cython,
        # so we'll just try quickly here
        try:
            dijkstra(sparse.csr_matrix(np.zeros((2, 2))), limit=1.0)
        except TypeError:
            raise RuntimeError('Cannot use "limit < np.inf" unless scipy '
                               '> 0.13 is installed')
    parallel, p_fun, _ = parallel_func(_do_src_distances, n_jobs)
    min_dists = list()
    min_idxs = list()
    logger.info('Calculating source space distances (limit=%s mm)...'
                % (1000 * dist_limit))
    for s in src:
        # adjacency of the full surface mesh, weighted by edge length
        connectivity = mesh_dist(s['tris'], s['rr'])
        # split the source vertices across jobs; each worker returns
        # (distance rows, nearest-source index, nearest-source distance)
        d = parallel(p_fun(connectivity, s['vertno'], r, dist_limit)
                     for r in np.array_split(np.arange(len(s['vertno'])),
                                             n_jobs))
        # deal with indexing so we can add patch info
        min_idx = np.array([dd[1] for dd in d])
        min_dist = np.array([dd[2] for dd in d])
        midx = np.argmin(min_dist, axis=0)
        range_idx = np.arange(len(s['rr']))
        min_dist = min_dist[midx, range_idx]
        min_idx = min_idx[midx, range_idx]
        min_dists.append(min_dist)
        min_idxs.append(min_idx)
        # now actually deal with distances, convert to sparse representation
        d = np.concatenate([dd[0] for dd in d]).ravel()  # already float32
        idx = d > 0
        d = d[idx]
        i, j = np.meshgrid(s['vertno'], s['vertno'])
        i = i.ravel()[idx]
        j = j.ravel()[idx]
        d = sparse.csr_matrix((d, (i, j)),
                              shape=(s['np'], s['np']), dtype=np.float32)
        s['dist'] = d
        s['dist_limit'] = np.array([dist_limit], np.float32)
    # Let's see if our distance was sufficient to allow for patch info
    if not any(np.any(np.isinf(md)) for md in min_dists):
        # Patch info can be added!
        for s, min_dist, min_idx in zip(src, min_dists, min_idxs):
            s['nearest'] = min_idx
            s['nearest_dist'] = min_dist
            _add_patch_info(s)
    else:
        logger.info('Not adding patch information, dist_limit too small')
    return src
def _do_src_distances(con, vertno, run_inds, limit):
"""Compute source space distances in chunks."""
from scipy.sparse.csgraph import dijkstra
if limit < np.inf:
func = partial(dijkstra, limit=limit)
else:
func = dijkstra
chunk_size = 20 # save memory by chunking (only a little slower)
lims = np.r_[np.arange(0, len(run_inds), chunk_size), len(run_inds)]
n_chunks = len(lims) - 1
# eventually we want this in float32, so save memory by only storing 32-bit
d = np.empty((len(run_inds), len(vertno)), np.float32)
min_dist = np.empty((n_chunks, con.shape[0]))
min_idx = np.empty((n_chunks, con.shape[0]), np.int32)
range_idx = np.arange(con.shape[0])
for li, (l1, l2) in enumerate(zip(lims[:-1], lims[1:])):
idx = vertno[run_inds[l1:l2]]
out = func(con, indices=idx)
midx = np.argmin(out, axis=0)
min_idx[li] = idx[midx]
min_dist[li] = out[midx, range_idx]
d[l1:l2] = out[:, vertno]
midx = np.argmin(min_dist, axis=0)
min_dist = min_dist[midx, range_idx]
min_idx = min_idx[midx, range_idx]
d[d == np.inf] = 0 # scipy will give us np.inf for uncalc. distances
return d, min_idx, min_dist
def get_volume_labels_from_aseg(mgz_fname, return_colors=False):
    """Return a list of names and colors of segmented volumes.

    Parameters
    ----------
    mgz_fname : str
        Filename to read. Typically aseg.mgz or some variant in the
        freesurfer pipeline.
    return_colors : bool
        If True returns also the labels colors.

    Returns
    -------
    label_names : list of str
        The names of segmented volumes included in this mgz file.
    label_colors : list
        The RGBA colors of the labels included in this mgz file.
        Only returned when ``return_colors`` is True.

    Notes
    -----
    .. versionadded:: 0.9.0
    """
    import nibabel as nib
    # Read the mgz file using nibabel.
    # NOTE(review): get_data() is deprecated in recent nibabel in favor of
    # get_fdata(); kept as-is because get_fdata() returns floats, which
    # would change the np.unique() values below -- confirm before migrating.
    mgz_data = nib.load(mgz_fname).get_data()
    lut = _get_lut()
    # Hoist the (full-volume, potentially expensive) np.unique() call -- it
    # was previously computed twice -- and do a single LUT row lookup per
    # label id instead of one boolean mask per channel.
    label_ids = np.unique(mgz_data)
    label_names = []
    label_colors = []
    for ii in label_ids:
        row = lut[lut['id'] == ii]
        label_names.append(row['name'][0])
        label_colors.append([row['R'][0], row['G'][0], row['B'][0],
                             row['A'][0]])
    # Sort both outputs by label name for a deterministic order.
    order = np.argsort(label_names)
    label_names = [label_names[k] for k in order]
    label_colors = [label_colors[k] for k in order]
    if return_colors:
        return label_names, label_colors
    else:
        return label_names
def get_volume_labels_from_src(src, subject, subjects_dir):
    """Return a list of Label of segmented volumes included in the src space.

    Parameters
    ----------
    src : instance of SourceSpaces
        The source space containing the volume regions.
    subject : str
        Subject name.
    subjects_dir : str
        Freesurfer folder of the subjects.

    Returns
    -------
    labels_aseg : list of Label
        List of Label of segmented volumes included in src space.
    """
    import os.path as op
    import numpy as np
    from . import Label
    from . import get_volume_labels_from_aseg
    # Read the aseg file
    aseg_fname = op.join(subjects_dir, subject, 'mri', 'aseg.mgz')
    if not op.isfile(aseg_fname):
        raise IOError('aseg file "%s" not found' % aseg_fname)
    all_labels_aseg = get_volume_labels_from_aseg(aseg_fname,
                                                  return_colors=True)
    # The first two source spaces are expected to be surfaces; everything
    # from index 2 on must be a volume source space.
    if len(src) < 2:
        raise ValueError('No vol src space in src')
    if any(np.any(s['type'] != 'vol') for s in src[2:]):
        raise ValueError('source spaces have to be of vol type')
    labels_aseg = list()
    for nr in range(2, len(src)):
        vertices = src[nr]['vertno']
        pos = src[nr]['rr'][src[nr]['vertno'], :]
        roi_str = src[nr]['seg_name']
        # BUG FIX: 'color' was previously left undefined (NameError at the
        # Label() call below) whenever the segmentation name was not found
        # in the aseg lookup table; fall back to no color instead.
        color = None
        try:
            ind = all_labels_aseg[0].index(roi_str)
            color = np.array(all_labels_aseg[1][ind]) / 255
        except ValueError:
            pass
        # Derive the hemisphere from the FreeSurfer segmentation name.
        if 'left' in roi_str.lower():
            hemi = 'lh'
            roi_str = roi_str.replace('Left-', '') + '-lh'
        elif 'right' in roi_str.lower():
            hemi = 'rh'
            roi_str = roi_str.replace('Right-', '') + '-rh'
        else:
            hemi = 'both'
        label = Label(vertices=vertices, pos=pos, hemi=hemi,
                      name=roi_str, color=color,
                      subject=subject)
        labels_aseg.append(label)
    return labels_aseg
def _get_hemi(s):
    """Return (hemi, index, surface id) for a surface source space."""
    if s['type'] != 'surf':
        raise RuntimeError('Only surface source spaces supported')
    # Map the FIFF surface id onto (hemisphere string, hemisphere index).
    id_to_hemi = {FIFF.FIFFV_MNE_SURF_LEFT_HEMI: ('lh', 0),
                  FIFF.FIFFV_MNE_SURF_RIGHT_HEMI: ('rh', 1)}
    try:
        hemi, idx = id_to_hemi[s['id']]
    except KeyError:
        raise ValueError('unknown surface ID %s' % s['id'])
    return hemi, idx, s['id']
def _get_vertex_map_nn(fro_src, subject_from, subject_to, hemi, subjects_dir,
                       to_neighbor_tri=None):
    """Get a nearest-neigbor vertex match for a given hemi src.

    The to_neighbor_tri can optionally be passed in to avoid recomputation
    if it's already available.  Returns an array mapping each 'from' vertex
    number to a unique 'to' vertex number.
    """
    # adapted from mne_make_source_space.c, knowing accurate=False (i.e.
    # nearest-neighbor mode should be used)
    logger.info('Mapping %s %s -> %s (nearest neighbor)...'
                % (hemi, subject_from, subject_to))
    # Load the spherical registration surfaces for both subjects.
    regs = [op.join(subjects_dir, s, 'surf', '%s.sphere.reg' % hemi)
            for s in (subject_from, subject_to)]
    reg_fro, reg_to = [read_surface(r, return_dict=True)[-1] for r in regs]
    if to_neighbor_tri is not None:
        reg_to['neighbor_tri'] = to_neighbor_tri
    if 'neighbor_tri' not in reg_to:
        reg_to['neighbor_tri'] = _triangle_neighbors(reg_to['tris'],
                                                     reg_to['np'])
    # Track which destination vertices are already taken so the mapping
    # stays one-to-one.
    morph_inuse = np.zeros(len(reg_to['rr']), bool)
    best = np.zeros(fro_src['np'], int)
    ones = _compute_nearest(reg_to['rr'], reg_fro['rr'][fro_src['vertno']])
    for v, one in zip(fro_src['vertno'], ones):
        # if it were actually a proper morph map, we would do this, but since
        # we know it's nearest neighbor list, we don't need to:
        # this_mm = mm[v]
        # one = this_mm.indices[this_mm.data.argmax()]
        if morph_inuse[one]:
            # Try the nearest neighbors
            neigh = _get_surf_neighbors(reg_to, one)  # on demand calc
            was = one
            one = neigh[np.where(~morph_inuse[neigh])[0]]
            if len(one) == 0:
                # NOTE(review): 'one' is an empty array at this point, so the
                # '%d' formatting below would itself fail; 'was' is probably
                # the intended value -- confirm upstream.
                raise RuntimeError('vertex %d would be used multiple times.'
                                   % one)
            one = one[0]
            logger.info('Source space vertex moved from %d to %d because of '
                        'double occupation.' % (was, one))
        best[v] = one
        morph_inuse[one] = True
    return best
@verbose
def morph_source_spaces(src_from, subject_to, surf='white', subject_from=None,
                        subjects_dir=None, verbose=None):
    """Morph an existing source space to a different subject.

    .. warning:: This can be used in place of morphing source estimates for
                 multiple subjects, but there may be consequences in terms
                 of dipole topology.

    Parameters
    ----------
    src_from : instance of SourceSpaces
        Surface source spaces to morph.
    subject_to : str
        The destination subject.
    surf : str
        The brain surface to use for the new source space.
    subject_from : str | None
        The "from" subject. For most source spaces this shouldn't need
        to be provided, since it is stored in the source space itself.
    subjects_dir : str | None
        Path to SUBJECTS_DIR if it is not set in the environment.
    verbose : bool | str | int | None
        If not None, override default verbose level (see :func:`mne.verbose`
        and :ref:`Logging documentation <tut_logging>` for more).

    Returns
    -------
    src : instance of SourceSpaces
        The morphed source spaces.

    Notes
    -----
    .. versionadded:: 0.10.0
    """
    # adapted from mne_make_source_space.c
    src_from = _ensure_src(src_from)
    subject_from = _ensure_src_subject(src_from, subject_from)
    subjects_dir = get_subjects_dir(subjects_dir, raise_error=True)
    src_out = list()
    for fro in src_from:
        hemi, idx, id_ = _get_hemi(fro)
        # Read the destination subject's surface for this hemisphere.
        to = op.join(subjects_dir, subject_to, 'surf', '%s.%s' % (hemi, surf,))
        logger.info('Reading destination surface %s' % (to,))
        to = read_surface(to, return_dict=True, verbose=False)[-1]
        complete_surface_info(to, copy=False)
        # Now we morph the vertices to the destination
        # The C code does something like this, but with a nearest-neighbor
        # mapping instead of the weighted one::
        #
        #     >>> mm = read_morph_map(subject_from, subject_to, subjects_dir)
        #
        # Here we use a direct NN calculation, since picking the max from the
        # existing morph map (which naively one might expect to be equivalent)
        # differs for ~3% of vertices.
        best = _get_vertex_map_nn(fro, subject_from, subject_to, hemi,
                                  subjects_dir, to['neighbor_tri'])
        # Drop derived fields before rebuilding the dict as a source space.
        for key in ('neighbor_tri', 'tri_area', 'tri_cent', 'tri_nn',
                    'use_tris'):
            del to[key]
        to['vertno'] = np.sort(best[fro['vertno']])
        to['inuse'] = np.zeros(len(to['rr']), int)
        to['inuse'][to['vertno']] = True
        to['use_tris'] = best[fro['use_tris']]
        # rr is converted from mm to meters here (/ 1000.)
        to.update(nuse=len(to['vertno']), nuse_tri=len(to['use_tris']),
                  nearest=None, nearest_dist=None, patch_inds=None, pinfo=None,
                  dist=None, id=id_, dist_limit=None, type='surf',
                  coord_frame=FIFF.FIFFV_COORD_MRI, subject_his_id=subject_to,
                  rr=to['rr'] / 1000.)
        src_out.append(to)
        logger.info('[done]\n')
    info = dict(working_dir=os.getcwd(),
                command_line=_get_call_line(in_verbose=True))
    return SourceSpaces(src_out, info=info)
@verbose
def _get_morph_src_reordering(vertices, src_from, subject_from, subject_to,
                              subjects_dir=None, verbose=None):
    """Get the reordering indices for a morphed source space.

    Parameters
    ----------
    vertices : list
        The vertices for the left and right hemispheres.
    src_from : instance of SourceSpaces
        The original source space.
    subject_from : str
        The source subject.
    subject_to : str
        The destination subject.
    subjects_dir : string, or None
        Path to SUBJECTS_DIR if it is not set in the environment.
    verbose : bool, str, int, or None
        If not None, override default verbose level (see :func:`mne.verbose`
        and :ref:`Logging documentation <tut_logging>` for more).

    Returns
    -------
    data_idx : ndarray, shape (n_vertices,)
        The array used to reshape the data.
    from_vertices : list
        The right and left hemisphere vertex numbers for the "from" subject.
    """
    subjects_dir = get_subjects_dir(subjects_dir, raise_error=True)
    from_vertices = list()
    data_idxs = list()
    offset = 0
    for ii, hemi in enumerate(('lh', 'rh')):
        # Get the mapping from the original source space to the destination
        # subject's surface vertex numbers
        best = _get_vertex_map_nn(src_from[ii], subject_from, subject_to,
                                  hemi, subjects_dir)
        full_mapping = best[src_from[ii]['vertno']]
        # Tragically, we might not have all of our vertno left (e.g. because
        # some are omitted during fwd calc), so we must do some indexing magic:
        # From all vertices, a subset could be chosen by fwd calc:
        used_vertices = np.in1d(full_mapping, vertices[ii])
        from_vertices.append(src_from[ii]['vertno'][used_vertices])
        remaining_mapping = full_mapping[used_vertices]
        if not np.array_equal(np.sort(remaining_mapping), vertices[ii]) or \
                not np.in1d(vertices[ii], full_mapping).all():
            raise RuntimeError('Could not map vertices, perhaps the wrong '
                               'subject "%s" was provided?' % subject_from)
        # And our data have been implicitly remapped by the forced ascending
        # vertno order in source spaces
        implicit_mapping = np.argsort(remaining_mapping)  # happens to data
        data_idx = np.argsort(implicit_mapping)  # to reverse the mapping
        data_idx += offset  # hemisphere offset
        data_idxs.append(data_idx)
        offset += len(implicit_mapping)
    data_idx = np.concatenate(data_idxs)
    # this one is really just a sanity check for us, should never be violated
    # by users
    assert np.array_equal(np.sort(data_idx),
                          np.arange(sum(len(v) for v in vertices)))
    return data_idx, from_vertices
def _compare_source_spaces(src0, src1, mode='exact', nearest=True,
                           dist_tol=1.5e-3):
    """Compare two source spaces.

    Note: this function is also used by forward/tests/test_make_forward.py

    ``mode`` is 'exact' or contains 'approx' (optionally with 'nointerp'
    appended to skip the interpolator comparison).  Raises through the
    assertion helpers on any mismatch.
    """
    # NOTE(review): nose is unmaintained; consider plain asserts or pytest.
    from nose.tools import assert_equal, assert_true
    from numpy.testing import assert_allclose, assert_array_equal
    from scipy.spatial.distance import cdist
    if mode != 'exact' and 'approx' not in mode:  # 'nointerp' can be appended
        raise RuntimeError('unknown mode %s' % mode)
    for si, (s0, s1) in enumerate(zip(src0, src1)):
        # first check the keys
        a, b = set(s0.keys()), set(s1.keys())
        assert_equal(a, b, str(a ^ b))
        # scalar fields must always match exactly
        for name in ['nuse', 'ntri', 'np', 'type', 'id']:
            assert_equal(s0[name], s1[name], name)
        for name in ['subject_his_id']:
            if name in s0 or name in s1:
                assert_equal(s0[name], s1[name], name)
        for name in ['interpolator']:
            if name in s0 or name in s1:
                diffs = (s0['interpolator'] - s1['interpolator']).data
                if len(diffs) > 0 and 'nointerp' not in mode:
                    # 5%
                    assert_true(np.sqrt(np.mean(diffs ** 2)) < 0.10, name)
        for name in ['nn', 'rr', 'nuse_tri', 'coord_frame', 'tris']:
            if s0[name] is None:
                assert_true(s1[name] is None, name)
            else:
                if mode == 'exact':
                    assert_array_equal(s0[name], s1[name], name)
                else:  # 'approx' in mode
                    atol = 1e-3 if name == 'nn' else 1e-4
                    assert_allclose(s0[name], s1[name], rtol=1e-3, atol=atol,
                                    err_msg=name)
        for name in ['seg_name']:
            if name in s0 or name in s1:
                assert_equal(s0[name], s1[name], name)
        # these fields will exist if patch info was added
        if nearest:
            for name in ['nearest', 'nearest_dist', 'patch_inds']:
                if s0[name] is None:
                    assert_true(s1[name] is None, name)
                else:
                    assert_array_equal(s0[name], s1[name])
            for name in ['pinfo']:
                if s0[name] is None:
                    assert_true(s1[name] is None, name)
                else:
                    assert_true(len(s0[name]) == len(s1[name]), name)
                    for p1, p2 in zip(s0[name], s1[name]):
                        assert_true(all(p1 == p2), name)
        if mode == 'exact':
            for name in ['inuse', 'vertno', 'use_tris']:
                assert_array_equal(s0[name], s1[name], err_msg=name)
            for name in ['dist_limit']:
                assert_true(s0[name] == s1[name], name)
            for name in ['dist']:
                if s0[name] is not None:
                    assert_equal(s1[name].shape, s0[name].shape)
                    assert_true(len((s0['dist'] - s1['dist']).data) == 0)
        else:  # 'approx' in mode:
            # deal with vertno, inuse, and use_tris carefully
            for ii, s in enumerate((s0, s1)):
                assert_array_equal(s['vertno'], np.where(s['inuse'])[0],
                                   'src%s[%s]["vertno"] != '
                                   'np.where(src%s[%s]["inuse"])[0]'
                                   % (ii, si, ii, si))
            assert_equal(len(s0['vertno']), len(s1['vertno']))
            agreement = np.mean(s0['inuse'] == s1['inuse'])
            assert_true(agreement >= 0.99, "%s < 0.99" % agreement)
            if agreement < 1.0:
                # make sure mismatched vertno are within 1.5mm
                v0 = np.setdiff1d(s0['vertno'], s1['vertno'])
                v1 = np.setdiff1d(s1['vertno'], s0['vertno'])
                dists = cdist(s0['rr'][v0], s1['rr'][v1])
                assert_allclose(np.min(dists, axis=1), np.zeros(len(v0)),
                                atol=dist_tol, err_msg='mismatched vertno')
            if s0['use_tris'] is not None:  # for "spacing"
                assert_array_equal(s0['use_tris'].shape, s1['use_tris'].shape)
            else:
                assert_true(s1['use_tris'] is None)
            # NOTE(review): this runs even when use_tris is None; it then
            # compares None == None, which trivially passes.
            assert_true(np.mean(s0['use_tris'] == s1['use_tris']) > 0.99)
    # The above "if s0[name] is not None" can be removed once the sample
    # dataset is updated to have a source space with distance info
    for name in ['working_dir', 'command_line']:
        if mode == 'exact':
            assert_equal(src0.info[name], src1.info[name])
        else:  # 'approx' in mode:
            if name in src0.info:
                assert_true(name in src1.info, '"%s" missing' % name)
            else:
                assert_true(name not in src1.info,
                            '"%s" should not exist' % name)
| 39.173394
| 79
| 0.577169
|
4a0ac0f47a2d3a5fb4a4fd02b7b5b577afe711c8
| 642
|
py
|
Python
|
src/test_data.py
|
dadosjusbr/coletor-mprn
|
f6d0507ae59d644d46fb34a24fa5666ca65e1a31
|
[
"MIT"
] | 1
|
2021-11-23T11:58:15.000Z
|
2021-11-23T11:58:15.000Z
|
src/test_data.py
|
marcos-inja/coletor-mprn
|
5a78a2ccefd6e88b9d297b1455eff90aa6c33a1f
|
[
"MIT"
] | null | null | null |
src/test_data.py
|
marcos-inja/coletor-mprn
|
5a78a2ccefd6e88b9d297b1455eff90aa6c33a1f
|
[
"MIT"
] | 1
|
2021-12-02T12:22:16.000Z
|
2021-12-02T12:22:16.000Z
|
import unittest
from data import load
# Fixture spreadsheets (ODS) expected to exist under src/output_test; the
# test below exercises the failure path when they cannot be validated.
file_names = [
    "src/output_test/membros-ativos-contracheque-02-2020.ods",
    "src/output_test/membros-ativos-verbas-indenizatorias-02-2020.ods",
]
class TestData(unittest.TestCase):
    def test_validate_existence(self):
        # The loader must exit with STATUS_DATA_UNAVAILABLE when validating
        # data whose spreadsheets were removed during processing.
        STATUS_DATA_UNAVAILABLE = 4
        with self.assertRaises(SystemExit) as ctx:
            crawler_data = load(file_names, '2021', '01', 'src/output_test')
            crawler_data.validate()
        self.assertEqual(ctx.exception.code, STATUS_DATA_UNAVAILABLE)
# Allow running this test module directly: python src/test_data.py
if __name__ == "__main__":
    unittest.main()
| 27.913043
| 71
| 0.697819
|
4a0ac39d831b60af400ee42ea8228d4744184354
| 409
|
py
|
Python
|
src/ethereum_test/common.py
|
lightclient/test-filler
|
f8b8e38c62a7682c9e08ee83cecf71d2b10fc5fc
|
[
"MIT"
] | null | null | null |
src/ethereum_test/common.py
|
lightclient/test-filler
|
f8b8e38c62a7682c9e08ee83cecf71d2b10fc5fc
|
[
"MIT"
] | 1
|
2021-11-27T06:07:47.000Z
|
2021-11-27T06:07:47.000Z
|
src/ethereum_test/common.py
|
lightclient/test-filler
|
f8b8e38c62a7682c9e08ee83cecf71d2b10fc5fc
|
[
"MIT"
] | null | null | null |
"""
Common values used in Ethereum tests.
"""
TestPrivateKey = (
"0x45a915e4d060149eb4365960e6a7a45f334393093061116b197e3240065ff2d8"
)
TestAddress = "0xa94f5374fce5edbc8e2a8697c15331677e6ebf0b"
AddrAA = "0x00000000000000000000000000000000000000aa"
AddrBB = "0x00000000000000000000000000000000000000bb"
EmptyTrieRoot = "0x56e81f171bcc55a6ff8345e692c0f86e5b48e01b996cadc001622fb5e363b421" # noqa: E501
| 29.214286
| 98
| 0.848411
|
4a0ac4cb7accc71faeccb2cb97202fec89fd0b71
| 2,648
|
py
|
Python
|
src/custodians/image.py
|
patrickbcullen/aws-custodians
|
07e34d0df7d46ccb67d1d1f42ddde5cd3304a4dd
|
[
"MIT"
] | null | null | null |
src/custodians/image.py
|
patrickbcullen/aws-custodians
|
07e34d0df7d46ccb67d1d1f42ddde5cd3304a4dd
|
[
"MIT"
] | null | null | null |
src/custodians/image.py
|
patrickbcullen/aws-custodians
|
07e34d0df7d46ccb67d1d1f42ddde5cd3304a4dd
|
[
"MIT"
] | null | null | null |
from custodians.util import age_in_days, flatten_dict
import logging
import dateutil.parser
class ImageCustodian(object):
    """Deregisters AMIs that are older than a retention limit, not in use by
    any instance, and whose names match the given substring filters."""

    def __init__(self, aws):
        # 'aws' is an adapter exposing describe_images / describe_instances /
        # deregister_image / tags_to_dict (see the calling code for its shape).
        self.aws = aws
        self.logger = logging.getLogger(self.__class__.__name__)

    def cleanup(self, dryrun, region, max_days, name_filters=None):
        """Deregister unused images older than ``max_days`` in ``region``.

        dryrun: if True, only log what would have been deleted.
        name_filters: optional substrings; an image is only considered when
        at least one filter matches its name (empty list matches all).
        """
        # BUG FIX: the default was previously a mutable list literal.
        name_filters = [] if name_filters is None else name_filters
        try:
            self.logger.info("Max retention of unsued images %dd" % max_days)
            in_use_images = self.in_use_images(region)
            for image in self.aws.describe_images(region):
                tag_info = flatten_dict(self.aws.tags_to_dict(image.get('Tags', {})))
                create_time = dateutil.parser.parse(image['CreationDate'])
                days_old = age_in_days(create_time)
                image_id = image['ImageId']
                name = image['Name']
                if not self.name_filter_match(name_filters, name):
                    self.logger.info("Skipping image because none of name filters [%s] are a substring of '%s' %s %s" %
                                     (','.join(name_filters), name, image_id, tag_info))
                    # BUG FIX: previously fell through after logging "Skipping"
                    # and could still delete an image whose name matched no
                    # filter; skip it for real.
                    continue
                if days_old >= max_days:
                    self.cleanup_unused_image(dryrun, region, in_use_images, image_id, days_old, name, tag_info)
                else:
                    self.logger.info("Keeping unused image because age of %sd < %sd %s '%s' %s" % (days_old, max_days, image_id, name, tag_info))
        except Exception as ex:
            # Best-effort custodian: log the failure and carry on.
            self.logger.error("%s: %s" % (self.__class__.__name__, ex))

    def name_filter_match(self, name_filters, name):
        """Return True when no filters are given or any filter is a
        substring of ``name``."""
        if len(name_filters) == 0:
            return True
        for name_filter in name_filters:
            if name_filter in name:
                return True
        return False

    def cleanup_unused_image(self, dryrun, region, in_use_images, image_id, days_old, name, tag_info):
        """Deregister one image unless an instance is still using it."""
        instance_id = in_use_images.get(image_id)
        if instance_id:
            self.logger.info("Skipping image in use by %s (%dd) %s '%s' %s" % (instance_id, days_old, image_id, name, tag_info))
            return
        if not dryrun:
            self.logger.info("Deleting (%dd) %s '%s' %s" % (days_old, image_id, name, tag_info))
            self.aws.deregister_image(region, image_id)
        else:
            self.logger.info("Would have deleted (%dd) %s '%s' %s" % (days_old, image_id, name, tag_info))

    def in_use_images(self, region):
        """Map ImageId -> InstanceId for every instance in the region."""
        in_use_images = {}
        for instance in self.aws.describe_instances(region):
            in_use_images[instance['ImageId']] = instance['InstanceId']
        return in_use_images
| 40.121212
| 145
| 0.601964
|
4a0ac4f649657c359cafe7169e5613a3b24027b2
| 7,658
|
py
|
Python
|
contrib/devtools/update-translations.py
|
ilixom/ilixomtest
|
64794641759027f6531ea0c12ad83d3911ecbb12
|
[
"MIT"
] | null | null | null |
contrib/devtools/update-translations.py
|
ilixom/ilixomtest
|
64794641759027f6531ea0c12ad83d3911ecbb12
|
[
"MIT"
] | null | null | null |
contrib/devtools/update-translations.py
|
ilixom/ilixomtest
|
64794641759027f6531ea0c12ad83d3911ecbb12
|
[
"MIT"
] | null | null | null |
#!/usr/bin/python
# Copyright (c) 2014 Wladimir J. van der Laan
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
'''
Run this script from the root of the repository to update all translations from
transifex.
It will do the following automatically:
- fetch all translations using the tx tool
- post-process them into valid and committable format
- remove invalid control characters
- remove location tags (makes diffs less noisy)
TODO:
- auto-add new translations to the build system according to the translation process
'''
from __future__ import division, print_function
import subprocess
import re
import sys
import os
import io
import xml.etree.ElementTree as ET
# Name of the Transifex command-line client executable
TX = 'tx'
# Name of the source (English) language file; never post-processed
SOURCE_LANG = 'dash_en.ts'
# Directory containing the Qt .ts locale files
LOCALE_DIR = 'src/qt/locale'
# Minimum number of messages for a translation to be kept at all
MIN_NUM_MESSAGES = 10
def check_at_repository_root():
    """Exit with status 1 unless the current directory is the repo root."""
    if not os.path.exists('.git'):
        # BUG FIX: the first diagnostic went to stdout while the second went
        # to stderr; send both to stderr consistently.
        print('No .git directory found', file=sys.stderr)
        print('Execute this script at the root of the repository', file=sys.stderr)
        exit(1)
def fetch_all_translations():
    '''Pull every translation resource from Transifex, aborting on failure.'''
    ret = subprocess.call([TX, 'pull', '-f', '-a'])
    if ret:
        print('Error while fetching translations', file=sys.stderr)
        exit(1)
def find_format_specifiers(s):
    '''Find all format specifiers in a string.

    Returns the list of characters immediately following each '%'
    (e.g. '%1' -> '1', '%s' -> 's', '%%' -> '%').
    '''
    pos = 0
    specifiers = []
    while True:
        percent = s.find('%', pos)
        if percent < 0:
            break
        try:
            specifiers.append(s[percent+1])
        except IndexError:
            # BUG FIX: this was a bare 'except:', which would also swallow
            # KeyboardInterrupt/SystemExit. Only a trailing '%' (no character
            # after it) can fail here, and that raises IndexError.
            print('Failed to get specifier')
        pos = percent+2
    return specifiers
def split_format_specifiers(specifiers):
    '''Split format specifiers between numeric (Qt) and others (strprintf).

    Qt specifiers (%1..%9) can be reordered freely; strprintf ones must stay
    in their original order, hence the (set, list) return shape.
    '''
    qt_digits = set('123456789')
    numeric = [s for s in specifiers if s in qt_digits]
    other = [s for s in specifiers if s not in qt_digits]
    return set(numeric), other
def sanitize_string(s):
    '''Collapse newlines to spaces so the string prints on one line.'''
    return ' '.join(s.split('\n'))
def check_format_specifiers(source, translation, errors, numerus):
    '''Verify that *translation* uses the same format specifiers as *source*.

    Appends a human-readable message to *errors* and returns False on any
    problem; returns True when the translation is acceptable.
    '''
    source_f = split_format_specifiers(find_format_specifiers(source))
    # assert that no source messages contain both Qt and strprintf format
    # specifiers; if this fails, go change the source, as mixing is hacky
    # and confusing!
    #assert(not(source_f[0] and source_f[1]))
    try:
        translation_f = split_format_specifiers(find_format_specifiers(translation))
    except IndexError:
        errors.append("Parse error in translation for '%s': '%s'" % (
            sanitize_string(source), sanitize_string(translation)))
        return False
    if source_f == translation_f:
        return True
    # Numerus translations may drop the %n specifier entirely (usually when
    # the plural form has only one possible value).
    if numerus and source_f == (set(), ['n']) and \
            translation_f == (set(), []) and translation.find('%') == -1:
        return True
    errors.append("Mismatch between '%s' and '%s'" % (
        sanitize_string(source), sanitize_string(translation)))
    return False
def all_ts_files(suffix=''):
    '''Yield (filename, filepath) for every locale file in LOCALE_DIR.

    Only '.ts' files (optionally with *suffix* appended) are yielded, and
    the source language file is always skipped. *suffix* is stripped from
    the yielded names.
    '''
    for fname in os.listdir(LOCALE_DIR):
        # process only language files, and never the source language itself
        if not fname.endswith('.ts' + suffix) or fname == SOURCE_LANG + suffix:
            continue
        if suffix:
            fname = fname[:-len(suffix)]
        yield fname, os.path.join(LOCALE_DIR, fname)
# C0 control bytes that are not legal in XML 1.0: everything below 0x20
# except LF (0x0a) and CR (0x0d). Note this also strips TAB (0x09).
FIX_RE = re.compile(b'[\x00-\x09\x0b\x0c\x0e-\x1f]')
def remove_invalid_characters(s):
    '''Drop control bytes that would make the XML parser fail.'''
    return re.sub(FIX_RE, b'', s)
# Override cdata escape function to make our output match Qt's (optional,
# just for cleaner diffs for comparison; disabled by default).
# _orig_escape_cdata holds the stock ElementTree escape function; it is
# rebound by postprocess_translations(reduce_diff_hacks=True) before
# escape_cdata is installed as the replacement.
_orig_escape_cdata = None
def escape_cdata(text):
    # Apply the stock escaping first, then additionally escape quotes the
    # way Qt's writer does. Only valid after _orig_escape_cdata is rebound.
    text = _orig_escape_cdata(text)
    text = text.replace("'", '&apos;')
    text = text.replace('"', '&quot;')
    return text
def postprocess_translations(reduce_diff_hacks=False):
    """Validate and clean up all fetched .ts files in place.

    Each file is renamed to '<name>.orig', parsed, scrubbed of invalid
    control characters, checked for format-specifier mismatches (bad
    translations are cleared), stripped of location tags and unfinished
    messages, and written back unless it ended up (virtually) empty.
    Returns True if any format-specifier errors were found.
    """
    print('Checking and postprocessing...')
    if reduce_diff_hacks:
        # swap in our Qt-style escaping (see escape_cdata above)
        global _orig_escape_cdata
        _orig_escape_cdata = ET._escape_cdata
        ET._escape_cdata = escape_cdata
    for (filename,filepath) in all_ts_files():
        os.rename(filepath, filepath+'.orig')
    have_errors = False
    for (filename,filepath) in all_ts_files('.orig'):
        # pre-fixups to cope with transifex output
        parser = ET.XMLParser(encoding='utf-8') # need to override encoding because 'utf8' is not understood only 'utf-8'
        with open(filepath + '.orig', 'rb') as f:
            data = f.read()
        # remove control characters; this must be done over the entire file otherwise the XML parser will fail
        data = remove_invalid_characters(data)
        tree = ET.parse(io.BytesIO(data), parser=parser)
        # iterate over all messages in file
        root = tree.getroot()
        for context in root.findall('context'):
            for message in context.findall('message'):
                numerus = message.get('numerus') == 'yes'
                source = message.find('source').text
                translation_node = message.find('translation')
                # pick all numerusforms
                if numerus:
                    translations = [i.text for i in translation_node.findall('numerusform')]
                else:
                    translations = [translation_node.text]
                for translation in translations:
                    if translation is None:
                        continue
                    errors = []
                    valid = check_format_specifiers(source, translation, errors, numerus)
                    for error in errors:
                        print('%s: %s' % (filename, error))
                    if not valid: # set type to unfinished and clear string if invalid
                        translation_node.clear()
                        translation_node.set('type', 'unfinished')
                        have_errors = True
                # Remove location tags
                for location in message.findall('location'):
                    message.remove(location)
                # Remove entire message if it is an unfinished translation
                if translation_node.get('type') == 'unfinished':
                    context.remove(message)
        # check if document is (virtually) empty, and remove it if so
        num_messages = 0
        for context in root.findall('context'):
            for message in context.findall('message'):
                num_messages += 1
        if num_messages < MIN_NUM_MESSAGES:
            # The file was renamed to .orig above, so skipping the write here
            # effectively removes it from LOCALE_DIR.
            print('Removing %s, as it contains only %i messages' % (filepath, num_messages))
            continue
        # write fixed-up tree
        # if diff reduction requested, replace some XML to 'sanitize' to qt formatting
        if reduce_diff_hacks:
            out = io.BytesIO()
            tree.write(out, encoding='utf-8')
            out = out.getvalue()
            out = out.replace(b' />', b'/>')
            with open(filepath, 'wb') as f:
                f.write(out)
        else:
            tree.write(filepath, encoding='utf-8')
    return have_errors
if __name__ == '__main__':
    check_at_repository_root()
    # NOTE(review): fetching from Transifex is disabled here; this run only
    # post-processes translation files already present on disk.
    # fetch_all_translations()
    postprocess_translations()
| 37.539216
| 124
| 0.629277
|
4a0ac56924599fc4f6d18c2ef53c87cb5770bffe
| 9,478
|
py
|
Python
|
sdk/cosmos/azure-cosmos/test/query_execution_context_tests.py
|
vchske/azure-sdk-for-python
|
6383ed3676b7355af7be394562b126209961ec13
|
[
"MIT"
] | null | null | null |
sdk/cosmos/azure-cosmos/test/query_execution_context_tests.py
|
vchske/azure-sdk-for-python
|
6383ed3676b7355af7be394562b126209961ec13
|
[
"MIT"
] | 1
|
2019-06-04T18:12:16.000Z
|
2019-06-04T18:12:16.000Z
|
sdk/cosmos/azure-cosmos/test/query_execution_context_tests.py
|
vchske/azure-sdk-for-python
|
6383ed3676b7355af7be394562b126209961ec13
|
[
"MIT"
] | 1
|
2019-06-17T22:18:23.000Z
|
2019-06-17T22:18:23.000Z
|
#The MIT License (MIT)
#Copyright (c) 2014 Microsoft Corporation
#Permission is hereby granted, free of charge, to any person obtaining a copy
#of this software and associated documentation files (the "Software"), to deal
#in the Software without restriction, including without limitation the rights
#to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
#copies of the Software, and to permit persons to whom the Software is
#furnished to do so, subject to the following conditions:
#The above copyright notice and this permission notice shall be included in all
#copies or substantial portions of the Software.
#THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
#IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
#FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
#AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
#LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
#OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
#SOFTWARE.
import unittest
import uuid
import pytest
from six.moves import xrange
import azure.cosmos.cosmos_client as cosmos_client
from azure.cosmos.execution_context import base_execution_context as base_execution_context
import azure.cosmos.base as base
import test_config
from azure.cosmos.partition_key import PartitionKey
pytestmark = pytest.mark.cosmosEmulator
#IMPORTANT NOTES:
# Most test cases in this file create collections in your Azure Cosmos account.
# Collections are billing entities. By running these test cases, you may incur monetary costs on your account.
# To Run the test, replace the two member fields (masterKey and host) with values
# associated with your Azure Cosmos account.
@pytest.mark.usefixtures("teardown")
class QueryExecutionContextEndToEndTests(unittest.TestCase):
"""Routing Map Functionalities end to end Tests.
"""
host = test_config._test_config.host
masterKey = test_config._test_config.masterKey
connectionPolicy = test_config._test_config.connectionPolicy
    @classmethod
    def setUpClass(cls):
        # Refuse to run against the placeholder credentials.
        if (cls.masterKey == '[YOUR_KEY_HERE]' or
                cls.host == '[YOUR_ENDPOINT_HERE]'):
            raise Exception(
                "You must specify your Azure Cosmos account values for "
                "'masterKey' and 'host' at the top of this class to run the "
                "tests.")
        # One-time fixtures shared by every test in this class: a client,
        # a database, a collection, and 20 sample documents.
        cls.client = cosmos_client.CosmosClient(QueryExecutionContextEndToEndTests.host,
                                                {'masterKey': QueryExecutionContextEndToEndTests.masterKey},
                                                "Session",
                                                QueryExecutionContextEndToEndTests.connectionPolicy)
        cls.created_db = test_config._test_config.create_database_if_not_exist(cls.client)
        cls.created_collection = cls.create_collection(cls.created_db)
        cls.document_definitions = []
        # create a document using the document definition
        for i in xrange(20):
            d = {'id' : str(i),
                 'name': 'sample document',
                 'spam': 'eggs' + str(i),
                 'key': 'value'}
            cls.document_definitions.append(d)
        cls.insert_doc(cls.document_definitions)
    @classmethod
    def tearDownClass(cls):
        # Remove the shared collection created in setUpClass.
        cls.created_db.delete_container(container=cls.created_collection)
    def setUp(self):
        # Verify the shared fixtures are intact before each test.
        # sanity check: at least one partition key range exists
        partition_key_ranges = list(self.client.client_connection._ReadPartitionKeyRanges(
            self.GetDocumentCollectionLink(self.created_db, self.created_collection)))
        self.assertGreaterEqual(len(partition_key_ranges), 1)
        # sanity check: read documents after creation
        queried_docs = list(self.created_collection.read_all_items())
        self.assertEqual(
            len(queried_docs),
            len(self.document_definitions),
            'create should increase the number of documents')
def test_no_query_default_execution_context(self):
options = {}
options['maxItemCount'] = 2
self._test_default_execution_context(options, None, 20)
def test_no_query_default_execution_context_with_small_last_page(self):
options = {}
options['maxItemCount'] = 3
self._test_default_execution_context(options, None, 20)
def test_simple_query_default_execution_context(self):
query = {
'query': 'SELECT * FROM root r WHERE r.id != @id',
'parameters': [
{ 'name': '@id', 'value': '5'}
]
}
options = {}
options['enableCrossPartitionQuery'] = True
options['maxItemCount'] = 2
res = self.created_collection.query_items(
query=query,
enable_cross_partition_query=True,
max_item_count=2
)
self.assertEqual(len(list(res)), 19)
self._test_default_execution_context(options, query, 19)
def test_simple_query_default_execution_context_with_small_last_page(self):
query = {
'query': 'SELECT * FROM root r WHERE r.id != @id',
'parameters': [
{ 'name': '@id', 'value': '5'}
]
}
options = {}
options['enableCrossPartitionQuery'] = True
options['maxItemCount'] = 3
self._test_default_execution_context(options, query, 19)
def _test_default_execution_context(self, options, query, expected_number_of_results):
page_size = options['maxItemCount']
collection_link = self.GetDocumentCollectionLink(self.created_db, self.created_collection)
path = base.GetPathFromLink(collection_link, 'docs')
collection_id = base.GetResourceIdOrFullNameFromLink(collection_link)
def fetch_fn(options):
return self.client.client_connection.QueryFeed(path,
collection_id,
query,
options)
######################################
# test next() behavior
######################################
ex = base_execution_context._DefaultQueryExecutionContext(self.client.client_connection, options, fetch_fn)
it = ex.__iter__()
def invokeNext():
return next(it)
results = {}
# validate that invocations of next() produces the same results as expected
for _ in xrange(expected_number_of_results):
item = invokeNext()
results[item['id']] = item
self.assertEqual(len(results), expected_number_of_results)
# after the result set is exhausted, invoking next must raise a StopIteration exception
self.assertRaises(StopIteration, invokeNext)
######################################
# test fetch_next_block() behavior
######################################
ex = base_execution_context._DefaultQueryExecutionContext(self.client.client_connection, options, fetch_fn)
results = {}
cnt = 0
while True:
fetched_res = ex.fetch_next_block()
fetched_size = len(fetched_res)
for item in fetched_res:
results[item['id']] = item
cnt += fetched_size
if (cnt < expected_number_of_results):
# backend may not necessarily return exactly page_size of results
self.assertEqual(fetched_size, page_size, "page size")
else:
if cnt == expected_number_of_results:
self.assertTrue(fetched_size <= page_size, "last page size")
break
else:
#cnt > expected_number_of_results
self.fail("more results than expected")
# validate the number of collected results
self.assertEqual(len(results), expected_number_of_results)
# no more results will be returned
self.assertEqual(ex.fetch_next_block(), [])
@classmethod
def create_collection(cls, created_db):
created_collection = created_db.create_container(
id='query_execution_context_tests collection ' + str(uuid.uuid4()),
partition_key=PartitionKey(path='/id', kind='Hash')
)
return created_collection
@classmethod
def insert_doc(cls, document_definitions):
# create a document using the document definition
created_docs = []
for d in document_definitions:
created_doc = cls.created_collection.create_item(body=d)
created_docs.append(created_doc)
return created_docs
def GetDatabaseLink(self, database):
return 'dbs/' + database.id
def GetDocumentCollectionLink(self, database, document_collection):
return self.GetDatabaseLink(database) + '/colls/' + document_collection.id
# Run the whole suite when this module is executed directly.
if __name__ == "__main__":
    unittest.main()
| 39.491667
| 116
| 0.618907
|
4a0ac668ca187843f7dae8ba71f481dbf9140e05
| 16,837
|
py
|
Python
|
data_loading/augmenter.py
|
justusschock/data_loading_stuff
|
08a8d7b86f14a4901a9af4981d9a3c25a63ccb2a
|
[
"MIT"
] | null | null | null |
data_loading/augmenter.py
|
justusschock/data_loading_stuff
|
08a8d7b86f14a4901a9af4981d9a3c25a63ccb2a
|
[
"MIT"
] | null | null | null |
data_loading/augmenter.py
|
justusschock/data_loading_stuff
|
08a8d7b86f14a4901a9af4981d9a3c25a63ccb2a
|
[
"MIT"
] | null | null | null |
import multiprocessing
from multiprocessing import connection as mpconnection
from collections import Callable
import abc
import os
import numpy as np
import random
from data_loading.sampler import AbstractSampler, BatchSampler
from data_loading.data_loader import DataLoader
from delira import get_current_debug_mode
class _WorkerProcess(multiprocessing.Process):
    """
    Worker process that repeatedly receives index batches over a pipe,
    loads (and optionally transforms) the corresponding data and sends
    the result back over a second pipe, until told to shut down.
    """

    def __init__(self, dataloader: DataLoader,
                 output_pipe: mpconnection.Connection,
                 index_pipe: mpconnection.Connection,
                 abort_event: multiprocessing.Event,
                 transforms: Callable,
                 process_id):
        """
        Parameters
        ----------
        dataloader : :class:`DataLoader`
            loads the data corresponding to the received indices
        output_pipe : :class:`multiprocessing.connection.Connection`
            pipe the loaded data is sent through
        index_pipe : :class:`multiprocessing.connection.Connection`
            pipe the index batches arrive on
        abort_event : :class:`multiprocessing.Event`
            shared abortion event; set on any exception, terminates the
            worker when set
        transforms : :class:`collections.Callable`
            transforms applied to each loaded batch (may be None)
        process_id : int
            id assigned to this worker
        """
        super().__init__()
        self._data_loader = dataloader
        self._process_id = process_id
        self._transforms = transforms
        self._output_pipe = output_pipe
        self._input_pipe = index_pipe
        self._abort_event = abort_event

    def run(self) -> None:
        # make the loader aware of which worker it lives in
        self._data_loader.process_id = self._process_id

        try:
            while True:
                # terminate as soon as the shared abortion event is set
                if self._abort_event.is_set():
                    raise RuntimeError("Abort Event has been set externally")

                # short poll timeout so abortions are checked frequently
                if not self._input_pipe.poll(timeout=0.2):
                    continue

                batch_idxs = self._input_pipe.recv()

                # ``None`` is the shutdown sentinel
                if batch_idxs is None:
                    break

                sample = self._data_loader(batch_idxs)
                if self._transforms is not None:
                    sample = self._transforms(**sample)

                self._output_pipe.send(sample)

        except Exception as e:
            # make every other party aware of the failure before dying
            self._abort_event.set()
            raise e
class AbstractAugmenter(object):
    """
    Basic Augmenter Class providing a general Augmenter API
    """

    def __init__(self, data_loader, sampler, transforms=None, seed=1,
                 drop_last=False):
        """
        Parameters
        ----------
        data_loader : :class:`DataLoader`
            the dataloader, loading samples for given indices
        sampler : :class:`AbstractSampler`
            the sampler (may be batch sampler or usual sampler), defining the
            actual sampling strategy; Is an iterable yielding indices
        transforms : :class:`collections.Callable`
            the transforms to apply; defaults to None
        seed : int
            the basic seed; default: 1
        drop_last : bool
            whether to drop the last (possibly smaller) batch or not

        Raises
        ------
        ValueError
            if ``sampler`` is neither a :class:`BatchSampler` nor an
            :class:`AbstractSampler`
        """
        self._data_loader = data_loader

        if not isinstance(sampler, BatchSampler):
            if isinstance(sampler, AbstractSampler):
                # FIXME(review): this assigns the BatchSampler *class*, not
                # an instance wrapping ``sampler``; iterating it later will
                # not yield batches of the given sampler. Kept as-is because
                # BatchSampler's constructor signature is not visible here --
                # confirm and instantiate properly.
                sampler = BatchSampler
            else:
                raise ValueError("Invalid Sampler given: %s" % str(sampler))

        self._sampler = sampler
        self._drop_last = drop_last
        self._transforms = transforms
        self._seed = seed

        # seed numpy.random and random as these are the random number
        # generators, which might be used for sampling
        np.random.seed(seed)
        # BUGFIX: the original ``random.seed = seed`` *replaced* the
        # ``random.seed`` function with an int instead of seeding the
        # generator; call it instead.
        random.seed(seed)

    @abc.abstractmethod
    def __iter__(self):
        raise NotImplementedError
class _ParallelAugmenter(AbstractAugmenter):
    """
    An Augmenter that loads and augments multiple batches in parallel
    """

    def __init__(self, data_loader, sampler, num_processes=None,
                 transforms=None, seed=1, drop_last=False):
        """
        Parameters
        ----------
        data_loader : :class:`DataLoader`
            the dataloader, loading samples for given indices
        sampler : :class:`AbstractSampler`
            the sampler (may be batch sampler or usual sampler), defining the
            actual sampling strategy; Is an iterable yielding indices
        num_processes : int
            the number of processes to use for dataloading + augmentation;
            if None: the number of available CPUs will be used as number of
            processes
        transforms : :class:`collections.Callable`
            the transforms to apply; defaults to None
        seed : int
            the basic seed; default: 1
        drop_last : bool
            whether to drop the last (possibly smaller) batch or not
        """
        super().__init__(data_loader, sampler, transforms, seed, drop_last)

        if num_processes is None:
            num_processes = os.cpu_count()

        self._num_processes = num_processes
        self._processes = []
        self._index_pipes = []
        self._data_pipes = []
        self._index_pipe_counter = 0
        self._data_pipe_counter = 0
        self._abort_event = None
        # per-worker count of batches sent but not yet received back
        self._data_queued = []
        self._processes_running = False

    @property
    def abort_event(self):
        """
        Property to access the abortion Event

        Returns
        -------
        :class:`multiprocessing.Event`
            the abortion event
        """
        return self._abort_event

    @abort_event.setter
    def abort_event(self, new_event):
        """
        Setter for the abortion Event;
        Ensures the old event gets set before it is overwritten

        Parameters
        ----------
        new_event : :class:`multiprocessing.Event`
            the new event
        """
        if self._abort_event is not None and not self._abort_event.is_set():
            self._abort_event.set()
        self._abort_event = new_event

    def _start_processes(self):
        """
        Starts new processes and pipes for interprocess communication
        """
        # reset abortion event
        self.abort_event = multiprocessing.Event()

        # for each process do:
        for i in range(self._num_processes):
            # start two oneway pipes (one for passing indices to workers
            # and one for passing data back to the main process)
            recv_conn_out, send_conn_out = multiprocessing.Pipe(duplex=False)
            recv_conn_in, send_conn_in = multiprocessing.Pipe(duplex=False)

            # create the actual process
            process = _WorkerProcess(dataloader=self._data_loader,
                                     output_pipe=send_conn_out,
                                     index_pipe=recv_conn_in,
                                     transforms=self._transforms,
                                     abort_event=self._abort_event,
                                     process_id=i)

            # make the process daemonic and start it
            process.daemon = True
            process.start()

            # append process and pipes to lists
            self._processes.append(process)
            self._index_pipes.append(send_conn_in)
            self._data_pipes.append(recv_conn_out)
            self._data_queued.append(0)

        self._processes_running = True

    def _shutdown_processes(self):
        """
        Shuts down the processes and resets all related flags and counters
        """
        # shutdown workers by setting abortion event
        if not self._abort_event.is_set():
            self._abort_event.set()

        # BUGFIX: the original popped from the very lists it was zipping
        # over (so only half the workers were handled) and called
        # ``Process.close()`` *before* ``join()`` -- ``close()`` raises
        # ``ValueError`` on a still-running process and forbids a later
        # ``join()``. Handle every worker, join first, close afterwards,
        # then clear the bookkeeping lists in one go.
        for _data_conn, _index_conn, _process in zip(self._data_pipes,
                                                     self._index_pipes,
                                                     self._processes):
            # send shutdown sentinel to worker
            _index_conn.send(None)

            # wait for the process to terminate, then release its resources
            _process.join()
            _process.close()

            # close connections
            _index_conn.close()
            _data_conn.close()

        self._data_pipes.clear()
        self._data_queued.clear()
        self._index_pipes.clear()
        self._processes.clear()

        # reset running process flag and counters
        self._processes_running = False
        self._data_pipe_counter = 0
        self._index_pipe_counter = 0

    @property
    def _next_index_pipe(self):
        """
        Property implementing switch to next index pipe
        """
        ctr = self._index_pipe_counter
        new_ctr = (self._index_pipe_counter + 1) % self._num_processes
        self._index_pipe_counter = new_ctr
        return ctr

    @property
    def _next_data_pipe(self):
        """
        Property implementing switch to next data pipe
        """
        ctr = self._data_pipe_counter
        new_ctr = (self._data_pipe_counter + 1) % self._num_processes
        self._data_pipe_counter = new_ctr
        return ctr

    def _enqueue_indices(self, sample_idxs):
        """
        Enqueues a set of index batches to workers while iterating over
        workers in a cyclic way

        Parameters
        ----------
        sample_idxs : list
            list of index batches to enqueue to the workers
        """
        # iterating over all batch indices
        for idxs in sample_idxs:
            # switch to next counter
            index_pipe_ctr = self._next_index_pipe

            # increase number of queued batches for current worker
            self._data_queued[index_pipe_ctr] += 1

            # enqueue indices to worker
            self._index_pipes[index_pipe_ctr].send(idxs)

    def _receive_data(self):
        """
        Receives data from the next worker that has data queued
        """
        # switching to next worker
        _data_pipe = self._next_data_pipe

        # switch to next worker while worker does not have any data enqueued
        while not self._data_queued[_data_pipe]:
            _data_pipe = self._next_data_pipe

        # receive data from worker
        data = self._data_pipes[_data_pipe].recv()

        # decrease number of enqueued batches for current worker
        self._data_queued[_data_pipe] -= 1
        return data

    def __iter__(self):
        # start processes
        self._start_processes()
        _index_pipe = self._next_index_pipe

        # create sampler iterator
        sampler_iter = iter(self._sampler)
        all_sampled = False

        try:
            # start by enqueuing two items per process as buffer
            _indices = []
            try:
                for i in range(self._num_processes * 2):
                    idxs = next(sampler_iter)
                    _indices.append(idxs)
            except StopIteration:
                all_sampled = True

            self._enqueue_indices(_indices)

            # iterate while not all data has been sampled and any data is
            # enqueued
            while True:
                # break if abort event has been set
                if self.abort_event.is_set():
                    raise RuntimeError("Abort Event was set in one of the "
                                       "workers")

                # enqueue additional indices if sampler was not already
                # exhausted
                try:
                    if not all_sampled:
                        idxs = next(sampler_iter)
                        # BUGFIX: the original passed ``idxs`` directly, so
                        # _enqueue_indices iterated over the *elements* of a
                        # single batch instead of treating it as one batch
                        # (inconsistent with the buffer phase above); wrap it.
                        self._enqueue_indices([idxs])
                except StopIteration:
                    all_sampled = True

                # receive data from workers
                if any(self._data_queued):
                    yield self._receive_data()
                else:
                    break

        except Exception as e:
            # set abort event to shutdown workers
            self._abort_event.set()
            raise e

        finally:
            # shutdown processes
            if self._processes_running:
                self._shutdown_processes()
class _SequentialAugmenter(AbstractAugmenter):
    """
    An Augmenter that loads and augments batches one after another in the
    current process, without any parallelism
    """

    def __init__(self, data_loader, sampler, transforms=None, seed=1,
                 drop_last=False):
        """
        Parameters
        ----------
        data_loader : :class:`DataLoader`
            the dataloader, loading samples for given indices
        sampler : :class:`AbstractSampler`
            the sampler (may be batch sampler or usual sampler), defining the
            actual sampling strategy; Is an iterable yielding indices
        transforms : :class:`collections.Callable`
            the transforms to apply; defaults to None
        seed : int
            the basic seed; default: 1
        drop_last : bool
            whether to drop the last (possibly smaller) batch or not
        """
        super().__init__(data_loader=data_loader, sampler=sampler,
                         transforms=transforms, seed=seed,
                         drop_last=drop_last)

    def __iter__(self):
        # pull index batches straight from the sampler and process them
        # inline, yielding one (possibly transformed) batch at a time
        for batch_indices in iter(self._sampler):
            loaded = self._data_loader(batch_indices)
            if self._transforms is None:
                yield loaded
            else:
                yield self._transforms(**loaded)
class Augmenter(object):
    """
    User-facing Augmenter that wraps :class:`_SequentialAugmenter` and
    :class:`_ParallelAugmenter`, picking one of them based on the number of
    processes and the debug mode
    """

    def __init__(self, data_loader, sampler, num_processes=None,
                 transforms=None, seed=1, drop_last=False):
        """
        Parameters
        ----------
        data_loader : :class:`DataLoader`
            the dataloader, loading samples for given indices
        sampler : :class:`AbstractSampler`
            the sampler (may be batch sampler or usual sampler), defining the
            actual sampling strategy; Is an iterable yielding indices
        num_processes : int
            the number of processes to use for dataloading + augmentation;
            if None: the number of available CPUs will be used as number of
            processes
        transforms : :class:`collections.Callable`
            the transforms to apply; defaults to None
        seed : int
            the basic seed; default: 1
        drop_last : bool
            whether to drop the last (possibly smaller) batch or not
        """
        self._augmenter = self._resolve_augmenter_cls(num_processes,
                                                      data_loader=data_loader,
                                                      sampler=sampler,
                                                      transforms=transforms,
                                                      seed=seed,
                                                      drop_last=drop_last)

    @staticmethod
    def _resolve_augmenter_cls(num_processes, **kwargs):
        """
        Chooses between the sequential and the parallel implementation
        (sequential in debug mode or for zero processes) and instantiates it

        Parameters
        ----------
        num_processes : int
            the number of processes to use for dataloading + augmentation;
            if None: the number of available CPUs will be used as number of
            processes
        **kwargs :
            additional keyword arguments forwarded to the chosen class

        Returns
        -------
        :class:`AbstractAugmenter`
            an instance of the chosen augmenter class
        """
        run_sequentially = get_current_debug_mode() or num_processes == 0
        if run_sequentially:
            return _SequentialAugmenter(**kwargs)
        return _ParallelAugmenter(num_processes=num_processes, **kwargs)

    def __iter__(self):
        """
        Makes the Augmenter iterable by delegating to the wrapped
        implementation

        Returns
        -------
        Generator
            a generator yielding the augmented batches
        """
        yield from self._augmenter
| 32.378846
| 79
| 0.575756
|
4a0ac6d46a662d2000376854ef0ba980088b3537
| 176
|
py
|
Python
|
saltshaker/exceptions.py
|
diegotoral/SaltShaker
|
86c7619f3347c1b56ed3e680b8bb558d93b0e385
|
[
"MIT"
] | 2
|
2017-07-02T20:29:26.000Z
|
2017-07-02T21:18:51.000Z
|
saltshaker/exceptions.py
|
diegotoral/SaltShaker
|
86c7619f3347c1b56ed3e680b8bb558d93b0e385
|
[
"MIT"
] | null | null | null |
saltshaker/exceptions.py
|
diegotoral/SaltShaker
|
86c7619f3347c1b56ed3e680b8bb558d93b0e385
|
[
"MIT"
] | null | null | null |
# -*- coding: utf-8 -*-
class ShakerError(Exception):
    """Root of the SaltShaker exception hierarchy; catch this to handle
    any error raised by the package."""
class ConfigurationError(ShakerError):
    """Raised when SaltShaker configuration is invalid or missing."""
| 13.538462
| 44
| 0.636364
|
4a0ac77e798e8d590ff30d35f9da84f3677e5546
| 1,164
|
py
|
Python
|
bus_board/migrations/0005_ticket.py
|
AG371/bus-board
|
a30561ab19c1af14483a0222e78959f7902a8a96
|
[
"MIT"
] | 11
|
2018-01-31T12:58:40.000Z
|
2022-03-25T19:23:12.000Z
|
bus_board/migrations/0005_ticket.py
|
AG371/bus-board
|
a30561ab19c1af14483a0222e78959f7902a8a96
|
[
"MIT"
] | 7
|
2020-02-12T00:38:19.000Z
|
2021-09-07T23:51:50.000Z
|
bus_board/migrations/0005_ticket.py
|
AG371/bus-board
|
a30561ab19c1af14483a0222e78959f7902a8a96
|
[
"MIT"
] | 7
|
2018-03-07T22:00:00.000Z
|
2021-04-15T16:34:47.000Z
|
# -*- coding: utf-8 -*-
# Generated by Django 1.11.7 on 2018-01-18 09:24
from __future__ import unicode_literals
from django.db import migrations, models
import django.db.models.deletion
import phonenumber_field.modelfields
import uuid
class Migration(migrations.Migration):
    # Auto-generated Django migration: creates the Ticket model holding a
    # passenger's contact details, an auto-generated UUID ticket number,
    # a transaction code (presumably a payment reference -- confirm with
    # the Ticket model) and a FK to the booked Schedule.
    # NOTE: generated code; field definitions intentionally left untouched.

    dependencies = [
        ('bus_board', '0004_schedule'),
    ]

    operations = [
        migrations.CreateModel(
            name='Ticket',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('first_name', models.CharField(max_length=255)),
                ('last_name', models.CharField(max_length=255)),
                ('email', models.EmailField(max_length=254)),
                ('phone_number', phonenumber_field.modelfields.PhoneNumberField(max_length=128)),
                ('ticket_number', models.UUIDField(default=uuid.uuid4, editable=False)),
                ('transaction_code', models.CharField(max_length=255)),
                # deleting a Schedule cascades to its tickets
                ('schedule', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='bus_board.Schedule')),
            ],
        ),
    ]
| 36.375
| 118
| 0.633162
|
4a0ac78a42ffcc76d39a3567e06ab771330e38f2
| 1,380
|
py
|
Python
|
gameservice/universeservice.py
|
willwybrow/starlines-web
|
c6c0ef4c02362bd666b750980a1e005394fd423d
|
[
"MIT"
] | null | null | null |
gameservice/universeservice.py
|
willwybrow/starlines-web
|
c6c0ef4c02362bd666b750980a1e005394fd423d
|
[
"MIT"
] | null | null | null |
gameservice/universeservice.py
|
willwybrow/starlines-web
|
c6c0ef4c02362bd666b750980a1e005394fd423d
|
[
"MIT"
] | null | null | null |
import random
from typing import Dict
from game.geometry import Point
from game.star import ClusterID, Star, StarID
from game.universe import MASS_PER_NEW_CLUSTER, CLUSTER_SIZE
from gameservice.service import Service
class UniverseService(Service):
    """Service that creates and grows the star universe."""

    def __init__(self, repository):
        super().__init__(repository)
        # hand the repository a cluster factory so it can top up the
        # universe to its minimum size on demand
        self.repository.ensure_minimum_universe_size(UniverseService.create_new_cluster_of_stars_at)

    def new_universe(self):
        # Populate every cluster coordinate on the square grid from
        # -CLUSTER_SIZE to +CLUSTER_SIZE (inclusive) on both axes.
        for x in range(-1 * CLUSTER_SIZE, CLUSTER_SIZE + 1):
            for y in range(-1 * CLUSTER_SIZE, CLUSTER_SIZE + 1):
                self.create_new_cluster_of_stars_at(ClusterID(x, y))
        return

    @staticmethod
    def create_new_cluster_of_stars_at(cluster_coordinate: ClusterID) -> Dict[Point, Star]:
        """Generate a random mapping of in-cluster points to new stars.

        NOTE(review): ``cluster_coordinate`` is unused in this body --
        confirm whether the generated stars should depend on it.
        """
        total_mass_to_distribute = MASS_PER_NEW_CLUSTER
        new_star_masses = []
        # NOTE(review): the ``>= 0`` condition appends one more star even
        # when the budget is exactly exhausted, so the summed mass always
        # exceeds MASS_PER_NEW_CLUSTER -- confirm this overshoot is intended.
        while total_mass_to_distribute >= 0:
            star_mass = random.randint(1, MASS_PER_NEW_CLUSTER // 2)
            new_star_masses.append(star_mass)
            total_mass_to_distribute -= star_mass
        # one distinct interior grid point per star; random.sample raises
        # ValueError if more stars were generated than points exist
        random_points = random.sample([Point(x, y) for x in range(1, CLUSTER_SIZE) for y in range(1, CLUSTER_SIZE)], len(new_star_masses))
        stars = {random_points[i]: Star(StarID.generate(), new_star_masses[i], int(new_star_masses[i] * 1.75), 0) for i in range(len(random_points))}
        return stars
| 44.516129
| 149
| 0.713043
|
4a0ac78cd503bdac920a2d31e3380ca5a98e53f4
| 4,442
|
py
|
Python
|
app.py
|
mohamed17717/Toss
|
4b756cb9d06f2ba27551e77c02bf2bf92aaba253
|
[
"MIT"
] | null | null | null |
app.py
|
mohamed17717/Toss
|
4b756cb9d06f2ba27551e77c02bf2bf92aaba253
|
[
"MIT"
] | null | null | null |
app.py
|
mohamed17717/Toss
|
4b756cb9d06f2ba27551e77c02bf2bf92aaba253
|
[
"MIT"
] | null | null | null |
import os
from view import *
from flask import Flask, request, redirect, url_for
from flask_session import Session
from sqlalchemy import create_engine
from sqlalchemy.orm import scoped_session, sessionmaker
# Flask application object; templates/ and static/ are the default folders,
# spelled out explicitly here.
app = Flask(__name__, template_folder='templates', static_folder='static')

# Check for environment variable
if not os.getenv("DATABASE_URL"):
    raise RuntimeError("DATABASE_URL is not set")

# Configure session to use filesystem
app.config["SESSION_PERMANENT"] = False
app.config["SESSION_TYPE"] = "filesystem"
Session(app)

# Set up database
engine = create_engine(os.getenv("DATABASE_URL"))
db = scoped_session(sessionmaker(bind=engine))

# NOTE(review): both names hold LoginSystem instances; ``valid`` is only
# used for its validation helpers (numInRange/integer) -- confirm whether a
# dedicated validator class was intended.
loginSystem = LoginSystem()
valid = LoginSystem()
@app.route("/", methods=['GET'])
@app.route('/<int:pageNumber>/', methods=['GET'])
def home(pageNumber=1):
clearFilterGames()
return index(pageNumber)
@app.route('/login/', methods=['POST'])
def login():
    """Authenticate an existing user; already-logged-in users go home."""
    if logined():
        return takeMeHome()
    return loginSystem.login(request.form)
@app.route('/register/', methods=['POST'])
def register():
    """Create a new account; already-logged-in users go home."""
    if logined():
        return takeMeHome()
    return loginSystem.register(request.form)
@app.route('/logout/', methods=['GET'])
def logout():
    """End the current session; anonymous visitors are simply sent home."""
    if not logined():
        return takeMeHome()
    return loginSystem.logout()
@app.route('/profile/<int:userId>/', methods=['GET'])
def profile(userId):
    """Show a user's profile page (login required)."""
    if not logined():
        return takeMeHome()
    return getProfile(userId)
@app.route('/game/<int:rolletId>/', methods=['GET'])
def rollet(rolletId):
    """Show a single game (rollet) page (login required)."""
    if not logined():
        return takeMeHome()
    return getRollet(rolletId)
@app.route('/createGame/', methods=['POST'])
def createGame():
    """Create a new rollet game from the posted form.

    Expects integer form fields ``howLong`` (1-24), ``ticket`` (> 0) and
    ``percentOfWinners``; redirects to the new game page on success,
    otherwise flashes an error and goes home.
    """
    if not logined(): return takeMeHome()

    try:
        howLong = int(request.form.get('howLong'))
        ticket = int(request.form.get('ticket'))
        percentOfWinners = int(request.form.get('percentOfWinners'))
    except (TypeError, ValueError):
        # BUGFIX: was a bare ``except`` that swallowed *every* exception;
        # int() raises TypeError for a missing field (None) and ValueError
        # for non-numeric input -- catch exactly those.
        message('Can\'t convert int ', 'danger fail')
        return takeMeHome()

    ## check inputs
    if valid.numInRange(howLong, 1, 24) and ticket > 0 and valid.integer(percentOfWinners):
        rolletId = create_Rollet(howLong, ticket, percentOfWinners)
        if rolletId:
            message('Congrtulation You Now a Founder For This Game', 'success')
            return redirect( url_for('rollet', rolletId=rolletId) )

    message('Sorry Ther Is Something Wrong We Can\'t Create A Game Right Now', 'danger')
    return takeMeHome()
@app.route('/dareGame/<int:rolletId>/', methods=['POST'])
def dareGame(rolletId):
    """Dare an existing game, flash the outcome and return home."""
    if not logined():
        return takeMeHome()
    if dare_Rollet(rolletId):
        message('Shame.. You Dare Well', 'success')
    else:
        message('Sorry You Can\'t Dare The Game', 'danger')
    return takeMeHome()
@app.route('/removeGame/<int:rolletId>/', methods=['POST'])
def removeGame(rolletId):
    """Delete a game: success goes home, failure returns to the game page."""
    if not logined():
        return takeMeHome()
    if delete_Rollet(rolletId):
        message('Game Deleted', 'success')
        return takeMeHome()
    message('Game Can\'t Deleted', 'danger')
    return redirect(url_for('rollet', rolletId=rolletId))
@app.route('/joinGame/<int:rolletId>/', methods=['POST'])
def joinGame(rolletId):
    """Buy ``times`` tickets for a game, stopping early on the first
    failed participation, then return to the game page."""
    if not logined(): return takeMeHome()

    try:
        times = int(request.form.get('times'))
    except (TypeError, ValueError):
        # BUGFIX: was a bare ``except``; int() raises TypeError for a
        # missing field and ValueError for non-numeric input.
        message('failed to join the game', 'danger')
        return takeMeHome()

    for i in range(times):
        status = participate_Rollet(rolletId)
        if not status: break

    return redirect(url_for('rollet', rolletId = rolletId))
@app.route('/endGame/<int:rolletId>/', methods=['POST'])
def endGame(rolletId):
    """Finish a running game, then show its page."""
    if not logined():
        return takeMeHome()
    end_Rollet(rolletId)
    return redirect(url_for('rollet', rolletId=rolletId))
@app.route('/filterGames/', methods=['POST'])
@app.route('/filterGames/<int:pageNumber>/', methods=['GET'])
def filterGames(pageNumber=1):
    """Apply a game filter (POST) or re-render the filtered list (GET)."""
    if not logined():
        return takeMeHome()

    if request.method == 'POST':
        form = request.form
        up_now = form.get('upNow') or 1
        winners_lo = form.get('percentOfWinnersStart') or 0
        winners_hi = form.get('percentOfWinnersEnd') or 100
        ticket_lo = form.get('ticketStart') or 0
        ticket_hi = form.get('ticketEnd') or 10**6
        filterRollets(
            upNow=up_now,
            percentOfWinnersBetween=(winners_lo, winners_hi),
            ticketBetween=(ticket_lo, ticket_hi),
        )
    return index(pageNumber)
@app.route('/clearFilter/', methods=['GET','POST'])
def clearFilterGames():
    """Drop any stored rollet filter from the session, then go home."""
    if session.get('filteredRollets'):
        session.pop('filteredRollets')
    return takeMeHome()
@app.errorhandler(404)
def page_not_found(e):
    """Render the custom 404 page with the matching status code."""
    return renderTemplate('404.html'), 404
| 28.113924
| 88
| 0.724448
|
4a0ac7917cc05600075b21724a0723764a933e98
| 817
|
py
|
Python
|
concourse-driver-python/tests/__init__.py
|
surendranaidu/concourse
|
ba9d4bdad708f03b015a59cb49d8ba96cf2b1c6a
|
[
"ECL-2.0",
"Apache-2.0",
"MIT"
] | null | null | null |
concourse-driver-python/tests/__init__.py
|
surendranaidu/concourse
|
ba9d4bdad708f03b015a59cb49d8ba96cf2b1c6a
|
[
"ECL-2.0",
"Apache-2.0",
"MIT"
] | null | null | null |
concourse-driver-python/tests/__init__.py
|
surendranaidu/concourse
|
ba9d4bdad708f03b015a59cb49d8ba96cf2b1c6a
|
[
"ECL-2.0",
"Apache-2.0",
"MIT"
] | null | null | null |
# Copyright (c) 2013-2019 Cinchapi Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# Module author metadata.
__author__ = 'Jeff Nelson'
from nose.plugins.skip import SkipTest
def ignore(func):
    """Ignore a nose test using the @ignore decorator
    """
    # BUGFIX: the wrapper previously required exactly one positional
    # argument (``caller``), so plain test *functions* -- which nose calls
    # with no arguments -- crashed with a TypeError instead of being
    # skipped. Accept any call signature so both functions and methods work.
    def x(*args, **kwargs):
        raise SkipTest
    x.__name__ = func.__name__
    return x
| 31.423077
| 74
| 0.734394
|
4a0ac7d67f99669c44def1c9d0190ea85fe9a7e7
| 3,926
|
py
|
Python
|
xarray/backends/cfgrib_.py
|
mmann1123/xarray
|
a0c71c1508f34345ad7eef244cdbbe224e031c1b
|
[
"CC-BY-4.0",
"PSF-2.0",
"BSD-2-Clause",
"Apache-2.0",
"BSD-3-Clause"
] | null | null | null |
xarray/backends/cfgrib_.py
|
mmann1123/xarray
|
a0c71c1508f34345ad7eef244cdbbe224e031c1b
|
[
"CC-BY-4.0",
"PSF-2.0",
"BSD-2-Clause",
"Apache-2.0",
"BSD-3-Clause"
] | null | null | null |
xarray/backends/cfgrib_.py
|
mmann1123/xarray
|
a0c71c1508f34345ad7eef244cdbbe224e031c1b
|
[
"CC-BY-4.0",
"PSF-2.0",
"BSD-2-Clause",
"Apache-2.0",
"BSD-3-Clause"
] | null | null | null |
import os
import numpy as np
from ..core import indexing
from ..core.utils import Frozen, FrozenDict, close_on_error
from ..core.variable import Variable
from .common import (
BACKEND_ENTRYPOINTS,
AbstractDataStore,
BackendArray,
BackendEntrypoint,
)
from .locks import SerializableLock, ensure_lock
from .store import open_backend_dataset_store
# cfgrib is an optional dependency: remember whether it imported so the
# backend is only registered (at the bottom of the module) when available.
try:
    import cfgrib

    has_cfgrib = True
except ModuleNotFoundError:
    has_cfgrib = False

# FIXME: Add a dedicated lock, even if ecCodes is supposed to be thread-safe
# in most circumstances. See:
# https://confluence.ecmwf.int/display/ECC/Frequently+Asked+Questions
ECCODES_LOCK = SerializableLock()
class CfGribArrayWrapper(BackendArray):
    """Lazy array adapter exposing a cfgrib variable's data through
    xarray's explicit-indexing machinery."""

    def __init__(self, datastore, array):
        self.datastore = datastore
        self.array = array
        self.shape = array.shape
        self.dtype = array.dtype

    def __getitem__(self, key):
        # translate arbitrary xarray indexers down to the BASIC support
        # level the underlying cfgrib array provides
        return indexing.explicit_indexing_adapter(
            key, self.shape, indexing.IndexingSupport.BASIC, self._getitem
        )

    def _getitem(self, key):
        # serialize raw reads through the store's lock
        with self.datastore.lock:
            return self.array[key]
class CfGribDataStore(AbstractDataStore):
    """
    Implements the ``xr.AbstractDataStore`` read-only API for a GRIB file.
    """

    def __init__(self, filename, lock=None, **backend_kwargs):
        self.lock = ensure_lock(ECCODES_LOCK if lock is None else lock)
        self.ds = cfgrib.open_file(filename, **backend_kwargs)

    def open_store_variable(self, name, var):
        """Wrap one cfgrib variable as an xarray Variable, lazily unless
        the data is already an in-memory ndarray."""
        raw = var.data
        if isinstance(raw, np.ndarray):
            data = raw
        else:
            wrapped = CfGribArrayWrapper(self, raw)
            data = indexing.LazilyOuterIndexedArray(wrapped)

        encoding = self.ds.encoding.copy()
        encoding["original_shape"] = raw.shape
        return Variable(var.dimensions, data, var.attributes, encoding)

    def get_variables(self):
        return FrozenDict(
            (var_name, self.open_store_variable(var_name, var))
            for var_name, var in self.ds.variables.items()
        )

    def get_attrs(self):
        return Frozen(self.ds.attributes)

    def get_dimensions(self):
        return Frozen(self.ds.dimensions)

    def get_encoding(self):
        # dimensions with size None are treated as unlimited
        unlimited = {
            dim for dim, size in self.get_dimensions().items() if size is None
        }
        return {"unlimited_dims": unlimited}
def guess_can_open_cfgrib(store_spec):
    """Return True when *store_spec* is a path with a GRIB file extension."""
    try:
        ext = os.path.splitext(store_spec)[1]
    except TypeError:
        # non-path-like objects (file objects, numbers, ...) are rejected
        return False
    return ext in (".grib", ".grib2", ".grb", ".grb2")
def open_backend_dataset_cfgrib(
    filename_or_obj,
    *,
    mask_and_scale=True,
    decode_times=None,
    concat_characters=None,
    decode_coords=None,
    drop_variables=None,
    use_cftime=None,
    decode_timedelta=None,
    lock=None,
    indexpath="{path}.{short_hash}.idx",
    filter_by_keys=None,
    read_keys=None,
    encode_cf=("parameter", "time", "geography", "vertical"),
    squeeze=True,
    time_dims=("time", "step"),
):
    """Open a GRIB file via cfgrib and decode it into an xarray dataset.

    The ``mask_and_scale`` ... ``decode_timedelta`` arguments are the
    standard xarray decoding options; ``indexpath`` through ``time_dims``
    are forwarded to :func:`cfgrib.open_file`.
    """
    # BUGFIX: ``filter_by_keys={}`` and ``read_keys=[]`` were mutable
    # default arguments (one shared object across all calls); use ``None``
    # sentinels and materialize the former defaults here instead.
    if filter_by_keys is None:
        filter_by_keys = {}
    if read_keys is None:
        read_keys = []

    store = CfGribDataStore(
        filename_or_obj,
        indexpath=indexpath,
        filter_by_keys=filter_by_keys,
        read_keys=read_keys,
        encode_cf=encode_cf,
        squeeze=squeeze,
        time_dims=time_dims,
        lock=lock,
    )

    # ensure the store is closed if decoding fails partway through
    with close_on_error(store):
        ds = open_backend_dataset_store(
            store,
            mask_and_scale=mask_and_scale,
            decode_times=decode_times,
            concat_characters=concat_characters,
            decode_coords=decode_coords,
            drop_variables=drop_variables,
            use_cftime=use_cftime,
            decode_timedelta=decode_timedelta,
        )
    return ds
# Entrypoint object describing this backend to xarray's plugin system.
cfgrib_backend = BackendEntrypoint(
    open_dataset=open_backend_dataset_cfgrib, guess_can_open=guess_can_open_cfgrib
)

# Register the backend only when cfgrib actually imported above.
if has_cfgrib:
    BACKEND_ENTRYPOINTS["cfgrib"] = cfgrib_backend
| 26.527027
| 85
| 0.663271
|
4a0ac7dcf5191e6ebf65233ff289bf8e06174180
| 1,633
|
py
|
Python
|
pages/register.py
|
mektebi/UIMagentoPy
|
07444a223de0bd5e1c1bdbc362a9ed6aaa381d47
|
[
"MIT"
] | null | null | null |
pages/register.py
|
mektebi/UIMagentoPy
|
07444a223de0bd5e1c1bdbc362a9ed6aaa381d47
|
[
"MIT"
] | null | null | null |
pages/register.py
|
mektebi/UIMagentoPy
|
07444a223de0bd5e1c1bdbc362a9ed6aaa381d47
|
[
"MIT"
] | null | null | null |
"""
This module contains RegisterPage,
"""
from selenium.webdriver.common.by import By
from selenium.webdriver.common.keys import Keys
import names
class RegisterPage:
    """Page object for the customer registration form."""

    # URL
    URL = 'https://m2.leanscale.com/customer/account/create/'

    # Locators
    FIRSTNAME_INPUT = (By.ID, 'firstname')
    LASTNAME_INPUT = (By.ID, 'lastname')
    EMAIL_INPUT = (By.ID, 'email_address')
    PASSWORD_INPUT = (By.ID, 'password')
    CONFIRM_PASSWORD_INPUT = (By.ID, 'password-confirmation')
    CREATE_BUTTON = (By.XPATH, '//*[@id="form-validate"]/div/div[1]/button')
    ERROR_MESSAGE = (By.XPATH, '/html/body/div[1]/main/div[2]/div[2]/div/div/div')

    # Initializer
    def __init__(self, browser):
        """Keep a handle to the driving WebDriver instance."""
        self.browser = browser

    # Interaction Methods
    def load(self):
        """Open the registration page."""
        self.browser.get(self.URL)

    def register(self, fname, lname, email, password, confirm_password):
        """Fill every field of the sign-up form in order, then submit it."""
        field_values = (
            (self.FIRSTNAME_INPUT, fname),
            (self.LASTNAME_INPUT, lname),
            (self.EMAIL_INPUT, email),
            (self.PASSWORD_INPUT, password),
            (self.CONFIRM_PASSWORD_INPUT, confirm_password),
        )
        for locator, text in field_values:
            self.browser.find_element(*locator).send_keys(text)
        self.browser.find_element(*self.CREATE_BUTTON).click()

    def error_message(self):
        """Return the text of the banner shown after a failed attempt."""
        return self.browser.find_element(*self.ERROR_MESSAGE).text
| 28.155172
| 89
| 0.74158
|
4a0ac8b8380cefd7c6e9d3033363686516d78015
| 3,066
|
py
|
Python
|
leet/plan/algorithms/back_track/n_queen_problem_reference.py
|
manojkumar-github/DataStructures-DynamicProgramming-in-Python-JAVA-Cplusplus
|
16722a60c4c744ad3d240469b28f5d6ab6e9c25d
|
[
"MIT"
] | null | null | null |
leet/plan/algorithms/back_track/n_queen_problem_reference.py
|
manojkumar-github/DataStructures-DynamicProgramming-in-Python-JAVA-Cplusplus
|
16722a60c4c744ad3d240469b28f5d6ab6e9c25d
|
[
"MIT"
] | null | null | null |
leet/plan/algorithms/back_track/n_queen_problem_reference.py
|
manojkumar-github/DataStructures-DynamicProgramming-in-Python-JAVA-Cplusplus
|
16722a60c4c744ad3d240469b28f5d6ab6e9c25d
|
[
"MIT"
] | null | null | null |
#!/usr/bin.env python
# Copyright (C) Pearson Assessments - 2020. All Rights Reserved.
# Proprietary - Use with Pearson Written Permission Only
# Python3 program to solve N Queen
# Problem using backtracking
def printSolution(board):
    """Print the board as a space-separated grid, one row per line.

    Generalized: iterates the board itself instead of the original
    hard-coded ``range(4)``, so it works for any board size (still
    backward-compatible with the 4x4 boards used in this module).
    """
    for row in board:
        for cell in row:
            print(cell, end=" ")
        print()
# A utility function to check if a queen can
# be placed on board[row][col]. Note that this
# function is called when "col" queens are
# already placed in columns from 0 to col -1.
# So we need to check only left side for
# attacking queens
def isSafe(board, row, col, N):
    """Return True if a queen at (row, col) is not attacked by any queen
    already placed in columns 0 .. col-1.

    Columns to the right of ``col`` are still empty at call time, so only
    the left side of the board needs checking.
    """
    # Same row, to the left.
    if any(board[row][c] == 1 for c in range(col)):
        return False
    # Upper-left diagonal.
    if any(board[r][c] == 1
           for r, c in zip(range(row, -1, -1), range(col, -1, -1))):
        return False
    # Lower-left diagonal.
    if any(board[r][c] == 1
           for r, c in zip(range(row, N, 1), range(col, -1, -1))):
        return False
    return True
def solveNQUtil(board, col, N):
    """Backtracking worker: try to place queens in columns col .. N-1.

    ``board`` already holds queens in columns 0 .. col-1.  Returns True
    and leaves the queens on the board when a full placement exists;
    otherwise restores the board and returns False.
    """
    # Base case: every column is filled — a complete solution exists.
    if col >= N:
        return True
    for row in range(N):
        if not isSafe(board, row, col, N):
            continue
        # Tentatively place a queen here and recurse on the next column.
        board[row][col] = 1
        if solveNQUtil(board, col + 1, N):
            return True
        # Dead end below this placement: undo it and try the next row.
        board[row][col] = 0
    # No row in this column is compatible with the current prefix.
    return False
def is_configuration_exist(board, col_ix, N):
    """Report whether queens can be placed in columns col_ix .. N-1 given
    the queens already on ``board`` (backtracking twin of solveNQUtil).

    Bug fix: the original called an undefined name ``is_safe`` — the helper
    in this module is spelled ``isSafe`` — so any call raised NameError.
    """
    if col_ix >= N:
        # we have filled all columns
        return True
    for row_ix in range(N):
        if isSafe(board, row_ix, col_ix, N):
            board[row_ix][col_ix] = 1
            # perform recursion to fill the next queen
            if is_configuration_exist(board, col_ix + 1, N):
                return True
            # back-tracking step: undo the placement and try the next row
            board[row_ix][col_ix] = 0
    return False
# This function solves the N Queen problem using
# Backtracking. It mainly uses solveNQUtil() to
# solve the problem. It returns false if queens
# cannot be placed, otherwise return true and
# placement of queens in the form of 1s.
# note that there may be more than one
# solutions, this function prints one of the
# feasible solutions.
def solveNQ():
    """Solve the 4-queens problem.

    Prints one feasible placement (as 0/1 cells) and returns True, or
    prints a failure message and returns False.
    """
    size = 4
    board = [[0] * size for _ in range(size)]
    if not solveNQUtil(board, 0, size):
        print("Solution does not exist")
        return False
    printSolution(board)
    return True
# Driver Code
# Running the module as a script solves the default 4x4 board and prints it.
solveNQ()
# This code is contributed by Divyanshu Mehta
| 26.205128
| 68
| 0.576321
|
4a0ac907755b5da1faa44da45835503fcb3405e4
| 2,023
|
py
|
Python
|
python/examples/read_any_param_cache.py
|
rozum-robotics/Rozum-Servo-Drives-API
|
6657372870e6235089e38a68a422ea2d4dc4b0c3
|
[
"Apache-2.0"
] | 8
|
2018-07-24T13:32:31.000Z
|
2021-09-03T13:52:35.000Z
|
python/examples/read_any_param_cache.py
|
rozum-robotics/Rozum-Servo-Drives-API
|
6657372870e6235089e38a68a422ea2d4dc4b0c3
|
[
"Apache-2.0"
] | 4
|
2018-09-14T09:27:20.000Z
|
2021-11-16T04:08:53.000Z
|
python/examples/read_any_param_cache.py
|
rozum-robotics/Rozum-Servo-Drives-API
|
6657372870e6235089e38a68a422ea2d4dc4b0c3
|
[
"Apache-2.0"
] | 6
|
2018-07-28T00:52:38.000Z
|
2021-02-07T18:52:19.000Z
|
""" @page tutor_py_param_cache Reading few parameters synchronously
Complete source code: <br>
\snippet read_any_param_cache.py read_any_param_cache_py
"""
"""! [read_any_param_cache_py] """
import logging
import os
import argparse
import rdrive as rr
logging.basicConfig()
logger = logging.getLogger(os.path.basename(__file__))
logger.setLevel(logging.INFO)
parser = argparse.ArgumentParser()
parser.add_argument(
"--servo_1_id", type=int, help="first servo ID that you want control"
)
parser.add_argument("--interface", type=str, help="interface name")
args = parser.parse_args()
INTERFACE_NAME = args.interface
SERVO_1_ID = args.servo_1_id
if __name__ == "__main__":
logger.info("Initializing ServoApi")
api = rr.ServoApi()
logger.info("Initializing interface {}".format(INTERFACE_NAME))
interface = api.init_interface(INTERFACE_NAME)
logger.info("Initializing servo id {}".format(SERVO_1_ID))
servo = interface.init_servo(SERVO_1_ID)
logger.info("Setting servo to operational state")
servo.set_state_operational()
logger.info("Setting cache")
servo.param_cache_setup_entry(rr.APP_PARAM_POSITION_ROTOR, True)
servo.param_cache_setup_entry(rr.APP_PARAM_VELOCITY_ROTOR, True)
servo.param_cache_setup_entry(rr.APP_PARAM_VOLTAGE_INPUT, True)
servo.param_cache_setup_entry(rr.APP_PARAM_CURRENT_INPUT, True)
logger.info("Updating cache")
servo.param_cache_update()
logger.info("Reading cache")
position_rotor = servo.read_cached_parameter(rr.APP_PARAM_POSITION_ROTOR)
velocity_rotor = servo.read_cached_parameter(rr.APP_PARAM_VELOCITY_ROTOR)
voltage_input = servo.read_cached_parameter(rr.APP_PARAM_VOLTAGE_INPUT)
current_input = servo.read_cached_parameter(rr.APP_PARAM_CURRENT_INPUT)
logger.info(
"\nposition_rotor = {}\nvelocity_rotor = {}\nvoltage_input = {}\ncurrent_input = {}".format(
position_rotor, velocity_rotor, voltage_input, current_input
)
)
"""! [read_any_param_cache_py] """
| 32.629032
| 100
| 0.759763
|
4a0ac9f57642712c83558be721258def245d308d
| 1,733
|
py
|
Python
|
draw.py
|
hkxIron/pencil-python
|
27746bd957ed14b866ce9def2f3a67cb93ff12b2
|
[
"MIT"
] | null | null | null |
draw.py
|
hkxIron/pencil-python
|
27746bd957ed14b866ce9def2f3a67cb93ff12b2
|
[
"MIT"
] | null | null | null |
draw.py
|
hkxIron/pencil-python
|
27746bd957ed14b866ce9def2f3a67cb93ff12b2
|
[
"MIT"
] | null | null | null |
#!/usr/bin/env python
# encoding: utf-8
import time
from pencil import pencil_draw
from color_pencil import color_draw
import argparse
# CLI driver: choose pencil-sketch and/or colored-pencil rendering for one image.
parser = argparse.ArgumentParser(description='Pencil Drawing Program. '
                                             'You will get the productions at the output folder.')
parser.add_argument('--p', action='store_true', default=False,
                    dest='p', help='Add this when you want to try pencil drawing.')
parser.add_argument('--c', action='store_true', default=False,
                    dest='c', help='Add this when you want to try color pencil drawing, '
                                   'please make sure you get opencv installed in your environment.')
parser.add_argument('-img', dest='image', type=str, default='input/sjtu.jpg',
                    help="The path of image you want to try, default is 'img/sjtu.jpg'.")
parser.add_argument('-s', dest="gammaS", type=float, default=1,
                    help='Larger when you want the line of strokes darker, default value is 1.')
parser.add_argument('-i', dest='gammaI', type=float, default=1,
                    help='Larger when you want the color of productions deeper, default value is 1.')
args = parser.parse_args()
# Default to plain pencil drawing when neither mode flag was given.
if not args.p and not args.c:
    args.p = True
if args.p:
    start = time.time()
    print('pencil draw begin')
    pencil_draw(path=args.image, gammaS=args.gammaS, gammaI=args.gammaI)
    print('pencil drawing end')
    print('time consumes: {0:.2f}s'.format(time.time() - start))
if args.c:
    start = time.time()
    print('color pencil draw begin')
    color_draw(path=args.image, gammaS=args.gammaS, gammaI=args.gammaI)
    print('time consumes: {0:.2f}s'.format(time.time() - start))
| 38.511111
| 101
| 0.64397
|
4a0acae9ab096ad1e7961b687f6cf792c2554cb2
| 2,781
|
py
|
Python
|
commands/stats_commands.py
|
DamourYouKnow/HAHA-NO-4STAR
|
b34bc9634c3822c0b08d22d8a2666edce958bb6c
|
[
"MIT"
] | 2
|
2019-06-28T09:33:53.000Z
|
2022-02-25T08:39:42.000Z
|
commands/stats_commands.py
|
DamourYouKnow/HAHA-NO-4STAR
|
b34bc9634c3822c0b08d22d8a2666edce958bb6c
|
[
"MIT"
] | 11
|
2018-04-06T22:34:05.000Z
|
2022-01-13T00:43:34.000Z
|
commands/stats_commands.py
|
DamourYouKnow/HAHA-NO-4STAR
|
b34bc9634c3822c0b08d22d8a2666edce958bb6c
|
[
"MIT"
] | 1
|
2021-09-10T14:30:16.000Z
|
2021-09-10T14:30:16.000Z
|
import discord
from discord.ext import commands
from core.checks import check_mongo
from bot import HahaNo4Star
class Stats:
    """Discord command cog exposing per-user and bot-wide card statistics."""
    def __init__(self, bot: HahaNo4Star):
        # Bot instance gives access to the DB layer and message sending.
        self.bot = bot
    @commands.command(pass_context=True, aliases=['stats'])
    @commands.cooldown(rate=3, per=10, type=commands.BucketType.user)
    @commands.check(check_mongo)
    async def mystats(self, ctx, *args: str):
        """
        Description: |
            Provides stats about you.
        """
        user_id = ctx.message.author.id
        stats = []
        # Tally the caller's album once, then derive every stat from the counter.
        album = await self.bot.db.users.get_user_album(user_id, True)
        counter = AlbumCounter(album)
        counter.run_count()
        stats.append(('Unique cards collected', counter.distinct_count))
        stats.append(('Total cards', counter.total_count))
        for rarity, count in counter.rarity_counts.items():
            stats.append((str(rarity) + ' star cards', count))
        for attribute, count in counter.attribute_counts.items():
            stats.append((attribute + ' cards', count))
        emb = _create_embed('Stats for ' + ctx.message.author.name, stats)
        await self.bot.send_message(ctx.message.channel, embed=emb)
    @commands.command(pass_context=True)
    @commands.cooldown(rate=3, per=10, type=commands.BucketType.user)
    @commands.check(check_mongo)
    async def botstats(self, ctx, *args: str):
        """
        Description: |
            Provides stats about the bot.
        """
        stats = []
        stats.append(('Servers', len(self.bot.servers)))
        stats.append(('Users', await self.bot.db.users.get_user_count()))
        emb = _create_embed('My stats', stats)
        await self.bot.send_message(ctx.message.channel, embed=emb)
class AlbumCounter:
    """Accumulates card statistics over an album.

    ``album`` is a list of card dicts carrying 'count', 'i_rarity' (1-4)
    and 'i_attribute' keys; ``run_count`` fills in the totals.
    """

    def __init__(self, album):
        self.album = album
        # Rarities run from 1 to 4 stars.
        self.rarity_counts = {star: 0 for star in (1, 2, 3, 4)}
        self.attribute_counts = {
            'Power': 0,
            'Pure': 0,
            'Cool': 0,
            'Happy': 0
        }
        self.total_count = 0
        self.distinct_count = 0

    def run_count(self):
        """Walk the album once and populate every counter."""
        for entry in self.album:
            copies = entry['count']
            self.total_count += copies
            self.rarity_counts[entry['i_rarity']] += copies
            self.attribute_counts[entry['i_attribute']] += copies
        self.distinct_count = len(self.album)
def _create_embed(title: str, stats: list):
    """
    Create a stats embed whose description lists one "name: value" per line.
    :param title: Title of embed.
    :param stats: List of tuples (stat name, stat value).
    """
    lines = [str(name) + ': ' + str(value) for name, value in stats]
    return discord.Embed(title=title, description='\n'.join(lines))
| 30.9
| 74
| 0.595829
|
4a0acb8c6e83ed75bc883080ad5dfae7ed926703
| 1,416
|
py
|
Python
|
cleanflux/proxy/http_request.py
|
Transatel/cleanflux
|
a0f7e5fad7f5a9d3265acb79bc23fe7a0830b66b
|
[
"MIT"
] | 38
|
2020-01-15T12:56:42.000Z
|
2022-03-16T08:54:48.000Z
|
cleanflux/proxy/http_request.py
|
Transatel/cleanflux
|
a0f7e5fad7f5a9d3265acb79bc23fe7a0830b66b
|
[
"MIT"
] | 1
|
2020-01-15T18:56:22.000Z
|
2020-01-15T18:57:31.000Z
|
cleanflux/proxy/http_request.py
|
Transatel/cleanflux
|
a0f7e5fad7f5a9d3265acb79bc23fe7a0830b66b
|
[
"MIT"
] | 3
|
2020-01-15T12:56:44.000Z
|
2022-02-17T19:41:06.000Z
|
from http.client import HTTPSConnection, HTTPConnection, IncompleteRead
import urllib.parse
import threading
class HTTPRequest(object):
    """
    A simple, thread-safe wrapper around HTTP(S)Connection.

    Connections are pooled per (scheme, netloc) origin, and pools are
    per-thread (``threading.local``), so one instance may be shared
    between threads.
    """

    def __init__(self):
        self.tls = threading.local()
        self.tls.conns = {}

    def request(self, url, body=None, headers=None, timeout=45, max_retries=3, method="GET"):
        """Issue an HTTP request with retries.

        Returns the ``http.client.HTTPResponse`` (or, on a truncated read,
        the partial body).  Re-raises the last error once all
        ``max_retries`` attempts are exhausted.

        Bug fix: the original loop ran ``range(1, max_retries)`` and then
        tested ``i >= max_retries`` — a condition that could never be true —
        so only max_retries-1 attempts were made and the final failure was
        silently swallowed (the method returned None).
        """
        if headers is None:
            headers = dict()
        parsed = urllib.parse.urlsplit(url)
        origin = (parsed.scheme, parsed.netloc)
        for attempt in range(1, max_retries + 1):
            try:
                conn = self.create_conn(parsed, origin, timeout)
                conn.request(method, url, body=body, headers=headers)
                return conn.getresponse()
            except IncompleteRead as e:
                # Server closed early: hand back whatever arrived.
                return e.partial
            except Exception:
                # Drop the (possibly broken) pooled connection for this origin.
                self._thread_conns().pop(origin, None)
                if attempt >= max_retries:
                    raise

    def _thread_conns(self):
        """Return this thread's connection pool, creating it on first use.

        Bug fix: ``__init__`` initialises ``tls.conns`` only for the thread
        that constructed the object; any other thread touching
        ``self.tls.conns`` directly raised AttributeError.
        """
        conns = getattr(self.tls, "conns", None)
        if conns is None:
            conns = self.tls.conns = {}
        return conns

    def create_conn(self, parsed, origin, timeout):
        """Return the pooled connection for ``origin``, creating one lazily."""
        conns = self._thread_conns()
        if origin not in conns:
            cls = HTTPSConnection if parsed.scheme == 'https' else HTTPConnection
            conns[origin] = cls(parsed.netloc, timeout=timeout)
        return conns[origin]
| 33.714286
| 93
| 0.586864
|
4a0acd737cf7217f1dff9d626632c3a883dd1896
| 2,437
|
py
|
Python
|
MFWSpider/MFWSpider/spiders/places.py
|
Karmenzind/mfw
|
29bee5a2e0ebc5115a2703d2f5e9c774f76cd07a
|
[
"MIT"
] | 18
|
2018-08-27T19:57:27.000Z
|
2022-03-09T15:54:52.000Z
|
MFWSpider/MFWSpider/spiders/places.py
|
Mistakey/mfw
|
46c750f692d781c8d2b28e6002d9a86d0df67657
|
[
"MIT"
] | null | null | null |
MFWSpider/MFWSpider/spiders/places.py
|
Mistakey/mfw
|
46c750f692d781c8d2b28e6002d9a86d0df67657
|
[
"MIT"
] | 1
|
2021-01-26T15:10:32.000Z
|
2021-01-26T15:10:32.000Z
|
# -*- coding: utf-8 -*-
import re
from urllib.parse import urljoin
from MFWSpider.items import Place
from MFWSpider.pipelines import MfwspiderPipeline
from scrapy.conf import settings
from scrapy.http import Request
from scrapy.spiders import CrawlSpider
from scrapy_splash import SplashRequest
# Module-level pipeline instance, reused here as a MongoDB accessor for
# seeding requests from already-stored place documents.
db = MfwspiderPipeline()
class PlacesSpider(CrawlSpider):
    """Backfills coordinates (lat/lng) — and, for POIs, addresses — for
    place documents already stored in MongoDB that lack them."""
    name = 'places'
    allowed_domains = ['www.mafengwo.cn']
    base = 'http://www.mafengwo.cn'
    def start_requests(self):
        # Only revisit documents that still lack a latitude.
        spec = {"lat": {"$exists": False}}
        # spec['p_type'] = 'poi'
        gen = db.place.find(spec)
        if settings.get("IS_TEST"):
            gen = gen.limit(10)
        for doc in gen:
            href = doc.get('href')
            if not href:
                continue
            url = urljoin(self.base, href)
            # POI pages go through Splash (JS rendering); destination pages
            # are fetched as plain requests.
            if doc.get('p_type') == 'poi':
                yield SplashRequest(url,
                                    callback=self.parse_poi,
                                    meta={"_href": href})
            elif doc.get('p_type') == 'dest':
                yield Request(url,
                              callback=self.parse_dest,
                              meta={'_href': href})
    def parse_poi(self, response):
        # Prefer the structured data-lat/data-lng attributes; fall back to
        # scraping the inline map-initialisation JavaScript.
        item = Place()
        coord_sel = response.xpath('//div[@class="m-poi"][1]//li[1]')
        item['lat'] = coord_sel.xpath('@data-lat').get()
        item['lng'] = coord_sel.xpath('@data-lng').get()
        if not (item['lat'] and item['lng']):
            self.get_coor_from_js(response, item)
        item['address'] = response.xpath('//div[@class="mhd"]/p/text()').get()
        yield self.check_crawled(item, response)
    def parse_dest(self, response):
        # Destination pages only carry coordinates in inline JS.
        item = Place()
        self.get_coor_from_js(response, item)
        yield self.check_crawled(item, response)
    def check_crawled(self, item: Place, response):
        # Attach the original href (the DB key) only when extraction succeeded.
        if item['lat'] and item['lng']:
            item['href'] = response.meta['_href']
            return item
        raise AssertionError("Failed to get coordinates for %s" % response.url)
    def get_coor_from_js(self, response, item):
        # Grab the <script> that configures the map (matched via "zoom"),
        # strip whitespace, then pull lat/lng out of it.
        # NOTE(review): the '.' in '\d+.\d+' is unescaped, so it matches any
        # character, not just the decimal point — consider r'\d+\.\d+'.
        coor_js_raw = response.xpath(
            '//script[re:match(text(), "zoom")]/text()').get()
        coor_js = re.sub('[\n\s\t]+', '', str(coor_js_raw))
        item['lat'], item['lng'] = re.search(r"lat':(\d+.\d+),'lng':(\d+.\d+)",
                                             coor_js).groups()
| 32.932432
| 79
| 0.546984
|
4a0acd9f659098ea4105205d89f3ad82c593574b
| 2,796
|
py
|
Python
|
source/strikeamatch.py
|
Rik89/Publicidad_Web
|
c1ea61154a7d3ed866f6b28d88e24b9523203711
|
[
"MIT"
] | null | null | null |
source/strikeamatch.py
|
Rik89/Publicidad_Web
|
c1ea61154a7d3ed866f6b28d88e24b9523203711
|
[
"MIT"
] | null | null | null |
source/strikeamatch.py
|
Rik89/Publicidad_Web
|
c1ea61154a7d3ed866f6b28d88e24b9523203711
|
[
"MIT"
] | null | null | null |
def _get_character_pairs(text):
"""Returns a defaultdict(int) of adjacent character pair counts.
>>> _get_character_pairs('Test IS')
{'IS': 1, 'TE': 1, 'ES': 1, 'ST': 1}
>>> _get_character_pairs('Test 123')
{'23': 1, '12': 1, 'TE': 1, 'ES': 1, 'ST': 1}
>>> _get_character_pairs('Test TEST')
{'TE': 2, 'ES': 2, 'ST': 2}
>>> _get_character_pairs('ai a al a')
{'AI': 1, 'AL': 1}
>>> _get_character_pairs('12345')
{'34': 1, '12': 1, '45': 1, '23': 1}
>>> _get_character_pairs('A')
{}
>>> _get_character_pairs('A B')
{}
>>> _get_character_pairs(123)
Traceback (most recent call last):
File "<stdin>", line 1, in <module>
File "strikeamatch.py", line 31, in _get_character_pairs
if not hasattr(text, "upper"): raise ValueError
ValueError: Invalid argument
"""
if not hasattr(text, "upper"):
raise ValueError("Invalid argument")
results = dict()
for word in text.upper().split():
for pair in [word[i]+word[i+1] for i in range(len(word)-1)]:
if pair in results:
results[pair] += 1
else:
results[pair] = 1
return results
def compare_strings(string1, string2):
    """Return a similarity score between 0.0 and 1.0 for the two strings.

    Implements Simon White's "strike a match" metric: the Dice coefficient
    over adjacent character pairs.  1.0 is a perfect match, 0.0 means no
    similarity.  (Docstring translated from Spanish.)

    Bug fix: when neither string contains a character pair (e.g. "A" vs
    "B") the original divided by zero; we now fall back to comparing the
    normalised (upper-cased, whitespace-split) words directly.

    >>> for w in ('Sealed', 'Healthy', 'Heard', 'Herded', 'Help', 'Sold'):
    ...     compare_strings('Healed', w)
    ...
    0.8
    0.5454545454545454
    0.4444444444444444
    0.4
    0.25
    0.0

    >>> compare_strings("Horse", "Horse box")
    0.8

    >>> compare_strings("Horse BOX", "Horse box")
    1.0

    >>> compare_strings("ABCD", "AB") == compare_strings("AB", "ABCD")
    True

    >>> compare_strings("A", "B")
    0.0
    """
    s1_pairs = _get_character_pairs(string1)
    s2_pairs = _get_character_pairs(string2)

    s1_size = sum(s1_pairs.values())
    s2_size = sum(s2_pairs.values())

    # No pairs on either side: avoid ZeroDivisionError (see docstring).
    if s1_size + s2_size == 0:
        return 1.0 if string1.upper().split() == string2.upper().split() else 0.0

    # Iterate over the smaller pair-dict to minimise work.
    if s1_size < s2_size:
        smaller_dict, larger_dict = s1_pairs, s2_pairs
    else:
        smaller_dict, larger_dict = s2_pairs, s1_pairs

    # The multiset intersection counts each pair min(count1, count2) times.
    intersection_count = 0
    for pair, smaller_pair_count in smaller_dict.items():
        if larger_dict.get(pair, 0) > 0:
            intersection_count += min(smaller_pair_count, larger_dict[pair])

    return (2.0 * intersection_count) / (s1_size + s2_size)
if __name__ == "__main__":
import doctest
doctest.testmod()
| 27.96
| 91
| 0.587983
|
4a0acfb50307643c0f2f857b9b14ddb94c1e3090
| 10,904
|
py
|
Python
|
platformio/home/rpc/handlers/project.py
|
xeno010/platformio-core
|
94f8afec38fc8d35db1055368f5fbe4e67c89e7e
|
[
"Apache-2.0"
] | null | null | null |
platformio/home/rpc/handlers/project.py
|
xeno010/platformio-core
|
94f8afec38fc8d35db1055368f5fbe4e67c89e7e
|
[
"Apache-2.0"
] | null | null | null |
platformio/home/rpc/handlers/project.py
|
xeno010/platformio-core
|
94f8afec38fc8d35db1055368f5fbe4e67c89e7e
|
[
"Apache-2.0"
] | null | null | null |
# Copyright (c) 2014-present PlatformIO <contact@platformio.org>
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from __future__ import absolute_import
import os
import shutil
import time
from os.path import basename, expanduser, getmtime, isdir, isfile, join, realpath, sep
import jsonrpc # pylint: disable=import-error
from platformio import exception, fs
from platformio.compat import PY2, get_filesystem_encoding
from platformio.home.rpc.handlers.app import AppRPC
from platformio.home.rpc.handlers.piocore import PIOCoreRPC
from platformio.ide.projectgenerator import ProjectGenerator
from platformio.managers.platform import PlatformManager
from platformio.project.config import ProjectConfig
from platformio.project.helpers import (
get_project_libdeps_dir,
get_project_src_dir,
is_platformio_project,
)
class ProjectRPC(object):
    """JSON-RPC handlers for PlatformIO Home project operations: listing
    known projects, enumerating platform examples, initialising new
    projects, and importing Arduino / PlatformIO projects.

    Methods that shell out to the PIO core via ``PIOCoreRPC.call`` return
    deferreds; callbacks are chained to post-process the result.
    """
    @staticmethod
    def _get_projects(project_dirs=None):
        """Build a summary dict for each project dir (defaults to the
        'recent projects' list from the persisted Home state)."""
        def _get_project_data(project_dir):
            # Collect boards and library dirs declared in platformio.ini.
            data = {"boards": [], "envLibdepsDirs": [], "libExtraDirs": []}
            config = ProjectConfig(join(project_dir, "platformio.ini"))
            libdeps_dir = get_project_libdeps_dir()
            data["libExtraDirs"].extend(config.get("platformio", "lib_extra_dirs", []))
            for section in config.sections():
                if not section.startswith("env:"):
                    continue
                data["envLibdepsDirs"].append(join(libdeps_dir, section[4:]))
                if config.has_option(section, "board"):
                    data["boards"].append(config.get(section, "board"))
                data["libExtraDirs"].extend(config.get(section, "lib_extra_dirs", []))
            # skip non existing folders and resolve full path
            for key in ("envLibdepsDirs", "libExtraDirs"):
                data[key] = [
                    expanduser(d) if d.startswith("~") else realpath(d)
                    for d in data[key]
                    if isdir(d)
                ]
            return data
        def _path_to_name(path):
            # Display name: last two path components joined by the separator.
            return (sep).join(path.split(sep)[-2:])
        if not project_dirs:
            project_dirs = AppRPC.load_state()["storage"]["recentProjects"]
        result = []
        pm = PlatformManager()
        for project_dir in project_dirs:
            data = {}
            boards = []
            try:
                with fs.cd(project_dir):
                    data = _get_project_data(project_dir)
            except exception.PlatformIOProjectException:
                # Not a valid PIO project any more: drop it from the listing.
                continue
            for board_id in data.get("boards", []):
                name = board_id
                try:
                    # Prefer the human-readable board name when available.
                    name = pm.board_config(board_id)["name"]
                except exception.PlatformioException:
                    pass
                boards.append({"id": board_id, "name": name})
            result.append(
                {
                    "path": project_dir,
                    "name": _path_to_name(project_dir),
                    "modified": int(getmtime(project_dir)),
                    "boards": boards,
                    "envLibStorages": [
                        {"name": basename(d), "path": d}
                        for d in data.get("envLibdepsDirs", [])
                    ],
                    "extraLibStorages": [
                        {"name": _path_to_name(d), "path": d}
                        for d in data.get("libExtraDirs", [])
                    ],
                }
            )
        return result
    def get_projects(self, project_dirs=None):
        """RPC entry point: thin wrapper over _get_projects."""
        return self._get_projects(project_dirs)
    @staticmethod
    def get_project_examples():
        """Enumerate example projects shipped with installed platforms,
        grouped per platform and sorted by title/name."""
        result = []
        for manifest in PlatformManager().get_installed():
            examples_dir = join(manifest["__pkg_dir"], "examples")
            if not isdir(examples_dir):
                continue
            items = []
            for project_dir, _, __ in os.walk(examples_dir):
                project_description = None
                try:
                    config = ProjectConfig(join(project_dir, "platformio.ini"))
                    config.validate(silent=True)
                    project_description = config.get("platformio", "description")
                except exception.PlatformIOProjectException:
                    # Directory without a valid platformio.ini: not an example.
                    continue
                path_tokens = project_dir.split(sep)
                items.append(
                    {
                        "name": "/".join(
                            path_tokens[path_tokens.index("examples") + 1 :]
                        ),
                        "path": project_dir,
                        "description": project_description,
                    }
                )
            result.append(
                {
                    "platform": {
                        "title": manifest["title"],
                        "version": manifest["version"],
                    },
                    "items": sorted(items, key=lambda item: item["name"]),
                }
            )
        return sorted(result, key=lambda data: data["platform"]["title"])
    def init(self, board, framework, project_dir):
        """Initialise a new project via `pio init`, then scaffold main.cpp.

        Returns a deferred resolving to the project directory.
        """
        assert project_dir
        state = AppRPC.load_state()
        if not isdir(project_dir):
            os.makedirs(project_dir)
        args = ["init", "--board", board]
        if framework:
            args.extend(["--project-option", "framework = %s" % framework])
        # Generate IDE metadata when Home was launched from a supported IDE.
        if (
            state["storage"]["coreCaller"]
            and state["storage"]["coreCaller"] in ProjectGenerator.get_supported_ides()
        ):
            args.extend(["--ide", state["storage"]["coreCaller"]])
        d = PIOCoreRPC.call(args, options={"cwd": project_dir})
        d.addCallback(self._generate_project_main, project_dir, framework)
        return d
    @staticmethod
    def _generate_project_main(_, project_dir, framework):
        """Write a framework-specific skeleton src/main.cpp (arduino/mbed
        only); never overwrites an existing main.cpp."""
        main_content = None
        if framework == "arduino":
            main_content = "\n".join(
                [
                    "#include <Arduino.h>",
                    "",
                    "void setup() {",
                    "  // put your setup code here, to run once:",
                    "}",
                    "",
                    "void loop() {",
                    "  // put your main code here, to run repeatedly:",
                    "}" "",
                ]
            )  # yapf: disable
        elif framework == "mbed":
            main_content = "\n".join(
                [
                    "#include <mbed.h>",
                    "",
                    "int main() {",
                    "",
                    "  // put your setup code here, to run once:",
                    "",
                    "  while(1) {",
                    "    // put your main code here, to run repeatedly:",
                    "  }",
                    "}",
                    "",
                ]
            )  # yapf: disable
        if not main_content:
            return project_dir
        with fs.cd(project_dir):
            src_dir = get_project_src_dir()
            main_path = join(src_dir, "main.cpp")
            if isfile(main_path):
                return project_dir
            if not isdir(src_dir):
                os.makedirs(src_dir)
            with open(main_path, "w") as f:
                f.write(main_content.strip())
        return project_dir
    def import_arduino(self, board, use_arduino_libs, arduino_project_dir):
        """Import an Arduino sketch folder as a new timestamped PIO project.

        Raises a JSON-RPC error (code 4000) when the folder does not contain
        a matching .ino/.pde sketch.  Returns a deferred resolving to the
        new project directory.
        """
        board = str(board)
        if arduino_project_dir and PY2:
            arduino_project_dir = arduino_project_dir.encode(get_filesystem_encoding())
        # don't import PIO Project
        if is_platformio_project(arduino_project_dir):
            return arduino_project_dir
        # An Arduino sketch folder contains <foldername>.ino or .pde.
        is_arduino_project = any(
            [
                isfile(
                    join(
                        arduino_project_dir,
                        "%s.%s" % (basename(arduino_project_dir), ext),
                    )
                )
                for ext in ("ino", "pde")
            ]
        )
        if not is_arduino_project:
            raise jsonrpc.exceptions.JSONRPCDispatchException(
                code=4000, message="Not an Arduino project: %s" % arduino_project_dir
            )
        state = AppRPC.load_state()
        project_dir = join(
            state["storage"]["projectsDir"], time.strftime("%y%m%d-%H%M%S-") + board
        )
        if not isdir(project_dir):
            os.makedirs(project_dir)
        args = ["init", "--board", board]
        args.extend(["--project-option", "framework = arduino"])
        if use_arduino_libs:
            args.extend(
                ["--project-option", "lib_extra_dirs = ~/Documents/Arduino/libraries"]
            )
        if (
            state["storage"]["coreCaller"]
            and state["storage"]["coreCaller"] in ProjectGenerator.get_supported_ides()
        ):
            args.extend(["--ide", state["storage"]["coreCaller"]])
        d = PIOCoreRPC.call(args, options={"cwd": project_dir})
        d.addCallback(self._finalize_arduino_import, project_dir, arduino_project_dir)
        return d
    @staticmethod
    def _finalize_arduino_import(_, project_dir, arduino_project_dir):
        """Replace the generated src/ with a copy of the Arduino sketch."""
        with fs.cd(project_dir):
            src_dir = get_project_src_dir()
            if isdir(src_dir):
                fs.rmtree(src_dir)
            shutil.copytree(arduino_project_dir, src_dir)
        return project_dir
    @staticmethod
    def import_pio(project_dir):
        """Copy an existing PIO project into the projects dir (timestamped)
        and re-run `pio init` there.  Raises JSON-RPC error 4001 when the
        source is not a PIO project.  Returns a deferred resolving to the
        new project directory.
        """
        if not project_dir or not is_platformio_project(project_dir):
            raise jsonrpc.exceptions.JSONRPCDispatchException(
                code=4001, message="Not an PlatformIO project: %s" % project_dir
            )
        new_project_dir = join(
            AppRPC.load_state()["storage"]["projectsDir"],
            time.strftime("%y%m%d-%H%M%S-") + basename(project_dir),
        )
        shutil.copytree(project_dir, new_project_dir)
        state = AppRPC.load_state()
        args = ["init"]
        if (
            state["storage"]["coreCaller"]
            and state["storage"]["coreCaller"] in ProjectGenerator.get_supported_ides()
        ):
            args.extend(["--ide", state["storage"]["coreCaller"]])
        d = PIOCoreRPC.call(args, options={"cwd": new_project_dir})
        d.addCallback(lambda _: new_project_dir)
        return d
| 37.730104
| 87
| 0.532373
|
4a0acfea20c4a3042ebd5aacc9b8fc2e00b5b547
| 24,864
|
py
|
Python
|
src/plant_energyse/openwind/openWindComponent.py
|
WISDEM/Plant_EnergySE
|
5ca898bf65b63fd1a87a40241591866f5f0b185a
|
[
"Apache-2.0"
] | 1
|
2019-02-26T17:54:14.000Z
|
2019-02-26T17:54:14.000Z
|
src/plant_energyse/openwind/openWindComponent.py
|
WISDEM/Plant_EnergySE
|
5ca898bf65b63fd1a87a40241591866f5f0b185a
|
[
"Apache-2.0"
] | null | null | null |
src/plant_energyse/openwind/openWindComponent.py
|
WISDEM/Plant_EnergySE
|
5ca898bf65b63fd1a87a40241591866f5f0b185a
|
[
"Apache-2.0"
] | 1
|
2021-04-19T18:40:39.000Z
|
2021-04-19T18:40:39.000Z
|
# openWindComponent.py
# 2014 10 16
'''
Execute OpenWind as an OpenMDAO Component
2014 10 16: this is based on openWindAcComponent.py
BUT: the 'academic' version of OpenWind has gone away
The new Enterprise OW included the functions of the former academic version.
They are enabled by setting the following line in OpenWind64.ini:
ExternalOptimiser Yes
__init__() has a new argument: extOpt=False
If set to True, OWcomp will have 'academic' functionality
After execute(), the following variables have been updated:
nTurbs
net_aep
gross_aep
They can be accessed through appropriate connections.
NOTE: Script file must contain an Optimize/Optimise operation - otherwise,
no results will be found.
Typical use (e.g., from an Assembly):
ow = OWcomp(owExe, scriptFile=scrptName, debug=False, stopOW=True, start_once=False, opt_log=False)
example() runs OWcomp.execute() 3 times, moving and modifying the turbines each time
'''
import os.path
import sys, time
import subprocess
from lxml import etree
import academic.owAcademicUtils as acutils
import plant_energyse.openwind.openWindUtils as utils
import plant_energyse.openwind.rwScriptXML as rwScriptXML
import plant_energyse.openwind.rwTurbXML as rwTurbXML
import plant_energyse.openwind.turbfuncs as turbfuncs
from openmdao.lib.datatypes.api import Float, Int, VarTree
from openmdao.main.api import FileMetadata, Component, VariableTree
from fusedwind.plant_flow.vt import GenericWindTurbineVT, \
GenericWindTurbinePowerCurveVT, ExtendedWindTurbinePowerCurveVT, \
GenericWindFarmTurbineLayout, ExtendedWindFarmTurbineLayout
from fusedwind.interface import implement_base
from fusedwind.plant_flow.comp import BaseAEPAggregator
#-----------------------------------------------------------------
@implement_base(BaseAEPAggregator)
class OWcomp(Component):
    """ A simple OpenMDAO component for OpenWind academic

        Args:
           owExe (str): full path to OpenWind executable
           scriptFile (str): path to XML script that OpenWind will run
    """
    # OpenMDAO trait declarations: values with iotype='in' are set by the
    # framework before execute(); iotype='out' values are read afterwards.
    # inputs
    rotor_diameter = Float(126.0, iotype='in', units='m', desc='connecting rotor diameter to force run on change') # todo: hack for now
    availability = Float(0.95, iotype='in', desc='availability')
    other_losses = Float(0.0, iotype='in', desc='soiling losses')
    wt_layout = VarTree(ExtendedWindFarmTurbineLayout(), iotype='in', desc='properties for each wind turbine and layout')
    dummyVbl = Float(0, iotype='in', desc='unused variable to make it easy to do DOE runs')
    # outputs
    gross_aep = Float(0.0, iotype='out', desc='Gross Output')
    net_aep = Float(0.0, iotype='out', desc='Net Output')
    nTurbs = Int(0, iotype='out', desc='Number of turbines')
    #array_aep = Float(0.0, iotype='out', desc='Array output - NOT USED IN ACADEMIC VERSION')
    #array_efficiency = Float(0.0, iotype='out', desc='Array Efficiency')
    #array_losses = Float(0.0, iotype='out', desc='Array losses')
    def __init__(self, owExe, scriptFile=None, extOpt=False, debug=False,
                 stopOW=True, start_once=False, opt_log=False):
        """ Constructor for the OWwrapped component

        Validates the script file, builds the OpenWind command line, and
        (when start_once is True) launches the OpenWind process immediately.
        Raises ValueError if the script file fails validation.
        """
        self.debug = debug
        if self.debug:
            sys.stderr.write('\nIn {:}.__init__()\n'.format(self.__class__))
        super(OWcomp, self).__init__()
        # public variables
        self.input_file = 'myinput.txt'
        self.output_file = 'myoutput.txt'
        self.stderr = 'myerror.log'
        # external_files : member of Component class = list of FileMetadata objects
        self.external_files = [
            FileMetadata(path=self.output_file),
            FileMetadata(path=self.stderr),
        ]
        self.stopOW = stopOW
        self.start_once = start_once
        self.replace_turbine = False
        self.opt_log = opt_log
        self.extOpt = extOpt
        self.resname = '' # start with empty string
        self.script_file = scriptFile
        self.scriptOK = False
        if scriptFile is not None:
            # Check script file for validity and extract some path information
            self.scriptOK = self.parse_scriptFile()
            if not self.scriptOK:
                raise ValueError
                # NOTE(review): this 'return' is unreachable dead code (after raise).
                return
        # NOTE(review): rdScript is called even when scriptFile is None —
        # presumably callers always supply a script; confirm before relying on it.
        self.scriptDict = rwScriptXML.rdScript(self.script_file, self.debug)
        if self.debug:
            sys.stderr.write('Script File Contents:\n')
            for k in self.scriptDict.keys():
                sys.stderr.write('  {:12s} {:}\n'.format(k,self.scriptDict[k]))
        # Log all optimization settings?
        if self.opt_log:
            self.olname = 'owOptLog.txt'
            self.olfh = open(self.olname, 'w')
            if self.debug:
                sys.stderr.write('Logging optimization params to {:}\n'.format(self.olname))
        # Set the version of OpenWind that we want to use
        self.command = [owExe, self.script_file]
        # Keep the initial value of rotor diam so we can
        # see if it (or other turb param) has changed
        self.rtr_diam_init = self.rotor_diameter
        # ... other params ....
        # Try starting OpenWind here (if self.start_once is True)
        if self.start_once:
            self.proc = subprocess.Popen(self.command)
            self.pid = self.proc.pid
            if self.debug:
                sys.stderr.write('Started OpenWind with pid {:}\n'.format(self.pid))
                sys.stderr.write('  OWComp: dummyVbl {:}\n'.format(self.dummyVbl))
        if self.debug:
            sys.stderr.write('\nLeaving {:}.__init__()\n\n'.format(self.__class__))
#------------------
def parse_scriptFile(self):
    """Validate self.script_file and extract paths used at run time.

    Sets self.rptpath (report path from the script), self.replace_turbine,
    self.dname (workbook directory) and self.resname (expected results file).
    Returns True on success, False on any validation failure.
    """
    # OW looks for the notify files and writes the 'results.txt' file to the
    # directory that contains the *blb workbook file
    # Find where the results file will be found
    if not os.path.isfile(self.script_file):
        sys.stderr.write('\n*** OpenWind script file "{:}" not found\n'.format(self.script_file))
        return False
    try:
        e = etree.parse(self.script_file)
        self.rptpath = e.getroot().find('ReportPath').get('value')
    except:
        # NOTE(review): bare except also swallows XML parse errors and
        # KeyboardInterrupt -- consider narrowing to (etree.ParseError,
        # AttributeError)
        sys.stderr.write("\n*** Can't find ReportPath in {:}\n".format(self.script_file))
        self.rptpath = 'NotFound'
        return False
    # Make sure there's an optimize operation - otherwise OW won't find anything
    foundOpt = False
    self.replace_turbine = False
    ops = e.getroot().findall('.//Operation')
    for op in ops:
        optype = op.find('Type').get('value')
        if optype == 'Optimize' or optype == 'Optimise':
            # note: 'break' here means a Replace Turbine op listed AFTER the
            # optimize op will not be detected
            foundOpt = True
            break
        if optype == 'Replace Turbine Type':
            self.replace_turbine = True
            sys.stderr.write('\n*** WARNING: start_once will be set to False because Replace Turbine\n')
            sys.stderr.write('    operation is present in {:}\n\n'.format(self.script_file))
            self.start_once = False
    if not foundOpt:
        sys.stderr.write('\n*** ERROR: no Optimize operation found in {:}\n\n'.format(self.script_file))
        return False
    if self.replace_turbine and self.start_once:
        sys.stderr.write("*** WARNING: can't use start_once when replacing turbine\n")
        sys.stderr.write("    setting start_once to False\n")
        self.start_once = False
    # Find the workbook folder and save as dname
    self.dname = None
    for op in ops:
        if op.find('Type').get('value') == 'Change Workbook':
            wkbk = op.find('Path').get('value')
            if not os.path.isfile(wkbk):
                sys.stderr.write("\n*** OpenWind workbook file {:}\n    not found\n".format(wkbk))
                sys.stderr.write("    (specified in script file {:})\n".format(self.script_file))
                return False
            self.dname = os.path.dirname(wkbk)
            if self.debug:
                sys.stderr.write('Working directory: {:}\n'.format(self.dname))
            break
    # NOTE(review): if the script has no 'Change Workbook' op, dname stays
    # None and this join raises TypeError -- TODO confirm scripts always
    # include one
    self.resname = '/'.join([self.dname,'results.txt'])
    return True
#------------------
def execute(self):
    """Run one OpenWind iteration and harvest the AEP results.

    Workflow: optionally rewrite the replacement-turbine file, (re)start
    OpenWind unless a long-running instance exists, write the new turbine
    positions, notify OpenWind, wait for an updated results file, then
    parse it into self.nTurbs / self.net_aep / self.gross_aep.

    Returns False on failure; returns None on success (pre-existing
    interface -- callers only test for explicit False).

    Fixes vs. original:
      - 'resmtime' is initialized before use (was an UnboundLocalError if
        the results file appeared between the two existence checks)
      - the opt_log loop iterates self.wt_layout.wt_positions (bare
        'wt_positions' was an undefined name -> NameError when opt_log=True)
      - turbine file written via 'with' so the handle is closed on error
    """
    if self.debug:
        sys.stderr.write("In {0}.execute() {1}...\n".format(self.__class__, self.script_file))
    if (len(self.resname) < 1):
        sys.stderr.write('\n*** ERROR: OWcomp results file name not assigned! (problem with script file?)\n\n')
        return False
    # If there is a turbine replacement operation in the script:
    #   write new turbine description file based on contents of first
    #   turbine in layout
    if self.replace_turbine:
        if len(self.wt_layout.wt_list) < 1:
            sys.stderr.write('\n*** ERROR *** OWcomp::execute(): no turbines in wt_layout!\n\n')
            return False
        if self.debug:
            sys.stderr.write('Replacement turbine parameters:\n')
            sys.stderr.write('{:}\n'.format(turbfuncs.wtpc_dump(self.wt_layout.wt_list[0], shortFmt=True)))
        newXML = turbfuncs.wtpc_to_owtg(self.wt_layout.wt_list[0],
                                        trbname='ReplTurb',
                                        desc='OWcomp replacement turbine')
        # a very short string means wtpc_to_owtg produced nothing useful
        if len(newXML) > 50:
            # overwrite the turbine file referenced by the script
            tfname = self.scriptDict['replturbpath']
            with open(tfname, 'w') as tfh:
                tfh.write(newXML)
            maxPower = self.wt_layout.wt_list[0].power_rating
            if self.debug:
                sys.stderr.write('Wrote new turbine file to {:} (rated pwr {:.2f} MW\n'.format(tfname, maxPower*0.000001))
        else:
            sys.stderr.write('*** NO new turbine file written\n')
    # Execute the component and save process ID
    if not self.start_once:
        self.proc = subprocess.Popen(self.command)
        self.pid = self.proc.pid
        if self.debug:
            sys.stderr.write('Started OpenWind with pid {:}\n'.format(self.pid))
            sys.stderr.write('  OWComp: dummyVbl {:}\n'.format(self.dummyVbl))
        # Watch for 'results.txt', meaning that OW has run once with the
        # default locations
        if self.debug:
            sys.stderr.write('OWComp waiting for {:} (first run - positions unchanged)\n'.format(self.resname))
        acutils.waitForNotify(watchFile=self.resname, path=self.dname, debug=False, callback=self.getCBvalue)
    # Now OW is waiting for a new position file
    # Write new positions and notify file - this time it should use
    # updated positions
    acutils.writePositionFile(self.wt_layout.wt_positions, path=self.dname, debug=self.debug)
    # Record the current modification time of results.txt (0 == "no
    # previous results file") so we can tell when OW rewrites it.
    resmtime = 0
    if os.path.exists(self.resname):
        resmtime = os.path.getmtime(self.resname)
        if self.debug:
            sys.stderr.write('ModTime({:}): {:}\n'.format(self.resname, time.asctime(time.localtime(resmtime))))
    else:
        if self.debug:
            sys.stderr.write('{:} does not exist yet\n'.format(self.resname))
    acutils.writeNotify(path=self.dname, debug=self.debug)  # tell OW that we're ready for the next (only) iteration
    # 'results.txt' is in the same directory as the *blb file
    if os.path.exists(self.resname):
        resNewmtime = os.path.getmtime(self.resname)
        if resNewmtime > resmtime:  # file has changed (or just appeared)
            if self.debug:
                sys.stderr.write('results.txt already updated')
        else:
            acutils.waitForNotify(watchFile=self.resname, path=self.dname, callback=self.getCBvalue, debug=self.debug)
    else:
        if self.debug:
            sys.stderr.write('OWComp waiting for {:} (modified positions)\n'.format(self.resname))
        acutils.waitForNotify(watchFile=self.resname, path=self.dname, callback=self.getCBvalue, debug=self.debug)
    # Parse output file
    # Enterprise OW writes the report file specified in the script BUT
    # Academic OW writes 'results.txt' (which doesn't have as much information)
    netEnergy, netNRGturb, grossNRGturb = acutils.parseACresults(fname=self.resname)
    if netEnergy is None:
        sys.stderr.write("Error reading results file\n")
        if self.debug:
            sys.stderr.write('Stopping OpenWind with pid {:}\n'.format(self.pid))
        self.proc.terminate()
        return False
    # Set the output variables
    # - array_aep is not available from Academic 'results.txt' file
    self.nTurbs = len(netNRGturb)
    self.net_aep = netEnergy
    self.gross_aep = sum(grossNRGturb)
    if self.debug:
        sys.stderr.write('{:}\n'.format(self.dump()))
    # Log optimization values
    if self.opt_log:
        self.olfh.write('{:3d} G {:.4f} N {:.4f} XY '.format(self.exec_count, self.gross_aep, self.net_aep))
        # FIX: was 'range(len(wt_positions))' -- an undefined local name
        for ii in range(len(self.wt_layout.wt_positions)):
            self.olfh.write('{:8.1f} {:9.1f} '.format(self.wt_layout.wt_positions[ii][0], self.wt_layout.wt_positions[ii][1]))
        self.olfh.write('\n')
    if not self.start_once and self.stopOW:
        if self.debug:
            sys.stderr.write('Stopping OpenWind with pid {:}\n'.format(self.pid))
        self.proc.terminate()
    self.checkReport()  # check for execution errors
    if self.debug:
        sys.stderr.write("Leaving {0}.execute() {1}...\n\n".format(self.__class__, self.script_file))
#------------------
def parse_results_no_extopt(self):
    ''' Parse the results of an OpenWind run WITHOUT external optimization.

    Reads the report at self.rptpath and fills in gross/array/net AEP
    (converted GWh -> kWh), turbine count, array efficiency and losses.

    Fixes vs. original: the method was declared without 'self' (calling it
    as a method raised TypeError) and referenced the bare name 'rptpath'
    instead of self.rptpath (NameError).
    '''
    self.gross_aep, self.array_aep, self.net_aep, owTurbs = utils.rdReport(self.rptpath, debug=self.debug)
    self.turbine_number = len(owTurbs)
    # Set the output variables
    self.array_efficiency = self.array_aep / self.gross_aep
    self.gross_aep = self.gross_aep * 1000000.0  # gWh to kWh
    self.array_aep = self.array_aep * 1000000.0
    self.net_aep = self.net_aep * 1000000.0
    # find net aep (not using openwind for these since they may be inputs
    # from other models)
    self.net_aep = self.net_aep * self.availability * (1 - self.other_losses)
    # find array efficiency
    self.array_losses = 1 - (self.array_aep / self.gross_aep)
#------------------
def dump(self):
    """Return a one-line summary of the current AEP results (GWh)."""
    to_gwh = 0.000001  # kWh -> GWh
    return 'Gross {:10.4f} GWh Net {:10.4f} GWh from {:4d} turbines'.format(
        self.gross_aep * to_gwh, self.net_aep * to_gwh, self.nTurbs)
#------------------
def getCBvalue(self, val):
    """Store *val* as the component's net AEP.

    Used as the callback for acutils.waitForNotify(): when that watcher
    detects a change in the results file it parses the net energy and
    invokes this with the value. (May be redundant with the results.txt
    parsing in execute().)
    """
    self.net_aep = val
#------------------
def terminateOW(self):
    """Terminate the running OpenWind subprocess."""
    if self.debug:
        msg = 'Stopping OpenWind with pid {:}\n'.format(self.pid)
        sys.stderr.write(msg)
    self.proc.terminate()
#------------------
def checkReport(self):
    ''' Scan the OpenWind report file for known error messages.

    Reads the report named by self.scriptDict['rptpath'] and warns on
    stderr if the turbine-replacement operation failed. Purely advisory:
    returns nothing.

    Fix vs. original: the file is opened with 'with' so the handle is
    closed even if reading raises, and lines are streamed instead of
    read into memory via readlines().
    '''
    fname = self.scriptDict['rptpath']
    if self.debug:
        sys.stderr.write('checkReport : {:}\n'.format(fname))
    with open(fname, 'r') as fh:
        for line in fh:
            if line.startswith('Failed to find and replace turbine type'):
                sys.stderr.write('\n*** ERROR: turbine replacement operation failed\n')
                sys.stderr.write('    Replace {:}\n'.format(self.scriptDict['replturbname']))
                sys.stderr.write('    with    {:}\n'.format(self.scriptDict['replturbpath']))
                sys.stderr.write('\n')
#------------------------------------------------------------------
def dummy_wt_list():
    """Build a placeholder ExtendedWindTurbinePowerCurveVT for testing.

    All four curves are flat 20-point tables indexed 0..19.
    """
    n_points = 20

    def flat_curve(level):
        # [wind speed, constant value] pairs
        return [[float(i), level] for i in range(n_points)]

    turbine = ExtendedWindTurbinePowerCurveVT()
    turbine.hub_height = 100.0
    turbine.rotor_diameter = 90.0
    turbine.power_rating = 3.0
    turbine.rpm_curve = flat_curve(10.0)
    turbine.pitch_curve = flat_curve(0.0)
    turbine.c_t_curve = flat_curve(10.0)
    turbine.power_curve = flat_curve(10.0)
    return turbine
#------------------------------------------------------------------
def wtlDump(wtl):
    """Return a short debug string with the length of the turbine's Ct curve."""
    return 'WTL: pclen {:}'.format(len(wtl.c_t_curve))
#------------------------------------------------------------------
''' OWComp.wt_layout is a ExtendedWindFarmTurbineLayout(VariableTree) and has
wt_list = List(ExtendedWindTurbinePowerCurveVT(), desc='The wind turbine list of descriptions [n_wt]')
wt_positions = Array(units='m', desc='Array of wind turbines attached to particular positions [n_wt, 2]')
(among others)
We use wt_positions to move the turbines - we update the values and copy them to
file 'positions.txt' at each iteration using writePositionFile()
(ow.wt_layout.wt_positions and wt_positions are 2 copies of the same data)
If we are replacing the turbines, we use wt_list to hold the modified turbine.
We initialize wt_layout.wt_list with copies of the values in base_turbine_file.
At each iteration, we tweak the values in wt_layout.wt_list.
When OWComp.execute runs, it writes a new turbine file
using the values in wt_layout.wt_list[0]
This turbine file is the same one specified in the script:
<TurbinePath value="../templates/ReplTurb.owtg"/>
When OpenWind runs the Replace Turbine operation, it looks for all turbines whose name matches
the value in <TurbineName value="NREL 5MW"/> and replaces them with the turbine described in
file <TurbinePath>
Does the base_turbine_file need to match the default turbine in the workbook?
How can we get that name?
- run OW energy capture, scan file
- but scripted energy capture doesn't have full description of turbine
'''
def example(owExe):
    """Demonstration driver for OWcomp (Python 2: uses print statements).

    Parses simple command-line flags, validates the OpenWind executable and
    script, builds an OWcomp, then either runs it once (internal
    optimization) or iterates a few times, shifting turbine positions and
    optionally modifying the turbine, with external optimization.
    """
    debug = True  # set True so user sees final 'break' message
    start_once = False
    modify_turbine = False
    opt_log = False
    extOpt = False
    # crude flag parsing -- every flag simply toggles an option on
    for arg in sys.argv[1:]:
        if arg == '-debug':
            debug = True
        if arg == '-once':
            start_once = True
        if arg == '-log':
            opt_log = True
        if arg == '-modturb':
            modify_turbine = True
        if arg == '-extopt':
            extOpt = True
        if arg == '-help':
            sys.stderr.write('USAGE: python openWindComponent.py [-once] [-debug] [-modturb] [-extopt]\n')
            exit()
    # Find OpenWind executable
    if not os.path.isfile(owExe):
        sys.stderr.write('OpenWind executable file "{:}" not found\n'.format(owExe))
        exit()
    # set the external optimiser flag to extOpt:
    #   True : can use our optimizing routines
    acutils.owIniSet(owExe, extVal=extOpt, debug=True)
    # If we are not externally optimizing, we can start OW once and let it
    # run to completion
    if not extOpt:
        start_once = True
    # Set OpenWind script name
    testpath = 'templates/'
    #owXMLname = testpath + 'rtecScript.xml' # replace turb, energy capture #KLD - this script does not work for me with this component
    owXMLname = testpath + 'owScript.xml'  # optimize operation
    #owXMLname = testpath + 'rtopScript.xml' # replace turb, optimize
    if modify_turbine:
        owXMLname = testpath + 'rtopScript.xml'  # replace turb, optimize
    if not os.path.isfile(owXMLname):
        sys.stderr.write('OpenWind script file "{:}" not found\n'.format(owXMLname))
        exit()
    dscript = rwScriptXML.rdScript(owXMLname, debug=debug)  # Show our operations
    workbook = dscript['workbook']
    # default turbine positions and size of translation
    wt_positions = [[456000.00, 4085000.00],
                    [456500.00, 4085000.00]]
    deltaX = 3000.0
    deltaY = -2000.0
    #deltaX = 200.0
    #deltaY = -200.0
    # NOTE(review): these reassignments override the values above -- the
    # effective step is 3 m / -2 m, not 3000 m / -2000 m
    deltaX = 3.000
    deltaY = -2.000
    # Read turbine positions from workbook (replaces the defaults above)
    if debug:
        sys.stderr.write('Getting turbine positions from {:}\n'.format(workbook))
    wb = acutils.WTWkbkFile(wkbk=workbook, owexe=owExe)
    wt_positions = wb.xy
    if debug:
        sys.stderr.write('Got {:} turbine positions\n'.format(len(wt_positions)))
    # Initialize OWcomp component
    ow = OWcomp(owExe=owExe, debug=debug, scriptFile=owXMLname,
                extOpt=extOpt,
                start_once=start_once, opt_log=opt_log)  #, stopOW=False)
    if not ow.scriptOK:
        sys.stderr.write("\n*** ERROR found in script file\n\n")
        exit()
    # starting point for turbine mods
    #wt_list_elem = dummy_wt_list()
    base_turbine_file = testpath + 'NREL5MW.owtg'
    wt_list_elem = turbfuncs.owtg_to_wtpc(base_turbine_file)
    if not extOpt:
        # If we're not optimizing externally, run ow and return
        ow.execute()
        print '\nFinal values'
        owd = ow.dump()
        print '  {:}'.format(owd)
        print '-' * 40, '\n'
        return
    # external optimization: every layout slot shares the same turbine object
    ow.wt_layout.wt_list = [wt_list_elem for i in range(len(wt_positions))]
    if debug:
        sys.stderr.write('Initialized {:} turbines in wt_layout\n'.format(len(wt_positions)))
    # With each iteration
    #   move turbines farther offshore
    #   possibly modify the turbine rotor diam and power curve and replace turbine
    if debug:
        ofh = open('wtp.txt', 'w')
    for irun in range(1, 4):
        for i in range(len(wt_positions)):
            wt_positions[i][0] += deltaX
            wt_positions[i][1] += deltaY
            if debug:
                ofh.write('{:2d} {:3d} {:.1f} {:.1f}\n'.format(irun, i, wt_positions[i][0], wt_positions[i][1]))
        ow.wt_layout.wt_positions = wt_positions
        # modify the turbine
        ow.rotor_diameter += 1.0
        if ow.replace_turbine:
            # uprate the replacement turbine by 5% each iteration
            wt_list_elem = ow.wt_layout.wt_list[0]
            wt_list_elem.power_rating *= 1.05
            for i in range(len(wt_list_elem.power_curve)):
                wt_list_elem.power_curve[i][1] *= 1.05
            ow.wt_layout.wt_list = [wt_list_elem for i in range(len(ow.wt_layout.wt_list))]
            if debug:
                ofh.write('Updated {:} turbines with:\n'.format(len(ow.wt_layout.wt_list)))
                ofh.write(turbfuncs.wtpc_dump(ow.wt_layout.wt_list[0]))
        ow.execute()  # run the openWind process
        print '\nFinal values'
        owd = ow.dump()
        print '  {:}'.format(owd)
        print '-' * 40, '\n'
    if start_once:
        # shut down the single long-running OpenWind instance
        ow.terminateOW()
if __name__ == "__main__":
    # Substitute your own path to Openwind Enterprise
    #owExe = 'C:/Models/Openwind/openWind64_ac.exe'
    owExe = 'D:/rassess/Openwind/openWind64_ac.exe'  # Old Academic v.1275
    # NOTE(review): this reassignment overrides the path above; only
    # openWind64.exe is ever used
    owExe = 'D:/rassess/Openwind/openWind64.exe'
    example(owExe)
| 41.097521
| 137
| 0.591216
|
4a0ad0f178cea93fd651e45ad899ccf5b44fc7bc
| 1,295
|
py
|
Python
|
setup.py
|
bakszero/voxpopuli
|
5745dca4bd16624bbd0e486b3f7fdb08ff795238
|
[
"MIT"
] | 1
|
2020-10-08T14:48:35.000Z
|
2020-10-08T14:48:35.000Z
|
setup.py
|
bakszero/voxpopuli
|
5745dca4bd16624bbd0e486b3f7fdb08ff795238
|
[
"MIT"
] | null | null | null |
setup.py
|
bakszero/voxpopuli
|
5745dca4bd16624bbd0e486b3f7fdb08ff795238
|
[
"MIT"
] | null | null | null |
# coding=utf-8
"""Install config."""
from setuptools import setup, find_packages
# Use the README verbatim as the PyPI long description.
with open("README.md") as readme:
    long_description = readme.read()

setup(
    name='voxpopuli',
    version='0.3.6',
    description='A wrapper around Espeak and Mbrola, to do simple Text-To-Speech (TTS),'
                ' with the possibility to tweak the phonemic form.',
    long_description=long_description,
    long_description_content_type='text/markdown',
    url='https://github.com/hadware/voxpopuli',
    author='Hadware',
    author_email='hadwarez@gmail.com',
    license='MIT',
    classifiers=[
        'Topic :: Text Processing :: Linguistic',
        'Topic :: Multimedia :: Sound/Audio :: Sound Synthesis',
        'Topic :: Multimedia :: Sound/Audio :: Speech',
        'Development Status :: 3 - Alpha',
        'Intended Audience :: Developers',
        'License :: OSI Approved :: MIT License',
        'Programming Language :: Python :: 3',
        'Programming Language :: Python :: 3.5',
        'Programming Language :: Python :: 3.6',
        'Programming Language :: Python :: 3.7',
    ],
    keywords='tts speech phonemes audio',
    packages=find_packages(),
    install_requires=[],  # no hard runtime dependencies declared
    include_package_data=True,
    test_suite='nose.collector',
    tests_require=['nose'])
| 35
| 88
| 0.637838
|
4a0ad17cb7ff461d2d6d5db5c71dc1487d15b017
| 10,993
|
py
|
Python
|
reports/utils.py
|
sxntixgo/merger
|
cfe98b36afe861797a848618c6f2e1e208c4bd2c
|
[
"MIT"
] | null | null | null |
reports/utils.py
|
sxntixgo/merger
|
cfe98b36afe861797a848618c6f2e1e208c4bd2c
|
[
"MIT"
] | 2
|
2021-03-31T20:05:15.000Z
|
2021-06-10T19:56:29.000Z
|
reports/utils.py
|
sxntixgo/merger
|
cfe98b36afe861797a848618c6f2e1e208c4bd2c
|
[
"MIT"
] | null | null | null |
from django.core.files import File
from .models import ReportTemplate
from main.models import Attach, Proj, Vuln, RISK
from docx import Document
from docx.enum.text import WD_ALIGN_PARAGRAPH, WD_BREAK
from docx.oxml import OxmlElement
from docx.oxml.ns import qn
from docx.shared import Inches, Pt, RGBColor
import matplotlib
matplotlib.use('agg')
from matplotlib import pyplot
from matplotlib.colors import CSS4_COLORS
from datetime import date
from re import match
from os import path, remove
from wsgiref.util import FileWrapper
import random
# Template colour-attribute names per risk level, ordered Info -> Critical
# (order matches the RISK choices imported from main.models).
RISK_COLORS = [ 'risk_info_color','risk_low_color', 'risk_medium_color', 'risk_high_color', 'risk_critical_color']
# ReportTemplate section-flag field names -> heading text printed in the report.
SECTIONS = {
    'sec_es': 'Executive Summary',
    'sec_sor': 'Summary of Results',
    'sec_method': 'Methodology',
    'sec_find': 'Findings',
    'sec_conc': 'Conclusion',
    'sec_desc': 'Description',
    'sec_evid': 'Evidence',
    'sec_sol': 'Solution',
}
# python-docx built-in style names whose fonts are overridden from the template.
STYLES = ['Title', 'Heading 1', 'Heading 2', 'Heading 3', 'Normal', 'Caption']
def color_to_rgb(font_color):
    """Convert a named CSS4 colour (e.g. 'tomato') into a python-docx RGBColor."""
    hex_code = CSS4_COLORS[font_color]  # '#rrggbb'
    red, green, blue = (int(hex_code[i:i + 2], 16) for i in (1, 3, 5))
    return RGBColor(red, green, blue)
# Remove bottom border
# Source: https://github.com/python-openxml/python-docx/issues/105
def remove_border(paragraph):
    """Replace the paragraph's bottom border with a zero-width white one.

    Works at the OOXML level: inserts a <w:pBdr> element (at its
    schema-mandated position within <w:pPr>) carrying a 'blank' bottom
    border, which visually removes the rule python-docx draws under
    Title-style headings.
    """
    p = paragraph._p  # p is the <w:p> XML element
    pPr = p.get_or_add_pPr()
    pBdr = OxmlElement('w:pBdr')
    # The long sibling list enforces the OOXML schema ordering of <w:pPr>
    # children: <w:pBdr> must appear before all of these if they exist.
    pPr.insert_element_before(pBdr,
        'w:shd', 'w:tabs', 'w:suppressAutoHyphens', 'w:kinsoku', 'w:wordWrap',
        'w:overflowPunct', 'w:topLinePunct', 'w:autoSpaceDE', 'w:autoSpaceDN',
        'w:bidi', 'w:adjustRightInd', 'w:snapToGrid', 'w:spacing', 'w:ind',
        'w:contextualSpacing', 'w:mirrorIndents', 'w:suppressOverlap', 'w:jc',
        'w:textDirection', 'w:textAlignment', 'w:textboxTightWrap',
        'w:outlineLvl', 'w:divId', 'w:cnfStyle', 'w:rPr', 'w:sectPr',
        'w:pPrChange'
    )
    # zero-size, zero-spacing, white: effectively invisible
    bottom = OxmlElement('w:bottom')
    bottom.set(qn('w:sz'), '0')
    bottom.set(qn('w:space'), '0')
    bottom.set(qn('w:color'), 'white')
    pBdr.append(bottom)
def get_donut(path, proj, template):
    """Render a donut chart of vulnerability counts per risk level.

    Counts the project's Vuln rows for each RISK level, colours each wedge
    with the template's configured risk colour, and writes the chart to
    '<path>donut.png' (the caller embeds and then deletes that file).

    Fix vs. original: the figure is closed after saving. pyplot keeps every
    figure alive until explicitly closed, so repeated report generation in
    the long-running web process leaked memory.
    """
    values = []
    labels = []
    colors = []
    for color in RISK_COLORS:
        colors.append(template.COLORS[getattr(template, color)][1])
    for risk in RISK:
        values.append(Vuln.objects.filter(risk=risk[0], proj=proj.slug).count())
        labels.append(risk[1])
    # Reverse the data so that criticals are printed first.
    colors.reverse()
    values.reverse()
    labels.reverse()
    fig, ax = pyplot.subplots(figsize=(8, 4), subplot_kw=dict(aspect='equal'))
    # percentage labels only on non-empty wedges, placed outside the ring
    wedges, texts, autotexts = ax.pie(values, colors=colors, wedgeprops=dict(width=0.5, edgecolor='w'), autopct=lambda p: '{:.1f}%'.format(round(p)) if p > 0 else '', pctdistance=1.3)
    ax.legend(wedges, labels, title="Risk", loc="center right", bbox_to_anchor=(1.25, 0, 0.5, 1))
    pyplot.setp(autotexts, size=14, weight="bold")
    pyplot.savefig(f'{path}donut.png')
    pyplot.close(fig)  # FIX: release the figure to avoid unbounded memory growth
# Modified from the source
# --- from https://github.com/python-openxml/python-docx/issues/590,
# --- mods by CD
def iter_heading(paragraphs):
    """Yield (level, paragraph) for each paragraph styled 'Heading 1'..'Heading 9'."""
    for para in paragraphs:
        found = match('Heading ([1-9])', para.style.name)
        if found:
            yield int(found.group(1)), para
def addHeaderNumbering(document):
    """Prefix every heading in *document* with hierarchical numbers (1., 1.1., ...).

    Walks the headings in order via iter_heading(); entering a heading at
    level N increments counter N and resets all deeper counters.
    """
    counters = [0] * 10  # counters[k] = current number at heading level k
    for level, heading in iter_heading(document.paragraphs):
        counters[level] += 1
        # a new section restarts numbering of all deeper levels
        for deeper in range(level + 1, len(counters)):
            counters[deeper] = 0
        prefix = ''.join('{}.'.format(counters[k]) for k in range(1, level + 1))
        heading.text = f'{prefix} {heading.text}'
def generate_document(request, path):
    """Build a .docx pentest report for the requested project and template.

    Reads 'proj_name' and 'report_template_name' from request.POST, styles
    the document from the ReportTemplate, emits the configured cover/ToC
    and sections, numbers the headings, and saves the file under *path*
    with a random 6-hex-digit suffix.

    Returns the generated file name (without *path*).
    """
    proj_name = request.POST.get('proj_name')
    proj = Proj.objects.filter(name=proj_name).first()
    template_name = request.POST.get('report_template_name')
    template = ReportTemplate.objects.filter(name=template_name).first()
    document = Document()
    # Override the built-in docx styles with the template's font settings;
    # template field names are derived from the style name ('Heading 1' ->
    # 'heading_1_font', ...).
    for style in STYLES:
        font = document.styles[style].font
        style = style.lower().replace(' ', '_')
        font.name = getattr(template, f'{style}_font')
        font.size = Pt(getattr(template, f'{style}_size'))
        font.bold = getattr(template, f'{style}_bold')
        font.italic = getattr(template, f'{style}_italic')
        font_color = template.COLORS[getattr(template, f'{style}_color')][1]
        font.color.rgb = color_to_rgb(font_color)
    # sec_cover selects the cover style: 0 = full cover page (+ optional
    # ToC), 1 = simple title heading -- presumably an enum on the template,
    # TODO confirm other values are unused.
    cover = template.sec_cover
    if cover == 0:
        # blank space above the title block
        run = document.add_paragraph().add_run()
        run.add_break()
        run.add_break()
        run.add_break()
        run.add_break()
        # large title lines (36pt, title font)
        for element in ['cover_title', 'cover_company_name']:
            text = getattr(template, element)
            run = document.add_paragraph().add_run(text)
            run.add_break()
            run.add_break()
            font = run.font
            font.name = getattr(template, 'title_font')
            font.size = Pt(36)
            font_color = template.COLORS[getattr(template, 'title_color')][1]
            font.color.rgb = color_to_rgb(font_color)
        # contact block (12pt, normal font)
        for element in ['cover_contact_name', 'cover_contact_email', 'cover_contact_phone_number']:
            text = getattr(template, element)
            run = document.add_paragraph().add_run(str(text))
            font = run.font
            font.name = getattr(template, 'normal_font')
            font.size = Pt(12)
            font_color = template.COLORS[getattr(template, 'normal_color')][1]
            font.color.rgb = color_to_rgb(font_color)
        run = document.add_paragraph().add_run()
        run.add_break()
        run.add_break()
        # centered generation date, e.g. 'Jan-02-2021'
        text = date.today().strftime("%b-%d-%Y")
        par = document.add_paragraph()
        par.alignment = WD_ALIGN_PARAGRAPH.CENTER
        run = par.add_run(text)
        font = run.font
        font.name = getattr(template, 'normal_font')
        font.size = Pt(12)
        font_color = template.COLORS[getattr(template, 'normal_color')][1]
        font.color.rgb = color_to_rgb(font_color)
        run.add_break(WD_BREAK.PAGE)  # page break
        if template.sec_toc:
            p = document.add_heading('Table of Contents', 1)
            # To add ToC
            # From https://stackoverflow.com/questions/18595864/python-create-a-table-of-contents-with-python-docx-lxml
            # Inserts a Word TOC *field*; Word populates it when the user
            # updates fields (hence the placeholder text below).
            paragraph = document.add_paragraph()
            run = paragraph.add_run()
            fldChar = OxmlElement('w:fldChar')  # creates a new element
            fldChar.set(qn('w:fldCharType'), 'begin')  # sets attribute on element
            instrText = OxmlElement('w:instrText')
            instrText.set(qn('xml:space'), 'preserve')  # sets attribute on element
            instrText.text = 'TOC \\o "1-3" \\h \\z \\u'  # change 1-3 depending on heading levels you need
            fldChar2 = OxmlElement('w:fldChar')
            fldChar2.set(qn('w:fldCharType'), 'separate')
            fldChar3 = OxmlElement('w:t')
            fldChar3.text = "Right-click to update field."
            fldChar2.append(fldChar3)
            fldChar4 = OxmlElement('w:fldChar')
            fldChar4.set(qn('w:fldCharType'), 'end')
            r_element = run._r
            r_element.append(fldChar)
            r_element.append(instrText)
            r_element.append(fldChar2)
            r_element.append(fldChar4)
            p_element = paragraph._p
            run.add_break(WD_BREAK.PAGE)  # page break
    elif cover == 1:
        title = template.cover_title
        p = document.add_heading(title, 0)
        remove_border(p)
    # Emit each section the template enables (template has a boolean field
    # per SECTIONS key).
    for heading in SECTIONS.keys():
        if hasattr(template, heading):
            paragraph = document.add_heading(SECTIONS[heading], 1)
            if heading == 'sec_es':
                text = getattr(template, 'es_text')
                if text:
                    document.add_paragraph(text)
            if heading == 'sec_sor':
                # Add donut
                get_donut(path, proj, template)
                document.add_picture(f'{path}donut.png', width=Inches(6))
                remove(f'{path}donut.png')
                # Add table header
                table = document.add_table(rows=1, cols=4)
                hdr_cells = table.rows[0].cells
                hdr_cells[0].text = 'Title'
                hdr_cells[1].text = 'Risk (Score)'
                hdr_cells[2].text = 'Description'
                hdr_cells[3].text = 'Solution'
                # Add table findings
                vulns = Vuln.objects.filter(proj=proj.slug)
                for vuln in vulns:
                    row_cells = table.add_row().cells
                    row_cells[0].text = vuln.title
                    row_cells[1].text = '{} ({})'.format(RISK[vuln.risk][1], vuln.score)
                    row_cells[2].text = vuln.description
                    row_cells[3].text = vuln.solution
            if heading == 'sec_method':
                text = getattr(template, 'method_text')
                if text:
                    document.add_paragraph(text)
            if heading == 'sec_find':
                # one level-2 heading per finding, with Evidence/Solution
                # subsections
                vulns = Vuln.objects.filter(proj=proj.slug)
                for vuln in vulns:
                    document.add_heading(('{} [{}]').format(vuln.title, RISK[vuln.risk][1]), 2)
                    # NOTE(review): the 'CVE:' bullet prints vuln.score, not
                    # a CVE identifier -- looks like a copy/paste slip; the
                    # Vuln model's CVE field (if any) isn't visible here.
                    document.add_paragraph('{} {}'.format('CVE:', vuln.score), 'List Bullet')
                    document.add_paragraph('{} {}'.format('Score:', vuln.score), 'List Bullet')
                    document.add_paragraph(vuln.description)
                    for subheading in ['sec_evid', 'sec_sol']:
                        document.add_heading(SECTIONS[subheading], 3)
                        if subheading == 'sec_evid':
                            document.add_paragraph(vuln.evidence)
                            attachments = Attach.objects.filter(vuln=vuln).all()
                            for attachment in attachments:
                                document.add_picture(path + str(attachment.media), width=Inches(6))
                                document.add_paragraph(f'Figure: {attachment.caption}', style='Caption')
                        else:
                            document.add_paragraph(vuln.solution)
            if heading == 'sec_conc':
                text = getattr(template, 'conc_text')
                if text:
                    document.add_paragraph(text)
    addHeaderNumbering(document)
    # Random suffix avoids file-name collisions (non-cryptographic use).
    hex_chars = '0123456789abcdef'
    rand_str = ''.join(random.choice(hex_chars) for n in range(6))
    document_name = f'{proj_name}_report_{rand_str}.docx'
    document.save(path + document_name)
    return document_name
| 39.82971
| 183
| 0.59265
|
4a0ad1a083d197ea92ca7443eef579332c70890a
| 720
|
py
|
Python
|
setup.py
|
vkottler/vmklib
|
493d23c1b260d2ef53cafd85d00c2f76c682df6f
|
[
"MIT"
] | 1
|
2022-02-27T00:14:15.000Z
|
2022-02-27T00:14:15.000Z
|
setup.py
|
vkottler/vmklib
|
493d23c1b260d2ef53cafd85d00c2f76c682df6f
|
[
"MIT"
] | 23
|
2021-01-28T01:26:57.000Z
|
2022-03-29T08:44:54.000Z
|
setup.py
|
vkottler/vmklib
|
493d23c1b260d2ef53cafd85d00c2f76c682df6f
|
[
"MIT"
] | null | null | null |
# =====================================
# generator=datazen
# version=1.7.9
# hash=1b779f83ea09cc421cd4bc0c7ace4b21
# =====================================

"""
vmklib - Package definition for distribution.
"""

# third-party
from vmklib.setup import setup

# internal
from vmklib import PKG_NAME, VERSION, DESCRIPTION

# NOTE: this file is generated by datazen (see header hash); change the
# template it is generated from rather than this file directly.

# author/maintainer metadata consumed by vmklib.setup.setup
author_info = {
    "name": "Vaughn Kottler",
    "email": "vaughnkottler@gmail.com",
    "username": "vkottler",
}
# core package metadata; 'versions' lists the supported CPython versions
pkg_info = {
    "name": PKG_NAME,
    "slug": PKG_NAME.replace("-", "_"),
    "version": VERSION,
    "description": DESCRIPTION,
    "versions": [
        "3.6",
        "3.7",
        "3.8",
        "3.9",
    ],
}
setup(
    pkg_info,
    author_info,
    entry_override="mk",  # console entry-point name ('mk')
)
| 18
| 49
| 0.538889
|
4a0ad1a0e959aa33c93ca320dd8b3a941cdc4f80
| 2,065
|
py
|
Python
|
setup.py
|
justinnoah/hamper
|
a12c4b61243bd1ddd8c88e11605348da77fb21fe
|
[
"MIT"
] | null | null | null |
setup.py
|
justinnoah/hamper
|
a12c4b61243bd1ddd8c88e11605348da77fb21fe
|
[
"MIT"
] | null | null | null |
setup.py
|
justinnoah/hamper
|
a12c4b61243bd1ddd8c88e11605348da77fb21fe
|
[
"MIT"
] | null | null | null |
#!/usr/bin/env python2

from setuptools import setup, find_packages

# Runtime dependencies are maintained in requirements.txt, one per line.
requires = open('requirements.txt').read().split('\n')

setup(
    name='hamper',
    version='1.10.1',
    description='Yet another IRC bot',
    install_requires=requires,
    author='Mike Cooper',
    author_email='mythmon@gmail.com',
    url='https://www.github.com/hamperbot/hamper',
    packages=find_packages(),
    entry_points={
        # command-line launcher
        'console_scripts': [
            'hamper = hamper.commander:main',
        ],
        # bot plugins, discovered at runtime through this entry-point group
        'hamperbot.plugins': [
            'bitly = hamper.plugins.bitly:Bitly',
            'botsnack = hamper.plugins.friendly:BotSnack',
            'channel_utils = hamper.plugins.channel_utils:ChannelUtils',
            'choices = hamper.plugins.questions:ChoicesPlugin',
            'dice = hamper.plugins.commands:Dice',
            'factoids = hamper.plugins.factoids:Factoids',
            'flip = hamper.plugins.flip:Flip',
            'friendly = hamper.plugins.friendly:Friendly',
            'goodbye = hamper.plugins.goodbye:GoodBye',
            'help = hamper.plugins.help:Help',
            'karma = hamper.plugins.karma:Karma',
            'karma_adv = hamper.plugins.karma_adv:KarmAdv',
            'lmgtfy = hamper.plugins.commands:LetMeGoogleThatForYou',
            'lookup = hamper.plugins.dictionary:Lookup',
            'ponies = hamper.plugins.friendly:OmgPonies',
            'quit = hamper.plugins.commands:Quit',
            'quotes = hamper.plugins.quotes:Quotes',
            'remindme = hamper.plugins.remindme:Reminder',
            'rot13 = hamper.plugins.commands:Rot13',
            'roulette = hamper.plugins.roulette:Roulette',
            'sed = hamper.plugins.commands:Sed',
            'seen = hamper.plugins.seen:Seen',
            'suggest = hamper.plugins.suggest:Suggest',
            'timez = hamper.plugins.timez:Timez',
            'tinyurl = hamper.plugins.tinyurl:Tinyurl',
            'whatwasthat = hamper.plugins.whatwasthat:WhatWasThat',
            'yesno = hamper.plugins.questions:YesNoPlugin',
        ],
    },
)
| 40.490196
| 72
| 0.611138
|
4a0ad1b8413588d513ca47bad46090be0ea974c2
| 2,565
|
py
|
Python
|
vsts/vsts/release/v4_1/models/approval_options.py
|
kenkuo/azure-devops-python-api
|
9e920bd25e938fa89ff7f60153e5b9e113ca839d
|
[
"MIT"
] | null | null | null |
vsts/vsts/release/v4_1/models/approval_options.py
|
kenkuo/azure-devops-python-api
|
9e920bd25e938fa89ff7f60153e5b9e113ca839d
|
[
"MIT"
] | null | null | null |
vsts/vsts/release/v4_1/models/approval_options.py
|
kenkuo/azure-devops-python-api
|
9e920bd25e938fa89ff7f60153e5b9e113ca839d
|
[
"MIT"
] | null | null | null |
# --------------------------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
# --------------------------------------------------------------------------------------------
# Generated file, DO NOT EDIT
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------------------------
from msrest.serialization import Model
class ApprovalOptions(Model):
    """ApprovalOptions.

    Auto-generated msrest model (DO NOT EDIT header above) describing the
    approval settings of a release environment; _attribute_map drives
    serialization between Python attribute names and the REST wire names.

    :param auto_triggered_and_previous_environment_approved_can_be_skipped:
    :type auto_triggered_and_previous_environment_approved_can_be_skipped: bool
    :param enforce_identity_revalidation:
    :type enforce_identity_revalidation: bool
    :param execution_order:
    :type execution_order: object
    :param release_creator_can_be_approver:
    :type release_creator_can_be_approver: bool
    :param required_approver_count:
    :type required_approver_count: int
    :param timeout_in_minutes:
    :type timeout_in_minutes: int
    """

    _attribute_map = {
        'auto_triggered_and_previous_environment_approved_can_be_skipped': {'key': 'autoTriggeredAndPreviousEnvironmentApprovedCanBeSkipped', 'type': 'bool'},
        'enforce_identity_revalidation': {'key': 'enforceIdentityRevalidation', 'type': 'bool'},
        'execution_order': {'key': 'executionOrder', 'type': 'object'},
        'release_creator_can_be_approver': {'key': 'releaseCreatorCanBeApprover', 'type': 'bool'},
        'required_approver_count': {'key': 'requiredApproverCount', 'type': 'int'},
        'timeout_in_minutes': {'key': 'timeoutInMinutes', 'type': 'int'}
    }

    def __init__(self, auto_triggered_and_previous_environment_approved_can_be_skipped=None, enforce_identity_revalidation=None, execution_order=None, release_creator_can_be_approver=None, required_approver_count=None, timeout_in_minutes=None):
        super(ApprovalOptions, self).__init__()
        self.auto_triggered_and_previous_environment_approved_can_be_skipped = auto_triggered_and_previous_environment_approved_can_be_skipped
        self.enforce_identity_revalidation = enforce_identity_revalidation
        self.execution_order = execution_order
        self.release_creator_can_be_approver = release_creator_can_be_approver
        self.required_approver_count = required_approver_count
        self.timeout_in_minutes = timeout_in_minutes
| 55.76087
| 244
| 0.691228
|
4a0ad2204bd558a70775f0b4c9f254e6e5b1e106
| 12,712
|
py
|
Python
|
examples/test_examples.py
|
DiscoSteve/SDK-Python
|
a359380d18dc8bf82308ecfab9bf0d475b1b8878
|
[
"Apache-2.0"
] | null | null | null |
examples/test_examples.py
|
DiscoSteve/SDK-Python
|
a359380d18dc8bf82308ecfab9bf0d475b1b8878
|
[
"Apache-2.0"
] | null | null | null |
examples/test_examples.py
|
DiscoSteve/SDK-Python
|
a359380d18dc8bf82308ecfab9bf0d475b1b8878
|
[
"Apache-2.0"
] | null | null | null |
import copy
import pprint
import unittest
# needed to connect to the central infrastructure
from highcliff.infrastructure import InvalidTopic, InvalidMessageFormat
# needed to run a local version of the AI
from highcliff.ai import AI, intent_is_real
from highcliff.actions import ActionStatus
# the Highcliff actions to be tested
from highcliff.exampleactions import MonitorBodyTemperature, AuthorizeRoomTemperatureChange, ChangeRoomTemperature
# global variables needed to test publish and subscribe
# NOTE(review): `global` at module scope is a no-op -- these statements
# neither declare nor create the names; `published_topic`/`published_message`
# only come into existence when test_callback below assigns them.
global published_topic
global published_message
class TestHighcliffExamples(unittest.TestCase):
    """Integration tests for the Highcliff example actions.

    Drives an in-process Highcliff AI instance (AI.instance() appears to be a
    singleton accessor -- tearDown's reset() is what isolates tests from each
    other) and exercises planning, action execution, and publish/subscribe.
    """
    def setUp(self):
        # get a reference to the ai and its network
        self.highcliff = AI.instance()
        self.network = self.highcliff.network()
    def tearDown(self):
        # reset the ai so state does not leak between tests
        self.highcliff.reset()
    def test_custom_behavior_is_required(self):
        # an error should be thrown if an action's custom behavior is not defined
        # define an action without implementing custom behavior
        class InvalidActionClass(MonitorBodyTemperature):
            pass
        # NOTE(review): the bare `except: pass` swallows every exception,
        # including a failing assertRaises -- as written this test can never
        # fail. Consider dropping the try/except entirely.
        try:
            self.assertRaises(NotImplementedError,
                              InvalidActionClass,
                              self.network)
        except:
            pass
    def test_action_properties_set_properly_at_action_instantiation(self):
        # define a test action with a blank custom behavior
        class TestAction(MonitorBodyTemperature):
            def behavior(self):
                pass
        # instantiate the test action
        test_action = TestAction(self.highcliff)
        # check the effects of the test action
        expected_effects = {"is_room_temperature_change_needed": True}
        self.assertEqual(expected_effects, test_action.effects)
        # check the preconditions of the test action
        expected_preconditions = {}
        self.assertEqual(expected_preconditions, test_action.preconditions)
    def test_action_updates_the_world(self):
        # the world should be updated after an action occurs
        # define a test action with a blank custom behavior
        class TestAction(MonitorBodyTemperature):
            def behavior(self):
                pass
        # test that the known world is currently empty
        empty_world = {}
        self.assertEqual(empty_world, self.network.the_world())
        # add a dummy condition to the known world
        dummy_condition = {'dummy_condition': False}
        self.network.update_the_world(dummy_condition)
        # instantiate the test action
        test_action = TestAction(self.highcliff)
        # expected world = current world overlaid with the action's effects
        expected_known_world = {**self.network.the_world(), **test_action.effects}
        # take an action and test to see if that action properly affected the world
        test_action.act()
        self.assertEqual(expected_known_world, self.network.the_world())
    def test_action_registers_its_capabilities(self):
        # when an action is instantiated, it should register itself as a capability
        # define a test action with a blank custom behavior
        class TestAction(MonitorBodyTemperature):
            def behavior(self):
                pass
        # test that the capabilities registry is currently empty
        no_capabilities = []
        self.assertEqual(no_capabilities, self.highcliff.capabilities())
        # instantiate the test action
        test_action = TestAction(self.highcliff)
        # test to see if the test action properly registered itself as a new capability
        self.assertTrue(len(self.highcliff.capabilities()) == 1)
        self.assertEqual(test_action, self.highcliff.capabilities()[0])
    def test_action_notifies_success(self):
        # an action that has the intended effect should record a success
        # define a test action with a successful behavior
        class TestSucceededAction(MonitorBodyTemperature):
            def behavior(self):
                pass
        TestSucceededAction(self.highcliff)
        # define the test world state and goals
        self.network.update_the_world({})
        self.highcliff.set_goals({"is_room_temperature_change_needed": True})
        # run a local version of Highcliff
        self.highcliff.run(life_span_in_iterations=1)
        # the action should complete successfully
        self.assertEqual(ActionStatus.SUCCESS, self.highcliff.diary()[0]['action_status'])
    def test_action_notifies_failure(self):
        # an action that does not have the intended effect should record a failure
        # define a test action with a behavior failure
        class TestFailedAction(MonitorBodyTemperature):
            def action_failure(self):
                # deliberately leave the world NOT matching the intended effect
                self.effects['is_body_temperature_monitored'] = False
            def behavior(self):
                self.action_failure()
        TestFailedAction(self.highcliff)
        # define the test world state and goals
        self.network.update_the_world({"is_body_temperature_monitored": False})
        self.highcliff.set_goals({"is_body_temperature_monitored": True})
        # run a local version of Highcliff
        self.highcliff.run(life_span_in_iterations=1)
        # the action should complete unsuccessfully
        self.assertEqual(ActionStatus.FAIL, self.highcliff.diary()[0]['action_status'])
    def test_running_a_one_step_plan(self):
        # test that the ai can create a one-step plan to execute a single action with a single goal
        # define a test body temperature monitor with a blank custom behavior
        class TestBodyTemperatureMonitor(MonitorBodyTemperature):
            def behavior(self):
                pass
        # instantiate the test body temperature monitor
        test_body_temperature_monitor = TestBodyTemperatureMonitor(self.highcliff)
        # define the test world state and goals
        self.network.update_the_world({})
        self.highcliff.set_goals({"is_room_temperature_change_needed": True})
        # run a local version of Highcliff
        self.highcliff.run(life_span_in_iterations=1)
        # the action should complete successfully
        self.assertEqual(ActionStatus.SUCCESS, self.highcliff.diary()[0]['action_status'])
        # the goal should have been recorded in the diary
        self.assertEqual({"is_room_temperature_change_needed": True}, self.highcliff.diary()[0]['my_goal'])
        # the ai should have devised a one-step plan
        expected_plan_steps = 1
        self.assertEqual(expected_plan_steps, len(self.highcliff.diary()[0]['my_plan']))
        # the plan should have been to monitor body temperature
        self.assertEqual(test_body_temperature_monitor, self.highcliff.diary()[0]['my_plan'][0].action)
        # the diary should have recorded that the world changed to reflect the goal state
        world_state_after_matches_goals = intent_is_real({"is_room_temperature_change_needed": True},
                                                         self.highcliff.diary()[0]['the_world_state_after'])
        self.assertTrue(world_state_after_matches_goals)
    def test_running_a_two_step_plan(self):
        # test that the ai can create a two-step plan to execute multiple actions to reach a goal
        # define a test body temperature monitor with a blank custom behavior
        class TestBodyTemperatureMonitor(MonitorBodyTemperature):
            def behavior(self):
                pass
        # instantiate the test body temperature monitor
        TestBodyTemperatureMonitor(self.highcliff)
        # define a test body authorization application with a blank custom behavior
        class TestAuthorizationApp(AuthorizeRoomTemperatureChange):
            def behavior(self):
                pass
        # instantiate the test authorization app
        TestAuthorizationApp(self.highcliff)
        # define the test world state and goals
        world_update = {"is_body_temperature_monitored": False, "is_room_temperature_change_authorized": False}
        self.network.update_the_world(world_update)
        self.highcliff.set_goals({"is_room_temperature_change_authorized": True})
        # run a local version of Highcliff
        self.highcliff.run(life_span_in_iterations=2)
        # the plan should have started with two steps, then progress to a single step
        self.assertEqual(2, len(self.highcliff.diary()[0]['my_plan']))
        self.assertEqual(1, len(self.highcliff.diary()[1]['my_plan']))
        # in the second iteration, the ai should have reached its goal
        highcliff_reached_its_goal = intent_is_real({"is_room_temperature_change_authorized": True},
                                                    self.highcliff.diary()[1]['the_world_state_after'])
        self.assertTrue(highcliff_reached_its_goal)
    def test_a_three_step_plan(self):
        # test that the ai can create a three-step plan to execute multiple actions to reach a goal
        class TestBodyTemperatureMonitor(MonitorBodyTemperature):
            def behavior(self):
                pass
        TestBodyTemperatureMonitor(self.highcliff)
        class TestAuthorizeRoomTemperatureChange(AuthorizeRoomTemperatureChange):
            def behavior(self):
                pass
        TestAuthorizeRoomTemperatureChange(self.highcliff)
        class TestChangeRoomTemperature(ChangeRoomTemperature):
            def behavior(self):
                pass
        TestChangeRoomTemperature(self.highcliff)
        # define the test world state and goals
        self.network.update_the_world({})
        self.highcliff.set_goals({"is_room_temperature_comfortable": True})
        # run a local version of Highcliff
        self.highcliff.run(life_span_in_iterations=3)
        # the plan should have started with three steps, shrinking by one each iteration
        self.assertEqual(3, len(self.highcliff.diary()[0]['my_plan']))
        self.assertEqual(2, len(self.highcliff.diary()[1]['my_plan']))
        self.assertEqual(1, len(self.highcliff.diary()[2]['my_plan']))
        # spot check the contents of the diary
        self.assertEqual(2, len(self.highcliff.diary()[0]['the_world_state_after']))
        self.assertEqual(3, len(self.highcliff.diary()[1]['the_world_state_after']))
        self.assertEqual(False, self.highcliff.diary()[1]['the_world_state_after']['is_room_temperature_comfortable'])
        self.assertEqual(True, self.highcliff.diary()[2]['the_world_state_after']['is_room_temperature_comfortable'])
        # in the third iteration, the ai should have reached its goal
        highcliff_reached_its_goal = intent_is_real({"is_room_temperature_comfortable": True},
                                                    self.highcliff.diary()[2]['the_world_state_after'])
        self.assertTrue(highcliff_reached_its_goal)
    def test_publish_subscribe(self):
        # create a topic
        test_topic = "test_topic"
        self.network.create_topic(test_topic)
        # create a callback function to test publishing; it records what it
        # received into the module-level globals checked below
        def test_callback(topic, message):
            global published_message
            global published_topic
            published_topic = topic
            published_message = message
        # subscribe to the test topic
        self.network.subscribe(test_topic, test_callback)
        # publish a message to the subscribed topic
        test_message = {
            "event_type": "publish_message",
            "event_tags": [],
            "event_source": "test_examples unit test",
            "timestamp": 1234567.89,
            "device_info": {},
            "application_info": {},
            "user_info": {},
            "environment": "test",
            "context": {},
            "effects": {},
            "data": {}
        }
        self.network.publish(test_topic, test_message)
        # subscribers should be notified when there is a new message posted to a topic of interest
        global published_message
        global published_topic
        self.assertEqual(test_topic, published_topic)
        self.assertEqual(test_message, published_message)
        # an invalid message should raise an error
        invalid_message = {}
        # NOTE(review): this extra publish of test_message looks like leftover
        # code -- the assertRaises below is what exercises invalid_message.
        self.network.publish(test_topic, test_message)
        # NOTE(review): wrapping assertRaises in try/except of the same
        # exception masks the case where publish raises directly.
        try:
            self.assertRaises(InvalidMessageFormat, self.network.publish, test_topic, invalid_message)
        except InvalidMessageFormat:
            pass
        # an invalid topic should raise an error
        invalid_topic = "invalid_topic"
        try:
            self.assertRaises(InvalidTopic, self.network.publish, invalid_topic, test_message)
        except InvalidTopic:
            pass
if __name__ == '__main__':
    unittest.main()
| 39.478261
| 118
| 0.674717
|
4a0ad29951f2884a2b658553f705170dbf3f1951
| 635
|
py
|
Python
|
backend/manage.py
|
crowdbotics-apps/dashboard-31817
|
bb5ff5bfc8deb2c551b5c249d080cfb92db20d10
|
[
"FTL",
"AML",
"RSA-MD"
] | null | null | null |
backend/manage.py
|
crowdbotics-apps/dashboard-31817
|
bb5ff5bfc8deb2c551b5c249d080cfb92db20d10
|
[
"FTL",
"AML",
"RSA-MD"
] | null | null | null |
backend/manage.py
|
crowdbotics-apps/dashboard-31817
|
bb5ff5bfc8deb2c551b5c249d080cfb92db20d10
|
[
"FTL",
"AML",
"RSA-MD"
] | null | null | null |
#!/usr/bin/env python
"""Django's command-line utility for administrative tasks."""
import os
import sys
def main():
    """Entry point for Django's command-line administrative utility.

    Points Django at this project's settings (unless the caller has already
    set DJANGO_SETTINGS_MODULE) and hands sys.argv to Django's CLI dispatcher.

    Raises:
        ImportError: when Django is not importable, with a hint about the
            usual causes (PYTHONPATH, virtualenv not activated).
    """
    os.environ.setdefault("DJANGO_SETTINGS_MODULE", "dashboard_31817.settings")
    try:
        from django.core.management import execute_from_command_line as run_cli
    except ImportError as exc:
        # Re-raise with a friendlier hint, chaining the original cause.
        message = (
            "Couldn't import Django. Are you sure it's installed and "
            "available on your PYTHONPATH environment variable? Did you "
            "forget to activate a virtual environment?"
        )
        raise ImportError(message) from exc
    run_cli(sys.argv)


if __name__ == "__main__":
    main()
| 28.863636
| 79
| 0.686614
|
4a0ad2e75bfa5ef1bbf0f22e0e5ceb4973bda0dc
| 2,388
|
py
|
Python
|
model-optimizer/extensions/front/caffe/proposal_ext_test.py
|
tdp2110/dldt
|
87f321c5365ed813e849ea0ed987354ef2c39743
|
[
"Apache-2.0"
] | null | null | null |
model-optimizer/extensions/front/caffe/proposal_ext_test.py
|
tdp2110/dldt
|
87f321c5365ed813e849ea0ed987354ef2c39743
|
[
"Apache-2.0"
] | null | null | null |
model-optimizer/extensions/front/caffe/proposal_ext_test.py
|
tdp2110/dldt
|
87f321c5365ed813e849ea0ed987354ef2c39743
|
[
"Apache-2.0"
] | null | null | null |
"""
Copyright (c) 2018-2019 Intel Corporation
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
import unittest
from unittest.mock import patch
from extensions.front.caffe.proposal_ext import ProposalFrontExtractor
from extensions.ops.proposal import ProposalOp
from mo.utils.unittest.extractors import FakeMultiParam
from mo.utils.unittest.graph import FakeNode, FakeAttr
from mo.ops.op import Op
class FakeProposalProtoLayer:
    """Minimal stand-in for a caffe proto layer exposing ``proposal_param``."""
    def __init__(self, val):
        # val: the proposal parameters (a FakeMultiParam in these tests)
        self.proposal_param = val
class TestProposalExt(unittest.TestCase):
    """Tests for ProposalFrontExtractor (caffe Proposal layer front extractor)."""
    @classmethod
    def setUpClass(cls):
        # Register the Proposal op so the extractor can resolve it by name.
        # NOTE(review): this mutates the global Op registry and is never
        # undone -- harmless here, but visible to any later test in the run.
        Op.registered_ops['Proposal'] = ProposalOp
    def test_proposal_no_pb_no_ml(self):
        # extracting from a node with no protobuf layer must fail loudly
        self.assertRaises(AttributeError, ProposalFrontExtractor.extract, None)
    @patch('extensions.front.caffe.proposal_ext.merge_attrs')
    def test_proposal_ext_ideal_numbers(self, merge_attrs):
        # proposal parameters as they would appear in the caffe prototxt
        params = {
            'feat_stride': 1,
            'base_size': 16,
            'min_size': 16,
            'ratio': 1,
            'scale': 2,
            'pre_nms_topn': 6000,
            'post_nms_topn': 300,
            'nms_thresh': 0.7
        }
        # make merge_attrs a pass-through copy so only the extractor's own
        # additions ('type' and 'infer') are under test
        merge_attrs.return_value = {
            **params
        }
        fake_pl = FakeProposalProtoLayer(FakeMultiParam(params))
        fake_node = FakeNode(fake_pl, None)
        fake_node.graph.graph['cmd_params'] = FakeAttr(generate_experimental_IR_V10=False)
        ProposalFrontExtractor.extract(fake_node)
        # the extractor should copy every parameter onto the node and add
        # the op type plus the shape-inference function
        exp_res = {
            'type': "Proposal",
            'feat_stride': 1,
            'base_size': 16,
            'min_size': 16,
            'ratio': 1,
            'scale': 2,
            'pre_nms_topn': 6000,
            'post_nms_topn': 300,
            'nms_thresh': 0.7,
            'infer': ProposalOp.proposal_infer
        }
        for key in exp_res.keys():
            self.assertEqual(fake_node[key], exp_res[key])
| 31.012987
| 90
| 0.656198
|
4a0ad36a7401c7687e6e4536d32a967b74f42e75
| 1,792
|
py
|
Python
|
test/integration/vint/linting/policy/test_prohibit_autocmd_with_no_group.py
|
tmsanrinsha/vint
|
8c34196252b43d7361d0f58cb78cf2d3e4e4fbd0
|
[
"MIT"
] | 2
|
2021-06-15T15:07:28.000Z
|
2021-10-05T12:23:23.000Z
|
test/integration/vint/linting/policy/test_prohibit_autocmd_with_no_group.py
|
tmsanrinsha/vint
|
8c34196252b43d7361d0f58cb78cf2d3e4e4fbd0
|
[
"MIT"
] | null | null | null |
test/integration/vint/linting/policy/test_prohibit_autocmd_with_no_group.py
|
tmsanrinsha/vint
|
8c34196252b43d7361d0f58cb78cf2d3e4e4fbd0
|
[
"MIT"
] | null | null | null |
import unittest
from test.asserting.policy import PolicyAssertion, get_fixture_path
from vint.linting.level import Level
from vint.linting.policy.prohibit_autocmd_with_no_group import ProhibitAutocmdWithNoGroup
# fixture: autocmds wrapped in an augroup block (no violations expected)
VALID_VIM_SCRIPT_WITH_AUGROUP = get_fixture_path(
    'prohibit_autocmd_with_no_group_valid_with_augroup.vim'
)
# fixture: autocmds given an explicit group parameter (no violations expected)
VALID_VIM_SCRIPT_WITH_GROUP_PARAM = get_fixture_path(
    'prohibit_autocmd_with_no_group_valid_with_group_param.vim'
)
# fixture: bare autocmds with no group (violations expected on lines 1 and 6)
INVALID_VIM_SCRIPT = get_fixture_path(
    'prohibit_autocmd_with_no_group_invalid.vim'
)
class TestProhibitAutocmdWithNoGroup(PolicyAssertion, unittest.TestCase):
    """Integration tests for the ProhibitAutocmdWithNoGroup policy."""

    def create_violation(self, line_number, path):
        """Return the violation dict the policy is expected to report at
        column 1 of the given line in the given script."""
        position = {'line': line_number, 'column': 1, 'path': path}
        return {
            'name': 'ProhibitAutocmdWithNoGroup',
            'level': Level.WARNING,
            'position': position,
        }

    def test_get_violation_if_found_with_valid_file_with_augroup(self):
        # autocmds wrapped in an augroup are acceptable
        self.assertFoundNoViolations(VALID_VIM_SCRIPT_WITH_AUGROUP,
                                     ProhibitAutocmdWithNoGroup)

    def test_get_violation_if_found_with_valid_file_with_group_param(self):
        # autocmds carrying an explicit group parameter are acceptable
        self.assertFoundNoViolations(VALID_VIM_SCRIPT_WITH_GROUP_PARAM,
                                     ProhibitAutocmdWithNoGroup)

    def test_get_violation_if_found_with_invalid_file(self):
        # bare autocmds on lines 1 and 6 must each be reported
        expected_violations = [self.create_violation(line, INVALID_VIM_SCRIPT)
                               for line in (1, 6)]
        self.assertFoundViolationsEqual(INVALID_VIM_SCRIPT,
                                        ProhibitAutocmdWithNoGroup,
                                        expected_violations)


if __name__ == '__main__':
    unittest.main()
| 33.185185
| 89
| 0.686384
|
4a0ad4321adc708ec43c7990890f4582ca186cf4
| 9,587
|
py
|
Python
|
model/model.py
|
frank-xwang/RIDE-LongTailRecognition
|
5396100d33b75992e0a72dd01819513c10088611
|
[
"MIT"
] | 175
|
2020-12-04T19:23:32.000Z
|
2022-03-30T06:22:32.000Z
|
model/model.py
|
frank-xwang/RIDE-LongTailRecognition
|
5396100d33b75992e0a72dd01819513c10088611
|
[
"MIT"
] | 25
|
2020-12-10T13:37:38.000Z
|
2021-12-09T17:34:18.000Z
|
model/model.py
|
frank-xwang/RIDE-LongTailRecognition
|
5396100d33b75992e0a72dd01819513c10088611
|
[
"MIT"
] | 23
|
2020-12-21T09:51:07.000Z
|
2022-03-31T12:54:00.000Z
|
import torch.nn as nn
import torch.nn.functional as F
from base import BaseModel
from .fb_resnets import ResNet
from .fb_resnets import ResNeXt
from .fb_resnets import RIDEResNet
from .fb_resnets import RIDEResNeXt
from .fb_resnets import EAResNet
from .fb_resnets import EAResNeXt
from .ldam_drw_resnets import resnet_cifar
from .ldam_drw_resnets import ride_resnet_cifar
from .ldam_drw_resnets import ea_resnet_cifar
class Model(BaseModel):
    """Thin wrapper that delegates everything to a backbone network.

    Subclasses either supply ``backbone_class`` here, or pass ``None`` and
    assign ``self.backbone`` themselves after calling this constructor.
    """

    requires_target = False

    def __init__(self, num_classes, backbone_class=None):
        super().__init__()
        # None means the subclass installs the backbone itself.
        if backbone_class is not None:
            self.backbone = backbone_class(num_classes)

    def _hook_before_iter(self):
        # forward the pre-iteration hook to the backbone
        self.backbone._hook_before_iter()

    def forward(self, x, mode=None):
        out = self.backbone(x)
        assert mode is None
        return out
class EAModel(BaseModel):
    """Backbone wrapper for expert-assignment (EA) models.

    Unlike ``Model``, the backbone consumes the target during the forward
    pass and returns a ``(logits, extra_info)`` tuple.
    """

    requires_target = True
    confidence_model = True

    def __init__(self, num_classes, backbone_class=None):
        super().__init__()
        # None means the subclass installs the backbone itself.
        if backbone_class is not None:
            self.backbone = backbone_class(num_classes)

    def _hook_before_iter(self):
        # forward the pre-iteration hook to the backbone
        self.backbone._hook_before_iter()

    def forward(self, x, mode=None, target=None):
        out = self.backbone(x, target=target)
        assert isinstance(out, tuple)  # (logits, extra_info)
        assert mode is None
        return out
class ResNet10Model(Model):
    """ResNet-10: plain single-expert ResNet, or the RIDE multi-expert
    variant when ``num_experts > 1``. Extra kwargs go to the backbone."""

    def __init__(self, num_classes, reduce_dimension=False, layer3_output_dim=None, layer4_output_dim=None, use_norm=False, num_experts=1, **kwargs):
        super().__init__(num_classes, None)
        # keyword arguments shared by both backbone variants
        shared = dict(
            dropout=None,
            num_classes=num_classes,
            use_norm=use_norm,
            reduce_dimension=reduce_dimension,
            layer3_output_dim=layer3_output_dim,
            layer4_output_dim=layer4_output_dim,
        )
        if num_experts == 1:
            self.backbone = ResNet.ResNet(
                ResNet.BasicBlock, [1, 1, 1, 1], **shared, **kwargs)
        else:
            self.backbone = RIDEResNet.ResNet(
                ResNet.BasicBlock, [1, 1, 1, 1],
                num_experts=num_experts, **shared, **kwargs)
class ResNet10EAModel(EAModel):
    """ResNet-10 backbone with expert assignment."""

    def __init__(self, num_classes, reduce_dimension=False, layer3_output_dim=None, layer4_output_dim=None, num_experts=1, **kwargs):
        super().__init__(num_classes, None)
        self.backbone = EAResNet.ResNet(
            EAResNet.BasicBlock, [1, 1, 1, 1], dropout=None,
            num_classes=num_classes, reduce_dimension=reduce_dimension,
            layer3_output_dim=layer3_output_dim,
            layer4_output_dim=layer4_output_dim,
            num_experts=num_experts, **kwargs)
class ResNet32Model(Model):  # From LDAM_DRW
    """CIFAR-style ResNet-32; RIDE multi-expert variant when
    ``num_experts > 1``. Extra kwargs go to the backbone."""

    def __init__(self, num_classes, reduce_dimension=False, layer2_output_dim=None, layer3_output_dim=None, use_norm=False, num_experts=1, **kwargs):
        super().__init__(num_classes, None)
        # keyword arguments shared by both backbone variants
        shared = dict(
            num_classes=num_classes,
            reduce_dimension=reduce_dimension,
            layer2_output_dim=layer2_output_dim,
            layer3_output_dim=layer3_output_dim,
            use_norm=use_norm,
        )
        if num_experts == 1:
            self.backbone = resnet_cifar.ResNet_s(
                resnet_cifar.BasicBlock, [5, 5, 5], **shared, **kwargs)
        else:
            self.backbone = ride_resnet_cifar.ResNet_s(
                ride_resnet_cifar.BasicBlock, [5, 5, 5],
                num_experts=num_experts, **shared, **kwargs)
class ResNet32EAModel(EAModel):  # From LDAM_DRW
    """CIFAR-style ResNet-32 with expert assignment (defaults to 2 experts)."""

    def __init__(self, num_classes, reduce_dimension=False, layer2_output_dim=None, layer3_output_dim=None, num_experts=2, **kwargs):
        super().__init__(num_classes, None)
        self.backbone = ea_resnet_cifar.ResNet_s(
            ea_resnet_cifar.BasicBlock, [5, 5, 5],
            num_classes=num_classes, reduce_dimension=reduce_dimension,
            layer2_output_dim=layer2_output_dim,
            layer3_output_dim=layer3_output_dim,
            num_experts=num_experts, **kwargs)
class ResNet50Model(Model):
    """ResNet-50; RIDE multi-expert variant when ``num_experts > 1``."""

    def __init__(self, num_classes, reduce_dimension=False, layer3_output_dim=None, layer4_output_dim=None, use_norm=False, num_experts=1, **kwargs):
        super().__init__(num_classes, None)
        # keyword arguments shared by both backbone variants
        shared = dict(
            dropout=None,
            num_classes=num_classes,
            reduce_dimension=reduce_dimension,
            layer3_output_dim=layer3_output_dim,
            layer4_output_dim=layer4_output_dim,
            use_norm=use_norm,
        )
        if num_experts == 1:
            self.backbone = ResNet.ResNet(
                ResNet.Bottleneck, [3, 4, 6, 3], **shared, **kwargs)
        else:
            self.backbone = RIDEResNet.ResNet(
                RIDEResNet.Bottleneck, [3, 4, 6, 3],
                num_experts=num_experts, **shared, **kwargs)
class ResNet50EAModel(EAModel):
    """ResNet-50 with expert assignment; requires more than one expert."""

    def __init__(self, num_classes, reduce_dimension=False, layer3_output_dim=None, layer4_output_dim=None, num_experts=1, **kwargs):
        super().__init__(num_classes, None)
        # expert assignment only makes sense with multiple experts
        assert num_experts != 1
        self.backbone = EAResNet.ResNet(
            EAResNet.Bottleneck, [3, 4, 6, 3], dropout=None,
            num_classes=num_classes, reduce_dimension=reduce_dimension,
            layer3_output_dim=layer3_output_dim,
            layer4_output_dim=layer4_output_dim,
            num_experts=num_experts, **kwargs)
class ResNeXt50EAModel(EAModel):
    """ResNeXt-50 (32x4d) with expert assignment; requires more than one expert."""

    def __init__(self, num_classes, reduce_dimension=False, layer3_output_dim=None, layer4_output_dim=None, num_experts=1, **kwargs):
        super().__init__(num_classes, None)
        # expert assignment only makes sense with multiple experts
        assert num_experts != 1
        self.backbone = EAResNeXt.ResNext(
            EAResNeXt.Bottleneck, [3, 4, 6, 3],
            groups=32, width_per_group=4, dropout=None,
            num_classes=num_classes, reduce_dimension=reduce_dimension,
            layer3_output_dim=layer3_output_dim,
            layer4_output_dim=layer4_output_dim,
            num_experts=num_experts, **kwargs)
class ResNeXt50Model(Model):
    """ResNeXt-50 (32x4d); RIDE multi-expert variant when ``num_experts > 1``."""

    def __init__(self, num_classes, reduce_dimension=False, layer3_output_dim=None, layer4_output_dim=None, num_experts=1, **kwargs):
        super().__init__(num_classes, None)
        # keyword arguments shared by both backbone variants
        shared = dict(
            groups=32,
            width_per_group=4,
            dropout=None,
            num_classes=num_classes,
            reduce_dimension=reduce_dimension,
            layer3_output_dim=layer3_output_dim,
            layer4_output_dim=layer4_output_dim,
        )
        if num_experts == 1:
            self.backbone = ResNeXt.ResNext(
                ResNeXt.Bottleneck, [3, 4, 6, 3], **shared, **kwargs)
        else:
            self.backbone = RIDEResNeXt.ResNext(
                RIDEResNeXt.Bottleneck, [3, 4, 6, 3],
                num_experts=num_experts, **shared, **kwargs)
class ResNet101Model(Model):
    """ResNet-101; RIDE multi-expert variant when ``num_experts > 1``."""

    def __init__(self, num_classes, reduce_dimension=False, layer3_output_dim=None, layer4_output_dim=None, use_norm=False, num_experts=1, **kwargs):
        super().__init__(num_classes, None)
        # keyword arguments shared by both backbone variants
        shared = dict(
            dropout=None,
            num_classes=num_classes,
            reduce_dimension=reduce_dimension,
            layer3_output_dim=layer3_output_dim,
            layer4_output_dim=layer4_output_dim,
            use_norm=use_norm,
        )
        if num_experts == 1:
            self.backbone = ResNet.ResNet(
                ResNet.Bottleneck, [3, 4, 23, 3], **shared, **kwargs)
        else:
            self.backbone = RIDEResNet.ResNet(
                RIDEResNet.Bottleneck, [3, 4, 23, 3],
                num_experts=num_experts, **shared, **kwargs)
class ResNet152Model(Model):
    """ResNet-152; RIDE multi-expert variant when ``num_experts > 1``."""

    def __init__(self, num_classes, reduce_dimension=False, layer3_output_dim=None, layer4_output_dim=None, use_norm=False, num_experts=1, **kwargs):
        super().__init__(num_classes, None)
        # keyword arguments shared by both backbone variants
        shared = dict(
            dropout=None,
            num_classes=num_classes,
            reduce_dimension=reduce_dimension,
            layer3_output_dim=layer3_output_dim,
            layer4_output_dim=layer4_output_dim,
            use_norm=use_norm,
        )
        if num_experts == 1:
            self.backbone = ResNet.ResNet(
                ResNet.Bottleneck, [3, 8, 36, 3], **shared, **kwargs)
        else:
            self.backbone = RIDEResNet.ResNet(
                RIDEResNet.Bottleneck, [3, 8, 36, 3],
                num_experts=num_experts, **shared, **kwargs)
class ResNet152EAModel(EAModel):
    """ResNet-152 with expert assignment; requires more than one expert."""

    def __init__(self, num_classes, reduce_dimension=False, layer3_output_dim=None, layer4_output_dim=None, num_experts=1, **kwargs):
        super().__init__(num_classes, None)
        # expert assignment only makes sense with multiple experts
        assert num_experts != 1
        self.backbone = EAResNet.ResNet(
            EAResNet.Bottleneck, [3, 8, 36, 3], dropout=None,
            num_classes=num_classes, reduce_dimension=reduce_dimension,
            layer3_output_dim=layer3_output_dim,
            layer4_output_dim=layer4_output_dim,
            num_experts=num_experts, **kwargs)
class ResNeXt152Model(Model):
    """ResNeXt-152 (32x4d); RIDE multi-expert variant when ``num_experts > 1``.

    Fix: this was the only wrapper in the module whose ``__init__`` did not
    accept/forward ``**kwargs`` to the backbone; added for consistency with
    the sibling classes (e.g. ResNeXt50Model). Backward-compatible: existing
    calls are unchanged.
    """

    def __init__(self, num_classes, reduce_dimension=False, layer3_output_dim=None, layer4_output_dim=None, num_experts=1, **kwargs):
        super().__init__(num_classes, None)
        if num_experts == 1:
            self.backbone = ResNeXt.ResNext(ResNeXt.Bottleneck, [3, 8, 36, 3], groups=32, width_per_group=4, dropout=None, num_classes=num_classes, reduce_dimension=reduce_dimension, layer3_output_dim=layer3_output_dim, layer4_output_dim=layer4_output_dim, **kwargs)
        else:
            self.backbone = RIDEResNeXt.ResNext(RIDEResNeXt.Bottleneck, [3, 8, 36, 3], groups=32, width_per_group=4, dropout=None, num_classes=num_classes, reduce_dimension=reduce_dimension, layer3_output_dim=layer3_output_dim, layer4_output_dim=layer4_output_dim, num_experts=num_experts, **kwargs)
| 71.014815
| 298
| 0.765099
|
4a0ad43931d66279e21fb312a45caad54f76798d
| 1,453
|
py
|
Python
|
python/test/test_design_sensor.py
|
thracesystems/powermeter-api
|
7bdab034ff916ee49e986de88f157bd044e981c1
|
[
"Apache-2.0"
] | null | null | null |
python/test/test_design_sensor.py
|
thracesystems/powermeter-api
|
7bdab034ff916ee49e986de88f157bd044e981c1
|
[
"Apache-2.0"
] | null | null | null |
python/test/test_design_sensor.py
|
thracesystems/powermeter-api
|
7bdab034ff916ee49e986de88f157bd044e981c1
|
[
"Apache-2.0"
] | null | null | null |
# coding: utf-8
"""
PowerMeter API
API # noqa: E501
The version of the OpenAPI document: 2021.4.1
Generated by: https://openapi-generator.tech
"""
from __future__ import absolute_import
import unittest
import datetime
import powermeter_api
from powermeter_api.models.design_sensor import DesignSensor # noqa: E501
from powermeter_api.rest import ApiException
class TestDesignSensor(unittest.TestCase):
    """DesignSensor unit test stubs"""

    def setUp(self):
        pass

    def tearDown(self):
        pass

    def make_instance(self, include_optional):
        """Build a DesignSensor fixture.

        When ``include_optional`` is False only the required parameters are
        supplied; when True the optional ``id`` is populated as well.
        """
        params = {'name': '0', 'x': 1.337, 'y': 1.337}
        if include_optional:
            params['id'] = 56
        return DesignSensor(**params)

    def testDesignSensor(self):
        """Test DesignSensor"""
        # smoke-construct both the required-only and the full variants
        self.make_instance(include_optional=False)
        self.make_instance(include_optional=True)


if __name__ == '__main__':
    unittest.main()
| 24.627119
| 82
| 0.604955
|
4a0ad472c4395e8d4b9a8dff046ecaa9ba7bfc56
| 2,662
|
py
|
Python
|
tests/fiaas_deploy_daemon/specs/test_lookup.py
|
ariadnarouco/fiaas-deploy-daemon
|
644c09e79f6f07bd85ca9153e731755836f7e454
|
[
"Apache-2.0"
] | 57
|
2017-09-13T09:30:04.000Z
|
2022-03-23T12:00:56.000Z
|
tests/fiaas_deploy_daemon/specs/test_lookup.py
|
ariadnarouco/fiaas-deploy-daemon
|
644c09e79f6f07bd85ca9153e731755836f7e454
|
[
"Apache-2.0"
] | 113
|
2019-02-26T16:10:13.000Z
|
2022-03-31T13:11:03.000Z
|
tests/fiaas_deploy_daemon/specs/test_lookup.py
|
ariadnarouco/fiaas-deploy-daemon
|
644c09e79f6f07bd85ca9153e731755836f7e454
|
[
"Apache-2.0"
] | 30
|
2019-02-22T11:16:24.000Z
|
2022-03-21T22:37:52.000Z
|
#!/usr/bin/env python
# -*- coding: utf-8
# Copyright 2017-2019 The FIAAS Authors
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from __future__ import unicode_literals, absolute_import
import pytest
from fiaas_deploy_daemon.specs.factory import InvalidConfiguration
from fiaas_deploy_daemon.specs.lookup import LookupMapping
# sample user-supplied configuration: the "config" side of the lookup
CONFIG = {
    "object": {
        "complex": {
            "first": 1,
            "second": 2
        }
    },
    "list": [
        "one",
        "two",
        "three"
    ]
}
# matching defaults: values here surface when CONFIG has no entry
# (e.g. "simple" comes only from DEFAULTS in the tests below)
DEFAULTS = {
    "object": {
        "simple": 1,
        "complex": {}
    },
    "list": []
}
class TestLookup(object):
    """Tests for LookupMapping's config-over-defaults lookup semantics."""
    @pytest.fixture
    def lookup(self):
        # CONFIG values layered over DEFAULTS
        return LookupMapping(CONFIG, DEFAULTS)
    def test_dict(self, lookup):
        # "simple" exists only in DEFAULTS; "complex" only in CONFIG
        assert lookup["object"]["simple"] == 1
        assert lookup["object"]["complex"] == {
            "first": 1, "second": 2
        }
        assert lookup["object"]["complex"]["first"] == 1
    def test_list(self, lookup):
        # lists come from CONFIG wholesale and support indexing
        assert lookup["list"] == ["one", "two", "three"]
        assert lookup["list"][0] == "one"
        assert lookup["list"][-1] == "three"
    @pytest.mark.parametrize("type,expected", (
        ("object", 2),
        ("list", 3),
    ))
    def test_len(self, lookup, type, expected):
        # len() over the merged view: 2 keys under "object", 3 list items
        assert len(lookup[type]) == expected
    def test_items(self, lookup):
        # NOTE(review): assumes LookupMapping.items() returns a list of pairs
        # (a plain dict's items() view would never compare equal to a list)
        # -- confirm against LookupMapping's implementation.
        assert lookup["object"].items() == [("simple", 1), ("complex", {"first": 1, "second": 2})]
    @pytest.mark.parametrize("config,defaults", (
        (CONFIG, 1),
        (CONFIG, True),
        (CONFIG, "string"),
        (1, DEFAULTS),
        (True, DEFAULTS),
        ("string", DEFAULTS)
    ))
    def test_incompatible_types(self, config, defaults):
        # mixing a mapping with a scalar (in either position) must be rejected
        with pytest.raises(InvalidConfiguration):
            LookupMapping(config, defaults)
    @pytest.mark.parametrize("config,defaults", (
        (None, 1),
        (None, True),
        (None, "string"),
        (None, DEFAULTS),
        (1, None),
        (True, None),
        ("string", None),
        (CONFIG, None)
    ))
    def test_ignore_empty(self, config, defaults):
        # a None config or None defaults is tolerated (no exception expected)
        LookupMapping(config, defaults)
| 27.163265
| 98
| 0.589031
|
4a0ad4bc2a597c475988451262eafc0361d9cd4a
| 5,098
|
py
|
Python
|
docs/jnpr_healthbot_swagger/swagger_client/models/destination_schema.py
|
dmontagner/healthbot-py-client
|
0952e0a9e7ed63c9fe84879f40407c3327735252
|
[
"Apache-2.0"
] | null | null | null |
docs/jnpr_healthbot_swagger/swagger_client/models/destination_schema.py
|
dmontagner/healthbot-py-client
|
0952e0a9e7ed63c9fe84879f40407c3327735252
|
[
"Apache-2.0"
] | null | null | null |
docs/jnpr_healthbot_swagger/swagger_client/models/destination_schema.py
|
dmontagner/healthbot-py-client
|
0952e0a9e7ed63c9fe84879f40407c3327735252
|
[
"Apache-2.0"
] | null | null | null |
# coding: utf-8
"""
Healthbot APIs
API interface for Healthbot application # noqa: E501
OpenAPI spec version: 1.0.0
Contact: healthbot-hackers@juniper.net
Generated by: https://github.com/swagger-api/swagger-codegen.git
"""
import pprint
import re # noqa: F401
import six
from swagger_client.models.destination_schema_disk import DestinationSchemaDisk # noqa: F401,E501
from swagger_client.models.destination_schema_email import DestinationSchemaEmail # noqa: F401,E501
class DestinationSchema(object):
    """Swagger model for a Healthbot notification destination.

    NOTE: This class is auto generated by the swagger code generator program.
    Do not edit the class manually.

    Attributes:
      swagger_types (dict): The key is attribute name
                            and the value is attribute type.
      attribute_map (dict): The key is attribute name
                            and the value is json key in definition.
    """
    swagger_types = {
        'disk': 'DestinationSchemaDisk',
        'email': 'DestinationSchemaEmail',
        'name': 'str'
    }

    attribute_map = {
        'disk': 'disk',
        'email': 'email',
        'name': 'name'
    }

    def __init__(self, disk=None, email=None, name=None):  # noqa: E501
        """DestinationSchema - a model defined in Swagger

        :param disk: optional disk destination settings.
        :param email: optional email destination settings.
        :param name: required destination name; validated by the setter.
        """  # noqa: E501
        self._disk = None
        self._email = None
        self._name = None
        self.discriminator = None
        if disk is not None:
            self.disk = disk
        if email is not None:
            self.email = email
        # `name` is mandatory; assigning through the property enforces the
        # not-None / length / pattern validation below.
        self.name = name

    @property
    def disk(self):
        """Gets the disk of this DestinationSchema.  # noqa: E501

        :return: The disk of this DestinationSchema.  # noqa: E501
        :rtype: DestinationSchemaDisk
        """
        return self._disk

    @disk.setter
    def disk(self, disk):
        """Sets the disk of this DestinationSchema.

        :param disk: The disk of this DestinationSchema.  # noqa: E501
        :type: DestinationSchemaDisk
        """
        self._disk = disk

    @property
    def email(self):
        """Gets the email of this DestinationSchema.  # noqa: E501

        :return: The email of this DestinationSchema.  # noqa: E501
        :rtype: DestinationSchemaEmail
        """
        return self._email

    @email.setter
    def email(self, email):
        """Sets the email of this DestinationSchema.

        :param email: The email of this DestinationSchema.  # noqa: E501
        :type: DestinationSchemaEmail
        """
        self._email = email

    @property
    def name(self):
        """Gets the name of this DestinationSchema.  # noqa: E501

        Name of the destination. Should be of pattern [a-zA-Z][a-zA-Z0-9_-]*  # noqa: E501

        :return: The name of this DestinationSchema.  # noqa: E501
        :rtype: str
        """
        return self._name

    @name.setter
    def name(self, name):
        """Sets the name of this DestinationSchema.

        Name of the destination. Should be of pattern [a-zA-Z][a-zA-Z0-9_-]*  # noqa: E501

        :param name: The name of this DestinationSchema.  # noqa: E501
        :type: str
        :raises ValueError: if name is None, longer than 64 chars, or does
            not match the required pattern.
        """
        if name is None:
            raise ValueError("Invalid value for `name`, must not be `None`")  # noqa: E501
        # `name` is known non-None past this point, so the generated
        # `name is not None and` guards were redundant and are dropped.
        if len(name) > 64:
            raise ValueError("Invalid value for `name`, length must be less than or equal to `64`")  # noqa: E501
        if not re.search('^[a-zA-Z][a-zA-Z0-9_-]*$', name):  # noqa: E501
            raise ValueError("Invalid value for `name`, must be a follow pattern or equal to `/^[a-zA-Z][a-zA-Z0-9_-]*$/`")  # noqa: E501

        self._name = name

    def to_dict(self):
        """Returns the model properties as a dict"""
        result = {}

        # `dict.items()` behaves identically to `six.iteritems()` on
        # Python 3 and removes this method's dependency on `six`.
        for attr, _ in self.swagger_types.items():
            value = getattr(self, attr)
            if isinstance(value, list):
                result[attr] = list(map(
                    lambda x: x.to_dict() if hasattr(x, "to_dict") else x,
                    value
                ))
            elif hasattr(value, "to_dict"):
                result[attr] = value.to_dict()
            elif isinstance(value, dict):
                result[attr] = dict(map(
                    lambda item: (item[0], item[1].to_dict())
                    if hasattr(item[1], "to_dict") else item,
                    value.items()
                ))
            else:
                result[attr] = value

        return result

    def to_str(self):
        """Returns the string representation of the model"""
        return pprint.pformat(self.to_dict())

    def __repr__(self):
        """For `print` and `pprint`"""
        return self.to_str()

    def __eq__(self, other):
        """Returns true if both objects are equal"""
        if not isinstance(other, DestinationSchema):
            return False

        return self.__dict__ == other.__dict__

    def __ne__(self, other):
        """Returns true if both objects are not equal"""
        return not self == other
| 29.131429
| 137
| 0.576893
|
4a0ad51e388229221e8667a31c1d25946b21cd84
| 4,395
|
py
|
Python
|
classifiers/random_supervised.py
|
dulayjm/data-science-project
|
1fc992e9083f724bd2c48064328e504be698d461
|
[
"MIT"
] | null | null | null |
classifiers/random_supervised.py
|
dulayjm/data-science-project
|
1fc992e9083f724bd2c48064328e504be698d461
|
[
"MIT"
] | null | null | null |
classifiers/random_supervised.py
|
dulayjm/data-science-project
|
1fc992e9083f724bd2c48064328e504be698d461
|
[
"MIT"
] | null | null | null |
import os
from argparse import ArgumentParser
from random import choice, seed
from typing import List
from torchvision.datasets import Omniglot
from torchvision import transforms
from tqdm import tqdm
from data.dataset import OmniglotReactionTimeDataset
from data.full_omniglot import FullOmniglot
from helpers.stratified_sampler import StratifiedKFoldSampler
from helpers.statistical_functions import *
if __name__ == "__main__":
    # Random-guess baseline classifier over Omniglot variants: predicts a
    # uniformly random known label for every sample and reports accuracy,
    # precision, recall and F1.
    parser = ArgumentParser()
    parser.add_argument("--data_source", type=str, choices=["full", "background", "evaluation", "reaction-time"],
                        default="full")
    parser.add_argument("--seed", type=int)
    parser.add_argument("--split-type", type=str, choices=["none", "random", "stratified"], default="none")
    parser.add_argument("--split-value", type=float)
    args = parser.parse_args()
    print(f"Settings: Data Source - {args.data_source}; "
          f"Seed: {args.seed if args.seed else 'None'}; "
          f"Split Type: {args.split_type}; "
          f"Split Value: {args.split_value if args.split_value else 'None'}")
    # NOTE(review): `if args.seed:` treats an explicit seed of 0 as "no
    # seed"; 0 is otherwise a valid seed value.
    if args.seed:
        if args.seed >= 0:
            seed(args.seed)
        else:
            raise ValueError("Invalid seed. Please try again.")
    transform = transforms.Compose([
        transforms.ToTensor()
    ])
    # Retrieve data from dataset:
    if args.data_source == "full":
        dataset = FullOmniglot(os.getcwd(), transform=transform)
    elif args.data_source == "background":
        dataset = Omniglot(os.getcwd(), transform=transform)
    elif args.data_source == "evaluation":
        dataset = Omniglot(os.getcwd(), background=False, transform=transform)
    elif args.data_source == "reaction-time":
        dataset = OmniglotReactionTimeDataset('../sigma_dataset.csv', transforms=transform)
    else:
        raise ValueError("Appropriate dataset not specified. Please try again with one of the possible options.")
    # TODO: add this to perform random trials under stratified condition
    if args.split_type == "stratified":
        # Materialize every fold's index list up front; --split-value is the
        # number of folds here.
        folds = [fold for fold in StratifiedKFoldSampler(dataset, int(args.split_value))]
    # Accumulate the labels:
    # TODO: there is an assumption here that "stratified" contains evenly-split data throughout all folds.
    #  However, it is possible that, in some circumstances, "stratified" won't get an instance of a class.
    #  For our problem, it does not yet seem relevant, but this is a limitation of the code.
    if args.split_type in ["none", "stratified"]:
        # Collect the set of distinct labels (kept as a list to preserve
        # first-seen order) so `choice()` can draw uniformly from them.
        labels: List[int] = []
        for image, label in tqdm(dataset):
            if label not in labels:
                labels.append(label)
    else:
        raise NotImplementedError("Random splits not yet implemented.")
    # Perform random predictions, reseeding seed() now that the static part of the seed is used up.
    seed()
    ground_truth: list = []
    predictions: list = []
    # `folds` only exists in the stratified branch; the `and` short-circuits
    # before evaluating it for other split types.
    if args.split_type == "stratified" and folds is not None:
        accuracies, precisions, recalls, f1_scores = [], [], [], []
        for fold_number, fold in tqdm(enumerate(folds, start=1)):
            print(fold)
            for index in fold:
                predicted_label: int = choice(labels)
                predictions.append(predicted_label)
                image, label = dataset[index]
                ground_truth.append(label)
            accuracy, precision, recall, f_score = calculate_base_statistics(predictions, ground_truth)
            accuracies.append(accuracy)
            precisions.append(precision)
            recalls.append(recall)
            f1_scores.append(f_score)
            display_base_statistics(args.seed, accuracy, precision, recall, f_score, fold_number)
            # Reset the per-fold accumulators before the next fold.
            predictions.clear()
            ground_truth.clear()
        else:
            # for/else: runs once after the loop finishes normally (no
            # break), i.e. after all folds — aggregate per-fold statistics.
            distributions: list = calculate_fold_statistics(accuracies, precisions, recalls, f1_scores)
            display_fold_statistics(args.seed, args.split_value, *distributions)
    elif args.split_type == "none":
        for image, label in tqdm(dataset):
            predicted_label: int = choice(labels)
            predictions.append(predicted_label)
            ground_truth.append(label)
        accuracy, precision, recall, f_score = calculate_base_statistics(predictions, ground_truth)
        display_base_statistics(args.seed, accuracy, precision, recall, f_score)
| 43.514851
| 113
| 0.669397
|
4a0ad577f90f88f900418ddf3efe7ff693900e8b
| 3,370
|
py
|
Python
|
homeassistant/components/hive/alarm_control_panel.py
|
MrDelik/core
|
93a66cc357b226389967668441000498a10453bb
|
[
"Apache-2.0"
] | 3
|
2021-11-22T22:37:43.000Z
|
2022-03-17T00:55:28.000Z
|
homeassistant/components/hive/alarm_control_panel.py
|
MrDelik/core
|
93a66cc357b226389967668441000498a10453bb
|
[
"Apache-2.0"
] | 25
|
2021-11-24T06:24:10.000Z
|
2022-03-31T06:23:06.000Z
|
homeassistant/components/hive/alarm_control_panel.py
|
MrDelik/core
|
93a66cc357b226389967668441000498a10453bb
|
[
"Apache-2.0"
] | 3
|
2022-01-02T18:49:54.000Z
|
2022-01-25T02:03:54.000Z
|
"""Support for the Hive alarm."""
from datetime import timedelta
from homeassistant.components.alarm_control_panel import AlarmControlPanelEntity
from homeassistant.components.alarm_control_panel.const import (
SUPPORT_ALARM_ARM_AWAY,
SUPPORT_ALARM_ARM_NIGHT,
)
from homeassistant.config_entries import ConfigEntry
from homeassistant.const import (
STATE_ALARM_ARMED_AWAY,
STATE_ALARM_ARMED_NIGHT,
STATE_ALARM_DISARMED,
STATE_ALARM_TRIGGERED,
)
from homeassistant.core import HomeAssistant
from homeassistant.helpers.entity import DeviceInfo
from homeassistant.helpers.entity_platform import AddEntitiesCallback
from . import HiveEntity
from .const import DOMAIN
ICON = "mdi:security"
PARALLEL_UPDATES = 0
SCAN_INTERVAL = timedelta(seconds=15)
HIVETOHA = {
"home": STATE_ALARM_DISARMED,
"asleep": STATE_ALARM_ARMED_NIGHT,
"away": STATE_ALARM_ARMED_AWAY,
}
async def async_setup_entry(
    hass: HomeAssistant, entry: ConfigEntry, async_add_entities: AddEntitiesCallback
) -> None:
    """Set up the Hive alarm control panel from a config entry."""
    hive = hass.data[DOMAIN][entry.entry_id]
    devices = hive.session.deviceList.get("alarm_control_panel")
    if devices:
        entities = [HiveAlarmControlPanelEntity(hive, dev) for dev in devices]
        async_add_entities(entities, True)
class HiveAlarmControlPanelEntity(HiveEntity, AlarmControlPanelEntity):
    """Representation of a Hive alarm."""
    _attr_icon = ICON
    @property
    def unique_id(self):
        """Return unique ID of entity."""
        return self._unique_id
    @property
    def device_info(self) -> DeviceInfo:
        """Return device information about this Hive alarm."""
        return DeviceInfo(
            identifiers={(DOMAIN, self.device["device_id"])},
            model=self.device["deviceData"]["model"],
            manufacturer=self.device["deviceData"]["manufacturer"],
            name=self.device["device_name"],
            sw_version=self.device["deviceData"]["version"],
            via_device=(DOMAIN, self.device["parentDevice"]),
        )
    @property
    def name(self):
        """Return the name of the alarm."""
        return self.device["haName"]
    @property
    def available(self):
        """Return if the device is available."""
        return self.device["deviceData"]["online"]
    @property
    def state(self):
        """Return state of alarm."""
        # A truthy alarm "state" means the siren has been triggered;
        # otherwise map Hive's mode string (home/asleep/away) onto the
        # corresponding Home Assistant alarm state via HIVETOHA.
        if self.device["status"]["state"]:
            return STATE_ALARM_TRIGGERED
        return HIVETOHA[self.device["status"]["mode"]]
    @property
    def supported_features(self):
        """Return the list of supported features."""
        return SUPPORT_ALARM_ARM_NIGHT | SUPPORT_ALARM_ARM_AWAY
    async def async_alarm_disarm(self, code=None):
        """Send disarm command."""
        await self.hive.alarm.setMode(self.device, "home")
    async def async_alarm_arm_night(self, code=None):
        """Send arm night command."""
        await self.hive.alarm.setMode(self.device, "asleep")
    async def async_alarm_arm_away(self, code=None):
        """Send arm away command."""
        await self.hive.alarm.setMode(self.device, "away")
    async def async_update(self):
        """Update all Node data from Hive."""
        await self.hive.session.updateData(self.device)
        self.device = await self.hive.alarm.getAlarm(self.device)
| 32.095238
| 84
| 0.687537
|
4a0ad602368505ff374c51fe50543f0f78868466
| 1,947
|
py
|
Python
|
lattice/models.py
|
aditya2695/Applicatiopn-for-Computation-of-Lattice-Invariants
|
dd8dee5e385c8e79c8fb411eb9a0836567700982
|
[
"MIT"
] | null | null | null |
lattice/models.py
|
aditya2695/Applicatiopn-for-Computation-of-Lattice-Invariants
|
dd8dee5e385c8e79c8fb411eb9a0836567700982
|
[
"MIT"
] | null | null | null |
lattice/models.py
|
aditya2695/Applicatiopn-for-Computation-of-Lattice-Invariants
|
dd8dee5e385c8e79c8fb411eb9a0836567700982
|
[
"MIT"
] | null | null | null |
from django.db import models
# Create your models here.
class Lattice(models.Model):
    """A lattice defined by its unit-cell lengths (a, b, c) and angles."""
    name = models.CharField(max_length=255)
    length_a = models.FloatField()
    length_b = models.FloatField()
    length_c = models.FloatField()
    angle_a = models.FloatField()
    angle_b = models.FloatField()
    angle_c = models.FloatField()
    # Image reference stored as text (URL/path), not as an ImageField.
    image =models.CharField(max_length=2500)
class LatticeTypes(models.Model):
    """A lattice type: unit-cell parameters plus its Bravais classification."""
    name = models.CharField(max_length=255)
    length_a = models.FloatField()
    length_b = models.FloatField()
    length_c = models.FloatField()
    angle_a = models.FloatField()
    angle_b = models.FloatField()
    angle_c = models.FloatField()
    # Image reference stored as text (URL/path), not as an ImageField.
    image =models.CharField(max_length=2500)
    bravais_types =models.CharField(max_length=255)
class lattice_2D_data(models.Model):
    """2D lattice parameters: side lengths a, b and the angle gamma."""
    id = models.AutoField(primary_key=True)
    a = models.FloatField()
    b = models.FloatField()
    gamma = models.FloatField()
class lattice_3D_data(models.Model):
    """3D lattice parameters (a, b, c, alpha, beta, gamma) with Bravais type."""
    identifier = models.CharField(max_length=6)
    a = models.FloatField()
    b = models.FloatField()
    c = models.FloatField()
    alpha = models.FloatField()
    beta = models.FloatField()
    gamma = models.FloatField()
    bravais_types =models.CharField(max_length=255)
class latticeEntries(models.Model):
    """User-submitted 3D lattice parameter entries with a creation timestamp.

    Fix: the class previously did not inherit from ``models.Model``, so its
    ``models.*Field`` declarations were inert class attributes — Django
    created no table and the fields never worked as model fields.  Deriving
    from ``models.Model`` makes it a real model; the declared fields are
    unchanged.
    """
    id = models.AutoField(primary_key=True)
    a = models.FloatField()
    b = models.FloatField()
    c = models.FloatField()
    alpha = models.FloatField()
    beta = models.FloatField()
    gamma = models.FloatField()
    bravais_types =models.CharField(max_length=255)
    # Set once when the row is first created.
    addedTime = models.DateTimeField(auto_now_add=True)
class UserFile(models.Model):
    """An uploaded user image, stored under ``user_images/``."""
    timestamp = models.DateTimeField(auto_now_add=True)
    file = models.FileField(verbose_name="User image", upload_to="user_images")
class FileModel(models.Model):
    """A generic uploaded document, stored under ``media/``."""
    doc = models.FileField(upload_to='media/')
class FilesUpload(models.Model):
    """A bare file upload with Django's default storage location."""
    file = models.FileField()
| 26.671233
| 79
| 0.703133
|
4a0ad6c81e812a0d8bdaa188b29d4f096c871212
| 58
|
py
|
Python
|
api/reply.py
|
TClaypool00/ExpenseTrackerClient-Python
|
44f45e7e7b7434d19a20a0d1b565592a157bed88
|
[
"MIT"
] | null | null | null |
api/reply.py
|
TClaypool00/ExpenseTrackerClient-Python
|
44f45e7e7b7434d19a20a0d1b565592a157bed88
|
[
"MIT"
] | null | null | null |
api/reply.py
|
TClaypool00/ExpenseTrackerClient-Python
|
44f45e7e7b7434d19a20a0d1b565592a157bed88
|
[
"MIT"
] | null | null | null |
from .api import base_url
# Endpoint prefix for the reply resource of the Expense Tracker API.
reply_url = base_url + 'reply/'
| 19.333333
| 31
| 0.741379
|
4a0ad76140b691f3801647618db07b670805d85e
| 2,845
|
py
|
Python
|
xonsh/xontribs_meta.py
|
sthagen/xonsh-xonsh
|
5f418483015dfdac064c69a3891d7769971a0772
|
[
"BSD-2-Clause-FreeBSD"
] | null | null | null |
xonsh/xontribs_meta.py
|
sthagen/xonsh-xonsh
|
5f418483015dfdac064c69a3891d7769971a0772
|
[
"BSD-2-Clause-FreeBSD"
] | null | null | null |
xonsh/xontribs_meta.py
|
sthagen/xonsh-xonsh
|
5f418483015dfdac064c69a3891d7769971a0772
|
[
"BSD-2-Clause-FreeBSD"
] | null | null | null |
"""
This modules is the place where one would define the xontribs.
"""
import importlib.util
import typing as tp
from pathlib import Path
from xonsh.lazyasd import LazyObject, lazyobject
class _XontribPkg(tp.NamedTuple):
    """Class to define package information of a xontrib.

    Attributes
    ----------
    install
        a mapping of tools with respective install commands. e.g. {"pip": "pip install xontrib"}
    license
        license type of the xontrib package
    name
        full name of the package. e.g. "xontrib-argcomplete"
    url
        URL to the homepage of the xontrib package.
    """

    install: tp.Dict[str, str]
    license: str = ""
    name: str = ""
    url: tp.Optional[str] = None
class Xontrib(tp.NamedTuple):
    """Meta class that is used to describe xontribs.

    Attributes
    ----------
    url
        url to the home page of the xontrib.
    description
        short description about the xontrib (may be a LazyObject that is
        resolved on first access).
    package
        pkg information for installing the xontrib
    tags
        category.
    """

    url: str = ""
    description: tp.Union[str, LazyObject] = ""
    package: tp.Optional[_XontribPkg] = None
    tags: tp.Tuple[str, ...] = ()
def get_module_docstring(module: str) -> str:
    """Return a module's docstring by parsing its source, without importing it.

    Returns an empty string when the module cannot be located, has no file
    on disk (builtins, namespace packages), or has no docstring.
    """
    import ast

    spec = importlib.util.find_spec(module)
    if not (spec and spec.has_location and spec.origin):
        return ""
    tree = ast.parse(Path(spec.origin).read_text())
    return ast.get_docstring(tree) or ""
def get_xontribs() -> tp.Dict[str, Xontrib]:
    """Return xontrib definitions lazily."""
    xontribs = {}
    for name, xontrib in get_installed_xontribs():
        xontribs[name] = xontrib
    return xontribs
def get_installed_xontribs(pkg_name="xontrib"):
    """List all core packages + newly installed xontribs.

    Yields ``(name, Xontrib)`` pairs for every ``*.py``/``*.xsh`` module or
    package found under *pkg_name*'s search locations.
    """
    core_pkg = _XontribPkg(
        name="xonsh",
        license="BSD 3-clause",
        install={
            "conda": "conda install -c conda-forge xonsh",
            "pip": "xpip install xonsh",
            "aura": "sudo aura -A xonsh",
            "yaourt": "yaourt -Sa xonsh",
        },
        url="http://xon.sh",
    )
    spec = importlib.util.find_spec(pkg_name)

    def iter_paths():
        # Walk every search location of the (possibly namespace) package.
        for loc in spec.submodule_search_locations:
            path = Path(loc)
            if path.exists():
                yield from path.iterdir()

    def iter_modules():
        # pkgutil is not finding `*.xsh` files
        for path in iter_paths():
            if path.suffix in {".py", ".xsh"}:
                yield path.stem

            elif path.is_dir():
                if (path / "__init__.py").exists():
                    yield path.name

    for name in iter_modules():
        yield name, Xontrib(
            url="http://xon.sh",
            # Bug fix: bind `name` as a default argument.  The original
            # zero-arg lambda captured `name` by reference (late binding),
            # so a lazily-forced description could read whichever value
            # `name` held at evaluation time instead of the module this
            # Xontrib actually describes.
            description=lazyobject(lambda name=name: get_module_docstring(f"xontrib.{name}")),
            package=core_pkg,
        )
| 26.839623
| 96
| 0.6
|
4a0ad762b24fef0ab2f25de3a301b19946ba93d6
| 1,671
|
py
|
Python
|
AWS/lambda/wakeup-web-ec2.py
|
grtfou/devops-tools
|
8a8e0539d98fe6958f4da9d3af57021f03fce1c4
|
[
"MIT"
] | 1
|
2016-03-02T16:21:26.000Z
|
2016-03-02T16:21:26.000Z
|
AWS/lambda/wakeup-web-ec2.py
|
grtfou/DevOp-tools
|
8a8e0539d98fe6958f4da9d3af57021f03fce1c4
|
[
"MIT"
] | null | null | null |
AWS/lambda/wakeup-web-ec2.py
|
grtfou/DevOp-tools
|
8a8e0539d98fe6958f4da9d3af57021f03fce1c4
|
[
"MIT"
] | 1
|
2017-10-18T20:12:08.000Z
|
2017-10-18T20:12:08.000Z
|
"""
Start a EC2 instance and associate ip to it.
Then set Router 53 to map the ip.
"""
import os
import sys
root = os.environ["LAMBDA_TASK_ROOT"]
sys.path.insert(0, root)
import boto3
from boto3.session import Session
AWS_KEY_ID = 'YourIAMID'
AWS_SECRET_KEY = 'YourIAMSecretKey'
INSTANCE_ID = 'i-example'
REGION_NAME = 'YourEC2Region'
HOSTED_ZONE_ID = 'YourRouter53ZoneID'
RECORD_NAME = 'your.web.domain'
def lambda_handler(event, context):
    """Start the EC2 instance, attach a new Elastic IP and point Route 53 at it.

    Standard AWS Lambda entry point; `event` and `context` are not used.
    Returns True on completion.
    """
    session = Session(aws_access_key_id=AWS_KEY_ID,
                      aws_secret_access_key=AWS_SECRET_KEY,
                      region_name=REGION_NAME)
    ec2 = session.resource('ec2')
    instance = ec2.Instance(INSTANCE_ID)
    # associate ip to instance
    # NOTE(review): this client uses boto3's default credentials/region, not
    # the explicit `session` built above — confirm both resolve to the same
    # account and region, or the address may be allocated elsewhere.
    client = boto3.client('ec2')
    response = client.allocate_address()
    myip = response['PublicIp']
    # associate ip to instance
    client.associate_address(InstanceId=INSTANCE_ID, PublicIp=myip)
    # set Router 53
    client_r53 = boto3.client('route53')
    # UPSERT creates the A record if missing, or overwrites it with the
    # freshly allocated address.
    client_r53.change_resource_record_sets(
        HostedZoneId=HOSTED_ZONE_ID,
        ChangeBatch={
            'Comment': 'set ip',
            'Changes': [
                {
                    'Action': 'UPSERT',
                    'ResourceRecordSet': {
                        'Name': RECORD_NAME,
                        'Type': 'A',
                        'TTL': 300,
                        'ResourceRecords': [
                            {
                                'Value': myip
                            }
                        ]
                    }
                },
            ]
        }
    )
    # start server
    instance.start()
    return True
| 23.871429
| 67
| 0.540395
|
4a0ad771e742c6da99365705972c81c7048cc1f6
| 1,189
|
py
|
Python
|
examples/status.py
|
Soneydom13/python-adguardhome
|
8e6b027630d9819112313ffe85c0866ea227ebe8
|
[
"MIT"
] | null | null | null |
examples/status.py
|
Soneydom13/python-adguardhome
|
8e6b027630d9819112313ffe85c0866ea227ebe8
|
[
"MIT"
] | null | null | null |
examples/status.py
|
Soneydom13/python-adguardhome
|
8e6b027630d9819112313ffe85c0866ea227ebe8
|
[
"MIT"
] | 1
|
2021-02-19T01:14:50.000Z
|
2021-02-19T01:14:50.000Z
|
# pylint: disable=W0621
"""Asynchronous Python client for the AdGuard Home API."""
import asyncio
from adguardhome import AdGuardHome
async def main():
    """Show example how to get status of your AdGuard Home instance."""
    async with AdGuardHome("192.168.1.2") as adguard:
        version = await adguard.version()
        print("AdGuard version:", version)

        # Each feature probe is awaited in turn and reported as Yes/No.
        checks = (
            ("Protection enabled?", adguard.protection_enabled),
            ("Filtering enabled?", adguard.filtering.enabled),
            ("Parental control enabled?", adguard.parental.enabled),
            ("Safe browsing enabled?", adguard.safebrowsing.enabled),
            ("Enforce safe search enabled?", adguard.safesearch.enabled),
        )
        for question, probe in checks:
            enabled = await probe()
            print(question, "Yes" if enabled else "No")
if __name__ == "__main__":
    # asyncio.run() creates, runs and closes the event loop for us; the
    # manual get_event_loop()/run_until_complete() pattern is deprecated
    # for scripts since Python 3.10.
    asyncio.run(main())
| 30.487179
| 71
| 0.647603
|
4a0ad8d03e1124831d878b7b9e33eeac8d414c2c
| 5,987
|
py
|
Python
|
tensornet/gradcam/visual.py
|
shan18/TensorNet
|
c79a0c64152dbeb3499d204994772858326f668c
|
[
"MIT"
] | 6
|
2020-06-04T16:01:38.000Z
|
2021-11-28T17:47:13.000Z
|
tensornet/gradcam/visual.py
|
shan18/TensorNet
|
c79a0c64152dbeb3499d204994772858326f668c
|
[
"MIT"
] | 22
|
2020-03-20T22:00:32.000Z
|
2021-02-08T19:32:32.000Z
|
tensornet/gradcam/visual.py
|
shan18/TensorNet
|
c79a0c64152dbeb3499d204994772858326f668c
|
[
"MIT"
] | 5
|
2020-03-24T11:29:22.000Z
|
2020-11-01T11:45:20.000Z
|
import cv2
import torch
import numpy as np
import matplotlib.pyplot as plt
from tensornet.gradcam.gradcam import GradCAM
from tensornet.gradcam.gradcam_pp import GradCAMPP
from tensornet.data.utils import to_numpy, unnormalize
from typing import Tuple, List, Dict, Union, Optional
def visualize_cam(mask: torch.Tensor, img: torch.Tensor, alpha: float = 1.0) -> Tuple[torch.Tensor]:
    """Make heatmap from mask and synthesize GradCAM result image using heatmap and img.

    Args:
        mask (torch.tensor): mask shape of (1, 1, H, W) and each element has value in range [0, 1]
        img (torch.tensor): img shape of (1, 3, H, W) and each pixel value is in range [0, 1]

    Returns:
        2-element tuple containing

        - (*torch.tensor*): heatmap img shape of (3, H, W)
        - (*torch.tensor*): synthesized GradCAM result of same shape with heatmap.
    """
    # Quantize the mask to 8-bit and colorize it with OpenCV's JET map.
    colored = (255 * mask.squeeze()).type(torch.uint8).cpu().numpy()
    colored = cv2.applyColorMap(colored, cv2.COLORMAP_JET)
    colored = torch.from_numpy(colored).permute(2, 0, 1).float().div(255)

    # OpenCV emits BGR channel order; reorder to RGB and apply the blend weight.
    blue, green, red = colored.split(1)
    heatmap = torch.cat([red, green, blue]) * alpha

    # Overlay the heatmap on the input image and renormalize into [0, 1].
    overlay = heatmap + img.cpu()
    overlay = overlay.div(overlay.max()).squeeze()

    return heatmap, overlay
class GradCAMView:
    """Create GradCAM and GradCAM++.

    *Note*: The current implementation of `GradCAM` and `GradCAM++` supports only ResNet
    models. The class can be extended to add support for other models.

    Args:
        model (torch.nn.Module): Trained model.
        layers (list): List of layers to show GradCAM on.
        device (:obj:`str` or :obj:`torch.device`): GPU or CPU.
        mean (:obj:`float` or :obj:`tuple`): Mean of the dataset.
        std (:obj:`float` or :obj:`tuple`): Standard Deviation of the dataset.
    """

    def __init__(
        self, model: torch.nn.Module, layers: List[str], device: Union[str, torch.device],
        mean: Union[float, tuple], std: Union[float, tuple]
    ):
        self.model = model
        self.layers = layers
        self.device = device
        self.mean = mean
        self.std = std

        self._gradcam()
        self._gradcam_pp()
        print('Mode set to GradCAM.')
        # `self.grad` always points at the currently selected mode's mapping.
        self.grad = self.gradcam.copy()

        self.views = []

    def _gradcam(self):
        """Initialize GradCAM instance."""
        self.gradcam = {}
        for layer in self.layers:
            self.gradcam[layer] = GradCAM(self.model, layer)

    def _gradcam_pp(self):
        """Initialize GradCAM++ instance."""
        self.gradcam_pp = {}
        for layer in self.layers:
            self.gradcam_pp[layer] = GradCAMPP(self.model, layer)

    def switch_mode(self):
        """Switch between GradCAM and GradCAM++."""
        if self.grad == self.gradcam:
            print('Mode switched to GradCAM++.')
            self.grad = self.gradcam_pp.copy()
        else:
            print('Mode switched to GradCAM.')
            self.grad = self.gradcam.copy()

    def _cam_image(
        self, norm_image: torch.Tensor, class_idx: Optional[int] = None
    ) -> Dict[str, Union[np.ndarray, Dict[str, np.ndarray]]]:
        """Get CAM for an image.

        Args:
            norm_image (torch.Tensor): Normalized image.
            class_idx (:obj:`int`, optional): Class index for calculating GradCAM.
                If not specified, the class index that makes the highest model
                prediction score will be used.

        Returns:
            Dictionary containing unnormalized image, heatmap and CAM result.
        """
        image = unnormalize(norm_image, self.mean, self.std)  # Unnormalized image
        norm_image_cuda = norm_image.clone().unsqueeze_(0).to(self.device)
        heatmap, result = {}, {}
        # Bug fix: iterate over `self.grad` (the currently selected mode)
        # instead of always reading `self.gradcam` — the original made
        # `switch_mode()` a no-op because GradCAM++ was never used here.
        for layer, gc in self.grad.items():
            mask, _ = gc(norm_image_cuda, class_idx=class_idx)
            cam_heatmap, cam_result = visualize_cam(
                mask,
                image.clone().unsqueeze_(0).to(self.device)
            )
            heatmap[layer], result[layer] = to_numpy(cam_heatmap), to_numpy(cam_result)
        return {
            'image': to_numpy(image),
            'heatmap': heatmap,
            'result': result
        }

    def cam(self, norm_img_class_list: List[Union[Dict[str, Union[torch.Tensor, int]], torch.Tensor]]):
        """Get CAM for a list of images.

        Args:
            norm_img_class_list (list): List of dictionaries or list of images.
                If dict, each dict contains keys 'image' and 'class'
                having values 'normalized_image' and 'class_idx' respectively.
                class_idx is optional. If class_idx is not given then the
                model prediction will be used and the parameter should just be
                a list of images. Each image should be of type torch.Tensor
        """
        for norm_image_class in norm_img_class_list:
            class_idx = None
            norm_image = norm_image_class
            if isinstance(norm_image_class, dict):  # idiomatic type check
                class_idx, norm_image = norm_image_class['class'], norm_image_class['image']
            self.views.append(self._cam_image(norm_image, class_idx=class_idx))

    def __call__(
        self, norm_img_class_list: List[Union[Dict[str, Union[torch.Tensor, int]], torch.Tensor]]
    ) -> List[Dict[str, Union[np.ndarray, Dict[str, np.ndarray]]]]:
        """Get GradCAM for a list of images.

        Args:
            norm_img_class_list (list): List of dictionaries or list of images.
                If dict, each dict contains keys 'image' and 'class'
                having values 'normalized_image' and 'class_idx' respectively.
                class_idx is optional. If class_idx is not given then the
                model prediction will be used and the parameter should just be
                a list of images. Each image should be of type torch.Tensor
        """
        self.cam(norm_img_class_list)
        return self.views
| 38.625806
| 103
| 0.616502
|
4a0ad8d9acf44a4a24acbb3c647784384d9676f7
| 7,927
|
py
|
Python
|
vsgia_model/utils/utils.py
|
nkuhzx/VSG-IA
|
075b58c2bf89562cc197e721f050396589861c6a
|
[
"Apache-2.0"
] | null | null | null |
vsgia_model/utils/utils.py
|
nkuhzx/VSG-IA
|
075b58c2bf89562cc197e721f050396589861c6a
|
[
"Apache-2.0"
] | null | null | null |
vsgia_model/utils/utils.py
|
nkuhzx/VSG-IA
|
075b58c2bf89562cc197e721f050396589861c6a
|
[
"Apache-2.0"
] | null | null | null |
import numpy as np
from collections import deque
from sklearn.metrics import average_precision_score
from PIL import Image
from sklearn.metrics import roc_auc_score
class AverageMeter():
    """Tracks the running sum and mean of scalar values."""

    def __init__(self):
        self.reset()

    def reset(self):
        """Discard all accumulated statistics."""
        self.count = 0
        self.newval = 0
        self.sum = 0
        self.avg = 0

    def update(self, newval, n=1):
        """Record `newval` observed `n` times and refresh the running mean."""
        self.newval = newval
        self.sum += newval * n
        self.count += n
        self.avg = self.sum / self.count
class MovingAverageMeter():
    """Mean over a sliding window of the most recent `max_len` values."""

    def __init__(self, max_len=30):
        self.max_len = max_len
        self.reset()

    def reset(self):
        """Start over with an empty window."""
        self.dq = deque(maxlen=self.max_len)
        self.count = 0
        self.avg = 0
        self.sum = 0

    def update(self, newval):
        """Push `newval` into the window (evicting the oldest when full)."""
        self.dq.append(newval)
        self.count = len(self.dq)
        self.sum = np.array(self.dq).sum()
        self.avg = self.sum / float(self.count)
# # Metric functions
def argmax_pts(heatmap):
    """Return the (x, y) coordinates of the maximum entry of a 2-D heatmap."""
    row, col = np.unravel_index(heatmap.argmax(), heatmap.shape)
    return float(col), float(row)
def euclid_dist(pred, target, type='avg'):
    """L2 distance between predicted heatmap argmax and ground-truth gaze.

    Args:
        pred: (B, H, W) array of predicted heatmaps.
        target: (B, K*2) tensor of normalized gaze points; -1 marks padding.
        type: 'avg' (distance to the mean annotation), 'min' (closest
            annotation) or 'retained' (also collect per-sample results).

    Returns:
        Mean distance over the batch, or, when ``type == 'retained'``, the
        tuple (mean_gt_gaze_list, sample_dist_list).

    Raises:
        NotImplementedError: for an unknown `type`.
    """
    batch_dist = 0.
    batch_size = pred.shape[0]
    pred_H, pred_W = pred.shape[1:]

    sample_dist_list = []
    mean_gt_gaze_list = []

    for b_idx in range(batch_size):
        # Predicted point: argmax of the heatmap, normalized to [0, 1].
        pred_x, pred_y = argmax_pts(pred[b_idx])
        norm_p = np.array([pred_x, pred_y]) / np.array([pred_W, pred_H])

        # Keep only valid annotations (padding entries are -1).
        b_target = target[b_idx]
        valid_target = b_target[b_target != -1].view(-1, 2)
        valid_target = valid_target.numpy()

        sample_dist = valid_target - norm_p
        sample_dist = np.sqrt(np.power(sample_dist[:, 0], 2) + np.power(sample_dist[:, 1], 2))

        if type == 'avg':
            mean_gt_gaze = np.mean(valid_target, 0)
            sample_avg_dist = mean_gt_gaze - norm_p
            sample_avg_dist = np.sqrt(np.power(sample_avg_dist[0], 2) + np.power(sample_avg_dist[1], 2))
            sample_dist = float(sample_avg_dist)
        elif type == 'min':
            sample_dist = float(np.min(sample_dist))
        elif type == "retained":
            mean_gt_gaze = np.mean(valid_target, 0)
            sample_avg_dist = mean_gt_gaze - norm_p
            sample_avg_dist = np.sqrt(np.power(sample_avg_dist[0], 2) + np.power(sample_avg_dist[1], 2))
            sample_dist = float(sample_avg_dist)

            mean_gt_gaze_list.append(mean_gt_gaze)
            sample_dist_list.append(sample_dist)
        else:
            # Bug fix: `raise NotImplemented` raises a TypeError because
            # NotImplemented is not an exception; raise the proper class.
            raise NotImplementedError(f"unknown distance type: {type!r}")

        batch_dist += sample_dist

    euclid_dist = batch_dist / float(batch_size)

    if type == "retained":
        return mean_gt_gaze_list, sample_dist_list

    return euclid_dist
def auc(gt_gaze,pred_heatmap,imsize):
    """Mean ROC-AUC between predicted heatmaps and multi-annotator gaze maps.

    For each sample, the gaze points are rasterized into a binary map at the
    original image resolution and the predicted heatmap is upscaled to match.
    """
    batch_size=len(gt_gaze)
    auc_score_list=[]
    for b_idx in range(batch_size):
        multi_hot=multi_hot_targets(gt_gaze[b_idx],imsize[b_idx])
        # resample=0 is PIL nearest-neighbour, which preserves heatmap values.
        scaled_heatmap=Image.fromarray(pred_heatmap[b_idx]).resize(size=(imsize[b_idx][0],imsize[b_idx][1]),
                                                        resample=0)
        scaled_heatmap=np.array(scaled_heatmap)
        # Flatten both maps and score the heatmap as a per-pixel classifier.
        sample_auc_score=roc_auc_score(np.reshape(multi_hot,multi_hot.size),
                                       np.reshape(scaled_heatmap,scaled_heatmap.size))
        auc_score_list.append(sample_auc_score)
    auc_score=sum(auc_score_list)/len(auc_score_list)
    return auc_score
def auc_videoatt(gt_gaze, pred_heatmap, imsize, output_resolution=64):
    """ROC-AUC for single-point gaze labels (VideoAttTarget protocol).

    Samples whose label contains -1 (gaze outside the frame) are skipped.

    Args:
        gt_gaze: batch of normalized (x, y) gaze labels.
        pred_heatmap: batch of predicted heatmaps.
        imsize: unused here; kept for signature parity with `auc`.
        output_resolution: side length of the square grid used for scoring.

    Returns:
        (mean AUC over in-frame samples, number of in-frame samples).
    """
    batch_size = len(gt_gaze)
    valid_counter = 0
    auc_score_list = []
    for b_idx in range(batch_size):
        if -1 in gt_gaze[b_idx]:
            # Gaze is out of frame; this sample carries no target point.
            continue
        # Rasterize the single gaze point as a small Gaussian, then binarize.
        multi_hot = np.zeros((output_resolution, output_resolution))
        multi_hot = draw_labelmap(multi_hot,
                                  [gt_gaze[b_idx][0] * output_resolution,
                                   gt_gaze[b_idx][1] * output_resolution],
                                  3, type="Gaussian")
        multi_hot = (multi_hot > 0)
        # Bug fix: `np.float` was removed in NumPy 1.24; the builtin `float`
        # is exactly what the deprecated alias pointed to.
        multi_hot = multi_hot.astype(float) * 1
        # resample=0 is PIL nearest-neighbour, preserving heatmap values.
        scaled_heatmap = Image.fromarray(pred_heatmap[b_idx]).resize(
            size=(output_resolution, output_resolution), resample=0)
        scaled_heatmap = np.array(scaled_heatmap)
        sample_auc_score = roc_auc_score(np.reshape(multi_hot, multi_hot.size),
                                         np.reshape(scaled_heatmap, scaled_heatmap.size))
        auc_score_list.append(sample_auc_score)
        valid_counter += 1
    if valid_counter != 0:
        auc_score = sum(auc_score_list) / len(auc_score_list)
    else:
        auc_score = 0
    return auc_score, valid_counter
def euclid_dist_videoatt(pred, target, type='avg'):
    """Mean L2 gaze distance for VideoAttTarget, skipping out-of-frame samples.

    Args:
        pred: (B, H, W) array of predicted heatmaps.
        target: (B, K*2) tensor of normalized gaze points; any -1 marks an
            out-of-frame sample that is excluded from the average.
        type: 'avg' for distance to the mean annotation, 'min' for the
            closest annotation.

    Returns:
        (mean distance over in-frame samples, number of in-frame samples).

    Raises:
        NotImplementedError: for an unknown `type`.
    """
    batch_dist = 0.
    batch_size = pred.shape[0]
    pred_H, pred_W = pred.shape[1:]

    outside_counter = 0
    for b_idx in range(batch_size):
        # Predicted point: argmax of the heatmap, normalized to [0, 1].
        pred_x, pred_y = argmax_pts(pred[b_idx])
        norm_p = np.array([pred_x, pred_y]) / np.array([pred_W, pred_H])

        b_target = target[b_idx]
        if -1 in b_target:
            # Gaze is outside the frame; exclude from the average.
            outside_counter += 1
            continue

        valid_target = b_target[b_target != -1].view(-1, 2)
        valid_target = valid_target.numpy()

        sample_dist = valid_target - norm_p
        sample_dist = np.sqrt(np.power(sample_dist[:, 0], 2) + np.power(sample_dist[:, 1], 2))

        if type == 'avg':
            mean_gt_gaze = np.mean(valid_target, 0)
            sample_avg_dist = mean_gt_gaze - norm_p
            sample_avg_dist = np.sqrt(np.power(sample_avg_dist[0], 2) + np.power(sample_avg_dist[1], 2))
            sample_dist = float(sample_avg_dist)
        elif type == 'min':
            sample_dist = float(np.min(sample_dist))
        else:
            # Bug fix: `raise NotImplemented` raises a TypeError because
            # NotImplemented is not an exception; raise the proper class.
            raise NotImplementedError(f"unknown distance type: {type!r}")

        batch_dist += sample_dist

    if batch_size != outside_counter:
        euclid_dist = batch_dist / (float(batch_size) - outside_counter)
    else:
        euclid_dist = 0
    valid_num = batch_size - outside_counter

    return euclid_dist, valid_num
def ap(label,pred):
    """Average precision (area under the precision-recall curve) via scikit-learn."""
    return average_precision_score(label,pred)
def multi_hot_targets(gaze_pts, out_res):
    """Rasterize normalized gaze points into a binary (h, w) target map.

    Points with a negative x coordinate are treated as invalid/padding and
    skipped; valid points are clamped to the map boundary.
    """
    w, h = out_res
    target_map = np.zeros((h, w))
    for point in gaze_pts:
        if point[0] < 0:
            continue
        x = min(int(point[0] * float(w)), w - 1)
        y = min(int(point[1] * float(h)), h - 1)
        target_map[y, x] = 1
    return target_map
def draw_labelmap(img, pt, sigma, type='Gaussian'):
    """Accumulate a 2D Gaussian (or Cauchy) blob centred at `pt` onto `img`.

    Adopted from https://github.com/anewell/pose-hg-train/blob/master/src/pypose/draw.py

    Args:
        img: 2D numpy array (H, W), modified in place.
        pt: (x, y) centre of the kernel, in pixel coordinates.
        sigma: kernel bandwidth; the kernel spans 6*sigma + 1 pixels.
        type: 'Gaussian' or 'Cauchy'.

    Returns:
        The image normalized so its maximum is 1, or the unchanged image when
        the kernel lies entirely outside its bounds.

    Raises:
        ValueError: if `type` is neither 'Gaussian' nor 'Cauchy'.
    """
    # Kernel bounding box: upper-left inclusive, bottom-right exclusive.
    ul = [int(pt[0] - 3 * sigma), int(pt[1] - 3 * sigma)]
    br = [int(pt[0] + 3 * sigma + 1), int(pt[1] + 3 * sigma + 1)]
    if (ul[0] >= img.shape[1] or ul[1] >= img.shape[0] or
            br[0] < 0 or br[1] < 0):
        # Kernel completely out of bounds: nothing to draw.
        return img
    # Generate the (unnormalized) kernel; its centre value equals 1.
    size = 6 * sigma + 1
    x = np.arange(0, size, 1, float)
    y = x[:, np.newaxis]
    x0 = y0 = size // 2
    if type == 'Gaussian':
        g = np.exp(- ((x - x0) ** 2 + (y - y0) ** 2) / (2 * sigma ** 2))
    elif type == 'Cauchy':
        g = sigma / (((x - x0) ** 2 + (y - y0) ** 2 + sigma ** 2) ** 1.5)
    else:
        # BUG FIX: an unknown `type` previously fell through and crashed
        # below with a NameError on `g`; fail early with a clear message.
        raise ValueError("unknown kernel type: %r" % (type,))
    # Overlapping ranges of the kernel and the image.
    g_x = max(0, -ul[0]), min(br[0], img.shape[1]) - ul[0]
    g_y = max(0, -ul[1]), min(br[1], img.shape[0]) - ul[1]
    # Image range.
    img_x = max(0, ul[0]), min(br[0], img.shape[1])
    img_y = max(0, ul[1]), min(br[1], img.shape[0])
    img[img_y[0]:img_y[1], img_x[0]:img_x[1]] += g[g_y[0]:g_y[1], g_x[0]:g_x[1]]
    img = img / np.max(img)  # normalize heatmap so it has max value of 1
    return img
| 27.911972
| 120
| 0.605021
|
4a0ad97ff7e36afe68fbff03099b3ea7be8de477
| 5,516
|
py
|
Python
|
contrib/seeds/makeseeds.py
|
mctnoc/nocnoc
|
87cab7106ae50aa9b7d17de5536f03084a1c98af
|
[
"MIT"
] | 1
|
2021-01-04T08:11:51.000Z
|
2021-01-04T08:11:51.000Z
|
contrib/seeds/makeseeds.py
|
CryptoLover705/NocNoc
|
fafa860f29c63d7357721a231fef7ad314355263
|
[
"MIT"
] | 1
|
2021-02-07T01:14:40.000Z
|
2021-02-07T02:39:47.000Z
|
contrib/seeds/makeseeds.py
|
CryptoLover705/NocNoc
|
fafa860f29c63d7357721a231fef7ad314355263
|
[
"MIT"
] | 6
|
2020-12-05T19:25:13.000Z
|
2021-05-21T18:19:11.000Z
|
#!/usr/bin/env python3
# Copyright (c) 2013-2017 The Bitcoin Core developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
#
# Generate seeds.txt from Pieter's DNS seeder
#
# Maximum number of seed entries to emit.
NSEEDS=512
# Cap on seeds taken from any single autonomous system (limits centralization).
MAX_SEEDS_PER_ASN=2
# Nodes reporting fewer blocks than this are considered not synced and skipped.
MIN_BLOCKS = 615801
# These are hosts that have been observed to be behaving strangely (e.g.
# aggressively connecting to every node).
# NOTE(review): the set currently contains only the empty string, i.e. no real
# host is blacklisted -- confirm this is intentional.
SUSPICIOUS_HOSTS = {
    ""
}
import re
import sys
import dns.resolver
import collections
# Address / user-agent formats accepted from the seeder dump.
PATTERN_IPV4 = re.compile(r"^((\d{1,3})\.(\d{1,3})\.(\d{1,3})\.(\d{1,3})):(\d+)$")
PATTERN_IPV6 = re.compile(r"^\[([0-9a-z:]+)\]:(\d+)$")
PATTERN_ONION = re.compile(r"^([abcdefghijklmnopqrstuvwxyz234567]{16}\.onion):(\d+)$")
PATTERN_AGENT = re.compile(r"^(/NocNocCore:2.2.(0|1|99)/)$")

def parseline(line):
    """Parse one line of the DNS seeder dump into a node-info dict.

    Expected whitespace-separated columns: address, good-flag, last-success
    timestamp, five uptime percentages, block height, service bits (hex),
    protocol version, and the quoted user agent (which may contain one space
    and therefore span two columns).

    Returns None for malformed lines, lines the seeder marked bad, and
    addresses that are not valid IPv4/IPv6/onion endpoints.
    """
    sline = line.split()
    if len(sline) < 11:
        return None
    m = PATTERN_IPV4.match(sline[0])
    sortkey = None
    ip = None
    if m is None:
        m = PATTERN_IPV6.match(sline[0])
        if m is None:
            m = PATTERN_ONION.match(sline[0])
            if m is None:
                return None
            else:
                net = 'onion'
                ipstr = sortkey = m.group(1)
                port = int(m.group(2))
        else:
            net = 'ipv6'
            if m.group(1) in ['::']: # Not interested in localhost
                return None
            ipstr = m.group(1)
            sortkey = ipstr # XXX parse IPv6 into number, could use name_to_ipv6 from generate-seeds
            port = int(m.group(2))
    else:
        # Do IPv4 sanity check
        ip = 0
        for i in range(0,4):
            if int(m.group(i+2)) < 0 or int(m.group(i+2)) > 255:
                return None
            ip = ip + (int(m.group(i+2)) << (8*(3-i)))
        if ip == 0:
            return None
        net = 'ipv4'
        sortkey = ip
        ipstr = m.group(1)
        port = int(m.group(6))
    # Skip results the seeder marked as bad.
    # BUG FIX: the split fields are strings, so the previous `sline[1] == 0`
    # comparison was always False and bad entries were never skipped.
    if sline[1] == '0':
        return None
    # Extract uptime %.
    uptime30 = float(sline[7][:-1])
    # Extract Unix timestamp of last success.
    lastsuccess = int(sline[2])
    # Extract protocol version.
    version = int(sline[10])
    # Extract user agent (re-join a quoted agent that contains a space).
    if len(sline) > 11:
        agent = sline[11][1:] + sline[12][:-1]
    else:
        agent = sline[11][1:-1]
    # Extract service flags.
    service = int(sline[9], 16)
    # Extract blocks.
    blocks = int(sline[8])
    # Construct result.
    return {
        'net': net,
        'ip': ipstr,
        'port': port,
        'ipnum': ip,
        'uptime': uptime30,
        'lastsuccess': lastsuccess,
        'version': version,
        'agent': agent,
        'service': service,
        'blocks': blocks,
        'sortkey': sortkey,
    }
def filtermultiport(ips):
    '''Keep only addresses seen exactly once (drop IPs running multiple nodes).'''
    by_addr = collections.defaultdict(list)
    for node in ips:
        by_addr[node['sortkey']].append(node)
    return [nodes[0] for nodes in by_addr.values() if len(nodes) == 1]
# Based on Greg Maxwell's seed_filter.py
def filterbyasn(ips, max_per_asn, max_total):
    """Limit IPv4 results per ASN and in total; pass IPv6/onion through as-is.

    The ASN of each IPv4 address is looked up via Team Cymru's DNS interface;
    addresses whose lookup fails are dropped with a message on stderr.
    """
    # Sift out ips by type
    ips_ipv4 = [ip for ip in ips if ip['net'] == 'ipv4']
    ips_ipv6 = [ip for ip in ips if ip['net'] == 'ipv6']
    ips_onion = [ip for ip in ips if ip['net'] == 'onion']
    # Filter IPv4 by ASN
    result = []
    asn_count = {}
    for ip in ips_ipv4:
        if len(result) == max_total:
            break
        try:
            # Query <reversed-octets>.origin.asn.cymru.com; the TXT answer
            # starts with the AS number.
            asn = int([x.to_text() for x in dns.resolver.query('.'.join(reversed(ip['ip'].split('.'))) + '.origin.asn.cymru.com', 'TXT').response.answer][0].split('\"')[1].split(' ')[0])
            if asn not in asn_count:
                asn_count[asn] = 0
            if asn_count[asn] == max_per_asn:
                continue
            asn_count[asn] += 1
            result.append(ip)
        except Exception:
            # BUG FIX: was a bare `except:`, which also swallowed
            # KeyboardInterrupt/SystemExit; keep the best-effort behaviour
            # for ordinary errors only.
            sys.stderr.write('ERR: Could not resolve ASN for "' + ip['ip'] + '"\n')
    # TODO: filter IPv6 by ASN
    # Add back non-IPv4
    result.extend(ips_ipv6)
    result.extend(ips_onion)
    return result
def main():
    """Read a seeder dump on stdin, filter it, and print seed addresses."""
    lines = sys.stdin.readlines()
    candidates = [parseline(line) for line in lines]
    # Drop unparseable entries.
    candidates = [node for node in candidates if node is not None]
    # Drop known-bad hosts.
    candidates = [node for node in candidates if node['ip'] not in SUSPICIOUS_HOSTS]
    # Require the node to be reasonably synced.
    candidates = [node for node in candidates if node['blocks'] >= MIN_BLOCKS]
    # Require service bit 1.
    candidates = [node for node in candidates if (node['service'] & 1) == 1]
    # Require at least 50% 30-day uptime.
    candidates = [node for node in candidates if node['uptime'] > 50]
    # Require a known and recent user agent.
    candidates = [node for node in candidates
                  if PATTERN_AGENT.match(re.sub(' ', '-', node['agent']))]
    # Best nodes first: uptime, then most recent success (ip breaks ties).
    candidates.sort(key=lambda node: (node['uptime'], node['lastsuccess'], node['ip']),
                    reverse=True)
    # Hosts exposing several ports are likely abusive.
    candidates = filtermultiport(candidates)
    # Look up ASNs and limit results, both per ASN and globally.
    candidates = filterbyasn(candidates, MAX_SEEDS_PER_ASN, NSEEDS)
    # Deterministic output order.
    candidates.sort(key=lambda node: (node['net'], node['sortkey']))
    for node in candidates:
        if node['net'] == 'ipv6':
            print('[%s]:%i' % (node['ip'], node['port']))
        else:
            print('%s:%i' % (node['ip'], node['port']))

if __name__ == '__main__':
    main()
| 32.069767
| 186
| 0.567078
|
4a0ad9da02093564d15f0ef9a99f8501cc3fc40d
| 710
|
py
|
Python
|
docs/steps/code/s1b.py
|
westpark/wallball
|
c3603680a8c7d722a92b13c31d9e4cc3b753f530
|
[
"MIT"
] | 1
|
2020-03-19T15:12:08.000Z
|
2020-03-19T15:12:08.000Z
|
docs/steps/code/s1b.py
|
westpark/wallball
|
c3603680a8c7d722a92b13c31d9e4cc3b753f530
|
[
"MIT"
] | null | null | null |
docs/steps/code/s1b.py
|
westpark/wallball
|
c3603680a8c7d722a92b13c31d9e4cc3b753f530
|
[
"MIT"
] | 1
|
2020-03-19T15:12:19.000Z
|
2020-03-19T15:12:19.000Z
|
# Size of the game window, in pixels.
WIDTH = 640
HEIGHT = 480
# A ball is just a rectangle with extra attributes (colour, direction, speed)
# attached below. NOTE(review): ZRect is presumably Pygame Zero's rect class,
# injected by the pgzero runtime -- confirm.
class Ball(ZRect): pass
#
# The ball is a red square halfway across the game screen
#
ball = Ball(0, 0, 30, 30)
ball.center = WIDTH / 2, HEIGHT / 2
ball.colour = "red"
#
# The ball moves one step right and one step down each tick
#
ball.direction = 1, 1
#
# The ball moves at a speed of 3 steps each tick
#
ball.speed = 3
def draw():
    """Render one frame: wipe the screen, then paint the ball at its
    current position in its current colour."""
    screen.clear()
    screen.draw.filled_rect(ball, ball.colour)
def update():
    """Advance the ball by `speed` steps along its direction vector."""
    step_x, step_y = ball.direction
    ball.move_ip(ball.speed * step_x, ball.speed * step_y)
| 22.1875
| 69
| 0.633803
|
4a0adb9419cebf8a82676ce752d9fce502506613
| 13,820
|
py
|
Python
|
simpleml/persistables/hashing.py
|
ptoman/SimpleML
|
a829ee05da01a75b64982d91a012e9274b6f7c6e
|
[
"BSD-3-Clause"
] | null | null | null |
simpleml/persistables/hashing.py
|
ptoman/SimpleML
|
a829ee05da01a75b64982d91a012e9274b6f7c6e
|
[
"BSD-3-Clause"
] | null | null | null |
simpleml/persistables/hashing.py
|
ptoman/SimpleML
|
a829ee05da01a75b64982d91a012e9274b6f7c6e
|
[
"BSD-3-Clause"
] | null | null | null |
'''
Mixin classes to handle hashing
'''
__author__ = 'Elisha Yadgaran'
import pandas as pd
import numpy as np
from pandas.util import hash_pandas_object
import inspect
class CustomHasherMixin(object):
    '''
    Mixin class to hash any object
    '''
    def custom_hasher(self, object_to_hash, custom_class_proxy=type(object.__dict__)):
        """
        Adapted from: https://stackoverflow.com/questions/5884066/hashing-a-dictionary
        Makes a hash from a dictionary, list, tuple or set to any level, that
        contains only other hashable types (including any lists, tuples, sets, and
        dictionaries). In the case where other kinds of objects (like classes) need
        to be hashed, pass in a collection of object attributes that are pertinent.
        For example, a class can be hashed in this fashion:

        custom_hasher([cls.__dict__, cls.__name__])

        A function can be hashed like so:

        custom_hasher([fn.__dict__, fn.__code__])

        python 3.3+ changes the default hash method to add an additional random
        seed. Need to set the global PYTHONHASHSEED=0 or use a different hash
        function
        """
        # A class __dict__ (mappingproxy): drop dunder entries, hash the rest.
        if type(object_to_hash) == custom_class_proxy:
            o2 = {}
            for k, v in object_to_hash.items():
                if not k.startswith("__"):
                    o2[k] = v
            object_to_hash = o2

        if isinstance(object_to_hash, (set, tuple, list)):
            return hash(tuple([self.custom_hasher(e) for e in object_to_hash]))

        elif isinstance(object_to_hash, np.ndarray):
            # BUG FIX: ndarray.tostring() was deprecated in NumPy 1.19 and
            # removed in NumPy 2.0; tobytes() returns the identical bytes,
            # so resulting hashes are unchanged.
            return self.custom_hasher(object_to_hash.tobytes())

        elif isinstance(object_to_hash, pd.DataFrame):
            # Pandas is unable to hash numpy arrays so prehash those
            return hash_pandas_object(object_to_hash.applymap(
                lambda element: self.custom_hasher(element) if isinstance(element, np.ndarray) else element),
                index=False).sum()

        elif isinstance(object_to_hash, pd.Series):
            # Pandas is unable to hash numpy arrays so prehash those
            return hash_pandas_object(object_to_hash.apply(
                lambda element: self.custom_hasher(element) if isinstance(element, np.ndarray) else element),
                index=False).sum()

        elif object_to_hash is None:
            # hash of None is unstable between systems
            return -12345678987654321

        elif isinstance(object_to_hash, dict):
            # Sort item hashes so the result is independent of insertion order.
            return hash(tuple(
                sorted([self.custom_hasher(item) for item in object_to_hash.items()])
            ))

        elif isinstance(object_to_hash, type(lambda: 0)):
            # Functions dont hash consistently because of the halting problem
            # https://stackoverflow.com/questions/33998594/hash-for-lambda-function-in-python
            # Attempt to use the source code string
            return self.custom_hasher(inspect.getsource(object_to_hash))

        elif isinstance(object_to_hash, type):
            # Have to keep this at the end of the try list; np.ndarray,
            # pd.DataFrame/Series, and function are also of <type 'type'>
            return self.custom_hasher(repr(object_to_hash))
            # return self.custom_hasher(inspect.getsource(object_to_hash))

        else:
            return hash(object_to_hash)
"""
Copied from joblib library -- no modification, just avoiding unnecessary dependencies
https://github.com/joblib/joblib/blob/master/joblib/hashing.py#L246
Fast cryptographic hash of Python objects, with a special case for fast
hashing of numpy arrays.
"""
# Author: Gael Varoquaux <gael dot varoquaux at normalesup dot org>
# Copyright (c) 2009 Gael Varoquaux
# License: BSD Style, 3 clauses.
import pickle
import hashlib
import sys
import types
import struct
import io
import decimal
# True on Python 3+; selects the pickle protocol and Pickler implementation.
PY3_OR_LATER = sys.version_info[0] >= 3
try:
    # Python 2: keep the wide string types.
    _basestring = basestring
    _bytes_or_unicode = (str, unicode)
except NameError:
    # Python 3: `basestring`/`unicode` no longer exist.
    _basestring = str
    _bytes_or_unicode = (bytes, str)
if PY3_OR_LATER:
    # Use the pure-Python pickler, whose `dispatch` table the Hasher subclass
    # below copies and extends.
    Pickler = pickle._Pickler
else:
    Pickler = pickle.Pickler
class _ConsistentSet(object):
""" Class used to ensure the hash of Sets is preserved
whatever the order of its items.
"""
def __init__(self, set_sequence):
# Forces order of elements in set to ensure consistent hash.
try:
# Trying first to order the set assuming the type of elements is
# consistent and orderable.
# This fails on python 3 when elements are unorderable
# but we keep it in a try as it's faster.
self._sequence = sorted(set_sequence)
except (TypeError, decimal.InvalidOperation):
# If elements are unorderable, sorting them using their hash.
# This is slower but works in any case.
self._sequence = sorted((hash(e) for e in set_sequence))
class _MyHash(object):
""" Class used to hash objects that won't normally pickle """
def __init__(self, *args):
self.args = args
class Hasher(Pickler):
    """ A subclass of pickler, to do cryptographic hashing, rather than
        pickling: objects are pickled into an in-memory buffer whose bytes
        feed a hashlib digest.
    """

    def __init__(self, hash_name='md5'):
        # Pickled bytes accumulate here before being fed to the digest.
        self.stream = io.BytesIO()
        # By default we want a pickle protocol that only changes with
        # the major python version and not the minor one
        protocol = (pickle.DEFAULT_PROTOCOL if PY3_OR_LATER
                    else pickle.HIGHEST_PROTOCOL)
        Pickler.__init__(self, self.stream, protocol=protocol)
        # Initialise the hash obj
        self._hash = hashlib.new(hash_name)

    def hash(self, obj, return_digest=True):
        # Pickle `obj` into the digest; return the hex digest unless
        # return_digest is False.
        try:
            self.dump(obj)
        except pickle.PicklingError as e:
            # Enrich the error with the offending object before re-raising.
            e.args += ('PicklingError while hashing %r: %r' % (obj, e),)
            raise
        dumps = self.stream.getvalue()
        self._hash.update(dumps)
        if return_digest:
            return self._hash.hexdigest()

    def save(self, obj):
        if isinstance(obj, (types.MethodType, type({}.pop))):
            # the Pickler cannot pickle instance methods; here we decompose
            # them into components that make them uniquely identifiable
            if hasattr(obj, '__func__'):
                func_name = obj.__func__.__name__
            else:
                func_name = obj.__name__
            inst = obj.__self__
            if type(inst) == type(pickle):
                # Method bound to a module object: identify by module name.
                obj = _MyHash(func_name, inst.__name__)
            elif inst is None:
                # type(None) or type(module) do not pickle
                obj = _MyHash(func_name, inst)
            else:
                cls = obj.__self__.__class__
                obj = _MyHash(func_name, inst, cls)
        Pickler.save(self, obj)

    def memoize(self, obj):
        # We want hashing to be sensitive to value instead of reference.
        # For example we want ['aa', 'aa'] and ['aa', 'aaZ'[:2]]
        # to hash to the same value and that's why we disable memoization
        # for strings
        if isinstance(obj, _bytes_or_unicode):
            return
        Pickler.memoize(self, obj)

    # The dispatch table of the pickler is not accessible in Python
    # 3, as these lines are only bugware for IPython, we skip them.
    def save_global(self, obj, name=None, pack=struct.pack):
        # We have to override this method in order to deal with objects
        # defined interactively in IPython that are not injected in
        # __main__
        kwargs = dict(name=name, pack=pack)
        if sys.version_info >= (3, 4):
            # pickle's save_global lost its `pack` argument in Python 3.4.
            del kwargs['pack']
        try:
            Pickler.save_global(self, obj, **kwargs)
        except pickle.PicklingError:
            # NOTE(review): code copied from joblib retries the same call;
            # it appears it will raise again unless the __main__ injection
            # below runs first -- confirm against upstream joblib.
            Pickler.save_global(self, obj, **kwargs)
            module = getattr(obj, "__module__", None)
            if module == '__main__':
                my_name = name
                if my_name is None:
                    my_name = obj.__name__
                mod = sys.modules[module]
                if not hasattr(mod, my_name):
                    # IPython doesn't inject the variables define
                    # interactively in __main__
                    setattr(mod, my_name, obj)

    # Route builtins, types, old-style classes and functions through
    # save_global so interactively-defined objects hash consistently.
    dispatch = Pickler.dispatch.copy()
    # builtin
    dispatch[type(len)] = save_global
    # type
    dispatch[type(object)] = save_global
    # classobj
    dispatch[type(Pickler)] = save_global
    # function
    dispatch[type(pickle.dump)] = save_global

    def _batch_setitems(self, items):
        # forces order of keys in dict to ensure consistent hash.
        try:
            # Trying first to compare dict assuming the type of keys is
            # consistent and orderable.
            # This fails on python 3 when keys are unorderable
            # but we keep it in a try as it's faster.
            Pickler._batch_setitems(self, iter(sorted(items)))
        except TypeError:
            # If keys are unorderable, sorting them using their hash. This is
            # slower but works in any case.
            Pickler._batch_setitems(self, iter(sorted((hash(k), v)
                                                      for k, v in items)))

    def save_set(self, set_items):
        # forces order of items in Set to ensure consistent hash
        Pickler.save(self, _ConsistentSet(set_items))

    dispatch[type(set())] = save_set
class NumpyHasher(Hasher):
    """ Special case the hasher for when numpy is loaded: large ndarrays are
        hashed from their raw buffers instead of being pickled.
    """

    def __init__(self, hash_name='md5', coerce_mmap=False):
        """
            Parameters
            ----------
            hash_name: string
                The hash algorithm to be used
            coerce_mmap: boolean
                Make no difference between np.memmap and np.ndarray
                objects.
        """
        self.coerce_mmap = coerce_mmap
        Hasher.__init__(self, hash_name=hash_name)
        # delayed import of numpy, to avoid tight coupling
        import numpy as np
        self.np = np
        # np.getbuffer only exists on old (Python 2 era) numpy builds;
        # memoryview is the modern equivalent.
        if hasattr(np, 'getbuffer'):
            self._getbuffer = np.getbuffer
        else:
            self._getbuffer = memoryview

    def save(self, obj):
        """ Subclass the save method, to hash ndarray subclass, rather
            than pickling them. Off course, this is a total abuse of
            the Pickler class.
        """
        if isinstance(obj, self.np.ndarray) and not obj.dtype.hasobject:
            # Compute a hash of the object
            # The update function of the hash requires a c_contiguous buffer.
            if obj.shape == ():
                # 0d arrays need to be flattened because viewing them as bytes
                # raises a ValueError exception.
                obj_c_contiguous = obj.flatten()
            elif obj.flags.c_contiguous:
                obj_c_contiguous = obj
            elif obj.flags.f_contiguous:
                # A Fortran-contiguous array's transpose is C-contiguous.
                obj_c_contiguous = obj.T
            else:
                # Cater for non-single-segment arrays: this creates a
                # copy, and thus aleviates this issue.
                # XXX: There might be a more efficient way of doing this
                obj_c_contiguous = obj.flatten()

            # memoryview is not supported for some dtypes, e.g. datetime64, see
            # https://github.com/numpy/numpy/issues/4983. The
            # workaround is to view the array as bytes before
            # taking the memoryview.
            self._hash.update(
                self._getbuffer(obj_c_contiguous.view(self.np.uint8)))

            # We store the class, to be able to distinguish between
            # Objects with the same binary content, but different
            # classes.
            if self.coerce_mmap and isinstance(obj, self.np.memmap):
                # We don't make the difference between memmap and
                # normal ndarrays, to be able to reload previously
                # computed results with memmap.
                klass = self.np.ndarray
            else:
                klass = obj.__class__
            # We also return the dtype and the shape, to distinguish
            # different views on the same data with different dtypes.
            # The object will be pickled by the pickler hashed at the end.
            obj = (klass, ('HASHED', obj.dtype, obj.shape, obj.strides))
        elif isinstance(obj, self.np.dtype):
            # Atomic dtype objects are interned by their default constructor:
            # np.dtype('f8') is np.dtype('f8')
            # This interning is not maintained by a
            # pickle.loads + pickle.dumps cycle, because __reduce__
            # uses copy=True in the dtype constructor. This
            # non-deterministic behavior causes the internal memoizer
            # of the hasher to generate different hash values
            # depending on the history of the dtype object.
            # To prevent the hash from being sensitive to this, we use
            # .descr which is a full (and never interned) description of
            # the array dtype according to the numpy doc.
            klass = obj.__class__
            obj = (klass, ('HASHED', obj.descr))
        Hasher.save(self, obj)
def hash(obj, hash_name='md5', coerce_mmap=False):
    """ Quick calculation of a hash to identify uniquely Python objects
        containing numpy arrays.

        Parameters
        -----------
        hash_name: 'md5' or 'sha1'
            Hashing algorithm used. sha1 is supposedly safer, but md5 is
            faster.
        coerce_mmap: boolean
            Make no difference between np.memmap and np.ndarray
    """
    # Only use the numpy-aware hasher when numpy has actually been imported.
    numpy_loaded = 'numpy' in sys.modules
    hasher = (NumpyHasher(hash_name=hash_name, coerce_mmap=coerce_mmap)
              if numpy_loaded else Hasher(hash_name=hash_name))
    return hasher.hash(obj)
| 38.711485
| 109
| 0.613242
|
4a0adbc2633a435c29ed711c7958f8aa86d9d16e
| 3,186
|
py
|
Python
|
learn2learn/vision/datasets/full_omniglot.py
|
vfdev-5/learn2learn
|
dd0dfc20ef7b3e1655c73417e84d12361582160a
|
[
"MIT"
] | 13
|
2019-08-12T09:33:09.000Z
|
2021-11-09T00:16:36.000Z
|
learn2learn/vision/datasets/full_omniglot.py
|
vainaijr/learn2learn
|
a1da2c755856505556241809bba9b150f36850c2
|
[
"MIT"
] | 20
|
2019-08-13T16:23:26.000Z
|
2019-09-04T17:38:46.000Z
|
learn2learn/vision/datasets/full_omniglot.py
|
vainaijr/learn2learn
|
a1da2c755856505556241809bba9b150f36850c2
|
[
"MIT"
] | 1
|
2019-09-01T00:01:37.000Z
|
2019-09-01T00:01:37.000Z
|
#!/usr/bin/env python3
from torch.utils.data import Dataset, ConcatDataset
from torchvision.datasets.omniglot import Omniglot
class FullOmniglot(Dataset):
    """
    [[Source]]()

    **Description**

    Interface to the full Omniglot dataset (Lake et al., 2015): 1623 character
    classes from 50 alphabets, 20 samples each. Unlike torchvision's split
    into background and evaluation sets, this dataset concatenates both and
    leaves the class split to the user, as was done in Ravi and Larochelle,
    2017. Labels of the evaluation half are shifted up by the number of
    background classes so the two halves do not collide.

    **References**

    1. Lake et al. 2015. "Human-Level Concept Learning through Probabilistic Program Induction." Science.
    2. Ravi and Larochelle. 2017. "Optimization as a Model for Few-Shot Learning." ICLR.

    **Arguments**

    * **root** (str) - Path to download the data.
    * **transform** (Transform, *optional*, default=None) - Input pre-processing.
    * **target_transform** (Transform, *optional*, default=None) - Target pre-processing.
    * **download** (bool, *optional*, default=False) - Whether to download the dataset.
    """

    def __init__(self, root, transform=None, target_transform=None, download=False):
        self.transform = transform
        self.target_transform = target_transform
        background = Omniglot(root, background=True, download=download)
        # Shift evaluation labels past the background classes so the label
        # ranges of the two concatenated halves do not overlap.
        num_background_classes = len(background._characters)
        evaluation = Omniglot(root,
                              background=False,
                              download=download,
                              target_transform=lambda x: x + num_background_classes)
        self.dataset = ConcatDataset((background, evaluation))

    def __len__(self):
        return len(self.dataset)

    def __getitem__(self, item):
        sample, label = self.dataset[item]
        if self.transform:
            sample = self.transform(sample)
        if self.target_transform:
            label = self.target_transform(label)
        return sample, label
| 40.846154
| 105
| 0.591965
|
4a0adc4d8fe8343fdaf267749ec6df11185e7858
| 126
|
py
|
Python
|
app/tournament/app.py
|
RobLewisQA/PrisonersDilemma
|
fb763464dcdcad07c3a9f70c736a577bcd9305b1
|
[
"MIT"
] | null | null | null |
app/tournament/app.py
|
RobLewisQA/PrisonersDilemma
|
fb763464dcdcad07c3a9f70c736a577bcd9305b1
|
[
"MIT"
] | null | null | null |
app/tournament/app.py
|
RobLewisQA/PrisonersDilemma
|
fb763464dcdcad07c3a9f70c736a577bcd9305b1
|
[
"MIT"
] | 1
|
2021-04-18T14:09:27.000Z
|
2021-04-18T14:09:27.000Z
|
from flask import Flask
from application import app
# Launch the Flask app (imported above from `application`) on all interfaces.
# NOTE(review): debug=True enables the interactive debugger and must not be
# used in production -- confirm this entry point is development-only.
if __name__=='__main__':
    app.run(host='0.0.0.0', port=5000, debug=True)
| 25.2
| 48
| 0.738095
|
4a0addc7ea0061a30f327c50cf69e8d1c80df2ec
| 2,863
|
py
|
Python
|
get_Exploitdb_CSV_SUPERSEDED.py
|
NadimKawwa/CybersecurityThreatIdentification
|
e088dbb861342676337b4c9d385e6abfb6463291
|
[
"MIT"
] | 3
|
2021-01-15T10:28:54.000Z
|
2021-11-09T17:55:45.000Z
|
get_Exploitdb_CSV_SUPERSEDED.py
|
NadimKawwa/CybersecurityThreatIdentification
|
e088dbb861342676337b4c9d385e6abfb6463291
|
[
"MIT"
] | null | null | null |
get_Exploitdb_CSV_SUPERSEDED.py
|
NadimKawwa/CybersecurityThreatIdentification
|
e088dbb861342676337b4c9d385e6abfb6463291
|
[
"MIT"
] | 2
|
2021-02-05T17:35:48.000Z
|
2021-04-23T18:56:21.000Z
|
from time import sleep
from pymongo import MongoClient
from FakePersona import getPage
base_url = "https://www.exploit-db.com"

def getExploitCategories():
    """Scrape exploit-db's navigation menu and return the category link URLs."""
    # Fetch the landing page under a spoofed browser persona.
    soup = getPage(base_url)
    # The second level-1 entry of the nav list holds the exploit categories.
    menu = soup.find("ul", {"class":"w-nav-list"}).findAll("li", {"class":"level_1"})[1]
    return [item.find("a")['href'] for item in menu.findAll("li")]
def getCategoryTable(pageSoup):
    """Return the <tr> rows of the exploit listing table on a category page."""
    return pageSoup.find("table", {"class": "exploit_list"}).findAll("tr")
def streamData(data, collection):
    """Insert one scraped document into the local `exploits` Mongo database.

    Args:
        data: dict-like document to store.
        collection: name of the target collection.
    """
    client = MongoClient(host='localhost', port=27017)
    try:
        # BUG FIX: Collection.insert() was deprecated in PyMongo 3 and removed
        # in PyMongo 4; insert_one() is the supported single-document call.
        client.exploits[collection].insert_one(data)
    finally:
        # Previously the client was never closed, leaking a connection per call.
        client.close()
def streamExploitTableSoup(tableSoup, category, database):
    """Parse rows of an exploit listing table and print them as CSV lines.

    Rows that do not match the expected 5-column layout are skipped. The
    MongoDB streaming path is currently disabled (see streamData); `category`
    and `database` are kept for interface compatibility with callers.
    """
    for row in tableSoup:
        try:
            cells = row.findAll("td")
            cells = cells[0:1] + cells[3::]
            if len(cells) == 5:
                date = cells[0].getText()
                verification = cells[1].find("img")['title'].strip()
                exploitLink = cells[2].find("a")
                title, link = exploitLink.getText().replace("\n", "").strip(), exploitLink['href']
                # Titles usually read "<application> - <attack vector>".
                if "-" in title:
                    appattack = title.split("-")
                    application = appattack[0]
                    attack = appattack[1]
                else:
                    application = title
                    attack = title
                platform = cells[3].find("a").getText().replace("\n", "").strip()
                author = cells[4].find("a").getText().replace("\n", "").strip()
                # BUG FIX: the format string had six placeholders but seven
                # arguments, so `link` was silently dropped from the output.
                print("{0},{1},{2},{3},{4},{5},{6}".format(
                    date, application, attack, platform, author, verification, link))
        except Exception:
            # Best-effort scraping: malformed rows are ignored.
            # (Narrowed from a bare `except:` so Ctrl-C still interrupts.)
            pass
def crawlCategoryTables(categoryLink):
    """Walk every results page of one category and stream its exploit rows."""
    category = categoryLink.split("/")[-2]
    firstPage = getPage(categoryLink)
    # The last pagination anchor carries the final page number in its query string.
    lastPage = int(firstPage.find("main").find("div", {"class":"pagination"}).findAll("a")[-1]['href'].split("=")[-1])
    for pageNum in range(1, lastPage + 1):
        # Back off periodically to avoid hammering the site.
        if pageNum % 20 == 0:
            sleep(60)
        pageUrl = categoryLink + "?order_by=date_published&order=desc&pg=" + str(pageNum)
        tableSoup = getCategoryTable(getPage(pageUrl))
        streamExploitTableSoup(tableSoup, category=category, database=category)
if __name__ == "__main__":
    # Crawl a single hard-coded category.
    # NOTE(review): index 3 is a magic choice -- confirm which category of the
    # scraped menu it selects.
    exploitCategories = getExploitCategories()
    crawlCategoryTables(exploitCategories[3])
| 34.083333
| 121
| 0.567586
|
4a0ade80c1b9c1961a7ad8e1f5d7a2f859682ee2
| 809
|
py
|
Python
|
src/bt/game/state.py
|
btdevel/bt
|
23abdf0860484a4adcfbe2bcbe94eebca7f820fd
|
[
"MIT"
] | 1
|
2017-06-30T00:35:05.000Z
|
2017-06-30T00:35:05.000Z
|
src/bt/game/state.py
|
btdevel/bt
|
23abdf0860484a4adcfbe2bcbe94eebca7f820fd
|
[
"MIT"
] | null | null | null |
src/bt/game/state.py
|
btdevel/bt
|
23abdf0860484a4adcfbe2bcbe94eebca7f820fd
|
[
"MIT"
] | null | null | null |
from bt.game.character import Party
from bt.game.ui import UI
class State:
    """Top-level game state: owns the party, the UI and the active handler."""

    def __init__(self, city_handler, start_handler):
        self.city_handler = city_handler
        self.start_handler = start_handler
        self.curr_handler = None
        self.ui = UI()
        self.party = Party()

    def run(self):
        """Initialise the UI, enter the start handler, hand over to the event loop."""
        self.ui.init(self)
        self.set_handler(self.start_handler, True)
        self.ui.event_loop()

    def set_handler(self, curr, redraw=True):
        """Switch the active handler, firing exit/enter hooks and refreshing the UI.

        NOTE(review): the `redraw` argument is currently ignored -- the screen
        is always redrawn; confirm whether honouring it was intended.
        """
        previous = self.curr_handler
        if previous:
            previous.exit(self)
        self.curr_handler = curr
        curr.enter(self)
        self.ui.show_location(curr.location)
        self.redraw()

    def redraw(self):
        """Repaint the UI."""
        self.ui.redraw()

    def message_view_ctx(self):
        """Context manager that suspends message-view updates."""
        return self.ui.message_view.noupdate()
| 26.966667
| 52
| 0.637824
|
4a0adf403b61ec03a0e0ec17a87ef3b59a0d5fed
| 187
|
py
|
Python
|
contact_us/apps.py
|
ohahlev/ahlev-django-contact-us
|
a1e8e22bba16ca79ee355ac12a4627df29f76ce8
|
[
"BSD-3-Clause"
] | null | null | null |
contact_us/apps.py
|
ohahlev/ahlev-django-contact-us
|
a1e8e22bba16ca79ee355ac12a4627df29f76ce8
|
[
"BSD-3-Clause"
] | null | null | null |
contact_us/apps.py
|
ohahlev/ahlev-django-contact-us
|
a1e8e22bba16ca79ee355ac12a4627df29f76ce8
|
[
"BSD-3-Clause"
] | null | null | null |
from django.apps import AppConfig
from . import __version__ as VERSION
class ContactUsConfig(AppConfig):
    """Django application configuration for the contact_us app."""
    # App label used by Django's application registry.
    name = "contact_us"
    # Human-readable name shown in the admin, suffixed with the package version.
    verbose_name = f"Contact Us Management {VERSION}"
| 23.375
| 55
| 0.754011
|
4a0ae1430d255468cf8f7fa8fab0bd431da224f4
| 9,051
|
py
|
Python
|
Plugins/UnrealEnginePython/Binaries/Win64/Lib/site-packages/tensorflow/contrib/quantize/python/graph_matcher.py
|
JustinACoder/H22-GR3-UnrealAI
|
361eb9ef1147f8a2991e5f98c4118cd823184adf
|
[
"MIT"
] | 6
|
2022-02-04T18:12:24.000Z
|
2022-03-21T23:57:12.000Z
|
Lib/site-packages/tensorflow/contrib/quantize/python/graph_matcher.py
|
shfkdroal/Robot-Learning-in-Mixed-Adversarial-and-Collaborative-Settings
|
1fa4cd6a566c8745f455fc3d2273208f21f88ced
|
[
"bzip2-1.0.6"
] | null | null | null |
Lib/site-packages/tensorflow/contrib/quantize/python/graph_matcher.py
|
shfkdroal/Robot-Learning-in-Mixed-Adversarial-and-Collaborative-Settings
|
1fa4cd6a566c8745f455fc3d2273208f21f88ced
|
[
"bzip2-1.0.6"
] | 1
|
2022-02-08T03:53:23.000Z
|
2022-02-08T03:53:23.000Z
|
# Copyright 2017 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Utilities that match patterns in a tf.Graph."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import abc
import itertools
class Pattern(object):
  """The parent class of all patterns (e.g. OpTypePattern and OneofPattern).

  Subclasses must implement `match`, which returns a MatchResult (or None)
  for a given op/tensor pair.
  """

  @abc.abstractmethod
  def match(self, op, tensor):
    """Returns the result of matching op/tensor against this pattern."""
    raise NotImplementedError('Method "match" not implemented.')
class OpTypePattern(Pattern):
  """A tree pattern that matches TF expressions with certain op types."""

  def __init__(self, op_type, name=None, inputs=None, ordered_inputs=True):
    """Initializes an OpTypePattern.

    Args:
      op_type: string that specifies the allowed types of the root. It can be
        (1) an op type, e.g. 'Conv2D',
        (2) '*', i.e. wildcard, or
        (3) multiple op types separated by '|', e.g., 'Relu|Relu6'.
        We could use regex strings, which might be worthwhile when we have many
        similar TF op types.
      name: Optional string. The name of the pattern that can be looked up in
        MatchResult.
      inputs: Optional list of `Pattern`s or strings that specify the
        patterns for the inputs of a matching op. If None, this pattern accepts
        any inputs of a matching op.
      ordered_inputs: Defaults to True. If False, will match any op that
        matches a permutation of the inputs.

    Raises:
      ValueError: if too many inputs are provided when ordered_inputs is False.
    """
    self._op_type = op_type
    self._name = name
    if inputs is None:
      inputs = []
    # BUG FIX: the input-count cap exists because unordered matching tries
    # every permutation of the inputs (n! candidates); it was previously
    # enforced even for ordered inputs, where no permutations are generated.
    # The message also claimed "< 8" while up to 8 inputs were accepted.
    if not ordered_inputs and len(inputs) > 8:
      raise ValueError(
          'Only <= 8 inputs are allowed when ordered_inputs is False.')
    # Strings are shorthand for nested OpTypePatterns.
    self._inputs = [
        input_pattern
        if isinstance(input_pattern, Pattern) else OpTypePattern(input_pattern)
        for input_pattern in inputs
    ]
    self._ordered_inputs = ordered_inputs

  @property
  def name(self):
    return self._name

  def match(self, op, tensor):
    """Matches `op` (with output `tensor`) against this pattern tree.

    Returns a MatchResult covering this pattern and all matched sub-patterns,
    or None if the op (or any required input) does not match.
    """
    if self._op_type != '*':
      if op.type not in self._op_type.split('|'):
        return None

    match_result = MatchResult()
    match_result.add(self, op, tensor)

    if not self._inputs:
      # If pattern.inputs is empty, skips the rest and accepts all the inputs.
      return match_result

    if len(op.inputs) != len(self._inputs):
      return None

    input_patterns_list = [self._inputs]
    # If order doesn't matter for the inputs, then make sure we match at least
    # one permutation of the inputs.
    if not self._ordered_inputs:
      input_patterns_list = list(itertools.permutations(self._inputs))

    for input_patterns in input_patterns_list:
      match_failed = False
      for input_tensor, input_pattern in zip(op.inputs, input_patterns):
        input_match_result = input_pattern.match(input_tensor.op, input_tensor)
        if input_match_result is None:
          match_failed = True
          break
        match_result.merge_from(input_match_result)
      if not match_failed:
        return match_result
    return None
class OneofPattern(Pattern):
  """Matches one of the given sub-patterns."""

  def __init__(self, sub_patterns):
    self._sub_patterns = sub_patterns

  def match(self, op, tensor):
    """Returns the first sub-pattern's match result, or None if none match."""
    for candidate in self._sub_patterns:
      result = candidate.match(op, tensor)
      if result is not None:
        return result
    return None
class MatchResult(object):
    r"""Encapsulates the result of a match done by GraphMatcher.

    MatchResult contains a map from Pattern to the matching op and tensor.
    When the matching op has multiple output tensors, the matching tensor is
    the output tensor used by the matching op of the parent pattern. E.g.,
    when we match graph

        -         +
       / \y0  y1/ \
      x    split    z
             |
             y        (nodes are ops; edges are going up)

    against add_pattern defined as

        y1_pattern = OpTypePattern('*')
        z_pattern = OpTypePattern('*')
        add_pattern = OpTypePattern('+', inputs=[y1_pattern, z_pattern])

    the matching op of `y1_pattern` is `split`, and the matching tensor of
    `y1_pattern` is `y1`, not `y0`.
    """

    def __init__(self):
        self._pattern_to_op_tensor = {}
        self._name_to_pattern = {}

    def add(self, pattern, op, tensor):
        """Record that `pattern` matched (`op`, `tensor`); index named patterns."""
        self._pattern_to_op_tensor[pattern] = (op, tensor)
        name = pattern.name
        if name is None:
            return
        if name in self._name_to_pattern:
            raise ValueError(
                'Name %s is already bound to another pattern' % name)
        self._name_to_pattern[name] = pattern

    def _to_pattern(self, pattern_or_name):
        """Resolve a Pattern or a registered name to a Pattern (None if unknown)."""
        if isinstance(pattern_or_name, Pattern):
            return pattern_or_name
        if isinstance(pattern_or_name, str):
            return self._name_to_pattern.get(pattern_or_name)
        raise ValueError('pattern_or_name has type %s. Expect Pattern or str.' %
                         type(pattern_or_name))

    def _get_op_tensor(self, pattern_or_name):
        pattern = self._to_pattern(pattern_or_name)
        if pattern is None:
            return None
        return self._pattern_to_op_tensor.get(pattern)

    def get_op(self, pattern_or_name):
        """Return the op matched by `pattern_or_name`, or None."""
        pair = self._get_op_tensor(pattern_or_name)
        return pair[0] if pair else None

    def get_tensor(self, pattern_or_name):
        """Return the tensor matched by `pattern_or_name`, or None."""
        pair = self._get_op_tensor(pattern_or_name)
        return pair[1] if pair else None

    def merge_from(self, other_match_result):
        """Fold another MatchResult's bindings into this one."""
        # pylint: disable=protected-access
        self._pattern_to_op_tensor.update(other_match_result._pattern_to_op_tensor)
        self._name_to_pattern.update(other_match_result._name_to_pattern)
        # pylint: enable=protected-access
class GraphMatcher(object):
    """Checks if a particular subgraph matches a given pattern."""

    def __init__(self, pattern):
        """Initializes a GraphMatcher.

        Args:
            pattern: The `Pattern` against which `GraphMatcher` matches subgraphs.
        """
        self._pattern = pattern

    def _match_pattern(self, pattern, op, tensor):
        """Returns whether a TF expression rooted at `op` matches `pattern`.

        On success the matching ops/tensors are folded into `self._match_result`.

        Args:
            pattern: A `Pattern`.
            op: A `tf.Operation` to match against the pattern.
            tensor: the output `tf.Tensor` of `op` used by the matching op of
                `pattern`'s parent; None when `pattern` is the pattern-tree root.

        Returns:
            True if a TF expression rooted at `op` matches `pattern`.
        """
        result = pattern.match(op, tensor)
        if result is None:
            return False
        self._match_result.merge_from(result)
        return True

    def match_op(self, op):
        """Matches `op` against `self._pattern`.

        Args:
            op: `tf.Operation` to match against the pattern.

        Returns:
            A `MatchResult` if `op` matches the pattern; otherwise None.
        """
        self._match_result = MatchResult()
        matched = self._match_pattern(self._pattern, op, tensor=None)
        return self._match_result if matched else None

    def match_ops(self, ops):
        """Matches each operation in `ops` against `self._pattern`.

        Args:
            ops: collection of `tf.Operation` to match against the pattern.

        Yields:
            `MatchResult` for each `tf.Operation` that matches the pattern.
        """
        for candidate in ops:
            result = self.match_op(candidate)
            if result:
                yield result

    def match_graph(self, graph):
        """Matches each operation in `graph` against `self._pattern`.

        Args:
            graph: `tf.Graph` containing operations to match.

        Yields:
            `MatchResult` for each matching `tf.Operation` in `graph`.
        """
        # Deliberately not using `yield from` (kept for older-interpreter compat).
        for result in self.match_ops(graph.get_operations()):
            yield result
| 33.153846
| 81
| 0.662689
|
4a0ae177ba4ecf064aae80d40f975331346da828
| 2,131
|
py
|
Python
|
ngraph_onnx/onnx_importer/utils/misc.py
|
ddokupil/ngraph-onnx
|
497f91d0972de0b5ca902633b3bce09e58b913a0
|
[
"Apache-2.0"
] | null | null | null |
ngraph_onnx/onnx_importer/utils/misc.py
|
ddokupil/ngraph-onnx
|
497f91d0972de0b5ca902633b3bce09e58b913a0
|
[
"Apache-2.0"
] | 3
|
2018-06-04T13:37:27.000Z
|
2018-06-04T14:05:47.000Z
|
ngraph_onnx/onnx_importer/utils/misc.py
|
ddokupil/ngraph-onnx
|
497f91d0972de0b5ca902633b3bce09e58b913a0
|
[
"Apache-2.0"
] | null | null | null |
# ******************************************************************************
# Copyright 2018 Intel Corporation
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ******************************************************************************
from typing import Sequence, List, Tuple
from ngraph_onnx import TYPE_CHECKING
if TYPE_CHECKING:
from ngraph_onnx.onnx_importer.model_wrappers import NodeWrapper
def split_pads_into_pairs(pads):  # type: (Sequence[int]) -> List[Tuple[int, int]]
    """Convert ONNX padding format to ngraph padding format.

    :param pads: ONNX `pads` format: [x1_begin, x2_begin..., x1_end, x2_end,...]
    :return: ngraph format: [(x1_begin, x1_end), (x2_begin, x2_end), ...]
    """
    if not pads:
        return []
    # Integer midpoint: first half are the begin pads, second half the end pads.
    half = len(pads) // 2
    return list(zip(pads[:half], pads[half:]))


def verify_symmetric_padding(onnx_node, pads):
    # type: (NodeWrapper, Sequence[int]) -> bool
    """Check if the `pads` value of an ONNX node contains only symmetric padding pairs.

    :param onnx_node: an ONNX node (needs `op_type` and `name` attributes)
    :param pads: the value for `pads` already extracted or calculated base on `auto_pad`
    :return: True if padding is symmetric, otherwise raises a NotImplementedError
    """
    for pad_left, pad_right in split_pads_into_pairs(pads):
        if pad_left != pad_right:
            # BUG FIX: the message previously used logging-style ('%s...', a, b)
            # arguments, so the exception carried an unformatted tuple instead of
            # a message. Format explicitly with the % operator.
            raise NotImplementedError(
                '%s node (%s): asymmetric padding is not supported by ngraph.'
                % (onnx_node.op_type, onnx_node.name))
    return True
| 38.745455
| 90
| 0.651807
|
4a0ae17a11102bd62aadc2e50ea0cc887917c6ef
| 690
|
py
|
Python
|
models.py
|
Thytu/earthquakePrediction
|
95777022e492bd21aa2107c2b5af7a80b38abc2f
|
[
"MIT"
] | null | null | null |
models.py
|
Thytu/earthquakePrediction
|
95777022e492bd21aa2107c2b5af7a80b38abc2f
|
[
"MIT"
] | null | null | null |
models.py
|
Thytu/earthquakePrediction
|
95777022e492bd21aa2107c2b5af7a80b38abc2f
|
[
"MIT"
] | null | null | null |
import torch
import torch.nn as nn
class RNN(nn.Module):
    """Minimal Elman-style RNN cell.

    The input and hidden state are concatenated and fed to two linear heads:
    one producing the next hidden state, one producing the (ReLU-activated)
    output for the current step.
    """

    # NOTE(review): the misspelled 'intput_size' parameter name is preserved so
    # keyword callers keep working.
    def __init__(self, intput_size, hidden_size, output_size):
        super().__init__()
        self.hidden_size = hidden_size
        self.i2h = nn.Linear(intput_size + hidden_size, hidden_size)
        self.i2o = nn.Linear(intput_size + hidden_size, output_size)
        self.relu = nn.ReLU()

    def forward(self, input_tensor, hidden_tensor):
        """Run one time step; return (output, next_hidden)."""
        joint = torch.cat((input_tensor, hidden_tensor), dim=1)
        next_hidden = self.i2h(joint)
        step_output = self.relu(self.i2o(joint))
        return step_output, next_hidden

    def init_hidden(self):
        """Fresh all-zero hidden state for a batch of one."""
        return torch.zeros(1, self.hidden_size)
| 30
| 68
| 0.65942
|
4a0ae1cd5a4fef1efc21cab29c25016a0e1926b0
| 2,364
|
py
|
Python
|
intruder-detector.py
|
Miguel-Munoz-Dominguez/diy-opensource-intruder-detection
|
23b14169d6e46fc12f6c4447c7b6727ea94b5521
|
[
"MIT"
] | null | null | null |
intruder-detector.py
|
Miguel-Munoz-Dominguez/diy-opensource-intruder-detection
|
23b14169d6e46fc12f6c4447c7b6727ea94b5521
|
[
"MIT"
] | null | null | null |
intruder-detector.py
|
Miguel-Munoz-Dominguez/diy-opensource-intruder-detection
|
23b14169d6e46fc12f6c4447c7b6727ea94b5521
|
[
"MIT"
] | null | null | null |
# Toggle between the Raspberry Pi camera module and a USB webcam (via OpenCV).
picam = False
if picam:
    from picamera import PiCamera
else:
    import cv2
import time
import telepot
import RPi.GPIO as GPIO
import subprocess

if picam:
    camera = PiCamera()
    camera.rotation = 180  # presumably the camera is mounted upside-down — confirm

# PIR motion sensor input wired to physical pin 11 (BOARD numbering).
GPIO.setwarnings(False)
GPIO.setmode(GPIO.BOARD)
GPIO.setup(11,GPIO.IN)

# Global state shared with the Telegram handler: 'intruder' latches a detection,
# 'enabled' arms/disarms the PIR check via chat commands.
intruder = False
enabled = False
def handle(msg):
    """Telepot message callback: dispatch chat commands from the owner.

    Supported commands: 'show' (capture and send a photo), 'enable pir',
    'disable pir', and 'say <text>' (speak via espeak). Messages from any
    other Telegram user are refused and the owner chat is notified.
    """
    global enabled
    global intruder
    command = msg['text']
    from_id = msg['from']['id']
    #chat_id = msg['chat']['id']
    #print 'Got command: %s' % command
    if from_id == 0000000: # placeholder: your id from Telegram
        if command.lower() == 'show':
            # Capture a still to /home/pi/image.jpg and send it back.
            if picam:
                camera.start_preview()
                time.sleep(5)  # warm-up before capture
                camera.capture('/home/pi/image.jpg')
                camera.stop_preview()
            else:
                camera = cv2.VideoCapture(0)
                return_value, image = camera.read()
                cv2.imwrite('/home/pi/image.jpg', image)
                del(camera)
            inf = open('/home/pi/image.jpg', 'rb')
            #bot.sendMessage(chat_id,text="Done!")
            bot.sendPhoto(chat_id,inf)
        if command.lower() == "enable pir":
            enabled = True
            bot.sendMessage(chat_id,text="PIR enabled")
        if command.lower() == "disable pir":
            enabled = False
            bot.sendMessage(chat_id,text="PIR disabled")
        if command.lower().split(' ')[0]=='say':
            # not using the espeak python module due to raspberry pi and alsa compatibility problems
            # NOTE(review): chat text is interpolated into a shell command —
            # shell-injection risk even for a trusted sender; consider shlex.quote.
            command = "espeak -ves \""+command[3:]+"\" --stdout|aplay" # remove or change -ves (spanish text) option to change the language
            process = subprocess.Popen(command,shell=True, executable='/bin/bash')
    else:
        bot.sendMessage(from_id,text="I'm not allowed to talk with you, sorry")
        bot.sendMessage(chat_id,text="Somebody is trying to use the chatbot")
# Placeholders: fill in your chat id and bot token before running.
chat_id = xxxxxxx
bot = telepot.Bot('xxxxxxxxx')
bot.message_loop(handle)
#print 'I am listening...'

# Poll the PIR sensor once per second. On a rising detection — and only while
# armed via the 'enable pir' chat command — capture one photo and send it.
while 1:
    pirvalue = GPIO.input(11)
    if pirvalue == 1 and intruder == False and enabled == True:
        intruder = True  # latch so one intrusion produces a single photo
        if picam:
            camera.start_preview()
            time.sleep(5)
            camera.capture('/home/pi/image.jpg')
            camera.stop_preview()
        else:
            camera = cv2.VideoCapture(0)
            return_value, image = camera.read()
            cv2.imwrite('/home/pi/image.jpg', image)
            del(camera)
        # BUG FIX: open the image in binary mode; sendPhoto needs a binary file
        # object (the 'show' command handler already opened with 'rb').
        inf = open('/home/pi/image.jpg', 'rb')
        bot.sendPhoto(chat_id,inf)
    if pirvalue == 0:
        intruder = False  # sensor cleared; re-arm for the next detection
    time.sleep(1)
| 27.488372
| 136
| 0.649746
|
4a0ae33271a6e7f3cfd72dabf2bfff2acdd86300
| 5,819
|
py
|
Python
|
tests/annotators/test_cosmic.py
|
NCI-GDC/aliquot-maf-tools
|
6aec9490ab7194ec605bf02c4c8e7c1cfca53973
|
[
"Apache-2.0"
] | 1
|
2020-09-18T17:52:37.000Z
|
2020-09-18T17:52:37.000Z
|
tests/annotators/test_cosmic.py
|
NCI-GDC/aliquot-maf-tools
|
6aec9490ab7194ec605bf02c4c8e7c1cfca53973
|
[
"Apache-2.0"
] | null | null | null |
tests/annotators/test_cosmic.py
|
NCI-GDC/aliquot-maf-tools
|
6aec9490ab7194ec605bf02c4c8e7c1cfca53973
|
[
"Apache-2.0"
] | 1
|
2020-08-14T08:49:39.000Z
|
2020-08-14T08:49:39.000Z
|
"""
Tests for the ``aliquotmaf.annotators.Cosmic`` class.
"""
from collections import OrderedDict, namedtuple
import pysam
import pytest
from maflib.column_types import SequenceOfStrings
from aliquotmaf.annotators import CosmicID
from aliquotmaf.converters.builder import get_builder
@pytest.fixture
def setup_annotator():
    """Factory fixture building CosmicID annotators; shuts them all down on teardown."""
    annotators = []

    def _make_annotator(scheme, source):
        annotator = CosmicID.setup(scheme, source)
        annotators.append(annotator)
        return annotator

    yield _make_annotator

    for annotator in annotators:
        annotator.shutdown()
@pytest.fixture
def test_scheme(get_test_scheme):
    """Scheme with COSMIC and dbSNP_RS string-sequence columns."""
    columns = OrderedDict()
    columns["COSMIC"] = SequenceOfStrings
    columns["dbSNP_RS"] = SequenceOfStrings
    return get_test_scheme(columns)
@pytest.fixture
def vcf_gen(get_test_file):
    """Factory fixture returning the first four records of a test VCF as a namedtuple."""
    VcfRec = namedtuple("VcfRec", ["snp1", "deletion", "insertion", "snp2"])
    open_files = []

    def _vcf_gen(name):
        vcf_obj = pysam.VariantFile(get_test_file(name))
        open_files.append(vcf_obj)
        records = vcf_obj.fetch()
        # Fields are filled positionally, in file order.
        return VcfRec(*(next(records) for _ in range(4)))

    yield _vcf_gen

    for vcf_obj in open_files:
        vcf_obj.close()
def test_setup_cosmic(test_scheme, setup_annotator, get_test_file):
    """CosmicID.setup produces a CosmicID instance."""
    annotator = setup_annotator(test_scheme, source=get_test_file("ex2.vcf.gz"))
    assert isinstance(annotator, CosmicID)
def test_cosmic_snp(
    test_scheme,
    setup_annotator,
    get_test_file,
    get_empty_maf_record,
    vcf_gen,
    get_test_vcf_record,
):
    """SNP annotation: overlaps set COSMIC and clear dbSNP_RS; misses keep dbSNP_RS."""
    annotator = setup_annotator(test_scheme, source=get_test_file("ex2.vcf.gz"))
    records = vcf_gen("ex2.vcf.gz")

    def as_vcf_record(rec, pos=None, alleles=None, alts=None):
        # Build a test VCF record from `rec`, optionally overriding fields.
        return get_test_vcf_record(
            chrom=rec.chrom,
            pos=rec.pos if pos is None else pos,
            alleles=rec.alleles if alleles is None else alleles,
            ref=rec.ref,
            alts=rec.alts if alts is None else alts,
        )

    # First SNP: matching COSMIC record, starting from dbSNP_RS == "novel".
    snp = records.snp1
    vcf_record = as_vcf_record(snp)
    maf_record = get_empty_maf_record
    maf_record["dbSNP_RS"] = get_builder("dbSNP_RS", test_scheme, value="novel")
    maf_record = annotator.annotate(maf_record, vcf_record, var_allele_idx=1)
    assert maf_record["COSMIC"].value == ["COSM0000"]
    assert maf_record["dbSNP_RS"].value == []

    # Same SNP, starting from an unset dbSNP_RS.
    maf_record = get_empty_maf_record
    maf_record["dbSNP_RS"] = get_builder("dbSNP_RS", test_scheme, value=None)
    maf_record = annotator.annotate(maf_record, vcf_record, var_allele_idx=1)
    assert maf_record["COSMIC"].value == ["COSM0000"]
    assert maf_record["dbSNP_RS"].value == []

    # Second SNP: a different COSMIC id.
    snp = records.snp2
    vcf_record = as_vcf_record(snp)
    maf_record = get_empty_maf_record
    maf_record["dbSNP_RS"] = get_builder("dbSNP_RS", test_scheme, value="novel")
    maf_record = annotator.annotate(maf_record, vcf_record, var_allele_idx=1)
    assert maf_record["COSMIC"].value == ["COSM0003"]
    assert maf_record["dbSNP_RS"].value == []

    # Non-overlapping alleles: COSMIC stays empty and dbSNP_RS is preserved.
    vcf_record = as_vcf_record(snp, alleles=(snp.ref, "G"), alts=tuple("G"))
    maf_record["COSMIC"] = get_builder("COSMIC", test_scheme, value=None)
    maf_record["dbSNP_RS"] = get_builder("dbSNP_RS", test_scheme, value="novel")
    maf_record = annotator.annotate(maf_record, vcf_record, var_allele_idx=1)
    assert maf_record["COSMIC"].value == []
    assert maf_record["dbSNP_RS"].value == ["novel"]

    # Non-overlapping position: same negative outcome.
    vcf_record = as_vcf_record(snp, pos=101, alleles=(snp.ref, "G"), alts=tuple("G"))
    maf_record["COSMIC"] = get_builder("COSMIC", test_scheme, value=None)
    maf_record["dbSNP_RS"] = get_builder("dbSNP_RS", test_scheme, value="novel")
    maf_record = annotator.annotate(maf_record, vcf_record, var_allele_idx=1)
    assert maf_record["COSMIC"].value == []
    assert maf_record["dbSNP_RS"].value == ["novel"]
def test_cosmic_del(
    test_scheme,
    setup_annotator,
    get_test_file,
    get_empty_maf_record,
    vcf_gen,
    get_test_vcf_record,
):
    """A deletion overlapping a COSMIC record sets COSMIC and clears dbSNP_RS."""
    annotator = setup_annotator(test_scheme, source=get_test_file("ex2.vcf.gz"))
    deletion = vcf_gen("ex2.vcf.gz").deletion
    vcf_record = get_test_vcf_record(
        chrom=deletion.chrom,
        pos=deletion.pos,
        alleles=deletion.alleles,
        ref=deletion.ref,
        alts=deletion.alts,
    )
    maf_record = get_empty_maf_record
    maf_record["dbSNP_RS"] = get_builder("dbSNP_RS", test_scheme, value="novel")
    maf_record = annotator.annotate(maf_record, vcf_record, var_allele_idx=1)
    assert maf_record["COSMIC"].value == ["COSM0001"]
    assert maf_record["dbSNP_RS"].value == []
def test_cosmic_ins(
    test_scheme,
    setup_annotator,
    get_test_file,
    get_empty_maf_record,
    vcf_gen,
    get_test_vcf_record,
):
    """An insertion overlapping a COSMIC record sets COSMIC and clears dbSNP_RS."""
    annotator = setup_annotator(test_scheme, source=get_test_file("ex2.vcf.gz"))
    insertion = vcf_gen("ex2.vcf.gz").insertion
    vcf_record = get_test_vcf_record(
        chrom=insertion.chrom,
        pos=insertion.pos,
        alleles=insertion.alleles,
        ref=insertion.ref,
        alts=insertion.alts,
    )
    maf_record = get_empty_maf_record
    maf_record["dbSNP_RS"] = get_builder("dbSNP_RS", test_scheme, value="novel")
    maf_record = annotator.annotate(maf_record, vcf_record, var_allele_idx=1)
    assert maf_record["COSMIC"].value == ["COSM0002"]
    assert maf_record["dbSNP_RS"].value == []
| 27.448113
| 83
| 0.670562
|
4a0ae41fc43591aa1d8fa0b837c5058ae5f5a250
| 218
|
py
|
Python
|
basis_modules/modules/marketstack/importers/base.py
|
kvh/snapflow-modules
|
6123597f4b71a8e890b8ba7df471c7efbd59d6a4
|
[
"BSD-3-Clause"
] | null | null | null |
basis_modules/modules/marketstack/importers/base.py
|
kvh/snapflow-modules
|
6123597f4b71a8e890b8ba7df471c7efbd59d6a4
|
[
"BSD-3-Clause"
] | 2
|
2021-07-26T17:46:22.000Z
|
2021-08-02T19:40:02.000Z
|
basis_modules/modules/marketstack/importers/base.py
|
kvh/snapflow-modules
|
6123597f4b71a8e890b8ba7df471c7efbd59d6a4
|
[
"BSD-3-Clause"
] | null | null | null |
from __future__ import annotations

from datetime import date

# Marketstack REST API roots.
# NOTE(review): both schemes are kept — presumably the plain-HTTP root serves
# free-tier API keys while HTTPS requires a paid plan; confirm against callers.
MARKETSTACK_API_BASE_URL = "http://api.marketstack.com/v1/"
HTTPS_MARKETSTACK_API_BASE_URL = "https://api.marketstack.com/v1/"

# Earliest date importers will request data for.
MIN_DATE = date(2000, 1, 1)
| 27.25
| 66
| 0.788991
|
4a0ae51bee48d252592d182838f4d6fcd70d5659
| 119
|
py
|
Python
|
django_project/users/admin.py
|
courage173/django_project
|
7a04cbfa20cc4bb913d08edab74c6f7d633fd3ee
|
[
"bzip2-1.0.6"
] | null | null | null |
django_project/users/admin.py
|
courage173/django_project
|
7a04cbfa20cc4bb913d08edab74c6f7d633fd3ee
|
[
"bzip2-1.0.6"
] | null | null | null |
django_project/users/admin.py
|
courage173/django_project
|
7a04cbfa20cc4bb913d08edab74c6f7d633fd3ee
|
[
"bzip2-1.0.6"
] | null | null | null |
from django.contrib import admin

from .models import Profile

# Expose Profile in the Django admin with the default ModelAdmin options.
admin.site.register(Profile)
| 23.8
| 32
| 0.815126
|
4a0ae60a8630c4a48f9ced0729dd343d07987615
| 3,951
|
py
|
Python
|
tests/test_writers.py
|
EgorVorontsov/corsair
|
d17ea4d43cf16a6cc3b45ffbb407650bbe628665
|
[
"MIT"
] | null | null | null |
tests/test_writers.py
|
EgorVorontsov/corsair
|
d17ea4d43cf16a6cc3b45ffbb407650bbe628665
|
[
"MIT"
] | null | null | null |
tests/test_writers.py
|
EgorVorontsov/corsair
|
d17ea4d43cf16a6cc3b45ffbb407650bbe628665
|
[
"MIT"
] | null | null | null |
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""Writers module tests.
"""
import pytest
from corsair import RegisterMapReader, ConfigurationReader
from corsair import RegisterMapWriter, ConfigurationWriter, LbBridgeWriter
from corsair import HdlWriter, DocsWriter
from corsair import Configuration, RegisterMap
class TestRegisterMapWriter:
    """Class 'RegisterMapWriter' testing."""

    def _write(self, path):
        """Round-trip a default register map through `path` and compare."""
        reference = RegisterMap()
        RegisterMapWriter()(path, reference)
        assert RegisterMapReader()(path) == reference

    def test_write_json(self, tmpdir):
        """Test of writing register map to a JSON file."""
        target = str(tmpdir.join('map_out.json'))
        print('output_file:', target)
        self._write(target)

    def test_write_yaml(self, tmpdir):
        """Test of writing register map to a YAML file."""
        target = str(tmpdir.join('map_out.yaml'))
        print('output_file:', target)
        self._write(target)
class TestConfigurationWriter:
    """Class 'ConfigurationWriter' testing."""

    def _write(self, path):
        """Round-trip a default configuration through `path` and compare."""
        reference = Configuration()
        ConfigurationWriter()(path, reference)
        assert reference == ConfigurationReader()(path)

    def test_write_json(self, tmpdir):
        """Test of writing configuration to a JSON file."""
        target = str(tmpdir.join('config_out.json'))
        print('output_file:', target)
        self._write(target)

    def test_write_yaml(self, tmpdir):
        """Test of writing configuration to a YAML file."""
        target = str(tmpdir.join('config_out.yaml'))
        print('output_file:', target)
        self._write(target)
class TestLbBridgeWriter:
    """Class 'LbBridgeWriter' testing."""

    def test_apb_write(self, tmpdir):
        """Generate an APB-to-LocalBus bridge in Verilog and check its header."""
        target = str(tmpdir.join('apb2lb.v'))
        print('output_file:', target)
        config = Configuration()
        config['lb_bridge']['type'].value = 'apb'
        LbBridgeWriter()(target, config)
        with open(target, 'r') as f:
            generated = f.read()
        assert 'APB to Local Bus bridge' in generated
class TestHdlWriter:
    """Class 'HdlWriter' testing."""

    def test_verilog_write(self, tmpdir):
        """Generate a register-map module in Verilog and check its frame."""
        target = str(tmpdir.join('regs.v'))
        print('output_file:', target)
        rmap = RegisterMap()
        HdlWriter()(target, rmap)
        with open(target, 'r') as f:
            generated = f.read()
        assert 'module regs' in generated
        assert 'endmodule' in generated
class TestDocsWriter:
    """Class 'DocsWriter' testing."""

    def _read_rmap(self, path):
        """Read and return a register map from `path`.

        BUG FIX: previously the parsed map was built and dropped (no return),
        leaving this helper as dead code.
        """
        reader = RegisterMapReader()
        rmap = reader(path)
        return rmap

    def test_md_write(self, tmpdir):
        """Test of creating markdown regmap file."""
        rmap_path = 'tests/data/map.json'
        md_path = str(tmpdir.join('regs.md'))
        print('rmap_path:', rmap_path)
        print('md_path:', md_path)
        # read regmap via the helper (equivalent to RegisterMapReader()(rmap_path))
        rmap = self._read_rmap(rmap_path)
        # write output file
        DocsWriter()(md_path, rmap)
        # read file and verify
        with open(md_path, 'r') as f:
            raw_str = ''.join(f.readlines())
        assert '## Register map' in raw_str
        assert 'Back to [Register map](#register-map).' in raw_str
| 31.862903
| 74
| 0.627436
|
4a0ae653fbe1fccb883f83aba5065f77ce2a79de
| 9,589
|
py
|
Python
|
demo_cli.py
|
DJKINGASSASSIN/Real-Time-Voice-Cloning
|
34bb74b0286840cd093a8d9f71e5ae89da4ce3b4
|
[
"MIT"
] | 35,818
|
2019-06-12T17:16:29.000Z
|
2022-03-31T21:02:16.000Z
|
demo_cli.py
|
DJKINGASSASSIN/Real-Time-Voice-Cloning
|
34bb74b0286840cd093a8d9f71e5ae89da4ce3b4
|
[
"MIT"
] | 973
|
2019-06-12T17:16:35.000Z
|
2022-03-31T10:35:41.000Z
|
demo_cli.py
|
DJKINGASSASSIN/Real-Time-Voice-Cloning
|
34bb74b0286840cd093a8d9f71e5ae89da4ce3b4
|
[
"MIT"
] | 6,583
|
2019-06-12T21:14:18.000Z
|
2022-03-31T03:54:10.000Z
|
import argparse
import os
from pathlib import Path
import librosa
import numpy as np
import soundfile as sf
import torch
from encoder import inference as encoder
from encoder.params_model import model_embedding_size as speaker_embedding_size
from synthesizer.inference import Synthesizer
from utils.argutils import print_args
from utils.default_models import ensure_default_models
from vocoder import inference as vocoder
if __name__ == '__main__':
    # Command-line entry point: parse model paths and runtime flags.
    parser = argparse.ArgumentParser(
        formatter_class=argparse.ArgumentDefaultsHelpFormatter
    )
    parser.add_argument("-e", "--enc_model_fpath", type=Path,
                        default="saved_models/default/encoder.pt",
                        help="Path to a saved encoder")
    parser.add_argument("-s", "--syn_model_fpath", type=Path,
                        default="saved_models/default/synthesizer.pt",
                        help="Path to a saved synthesizer")
    parser.add_argument("-v", "--voc_model_fpath", type=Path,
                        default="saved_models/default/vocoder.pt",
                        help="Path to a saved vocoder")
    parser.add_argument("--cpu", action="store_true", help=\
        "If True, processing is done on CPU, even when a GPU is available.")
    parser.add_argument("--no_sound", action="store_true", help=\
        "If True, audio won't be played.")
    parser.add_argument("--seed", type=int, default=None, help=\
        "Optional random number seed value to make toolbox deterministic.")
    args = parser.parse_args()
    arg_dict = vars(args)
    print_args(args, parser)
    # Hide GPUs from Pytorch to force CPU processing
    if arg_dict.pop("cpu"):
        os.environ["CUDA_VISIBLE_DEVICES"] = "-1"

    print("Running a test of your configuration...\n")

    # Report the execution device (debugging aid only; no behavior depends on it).
    if torch.cuda.is_available():
        device_id = torch.cuda.current_device()
        gpu_properties = torch.cuda.get_device_properties(device_id)
        ## Print some environment information (for debugging purposes)
        print("Found %d GPUs available. Using GPU %d (%s) of compute capability %d.%d with "
            "%.1fGb total memory.\n" %
            (torch.cuda.device_count(),
            device_id,
            gpu_properties.name,
            gpu_properties.major,
            gpu_properties.minor,
            gpu_properties.total_memory / 1e9))
    else:
        print("Using CPU for inference.\n")

    ## Load the models one by one (downloading defaults first if missing).
    print("Preparing the encoder, the synthesizer and the vocoder...")
    ensure_default_models(Path("saved_models"))
    encoder.load_model(args.enc_model_fpath)
    synthesizer = Synthesizer(args.syn_model_fpath)
    vocoder.load_model(args.voc_model_fpath)
## Run a test
print("Testing your configuration with small inputs.")
# Forward an audio waveform of zeroes that lasts 1 second. Notice how we can get the encoder's
# sampling rate, which may differ.
# If you're unfamiliar with digital audio, know that it is encoded as an array of floats
# (or sometimes integers, but mostly floats in this projects) ranging from -1 to 1.
# The sampling rate is the number of values (samples) recorded per second, it is set to
# 16000 for the encoder. Creating an array of length <sampling_rate> will always correspond
# to an audio of 1 second.
print("\tTesting the encoder...")
encoder.embed_utterance(np.zeros(encoder.sampling_rate))
# Create a dummy embedding. You would normally use the embedding that encoder.embed_utterance
# returns, but here we're going to make one ourselves just for the sake of showing that it's
# possible.
embed = np.random.rand(speaker_embedding_size)
# Embeddings are L2-normalized (this isn't important here, but if you want to make your own
# embeddings it will be).
embed /= np.linalg.norm(embed)
# The synthesizer can handle multiple inputs with batching. Let's create another embedding to
# illustrate that
embeds = [embed, np.zeros(speaker_embedding_size)]
texts = ["test 1", "test 2"]
print("\tTesting the synthesizer... (loading the model will output a lot of text)")
mels = synthesizer.synthesize_spectrograms(texts, embeds)
# The vocoder synthesizes one waveform at a time, but it's more efficient for long ones. We
# can concatenate the mel spectrograms to a single one.
mel = np.concatenate(mels, axis=1)
# The vocoder can take a callback function to display the generation. More on that later. For
# now we'll simply hide it like this:
no_action = lambda *args: None
print("\tTesting the vocoder...")
# For the sake of making this test short, we'll pass a short target length. The target length
# is the length of the wav segments that are processed in parallel. E.g. for audio sampled
# at 16000 Hertz, a target length of 8000 means that the target audio will be cut in chunks of
# 0.5 seconds which will all be generated together. The parameters here are absurdly short, and
# that has a detrimental effect on the quality of the audio. The default parameters are
# recommended in general.
vocoder.infer_waveform(mel, target=200, overlap=50, progress_callback=no_action)
print("All test passed! You can now synthesize speech.\n\n")
## Interactive speech generation
print("This is a GUI-less example of interface to SV2TTS. The purpose of this script is to "
"show how you can interface this project easily with your own. See the source code for "
"an explanation of what is happening.\n")
print("Interactive generation loop")
num_generated = 0
while True:
try:
# Get the reference audio filepath
message = "Reference voice: enter an audio filepath of a voice to be cloned (mp3, " \
"wav, m4a, flac, ...):\n"
in_fpath = Path(input(message).replace("\"", "").replace("\'", ""))
## Computing the embedding
# First, we load the wav using the function that the speaker encoder provides. This is
# important: there is preprocessing that must be applied.
# The following two methods are equivalent:
# - Directly load from the filepath:
preprocessed_wav = encoder.preprocess_wav(in_fpath)
# - If the wav is already loaded:
original_wav, sampling_rate = librosa.load(str(in_fpath))
preprocessed_wav = encoder.preprocess_wav(original_wav, sampling_rate)
print("Loaded file succesfully")
# Then we derive the embedding. There are many functions and parameters that the
# speaker encoder interfaces. These are mostly for in-depth research. You will typically
# only use this function (with its default parameters):
embed = encoder.embed_utterance(preprocessed_wav)
print("Created the embedding")
## Generating the spectrogram
text = input("Write a sentence (+-20 words) to be synthesized:\n")
# If seed is specified, reset torch seed and force synthesizer reload
if args.seed is not None:
torch.manual_seed(args.seed)
synthesizer = Synthesizer(args.syn_model_fpath)
# The synthesizer works in batch, so you need to put your data in a list or numpy array
texts = [text]
embeds = [embed]
# If you know what the attention layer alignments are, you can retrieve them here by
# passing return_alignments=True
specs = synthesizer.synthesize_spectrograms(texts, embeds)
spec = specs[0]
print("Created the mel spectrogram")
## Generating the waveform
print("Synthesizing the waveform:")
# If seed is specified, reset torch seed and reload vocoder
if args.seed is not None:
torch.manual_seed(args.seed)
vocoder.load_model(args.voc_model_fpath)
# Synthesizing the waveform is fairly straightforward. Remember that the longer the
# spectrogram, the more time-efficient the vocoder.
generated_wav = vocoder.infer_waveform(spec)
## Post-generation
# There's a bug with sounddevice that makes the audio cut one second earlier, so we
# pad it.
generated_wav = np.pad(generated_wav, (0, synthesizer.sample_rate), mode="constant")
# Trim excess silences to compensate for gaps in spectrograms (issue #53)
generated_wav = encoder.preprocess_wav(generated_wav)
# Play the audio (non-blocking)
if not args.no_sound:
import sounddevice as sd
try:
sd.stop()
sd.play(generated_wav, synthesizer.sample_rate)
except sd.PortAudioError as e:
print("\nCaught exception: %s" % repr(e))
print("Continuing without audio playback. Suppress this message with the \"--no_sound\" flag.\n")
except:
raise
# Save it on the disk
filename = "demo_output_%02d.wav" % num_generated
print(generated_wav.dtype)
sf.write(filename, generated_wav.astype(np.float32), synthesizer.sample_rate)
num_generated += 1
print("\nSaved output as %s\n\n" % filename)
except Exception as e:
print("Caught exception: %s" % repr(e))
print("Restarting\n")
| 45.880383
| 117
| 0.653457
|
4a0ae6e4d8b11a3284c792fb299479b8510f89be
| 16,414
|
py
|
Python
|
lib/enthought/traits/ui/item.py
|
mattfoster/matplotlib
|
0b47697b19b77226c633ec6a3d74a2199a153315
|
[
"PSF-2.0",
"BSD-3-Clause"
] | 1
|
2016-05-08T18:33:12.000Z
|
2016-05-08T18:33:12.000Z
|
lib/enthought/traits/ui/item.py
|
mattfoster/matplotlib
|
0b47697b19b77226c633ec6a3d74a2199a153315
|
[
"PSF-2.0",
"BSD-3-Clause"
] | null | null | null |
lib/enthought/traits/ui/item.py
|
mattfoster/matplotlib
|
0b47697b19b77226c633ec6a3d74a2199a153315
|
[
"PSF-2.0",
"BSD-3-Clause"
] | null | null | null |
#------------------------------------------------------------------------------
# Copyright (c) 2005, Enthought, Inc.
# All rights reserved.
#
# This software is provided without warranty under the terms of the BSD
# license included in enthought/LICENSE.txt and may be redistributed only
# under the conditions described in the aforementioned license. The license
# is also available online at http://www.enthought.com/licenses/BSD.txt
# Thanks for using Enthought open source!
#
# Author: David C. Morrill
# Date: 10/07/2004
# Symbols defined: Item
#------------------------------------------------------------------------------
"""Defines the Item class, which is used to represent a single item within
a Traits-based user interface.
"""
#-------------------------------------------------------------------------------
# Imports:
#-------------------------------------------------------------------------------
import re
from string \
import find, rfind
from enthought.traits.api \
import Instance, Str, Int, Range, Constant, false, Callable, Delegate
from enthought.traits.trait_base \
import user_name_for
from view_element \
import ViewSubElement
from ui_traits \
import container_delegate
from editor_factory \
import EditorFactory
#-------------------------------------------------------------------------------
#  Constants:
#-------------------------------------------------------------------------------

# Pattern matching a run of digits; used by Item.is_spacer() to treat an
# all-digit item name as a spacer.
all_digits = re.compile( r'\d+' )

# Pattern for finding size information embedded in an item description
# string (e.g. 'name<100,200>'). Greedy + DOTALL, so the last '<...>' pair
# in the string is the one captured.
size_pat = re.compile( r"^(.*)<(.*)>(.*)$", re.MULTILINE | re.DOTALL )

# Pattern for finding tooltip information embedded in an item description
# string (e.g. 'name`tip text`').
tooltip_pat = re.compile( r"^(.*)`(.*)`(.*)$", re.MULTILINE | re.DOTALL )

#-------------------------------------------------------------------------------
#  Trait definitions:
#-------------------------------------------------------------------------------

# Reference to an EditorFactory (or None) used to create the item's editor:
ItemEditor = Instance( EditorFactory, allow_none = True )

# Amount of padding (in pixels, -15..15, default 0) to add around an item:
Padding = Range( -15, 15, 0, desc = 'amount of padding to add around item' )
#-------------------------------------------------------------------------------
# 'Item' class:
#-------------------------------------------------------------------------------
class Item ( ViewSubElement ):
    """ An element in a Traits-based user interface.

        An Item is normally constructed from a compact string description of
        the form::

            {id:}{object.}{name}{[label]}`tooltip`{#^}{$|@|*|~|;style}

        which __init__ parses into the corresponding traits (see __init__
        for the parse order).
    """

    #---------------------------------------------------------------------------
    #  Trait definitions:
    #---------------------------------------------------------------------------

    # A unique identifier for the item. If not set, it defaults to the value
    # of **name** (see get_id()).
    id = Str

    # User interface label for the item in the GUI. If this attribute is not
    # set, the label is the value of **name** with slight modifications:
    # underscores are replaced by spaces, and the first letter is capitalized.
    # If an item's **name** is not specified, its label is displayed as
    # static text, without any editor widget.
    label = Str

    # Name of the trait the item is editing
    name = Str

    # Help text describing the purpose of the item. The built-in help handler
    # displays this text in a pop-up window if the user clicks the widget's
    # label. View-level help displays the help text for all items in a view.
    # If this attribute is not set, the built-in help handler generates a
    # description based on the trait definition.
    help = Str

    # The HasTraits object whose trait attribute the item is editing
    # (delegated to the containing Group/View):
    object = container_delegate

    # Presentation style for the item (delegated to the container)
    style = container_delegate

    # Docking style for the item (delegated to the container)
    dock = container_delegate

    # Image to display on notebook tabs (delegated to the container)
    image = container_delegate

    # Category of elements dragged from view (delegated to the container)
    export = container_delegate

    # Should a label be displayed for the item? Defaults to the container's
    # 'show_labels' setting.
    show_label = Delegate( 'container', 'show_labels' )

    # Editor to use for the item (an EditorFactory instance or None)
    editor = ItemEditor

    # Should the item use extra space? If set to True, the widget expands to
    # fill any extra space that is available in the display. If set to True
    # for more than one item in the same View, any extra space is divided
    # between them.
    resizable = false

    # Should the item use extra space along its Group's layout orientation?
    springy = false

    # Should the item's label use emphasized text? If the label is not shown,
    # this attribute is ignored.
    emphasized = false

    # Should the item receive focus initially?
    has_focus = false

    # Pre-condition for including the item in the display. If the expression
    # evaluates to False, the item is not defined in the display. Conditions
    # for **defined_when** are evaluated only once, when the display is first
    # constructed. Use this attribute for conditions based on attributes that
    # vary from object to object, but that do not change over time. For example,
    # displaying a 'maiden_name' item only for female employees in a company
    # database.
    defined_when = Str

    # Pre-condition for showing the item. If the expression evaluates to False,
    # the widget is not visible (and disappears if it was previously visible).
    # If the value evaluates to True, the widget becomes visible. All
    # **visible_when** conditions are checked each time that any trait value
    # is edited in the display. Therefore, you can use **visible_when**
    # conditions to hide or show widgets in response to user input.
    visible_when = Str

    # Pre-condition for enabling the item. If the expression evaluates to False,
    # the widget is disabled, that is, it does not accept input. All
    # **enabled_when** conditions are checked each time that any trait value
    # is edited in the display. Therefore, you can use **enabled_when**
    # conditions to enable or disable widgets in response to user input.
    enabled_when = Str

    # Amount of extra space, in pixels, to add around the item. Values must be
    # integers between -15 and 15. Use negative values to subtract from the
    # default spacing.
    padding = Padding

    # Tooltip to display over the item, when the mouse pointer is left idle
    # over the widget. Make this text as concise as possible; use the **help**
    # attribute to provide more detailed information.
    tooltip = Str

    # A Callable to use for formatting the contents of the item. This function
    # or method is called to create the string representation of the trait value
    # to be edited. If the widget does not use a string representation, this
    # attribute is ignored.
    format_func = Callable

    # Python format string to use for formatting the contents of the item.
    # The format string is applied to the string representation of the trait
    # value before it is displayed in the widget. This attribute is ignored if
    # the widget does not use a string representation, or if the
    # **format_func** is set.
    format_str = Str

    # Requested width of the editor (in pixels). The actual displayed width
    # is at least the maximum of **width** and the optimal width of the widget
    # as calculated by the GUI toolkit. Specify a negative value to ignore the
    # toolkit's optimal width. For example, use -50 to force a width of 50
    # pixels. The default value of -1 ensures that the toolkit's optimal width
    # is used.
    width = Int( -1 )

    # Requested height of the editor (in pixels). The actual displayed height
    # is at least the maximum of **height** and the optimal height of the widget
    # as calculated by the GUI toolkit. Specify a negative value to ignore the
    # toolkit's optimal height. For example, use -50 to force a height of 50
    # pixels. The default value of -1 ensures that the toolkit's optimal height
    # is used.
    height = Int( -1 )

    #---------------------------------------------------------------------------
    #  Initialize the object:
    #---------------------------------------------------------------------------

    def __init__ ( self, value = None, **traits ):
        """ Initializes the item object.

            If *value* is given, it must be a string in the format::

                {id:}{object.}{name}{[label]}`tooltip`{#^}{$|@|*|~|;style}

            The components are parsed in a fixed order (label, style, size,
            tooltip, option flags, id, object) and assigned to the
            corresponding traits; whatever remains becomes **name**. The
            parse helpers (_parse_label, _parse_style, _split, _option) are
            inherited -- presumably from ViewSubElement; not visible here.
        """
        # NOTE(review): super( ViewSubElement, ... ) skips ViewSubElement's
        # own __init__ and calls its base class's instead -- looks deliberate
        # but worth confirming (the usual idiom would be super( Item, ... )).
        super( ViewSubElement, self ).__init__( **traits )
        if value is None:
            return
        if not isinstance(value, basestring):
            raise TypeError, ("The argument to Item must be a string of the "
                 "form: {id:}{object.}{name}{[label]}`tooltip`{#^}{$|@|*|~|;style}")
        # Parse order matters: each step strips its component from 'value'
        # before the next step runs.
        value, empty = self._parse_label( value )
        if empty:
            self.show_label = False
        value = self._parse_style( value )
        value = self._parse_size( value )
        value = self._parse_tooltip( value )
        value = self._option( value, '#',  'resizable',  True )
        value = self._option( value, '^',  'emphasized', True )
        value = self._split( 'id',     value, ':', find,  0, 1 )
        value = self._split( 'object', value, '.', find,  0, 1 )
        if value != '':
            self.name = value

    #---------------------------------------------------------------------------
    #  Returns whether or not the object is replaceable by an Include object:
    #---------------------------------------------------------------------------

    def is_includable ( self ):
        """ Returns a Boolean indicating whether the object is replaceable by an
            Include object (i.e. whether it has a non-empty **id**).
        """
        return (self.id != '')

    #---------------------------------------------------------------------------
    #  Returns whether or not the Item represents a spacer or separator:
    #---------------------------------------------------------------------------

    def is_spacer ( self ):
        """ Returns True if the item represents a spacer or separator.

            An empty name, the name '_', or an all-digit name all count as
            spacers/separators.
        """
        name = self.name.strip()
        return ((name == '') or (name == '_') or
                (all_digits.match( name ) is not None))

    #---------------------------------------------------------------------------
    #  Gets the help text associated with the Item in a specified UI:
    #---------------------------------------------------------------------------

    def get_help ( self, ui ):
        """ Gets the help text associated with the Item in a specified UI.

            Returns None for spacers/separators or when no **help** text was
            set. (The *ui* argument is currently unused.)
        """
        # Return 'None' if the Item is a separator or spacer:
        if self.is_spacer():
            return None

        # Otherwise, it must be a trait Item:
        return self.help or None

    #---------------------------------------------------------------------------
    #  Gets the label to use for a specified Item in a specified UI:
    #---------------------------------------------------------------------------

    def get_label ( self, ui ):
        """ Gets the label to use for a specified Item.

            Resolution order: None for spacers; the explicit **label** trait
            if set; otherwise a label derived from the trait definition's
            'label' metadata in *ui*'s context object. String metadata may
            use a '...' prefix/suffix to splice in the default user name;
            callable metadata is invoked as tlabel(object, name, label).
        """
        # Return 'None' if the Item is a separator or spacer:
        if self.is_spacer():
            return None

        label = self.label
        if label != '':
            return label

        name   = self.name
        object = ui.context[ self.object ]
        trait  = object.base_trait( name )
        label  = user_name_for( name )
        tlabel = trait.label
        if tlabel is None:
            return label
        if isinstance(tlabel, basestring):
            # '...suffix' appends to the default name; 'prefix...' prepends:
            if tlabel[0:3] == '...':
                return label + tlabel[3:]
            if tlabel[-3:] == '...':
                return tlabel[:-3] + label
            # NOTE(review): self.label is known to be '' at this point (it
            # was returned above if non-empty), so this branch is dead code:
            if self.label != '':
                return self.label
            return tlabel
        # Non-string metadata is assumed callable:
        return tlabel( object, name, label )

    #---------------------------------------------------------------------------
    #  Returns an id used to identify the item:
    #---------------------------------------------------------------------------

    def get_id ( self ):
        """ Returns an ID used to identify the item: **id** if set,
            otherwise **name**.
        """
        if self.id != '':
            return self.id

        return self.name

    #---------------------------------------------------------------------------
    #  Parses a '<width,height>' value from the string definition:
    #---------------------------------------------------------------------------

    def _parse_size ( self, value ):
        """ Parses a '<width,height>' value from the string definition.

            '<width>' alone sets only **width**. Returns *value* with the
            '<...>' section removed.
        """
        match = size_pat.match( value )
        if match is not None:
            data  = match.group( 2 )
            value = match.group( 1 ) + match.group( 3 )
            col   = data.find( ',' )
            if col < 0:
                self._set_int( 'width', data )
            else:
                self._set_int( 'width',  data[ : col ] )
                self._set_int( 'height', data[ col + 1: ] )
        return value

    #---------------------------------------------------------------------------
    #  Parses a '`tooltip`' value from the string definition:
    #---------------------------------------------------------------------------

    def _parse_tooltip ( self, value ):
        """ Parses a *tooltip* value from the string definition and returns
            *value* with the backquoted section removed.
        """
        match = tooltip_pat.match( value )
        if match is not None:
            self.tooltip = match.group( 2 )
            value        = match.group( 1 ) + match.group( 3 )
        return value

    #---------------------------------------------------------------------------
    #  Sets a specified trait to a specified string converted to an integer:
    #---------------------------------------------------------------------------

    def _set_int ( self, name, value ):
        """ Sets trait *name* to *value* converted to an integer; an
            empty/whitespace *value* is ignored. Raises ValueError for a
            non-numeric, non-empty *value*.
        """
        value = value.strip()
        if value != '':
            setattr( self, name, int( value ) )

    #---------------------------------------------------------------------------
    #  Returns a 'pretty print' version of the Item:
    #---------------------------------------------------------------------------

    def __repr__ ( self ):
        """ Returns a "pretty print" version of the Item, in roughly the
            same 'id:object.name=label;style' mini-language accepted by
            __init__ (default values are suppressed by _repr_value).
        """
        return '"%s%s%s%s%s"' % ( self._repr_value( self.id, '', ':' ),
                                  self._repr_value( self.object, '', '.',
                                                    'object' ),
                                  self._repr_value( self.name ),
                                  self._repr_value( self.label,'=' ),
                                  self._repr_value( self.style, ';', '',
                                                    'simple' ) )
#-------------------------------------------------------------------------------
# 'Label' class:
#-------------------------------------------------------------------------------
class Label ( Item ):
    """ An item consisting solely of static label text.
    """

    def __init__ ( self, label ):
        """ Creates the label item.

            *label* becomes the item's **label** trait; since no trait
            **name** is supplied, the label is displayed as static text
            without an editor widget.
        """
        super( Label, self ).__init__( label = label )
#-------------------------------------------------------------------------------
# 'Heading' class:
#-------------------------------------------------------------------------------
class Heading ( Label ):
    """ A Label rendered as a fancy heading.
    """

    # The 'style' trait is pinned (Constant) to the fancy 'custom' style so
    # it cannot be overridden by the container delegate:
    style = Constant( 'custom' )
#-------------------------------------------------------------------------------
# 'Spring' class:
#-------------------------------------------------------------------------------
class Spring ( Item ):
    """ An item that is a layout "spring": an invisible element that soaks
        up extra space along its Group's layout orientation.
    """

    #---------------------------------------------------------------------------
    #  Trait definitions:
    #---------------------------------------------------------------------------

    # Name of the trait the item is editing. 'spring' is presumably treated
    # specially by the toolkit layout code -- TODO confirm against the UI
    # backends.
    name = 'spring'

    # Should a label be displayed? Never, for a spring.
    show_label = false

    # Editor to use for the item: a NullEditor, since a spring renders
    # nothing.
    editor = Instance( 'enthought.traits.ui.api.NullEditor', () )

    # Should the item use extra space along its Group's layout orientation?
    # Always True -- this is the whole point of a spring.
    springy = True

# A pre-defined module-level spring instance, for convenience:
spring = Spring()
| 39.839806
| 84
| 0.495614
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.