id
stringlengths 3
8
| content
stringlengths 100
981k
|
|---|---|
97086
|
import pendulum
import pytest
import prefect.schedules.filters as filters
def test_on_datetime_0():
    """An exact second-precise match passes the filter."""
    target = pendulum.datetime(2019, 1, 2, 3, 4, 5)
    assert filters.on_datetime(target)(pendulum.datetime(2019, 1, 2, 3, 4, 5))


def test_on_datetime_1():
    """An exact date-only match passes the filter."""
    target = pendulum.datetime(2019, 1, 2)
    assert filters.on_datetime(target)(pendulum.datetime(2019, 1, 2))


def test_on_datetime_2():
    """A candidate carrying extra seconds does not match a minute-precise target."""
    check = filters.on_datetime(pendulum.datetime(2019, 1, 2, 3, 4))
    assert not check(pendulum.datetime(2019, 1, 2, 3, 4, 5))


def test_on_datetime_3():
    """A candidate missing the seconds does not match a second-precise target."""
    check = filters.on_datetime(pendulum.datetime(2019, 1, 2, 3, 4, 5))
    assert not check(pendulum.datetime(2019, 1, 2, 3, 4))
@pytest.mark.parametrize(
    "test_datetimes",
    [
        (pendulum.datetime(2019, 1, 1), pendulum.datetime(2019, 1, 2), True),
        (pendulum.datetime(2019, 1, 1), pendulum.datetime(2019, 1, 1), False),
        (pendulum.datetime(2019, 1, 2), pendulum.datetime(2019, 1, 2), False),
        (pendulum.datetime(2019, 1, 1, 6), pendulum.datetime(2019, 1, 1, 6), True),
        (
            pendulum.datetime(2019, 1, 1, 5, 59),
            pendulum.datetime(2019, 1, 1, 6, 1),
            True,
        ),
    ],
)
def test_between_datetimes(test_datetimes):
    """between_datetimes is inclusive of both endpoints."""
    start, end, expected = test_datetimes
    check = filters.between_datetimes(start, end)
    assert check(pendulum.datetime(2019, 1, 1, 6)) is expected
def test_on_date():
    """on_date(month, day) matches that calendar date in any year, at any time of day."""
    check = filters.on_date(3, 4)
    matches = [
        pendulum.datetime(2019, 3, 4),
        pendulum.datetime(2019, 3, 4, 5, 6),
        pendulum.datetime(2034, 3, 4),
    ]
    misses = [
        pendulum.datetime(2019, 3, 5),
        pendulum.datetime(2034, 3, 5),
        pendulum.datetime(2034, 4, 4),
    ]
    assert all(check(dt) for dt in matches)
    assert not any(check(dt) for dt in misses)
@pytest.mark.parametrize(
    "test_dates",
    [
        ((1, 1, 12, 31), True),
        ((6, 1, 6, 1), True),
        ((5, 31, 6, 2), True),
        ((6, 2, 5, 31), False),
        ((6, 2, 7, 1), False),
        ((11, 1, 7, 1), True),
    ],
)
def test_between_dates(test_dates):
    """between_dates is inclusive and supports ranges wrapping the year boundary."""
    bounds, expected = test_dates
    check = filters.between_dates(*bounds)
    assert check(pendulum.datetime(2019, 6, 1)) is expected
@pytest.mark.parametrize(
    "test_times",
    [
        (pendulum.datetime(2019, 1, 2, 4, 30), False),
        (pendulum.datetime(2019, 1, 2, 3, 30), True),
        (pendulum.datetime(2020, 1, 2, 3, 30), True),
        (pendulum.datetime(2019, 4, 5, 3, 30), True),
        (pendulum.datetime(2019, 4, 5, 3, 30, 1), False),
    ],
)
def test_at_time(test_times):
    """at_time matches the exact clock time regardless of the calendar date."""
    candidate, expected = test_times
    assert filters.at_time(pendulum.time(3, 30))(candidate) is expected
@pytest.mark.parametrize(
    "test_times",
    [
        (pendulum.time(5), pendulum.time(7), True),
        (pendulum.time(6), pendulum.time(6), True),
        (pendulum.time(7), pendulum.time(5), False),
        (pendulum.time(7), pendulum.time(6), True),
    ],
)
def test_between_times(test_times):
    """between_times is inclusive of its endpoints and may cross midnight."""
    start, end, expected = test_times
    check = filters.between_times(start, end)
    assert check(pendulum.datetime(2019, 6, 1, 6)) is expected
@pytest.mark.parametrize("dt", [pendulum.datetime(2019, 1, day) for day in range(1, 10)])
def test_is_weekday(dt):
    """is_weekday agrees with weekday() < 5 (Monday through Friday)."""
    expected = dt.weekday() < 5
    assert filters.is_weekday(dt) == expected


@pytest.mark.parametrize("dt", [pendulum.datetime(2019, 1, day) for day in range(1, 10)])
def test_is_weekend(dt):
    """is_weekend agrees with weekday() > 4 (Saturday and Sunday)."""
    expected = dt.weekday() > 4
    assert filters.is_weekend(dt) == expected
@pytest.mark.parametrize(
    "dates",
    [
        (pendulum.datetime(2019, 1, 20), False),
        (pendulum.datetime(2019, 1, 31), True),
        (pendulum.datetime(2019, 2, 27), False),
        (pendulum.datetime(2019, 2, 28), True),
        (pendulum.datetime(2020, 2, 28), False),
        (pendulum.datetime(2020, 2, 29), True),
    ],
)
def test_is_month_end(dates):
    """is_month_end handles short months and leap-year February correctly."""
    dt, expected = dates
    assert filters.is_month_end(dt) is expected
@pytest.mark.parametrize(
    "year",
    [
        1971,  # Before start of UTC
        1972,  # Start of UTC
        1992,  # Near past
        2020,  # Relative present
        2525,  # Distant future
    ],
)
# range(1, 13) covers every month; the previous range(1, 12) skipped December.
@pytest.mark.parametrize("month", list(range(1, 13)))
def test_is_month_start(year: int, month: int):
    """is_month_start is true only on the first day of the month.

    Day 28 is the largest day guaranteed to exist in every month, so it is a
    safe "late in the month" probe for every (year, month) combination.
    """
    filter_fn = filters.is_month_start
    assert filter_fn(dt=pendulum.datetime(year=year, month=month, day=1))
    assert not filter_fn(dt=pendulum.datetime(year=year, month=month, day=2))
    assert not filter_fn(dt=pendulum.datetime(year=year, month=month, day=15))
    assert not filter_fn(dt=pendulum.datetime(year=year, month=month, day=28))
def test_is_day_of_week():
    """is_day_of_week matches exactly one weekday, across several eras.

    Each base date below falls on the week's first day (offset 0); adding the
    day-of-week offset lands on the matching weekday of the same week, and the
    following day must then fail the filter.
    """
    years = {
        1971: {"month": 2, "day": 22},  # Before start of UTC
        1972: {"month": 6, "day": 12},  # Start of UTC
        1992: {"month": 6, "day": 8},  # Near past
        2020: {"month": 9, "day": 14},  # Relative present
        2525: {"month": 12, "day": 3},  # Distant future
    }
    # NOTE(review): only offsets 0-5 are exercised; offset 6 would push the
    # 1971 base date past the end of February (22 + 6 + 1 = 29), so covering
    # the seventh weekday requires different base dates.
    days_week = range(0, 6)

    def check_day_of_week(day_of_week: int):
        filter_fn = filters.is_day_of_week(day_of_week=day_of_week)
        for year, base in years.items():
            month = base["month"]
            # day_of_week doubles as an offset from the base day of the week.
            day = base["day"] + day_of_week
            next_day = day + 1
            assert filter_fn(dt=pendulum.datetime(year=year, month=month, day=day))
            assert not filter_fn(
                dt=pendulum.datetime(year=year, month=month, day=next_day)
            )

    for day in days_week:
        check_day_of_week(day)
|
97125
|
from vnpy.event import EventEngine, Event
from vnpy.trader.engine import MainEngine
from vnpy.trader.ui import QtWidgets, QtCore
from ..engine import APP_NAME, EVENT_RPC_LOG
class RpcManager(QtWidgets.QWidget):
    """Qt widget that controls the RPC server engine: start/stop buttons,
    address fields, and a read-only log monitor."""

    # Re-emitted on the GUI thread: the Qt signal marshals events arriving on
    # the EventEngine worker thread into the widget safely.
    signal_log = QtCore.pyqtSignal(Event)

    def __init__(self, main_engine: MainEngine, event_engine: EventEngine):
        """Store engine references, build the UI and hook up event handling."""
        super().__init__()

        self.main_engine = main_engine
        self.event_engine = event_engine
        self.rpc_engine = main_engine.get_engine(APP_NAME)

        self.init_ui()
        self.register_event()

    def init_ui(self):
        """Create and lay out all child widgets."""
        self.setWindowTitle("RPC service ")
        self.setFixedWidth(900)
        self.setFixedHeight(500)

        self.start_button = QtWidgets.QPushButton(" start up ")
        self.start_button.clicked.connect(self.start_server)

        self.stop_button = QtWidgets.QPushButton(" stop ")
        self.stop_button.clicked.connect(self.stop_server)
        self.stop_button.setEnabled(False)  # server is not running initially

        # Make both buttons uniformly larger than their natural size hint.
        for button in [self.start_button, self.stop_button]:
            hint = button.sizeHint()
            button.setFixedHeight(hint.height() * 2)
            button.setFixedWidth(hint.width() * 4)

        # Address fields pre-filled from the RPC engine's current settings.
        self.rep_line = QtWidgets.QLineEdit(self.rpc_engine.rep_address)
        self.rep_line.setFixedWidth(300)

        self.pub_line = QtWidgets.QLineEdit(self.rpc_engine.pub_address)
        self.pub_line.setFixedWidth(300)

        self.log_monitor = QtWidgets.QTextEdit()
        self.log_monitor.setReadOnly(True)

        form = QtWidgets.QFormLayout()
        form.addRow(" address request response ", self.rep_line)
        form.addRow(" event broadcast address ", self.pub_line)

        hbox = QtWidgets.QHBoxLayout()
        hbox.addLayout(form)
        hbox.addWidget(self.start_button)
        hbox.addWidget(self.stop_button)
        hbox.addStretch()

        vbox = QtWidgets.QVBoxLayout()
        vbox.addLayout(hbox)
        vbox.addWidget(self.log_monitor)
        self.setLayout(vbox)

    def register_event(self):
        """Route RPC log events from the event engine into the GUI thread."""
        self.signal_log.connect(self.process_log_event)
        self.event_engine.register(EVENT_RPC_LOG, self.signal_log.emit)

    def process_log_event(self, event: Event):
        """Append one log record (time + message) to the log monitor."""
        log = event.data
        msg = f"{log.time}\t{log.msg}"
        self.log_monitor.append(msg)

    def start_server(self):
        """Start the RPC server using the addresses currently entered."""
        rep_address = self.rep_line.text()
        pub_address = self.pub_line.text()

        result = self.rpc_engine.start(rep_address, pub_address)
        if result:
            # Toggle buttons only on success so a failed start can be retried.
            self.start_button.setEnabled(False)
            self.stop_button.setEnabled(True)

    def stop_server(self):
        """Stop the RPC server and re-enable the start button on success."""
        result = self.rpc_engine.stop()
        if result:
            self.start_button.setEnabled(True)
            self.stop_button.setEnabled(False)
|
97154
|
from insights.parsers.neutron_ovs_agent_log import NeutronOVSAgentLog
from insights.tests import context_wrap
from datetime import datetime
# Sample neutron OVS agent log: two WARNING lines at 14:39:25 followed by
# three INFO lines, the last two of which fall after 14:39:26.
LOG = """
2016-11-09 14:39:25.348 3153 WARNING oslo_config.cfg [-] Option "rabbit_password" from group "oslo_messaging_rabbit" is deprecated for removal. Its value may be silently ignored in the future.
2016-11-09 14:39:25.348 3153 WARNING oslo_config.cfg [-] Option "rabbit_userid" from group "oslo_messaging_rabbit" is deprecated for removal. Its value may be silently ignored in the future.
2016-11-09 14:39:25.352 3153 INFO ryu.base.app_manager [-] loading app neutron.plugins.ml2.drivers.openvswitch.agent.openflow.native.ovs_ryuapp
2016-11-09 14:39:27.171 3153 INFO ryu.base.app_manager [-] loading app ryu.app.ofctl.service
2016-11-09 14:39:27.190 3153 INFO ryu.base.app_manager [-] loading app ryu.controller.ofp_handler
"""


def test_neutron_ovs_agent_log():
    """Parser finds both WARNING lines and the two entries after the cutoff."""
    log = NeutronOVSAgentLog(context_wrap(LOG))
    assert len(log.get("WARNING")) == 2
    # Only the two 14:39:27 entries fall after the cutoff timestamp.
    assert len(list(log.get_after(datetime(2016, 11, 9, 14, 39, 26)))) == 2
|
97195
|
import pytest
import mitzasql.sql_parser.tokens as Token
from mitzasql.sql_parser.lexer import Lexer
from mitzasql.utils import token_is_parsed
def test_schema_object_is_tokenized():
    """Backtick-quoted identifiers lex as Name tokens unless prefixed by '@'."""
    raw = '''
`schema`.`object`
@`not a schema object`
'''
    tokens = list(Lexer(raw).tokenize())
    for expected in [(Token.Name, '`schema`'), (Token.Name, '`object`')]:
        assert token_is_parsed(expected, tokens)
    assert not token_is_parsed((Token.Name, '`not a schema object`'), tokens)
|
97202
|
import logging
from typing import Optional, Text, List, Any
from pybeerxml.fermentable import Fermentable
from pybeerxml.hop import Hop
from pybeerxml.mash import Mash
from pybeerxml.misc import Misc
from pybeerxml.yeast import Yeast
from pybeerxml.style import Style
from pybeerxml.water import Water
from pybeerxml.equipment import Equipment
from pybeerxml.utils import cast_to_bool, gravity_to_plato
logger = logging.getLogger(__name__)
# pylint: disable=too-many-instance-attributes, too-many-statements, too-many-public-methods
class Recipe:
    """A single BeerXML <RECIPE> record.

    Raw values parsed from the XML are stored as plain attributes; the
    og / fg / abv / ibu / color properties return the parsed value when one
    was supplied and otherwise fall back to an estimate derived from the
    ingredient lists (the *_calculated properties).
    """

    def __init__(self):
        self.name: Optional[Text] = None
        self.version: Optional[float] = None
        self.type: Optional[Text] = None
        self.brewer: Optional[Text] = None
        self.asst_brewer: Optional[Text] = None
        self.batch_size: Optional[float] = None
        self.boil_time: Optional[float] = None
        self.boil_size: Optional[float] = None
        self.efficiency: Optional[float] = None
        self.notes: Optional[Text] = None
        self.taste_notes: Optional[Text] = None
        self.taste_rating: Optional[float] = None
        self.fermentation_stages: Optional[Text] = None
        self.primary_age: Optional[float] = None
        self.primary_temp: Optional[float] = None
        self.secondary_age: Optional[float] = None
        self.secondary_temp: Optional[float] = None
        self.tertiary_age: Optional[float] = None
        self.tertiary_temp: Optional[float] = None
        # carbonation and carbonation_temp were each initialized twice in a
        # previous revision; the redundant duplicate assignments are removed.
        self.carbonation: Optional[float] = None
        self.carbonation_temp: Optional[float] = None
        self.age: Optional[float] = None
        self.age_temp: Optional[float] = None
        self.date: Optional[float] = None
        self._forced_carbonation: Optional[bool] = None
        # NOTE(review): per the BeerXML spec, PRIMING_SUGAR_NAME is text and
        # PRIMING_SUGAR_EQUIV is numeric; the original annotations appeared
        # swapped and have been corrected.
        self.priming_sugar_name: Optional[Text] = None
        self.priming_sugar_equiv: Optional[float] = None
        self.keg_priming_factor: Optional[float] = None

        # Recipe extension fields
        self.est_og: Optional[float] = None
        self.est_fg: Optional[float] = None
        self.est_color: Optional[float] = None
        self.ibu_method: Optional[Text] = None
        self.est_abv: Optional[float] = None
        self.actual_efficiency: Optional[float] = None
        self.calories: Optional[float] = None
        self.carbonation_used: Optional[Text] = None

        # Values from the recipe, which are calculated as a fallback
        self._abv: Optional[float] = None
        self._og: Optional[float] = None
        self._fg: Optional[float] = None
        self._ibu: Optional[float] = None
        self._color: Optional[float] = None

        # Ingredient / sub-record lists populated by the parser.
        self.style: Optional[Style] = None
        self.hops: List[Hop] = []
        self.yeasts: List[Yeast] = []
        self.fermentables: List[Fermentable] = []
        self.miscs: List[Misc] = []
        self.mash: Optional[Mash] = None
        self.waters: List[Water] = []
        self.equipment: Optional[Equipment] = None

    @property
    def abv(self):
        """Alcohol by volume (%): parsed value, or an estimate from OG/FG."""
        if self._abv is not None:
            return self._abv
        logger.debug("The value for ABV has been calculated from OG and FG")
        return self.abv_calculated

    @abv.setter
    def abv(self, value):
        self._abv = value

    @property
    def abv_calculated(self):
        """Estimate ABV (%) from calculated OG and FG."""
        return (
            ((1.05 * (self.og_calculated - self.fg_calculated)) / self.fg_calculated)
            / 0.79
            * 100.0
        )

    @abv_calculated.setter
    def abv_calculated(self, value):
        # Derived value; the no-op setter lets generic XML attribute
        # assignment proceed without raising.
        pass

    # Gravity degrees plato approximations
    @property
    def og_plato(self):
        """Original gravity expressed in degrees Plato."""
        return gravity_to_plato(self.og)

    @og_plato.setter
    def og_plato(self, value):
        pass

    @property
    def og_calculated_plato(self):
        """Calculated original gravity expressed in degrees Plato."""
        return gravity_to_plato(self.og_calculated)

    @og_calculated_plato.setter
    def og_calculated_plato(self, value):
        pass

    @property
    def fg_plato(self):
        """Final gravity expressed in degrees Plato."""
        return gravity_to_plato(self.fg)

    @fg_plato.setter
    def fg_plato(self, value):
        pass

    @property
    def fg_calculated_plato(self):
        """Calculated final gravity expressed in degrees Plato."""
        return gravity_to_plato(self.fg_calculated)

    @fg_calculated_plato.setter
    def fg_calculated_plato(self, value):
        pass

    @property
    def ibu(self):
        """Bitterness (IBU): parsed value, or a Tinseth estimate from the hops."""
        if self._ibu is not None:
            return self._ibu
        logger.debug(
            "The value for IBU has been calculated from the hop bill using Tinseth's formula"
        )
        return self.ibu_calculated

    @ibu.setter
    def ibu(self, value):
        self._ibu = value

    @property
    def ibu_calculated(self):
        """Sum of per-hop Tinseth bitterness over all boil additions."""
        ibu_method = "tinseth"
        _ibu = 0.0
        for hop in self.hops:
            # Only boil additions with a known alpha acid value contribute.
            if hop.alpha and hop.use.lower() == "boil":
                _ibu += hop.bitterness(ibu_method, self.og_calculated, self.batch_size)
        return _ibu

    @ibu_calculated.setter
    def ibu_calculated(self, value):
        pass

    # pylint: disable=invalid-name
    @property
    def og(self):
        """Original gravity: parsed value, or an estimate from fermentables."""
        if self._og is not None:
            return self._og
        logger.debug("The value for OG has been calculated from the mashing steps")
        return self.og_calculated

    @og.setter
    def og(self, value):
        self._og = value

    @property
    def og_calculated(self):
        """Estimate original gravity from the fermentable bill.

        Steeped grains contribute at 50% efficiency, mashed grains at 75%,
        everything else (extracts, sugars) at 100%.
        """
        _og = 1.0
        steep_efficiency = 50
        mash_efficiency = 75

        # Calculate gravities and color from fermentables
        for fermentable in self.fermentables:
            addition = fermentable.addition
            if addition == "steep":
                efficiency = steep_efficiency / 100.0
            elif addition == "mash":
                efficiency = mash_efficiency / 100.0
            else:
                efficiency = 1.0

            # Update gravities
            gu = fermentable.gu(self.batch_size) * efficiency
            gravity = gu / 1000.0
            _og += gravity
        return _og

    @og_calculated.setter
    def og_calculated(self, value):
        pass

    # pylint: disable=invalid-name
    @property
    def fg(self):
        """Final gravity: parsed value, or an estimate from OG and yeast."""
        if self._fg is not None:
            return self._fg
        logger.debug("The value for FG has been calculated from OG and yeast")
        return self.fg_calculated

    @fg.setter
    def fg(self, value):
        self._fg = value

    @property
    def fg_calculated(self):
        """Estimate final gravity using the most attenuative yeast.

        Falls back to 75% attenuation when no yeast declares a value.
        """
        attenuation = 0
        # Get attenuation for final gravity
        for yeast in self.yeasts:
            if yeast.attenuation is not None and yeast.attenuation > attenuation:
                attenuation = yeast.attenuation
        if attenuation == 0:
            attenuation = 75.0

        _fg = self.og_calculated - ((self.og_calculated - 1.0) * attenuation / 100.0)
        return _fg

    @fg_calculated.setter
    def fg_calculated(self, value):
        pass

    @property
    def color(self):
        """Color (SRM): parsed value, or a Morey-equation estimate."""
        if self._color is not None:
            return self._color
        logger.debug(
            "The value for color has been calculated from fermentables using the Morey Equation"
        )
        return self.color_calculated

    @color.setter
    def color(self, value):
        self._color = value

    @property
    def color_calculated(self):
        """Estimate SRM color via the Morey equation."""
        # Formula source: http://brewwiki.com/index.php/Estimating_Color
        mcu = 0.0
        for fermentable in self.fermentables:
            if fermentable.amount is not None and fermentable.color is not None:
                # 8.3454 is conversion factor from kg/L to lb/gal
                mcu += fermentable.amount * fermentable.color * 8.3454 / self.batch_size
        return 1.4922 * (mcu ** 0.6859)

    @color_calculated.setter
    def color_calculated(self, value):
        pass

    @property
    def forced_carbonation(self):
        """Whether the beer is force-carbonated (parsed via cast_to_bool)."""
        return self._forced_carbonation

    @forced_carbonation.setter
    def forced_carbonation(self, value: Any):
        self._forced_carbonation = cast_to_bool(value)
|
97204
|
# Pull in the generic helper that builds a Bazel rule around a protoc plugin.
load("@com_github_reboot_dev_pyprotoc_plugin//:rules.bzl", "create_protoc_plugin_rule")

# Rule that runs the eventuals gRPC code generator over proto sources,
# producing C++ stubs with .eventuals.h / .eventuals.cc extensions.
cc_eventuals_library = create_protoc_plugin_rule(
    "@com_github_3rdparty_eventuals_grpc//protoc-gen-eventuals:protoc-gen-eventuals", extensions=(".eventuals.h", ".eventuals.cc")
)
|
97252
|
import unittest
from brume.config import Config
class TestConfig(unittest.TestCase):
    """Tests for brume.Config."""

    def test_load(self):
        """Loading the default configuration file yields the expected keys."""
        conf = Config.load()
        assert conf['region'] == 'eu-west-1'
        for section in ('stack', 'templates'):
            assert isinstance(conf[section], dict)


if __name__ == '__main__':
    unittest.main()
|
97279
|
from fpds import FIELDS, CALCULATED_FIELDS
import os.path
from django.db import connection
from django.db import transaction
import re
class Loader():
    """Bulk-loads FPDS contract CSV extracts into Postgres via COPY."""

    def fields(self):
        """Column names: the raw FPDS fields followed by the calculated ones."""
        return [x[0] for x in FIELDS] + [x[0] for x in CALCULATED_FIELDS]

    def sql_str(self, infile):
        """Build the COPY statement for *infile* targeting usaspending_contract."""
        table = 'usaspending_contract'
        return self.sql_template_postgres(infile, table, self.fields())

    def print_sql(self, infile):
        """Print the COPY statement.

        Uses the function form of print, which behaves identically on
        Python 2 (single argument) and Python 3.
        """
        print(self.sql_str(infile))

    def sql_template_postgres(self, file_, table, fields):
        """Render a COPY statement loading *file_* into *table*_<fiscal year>.

        The four-digit fiscal year is extracted from the file name and
        appended to the table name.
        """
        # Raw string: '\d' is an invalid escape sequence in a plain string
        # literal on modern Pythons.
        fy = re.findall(r'\d{4}', file_)[0]
        table = table + '_' + fy
        return """
copy {1} \
({2}) \
FROM '{0}' \
DELIMITER '|' \
CSV QUOTE '"' \
NULL 'NULL' \
""".format(os.path.abspath(file_), table, ', '.join(fields))

    @transaction.commit_on_success
    def insert_fpds(self, infile):
        """Execute the COPY inside a transaction (commits only on success)."""
        sql = self.sql_str(infile)
        cursor = connection.cursor()
        cursor.execute(sql)
|
97335
|
import diffractsim

# Simulation backend: change the string to "CUDA" to use GPU acceleration.
diffractsim.set_backend("CPU")

from diffractsim import PolychromaticField, ApertureFromImage, cf, mm, cm

# White-light source (D65 illuminant, scaled 1.5x) over a 20 x 20 mm plane
# sampled at 1600 x 1600 points.
F = PolychromaticField(
    spectrum=1.5 * cf.illuminant_d65,
    extent_x=20 * mm,
    extent_y=20 * mm,
    Nx=1600,
    Ny=1600,
)

# Aperture mask loaded from an image, scaled to 15 x 15 mm on the plane.
F.add(ApertureFromImage("./apertures/diffraction_text.jpg", image_size=(15 * mm, 15 * mm), simulation = F))

# Propagate 150 cm downstream and render the diffraction pattern in color.
F.propagate(z=150*cm)
rgb = F.get_colors()
F.plot_colors(rgb, xlim=[-10*mm, 10*mm], ylim=[-10*mm, 10*mm])
|
97365
|
import logging

# Configure root logging before any other module-level code runs, so every
# later import logs in this format.
logging.basicConfig(
    format="%(asctime)s,%(msecs)d %(levelname)-8s [%(filename)s:%(lineno)d] %(message)s",
    datefmt="%Y-%m-%d:%H:%M:%S",
    level=logging.DEBUG,
)
logger = logging.getLogger(__name__)

import os
import uuid
import glob
import json
import time
import shlex
import shutil
from datetime import datetime
from diskcache import Deque, Index
import sys
from example import example
from . import QUEUE_DIR, QUEUE_NAME

# Env variable to configure allowed origins
ALLOWED_ORIGINS = os.getenv("ALLOWED_ORIGINS", "*")
# Polling intervals (seconds) for the prediction and manager loops.
PREDICTION_LOOP_SLEEP = float(os.getenv("PREDICTION_LOOP_SLEEP", "0.06"))
MANAGER_LOOP_SLEEP = float(os.getenv("MANAGER_LOOP_SLEEP", "8"))

# On-disk queue/index locations, namespaced by QUEUE_NAME under QUEUE_DIR.
_request_queue = os.path.join(QUEUE_DIR, f"{QUEUE_NAME}.request_queue")
_results_index = os.path.join(QUEUE_DIR, f"{QUEUE_NAME}.results_index")
_log_index = os.path.join(QUEUE_DIR, f"{QUEUE_NAME}.log_index")
_htmls_dir = os.path.join(QUEUE_DIR, ".htmls")

# Cross-process request queue and result/log indexes backed by diskcache.
REQUEST_QUEUE = Deque(directory=_request_queue)
RESULTS_INDEX = Index(_results_index)
LOG_INDEX = Index(_log_index)

logger.info(
    f"REQUEST_QUEUE: {_request_queue} RESULTS_INDEX: {_results_index} LOG_INDEX: {_log_index} _htmls_dir: {_htmls_dir}"
)

# clear if not
# No real use in making these configurable.
# Number of gunicorn workers to use
# Keep 0 for auto selection
WORKERS = int(os.getenv("WORKERS", "0"))
TIMEOUT = int(os.getenv("TIMEOUT", "120"))
# if BATCH_SIZE is not 0, will be used as default batch size.
BATCH_SIZE = int(os.getenv("BATCH_SIZE", "0"))
# Maximum examples allowed in client batch.
# 0 means unlimited
MAX_PER_CLIENT_BATCH = int(os.getenv("MAX_PER_CLIENT_BATCH", "0"))
# The loop will wait for time_per_example * MAX_WAIT for batching.
MAX_WAIT = float(os.getenv("MAX_WAIT", 0.2))
def warmup(predictor, example_input, n=3):
    """
    Run a few throwaway predictions so the model is hot before serving.

    :param predictor: prediction callable.
    :param example_input: representative input passed on each warmup call.
    :param n: number of warmup predictions to be run. defaults to 3
    """
    logger.info("Warming up .. ")
    remaining = n
    while remaining > 0:
        predictor(example_input)
        remaining -= 1
def find_optimum_batch_sizes(
    predictor,
    example_input,
    max_batch_search_sec=int(os.getenv("MAX_BATCH_SEARCH_SEC", "240")),
):
    """
    Finds the optimum batch size for a predictor function with the given example input.

    Candidate sizes are powers of two (1 .. 2**15), unless BATCH_SIZE pins a
    single size. Each candidate is timed over three runs; the search stops
    when a larger batch stops paying off (< ~5% improvement per example),
    when the predictor errors out (e.g. OOM), or when the time budget runs out.

    :param predictor: predictor function. Should have two inputs, a list of examples and batch size.
    :param example_input: example input for the predictor.
    :param max_batch_search_sec: max time to spend on batch size search in seconds.

    :return batch_size: optimal batch size to be used
    :return time_per_example: approx time taken per example.
    """
    time_per_example = None
    # Sentinel larger than any real timing so the first candidate always "improves".
    previous_time_per_example = pow(2, 64)

    possible_batch_sizes = range(16)
    if BATCH_SIZE:
        possible_batch_sizes = [BATCH_SIZE]

    if len(possible_batch_sizes) > 1:
        # Expand exponents into actual batch sizes: 1, 2, 4, ..., 32768.
        possible_batch_sizes = [
            pow(2, batch_size) for batch_size in possible_batch_sizes
        ]

    search_start_time = time.time()
    batch_size_to_time_per_example = {}
    for b_i, batch_size in enumerate(possible_batch_sizes):
        start = time.time()
        if start - search_start_time >= max_batch_search_sec:
            # Budget exhausted: fall back to the previously timed size.
            # NOTE(review): if this triggers on the very first iteration,
            # b_i - 1 wraps to the last (largest) candidate — confirm intent.
            batch_size = possible_batch_sizes[b_i - 1]
            logger.warning(
                f"Batch size set to {batch_size} because of MAX_BATCH_SEARCH_SEC: {max_batch_search_sec}"
            )
            break
        try:
            for _ in range(3):
                predictor(example_input * batch_size, batch_size=batch_size)
        except Exception as ex:
            # Predictor cannot handle this batch size; stop searching and
            # fall through to the halving below.
            logger.exception(ex, exc_info=True)
            logger.warning("Batch size set to 1 because of above exception")
            break
        time_per_example = (time.time() - start) / (3 * batch_size)
        logger.info(
            f"Time per sample for batch_size: {batch_size} is {time_per_example}"
        )
        batch_size_to_time_per_example[batch_size] = time_per_example
        # Persist per-size timings for observability.
        LOG_INDEX[
            "META.batch_size_to_time_per_example"
        ] = batch_size_to_time_per_example

        # Stop once a bigger batch is no longer at least ~5% faster per example.
        if time_per_example > previous_time_per_example * 0.95:
            break
        else:
            previous_time_per_example = time_per_example

    if not BATCH_SIZE:
        # The loop exits one step past the sweet spot, so back off by half.
        batch_size = int(max(1, batch_size / 2))
    else:
        batch_size = BATCH_SIZE
    logger.info(f"optimum batch size is {batch_size}")
    return batch_size, time_per_example
def write_webhook(unique_id, webhook):
    """
    writes webhook string (url) to corresponding file.

    :param unique_id: unique_id
    :param webhook: webhook string
    """
    if webhook and isinstance(webhook, str):
        # Context manager closes the handle promptly instead of relying on
        # garbage collection.
        # NOTE(review): RAM_DIR is not defined anywhere in this module's
        # visible configuration (other paths use QUEUE_DIR) — confirm where
        # it is supposed to come from.
        with open(os.path.join(RAM_DIR, unique_id + ".webhook"), "w") as f:
            f.write(webhook)
    else:
        if webhook is not None:
            logger.warning(f"id: {unique_id}, webhook: {webhook} is not valid.")
|
97378
|
import numpy as np
from scipy import special as special
from scipy.special import logsumexp
from mimo.abstraction import MixtureDistribution
from mimo.abstraction import BayesianMixtureDistribution
from mimo.distributions.bayesian import CategoricalWithDirichlet
from mimo.distributions.bayesian import CategoricalWithStickBreaking
from mimo.util.decorate import pass_obs_arg, pass_obs_and_labels_arg
from mimo.util.stats import sample_discrete_from_log
from mimo.util.data import batches
from sklearn.decomposition import PCA
from sklearn.preprocessing import StandardScaler, MinMaxScaler
from tqdm import tqdm
from pathos.helpers import mp
class MixtureOfGaussians(MixtureDistribution):
    """
    This class is for mixtures of Gaussians.

    `gating` provides mixture weights over len(components) clusters; each
    component exposes rvs(), log_likelihood() and max_likelihood().
    """

    def __init__(self, gating, components):
        assert len(components) > 0
        # The gating distribution must define exactly one weight per component.
        assert len(components) == gating.K

        self.gating = gating
        self.components = components

    @property
    def params(self):
        raise NotImplementedError

    @property
    def nb_params(self):
        # Total number of free parameters across components plus gating.
        return sum(c.nb_params for c in self.components) + self.gating.nb_params

    @property
    def size(self):
        # Number of mixture components.
        return len(self.components)

    @property
    def dim(self):
        # Dimensionality of the observation space.
        return self.components[0].dim

    def rvs(self, size=1):
        """Draw `size` samples; returns (observations, labels), shuffled."""
        z = self.gating.rvs(size)
        counts = np.bincount(z, minlength=self.size)

        obs = np.zeros((size, self.dim))
        for idx, (c, count) in enumerate(zip(self.components, counts)):
            obs[z == idx, ...] = c.rvs(count)

        # Shuffle so the samples are not grouped by component.
        perm = np.random.permutation(size)
        obs, z = obs[perm], z[perm]

        return obs, z

    def log_likelihood(self, obs):
        """Marginal log-likelihood of obs (a single array or a list of arrays)."""
        assert isinstance(obs, (np.ndarray, list))
        if isinstance(obs, list):
            return [self.log_likelihood(_obs) for _obs in obs]
        else:
            scores = self.log_scores(obs)
            # Rows containing NaNs are dropped before marginalizing over
            # components.
            return logsumexp(scores[~np.isnan(obs).any(axis=1)], axis=1)

    # Expectation-Maximization
    def log_scores(self, obs):
        """Unnormalized log responsibilities, shape (N, K)."""
        N, K = obs.shape[0], self.size

        # update, see Eq. 10.67 in Bishop
        component_scores = np.empty((N, K))
        for idx, c in enumerate(self.components):
            component_scores[:, idx] = c.log_likelihood(obs)
        # NaNs from degenerate components are zeroed in place.
        component_scores = np.nan_to_num(component_scores, copy=False)

        gating_scores = self.gating.log_likelihood(np.arange(K))
        score = gating_scores + component_scores
        return score

    def scores(self, obs):
        """Normalized responsibilities (row-wise softmax of log_scores)."""
        logr = self.log_scores(obs)
        # Subtract the row max before exponentiating for numerical stability.
        score = np.exp(logr - np.max(logr, axis=1, keepdims=True))
        score /= np.sum(score, axis=1, keepdims=True)
        return score

    def max_likelihood(self, obs, maxiter=1, progprint=True):
        """Fit by EM; returns the per-iteration log-likelihood trace."""
        # Use the worker index (when running inside a multiprocessing pool)
        # to position this process's progress bar.
        current = mp.current_process()
        if len(current._identity) > 0:
            pos = current._identity[0] - 1
        else:
            pos = 0

        obs = obs if isinstance(obs, list) else [obs]

        elbo = []
        with tqdm(total=maxiter, desc=f'EM #{pos + 1}',
                  position=pos, disable=not progprint) as pbar:
            for _ in range(maxiter):
                # Expectation step
                scores = [self.scores(_obs) for _obs in obs]

                # Maximization step
                for idx, c in enumerate(self.components):
                    c.max_likelihood([_obs for _obs in obs],
                                     [_score[:, idx] for _score in scores])

                # mixture weights
                self.gating.max_likelihood(None, scores)

                elbo.append(np.sum(self.log_likelihood(obs)))
                pbar.update(1)

        return elbo

    def plot(self, obs=None, color=None, legend=False, alpha=None):
        """Scatter-plot 2D observations colored by their most likely component."""
        obs = obs if isinstance(obs, list) else [obs]

        import matplotlib.pyplot as plt
        from matplotlib import cm

        artists = []

        # get colors
        cmap = cm.get_cmap('RdBu')
        if color is None:
            label_colors = dict((idx, cmap(v)) for idx, v in
                                enumerate(np.linspace(0, 1, self.size, endpoint=True)))
        else:
            label_colors = dict((idx, color) for idx in range(self.size))

        # Hard-assign each observation to its highest-responsibility cluster.
        labels = []
        for _obs in obs:
            labels.append(np.argmax(self.scores(_obs), axis=1))

        # plot data scatter
        for _obs, _label in zip(obs, labels):
            colorseq = [label_colors[l] for l in _label]
            artists.append(plt.scatter(_obs[:, 0], _obs[:, 1], c=colorseq, marker='+'))

        # plot parameters
        axis = plt.axis()
        for label, (c, w) in enumerate(zip(self.components, self.gating.probs)):
            # Fade components with negligible weight unless alpha is forced.
            artists.extend(c.plot(color=label_colors[label], label='%d' % label,
                                  alpha=min(0.25, 1. - (1. - w) ** 2) / 0.25
                                  if alpha is None else alpha))
        plt.axis(axis)

        # add legend
        # NOTE(review): `self.used_labels` is not defined on this class (only
        # on the Bayesian variant below), so this branch may raise — confirm.
        if legend and color is None:
            plt.legend([plt.Rectangle((0, 0), 1, 1, fc=c)
                        for i, c in label_colors.items() if i in self.used_labels],
                       [i for i in label_colors if i in self.used_labels], loc='best', ncol=2)

        plt.show()
        return artists
class BayesianMixtureOfGaussians(BayesianMixtureDistribution):
"""
This class is for a Bayesian mixtures of Gaussians.
"""
def __init__(self, gating, components):
assert len(components) > 0
self.gating = gating
self.components = components
self.likelihood = MixtureOfGaussians(gating=self.gating.likelihood,
components=[c.likelihood for c in self.components])
self.obs = []
self.labels = []
self.whitend = False
self.transform = None
@property
def used_labels(self):
assert self.has_data()
label_usages = sum(np.bincount(_label, minlength=self.likelihood.size)
for _label in self.labels)
used_labels, = np.where(label_usages > 0)
return used_labels
def add_data(self, obs, whiten=False,
transform_type='PCA',
labels_from_prior=False):
obs = obs if isinstance(obs, list) else [obs]
if whiten:
self.whitend = True
data = np.vstack([_obs for _obs in obs])
if transform_type == 'PCA':
self.transform = PCA(n_components=data.shape[-1], whiten=True)
elif transform_type == 'Standard':
self.transform = StandardScaler()
elif transform_type == 'MinMax':
self.transform = MinMaxScaler((-1., 1.))
else:
raise NotImplementedError
self.transform.fit(data)
for _obs in obs:
self.obs.append(self.transform.transform(_obs))
else:
self.obs = obs
if labels_from_prior:
for _obs in self.obs:
self.labels.append(self.likelihood.gating.rvs(len(_obs)))
else:
self.labels = self._resample_labels(self.obs)
def clear_data(self):
self.obs.clear()
self.labels.clear()
def clear_transform(self):
self.whitend = False
self.transform = None
def has_data(self):
return len(self.obs) > 0
# Expectation-Maximization
@pass_obs_arg
def max_aposteriori(self, obs, maxiter=1, progprint=True):
current = mp.current_process()
if len(current._identity) > 0:
pos = current._identity[0] - 1
else:
pos = 0
obs = obs if isinstance(obs, list) else [obs]
with tqdm(total=maxiter, desc=f'MAP #{pos + 1}',
position=pos, disable=not progprint) as pbar:
for i in range(maxiter):
# Expectation step
scores = []
for _obs in obs:
scores.append(self.likelihood.scores(_obs))
# Maximization step
for idx, c in enumerate(self.components):
c.max_aposteriori([_obs for _obs in obs],
[_score[:, idx] for _score in scores])
# mixture weights
self.gating.max_aposteriori(None, scores)
pbar.update(1)
# Gibbs sampling
@pass_obs_and_labels_arg
def resample(self, obs=None, labels=None,
maxiter=1, progprint=True):
current = mp.current_process()
if len(current._identity) > 0:
pos = current._identity[0] - 1
else:
pos = 0
with tqdm(total=maxiter, desc=f'Gibbs #{pos + 1}',
position=pos, disable=not progprint) as pbar:
for _ in range(maxiter):
self._resample_components(obs, labels)
self._resample_gating(labels)
labels = self._resample_labels(obs)
if self.has_data():
self.labels = labels
pbar.update(1)
def _resample_components(self, obs, labels):
for idx, c in enumerate(self.components):
c.resample(data=[_obs[_label == idx]
for _obs, _label in zip(obs, labels)])
def _resample_gating(self, labels):
self.gating.resample([_label for _label in labels])
def _resample_labels(self, obs):
labels = []
for _obs in obs:
score = self.likelihood.log_scores(_obs)
labels.append(sample_discrete_from_log(score, axis=1))
return labels
# Mean Field
def expected_scores(self, obs):
N, K = obs.shape[0], self.likelihood.size
# update, see Eq. 10.67 in Bishop
component_scores = np.empty((N, K))
for idx, c in enumerate(self.components):
component_scores[:, idx] = c.posterior.expected_log_likelihood(obs)
component_scores = np.nan_to_num(component_scores, copy=False)
if isinstance(self.gating, CategoricalWithDirichlet):
gating_scores = self.gating.posterior.expected_statistics()
elif isinstance(self.gating, CategoricalWithStickBreaking):
E_log_stick, E_log_rest = self.gating.posterior.expected_statistics()
gating_scores = E_log_stick + np.hstack((0, np.cumsum(E_log_rest)[:-1]))
else:
raise NotImplementedError
logr = gating_scores + component_scores
r = np.exp(logr - np.max(logr, axis=1, keepdims=True))
r /= np.sum(r, axis=1, keepdims=True)
return r
def meanfield_coordinate_descent(self, tol=1e-2, maxiter=250, progprint=True):
elbo = []
current = mp.current_process()
if len(current._identity) > 0:
pos = current._identity[0] - 1
else:
pos = 0
with tqdm(total=maxiter, desc=f'VI #{pos + 1}',
position=pos, disable=not progprint) as pbar:
for i in range(maxiter):
elbo.append(self.meanfield_update())
if elbo[-1] is not None and len(elbo) > 1:
if elbo[-1] < elbo[-2]:
print('WARNING: ELBO should always increase')
return elbo
if (elbo[-1] - elbo[-2]) < tol:
return elbo
pbar.update(1)
# print('WARNING: meanfield_coordinate_descent hit maxiter of %d' % maxiter)
return elbo
@pass_obs_arg
def meanfield_update(self, obs=None):
scores, labels = self._meanfield_update_sweep(obs)
if self.has_data():
self.labels = labels
return self.variational_lowerbound(obs, scores)
def _meanfield_update_sweep(self, obs):
scores, z = self._meanfield_update_labels(obs)
self._meanfield_update_parameters(obs, scores)
return scores, z
def _meanfield_update_labels(self, obs):
scores, labels = [], []
for _obs in obs:
scores.append(self.expected_scores(_obs))
labels.append(np.argmax(scores[-1], axis=1))
return scores, labels
def _meanfield_update_parameters(self, obs, scores):
self._meanfield_update_components(obs, scores)
self._meanfield_update_gating(scores)
def _meanfield_update_gating(self, scores):
self.gating.meanfield_update(None, scores)
def _meanfield_update_components(self, obs, scores):
for idx, c in enumerate(self.components):
c.meanfield_update([_obs for _obs in obs],
[_score[:, idx] for _score in scores])
# SVI
def meanfield_stochastic_descent(self, stepsize=1e-3, batchsize=128,
maxiter=500, progprint=True):
assert self.has_data()
current = mp.current_process()
if len(current._identity) > 0:
pos = current._identity[0] - 1
else:
pos = 0
prob = batchsize / float(sum(len(_obs) for _obs in self.obs))
with tqdm(total=maxiter, desc=f'SVI #{pos + 1}',
position=pos, disable=not progprint) as pbar:
for _ in range(maxiter):
for _obs in self.obs:
for batch in batches(batchsize, len(_obs)):
self.meanfield_sgdstep(_obs[batch, :], prob, stepsize)
pbar.update(1)
def meanfield_sgdstep(self, obs, prob, stepsize):
    """Single SVI step on one mini-batch (or list of mini-batches)."""
    obs = obs if isinstance(obs, list) else [obs]
    scores, _ = self._meanfield_update_labels(obs)
    self._meanfield_sgdstep_parameters(obs, scores, prob, stepsize)
    if self.has_data():
        # refresh hard labels over the full stored dataset
        for _obs in self.obs:
            self.labels.append(np.argmax(self.expected_scores(_obs), axis=1))
def _meanfield_sgdstep_parameters(self, obs, scores, prob, stepsize):
    # components first, then gating, mirroring the batch update order
    self._meanfield_sgdstep_components(obs, scores, prob, stepsize)
    self._meanfield_sgdstep_gating(scores, prob, stepsize)
def _meanfield_sgdstep_components(self, obs, scores, prob, stepsize):
    """SVI step for each component with its responsibility column."""
    for idx, c in enumerate(self.components):
        # list(obs) replaces the pointless element-wise copy [_obs for _obs in obs]
        c.meanfield_sgdstep(list(obs),
                            [_score[:, idx] for _score in scores],
                            prob, stepsize)
def _meanfield_sgdstep_gating(self, scores, prob, stepsize):
    # gating sees no direct observations, only the responsibilities
    self.gating.meanfield_sgdstep(None, scores, prob, stepsize)
def _variational_lowerbound_labels(self, scores):
    """Label terms of the ELBO for one responsibility matrix.

    Adds E_q[log p(z)] under the gating posterior plus the entropy of
    the variational label distribution q(z).
    """
    vlb = 0.
    if isinstance(self.gating, CategoricalWithDirichlet):
        vlb += np.sum(scores * self.gating.posterior.expected_log_likelihood())
    elif isinstance(self.gating, CategoricalWithStickBreaking):
        # cumscores[n, k] = total responsibility of components after k,
        # as required by the stick-breaking expectation
        cumscores = np.hstack((np.cumsum(scores[:, ::-1], axis=1)[:, -2::-1],
                               np.zeros((len(scores), 1))))
        E_log_stick, E_log_rest = self.gating.posterior.expected_log_likelihood()
        vlb += np.sum(scores * E_log_stick + cumscores * E_log_rest)
    # entropy of q(z); 0 * log(0) handled as zero via nansum
    errs = np.seterr(invalid='ignore', divide='ignore')
    vlb -= np.nansum(scores * np.log(scores))  # treats nans as zeros
    np.seterr(**errs)
    return vlb
def _variational_lowerbound_obs(self, obs, scores):
    """Observation term of the ELBO: sum_k r_k . E_q[log p(obs | k)]."""
    terms = [resp.dot(comp.posterior.expected_log_likelihood(obs))
             for comp, resp in zip(self.components, scores.T)]
    return np.sum(terms)
def variational_lowerbound(self, obs, scores):
    """Full ELBO: label terms + gating + components + observation terms."""
    vlb = 0.
    vlb += sum(self._variational_lowerbound_labels(_score) for _score in scores)
    vlb += self.gating.variational_lowerbound()
    vlb += sum(c.variational_lowerbound() for c in self.components)
    vlb += sum(self._variational_lowerbound_obs(_obs, _score)
               for _obs, _score in zip(obs, scores))
    # add in symmetry factor (if we're actually symmetric)
    if len(set(type(c) for c in self.components)) == 1:
        vlb += special.gammaln(self.likelihood.size + 1)
    return vlb
# Misc
def bic(self, obs=None):
    """Bayesian information criterion on the given observations."""
    assert obs is not None
    n_samples = sum(_obs.shape[0] for _obs in obs)
    log_lik = np.sum(self.likelihood.log_likelihood(obs))
    return -2. * log_lik + self.likelihood.nb_params * np.log(n_samples)
def aic(self):
    """Akaike information criterion on the stored data."""
    assert self.has_data()
    total_log_lik = sum(np.sum(self.likelihood.log_likelihood(_obs))
                        for _obs in self.obs)
    return 2. * self.likelihood.nb_params - 2. * total_log_lik
@pass_obs_arg
def plot(self, obs=None, color=None, legend=False, alpha=None):
    """Delegate plotting to the likelihood; whitened data is unsupported."""
    # I haven't implemented plotting
    # for whitend data, it's a hassle :D
    assert self.whitend is False
    artists = self.likelihood.plot(obs, color, legend, alpha)
    return artists
|
97379
|
class SearchParams(object):
    """Fluent builder for tweet-search parameters.

    Every setter returns ``self`` so calls can be chained, e.g.
    ``SearchParams().set_username("x").set_max_tweets(10)``.
    """

    def __init__(self):
        # declare every field up front so reading an unset attribute
        # yields None instead of raising AttributeError
        self.username = None
        self.since = None
        self.until = None
        self.querySearch = None
        self.lang = None
        self.maxTweets = 0

    def set_username(self, username):
        """Restrict the search to tweets authored by this user."""
        self.username = username
        return self

    def set_since(self, since):
        """Lower date bound of the search window."""
        self.since = since
        return self

    def set_until(self, until):
        """Upper date bound of the search window."""
        self.until = until
        return self

    def set_search(self, query_search):
        """Free-text search query."""
        self.querySearch = query_search
        return self

    def set_max_tweets(self, max_tweets):
        """Maximum number of tweets to fetch (0 = no explicit limit)."""
        self.maxTweets = max_tweets
        return self

    def set_lang(self, lang):
        """Tweet language filter (e.g. 'en')."""
        self.lang = lang
        return self
|
97386
|
import os
import time
import scipy
import numpy as np
import soundfile as sf
def mel_scale(freq):
    """Convert a frequency in Hz to the mel scale."""
    return 1127.0 * np.log(1.0 + freq / 700.0)
def inv_mel_scale(mel_freq):
    """Convert a mel-scale value back to a frequency in Hz."""
    return 700.0 * (np.exp(mel_freq / 1127.0) - 1.0)
class MelBank(object):
    """Triangular mel filter bank evaluated at FFT bin frequencies.

    low_freq/high_freq: filter-bank range in Hz.
    num_bins: number of triangular mel filters.
    sample_freq: sampling rate in Hz.
    frame_size: analysis frame length in milliseconds.
    """

    def __init__(self,
                 low_freq=20,
                 high_freq=8000,
                 num_bins=80,
                 sample_freq=16000,
                 frame_size=32):
        self.low_freq = low_freq
        self.high_freq = high_freq
        self.num_bins = num_bins
        self.sample_freq = sample_freq
        self.frame_size = frame_size
        # frame_size in millisecond; window_size is the frame length in
        # samples (int(): the float product breaks integer-only APIs below)
        self.window_size = int(self.sample_freq * 0.001 * self.frame_size)
        # positive FFT bin center frequencies, DC included, Nyquist dropped;
        # np.linspace needs an integer count, hence // (fixes float `num`)
        self.fft_freqs = np.linspace(
            0, self.sample_freq / 2, self.window_size // 2 + 1)[:-1]
        self.mel_low_freq = mel_scale(self.low_freq)
        self.mel_high_freq = mel_scale(self.high_freq)
        # num_bins + 2 edges -> overlapping triples (low, center, high)
        mel_freqs = np.linspace(
            self.mel_low_freq, self.mel_high_freq, self.num_bins + 2)
        # range() replaces Python-2-only xrange()
        self.mel_windows = [mel_freqs[i:i + 3] for i in range(self.num_bins)]

        def _weight(mel_window, mel_freq):
            # triangular weight of mel_freq within (low, center, high)
            mel_low, mel_center, mel_high = mel_window
            if mel_low < mel_freq < mel_high:
                if mel_freq <= mel_center:
                    return (mel_freq - mel_low) / (mel_center - mel_low)
                else:
                    return (mel_high - mel_freq) / (mel_high - mel_center)
            return 0

        self.mel_banks = [[_weight(window, mel_scale(freq))
                           for freq in self.fft_freqs]
                          for window in self.mel_windows]
        self.center_freqs = [inv_mel_scale(mel_freq)
                             for mel_freq in mel_freqs[1:-1]]
def hann(n):
    """Hann window of length n.

    n : length of the window
    Returns a float array w with w[x] = 0.5 * (1 - cos(2*pi*x / n)).
    Vectorized; also fixes the Python-2-only xrange of the original.
    """
    x = np.arange(n)
    return 0.5 * (1.0 - np.cos(2.0 * np.pi * x / n))
def stft_index(wave, frame_size_n, frame_starts_n, fft_size=None, win=None):
    """Short-time Fourier transform at explicit frame start indices.

    wave : 1-d float array
    frame_size_n : number of samples in each frame
    frame_starts_n : a list of int denoting starting sample index of each frame
    fft_size : number of frequency bins (defaults to frame_size_n)
    win : windowing function on amplitude; len(win) == frame_size_n
    Returns a complex array of shape (n_frames, fft_size // 2 + 1).
    """
    wave = np.asarray(wave)
    frame_starts_n = np.int32(frame_starts_n)
    if fft_size is None:
        fft_size = frame_size_n
    if win is None:
        win = np.sqrt(hann(frame_size_n))
    # sanity check
    if not wave.ndim == 1:
        raise ValueError('wave is not mono')
    elif not frame_starts_n.ndim == 1:
        raise ValueError('frame_starts_n is not 1-d')
    elif not len(win) == frame_size_n:
        # fix: actually format the message (the original passed the values
        # as extra exception args) and name the right variable
        raise ValueError('win does not match frame_size_n (%s != %s)'
                         % (len(win), frame_size_n))
    elif fft_size % 2 == 1:
        raise ValueError('odd ffts not yet implemented')
    elif np.min(frame_starts_n) < 0 or \
            np.max(frame_starts_n) > wave.shape[0] - frame_size_n:
        raise ValueError('Your starting indices contain values outside the allowed range')
    # np.fft.fft replaces the removed callable scipy.fft; floor division
    # keeps the slice bound an int under Python 3
    spec = np.asarray([np.fft.fft(wave[n:n + frame_size_n] * win,
                                  n=fft_size)[:fft_size // 2 + 1]
                       for n in frame_starts_n])
    return spec
def istft_index(spec, frame_size_n, frame_starts_n, fft_size=None, win=None, awin=None):
    """Inverse STFT with overlap-add at explicit frame start indices.

    spec : complex array of shape (n_frames, fft_size // 2 + 1)
    frame_size_n : number of samples in each frame
    frame_starts_n : a list of int denoting starting sample index of each frame
    fft_size : number of frequency bins (defaults to frame_size_n)
    win : windowing function on spectrogram; len(win) == frame_size_n
    awin : original analysis window on amplitude; len(awin) == frame_size_n
    Returns the reconstructed 1-d signal.
    """
    frame_starts_n = np.int32(frame_starts_n)
    if fft_size is None:
        fft_size = frame_size_n
    if win is None:
        win = np.sqrt(hann(frame_size_n))
    if awin is None:
        awin = np.sqrt(hann(frame_size_n))
    # analysis * synthesis window normalizes the overlap-add below
    pro_win = win * awin
    # sanity check
    if not frame_starts_n.ndim == 1:
        raise ValueError('frame_starts_n is not 1-d')
    elif not len(win) == frame_size_n:
        # fix: actually format the message (values were passed as extra args)
        raise ValueError('win does not match frame_size_n (%s != %s)'
                         % (len(win), frame_size_n))
    elif not len(awin) == frame_size_n:
        # fix: report len(awin), not len(win), in the awin message
        raise ValueError('awin does not match frame_size_n (%s != %s)'
                         % (len(awin), frame_size_n))
    elif spec.shape[0] < frame_starts_n.shape[0]:
        raise ValueError('Number of frames in the spectrogram cannot be \
less than the size of frame starts')
    N = frame_starts_n[-1] + frame_size_n
    signal = np.zeros(N)
    normalizer = np.zeros(N, dtype=np.float32)
    n_range = np.arange(frame_size_n)
    for i, n_offset in enumerate(frame_starts_n):
        # rebuild the full conjugate-symmetric spectrum from the half
        # spectrum; np.fft.ifft replaces the removed callable scipy.ifft
        full_spec = np.concatenate((spec[i], spec[i][-2:0:-1].conjugate()))
        frames = np.real(np.fft.ifft(full_spec, n=fft_size))[:frame_size_n]
        signal[n_offset + n_range] += frames * win
        normalizer[n_offset + n_range] += pro_win
    nonzero = np.where(normalizer > 0)
    rest = np.where(normalizer <= 0)
    signal[nonzero] = signal[nonzero] / normalizer[nonzero]
    signal[rest] = 0
    return signal
def comp_spec_image(wave, decom, frame_size_n, shift_size_n, fft_size, awin, log_floor):
    """
    Compute a 2-channel spectrogram image of a waveform.

    decom selects the decomposition: "mp" -> (log-magnitude in dB, phase),
    "ri" -> (real, imaginary). log_floor clips the log10 magnitude before
    the dB scaling.

    RETURN:
        float matrix of shape (2, T, F)
    """
    frame_starts_n = np.arange(0, wave.shape[0]-frame_size_n, step=shift_size_n)
    spec = stft_index(wave, frame_size_n, frame_starts_n, fft_size, awin)
    if decom == "mp":
        phase = np.angle(spec)
        dbmag = np.log10(np.absolute(spec))
        # print("max amplitude %s, max magnitude %s, max phase %s" % (
        #     np.max(wave), np.max(np.absolute(spec)), np.max(phase)))
        # floor is applied to log10 values, then scaled by 20 to dB
        dbmag[dbmag < log_floor] = log_floor
        dbmag = 20 * dbmag
        spec_image = np.concatenate([dbmag[None,...], phase[None,...]], axis=0)
    elif decom == "ri":
        real = np.real(spec)
        imag = np.imag(spec)
        # print("max amplitude %s, max real %s, max imag %s" % (
        #     np.max(wave), np.max(np.absolute(real)), np.max(np.absolute(imag))))
        spec_image = np.concatenate([real[None,...], imag[None,...]], axis=0)
    else:
        raise ValueError("decomposition type %s not supported" % decom)
    return spec_image
def est_phase_from_mag_spec(
        mag_spec, frame_size_n, shift_size_n, fft_size,
        awin=None, k=1000, min_avg_diff=1e-9, debug=False):
    """Iterative (Griffin-Lim style) phase estimation from a magnitude spectrogram.

    for quality min_avg_diff 1e-9 is recommended
    mag_spec - magnitude spectrogram (in linear) of shape (n_time, n_frequency)
    k - maximum number of STFT/ISTFT refinement iterations
    Returns (estimated phase matrix, list of debug waveforms).
    """
    start_time = time.time()
    debug_x = []
    frame_starts_n = np.arange(len(mag_spec)) * shift_size_n
    # initialize with uniformly random phase
    X_phase = None
    X = mag_spec * np.exp(1j * np.random.uniform(-np.pi, np.pi, mag_spec.shape))
    x = istft_index(X, frame_size_n, frame_starts_n, fft_size, awin, awin)
    for i in range(k):  # range() replaces Python-2-only xrange()
        # keep the given magnitude, take the phase of the re-analyzed signal
        X_phase = np.angle(stft_index(x, frame_size_n, frame_starts_n, fft_size, awin))
        X = mag_spec * np.exp(1j * X_phase)
        new_x = istft_index(X, frame_size_n, frame_starts_n, fft_size, awin, awin)
        avg_diff = np.mean((x - new_x)**2)
        x = new_x
        if avg_diff < min_avg_diff:
            break
        if debug and i % 100 == 0:
            # print() calls replace Python-2-only print statements
            print("done %s iterations, avg_diff is %s" % (i, avg_diff))
            debug_x.append(x)
    if debug:
        print("time elapsed = %.2f" % (time.time() - start_time))
    return X_phase, debug_x
def convert_to_complex_spec(
        X, X_phase, decom, phase_type, add_dc=False, est_phase_opts=None):
    """
    X/X_phase - matrix of shape (..., n_channel, n_time, n_frequency)
    decom - `mp`: magnitude (in dB) / phase (in rad) decomposition
            `ri`: real / imaginary decomposition
    phase_type - `true`: X's n_channel = 2
                 `oracle`: use oracle phase X_phase
                 `zero`: use zero matrix as the phase matrix for X
                 `rand`: use random matrix as the phase matrix for X
                 `est`: estimate the phase from magnitude spectrogram
    est_phase_opts - arguments for est_phase_from_mag_spec
    Returns complex_X of shape [..., t, f].
    """
    # fix: keep X_phase as None when absent -- np.asarray(None) yields an
    # object array, which silently defeats the `X_phase is None` check below
    X = np.asarray(X)
    X_phase = np.asarray(X_phase) if X_phase is not None else None
    if X.shape[-3] != 1 and X.shape[-3] != 2:
        raise ValueError("X's n_channel must be 1 or 2 (%s)" % X.shape[-3])
    if np.any(np.iscomplex(X)):
        raise ValueError("X should not be complex")
    if X_phase is not None and np.any(np.iscomplex(X_phase)):
        raise ValueError("X_phase should not be complex")
    if add_dc:
        # prepend an all-zero DC bin along the frequency axis
        X_dc = np.zeros(X.shape[:-1] + (1,))
        X = np.concatenate([X_dc, X], axis=-1)
        # fix: truth-testing a multi-element array raises; test for None
        if X_phase is not None:
            X_phase_dc = np.zeros(X_phase.shape[:-1] + (1,))
            X_phase = np.concatenate([X_phase_dc, X_phase], axis=-1)
    if decom == "mp":
        # channel 0 carries magnitude in dB; convert back to linear
        X_lin_mag = 10 ** (X[..., 0, :, :] / 20)
        if phase_type == "true":
            if X.shape[-3] != 2:
                raise ValueError("X should have 2 channels for phase_type %s" % (
                    phase_type,) + " (X shape is %s)" % (X.shape,))
            X_phase = X[..., 1, :, :]
        else:
            if X.shape[-3] != 1:
                print("WARNING: ignoring X's second channel (phase)")
            if phase_type == "oracle":
                if X_phase is None:
                    # fix: the original dereferenced X_phase.shape here,
                    # which itself raised on the None it was reporting
                    raise ValueError(
                        "X_phase is required for phase_type %s" % phase_type)
            elif phase_type == "zero":
                X_phase = np.zeros_like(X_lin_mag)
            elif phase_type == "rand":
                X_phase = np.random.uniform(-np.pi, np.pi, X_lin_mag.shape)
            elif phase_type == "est":
                X_phase, _ = est_phase_from_mag_spec(
                    X_lin_mag, debug=True, **est_phase_opts)
                print("X_lin_mag shape %s" % (X_lin_mag.shape,))
                print("X_phase shape %s" % (X_phase.shape,))
            else:
                raise ValueError("invalid phase type (%s)" % phase_type)
        complex_X = X_lin_mag * np.exp(1j * X_phase)
    elif decom == "ri":
        if phase_type != "true":
            raise ValueError("invalid phase type %s. only `true` is valid" % phase_type)
        complex_X = X[..., 0, :, :] + 1j * X[..., 1, :, :]
    else:
        raise ValueError("invalid decomposition %s (mp|ri)" % decom)
    return complex_X
def complex_spec_to_audio(
        complex_spec, name=None, trim=0, fs=16000,
        frame_size_n=400, shift_size_n=160, fft_size=400, win=None):
    """Reconstruct a waveform from a complex spectrogram via inverse STFT.

    complex_spec : complex matrix of shape (n_time, n_frequency)
    name : if given, also write the signal to a .wav file at this path
    trim : number of samples dropped from both ends of the signal
    fs : sampling rate used when writing the wav file
    Returns the reconstructed (and possibly trimmed) signal.
    """
    assert(np.asarray(complex_spec).ndim == 2)
    frame_starts_n = np.arange(len(complex_spec)) * shift_size_n
    signal = istft_index(complex_spec, frame_size_n, frame_starts_n, fft_size, win, win)
    if trim > 0:
        signal = signal[trim:-trim]
    if name is not None:
        # ensure a .wav extension before writing
        if os.path.splitext(name)[1] != ".wav":
            name = name + ".wav"
        sf.write(file=name, data=signal, samplerate=fs)
    return signal
|
97477
|
from .base_serializer import BaseSerializer
from .binary_serializer import BinarySerializer
from .json_serializer import JsonSerializer
__all__ = [
'BaseSerializer',
'BinarySerializer',
'JsonSerializer',
]
|
97529
|
import numpy as np
from .base_likelihood import Likelihood
from scipy.special import logsumexp, softmax
from tramp.utils.linear_region import LinearRegionLikelihood
class PiecewiseLinearLikelihood(Likelihood):
    """Likelihood y = f(z) for a piecewise-linear function f.

    Each region is a LinearRegionLikelihood over [zmin, zmax] with
    x = slope * z + x0; per-region posteriors are merged with weights
    given by the softmax of their log-partitions.
    """

    def __init__(self, name, regions, y, y_name="y"):
        self.y_name = y_name
        self.size = self.get_size(y)
        self.repr_init()
        self.name = name
        self.y = y
        self.regions = [LinearRegionLikelihood(**region) for region in regions]
        self.n_regions = len(regions)

    def sample(self, Z):
        # each region contributes only where Z falls inside its interval
        X = sum(region.sample(Z) for region in self.regions)
        return X

    def math(self):
        return r"$\textrm{" + self.name + r"}$"

    def merge_estimates(self, rs, vs, As):
        """Merge per-region means/variances, weighted by softmax of log-partitions."""
        ps = softmax(As, axis=0)
        r = sum(p*r for p, r in zip(ps, rs))
        # Dr adds the between-region spread of the means to the
        # within-region variances (law of total variance)
        Dr = sum(
            ps[i]*ps[j]*(rs[i] - rs[j])**2
            for i in range(self.n_regions)
            for j in range(i+1, self.n_regions)
        )
        v = sum(p*v for p, v in zip(ps, vs)) + Dr
        v = v.mean()
        return r, v

    def compute_backward_posterior(self, az, bz, y):
        """Posterior mean/variance of z given natural params (az, bz) and y."""
        rs = [region.backward_mean(az, bz, y) for region in self.regions]
        vs = [region.backward_variance(az, bz, y) for region in self.regions]
        As = [region.log_partitions(az, bz, y) for region in self.regions]
        r, v = self.merge_estimates(rs, vs, As)
        return r, v

    def compute_log_partition(self, az, bz, y):
        """Total log-partition: logsumexp over regions, summed over data."""
        As = [region.log_partitions(az, bz, y) for region in self.regions]
        A = logsumexp(As, axis=0)
        return A.sum()

    def beliefs_measure(self, az, tau_z, f):
        mu = sum(
            region.beliefs_measure(az, tau_z, f) for region in self.regions
        )
        return mu

    def measure(self, y, f):
        mu = sum(region.measure(y, f) for region in self.regions)
        return mu
class SgnLikelihood(PiecewiseLinearLikelihood):
    """y = sgn(z): flat output -1 for z < 0 and +1 for z > 0."""

    def __init__(self, y, y_name="y"):
        positive = dict(zmin=0, zmax=+np.inf, slope=0, x0=+1)
        negative = dict(zmin=-np.inf, zmax=0, slope=0, x0=-1)
        super().__init__(name="sgn", regions=[positive, negative], y=y, y_name=y_name)
class AbsLikelihood(PiecewiseLinearLikelihood):
    """y = |z|: slope -1 on the negative half-line, +1 on the positive."""

    def __init__(self, y, y_name="y"):
        positive = dict(zmin=0, zmax=+np.inf, slope=+1, x0=0)
        negative = dict(zmin=-np.inf, zmax=0, slope=-1, x0=0)
        super().__init__(name="abs", regions=[positive, negative], y=y, y_name=y_name)
class AsymmetricAbsLikelihood(PiecewiseLinearLikelihood):
    """y = |z| with the breakpoint moved to `shift` to break the symmetry."""

    def __init__(self, y, y_name="y", shift=1e-4):
        positive = dict(zmin=shift, zmax=+np.inf, slope=+1, x0=0)
        negative = dict(zmin=-np.inf, zmax=shift, slope=-1, x0=0)
        super().__init__(name="a-abs", regions=[positive, negative], y=y, y_name=y_name)
class ReluLikelihood(PiecewiseLinearLikelihood):
    """y = max(z, 0): zero below the origin, identity above it."""

    def __init__(self, y, y_name="y"):
        positive = dict(zmin=0, zmax=+np.inf, slope=1, x0=0)
        negative = dict(zmin=-np.inf, zmax=0, slope=0, x0=0)
        super().__init__(name="relu", regions=[positive, negative], y=y, y_name=y_name)
class LeakyReluLikelihood(PiecewiseLinearLikelihood):
    """y = z for z > 0, slope * z otherwise."""

    def __init__(self, slope, y, y_name="y"):
        self.slope = slope
        positive = dict(zmin=0, zmax=np.inf, slope=1, x0=0)
        negative = dict(zmin=-np.inf, zmax=0, slope=slope, x0=0)
        super().__init__(name="l-relu", regions=[positive, negative], y=y, y_name=y_name)
class HardTanhLikelihood(PiecewiseLinearLikelihood):
    """y = clip(z, -1, +1): saturates at the ends, identity in between."""

    def __init__(self, y, y_name="y"):
        positive = dict(zmin=+1, zmax=+np.inf, slope=0, x0=+1)
        middle = dict(zmin=-1, zmax=+1, slope=1, x0=0)
        negative = dict(zmin=-np.inf, zmax=-1, slope=0, x0=-1)
        super().__init__(
            name="h-tanh", regions=[positive, middle, negative], y=y, y_name=y_name
        )
class HardSigmoidLikelihood(PiecewiseLinearLikelihood):
    """Piecewise-linear sigmoid: 0 below -l, 1 above +l, linear ramp between."""

    def __init__(self, y, y_name="y"):
        l = 2.5
        positive = dict(zmin=l, zmax=np.inf, slope=0, x0=1)
        middle = dict(zmin=-l, zmax=+l, slope=1/(2*l), x0=0.5)
        negative = dict(zmin=-np.inf, zmax=-l, slope=0, x0=0)
        super().__init__(
            name="h-sigm", regions=[positive, middle, negative], y=y, y_name=y_name
        )
class SymmetricDoorLikelihood(PiecewiseLinearLikelihood):
    """Door function: y = -1 inside [-width, +width], +1 outside."""

    def __init__(self, width, y, y_name="y"):
        self.width = width
        positive = dict(zmin=+width, zmax=+np.inf, slope=0, x0=+1)
        middle = dict(zmin=-width, zmax=+width, slope=0, x0=-1)
        negative = dict(zmin=-np.inf, zmax=-width, slope=0, x0=+1)
        super().__init__(
            name="door", regions=[positive, middle, negative], y=y, y_name=y_name
        )
|
97537
|
import collections
import contextlib
import pytest
from siosocks.exceptions import SocksException
from siosocks.protocol import SocksClient, SocksServer
from siosocks.sansio import SansIORW
class ConnectionFailed(Exception):
    """Thrown into a protocol generator to simulate a failed connect()."""
    pass
class Node:
    """Drives one sans-io protocol generator and shuttles its messages.

    receive is this node's inbound queue (seeded with None so the first
    send() primes the generator); send is wired by rotor() to the peer
    node's receive list.
    """

    def __init__(self, generator):
        self.generator = generator
        self.receive = [None]
        self.send = None
        # per-method counter of requests the generator has issued
        self.calls = collections.defaultdict(int)

    def run(self, *, fail_connection=False):
        """Pump the generator until passthrough or an empty read queue."""
        gen_method = self.generator.send
        while True:
            request = gen_method(self.receive.pop(0))
            gen_method = self.generator.send
            method = request.pop("method")
            self.calls[method] += 1
            if method == "write":
                data = request["data"]
                if data:
                    # forward written bytes into the peer's inbox
                    self.send.append(data)
                # acknowledge the write so our generator can continue
                self.receive.append(None)
            elif method == "connect":
                if fail_connection:
                    # deliver the failure by throwing into the generator
                    gen_method = self.generator.throw
                    self.receive.append(ConnectionFailed("test"))
                else:
                    self.receive.append(None)
            elif method == "passthrough":
                return
            elif method == "read":
                if not self.receive:
                    # nothing buffered: yield control back to the peer
                    return
            else:
                raise ValueError(f"Unexpected method {method}")
def rotor(client, server, *, fail_connection=False):
    """Alternate the two protocol generators until both hit passthrough."""
    nodes = collections.deque([Node(client), Node(server)])
    # wire each node's outgoing data into the other node's inbox
    nodes[0].send = nodes[1].receive
    nodes[1].send = nodes[0].receive
    while not all(n.calls["passthrough"] for n in nodes):
        # ConnectionFailed escaping a generator just ends that turn
        with contextlib.suppress(ConnectionFailed):
            nodes[0].run(fail_connection=fail_connection)
        nodes.rotate(1)
def test_client_bad_socks_version():
    """Client must reject an unsupported SOCKS version (6)."""
    def server():
        io = SansIORW(encoding="utf-8")
        yield from io.read_exactly(1)
    with pytest.raises(SocksException):
        rotor(SocksClient("abc", 123, 6), server())
def test_client_socks4_and_auth():
    """SOCKS4 has no username/password auth: the client must refuse."""
    def server():
        io = SansIORW(encoding="utf-8")
        yield from io.read_exactly(1)
    with pytest.raises(SocksException):
        rotor(SocksClient("abc", 123, 4, username="yoba", password="<PASSWORD>"), server())
def test_client_socks4_connection_failed():
    """Server replies 0x5b (request rejected): client must raise."""
    def server():
        io = SansIORW(encoding="utf-8")
        version, command, port, ipv4 = yield from io.read_struct("BBH4s")
        assert version == 4
        assert command == 1
        assert port == 123
        assert ipv4 == b"\x7f\x00\x00\x01"
        user_id = yield from io.read_c_string()
        assert user_id == "yoba"
        yield from io.connect(ipv4, port)
        # 0x5b = request rejected or failed
        yield from io.write_struct("BBH4s", 0, 0x5b, 0, b"\x00" * 4)
        yield from io.passthrough()
    with pytest.raises(SocksException):
        rotor(SocksClient("127.0.0.1", 123, 4, socks4_extras=dict(user_id="yoba")), server())
@pytest.mark.skip("this check removed")
def test_client_socks4_redirect_not_supported_by_port():
def server():
io = SansIORW(encoding="utf-8")
version, command, port, ipv4 = yield from io.read_struct("BBH4s")
assert version == 4
assert command == 1
assert port == 123
assert ipv4 == b"\x7f\x00\x00\x01"
user_id = yield from io.read_c_string()
assert user_id == "yoba"
yield from io.connect(ipv4, port)
yield from io.write_struct("BBH4s", 0, 0x5a, 666, b"\x00" * 4)
yield from io.passthrough()
with pytest.raises(SocksException):
rotor(SocksClient("127.0.0.1", 123, 4, socks4_extras=dict(user_id="yoba")), server())
@pytest.mark.skip("this check removed")
def test_client_socks4_redirect_not_supported_by_host():
def server():
io = SansIORW(encoding="utf-8")
version, command, port, ipv4 = yield from io.read_struct("BBH4s")
assert version == 4
assert command == 1
assert port == 123
assert ipv4 == b"\x7f\x00\x00\x01"
user_id = yield from io.read_c_string()
assert user_id == "yoba"
yield from io.connect(ipv4, port)
yield from io.write_struct("BBH4s", 0, 0x5a, 0, b"\x7f\x00\x00\x01")
yield from io.passthrough()
with pytest.raises(SocksException):
rotor(SocksClient("127.0.0.1", 123, 4, socks4_extras=dict(user_id="yoba")), server())
def test_client_socks4_success_by_ipv4():
def server():
io = SansIORW(encoding="utf-8")
version, command, port, ipv4 = yield from io.read_struct("BBH4s")
assert version == 4
assert command == 1
assert port == 123
assert ipv4 == b"\x7f\x00\x00\x01"
user_id = yield from io.read_c_string()
assert user_id == ""
yield from io.connect(ipv4, port)
yield from io.write_struct("BBH4s", 0, 0x5a, 0, b"\x00" * 4)
yield from io.passthrough()
rotor(SocksClient("127.0.0.1", 123, 4), server())
def test_client_socks4_success_by_host():
def server():
io = SansIORW(encoding="utf-8")
version, command, port, ipv4 = yield from io.read_struct("BBH4s")
assert version == 4
assert command == 1
assert port == 123
assert ipv4 == b"\x00\x00\x00\xff"
user_id = yield from io.read_c_string()
assert user_id == ""
host = yield from io.read_c_string()
assert host == "python.org"
yield from io.connect(host, port)
yield from io.write_struct("BBH4s", 0, 0x5a, 0, b"\x00" * 4)
yield from io.passthrough()
rotor(SocksClient("python.org", 123, 4), server())
def test_server_socks4_auth_required():
    """Server configured with credentials must reject SOCKS4 clients."""
    def client():
        io = SansIORW(encoding="utf-8")
        yield from io.write(b"\x04")
        yield from io.passthrough()
    with pytest.raises(SocksException):
        rotor(client(), SocksServer(username="foo"))
def test_server_socks_bad_socks_version():
    """Server must reject an unknown SOCKS version byte (6)."""
    def client():
        io = SansIORW(encoding="utf-8")
        yield from io.write(b"\x06")
        yield from io.passthrough()
    with pytest.raises(SocksException):
        rotor(client(), SocksServer())
def test_server_socks_bad_socks_version_but_allowed():
def client():
io = SansIORW(encoding="utf-8")
yield from io.write(b"\x06")
yield from io.passthrough()
with pytest.raises(SocksException):
rotor(client(), SocksServer(allowed_versions={6}))
def test_server_socks4_unsupported_command():
def client():
io = SansIORW(encoding="utf-8")
yield from io.write_struct("BBH4s", 4, 2, 123, b"\x7f\x00\x00\x01")
yield from io.write_c_string("yoba")
yield from io.passthrough()
with pytest.raises(SocksException):
rotor(client(), SocksServer())
def test_server_socks4_connect_failed():
def client():
io = SansIORW(encoding="utf-8")
yield from io.write_struct("BBH4s", 4, 1, 123, b"\x7f\x00\x00\x01")
yield from io.write_c_string("yoba")
prefix, code, port, ipv4 = yield from io.read_struct("BBH4s")
assert prefix == 0
assert code == 0x5b
assert port == 0
assert ipv4 == b"\x00" * 4
raise RuntimeError("connection failed")
with pytest.raises(SocksException):
rotor(client(), SocksServer(), fail_connection=True)
def test_server_socks4_success_by_ipv4():
def client():
io = SansIORW(encoding="utf-8")
yield from io.write_struct("BBH4s", 4, 1, 123, b"\x7f\x00\x00\x01")
yield from io.write_c_string("yoba")
prefix, code, port, ipv4 = yield from io.read_struct("BBH4s")
assert prefix == 0
assert code == 0x5a
assert port == 0
assert ipv4 == b"\x00" * 4
yield from io.passthrough()
rotor(client(), SocksServer())
def test_server_socks4_success_by_host():
def client():
io = SansIORW(encoding="utf-8")
yield from io.write_struct("BBH4s", 4, 1, 123, b"\x00\x00\x00\x01")
yield from io.write_c_string("yoba")
yield from io.write_c_string("python.org")
prefix, code, port, ipv4 = yield from io.read_struct("BBH4s")
assert prefix == 0
assert code == 0x5a
assert port == 0
assert ipv4 == b"\x00" * 4
yield from io.passthrough()
rotor(client(), SocksServer())
def test_client_socks5_request_auth_bad_version():
def server():
io = SansIORW(encoding="utf-8")
version, one, auth_method = yield from io.read_struct("BBB")
assert version == 5
assert one == 1
assert auth_method == 0
yield from io.write_struct("BB", 1, 0)
yield from io.passthrough()
with pytest.raises(SocksException):
rotor(SocksClient("abc", 666, 5), server())
def test_client_socks5_request_auth_not_accepted():
def server():
io = SansIORW(encoding="utf-8")
version, one, auth_method = yield from io.read_struct("BBB")
assert version == 5
assert one == 1
assert auth_method == 0
yield from io.write_struct("BB", 5, 1)
yield from io.passthrough()
with pytest.raises(SocksException):
rotor(SocksClient("abc", 666, 5), server())
def test_client_socks5_request_auth_username_bad_auth_version():
def server():
io = SansIORW(encoding="utf-8")
version, one, auth_method = yield from io.read_struct("BBB")
assert version == 5
assert one == 1
assert auth_method == 2
yield from io.write_struct("BB", 5, 2)
auth_version = yield from io.read_struct("B")
assert auth_version == 1
username = yield from io.read_pascal_string()
password = yield from io.read_pascal_string()
assert username == "yoba"
assert password == "<PASSWORD>"
yield from io.write_struct("BB", 0, 0)
yield from io.passthrough()
with pytest.raises(SocksException):
rotor(SocksClient("abc", 666, 5, username="yoba", password="<PASSWORD>"), server())
def test_client_socks5_request_auth_username_failed():
def server():
io = SansIORW(encoding="utf-8")
version, one, auth_method = yield from io.read_struct("BBB")
assert version == 5
assert one == 1
assert auth_method == 2
yield from io.write_struct("BB", 5, 2)
auth_version = yield from io.read_struct("B")
assert auth_version == 1
username = yield from io.read_pascal_string()
password = yield from io.read_pascal_string()
assert username == "yoba"
assert password == "<PASSWORD>"
yield from io.write_struct("BB", 1, 1)
yield from io.passthrough()
with pytest.raises(SocksException):
rotor(SocksClient("abc", 666, 5, username="yoba", password="<PASSWORD>"), server())
def test_client_socks5_command_bad_version():
def server():
io = SansIORW(encoding="utf-8")
version, one, auth_method = yield from io.read_struct("BBB")
assert (version, one, auth_method) == (5, 1, 0)
yield from io.write_struct("BB", 5, 0)
version, command, zero, address_type = yield from io.read_struct("4B")
assert (version, command, zero, address_type) == (5, 1, 0, 1)
ipv4, port = yield from io.read_struct("4sH")
assert (ipv4, port) == (b"\x7f\x00\x00\x01", 666)
yield from io.write_struct("4B", 6, 0, 0, 0)
yield from io.passthrough()
with pytest.raises(SocksException):
rotor(SocksClient("127.0.0.1", 666, 5), server())
def test_client_socks5_command_request_not_granted():
def server():
io = SansIORW(encoding="utf-8")
version, one, auth_method = yield from io.read_struct("BBB")
assert (version, one, auth_method) == (5, 1, 0)
yield from io.write_struct("BB", 5, 0)
version, command, zero, address_type = yield from io.read_struct("4B")
assert (version, command, zero, address_type) == (5, 1, 0, 1)
ipv4, port = yield from io.read_struct("4sH")
assert (ipv4, port) == (b"\x7f\x00\x00\x01", 666)
yield from io.write_struct("4B", 5, 1, 0, 1)
yield from io.write(b"\x00" * 4)
yield from io.write_struct("H", 0)
yield from io.passthrough()
with pytest.raises(SocksException):
rotor(SocksClient("127.0.0.1", 666, 5), server())
@pytest.mark.skip("this check removed")
def test_client_socks5_command_redirect_is_not_allowed():
def server():
io = SansIORW(encoding="utf-8")
version, one, auth_method = yield from io.read_struct("BBB")
assert (version, one, auth_method) == (5, 1, 0)
yield from io.write_struct("BB", 5, 0)
version, command, zero, address_type = yield from io.read_struct("4B")
assert (version, command, zero, address_type) == (5, 1, 0, 1)
ipv4, port = yield from io.read_struct("4sH")
assert (ipv4, port) == (b"\x7f\x00\x00\x01", 666)
yield from io.write_struct("4B", 5, 0, 0, 1)
yield from io.write(b"\x00" * 4)
yield from io.write_struct("H", 1)
yield from io.passthrough()
with pytest.raises(SocksException):
rotor(SocksClient("127.0.0.1", 666, 5), server())
def test_client_socks5_success_ipv4():
def server():
io = SansIORW(encoding="utf-8")
version, one, auth_method = yield from io.read_struct("BBB")
assert (version, one, auth_method) == (5, 1, 0)
yield from io.write_struct("BB", 5, 0)
version, command, zero, address_type = yield from io.read_struct("4B")
assert (version, command, zero, address_type) == (5, 1, 0, 1)
ipv4, port = yield from io.read_struct("4sH")
assert (ipv4, port) == (b"\x7f\x00\x00\x01", 666)
yield from io.write_struct("4B", 5, 0, 0, 1)
yield from io.write(b"\x00" * 4)
yield from io.write_struct("H", 0)
yield from io.passthrough()
rotor(SocksClient("127.0.0.1", 666, 5), server())
def test_client_socks5_success_ipv6():
def server():
io = SansIORW(encoding="utf-8")
version, one, auth_method = yield from io.read_struct("BBB")
assert (version, one, auth_method) == (5, 1, 0)
yield from io.write_struct("BB", 5, 0)
version, command, zero, address_type = yield from io.read_struct("4B")
assert (version, command, zero, address_type) == (5, 1, 0, 4)
ipv6, port = yield from io.read_struct("16sH")
assert (ipv6, port) == ((b"\x00" * 15) + b"\x01", 666)
yield from io.write_struct("4B", 5, 0, 0, 1)
yield from io.write(b"\x00" * 4)
yield from io.write_struct("H", 0)
yield from io.passthrough()
rotor(SocksClient("::1", 666, 5), server())
def test_client_socks5_success_domain():
def server():
io = SansIORW(encoding="utf-8")
version, one, auth_method = yield from io.read_struct("BBB")
assert (version, one, auth_method) == (5, 1, 0)
yield from io.write_struct("BB", 5, 0)
version, command, zero, address_type = yield from io.read_struct("4B")
assert (version, command, zero, address_type) == (5, 1, 0, 3)
domain = yield from io.read_pascal_string()
port = yield from io.read_struct("H")
assert (domain, port) == ("python.org", 666)
yield from io.write_struct("4B", 5, 0, 0, 1)
yield from io.write(b"\x00" * 4)
yield from io.write_struct("H", 0)
yield from io.passthrough()
rotor(SocksClient("python.org", 666, 5), server())
def test_server_socks5_no_auth_methods():
def client():
io = SansIORW(encoding="utf-8")
yield from io.write_struct("B", 5)
yield from io.write_struct("B", 0)
yield from io.passthrough()
with pytest.raises(SocksException):
rotor(client(), SocksServer())
def test_server_socks5_bad_username_auth_version():
def client():
io = SansIORW(encoding="utf-8")
yield from io.write_struct("B", 5)
yield from io.write_struct("BB", 1, 2)
version, auth_method = yield from io.read_struct("BB")
assert (version, auth_method) == (5, 2)
yield from io.write_struct("B", 0)
yield from io.passthrough()
with pytest.raises(SocksException):
rotor(client(), SocksServer(allowed_versions={5}, username="yoba", password="<PASSWORD>"))
def test_server_socks5_bad_username():
def client():
io = SansIORW(encoding="utf-8")
yield from io.write_struct("B", 5)
yield from io.write_struct("BB", 1, 2)
version, auth_method = yield from io.read_struct("BB")
assert (version, auth_method) == (5, 2)
yield from io.write_struct("B", 1)
yield from io.write_pascal_string("yoba1")
yield from io.write_pascal_string("foo")
auth_version, retcode = yield from io.read_struct("BB")
assert (auth_version, retcode) == (1, 1)
yield from io.passthrough()
with pytest.raises(SocksException):
rotor(client(), SocksServer(allowed_versions={5}, username="yoba", password="<PASSWORD>"))
def test_server_socks5_bad_password():
def client():
io = SansIORW(encoding="utf-8")
yield from io.write_struct("B", 5)
yield from io.write_struct("BB", 1, 2)
version, auth_method = yield from io.read_struct("BB")
assert (version, auth_method) == (5, 2)
yield from io.write_struct("B", 1)
yield from io.write_pascal_string("yoba")
yield from io.write_pascal_string("foo1")
auth_version, retcode = yield from io.read_struct("BB")
assert (auth_version, retcode) == (1, 1)
yield from io.passthrough()
with pytest.raises(SocksException):
rotor(client(), SocksServer(allowed_versions={5}, username="yoba", password="<PASSWORD>"))
def test_server_socks5_command_not_supported():
    """Server must answer an unsupported command with reply code 7, then fail."""
    def client():
        io = SansIORW(encoding="utf-8")
        yield from io.write_struct("B", 5)
        yield from io.write_struct("BB", 1, 0)  # offer only "no authentication" (0)
        version, auth_method = yield from io.read_struct("BB")
        assert (version, auth_method) == (5, 0)
        yield from io.write_struct("4B", 5, 2, 0, 1)  # command 2 (BIND per RFC 1928) — unsupported here
        yield from io.write_struct("4sH", b"\x00" * 4, 666)
        version, command, zero, address_type = yield from io.read_struct("4B")
        assert (version, command, zero, address_type) == (5, 7, 0, 1)  # 7 = command not supported
        ipv4, port = yield from io.read_struct("4sH")
        assert (ipv4, port) == (b"\x00" * 4, 0)  # zeroed bind address in the error reply
        yield from io.passthrough()
    with pytest.raises(SocksException):
        rotor(client(), SocksServer())
def test_server_socks5_address_type_not_supported():
    """Server must fail when the request carries an unknown address type."""
    def client():
        io = SansIORW(encoding="utf-8")
        yield from io.write_struct("B", 5)
        yield from io.write_struct("BB", 1, 0)
        version, auth_method = yield from io.read_struct("BB")
        assert (version, auth_method) == (5, 0)
        yield from io.write_struct("4B", 5, 2, 0, 13)  # address type 13 is not defined by RFC 1928
        yield from io.passthrough()
    with pytest.raises(SocksException):
        rotor(client(), SocksServer())
def test_server_socks5_connection_failed():
    """Server must report failure when the upstream connection cannot be made."""
    def client():
        io = SansIORW(encoding="utf-8")
        yield from io.write_struct("B", 5)
        yield from io.write_struct("BB", 1, 0)
        version, auth_method = yield from io.read_struct("BB")
        assert (version, auth_method) == (5, 0)
        yield from io.write_struct("4B", 5, 1, 0, 1)  # CONNECT (1) to an IPv4 (type 1) address
        yield from io.write_struct("4sH", b"\x00" * 4, 666)
        version, command, zero, address_type = yield from io.read_struct("4B")
        assert (version, command, zero, address_type) == (5, 1, 0, 1)  # reply code 1 = general failure
        raise RuntimeError("connection failed")
    with pytest.raises(SocksException):
        # fail_connection makes the test harness refuse the upstream connect.
        rotor(client(), SocksServer(), fail_connection=True)
def test_server_socks5_connection_ipv4_success():
    """Happy path: anonymous SOCKS5 CONNECT to an IPv4 address succeeds."""
    def client():
        io = SansIORW(encoding="utf-8")
        yield from io.write_struct("B", 5)
        yield from io.write_struct("BB", 1, 0)
        version, auth_method = yield from io.read_struct("BB")
        assert (version, auth_method) == (5, 0)
        yield from io.write_struct("4B", 5, 1, 0, 1)
        yield from io.write_struct("4sH", b"\x00" * 4, 666)
        version, command, zero, address_type = yield from io.read_struct("4B")
        assert (version, command, zero, address_type) == (5, 0, 0, 1)  # reply code 0 = succeeded
        ipv4, port = yield from io.read_struct("4sH")
        assert (ipv4, port) == (b"\x00" * 4, 0)
        yield from io.passthrough()
    rotor(client(), SocksServer())
def test_server_socks5_connection_ipv6_success():
    """Happy path: CONNECT to an IPv6 destination succeeds."""
    def client():
        io = SansIORW(encoding="utf-8")
        yield from io.write_struct("B", 5)
        yield from io.write_struct("BB", 1, 0)
        version, auth_method = yield from io.read_struct("BB")
        assert (version, auth_method) == (5, 0)
        yield from io.write_struct("4B", 5, 1, 0, 4)  # CONNECT to an IPv6 (type 4) address
        yield from io.write_struct("16sH", b"\x00" * 16, 666)
        version, command, zero, address_type = yield from io.read_struct("4B")
        # NOTE(review): the reply advertises an IPv4 (type 1) bind address even for
        # an IPv6 request — presumably the server always reports a zeroed IPv4
        # bind address; confirm against SocksServer's implementation.
        assert (version, command, zero, address_type) == (5, 0, 0, 1)
        ipv4, port = yield from io.read_struct("4sH")
        assert (ipv4, port) == (b"\x00" * 4, 0)
        yield from io.passthrough()
    rotor(client(), SocksServer())
def test_server_socks5_connection_domain_success():
    """Happy path: CONNECT to a domain-name destination succeeds."""
    def client():
        io = SansIORW(encoding="utf-8")
        yield from io.write_struct("B", 5)
        yield from io.write_struct("BB", 1, 0)
        version, auth_method = yield from io.read_struct("BB")
        assert (version, auth_method) == (5, 0)
        yield from io.write_struct("4B", 5, 1, 0, 3)  # address type 3 = domain name
        yield from io.write_pascal_string("python.org")
        yield from io.write_struct("H", 666)
        version, command, zero, address_type = yield from io.read_struct("4B")
        assert (version, command, zero, address_type) == (5, 0, 0, 1)
        ipv4, port = yield from io.read_struct("4sH")
        assert (ipv4, port) == (b"\x00" * 4, 0)
        yield from io.passthrough()
    rotor(client(), SocksServer())
|
97598
|
import cv2

# Open the RTSP camera stream and mirror it into an AVI file while previewing.
cap = cv2.VideoCapture("rtsp://admin:IVDCRX@192.168.1.139:554//Streaming/Channels/1")
ret, frame = cap.read()  # prime one frame so stream properties are populated
size = (int(cap.get(cv2.CAP_PROP_FRAME_WIDTH)), int(cap.get(cv2.CAP_PROP_FRAME_HEIGHT)))
fourcc = cv2.VideoWriter_fourcc(*'XVID')
out = cv2.VideoWriter('output1.avi', fourcc, 6.0, size)

while True:
    ret, frame = cap.read()
    if not ret:
        # Stream ended or dropped: stop recording.
        break
    out.write(frame)
    cv2.imshow('frame', frame)
    # Press 'q' in the preview window to stop.
    if cv2.waitKey(1) & 0xFF == ord('q'):
        break

cap.release()
out.release()
cv2.destroyAllWindows()
'''
""" 从视频读取帧保存为图片"""
import cv2
cap = cv2.VideoCapture("output1.avi")
print(cap.isOpened())
frame_count = 1
success = True
while (success):
success, frame = cap.read()
print('Read a new frame: ', success)
params = []
# params.append(cv.CV_IMWRITE_PXM_BINARY)
params.append(1)
cv2.imwrite("_%d.jpg" % frame_count, frame, params)
frame_count = frame_count + 1
cap.release()
'''
|
97605
|
from typing import Optional
from setuptools import setup, find_packages
package_name = 'rozental_as_a_service'


def get_version() -> Optional[str]:
    """Extract ``__version__`` from the package's ``__init__.py``.

    Returns the version string, or None when no ``__version__`` assignment
    is found. Handles both single- and double-quoted version literals
    (the original implementation stripped single quotes only).
    """
    with open('rozental_as_a_service/__init__.py', 'r') as f:
        for line in f:
            if line.startswith('__version__'):
                # Take the right-hand side of the assignment and drop
                # surrounding whitespace and either quote style.
                return line.split('=')[-1].strip().strip('\'"')
    return None
def get_long_description() -> str:
    """Read README.md (UTF-8) to use as the PyPI long description."""
    with open('README.md', encoding='utf8') as readme_file:
        contents = readme_file.read()
    return contents
# Package metadata: version and long description are read from the package
# itself so they live in exactly one place.
setup(
    name=package_name,
    description='Package to find typos in russian text.',
    long_description=get_long_description(),
    long_description_content_type='text/markdown',
    classifiers=[
        'Environment :: Console',
        'Operating System :: OS Independent',
        'Topic :: Software Development :: Libraries :: Python Modules',
        'Topic :: Software Development :: Quality Assurance',
        'Programming Language :: Python',
        'Programming Language :: Python :: 3.7',
        'Programming Language :: Python :: 3.8',
    ],
    packages=find_packages(),
    # Ship bundled *.gz data files (e.g. dictionaries) inside every package.
    package_data={
        "": ["*.gz"],
    },
    include_package_data=True,
    keywords='typos',
    version=get_version(),
    author='<NAME>',
    author_email='<EMAIL>',
    install_requires=[
        'setuptools',
        'tabulate>=0.8',
        'requests>=2.22.0',
        'Markdown>=3.1.1',
        'beautifulsoup4>=4.8.0',
        'esprima==4.0.1',
        'mypy-extensions>=0.4.1',
        'sentry-sdk>=0.14.3',
        'chardet',
        'autocorrect',
    ],
    # Expose the CLI as `rozental`.
    entry_points={
        'console_scripts': [
            'rozental = rozental_as_a_service.rozental:main',
        ],
    },
    url='https://github.com/Melevir/rozental_as_a_service',
    license='MIT',
    py_modules=[package_name],
    zip_safe=False,
)
|
97610
|
from typing import List
import logging
import orjson
from instauto.api.actions.structs.feed import FeedGet
from instauto.api.client import ApiClient
from instauto.helpers import models
logging.basicConfig()
logger = logging.getLogger(__name__)


def get_feed(client: ApiClient, limit: int) -> List[models.Post]:
    """Retrieve up to ``limit`` posts from the authenticated user's feed.

    Paginates with :class:`FeedGet` until enough posts are collected or the
    feed stops returning items. Feed entries without a ``media_or_ad`` key
    (e.g. suggestions) are skipped.

    Fixes over the original: the result is truncated to ``limit`` (the old
    code could return up to one extra page of posts), and the progress log
    reports the count *after* the page is consumed instead of lagging by
    one iteration.
    """
    collected: List[dict] = []
    obj = FeedGet()
    while len(collected) < limit:
        obj, resp = client.feed_get(obj)
        data = orjson.loads(resp.text)
        items = [i for i in data['feed_items'] if 'media_or_ad' in i]
        if not items:
            break  # feed exhausted
        collected.extend(items)
        logger.info("Retrieved {} posts, {} more to go.".format(
            len(collected), max(limit - len(collected), 0))
        )
    # Never hand back more than the caller asked for.
    return [models.Post.parse(p) for p in collected[:limit]]
|
97617
|
from collections import Counter
def can_scramble(source, dest):
    """Return True when `dest` can be formed by rearranging the characters of `source`."""
    # Equal length plus equal per-character counts <=> dest is a permutation of source.
    return len(source) == len(dest) and Counter(source) == Counter(dest)


# Self-checks executed at import time, exactly as in the original script.
assert can_scramble("abc", "cba") == True
assert can_scramble("abc", "ccc") == False
assert can_scramble("aab", "bbc") == False
assert can_scramble("aabaaaa", "bbc") == False
|
97628
|
from django.shortcuts import render_to_response
from django.template import RequestContext
from django.http import HttpResponseRedirect
from users.forms import LoginForm, RegistrationForm
import cass
def login(request):
    """Combined login/registration view.

    One template posts back with a 'kind' field that distinguishes login
    from registration; on success the username is stored in the session
    and the user is redirected to `next` (or the front page).
    """
    login_form = LoginForm()
    register_form = RegistrationForm()
    # NOTE(review): request.REQUEST (merged GET+POST) was removed in modern
    # Django; this code targets an old Django release — confirm before upgrading.
    next = request.REQUEST.get('next')
    if 'kind' in request.POST:
        if request.POST['kind'] == 'login':
            login_form = LoginForm(request.POST)
            if login_form.is_valid():
                username = login_form.get_username()
                request.session['username'] = username
                if next:
                    return HttpResponseRedirect(next)
                return HttpResponseRedirect('/')
        elif request.POST['kind'] == 'register':
            register_form = RegistrationForm(request.POST)
            if register_form.is_valid():
                # RegistrationForm.save() returns the new username here.
                username = register_form.save()
                request.session['username'] = username
                if next:
                    return HttpResponseRedirect(next)
                return HttpResponseRedirect('/')
    # Invalid submission (or plain GET): re-render both forms with errors.
    context = {
        'login_form': login_form,
        'register_form': register_form,
        'next': next,
    }
    return render_to_response(
        'users/login.html', context, context_instance=RequestContext(request))
def logout(request):
    """Forget the authenticated username and render the logout page."""
    # Dropping the session key is all that "logging out" means in this app.
    request.session.pop('username', None)
    ctx = RequestContext(request)
    return render_to_response('users/logout.html', {}, context_instance=ctx)
def find_friends(request):
    """Search for a user by exact username and show their friendship status."""
    friend_usernames = []
    # NOTE(review): request.user is indexed like a dict, so this project
    # appears to install custom auth middleware — confirm before changing.
    if request.user['is_authenticated']:
        # Include the current user so self-search doesn't offer "add friend".
        friend_usernames = cass.get_friend_usernames(
            request.session['username']) + [request.session['username']]
    q = request.GET.get('q')
    result = None
    searched = False
    if q is not None:
        searched = True
        try:
            result = cass.get_user_by_username(q)
            result = {
                'username': result.username,
                'friend': q in friend_usernames
            }
        except cass.DatabaseError:
            # Unknown username: fall through with result=None ("no match").
            pass
    context = {
        'q': q,
        'result': result,
        'searched': searched,
        'friend_usernames': friend_usernames,
    }
    return render_to_response(
        'users/add_friends.html', context, context_instance=RequestContext(request))
def modify_friend(request):
    """Add and/or remove friends for the logged-in user, then redirect to `next`."""
    # NOTE(review): request.REQUEST (merged GET+POST) is removed in modern Django.
    next = request.REQUEST.get('next')
    added = False
    removed = False
    # request.user treated as a dict — custom auth middleware assumed; confirm.
    if request.user['is_authenticated']:
        if 'add-friend' in request.POST:
            cass.add_friends(
                request.session['username'],
                [request.POST['add-friend']]
            )
            added = True
        if 'remove-friend' in request.POST:
            cass.remove_friends(
                request.session['username'],
                [request.POST['remove-friend']]
            )
            removed = True
    if next:
        return HttpResponseRedirect(next)
    context = {
        'added': added,
        'removed': removed,
    }
    return render_to_response(
        'users/modify_friend.html', context, context_instance=RequestContext(request))
|
97640
|
from django_webserver.management.commands.pyuwsgi import Command as uWSGICommand
class Command(uWSGICommand):
    """Management command that starts Nautobot under uWSGI.

    All behaviour is inherited from django_webserver's pyuwsgi command;
    only the help text is overridden.
    """
    help = "Start Nautobot uWSGI server."
|
97682
|
import testutil
import responses
@responses.activate
def test_search():
    """client.search() returns the mocked SOSL body plus a 200-status response."""
    # Queue the canned HTTP responses the client will hit, in call order.
    testutil.add_response("login_response_200")
    testutil.add_response("search_response_200")
    testutil.add_response("api_version_response_200")
    client = testutil.get_client()
    search_result = client.search(
        "FIND {sfdc_py} RETURNING Account(Id, Name) LIMIT 5")
    assert search_result[0] == testutil.mock_responses["search_response_200"]["body"]
    assert search_result[1].status == 200
@responses.activate
def test_search_with_proxy():
    """Same as test_search, but the client must carry the configured HTTPS proxy."""
    testutil.add_response("login_response_200")
    testutil.add_response("search_response_200")
    testutil.add_response("api_version_response_200")
    client = testutil.get_client_with_proxy()
    search_result = client.search(
        "FIND {sfdc_py} RETURNING Account(Id, Name) LIMIT 5")
    assert search_result[0] == testutil.mock_responses["search_response_200"]["body"]
    assert search_result[1].status == 200
    # Identity check: the response must reference the exact configured proxy mapping.
    assert search_result[1].proxies.get("https") is testutil.proxies.get("https")
|
97704
|
from flask import Blueprint, make_response, url_for
from portality import util
from portality.core import app
from portality import models
from portality.lib import dates
import json, requests, math, os, time
from datetime import datetime
blueprint = Blueprint('status', __name__)


@blueprint.route('/stats')
@util.jsonp
def stats():
    """Report this machine's health: inode usage, disk writability, hostname.

    Every probe is best-effort: failures are swallowed and simply omit the
    corresponding key from the JSON response.
    """
    res = {}
    # Get inode use
    try:
        st = os.statvfs('/')
        res['inode_used_pc'] = int((float(st.f_files-st.f_ffree)/st.f_files)*100)
        # could complete this by installing and using psutil but as disk and memory can currently
        # be monitored directly by DO, no current need - can change if we move from DO
        #res['disk_used_pc'] = int((float(st.f_blocks-st.f_bavail)/st.f_blocks)*100)
        #res['memory_used_pc'] = 0
    except:
        # statvfs may be unavailable on some platforms; omit the metric.
        pass
    # Test writing to filesystem
    ts = int(time.time())
    fn = '/tmp/status_test_write_' + str(ts) + '.txt'
    try:
        f = open(fn, "w")
        f.write("I am a test at " + str(ts))
        f.close()
        res['writable'] = True
    except:
        res['writable'] = False
    try:
        os.remove(fn)
    except:
        pass
    # Retrieve the hostname
    try:
        hn = os.uname()[1]
        res['host'] = hn
    except:
        pass
    # Return a JSON response
    resp = make_response(json.dumps(res))
    resp.mimetype = "application/json"
    return resp
@blueprint.route('/')
@util.jsonp
def status():
    """Aggregate health check: app machines, ES indices and background jobs.

    Builds a JSON document with an overall 'stable' flag plus human-readable
    notes. Designed for an external monitor (e.g. Uptime Robot) that greps
    the response for `'stable': True` and the expected note strings.
    Probes are deliberately best-effort (bare excepts) so one failing check
    never prevents the page from rendering.
    """
    res = {'stable': True, 'ping': {'apps': {}, 'indices': {}}, 'background': {'status': 'Background jobs are stable', 'info': []}, 'notes': []}
    # to get monitoring on this, use uptime robot or similar to check that the status page
    # contains the 'stable': True string and the following note strings
    app_note = 'apps reachable'
    app_unreachable = 0
    inodes_note = 'inode use on app machines below 95%'
    inodes_high = 0
    writable_note = 'app machines can write to disk'
    not_writable = 0
    #disk_note = 'disk use on app machines below 95%'
    #disk_high = 0
    #memory_note = 'memory use on app machines below 95%'
    #memory_high = 0
    es_note = 'indexes stable'
    es_unreachable = 0
    indexable_note = 'index accepts index/delete operations'
    cluster_note = 'cluster stable'
    # Poll each app machine's /stats endpoint (see stats() above).
    for addr in app.config.get('APP_MACHINES_INTERNAL_IPS',[]):
        if not addr.startswith('http'): addr = 'http://' + addr
        addr += url_for('.stats')
        r = requests.get(addr)
        res['ping']['apps'][addr] = r.status_code if r.status_code != 200 else r.json()
        try:
            if res['ping']['apps'][addr].get('inode_used_pc',0) >= 95:
                inodes_high += 1
                inodes_note = 'INODE GREATER THAN 95% ON ' + str(inodes_high) + ' APP MACHINES'
            if res['ping']['apps'][addr].get('writable',False) != True:
                not_writable += 1
                writable_note = 'WRITE FAILURE ON ' + str(not_writable) + ' APP MACHINES'
            #if res['ping']['apps'][addr].get('disk_used_pc',0) >= 95:
            #    disk_high += 1
            #    disk_note = 'DISK USE GREATER THAN 95% ON ' + disk_high + ' APP MACHINES'
            #if res['ping']['apps'][addr].get('memory_used_pc',0) >= 95:
            #    memory_high += 1
            #    memory_note = 'MEMORY USE GREATER THAN 95% ON ' + memory_high + ' APP MACHINES'
        except:
            # Non-200 entries are ints, so .get() raises — skip the detail checks.
            pass
        if r.status_code != 200:
            res['stable'] = False
            app_unreachable += 1
            app_note = str(app_unreachable) + ' APPS UNREACHABLE'
    res['notes'].append(app_note)
    res['notes'].append(inodes_note)
    res['notes'].append(writable_note)
    #res['notes'].append(disk_note)
    #res['notes'].append(memory_note)
    # check that all necessary ES nodes can actually be pinged from this machine
    for eddr in [app.config['ELASTIC_SEARCH_HOST']] if isinstance(app.config['ELASTIC_SEARCH_HOST'], str) else app.config['ELASTIC_SEARCH_HOST']:
        if not eddr.startswith('http'): eddr = 'http://' + eddr
        if not eddr.endswith(':9200'): eddr += ':9200'
        r = requests.get(eddr)
        res['ping']['indices'][eddr] = r.status_code
        # NOTE(review): this overwrites 'stable' on every iteration, so only the
        # last node's result sticks unless the explicit False below fires.
        res['stable'] = r.status_code == 200
        if r.status_code != 200:
            res['stable'] = False
            es_unreachable += 1
            es_note = str(es_unreachable) + ' INDEXES UNREACHABLE'
    res['notes'].append(es_note)
    # query ES for cluster health and nodes up
    es_addr = str(app.config['ELASTIC_SEARCH_HOST'][0] if not isinstance(app.config['ELASTIC_SEARCH_HOST'], str) else app.config['ELASTIC_SEARCH_HOST']).rstrip('/')
    if not es_addr.startswith('http'): es_addr = 'http://' + es_addr
    if not es_addr.endswith(':9200'): es_addr += ':9200'
    try:
        es = requests.get(es_addr + '/_status').json()
        res['index'] = { 'cluster': {}, 'shards': { 'total': es['_shards']['total'], 'successful': es['_shards']['successful'] }, 'indices': {} }
        for k, v in es['indices'].items():
            res['index']['indices'][k] = { 'docs': v['docs']['num_docs'], 'size': int(math.ceil(v['index']['primary_size_in_bytes']) / 1024 / 1024) }
        try:
            ces = requests.get(es_addr + '/_cluster/health')
            res['index']['cluster'] = ces.json()
            res['stable'] = res['index']['cluster']['status'] == 'green'
            if res['index']['cluster']['status'] != 'green': cluster_note = 'CLUSTER UNSTABLE'
        except:
            res['stable'] = False
            cluster_note = 'CLUSTER UNSTABLE'
    except:
        res['stable'] = False
        cluster_note = 'CLUSTER UNSTABLE'
    res['notes'].append(cluster_note)
    # Optional write-test against the index, currently disabled.
    if False: # remove this False if happy to test write to the index (could be a setting)
        if res['stable'] and False:
            try:
                ts = str(int(time.time()))
                test_index = 'status_test_writable_' + ts
                test_type = 'test_' + ts
                test_id = ts
                rp = requests.put(es_addr + '/' + test_index + '/' + test_type + '/' + test_id, json={'hello': 'world'})
                if rp.status_code != 201:
                    indexable_note = 'NEW INDEX WRITE OPERATION FAILED TO WRITE, RETURNED ' + str(rp.status_code)
                else:
                    try:
                        rr = requests.get(es_addr + '/' + test_index + '/' + test_type + '/' + test_id).json()
                        if rr['hello'] != 'world':
                            indexable_note = 'INDEX READ DID NOT FIND EXPECTED VALUE IN NEW WRITTEN RECORD'
                        try:
                            rd = requests.delete(es_addr + '/' + test_index)
                            if rd.status_code != 200:
                                indexable_note = 'INDEX DELETE OF TEST INDEX DID NOT RETURNED UNEXPECTED STATUS CODE OF ' + str(rd.status_code)
                            try:
                                rg = requests.get(es_addr + '/' + test_index)
                                if rg.status_code != 404:
                                    indexable_note = 'INDEX READ AFTER DELETE TEST RETURNED UNEXPECTED STATUS CODE OF ' + str(rg.status_code)
                            except:
                                pass
                        except:
                            indexable_note = 'INDEX DELETE OF TEST INDEX FAILED'
                    except:
                        indexable_note = 'INDEX READ OF NEW WRITTEN RECORD DID NOT SUCCEED'
            except:
                indexable_note = 'INDEX/DELETE OPERATIONS CAUSED EXCEPTION'
        else:
            indexable_note = 'INDEX/DELETE OPERATIONS NOT TESTED DUE TO SYSTEM ALREADY UNSTABLE'
    res['notes'].append(indexable_note)
    # check background jobs
    try:
        # check if journal_csv, which should run at half past every hour on the main queue, has completed in the last 2 hours (which confirms main queue)
        qcsv = {"query": {"bool": {"must": [
            {"term":{"status":"complete"}},
            {"term":{"action":"journal_csv"}},
            {"range": {"created_date": {"gte": dates.format(dates.before(datetime.utcnow(), 7200))}}}
        ]}}, "size": 1, "sort": {"created_date": {"order": "desc"}}}
        rcsv = models.BackgroundJob.send_query(qcsv)['hits']['hits'][0]['_source']
        res['background']['info'].append('journal_csv has run in the last 2 hours, confirming main queue is running')
    except:
        res['background']['status'] = 'Unstable'
        res['background']['info'].append('Error when trying to check background job journal_csv in the last 2 hours - could be a problem with this job or with main queue')
        res['stable'] = False
    try:
        # check if prune_es_backups, which should run at 9.30am every day, has completed in the last 24.5 hours (which confirms long running queue)
        qprune = {"query": {"bool": {"must": [
            {"term": {"status": "complete"}},
            {"term": {"action": "prune_es_backups"}},
            {"range": {"created_date": {"gte": dates.format(dates.before(datetime.utcnow(), 88200))}}}
        ]}}, "size": 1, "sort": {"created_date": {"order": "desc"}}}
        rprune = models.BackgroundJob.send_query(qprune)['hits']['hits'][0]['_source']
        res['background']['info'].append('prune_es_backups has run in the last 24.5 hours, confirming long running queue is running')
    except:
        res['background']['status'] = 'Unstable'
        res['background']['info'].append('Error when trying to check background job prune_es_backups in the last 24 hours - could be a problem with this job or with long running queue')
        res['stable'] = False
    # try: #fixme: commented out by SE - this isn't working well, it should probably be a background task itself
    #     # remove old jobs if there are too many - remove anything over six months and complete
    #     old_seconds = app.config.get("STATUS_OLD_REMOVE_SECONDS", 15552000)
    #     qbg = {"query": {"bool": {"must": [
    #         {"term": {"status": "complete"}},
    #         {"range": {"created_date": {"lte": dates.format(dates.before(datetime.utcnow(), old_seconds))}}}
    #     ]}}, "size": 10000, "sort": {"created_date": {"order": "desc"}}, "fields": "id"}
    #     rbg = models.BackgroundJob.send_query(qbg)
    #     for job in rbg.get('hits', {}).get('hits', []):
    #         models.BackgroundJob.remove_by_id(job['fields']['id'][0])
    #     res['background']['info'].append('Removed {0} old complete background jobs'.format(rbg.get('hits', {}).get('total', 0)))
    # except:
    #     res['background']['status'] = 'Unstable'
    #     res['background']['info'].append('Error when trying to remove old background jobs')
    #     res['stable'] = False
    try:
        # alert about errors in the last ten minutes - assuming we are going to use uptimerobot to check this every ten minutes
        error_seconds = app.config.get("STATUS_ERROR_CHECK_SECONDS", 600)
        error_ignore = app.config.get("STATUS_ERROR_IGNORE", []) # configure a list of strings that denote something to ignore
        error_ignore = [error_ignore] if isinstance(error_ignore, str) else error_ignore
        error_ignore_fields = app.config.get("STATUS_ERROR_IGNORE_FIELDS_TO_CHECK", False) # which fields to get in the query, to check for the strings provided above
        error_ignore_fields = [error_ignore_fields] if isinstance(error_ignore_fields, str) else error_ignore_fields
        error_means_unstable = app.config.get("STATUS_ERROR_MEANS_UNSTABLE", True)
        qer = {"query": {"bool": {"must": [
            {"term": {"status": "error"}},
            {"range": {"created_date": {"gte": dates.format(dates.before(datetime.utcnow(), error_seconds))}}}
        ]}}, "size": 10000, "sort": {"created_date": {"order": "desc"}}} # this could be customised with a fields list if we only want to check certain fields for ignore types
        if error_ignore_fields != False:
            qer["fields"] = error_ignore_fields
        rer = models.BackgroundJob.send_query(qer)
        error_count = 0
        for job in rer.get('hits', {}).get('hits', []):
            countable = True
            jsj = json.dumps(job)
            for ig in error_ignore:
                if ig in jsj:
                    countable = False
                    break
            if countable:
                error_count += 1
        if error_count != 0:
            res['background']['status'] = 'Unstable'
            res['background']['info'].append('Background jobs are causing errors')
            # NOTE(review): this sets 'stable' to the *flag value* — when
            # STATUS_ERROR_MEANS_UNSTABLE is True, errors mark the system
            # stable=True, which looks inverted. Confirm intended semantics.
            res['stable'] = error_means_unstable
            emsg = 'Found {0} background jobs in error status in the last {1} seconds'.format(error_count, error_seconds)
            if len(error_ignore) != 0:
                emsg += '. Ignoring ' + ', '.join(error_ignore) + ' which reduced the error count from ' + str(rer.get('hits', {}).get('total', 0))
            res['background']['info'].append(emsg)
    except:
        res['background']['status'] = 'Unstable'
        res['background']['info'].append('Error when trying to check background jobs for errors')
        res['stable'] = False
    resp = make_response(json.dumps(res))
    resp.mimetype = "application/json"
    return resp
#{"query": {"bool": {"must": [{"term":{"status":"complete"}}]}}, "size": 10000, "sort": {"created_date": {"order": "desc"}}, "fields": "id"}
|
97708
|
import myprofiler
import pytest
def test_SummingCollector():
    """SummingCollector accumulates counts; turn() must not reset them."""
    collector = myprofiler.SummingCollector()
    assert collector.summary() == []
    collector.append("foo")
    collector.append("bar")
    collector.append("foo")
    # Summary is (item, count) pairs ordered by frequency.
    assert collector.summary() == [("foo", 2), ("bar", 1)]
    collector.turn()
def test_CappedCollector():
    """CappedCollector(3) keeps a sliding window of the last 3 turns."""
    collector = myprofiler.CappedCollector(3)
    assert collector.summary() == []
    collector.append("foo")
    collector.append("bar")
    collector.turn()
    collector.append("foo")
    collector.turn()
    assert collector.summary() == [("foo", 2), ("bar", 1)]
    collector.turn()
    # The first foo/bar turn has now aged out of the 3-turn window.
    assert collector.summary() == [("foo", 1)]
    collector.turn()
    assert collector.summary() == []
class DummyProcesslist(object):
    """Canned stand-in for myprofiler.processlist.

    Each call returns the next batch from `querylist`; once the batches run
    out, StopIteration escapes from the call, which the profile tests rely
    on to terminate the polling loop.
    """
    querylist = [
        ["foo"] * 3,
        ["bar"] * 2,
        ["baz"],
    ]

    def __init__(self):
        self._pending = iter(self.querylist)

    def __call__(self, con):
        # The connection argument is accepted for interface compatibility only.
        return next(self._pending)
def test_profile(monkeypatch):
    """profile() with no cap produces cumulative summaries after each poll."""
    monkeypatch.setattr(myprofiler, 'processlist', DummyProcesslist())
    summaries = []
    def show_summary(collector, num_summary):
        # Capture each summary instead of printing it.
        summaries.append(collector.summary())
    monkeypatch.setattr(myprofiler, 'show_summary', show_summary)
    try:
        myprofiler.profile(None, 1, 0, 0, None)
    except StopIteration:
        # DummyProcesslist exhausts after three polls, which ends the run.
        pass
    assert len(summaries) == 3
    assert summaries[0] == [('foo', 3)]
    assert summaries[1] == [('foo', 3), ('bar', 2)]
    assert summaries[2] == [('foo', 3), ('bar', 2), ('baz', 1)]
def test_profile_capped(monkeypatch):
    """profile() with a cap of 2 turns drops the oldest poll from the summary."""
    monkeypatch.setattr(myprofiler, 'processlist', DummyProcesslist())
    summaries = []
    def show_summary(collector, num_summary):
        summaries.append(collector.summary())
    monkeypatch.setattr(myprofiler, 'show_summary', show_summary)
    try:
        myprofiler.profile(None, 1, 2, 0, None)
    except StopIteration:
        pass
    assert len(summaries) == 3
    assert summaries[0] == [('foo', 3)]
    assert summaries[1] == [('foo', 3), ('bar', 2)]
    # With cap=2 the first poll ('foo') has aged out by the third summary.
    assert summaries[2] == [('bar', 2), ('baz', 1)]
def test_normalize():
    """normalize_query collapses literal IN-lists: short lists become one
    placeholder per element (S/N), long lists collapse to '...S' / '...N'."""
    normalize = myprofiler.normalize_query
    assert normalize("IN ('a', 'b', 'c')") == "IN (S, S, S)"
    assert normalize("IN ('a', 'b', 'c', 'd', 'e')") == "IN (...S)"
    assert normalize("IN (1, 2, 3)") == "IN (N, N, N)"
    assert normalize("IN (1, 2, 3, 4, 5)") == "IN (...N)"
|
97736
|
from kivy.uix.label import Label
from kivy.uix.widget import Widget
from utils import import_kv
# Load the matching .kv layout file for the widgets defined below.
import_kv(__file__)
class BaseDescriptionWidget(Widget):
    """Common base for description widgets; visuals live in the .kv file."""
    pass
class TextDescriptionWidget(Label, BaseDescriptionWidget):
    """Plain-text description rendered with the default font."""
    pass
class SymbolTextDescriptionWidget(Label, BaseDescriptionWidget):
    """Description label that renders icon-font symbol characters."""
    def __init__(self, **kwargs):
        super(SymbolTextDescriptionWidget, self).__init__(**kwargs)
        # Switch to the glyphicons font so symbol codepoints display as icons.
        self.font_name = "glyphicons"
|
97761
|
from AppKit import *
from vanillaButton import ImageButton
class GradientButton(ImageButton):
    """ImageButton variant drawn with AppKit's small-square bezel style."""
    # Overrides the bezel used by the vanilla ImageButton base class.
    nsBezelStyle = NSSmallSquareBezelStyle
|
97811
|
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import shutil
import sys
import tempfile
from observations.r.utilities2 import utilities2
def test_utilities2():
    """Test module utilities2.py by downloading
    utilities2.csv and testing shape of
    extracted data has 117 rows and 19 columns
    """
    test_path = tempfile.mkdtemp()
    x_train, metadata = utilities2(test_path)
    try:
        assert x_train.shape == (117, 19)
    except Exception:
        # Clean up the downloaded data, then re-raise the original failure.
        # (The previous code called `raise()`, which itself raises
        # "TypeError: exceptions must derive from BaseException" and masked
        # the real assertion error; a bare `raise` re-raises correctly.)
        shutil.rmtree(test_path)
        raise
|
97852
|
from typing import Callable, Dict, Optional, Sequence, Tuple, Union
import pytest # type: ignore
from looker_sdk import methods, models
from henry.modules import exceptions, fetcher
@pytest.fixture(name="fc")
def initialize() -> fetcher.Fetcher:
    """Returns a Fetcher instance configured from the test looker.ini."""
    options = fetcher.Input(
        command="some_cmd", config_file="looker.ini", section="Looker"
    )
    return fetcher.Fetcher(options)
def test_get_projects_returns_projects(fc: fetcher.Fetcher):
    """fetcher.get_projects() should return a list of projects."""
    projects = fc.get_projects()
    assert isinstance(projects, list)
    assert isinstance(projects[0], models.Project)
def test_get_projects_filters(fc: fetcher.Fetcher, test_project_name):
    """fetcher.get_projects() should be able to filter on project."""
    projects = fc.get_projects(test_project_name)
    assert isinstance(projects, list)
    assert len(projects) == 1
    assert projects[0].name == test_project_name
def test_get_projects_throws_if_project_does_not_exist(fc: fetcher.Fetcher):
    """fetcher.get_projects() should error if the filter is invalid."""
    with pytest.raises(exceptions.NotFoundError) as exc:
        fc.get_projects("BadProject")
    # The spelling "occured" matches the library's actual error message.
    assert "An error occured while getting projects." in str(exc.value)
def test_get_models_returns_models(fc: fetcher.Fetcher):
    """fetcher.get_models() should return a list of models."""
    ml = fc.get_models()
    assert isinstance(ml, list)
    assert isinstance(ml[0], models.LookmlModel)
def test_get_models_filters(fc: fetcher.Fetcher, test_project_name, test_model):
    """fetcher.get_models() should be able to filter on project or model."""
    ml = fc.get_models(project=test_project_name)
    assert all(m.project_name == test_project_name for m in ml)
    ml = fc.get_models(model=test_model["name"])
    assert all(m.name == test_model["name"] for m in ml)
    # Both filters combined must intersect, not union.
    ml = fc.get_models(project=test_project_name, model=test_model["name"])
    assert all(
        m.project_name == test_project_name and m.name == test_model["name"] for m in ml
    )
@pytest.mark.parametrize(
    "project, model", [(None, "BadModel")],
)
def test_get_models_throws_if_model_does_not_exist(fc: fetcher.Fetcher, project, model):
    """fetcher.get_models() should throw if a model is not found."""
    with pytest.raises(exceptions.NotFoundError) as exc:
        fc.get_models(project=project, model=model)
    assert "An error occured while getting models." in str(exc.value)
@pytest.mark.parametrize(
    "project, model", [("BadProject", None), ("BadProject", "BadModel")],
)
def test_get_models_throws_if_project_does_not_exist(
    fc: fetcher.Fetcher, project, model
):
    """fetcher.get_models() should throw if the project is not found."""
    with pytest.raises(exceptions.NotFoundError) as exc:
        fc.get_models(project=project, model=model)
    assert "An error occured while getting projects." in str(exc.value)
def test_get_used_models(fc: fetcher.Fetcher, test_model):
    """fetcher.get_used_models() should return models that have queries against them."""
    used_models = fc.get_used_models()
    assert isinstance(used_models, dict)
    assert len(used_models) > 0
    # Mapping of model name -> query count.
    assert all(type(model_name) == str for model_name in used_models.keys())
    assert all(type(query_count) == int for query_count in used_models.values())
    assert test_model["name"] in used_models.keys()
def test_get_explores(fc: fetcher.Fetcher):
    """fetcher.get_explores() should return a list of explores."""
    explores = fc.get_explores()
    assert isinstance(explores, list)
    assert len(explores) > 0
    assert isinstance(explores[0], models.LookmlModelExplore)
def test_get_explores_filters(fc: fetcher.Fetcher):
    """fetcher.get_explores() should be able to filter on model and/or explore."""
    explores = fc.get_explores(model="henry_dusty")
    assert all(e.model_name == "henry_dusty" for e in explores)
    explores = fc.get_explores(model="henry_qa", explore="explore_2_joins_all_used")
    assert all(
        e.model_name == "henry_qa" and e.name == "explore_2_joins_all_used"
        for e in explores
    )
@pytest.mark.parametrize(
    "model, explore, msg",
    [
        ("non_existing_model", None, "An error occured while getting models."),
        (
            "non_existing_model",
            "non_existing_explore",
            "An error occured while getting models/explores.",
        ),
    ],
)
def test_get_explores_throws_if_model_or_explore_does_not_exist(
    fc: fetcher.Fetcher, model: Optional[str], explore: Optional[str], msg: str
):
    """fetcher.get_explores() should throw if an explore/model is not found."""
    with pytest.raises(exceptions.NotFoundError) as exc:
        fc.get_explores(model=model, explore=explore)
    assert msg in str(exc.value)
def test_get_used_explores(fc: fetcher.Fetcher, test_model, test_used_explore_names):
    """fetcher.get_used_explores() should return all used explores."""
    used_explores = fc.get_used_explores(model=test_model["name"])
    assert isinstance(used_explores, dict)
    assert all(e in test_used_explore_names for e in used_explores)
def test_get_unused_explores(fc: fetcher.Fetcher, test_model, test_unused_explores):
    """fetcher.get_unused_explores() should return all unused explores."""
    unused_explores = fc.get_unused_explores(model=test_model["name"])
    assert all(e in test_unused_explores for e in unused_explores)
def test_get_explore_fields_gets_fields(
    fc: fetcher.Fetcher, test_model, test_explores_stats
):
    """fetcher.get_explore_fields() should return an explore's fields."""
    test_explore = test_explores_stats[0]
    explore = fc.get_explores(model=test_model["name"], explore=test_explore["name"])
    assert isinstance(explore, list)
    explore = explore[0]
    assert isinstance(explore, models.LookmlModelExplore)
    assert explore.model_name == test_model["name"]
    assert explore.name == test_explore["name"]
    fields = fc.get_explore_fields(explore)
    assert isinstance(fields, list)
    assert fields == test_explore["all_fields"]
def test_get_explore_fields_gets_fields_for_dimension_or_measure_only_explores(
    fc: fetcher.Fetcher, test_model, test_dimensions_or_measures_only_explores
):
    """fetcher.get_explore_fields() should return when an explore has only dimensions
    or only measures.
    """
    expected = test_dimensions_or_measures_only_explores[0]
    explore = fc.get_explores(model=test_model["name"], explore=expected["name"])
    assert isinstance(explore, list)
    actual = explore[0]
    assert actual.name == expected["name"]
    # Sanity check: the fixture explore must not have both dimensions and measures.
    assert not (actual.fields.dimensions and actual.fields.measures)
    expected_fields = [f["name"] for f in expected["fields"]]
    actual_fields = fc.get_explore_fields(actual)
    assert actual_fields == expected_fields
def test_get_explore_field_stats(
    fc: fetcher.Fetcher,
    looker_sdk: methods.LookerSDK,
    test_model,
    test_used_explore_names,
    test_explores_stats,
):
    """fetcher.get_explore_field_stats() should get the stats of all fields in
    an explore.
    """
    explore = fc.get_explores(
        model=test_model["name"], explore=test_used_explore_names[0]
    )[0]
    actual_stats = fc.get_explore_field_stats(explore)
    assert isinstance(actual_stats, dict)
    # Find the fixture entry matching the explore under test.
    for e in test_explores_stats:
        if e["name"] == test_used_explore_names[0]:
            expected_stats = e
    assert all(actual_stats[k] == 0 for k in expected_stats["unused_fields"])
    assert all(actual_stats[k] > 0 for k in expected_stats["used_fields"])
def test_get_explore_join_stats(fc: fetcher.Fetcher, test_model):
    """fetcher.get_explore_join_stats() should return the stats of all joins in
    an explore.
    """
    explore = fc.get_explores(
        model=test_model["name"], explore="explore_2_joins_1_used"
    )[0]
    # Per-field query counts: join1's fields are used, join2's are not.
    field_stats = {
        "explore_2_joins_1_used.d1": 10,
        "explore_2_joins_1_used.d2": 5,
        "explore_2_joins_1_used.d3": 0,
        "explore_2_joins_1_used.m1": 0,
        "join1.d1": 10,
        "join1.d2": 10,
        "join1.d3": 10,
        "join1.m1": 0,
        "join2.d1": 0,
        "join2.d2": 0,
        "join2.d3": 0,
        "join2.m1": 0,
    }
    join_stats = fc.get_explore_join_stats(explore=explore, field_stats=field_stats)
    assert isinstance(join_stats, dict)
    assert len(join_stats) == 2
    # Join usage is the sum of its fields' query counts.
    assert join_stats == {"join1": 30, "join2": 0}
@pytest.mark.parametrize(
    "limit, input_data, expected_result",
    [
        (
            None,
            [{"a": 1}, {"b": 2}, {"c": 3}, {"d": 4}, {"e": 5}],
            [{"a": 1}, {"b": 2}, {"c": 3}, {"d": 4}, {"e": 5}],
        ),
        (2, [{"a": 1}, {"b": 2}, {"c": 3}, {"d": 4}, {"e": 5}], [{"a": 1}, {"b": 2}]),
    ],
)
def test_limit(
    fc: fetcher.Fetcher,
    limit: Optional[int],
    input_data: Sequence[Dict[str, Union[bool, int, str]]],
    # Annotation fixed: the expected values are dicts, not ints.
    expected_result: Sequence[Dict[str, Union[bool, int, str]]],
):
    """_limit() should truncate to `limit` rows, or pass data through when None."""
    fc.limit = limit
    assert fc._limit(input_data) == expected_result
@pytest.mark.parametrize(
    "data, condition, expected_output",
    [
        ({"e1": 0, "e2": 0, "e3": 5, "e4": 10, "e5": 15}, None, {"e1": 0, "e2": 0}),
        (
            {"e1": 0, "e2": 0, "e3": 5, "e4": 10, "e5": 15},
            lambda x: x[1] >= 10,
            {"e4": 10, "e5": 15},
        ),
        ({"e1": 0, "e2": 0, "e3": 5, "e4": 10, "e5": 15}, lambda x: x[1] >= 100, {}),
        (None, lambda x: x[1] > 0, {}),
        (None, None, {}),
    ],
)
def test_filter(
    fc: fetcher.Fetcher,
    data: Optional[Dict[str, int]],
    # Annotation fixed: None is a parametrized value for the condition.
    condition: Optional[Callable],
    expected_output: Dict[str, int],
):
    """_filter() should keep items matching `condition` (default: value == 0)."""
    assert fc._filter(data, condition) == expected_output
# Shared fixture rows for the sort tests below; "join count" drives the
# numeric-sort case and "explore" the lexicographic one.
DATA: Sequence[Dict[str, Union[str, int]]] = [
    {"explore": "a", "join count": 1},
    {"explore": "b", "join count": 0},
    {"explore": "c", "join count": 2},
    {"explore": "d", "join count": 3},
]
@pytest.mark.parametrize(
    "sortkey, expected_output",
    [
        (("explore", "asc"), DATA),
        (
            ("join count", "desc"),
            [
                {"explore": "d", "join count": 3},
                {"explore": "c", "join count": 2},
                {"explore": "a", "join count": 1},
                {"explore": "b", "join count": 0},
            ],
        ),
    ],
)
def test_sort(
    fc: fetcher.Fetcher,
    sortkey: Tuple[str, str],
    expected_output: Sequence[Dict[str, Union[int, str, bool]]],
):
    """_sort() should order DATA by the configured (field, direction) key."""
    fc.sortkey = sortkey
    assert fc._sort(DATA) == expected_output
@pytest.mark.parametrize(
    "sortkey", [("explore", "invalid"), ("invalid field", "asc")]
)
def test_sort_throws_for_invalid_sort_keys(
    fc: fetcher.Fetcher, sortkey: Tuple[str, str]
):
    """_sort() should raise KeyError for an unknown field or direction."""
    # The assignment stays inside the context manager on purpose: either the
    # setter or the sort itself may be the raising step.
    with pytest.raises(KeyError):
        fc.sortkey = sortkey
        fc._sort(DATA)
|
97886
|
class Book:
    """A single book and its bibliographic metadata."""

    # Attribute names serialized by to_json(), in output order.
    _FIELDS = ('title', 'author', 'date', 'genre')

    def __init__(self, title, author, date, genre):
        """Initialize a Book.

        :param title: title of the Book
        :type title: str
        :param author: author of the book
        :type author: str
        :param date: the date at which the book has been published
        :type date: str
        :param genre: the subject/type of the book
        :type genre: str
        """
        self.title = title
        self.author = author
        self.date = date
        self.genre = genre

    def to_json(self):
        """Serialize the book to a plain dict.

        :returns: a dict (json) containing the information of the book
        :rtype: dict
        """
        return {name: getattr(self, name) for name in self._FIELDS}

    def __eq__(self, other):
        # Two books are equal when all serialized fields match.
        return self.to_json() == other.to_json()

    def __repr__(self):
        return str(self.to_json())

    def __str__(self):
        return str(self.to_json())
|
97891
|
from __future__ import absolute_import
__version__ = "0.3.6"
from pyfor import cloud
from pyfor import rasterizer
from pyfor import gisexport
from pyfor import clip
from pyfor import ground_filter
from pyfor import collection
from pyfor import voxelizer
from pyfor import metrics
|
98000
|
import os
import numpy as np
from pyfftw.builders import rfft
from scipy.interpolate import interp1d
from scipy.special import gamma
from scipy.integrate import quad
import matplotlib.pyplot as plt
class FFTLog(object):
    """FFTLog: decompose a function sampled on a log-spaced grid into
    power-law components ``x**Pow[i]`` via an FFT in log(x).

    Required kwargs: ``Nmax`` (grid size), ``xmin``/``xmax`` (grid bounds),
    ``bias`` (power-law bias applied before transforming).
    """

    def __init__(self, **kwargs):
        self.Nmax = kwargs['Nmax']
        self.xmin = kwargs['xmin']
        self.xmax = kwargs['xmax']
        self.bias = kwargs['bias']
        # Logarithmic step such that x[0] == xmin and x[Nmax-1] == xmax.
        self.dx = np.log(self.xmax/self.xmin) / (self.Nmax-1.)
        self.setx()
        self.setPow()

    def setx(self):
        """Build the log-spaced evaluation grid."""
        self.x = self.xmin * np.exp(np.arange(self.Nmax) * self.dx)

    def setPow(self):
        """Complex power-law exponents, one per FFT frequency."""
        self.Pow = self.bias + 1j * 2. * np.pi / (self.Nmax * self.dx) * (np.arange(self.Nmax+1) - self.Nmax/2.)

    def Coef(self, xin, f, window=1, co=common):
        """Return the complex FFTLog coefficients of ``f`` sampled at ``xin``.

        Outside the range of ``xin``, ``f`` is power-law extrapolated.
        ``co`` is unused in this method; its default ``common`` and the
        ``CoefWindow`` helper are defined elsewhere in this module.
        """
        interpfunc = interp1d(xin, f, kind='cubic')
        if xin[0] > self.x[0]:
            print('low extrapolation')
            # BUGFIX: bare `log` was a NameError (only `numpy as np` is
            # imported here); use np.log for the power-law slope estimate.
            nslow = (np.log(f[1]) - np.log(f[0])) / (np.log(xin[1]) - np.log(xin[0]))
            Aslow = f[0] / xin[0]**nslow
        if xin[-1] < self.x[-1]:
            print('high extrapolation')
            nshigh = (np.log(f[-1]) - np.log(f[-2])) / (np.log(xin[-1]) - np.log(xin[-2]))
            Ashigh = f[-1] / xin[-1]**nshigh
        fx = np.empty(self.Nmax)
        Coef = np.empty(self.Nmax+1, dtype=complex)
        for i in range(self.Nmax):
            # Power-law extrapolate below/above the input range, otherwise
            # cubic-interpolate; the bias factor de-tilts f before the FFT.
            if xin[0] > self.x[i]:
                fx[i] = Aslow * self.x[i]**nslow * np.exp(-self.bias*i*self.dx)
            elif xin[-1] < self.x[i]:
                fx[i] = Ashigh * self.x[i]**nshigh * np.exp(-self.bias*i*self.dx)
            else:
                fx[i] = interpfunc(self.x[i]) * np.exp(-self.bias*i*self.dx)
        # pyfftw builder: rfft(fx) returns a callable plan; call it to run.
        # (The dead np.empty preallocation of `tmp` was removed.)
        tmp = rfft(fx)()
        for i in range(self.Nmax+1):
            # Unfold the half-spectrum into Nmax+1 coefficients (negative
            # frequencies are conjugates) and rescale by xmin**(-Pow).
            if i < self.Nmax/2:
                Coef[i] = np.conj(tmp[int(self.Nmax/2-i)]) * self.xmin**(-self.Pow[i]) / float(self.Nmax)
            else:
                Coef[i] = tmp[int(i-self.Nmax/2)] * self.xmin**(-self.Pow[i]) / float(self.Nmax)
        if window is not None:
            Coef = Coef*CoefWindow(self.Nmax, window=window)
        else:
            # No window: halve the endpoint coefficients instead.
            Coef[0] /= 2.
            Coef[self.Nmax] /= 2.
        return Coef

    def sumCoefxPow(self, xin, f, x, window=1):
        """Evaluate the FFTLog power-law expansion of ``f`` at the points ``x``."""
        Coef = self.Coef(xin, f, window=window)
        fFFT = np.empty_like(x)
        for i, xi in enumerate(x):
            fFFT[i] = np.real(np.sum(Coef * xi**self.Pow))
        return fFFT
|
98011
|
from django.shortcuts import render
from django.views.generic.base import View
from base.models import SiteInfo
class IndexView(View):
    """Render the landing page and seed the session with the site access hash."""

    def get(self, request):
        # First live SiteInfo row. NOTE(review): raises IndexError when no
        # live row exists — consider .first() plus a guard if that can happen.
        site_infos = SiteInfo.objects.all().filter(is_live=True)[0]
        context = {
            'site_infos': site_infos
        }
        # Stored for later comparison against a user-supplied password —
        # presumably checked by another view; verify against callers.
        request.session['__access_auth__'] = site_infos.access_password_encrypt
        return render(request, 'index.html', context)
|
98028
|
from .. import util
import duo_client.admin
from .base import TestAdmin
class TestPhones(TestAdmin):
    """Tests for the phones listing endpoints of the Admin API client."""

    def _check_phones_request(self, response, limit, offset):
        """Assert *response* is a GET /admin/v1/phones request carrying the
        given pagination parameters (as strings)."""
        self.assertEqual(response['method'], 'GET')
        uri, args = response['uri'].split('?')
        self.assertEqual(uri, '/admin/v1/phones')
        self.assertEqual(
            util.params_to_dict(args),
            {
                'account_id': [self.client.account_id],
                'limit': [limit],
                'offset': [offset],
            })

    def test_get_phones_generator(self):
        """The generator's first page request uses the default pagination."""
        generator = self.client_list.get_phones_generator()
        self._check_phones_request(next(generator), '100', '0')

    def test_get_phones(self):
        """Listing without pagination params falls back to the defaults."""
        self._check_phones_request(self.client_list.get_phones()[0], '100', '0')

    def test_get_phones_with_limit(self):
        """An explicit limit is forwarded; offset defaults to 0."""
        self._check_phones_request(
            self.client_list.get_phones(limit=20)[0], '20', '0')

    def test_get_phones_with_limit_offset(self):
        """Both limit and offset are forwarded when supplied together."""
        self._check_phones_request(
            self.client_list.get_phones(limit=20, offset=2)[0], '20', '2')

    def test_get_phones_with_offset(self):
        """Offset alone yields the default pagination — presumably the client
        ignores offset unless limit is also given."""
        self._check_phones_request(
            self.client_list.get_phones(offset=9001)[0], '100', '0')
|
98072
|
# Benchmark driver for the pyaf M-competition datasets. Earlier competitions
# (M1–M3) are kept below as commented-out variants; only the M4 FINANCE
# subset is exercised here.
import pyaf.Bench.TS_datasets as tsds
import pyaf.Bench.MComp as mcomp
#tester1 = mcomp.cMComp_Tester(tsds.load_M1_comp());
#tester1.testSignals('')
#tester1.testAllSignals()
#tester2 = mcomp.cMComp_Tester(tsds.load_M2_comp());
#tester1.testSignals('')
#tester2.testAllSignals()
#tester3 = mcomp.cMComp_Tester(tsds.load_M3_Y_comp());
#tester1.testSignals('')
#tester3.testAllSignals()
#tester4 = mcomp.cMComp_Tester(tsds.load_M3_Q_comp());
#tester1.testSignals('')
#tester4.testAllSignals()
#tester5 = mcomp.cMComp_Tester(tsds.load_M3_M_comp());
#tester1.testSignals('')
#tester5.testAllSignals()
#tester6 = mcomp.cMComp_Tester(tsds.load_M3_Other_comp());
#tester1.testSignals('')
#tester6.testAllSignals()
# Run only the M4 FINANCE subset, signal 'FIN1'.
tester7 = mcomp.cMComp_Tester(tsds.load_M4_comp("FINANCE") , "M4COMP");
tester7.testSignals('FIN1')
# tester7.testAllSignals()
|
98101
|
import asyncio
from irrexplorer.backends.bgp import BGPImporter
from irrexplorer.backends.rirstats import RIRStatsImporter
from irrexplorer.state import RIR
async def main():
    """
    Run an import for all backends with local data.
    All imports are run "simultaneously" (one CPU, but async)
    """
    # One rirstats import per RIR, plus a single BGP table import.
    coros = [RIRStatsImporter(rir).run_import() for rir in RIR]
    coros.append(BGPImporter().run_import())
    await asyncio.gather(*coros)


if __name__ == "__main__":
    asyncio.run(main())
|
98106
|
import json
import os
import sys
import argparse
import subprocess
import tempfile
import shutil
from collections import namedtuple
from molotov import __version__
from molotov.run import run
def clone_repo(github):
    """Clone the given Git repository URL into the current directory.

    Security fix (was flagged "XXX security"): the URL is passed as an
    argument-list element with shell=False, so it is never interpreted
    by a shell.
    """
    subprocess.check_call(['git', 'clone', github, '.'])
def create_virtualenv(virtualenv, python):
    """Create a virtualenv named ``venv`` with the given interpreter.

    Security fix (was flagged "XXX security"): executables are passed as
    an argument list with shell=False instead of an interpolated shell
    string.
    """
    subprocess.check_call([virtualenv, '--python', python, 'venv'])
def install_reqs(reqfile):
    """pip-install *reqfile* into the local ./venv.

    Uses an argument list with shell=False so the requirements path is
    never shell-interpreted.
    """
    subprocess.check_call(['./venv/bin/pip', 'install', '-r', reqfile])
# Fallback option values merged into each test's options before invoking
# molotov (see run_test below).
_DEFAULTS = {'processes': False, 'verbose': False, 'scenario': 'loadtest.py',
             'users': 1, 'duration': 10, 'quiet': False,
             'statsd': False}
def run_test(**options):
    """Run a molotov test described by *options*, filling any unset option
    from _DEFAULTS, and return molotov's result."""
    for key, default in _DEFAULTS.items():
        options.setdefault(key, default)
    # Wrap the options in a namedtuple so run() sees attribute access.
    args = namedtuple('Arguments', options.keys())(**options)
    print('Running molotov with %s' % str(args))
    return run(args)
def main():
    """Entry point: clone a GitHub repo into a temp dir, build a virtualenv,
    and run one of the repo's molotov tests per its molotov.json config."""
    parser = argparse.ArgumentParser(description='Github-based load test')
    parser.add_argument('--version', action='store_true', default=False,
                        help='Displays version and exits.')
    parser.add_argument('--virtualenv', type=str, default='virtualenv',
                        help='Virtualenv executable.')
    parser.add_argument('--python', type=str, default=sys.executable,
                        help='Python executable.')
    parser.add_argument('--config', type=str, default='molotov.json',
                        help='Path of the configuration file.')
    parser.add_argument('repo', help='Github repo', type=str)
    parser.add_argument('run', help='Test to run')
    args = parser.parse_args()
    if args.version:
        print(__version__)
        sys.exit(0)
    # Work inside a fresh temp directory so the clone does not pollute cwd.
    tempdir = tempfile.mkdtemp()
    curdir = os.getcwd()
    os.chdir(tempdir)
    print('Working directory is %s' % tempdir)
    try:
        clone_repo(args.repo)
        config_file = os.path.join(tempdir, args.config)
        with open(config_file) as f:
            config = json.loads(f.read())
        # creating the virtualenv
        create_virtualenv(args.virtualenv, args.python)
        # install deps
        if 'requirements' in config['molotov']:
            install_reqs(config['molotov']['requirements'])
        # environment
        if 'env' in config['molotov']:
            for key, value in config['molotov']['env'].items():
                os.environ[key] = value
        run_test(**config['molotov']['tests'][args.run])
    except Exception:
        # NOTE(review): cwd is restored and tempdir removed only on failure;
        # on success the process exits with cwd still inside tempdir and the
        # checkout left on disk — confirm whether that is intentional.
        os.chdir(curdir)
        shutil.rmtree(tempdir, ignore_errors=True)
        raise


if __name__ == '__main__':
    main()
|
98108
|
class KelvinHelmholtzUniform:
    """
    Kelvin-Helmholtz instability with anisotropic viscosity and a constant
    magnetic field in the x-direction. The equilibrium is assumed to have
    constant density, temperature and pressure. The velocity profile varies
    smoothly and the setup is periodic.
    More details about this specific setup can be found in
    <NAME>. & <NAME>. (2019). *On the Kelvin-Helmholtz instability
    with smooth initial conditions – Linear theory and simulations*, MNRAS,
    485, 908
    Another reference for the KHI with anisotric viscosity is
    <NAME>., <NAME>., <NAME>., & <NAME>. (2013).
    Magnetohydrodynamic simulations of the formation of cold fronts in
    clusters of galaxies: Effects of anisotropic viscosity. Astrophysical
    Journal, 768(2). https://doi.org/10.1088/0004-637X/768/2/175
    """

    def __init__(self, grid, beta, nu, kx, u0=1, z1=0.5, z2=1.5, a=0.05):
        # grid: the discretized z-domain; beta: plasma beta; nu: viscosity;
        # kx: x-wavenumber; u0: shear amplitude; z1/z2: shear-layer centers;
        # a: shear-layer half-width.
        import numpy as np

        # Parameters that change (TODO: make nu, beta, and chi0 part of this)
        self._u0 = u0
        self.nu = nu
        self.beta = beta
        self.kx = kx
        # Ideal monatomic-gas adiabatic index; unit background state.
        self.gamma = 5.0 / 3
        self.p = 1.0
        self.rho = 1.0
        self.mu0 = 1.0
        # Field strength from beta = 2 p / B^2 (mu0 = 1 units).
        self.B = np.sqrt(2 * self.p / beta)
        # Alfven speed of the background field.
        self.va = self.B / np.sqrt(self.mu0 * self.rho)
        self.grid = grid
        # Recompute the background whenever the grid changes.
        self.grid.bind_to(self.make_background)
        self.z1 = z1
        self.z2 = z2
        self.a = a
        # Create initial background
        self.make_background()
        # Variables to solve for
        self.variables = ["drho", "dA", "dvx", "dvz", "dT"]
        self.labels = [
            r"$\delta \rho/\rho$",
            r"$\delta A/B$",
            r"$\delta v_x/c_0$",
            r"$\delta v_z/c_0$",
            r"$\delta T/T$",
        ]
        # Boundary conditions
        self.boundaries = [False, False, False, False, False]
        # Number of equations in system
        self.dim = len(self.variables)
        # String used for eigenvalue (do not use lambda!)
        self.eigenvalue = "sigma"
        # Equations (Careful! No space behind minus
        eq1 = "sigma*drho = -1j*kx*v*drho -1j*kx*dvx -1.0*dz(dvz)"
        eq2 = "sigma*dA = -1j*kx*v*dA +1.0*dvz"
        eq3 = "sigma*dvx = -1j*kx*v*dvx -dvdz*dvz -1j*kx*p/rho*drho -1j*kx*p/rho*dT -nu*4/3*kx**2*dvx -nu*2*kx**2*dvdz*dA -nu*1j*kx*2/3*dz(dvz)"
        eq4 = "sigma*dvz = -1j*kx*v*dvz -1/rho*p*dz(drho) -1/rho*p*dz(dT) +va**2*dz(dz(dA)) -va**2*kx**2*dA -1j*kx*nu*2/3*dz(dvx) -1j*kx*nu*d2vdz*dA -1j*kx*nu*dvdz*dz(dA) +nu*1/3*dz(dz(dvz))"
        eq5 = "sigma*dT = -1j*kx*v*dT -1j*kx*2/3*dvx -2/3*dz(dvz)"
        self.equations = [eq1, eq2, eq3, eq4, eq5]

    @property
    def u0(self):
        # Shear-flow amplitude; writing it rebuilds the background profiles.
        return self._u0

    @u0.setter
    def u0(self, value):
        self._u0 = value
        self.make_background()

    def make_background(self):
        """Evaluate the background velocity profile and its first two
        z-derivatives (symbolically via sympy) on the grid points."""
        from sympy import tanh, diff, lambdify, symbols

        z = symbols("z")
        zg = self.grid.zg
        u0 = self._u0
        z1 = self.z1
        z2 = self.z2
        a = self.a
        # Define Background Functions
        # Double tanh shear layer, periodic-friendly, offset by -1.
        v_sym = u0 * (tanh((z - z1) / a) - tanh((z - z2) / a) - 1.0)
        dvdz_sym = diff(v_sym, z)
        d2vdz_sym = diff(dvdz_sym, z)
        self.v = lambdify(z, v_sym)(zg)
        self.dvdz = lambdify(z, dvdz_sym)(zg)
        self.d2vdz = lambdify(z, d2vdz_sym)(zg)
98110
|
from .merge_result_infos import merge_result_infos
from .field_to_fc import field_to_fc
from .html_doc import html_doc
from .unitary_field import unitary_field
from .extract_field import extract_field
from .bind_support import bind_support
from .scalars_to_field import scalars_to_field
from .change_location import change_location
from .strain_from_voigt import strain_from_voigt
from .set_property import set_property
from .forward_field import forward_field
from .forward_fields_container import forward_fields_container
from .forward_meshes_container import forward_meshes_container
from .forward import forward
from .txt_file_to_dpf import txt_file_to_dpf
from .bind_support_fc import bind_support_fc
from .default_value import default_value
from .extract_time_freq import extract_time_freq
from .python_generator import python_generator
from .make_overall import make_overall
from .merge_fields_containers import merge_fields_containers
from .merge_scopings import merge_scopings
from .merge_materials import merge_materials
from .merge_property_fields import merge_property_fields
from .remote_workflow_instantiate import remote_workflow_instantiate
from .remote_operator_instantiate import remote_operator_instantiate
from .merge_fields_by_label import merge_fields_by_label
from .merge_scopings_containers import merge_scopings_containers
from .merge_meshes import merge_meshes
from .merge_time_freq_supports import merge_time_freq_supports
from .merge_fields import merge_fields
from .merge_supports import merge_supports
from .merge_meshes_containers import merge_meshes_containers
from .change_shell_layers import change_shell_layers
|
98164
|
import pathlib
import pytest
import pymatgen as pmg
from pymatgen.io.cif import CifParser
from dftfit.potential import Potential
from dftfit.training import Training
@pytest.fixture
def structure():
    """Factory fixture: load a pymatgen Structure from a .cif or POSCAR file."""
    def f(filename, conventional=True, oxidized=False):
        filename = pathlib.Path(filename)
        if not filename.is_file():
            # BUGFIX: the f-strings contained the literal text "(unknown)"
            # instead of interpolating the offending filename.
            raise ValueError(f'given filename "{filename}" is not a file')
        if filename.suffix == '.cif':
            s = CifParser(str(filename)).get_structures(primitive=(not conventional))[0]
        elif filename.stem == 'POSCAR':
            s = pmg.io.vasp.inputs.Poscar(str(filename)).structure
        else:
            raise ValueError(f'do not know how to convert filename {filename} to structure')
        if not oxidized:
            s.remove_oxidation_states()
        return s
    return f
@pytest.fixture
def potential():
    """Factory fixture: load a Potential from a file."""
    def loader(filename, format=None):
        return Potential.from_file(filename, format=format)
    return loader
@pytest.fixture
def training():
    """Factory fixture: load a Training set from a file, with optional cache."""
    def loader(filename, format=None, cache_filename=None):
        return Training.from_file(
            filename, format=format, cache_filename=cache_filename)
    return loader
@pytest.fixture
def mgo_structure():
    """Conventional rock-salt MgO structure (space group 225, a ≈ 4.199 Å)."""
    a = 4.1990858
    # BUGFIX: Lattice and Structure were referenced unqualified but never
    # imported (NameError); qualify them through the already-imported
    # pymatgen top-level package — confirm against the installed pymatgen
    # version's top-level exports.
    lattice = pmg.Lattice.from_parameters(a, a, a, 90, 90, 90)
    species = ['Mg', 'O']
    coordinates = [[0, 0, 0], [0.5, 0.5, 0.5]]
    return pmg.Structure.from_spacegroup(225, lattice, species, coordinates)
|
98186
|
import json
import os
import random
import string
from math import asin
from math import ceil
from math import cos
from math import degrees
from math import pi
from math import radians
from math import sin
from math import sqrt
from math import tan
from pyaedt.generic.general_methods import _retry_ntimes
from pyaedt.generic.general_methods import pyaedt_function_handler
from pyaedt.modeler.actors import Bird
from pyaedt.modeler.actors import Person
from pyaedt.modeler.actors import Vehicle
from pyaedt.modeler.GeometryOperators import GeometryOperators
from pyaedt.modeler.multiparts import Environment
from pyaedt.modeler.multiparts import MultiPartComponent
from pyaedt.modeler.Primitives import Primitives
class Primitives3D(Primitives, object):
"""Manages primitives in 3D tools.
This class is inherited in the caller application and is
accessible through the primitives variable part of modeler object(
e.g. ``hfss.modeler`` or ``icepak.modeler``).
Parameters
----------
application : str
Name of the application.
Examples
--------
Basic usage demonstrated with an HFSS, Maxwell 3D, Icepak, Q3D, or Mechanical design:
>>> from pyaedt import Hfss
>>> aedtapp = Hfss()
>>> prim = aedtapp.modeler
"""
    def __init__(self):
        # Initialize base primitive bookkeeping, then track the multi-part
        # components (actors/environments) added through this modeler.
        Primitives.__init__(self)
        self.multiparts = []
@pyaedt_function_handler()
def create_point(self, position, name=None, color="(143 175 143)"):
"""Create a point.
Parameters
----------
position : list
List of ``[x, y, z]`` coordinates. Note, The list can be empty or contain less than 3 elements.
name : str, optional
Name of the point. The default is ``None``, in which case the
default name is assigned.
color : str, optional
String exposing 3 int values such as "(value1 value2 value3)". Default value is ``"(143 175 143)"``.
Returns
-------
:class:`pyaedt.modeler.object3dlayout.Point`
Point object.
References
----------
>>> oEditor.CreateBox
Examples
--------
>>> from pyaedt import hfss
>>> hfss = Hfss()
>>> point_object = hfss.modeler.primivites.create_point([0,0,0], name="mypoint")
"""
x_position, y_position, z_position = self._pos_with_arg(position)
if not name:
unique_name = "".join(random.sample(string.ascii_uppercase + string.digits, 6))
name = "NewPoint_" + unique_name
parameters = ["NAME:PointParameters"]
parameters.append("PointX:="), parameters.append(x_position)
parameters.append("PointY:="), parameters.append(y_position)
parameters.append("PointZ:="), parameters.append(z_position)
attributes = ["NAME:Attributes"]
attributes.append("Name:="), attributes.append(name)
attributes.append("Color:="), attributes.append(color)
point = _retry_ntimes(10, self.oeditor.CreatePoint, parameters, attributes)
return self._create_point(name)
@pyaedt_function_handler()
def create_box(self, position, dimensions_list, name=None, matname=None):
"""Create a box.
Parameters
----------
position : list
Center point for the box in a list of ``[x, y, z]`` coordinates.
dimensions_list : list
Dimensions for the box in a list of ``[x, y, z]`` coordinates.
name : str, optional
Name of the box. The default is ``None``, in which case the
default name is assigned.
matname : str, optional
Name of the material. The default is ``None``, in which case the
default material is assigned. If the material name supplied is
invalid, the default material is assigned.
Returns
-------
:class:`pyaedt.modeler.Object3d.Object3d`
3D object.
References
----------
>>> oEditor.CreateBox
Examples
--------
>>> from pyaedt import hfss
>>> hfss = Hfss()
>>> origin = [0,0,0]
>>> dimensions = [10,5,20]
>>> #Material and name are not mandatory fields
>>> box_object = hfss.modeler.primivites.create_box(origin, dimensions, name="mybox", matname="copper")
"""
assert len(position) == 3, "Position Argument must be a valid 3 Element List"
assert len(dimensions_list) == 3, "Dimension Argument must be a valid 3 Element List"
XPosition, YPosition, ZPosition = self._pos_with_arg(position)
XSize, YSize, ZSize = self._pos_with_arg(dimensions_list)
vArg1 = ["NAME:BoxParameters"]
vArg1.append("XPosition:="), vArg1.append(XPosition)
vArg1.append("YPosition:="), vArg1.append(YPosition)
vArg1.append("ZPosition:="), vArg1.append(ZPosition)
vArg1.append("XSize:="), vArg1.append(XSize)
vArg1.append("YSize:="), vArg1.append(YSize)
vArg1.append("ZSize:="), vArg1.append(ZSize)
vArg2 = self._default_object_attributes(name=name, matname=matname)
new_object_name = _retry_ntimes(10, self.oeditor.CreateBox, vArg1, vArg2)
return self._create_object(new_object_name)
@pyaedt_function_handler()
def create_cylinder(self, cs_axis, position, radius, height, numSides=0, name=None, matname=None):
"""Create a cylinder.
Parameters
----------
cs_axis : int or str
Axis of rotation of the starting point around the center point.
:class:`pyaedt.constants.AXIS` Enumerator can be used as input.
position : list
Center point of the cylinder in a list of ``(x, y, z)`` coordinates.
radius : float
Radius of the cylinder.
height : float
Height of the cylinder.
numSides : int, optional
Number of sides. The default is ``0``, which is correct for
a cylinder.
name : str, optional
Name of the cylinder. The default is ``None``, in which case
the default name is assigned.
matname : str, optional
Name of the material. The default is ''None``, in which case the
default material is assigned.
Returns
-------
:class:`pyaedt.modeler.Object3d.Object3d`
3D object.
References
----------
>>> oEditor.CreateCylinder
Examples
--------
>>> from pyaedt import Hfss
>>> aedtapp = Hfss()
>>> cylinder_object = aedtapp.modeler..create_cylinder(cs_axis='Z', position=[0,0,0],
... radius=2, height=3, name="mycyl",
... matname="vacuum")
"""
if isinstance(radius, (int, float)) and radius < 0:
raise ValueError("Radius must be greater than 0.")
szAxis = GeometryOperators.cs_axis_str(cs_axis)
XCenter, YCenter, ZCenter = self._pos_with_arg(position)
Radius = self._arg_with_dim(radius)
Height = self._arg_with_dim(height)
vArg1 = ["NAME:CylinderParameters"]
vArg1.append("XCenter:="), vArg1.append(XCenter)
vArg1.append("YCenter:="), vArg1.append(YCenter)
vArg1.append("ZCenter:="), vArg1.append(ZCenter)
vArg1.append("Radius:="), vArg1.append(Radius)
vArg1.append("Height:="), vArg1.append(Height)
vArg1.append("WhichAxis:="), vArg1.append(szAxis)
vArg1.append("NumSides:="), vArg1.append("{}".format(numSides))
vArg2 = self._default_object_attributes(name=name, matname=matname)
new_object_name = self.oeditor.CreateCylinder(vArg1, vArg2)
return self._create_object(new_object_name)
@pyaedt_function_handler()
def create_polyhedron(
self,
cs_axis=None,
center_position=(0.0, 0.0, 0.0),
start_position=(0.0, 1.0, 0.0),
height=1.0,
num_sides=12,
name=None,
matname=None,
):
"""Create a regular polyhedron.
Parameters
----------
cs_axis : optional
Axis of rotation of the starting point around the center point.
The default is ``None``, in which case the Z axis is used.
center_position : list, optional
List of ``[x, y, z]`` coordinates for the center position.
The default is ``(0.0, 0.0, 0.0)``.
start_position : list, optional
List of ``[x, y, z]`` coordinates for the starting position.
The default is ``(0.0, 0.0, 0.0)``.
height : float, optional
Height of the polyhedron. The default is ``1.0``.
num_sides : int, optional
Number of sides of the polyhedron. The default is ``12``.
name : str, optional
Name of the polyhedron. The default is ``None``, in which the
default name is assigned.
matname : str, optional
Name of the material. The default is ``None``, in which the
default material is assigned.
Returns
-------
:class:`pyaedt.modeler.Object3d.Object3d`
3D object.
References
----------
>>> oEditor.CreateRegularPolyhedron
Examples
--------
>>> from pyaedt import Hfss
>>> aedtapp = Hfss()
>>> ret_obj = aedtapp.modeler.create_polyhedron(cs_axis='X', center_position=[0, 0, 0],
... start_position=[0,5,0], height=0.5,
... num_sides=8, name="mybox", matname="copper")
"""
test = cs_axis
cs_axis = GeometryOperators.cs_axis_str(cs_axis)
x_center, y_center, z_center = self._pos_with_arg(center_position)
x_start, y_start, z_start = self._pos_with_arg(start_position)
height = self._arg_with_dim(height)
vArg1 = ["NAME:PolyhedronParameters"]
vArg1.append("XCenter:="), vArg1.append(x_center)
vArg1.append("YCenter:="), vArg1.append(y_center)
vArg1.append("ZCenter:="), vArg1.append(z_center)
vArg1.append("XStart:="), vArg1.append(x_start)
vArg1.append("YStart:="), vArg1.append(y_start)
vArg1.append("ZStart:="), vArg1.append(z_start)
vArg1.append("Height:="), vArg1.append(height)
vArg1.append("NumSides:="), vArg1.append(int(num_sides))
vArg1.append("WhichAxis:="), vArg1.append(cs_axis)
vArg2 = self._default_object_attributes(name=name, matname=matname)
new_object_name = self.oeditor.CreateRegularPolyhedron(vArg1, vArg2)
return self._create_object(new_object_name)
    @pyaedt_function_handler()
    def create_cone(self, cs_axis, position, bottom_radius, top_radius, height, name=None, matname=None):
        """Create a cone.

        Parameters
        ----------
        cs_axis : str
            Axis of rotation of the starting point around the center point.
            If ``None``, the Z axis is used.
        position : list
            List of ``[x, y, z]`` coordinates for the center position
            of the bottom of the cone.
        bottom_radius : float
            Bottom radius of the cone.
        top_radius : float
            Top radius of the cone.
        height : float
            Height of the cone.
        name : str, optional
            Name of the cone. The default is ``None``, in which case
            the default name is assigned.
        matname : str, optional
            Name of the material. The default is ``None``, in which case
            the default material is assigned.

        Returns
        -------
        :class:`pyaedt.modeler.Object3d.Object3d`
            3D object.

        References
        ----------
        >>> oEditor.CreateCone

        Examples
        --------
        >>> from pyaedt import Hfss
        >>> aedtapp = Hfss()
        >>> cone_object = aedtapp.modeler.create_cone(cs_axis='Z', position=[0, 0, 0],
        ...                                           bottom_radius=2, top_radius=3, height=4,
        ...                                           name="mybox", matname="copper")
        """
        # Equal radii would describe a cylinder, not a cone.
        if bottom_radius == top_radius:
            raise ValueError("Bottom radius and top radius must have different values.")
        if isinstance(bottom_radius, (int, float)) and bottom_radius < 0:
            raise ValueError("Bottom radius must be greater than 0.")
        if isinstance(top_radius, (int, float)) and top_radius < 0:
            raise ValueError("Top radius must be greater than 0.")
        if isinstance(height, (int, float)) and height <= 0:
            raise ValueError("Height must be greater than 0.")
        XCenter, YCenter, ZCenter = self._pos_with_arg(position)
        szAxis = GeometryOperators.cs_axis_str(cs_axis)
        Height = self._arg_with_dim(height)
        RadiusBt = self._arg_with_dim(bottom_radius)
        RadiusUp = self._arg_with_dim(top_radius)
        vArg1 = ["NAME:ConeParameters"]
        vArg1.append("XCenter:="), vArg1.append(XCenter)
        vArg1.append("YCenter:="), vArg1.append(YCenter)
        vArg1.append("ZCenter:="), vArg1.append(ZCenter)
        vArg1.append("WhichAxis:="), vArg1.append(szAxis)
        vArg1.append("Height:="), vArg1.append(Height)
        vArg1.append("BottomRadius:="), vArg1.append(RadiusBt)
        vArg1.append("TopRadius:="), vArg1.append(RadiusUp)
        vArg2 = self._default_object_attributes(name=name, matname=matname)
        new_object_name = self.oeditor.CreateCone(vArg1, vArg2)
        return self._create_object(new_object_name)
@pyaedt_function_handler()
def create_sphere(self, position, radius, name=None, matname=None):
"""Create a sphere.
Parameters
----------
position : list
List of ``[x, y, z]`` coordinates for the center position
of the sphere.
radius : float
Radius of the sphere.
name : str, optional
Name of the sphere. The default is ``None``, in which case
the default name is assigned.
matname : str, optional
Name of the material. The default is ``None``, in which case
the default material is assigned.
Returns
-------
:class:`pyaedt.modeler.Object3d.Object3d`
3D object.
References
----------
>>> oEditor.CreateSphere
Examples
--------
>>> from pyaedt import Hfss
>>> aedtapp = Hfss()
>>> ret_object = aedtapp.modeler.create_sphere(position=[0,0,0], radius=2,
... name="mysphere", matname="copper")
"""
if len(position) != 3:
raise ValueError("Position argument must be a valid 3 elements List.")
if isinstance(radius, (int, float)) and radius < 0:
raise ValueError("Radius must be greater than 0.")
XCenter, YCenter, ZCenter = self._pos_with_arg(position)
Radius = self._arg_with_dim(radius)
vArg1 = ["NAME:SphereParameters"]
vArg1.append("XCenter:="), vArg1.append(XCenter)
vArg1.append("YCenter:="), vArg1.append(YCenter)
vArg1.append("ZCenter:="), vArg1.append(ZCenter)
vArg1.append("Radius:="), vArg1.append(Radius)
vArg2 = self._default_object_attributes(name=name, matname=matname)
new_object_name = self.oeditor.CreateSphere(vArg1, vArg2)
return self._create_object(new_object_name)
@pyaedt_function_handler()
def create_torus(self, center, major_radius, minor_radius, axis=None, name=None, material_name=None):
"""Create a torus.
Parameters
----------
center : list
Center point for the torus in a list of ``[x, y, z]`` coordinates.
major_radius : float
Major radius of the torus.
minor_radius : float
Minor radius of the torus.
axis : str, optional
Axis of revolution.
The default is ``None``, in which case the Z axis is used.
name : str, optional
Name of the torus. The default is ``None``, in which case the
default name is assigned.
material_name : str, optional
Name of the material. The default is ``None``, in which case the
default material is assigned. If the material name supplied is
invalid, the default material is assigned.
Returns
-------
:class:`pyaedt.modeler.Object3d.Object3d`
3D object.
References
----------
>>> oEditor.CreateTorus
Examples
--------
Create a torus named ``"mytorus"`` about the Z axis with a major
radius of 1, minor radius of 0.5, and a material of ``"copper"``.
>>> from pyaedt import Hfss
>>> hfss = Hfss()
>>> origin = [0, 0, 0]
>>> torus = hfss.modeler.create_torus(origin, major_radius=1,
... minor_radius=0.5, axis="Z",
... name="mytorus", material_name="copper")
"""
if len(center) != 3:
raise ValueError("Center argument must be a valid 3 element sequence.")
# if major_radius <= 0 or minor_radius <= 0:
# raise ValueError("Both major and minor radius must be greater than 0.")
# if minor_radius >= major_radius:
# raise ValueError("Major radius must be greater than minor radius.")
x_center, y_center, z_center = self._pos_with_arg(center)
axis = GeometryOperators.cs_axis_str(axis)
major_radius = self._arg_with_dim(major_radius)
minor_radius = self._arg_with_dim(minor_radius)
first_argument = ["NAME:TorusParameters"]
first_argument.append("XCenter:="), first_argument.append(x_center)
first_argument.append("YCenter:="), first_argument.append(y_center)
first_argument.append("ZCenter:="), first_argument.append(z_center)
first_argument.append("MajorRadius:="), first_argument.append(major_radius)
first_argument.append("MinorRadius:="), first_argument.append(minor_radius)
first_argument.append("WhichAxis:="), first_argument.append(axis)
second_argument = self._default_object_attributes(name=name, matname=material_name)
new_object_name = _retry_ntimes(10, self.oeditor.CreateTorus, first_argument, second_argument)
return self._create_object(new_object_name)
    @pyaedt_function_handler()
    def create_bondwire(
        self,
        start_position,
        end_position,
        h1=0.2,
        h2=0,
        alpha=80,
        beta=5,
        bond_type=0,
        diameter=0.025,
        facets=6,
        name=None,
        matname=None,
    ):
        """Create a bondwire.

        Parameters
        ----------
        start_position : list
            List of ``[x, y, z]`` coordinates for the starting
            position of the bond pad.
        end_position : list
            List of ``[x, y, z]`` coordinates for the ending position
            of the bond pad.
        h1 : float, optional
            Height between the IC die I/O pad and the top of the bondwire.
            The default is ``0.2``.
        h2 : float, optional
            Height of the IC die I/O pad above the lead frame. The default
            is ``0``. A negative value indicates that the I/O pad is below
            the lead frame.
        alpha : float, optional
            Angle in degrees between the xy plane and the wire bond at the
            IC die I/O pad. The default is ``80``.
        beta : float, optional
            Angle in degrees between the xy plane and the wire bond at the
            lead frame. The default is ``5``.
        bond_type : int, optional
            Type of the bondwire, which indicates its shape. Options are:

            * ``0`` for JEDEC 5-point
            * ``1`` for JEDEC 4-point
            * ``2`` for Low

            The default is ``0``.
        diameter : float, optional
            Diameter of the wire. The default is ``0.025``.
        facets : int, optional
            Number of wire facets. The default is ``6``.
        name : str, optional
            Name of the bondwire. The default is ``None``, in which case
            the default name is assigned.
        matname : str, optional
            Name of the material. The default is ``None``, in which case
            the default material is assigned.

        Returns
        -------
        :class:`pyaedt.modeler.Object3d.Object3d`
            3D object, or ``False`` when ``bond_type`` is not ``0``, ``1``, or ``2``.

        Raises
        ------
        AttributeError
            If ``start_position`` or ``end_position`` is not a valid
            three-element sequence.

        References
        ----------
        >>> oEditor.CreateBondwire

        Examples
        --------
        >>> from pyaedt import Hfss
        >>> hfss = Hfss()
        >>> origin = [0,0,0]
        >>> endpos = [10,5,20]
        >>> #Material and name are not mandatory fields
        >>> object_id = hfss.modeler.create_bondwire(origin, endpos, h1=0.5, h2=0.1, alpha=75, beta=4,
        ...                                          bond_type=0, name="mybox", matname="copper")
        """
        x_position, y_position, z_position = self._pos_with_arg(start_position)
        if x_position is None or y_position is None or z_position is None:
            raise AttributeError("Position Argument must be a valid 3 Element List")
        # Direction vector from start to end, converted to dimensioned strings.
        x_length, y_length, z_length = self._pos_with_arg([n - m for m, n in zip(start_position, end_position)])
        if x_length is None or y_length is None or z_length is None:
            raise AttributeError("Dimension Argument must be a valid 3 Element List")
        # Map the numeric bond type to the AEDT wire profile name.
        if bond_type == 0:
            bondwire = "JEDEC_5Points"
        elif bond_type == 1:
            bondwire = "JEDEC_4Points"
        elif bond_type == 2:
            bondwire = "LOW"
        else:
            self.logger.error("Wrong Profile Type")
            return False
        # AEDT expects a flat name/value argument list; the order of the
        # pairs below follows the CreateBondwire command signature.
        first_argument = ["NAME:BondwireParameters"]
        first_argument.append("WireType:="), first_argument.append(bondwire)
        first_argument.append("WireDiameter:="), first_argument.append(self._arg_with_dim(diameter))
        first_argument.append("NumSides:="), first_argument.append(str(facets))
        first_argument.append("XPadPos:="), first_argument.append(x_position)
        first_argument.append("YPadPos:="), first_argument.append(y_position)
        first_argument.append("ZPadPos:="), first_argument.append(z_position)
        first_argument.append("XDir:="), first_argument.append(x_length)
        first_argument.append("YDir:="), first_argument.append(y_length)
        first_argument.append("ZDir:="), first_argument.append(z_length)
        first_argument.append("Distance:="), first_argument.append(
            self._arg_with_dim(GeometryOperators.points_distance(start_position, end_position))
        )
        first_argument.append("h1:="), first_argument.append(self._arg_with_dim(h1))
        first_argument.append("h2:="), first_argument.append(self._arg_with_dim(h2))
        first_argument.append("alpha:="), first_argument.append(self._arg_with_dim(alpha, "deg"))
        first_argument.append("beta:="), first_argument.append(self._arg_with_dim(beta, "deg"))
        first_argument.append("WhichAxis:="), first_argument.append("Z")
        first_argument.append("ReverseDirection:="), first_argument.append(False)
        second_argument = self._default_object_attributes(name=name, matname=matname)
        new_object_name = self.oeditor.CreateBondwire(first_argument, second_argument)
        return self._create_object(new_object_name)
@pyaedt_function_handler()
def create_rectangle(self, csPlane, position, dimension_list, name=None, matname=None, is_covered=True):
"""Create a rectangle.
Parameters
----------
csPlane : str or int
Coordinate system plane for orienting the rectangle.
:class:`pyaedt.constants.PLANE` Enumerator can be used as input.
position : list or Position
List of ``[x, y, z]`` coordinates for the center point of the rectangle or
the positionApplicationName.modeler.Position(x,y,z) object.
dimension_list : list
List of ``[width, height]`` dimensions.
name : str, optional
Name of the rectangle. The default is ``None``, in which case
the default name is assigned.
matname : str, optional
Name of the material. The default is ``None``, in which case
the default material is assigned.
is_covered : bool, optional
Whether the rectangle is covered. The default is ``True``.
Returns
-------
:class:`pyaedt.modeler.Object3d.Object3d`
3D object.
References
----------
>>> oEditor.CreateRectangle
"""
szAxis = GeometryOperators.cs_plane_to_axis_str(csPlane)
XStart, YStart, ZStart = self._pos_with_arg(position)
Width = self._arg_with_dim(dimension_list[0])
Height = self._arg_with_dim(dimension_list[1])
vArg1 = ["NAME:RectangleParameters"]
vArg1.append("IsCovered:="), vArg1.append(is_covered)
vArg1.append("XStart:="), vArg1.append(XStart)
vArg1.append("YStart:="), vArg1.append(YStart)
vArg1.append("ZStart:="), vArg1.append(ZStart)
vArg1.append("Width:="), vArg1.append(Width)
vArg1.append("Height:="), vArg1.append(Height)
vArg1.append("WhichAxis:="), vArg1.append(szAxis)
vArg2 = self._default_object_attributes(name=name, matname=matname)
new_object_name = self.oeditor.CreateRectangle(vArg1, vArg2)
return self._create_object(new_object_name)
@pyaedt_function_handler()
def create_circle(self, cs_plane, position, radius, numSides=0, is_covered=True, name=None, matname=None):
"""Create a circle.
Parameters
----------
cs_plane : str or int
Coordinate system plane for orienting the circle.
:class:`pyaedt.constants.PLANE` Enumerator can be used as input.
position : list
List of ``[x, y, z]`` coordinates for the center point of the circle.
radius : float
Radius of the circle.
numSides : int, optional
Number of sides. The default is ``0``, which is correct for a circle.
name : str, optional
Name of the circle. The default is ``None``, in which case the
default name is assigned.
matname : str, optional
Name of the material. The default is ``None``, in which case the
default material is assigned.
Returns
-------
:class:`pyaedt.modeler.Object3d.Object3d`
3D object.
References
----------
>>> oEditor.CreateCircle
"""
szAxis = GeometryOperators.cs_plane_to_axis_str(cs_plane)
XCenter, YCenter, ZCenter = self._pos_with_arg(position)
Radius = self._arg_with_dim(radius)
vArg1 = ["NAME:CircleParameters"]
vArg1.append("IsCovered:="), vArg1.append(is_covered)
vArg1.append("XCenter:="), vArg1.append(XCenter)
vArg1.append("YCenter:="), vArg1.append(YCenter)
vArg1.append("ZCenter:="), vArg1.append(ZCenter)
vArg1.append("Radius:="), vArg1.append(Radius)
vArg1.append("WhichAxis:="), vArg1.append(szAxis)
vArg1.append("NumSegments:="), vArg1.append("{}".format(numSides))
vArg2 = self._default_object_attributes(name=name, matname=matname)
new_object_name = self.oeditor.CreateCircle(vArg1, vArg2)
return self._create_object(new_object_name)
@pyaedt_function_handler()
def create_ellipse(self, cs_plane, position, major_radius, ratio, is_covered=True, name=None, matname=None):
"""Create an ellipse.
Parameters
----------
cs_plane : str or int
Coordinate system plane for orienting the ellipse.
:class:`pyaedt.constants.PLANE` Enumerator can be used as input.
position : list
List of ``[x, y, z]`` coordinates for the center point of the ellipse.
major_radius : float
Base radius of the ellipse.
ratio : float
Aspect ratio of the secondary radius to the base radius.
is_covered : bool, optional
Whether the ellipse is covered. The default is ``True``,
in which case the result is a 2D sheet object. If ``False,``
the result is a closed 1D polyline object.
name : str, optional
Name of the ellipse. The default is ``None``, in which case the
default name is assigned.
matname : str, optional
Name of the material. The default is ``None``, in which case the
default material is assigned.
Returns
-------
:class:`pyaedt.modeler.Object3d.Object3d`
3D object.
References
----------
>>> oEditor.CreateEllipse
"""
szAxis = GeometryOperators.cs_plane_to_axis_str(cs_plane)
XStart, YStart, ZStart = self._pos_with_arg(position)
MajorRadius = self._arg_with_dim(major_radius)
Ratio = self._arg_with_dim(ratio)
vArg1 = ["NAME:EllipseParameters"]
vArg1.append("IsCovered:="), vArg1.append(is_covered)
vArg1.append("XCenter:="), vArg1.append(XStart)
vArg1.append("YCenter:="), vArg1.append(YStart)
vArg1.append("ZCenter:="), vArg1.append(ZStart)
vArg1.append("MajRadius:="), vArg1.append(MajorRadius)
vArg1.append("Ratio:="), vArg1.append(Ratio)
vArg1.append("WhichAxis:="), vArg1.append(szAxis)
vArg2 = self._default_object_attributes(name=name, matname=matname)
new_object_name = self.oeditor.CreateEllipse(vArg1, vArg2)
return self._create_object(new_object_name)
@pyaedt_function_handler()
def create_equationbased_curve(
self,
x_t=0,
y_t=0,
z_t=0,
t_start=0,
t_end=1,
num_points=0,
name=None,
xsection_type=None,
xsection_orient=None,
xsection_width=1,
xsection_topwidth=1,
xsection_height=1,
xsection_num_seg=0,
xsection_bend_type=None,
):
"""Create an equation-based curve.
Parameters
----------
x_t : str or float
Expression for the X-component of the curve as a function of ``"_t"``.
For example, ``"3 * cos(_t)"``.
y_t : str or float
Expression for the Y-component of the curve as a function of ``"_t"``
z_t : str or float
Expression for the Z-component of the curve as a function of ``"_t"``
t_start : str or float
Starting value of the parameter ``"_t"``.
t_end : str or float
Ending value of the parameter ``"_t"``.
num_points : int, optional
Number of vertices on the segmented curve. The default is ``0``,
in which case the curve is non-segmented.
name : str, optional
Name of the created curve in the 3D modeler. The default is ``None``,
in which case the default name is assigned.
xsection_type : str, optional
Type of the cross-section. Choices are ``"Line"``, ``"Circle"``,
``"Rectangle"``, and ``"Isosceles Trapezoid"``. The default is ``None``.
xsection_orient : str, optional
Direction of the normal vector to the width of the cross-section.
Choices are ``"X"``, ``"Y"``, ``"Z"``, and ``"Auto"``. The default is
``None``, in which case the direction is set to ``"Auto"``.
xsection_width : float or str, optional
Width or diameter of the cross-section for all types. The
default is ``1``.
xsection_topwidth : float or str, optional
Top width of the cross-section for type ``"Isosceles Trapezoid"`` only.
The default is ``1``.
xsection_height : float or str
Height of the cross-section for types ``"Rectangle"`` and ``"Isosceles
Trapezoid"`` only. The default is ``1``.
xsection_num_seg : int, optional
Number of segments in the cross-section surface for types ``"Circle"``,
``"Rectangle"``, and ``"Isosceles Trapezoid"``. The default is ``0``. The
value must be ``0`` or greater than ``2``.
xsection_bend_type : str, optional
Type of the bend for the cross-section. The default is ``None``, in which
case the bend type is set to ``"Corner"``. For the type ``"Circle"``, the
bend type should be set to ``"Curved"``.
Returns
-------
:class:`pyaedt.modeler.Object3d.Object3d`
3D object.
References
----------
>>> oEditor.CreateEquationCurve
"""
x_section = self._crosssection_arguments(
type=xsection_type,
orient=xsection_orient,
width=xsection_width,
topwidth=xsection_topwidth,
height=xsection_height,
num_seg=xsection_num_seg,
bend_type=xsection_bend_type,
)
vArg1 = [
"NAME:EquationBasedCurveParameters",
"XtFunction:=",
str(x_t),
"YtFunction:=",
str(y_t),
"ZtFunction:=",
str(z_t),
"tStart:=",
str(t_start),
"tEnd:=",
str(t_end),
"NumOfPointsOnCurve:=",
num_points,
"Version:=",
1,
x_section,
]
vArg2 = self._default_object_attributes(name)
new_name = self.oeditor.CreateEquationCurve(vArg1, vArg2)
return self._create_object(new_name)
@pyaedt_function_handler()
def create_helix(
self,
polyline_name,
position,
x_start_dir,
y_start_dir,
z_start_dir,
num_thread=1,
right_hand=True,
radius_increment=0.0,
thread=1,
):
"""Create an helix from a polyline.
Parameters
----------
polyline_name : str
Name of the polyline used as the base for the helix.
position : list
List of ``[x, y, z]`` coordinates for the center point of the circle.
x_start_dir : float
Distance along x axis from the polyline.
y_start_dir : float
Distance along y axis from the polyline.
z_start_dir : float
Distance along z axis from the polyline.
num_thread : int, optional
Number of turns. The default value is ``1``.
right_hand : bool, optional
Whether the helix turning direction is right hand. The default value is ``True``.
radius_increment : float, optional
Radius change per turn. The default value is ``0.0``.
thread : float
Returns
-------
:class:`pyaedt.modeler.Object3d.Object3d`
3D object.
References
----------
>>> oEditor.CreateHelix
"""
if not polyline_name or polyline_name == "":
raise ValueError("The name of the polyline cannot be an empty string.")
x_center, y_center, z_center = self._pos_with_arg(position)
vArg1 = ["NAME:Selections"]
vArg1.append("Selections:="), vArg1.append(polyline_name)
vArg1.append("NewPartsModelFlag:="), vArg1.append("Model")
vArg2 = ["NAME:HelixParameters"]
vArg2.append("XCenter:=")
vArg2.append(x_center)
vArg2.append("YCenter:=")
vArg2.append(y_center)
vArg2.append("ZCenter:=")
vArg2.append(z_center)
vArg2.append("XStartDir:=")
vArg2.append(self._arg_with_dim(x_start_dir))
vArg2.append("YStartDir:=")
vArg2.append(self._arg_with_dim(y_start_dir))
vArg2.append("ZStartDir:=")
vArg2.append(self._arg_with_dim(z_start_dir))
vArg2.append("NumThread:=")
vArg2.append(num_thread)
vArg2.append("RightHand:=")
vArg2.append(right_hand)
vArg2.append("RadiusIncrement:=")
vArg2.append(self._arg_with_dim(radius_increment))
vArg2.append("Thread:=")
vArg2.append(self._arg_with_dim(thread))
new_name = self.oeditor.CreateHelix(vArg1, vArg2)
return self._create_object(new_name)
    @pyaedt_function_handler()
    def convert_segments_to_line(self, object_name):
        """Convert a CreatePolyline list of segments to lines.

        This method applies to splines and 3-point arguments.

        Parameters
        ----------
        object_name : int, str, or Object3d
            Specified for the object.

        Returns
        -------
        bool
            ``True`` when successful.

        References
        ----------
        >>> oEditor.ChangeProperty
        """
        this_object = self._resolve_object(object_name)
        edges = this_object.edges
        # Walk the segments back to front so earlier indices stay valid
        # while each segment's type is rewritten to "Line".
        for i in reversed(range(len(edges))):
            self.oeditor.ChangeProperty(
                [
                    "NAME:AllTabs",
                    [
                        "NAME:Geometry3DPolylineTab",
                        ["NAME:PropServers", this_object.name + ":CreatePolyline:1:Segment" + str(i)],
                        ["NAME:ChangedProps", ["NAME:Segment Type", "Value:=", "Line"]],
                    ],
                ]
            )
        return True
@pyaedt_function_handler()
def create_udm(self, udmfullname, udm_params_list, udm_library="syslib"):
"""Create a user-defined model.
Parameters
----------
udmfullname : str
Full name for the user-defined model, including the folder name.
udm_params_list :
List of user-defined object pairs for the model.
udm_library : str, optional
Name of library for the user-defined model. The default is ``"syslib"``.
Returns
-------
:class:`pyaedt.modeler.Object3d.Object3d`
3D object.
References
----------
>>> oEditor.CreateUserDefinedModel
"""
vArg1 = ["NAME:UserDefinedModelParameters", ["NAME:Definition"], ["NAME:Options"]]
vArgParamVector = ["NAME:GeometryParams"]
for pair in udm_params_list:
if isinstance(pair, list):
name = pair[0]
val = pair[1]
else:
name = pair.Name
val = pair.Value
if isinstance(val, int):
vArgParamVector.append(
["NAME:UDMParam", "Name:=", name, "Value:=", str(val), "PropType2:=", 3, "PropFlag2:=", 2]
)
elif str(val)[0] in "0123456789":
vArgParamVector.append(
["NAME:UDMParam", "Name:=", name, "Value:=", str(val), "PropType2:=", 3, "PropFlag2:=", 4]
)
else:
vArgParamVector.append(
[
"NAME:UDMParam",
"Name:=",
name,
"Value:=",
str(val),
"DataType:=",
"String",
"PropType2:=",
1,
"PropFlag2:=",
0,
]
)
vArg1.append(vArgParamVector)
vArg1.append("DllName:=")
vArg1.append(udmfullname)
vArg1.append("Library:=")
vArg1.append(udm_library)
vArg1.append("Version:=")
vArg1.append("2.0")
vArg1.append("ConnectionID:=")
vArg1.append("")
oname = self.oeditor.CreateUserDefinedModel(vArg1)
if oname:
object_lists = self.oeditor.GetPartsForUserDefinedModel(oname)
for new_name in object_lists:
self._create_object(new_name)
return True
else:
return False
    @pyaedt_function_handler()
    def create_spiral(
        self,
        internal_radius=10,
        spacing=1,
        faces=8,
        turns=10,
        width=2,
        thickness=1,
        elevation=0,
        material="copper",
        name=None,
    ):
        """Create a spiral inductor from a polyline.

        Parameters
        ----------
        internal_radius : float, optional
            Internal starting point of spiral. Default is `10`.
        spacing : float, optional
            Internal pitch between two turns. Default is `1`.
        faces : int, optional
            Number of faces per turn. Default is `8` as an octagon.
        turns : int, optional
            Number of turns. Default is `10`.
        width : float, optional
            Spiral width. Default is `2`.
        thickness : float, optional
            Spiral thickness. Default is `1`.
        elevation : float, optional
            Spiral elevation. Default is `0`.
        material : str, optional
            Spiral material. Default is `"copper"`.
        name : str, optional
            Spiral name. Default is `None`.

        Returns
        -------
        :class:`pyaedt.modeler.Object3d.Polyline`
            Polyline object.
        """
        assert internal_radius > 0, "Internal Radius must be greater than 0."
        assert faces > 0, "Faces must be greater than 0."
        # Angular step per face and starting angle (the spiral starts on
        # the +X axis and winds counter-clockwise).
        dtheta = 2 * pi / faces
        theta = pi / 2
        # First two points of the innermost turn, in the z = elevation plane.
        pts = [(internal_radius, 0, elevation), (internal_radius, internal_radius * tan(dtheta / 2), elevation)]
        rin = internal_radius * tan(dtheta / 2) * 2
        x = rin
        r = rin
        # NOTE(review): this pass appears to estimate the X extent of one
        # full turn so the per-segment growth ``dr`` yields the requested
        # (width + spacing) pitch -- confirm against the original design notes.
        for i in range(faces):
            r += 1
            theta += dtheta
            x = x + r * cos(theta)
        dr = (width + spacing) / (x - rin)
        # Grow the segment length each step so consecutive turns stay
        # (width + spacing) apart; the last half-turn is omitted.
        for i in range(turns * faces - int(faces / 2) - 1):
            rin += dr
            theta += dtheta
            x0, y0 = pts[-1][:2]
            x1, y1 = x0 + rin * cos(theta), y0 + rin * sin(theta)
            pts.append((x1, y1, elevation))
        # Close the outer end back onto the X axis.
        pts.append((x1, 0, elevation))
        p1 = self.create_polyline(
            pts, xsection_type="Rectangle", xsection_width=width, xsection_height=thickness, matname=material
        )
        if name:
            p1.name = name
        return p1
@pyaedt_function_handler()
def insert_3d_component(self, compFile, geoParams=None, szMatParams="", szDesignParams="", targetCS="Global"):
"""Insert a new 3D component.
Parameters
----------
compFile : str
Name of the component file.
geoParams : dict, optional
Geometrical parameters.
szMatParams : str, optional
Material parameters. The default is ``""``.
szDesignParams : str, optional
Design parameters. The default is ``""``.
targetCS : str, optional
Target coordinate system. The default is ``"Global"``.
Returns
-------
str
Name of the created 3D component.
References
----------
>>> oEditor.Insert3DComponent
"""
vArg1 = ["NAME:InsertComponentData"]
sz_geo_params = ""
if not geoParams:
geometryparams = self._app.get_components3d_vars(compFile)
if geometryparams:
geoParams = geometryparams
if geoParams:
sz_geo_params = "".join(["{0}='{1}' ".format(par, val) for par, val in geoParams.items()])
vArg1.append("TargetCS:=")
vArg1.append(targetCS)
vArg1.append("ComponentFile:=")
vArg1.append(compFile)
vArg1.append("IsLocal:=")
vArg1.append(False)
vArg1.append("UniqueIdentifier:=")
vArg1.append("")
varg2 = ["NAME:InstanceParameters"]
varg2.append("GeometryParameters:=")
varg2.append(sz_geo_params)
varg2.append("MaterialParameters:=")
varg2.append(szMatParams)
varg2.append("DesignParameters:=")
varg2.append(szDesignParams)
vArg1.append(varg2)
new_object_name = self.oeditor.Insert3DComponent(vArg1)
# TODO return an object
self.refresh_all_ids()
return new_object_name
@pyaedt_function_handler()
def get_3d_component_object_list(self, componentname):
"""Retrieve all objects belonging to a 3D component.
Parameters
----------
componentname : str
Name of the 3D component.
Returns
-------
List
List of objects belonging to the 3D component.
References
----------
>>> oeditor.GetChildObject
"""
if self._app._is_object_oriented_enabled():
compobj = self.oeditor.GetChildObject(componentname)
if compobj:
return list(compobj.GetChildNames())
else:
self.logger.warning("Object Oriented Beta Option is not enabled in this Desktop.")
return []
@pyaedt_function_handler()
def _check_actor_folder(self, actor_folder):
if not os.path.exists(actor_folder):
self.logger.error("Folder {} does not exist.".format(actor_folder))
return False
if not any(fname.endswith(".json") for fname in os.listdir(actor_folder)) or not any(
fname.endswith(".a3dcomp") for fname in os.listdir(actor_folder)
):
self.logger.error("At least one json and one a3dcomp file is needed.")
return False
return True
@pyaedt_function_handler()
def _initialize_multipart(self):
if MultiPartComponent._t in self._app._variable_manager.independent_variable_names:
return True
else:
return MultiPartComponent.start(self._app)
@pyaedt_function_handler()
def add_person(
self,
actor_folder,
speed=0.0,
global_offset=[0, 0, 0],
yaw=0,
pitch=0,
roll=0,
relative_cs_name=None,
actor_name=None,
):
"""Add a Walking Person Multipart from 3D Components.
It requires a json file in the folder containing person
infos. An example json file follows:
.. code-block:: json
{
"name": "person3",
"version": 1,
"class":"person",
"stride":"0.76meter",
"xlim":["-.43",".43"],
"ylim":["-.25",".25"],
"parts": {
"arm_left": {
"comp_name": "arm_left.a3dcomp",
"rotation_cs":["-.04","0","1.37"],
"rotation":"-30deg",
"compensation_angle":"-15deg",
"rotation_axis":"Y"
},
"arm_right": {
"comp_name": "arm_right.a3dcomp",
"rotation_cs":["0","0","1.37"],
"rotation":"30deg",
"compensation_angle":"30deg",
"rotation_axis":"Y"
},
"leg_left": {
"comp_name": "leg_left.a3dcomp",
"rotation_cs":["0","0",".9"],
"rotation":"20deg",
"compensation_angle":"22.5deg",
"rotation_axis":"Y"
},
"leg_right": {
"comp_name": "leg_right.a3dcomp",
"rotation_cs":["-.04","0",".9375"],
"rotation":"-20deg",
"compensation_angle":"-22.5deg",
"rotation_axis":"Y"
},
"torso": {
"comp_name": "torso.a3dcomp",
"rotation_cs":null,
"rotation":null,
"compensation_angle":null,
"rotation_axis":null
}
}
}
Parameters
----------
actor_folder : str
Path to the actor folder. It must contain a json settings
file and a 3dcomponent (.a3dcomp).
speed : float, optional
Object movement speed with time (m_per_sec).
global_offset : list, optional
Offset from Global Coordinate System [x,y,z] in meters.
yaw : float, optional
Yaw Rotation from Global Coordinate System in deg.
pitch : float, optional
Pitch Rotation from Global Coordinate System in deg.
roll : float, optional
Roll Rotation from Global Coordinate System in deg.
relative_cs_name : str
Relative CS Name of the actor. ``None`` for Global CS.
actor_name : str
If provided, it overrides the actor name in the JSON.
Returns
-------
:class:`pyaedt.modeler.actors.Person`
References
----------
>>> oEditor.Insert3DComponent
"""
self._initialize_multipart()
if not self._check_actor_folder(actor_folder):
return False
person1 = Person(actor_folder, speed=speed, relative_cs_name=relative_cs_name)
if actor_name:
person1._name = actor_name
person1.offset = global_offset
person1.yaw = self._arg_with_dim(yaw, "deg")
person1.pitch = self._arg_with_dim(pitch, "deg")
person1.roll = self._arg_with_dim(roll, "deg")
person1.insert(self._app)
self.multiparts.append(person1)
return person1
@pyaedt_function_handler()
def add_vehicle(
self,
actor_folder,
speed=0,
global_offset=[0, 0, 0],
yaw=0,
pitch=0,
roll=0,
relative_cs_name=None,
actor_name=None,
):
"""Add a Moving Vehicle Multipart from 3D Components.
It requires a json file in the folder containing vehicle
infos. An example json file follows:
.. code-block:: json
{
"name": "vehicle3",
"version": 1,
"type":"mustang",
"class":"vehicle",
"xlim":["-1.94","2.8"],
"ylim":["-.91",".91"],
"parts": {
"wheels_front": {
"comp_name": "wheels_front.a3dcomp",
"rotation_cs":["1.8970271810532" ,"0" ,"0.34809664860487"],
"tire_radius":"0.349",
"rotation_axis":"Y"
},
"wheels_rear": {
"comp_name": "wheels_rear.a3dcomp",
"rotation_cs":["-0.82228746728897" ,"0","0.34809664860487"],
"tire_radius":"0.349",
"rotation_axis":"Y"
},
"body": {
"comp_name": "body.a3dcomp",
"rotation_cs":null,
"tire_radius":null,
"rotation_axis":null
}
}
}
Parameters
----------
actor_folder : str
Path to the actor directory. It must contain a json settings file
and a 3dcomponent (``.a3dcomp`` file).
speed : float, optional
Object movement speed with time (m_per_sec).
global_offset : list, optional
Offset from Global Coordinate System [x,y,z] in meters.
yaw : float, optional
Yaw Rotation from Global Coordinate System in deg.
pitch : float, optional
Pitch Rotation from Global Coordinate System in deg.
roll : float, optional
Roll Rotation from Global Coordinate System in deg.
relative_cs_name : str
Relative CS Name of the actor. ``None`` for Global CS.
Returns
-------
:class:`pyaedt.modeler.actors.Vehicle`
References
----------
>>> oEditor.Insert3DComponent
"""
self._initialize_multipart()
if not self._check_actor_folder(actor_folder):
return False
vehicle = Vehicle(actor_folder, speed=speed, relative_cs_name=relative_cs_name)
if actor_name:
vehicle._name = actor_name
vehicle.offset = global_offset
vehicle.yaw = self._arg_with_dim(yaw, "deg")
vehicle.pitch = self._arg_with_dim(pitch, "deg")
vehicle.roll = self._arg_with_dim(roll, "deg")
vehicle.insert(self._app)
self.multiparts.append(vehicle)
return vehicle
@pyaedt_function_handler()
def add_bird(
self,
actor_folder,
speed=0,
global_offset=[0, 0, 0],
yaw=0,
pitch=0,
roll=0,
flapping_rate=50,
relative_cs_name=None,
actor_name=None,
):
"""Add a Bird Multipart from 3D Components.
It requires a json file in the folder containing bird infos. An example json file is showed here.
.. code-block:: json
{
"name": "bird1",
"version": 1,
"class":"bird",
"xlim":["-.7","2.75"],
"ylim":["-1.2","1.2"],
"parts": {
"body": {
"comp_name": "body.a3dcomp",
"rotation_cs":null,
"rotation":null,
"rotation_axis":null
},
"wing_right": {
"comp_name": "wing_left.a3dcomp",
"rotation_cs":[".001778" ,".00508" ,".00762"],
"rotation":"-45deg",
"rotation_axis":"X"
},
"wing_left": {
"comp_name": "wing_right.a3dcomp",
"rotation_cs":[".001778" ,"-.00508" ,".00762"],
"rotation":"45deg",
"rotation_axis":"X"
},
"tail": {
"comp_name": "tail.a3dcomp",
"rotation_cs":null,
"rotation":null,
"rotation_axis":null
},
"beak": {
"comp_name": "beak.a3dcomp",
"rotation_cs":null,
"rotation":null,
"rotation_axis":null
}
}
}
Parameters
----------
actor_folder : str
Path to the actor directory. It must contain a json settings file and a
3dcomponent (``.a3dcomp`` file)
speed : float, optional
Object movement speed with time (m_per_sec).
global_offset : list, optional
Offset from Global Coordinate System [x,y,z] in meters.
yaw : float, optional
Yaw Rotation from Global Coordinate System in deg.
pitch : float, optional
Pitch Rotation from Global Coordinate System in deg.
roll : float, optional
Roll Rotation from Global Coordinate System in deg.
flapping_rate : float, optional
Motion flapping rate in Hz.
relative_cs_name : str
Relative CS Name of the actor. ``None`` for Global CS.
Returns
-------
:class:`pyaedt.modeler.actors.Bird`
References
----------
>>> oEditor.Insert3DComponent
Examples
--------
>>> from pyaedt import Hfss
>>> app = Hfss()
>>> bird_dir = "path/to/bird/directory"
>>> bird1 = app.modeler.add_bird(bird_dir, 1.0, [19, 4, 3], 120, -5, flapping_rate=30)
"""
self._initialize_multipart()
if not self._check_actor_folder(actor_folder):
return False
bird = Bird(
actor_folder,
speed=speed,
flapping_rate=self._arg_with_dim(flapping_rate, "Hz"),
relative_cs_name=relative_cs_name,
)
if actor_name:
bird._name = actor_name
bird.offset = global_offset
bird.yaw = self._arg_with_dim(yaw, "deg")
bird.pitch = self._arg_with_dim(pitch, "deg")
bird.roll = self._arg_with_dim(roll, "deg")
bird.insert(self._app)
self.multiparts.append(bird)
return bird
@pyaedt_function_handler()
def add_environment(
self, env_folder, global_offset=[0, 0, 0], yaw=0, pitch=0, roll=0, relative_cs_name=None, environment_name=None
):
"""Add an Environment Multipart Component from Json file.
.. code-block:: json
{
"name": "open1",
"version": 1,
"class":"environment",
"xlim":["-5","95"],
"ylim":["-60","60"],
"parts": {
"open_area": {
"comp_name": "open1.a3dcomp",
"offset":null,
"rotation_cs":null,
"rotation":null,
"rotation_axis":null,
"duplicate_number":null,
"duplicate_vector":null
}
}
}
Parameters
----------
env_folder : str
Path to the actor directory. It must contain a json
settings file and a 3dcomponent (``.a3dcomp`` file).
global_offset : list, optional
Offset from Global Coordinate System [x,y,z] in meters.
yaw : float, optional
Yaw Rotation from Global Coordinate System in deg.
pitch : float, optional
Pitch Rotation from Global Coordinate System in deg.
roll : float, optional
Roll Rotation from Global Coordinate System in deg.
relative_cs_name : str
Relative CS Name of the actor. ``None`` for Global CS.
Returns
-------
:class:`pyaedt.modeler.multiparts.Environment`
References
----------
>>> oEditor.Insert3DComponent
"""
self._initialize_multipart()
if not self._check_actor_folder(env_folder):
return False
environment = Environment(env_folder, relative_cs_name=relative_cs_name)
if environment_name:
environment._name = environment_name
environment.offset = global_offset
environment.yaw = self._arg_with_dim(yaw, "deg")
environment.pitch = self._arg_with_dim(pitch, "deg")
environment.roll = self._arg_with_dim(roll, "deg")
environment.insert(self._app)
self.multiparts.append(environment)
return environment
@pyaedt_function_handler()
def create_choke(self, json_file):
"""Create a chock from json setting file.
Parameters
----------
json_file : str
Full path of the json file return for the function check_choke_values.
Returns
-------
List of
bool
``True`` when successful, ``False`` when failed.
:class:`pyaedt.modeler.Object3d.Object3d`
3D object core.
list of
:class:`pyaedt.modeler.Object3d.Object3d`
3D object winding.
list
list of point coordinates of the winding.
for each winding.
[bool, core_obj, [first_winding_obj, first_winding_point_list],
[second_winding_obj, second_winding_point_list], etc...]
Examples
--------
Json file has to be like the following example.
>>> from pyaedt import Hfss
>>> hfss = Hfss()
>>> dictionary_values = hfss.modeler.check_choke_values("C:/Example/Of/Path/myJsonFile.json")
>>> mychoke = hfss.modeler.create_choke("C:/Example/Of/Path/myJsonFile_Corrected.json")
"""
with open(json_file, "r") as read_file:
values = json.load(read_file)
self.logger.info("CHOKE INFO: " + str(values))
security_factor = 1.1
sr = security_factor
segment_number = 0
if values["Wire Section"]["Hexagon"]:
segment_number = 6
section = "Circle"
elif values["Wire Section"]["Octagon"]:
segment_number = 8
section = "Circle"
elif values["Wire Section"]["Circle"]:
section = "Circle"
else:
section = None
sep_layer = values["Layer Type"]["Separate"]
name_core = values["Core"]["Name"]
material_core = values["Core"]["Material"]
in_rad_core = values["Core"]["Inner Radius"]
out_rad_core = values["Core"]["Outer Radius"]
height_core = values["Core"]["Height"]
chamfer = values["Core"]["Chamfer"]
name_wind = values["Outer Winding"]["Name"]
material_wind = values["Outer Winding"]["Material"]
in_rad_wind = values["Outer Winding"]["Inner Radius"]
out_rad_wind = values["Outer Winding"]["Outer Radius"]
height_wind = values["Outer Winding"]["Height"]
w_dia = values["Outer Winding"]["Wire Diameter"]
turns = values["Outer Winding"]["Turns"]
turns2 = values["Mid Winding"]["Turns"]
turns3 = values["Inner Winding"]["Turns"]
teta = values["Outer Winding"]["Coil Pit(deg)"]
teta2 = values["Mid Winding"]["Coil Pit(deg)"]
teta3 = values["Inner Winding"]["Coil Pit(deg)"]
chamf = self._make_winding_follow_chamfer(chamfer, sr, w_dia, 1)
returned_list = [
self._make_core(name_core, material_core, in_rad_core, out_rad_core, height_core, chamfer),
]
if values["Layer"]["Double"]:
if values["Layer Type"]["Linked"]:
list_object = self._make_double_linked_winding(
name_wind,
material_wind,
in_rad_wind,
out_rad_wind,
height_wind,
w_dia,
teta,
teta2,
turns,
turns2,
chamfer,
chamf,
sr,
)
print("make_double_linked_winding")
else:
list_object = self._make_double_winding(
name_wind,
material_wind,
in_rad_wind,
out_rad_wind,
height_wind,
w_dia,
teta,
teta2,
turns,
turns2,
chamfer,
chamf,
sr,
sep_layer,
)
print("make_double_winding")
elif values["Layer"]["Triple"]:
if values["Layer Type"]["Linked"]:
list_object = self._make_triple_linked_winding(
name_wind,
material_wind,
in_rad_wind,
out_rad_wind,
height_wind,
w_dia,
teta,
teta2,
teta3,
turns,
turns2,
turns3,
chamfer,
chamf,
sr,
)
print("make_triple_linked_winding")
else:
list_object = self._make_triple_winding(
name_wind,
material_wind,
in_rad_wind,
out_rad_wind,
height_wind,
w_dia,
teta,
teta2,
teta3,
turns,
turns2,
turns3,
chamfer,
chamf,
sr,
sep_layer,
)
print("make_triple_winding")
else:
list_object = self._make_winding(
name_wind, material_wind, in_rad_wind, out_rad_wind, height_wind, teta, turns, chamf, sep_layer
)
print("make_winding")
list_duplicated_object = []
if type(list_object[0]) == list:
for i in range(len(list_object)):
success = list_object[i][0].set_crosssection_properties(
type=section, width=w_dia, num_seg=segment_number
)
returned_list = returned_list + list_object
else:
success = list_object[0].set_crosssection_properties(type=section, width=w_dia, num_seg=segment_number)
returned_list.append(list_object)
for key in values["Number of Windings"].keys():
if values["Number of Windings"][key]:
number_duplication = int(key)
if number_duplication >= 2:
if values["Mode"]["Common"] and number_duplication == 2:
if type(list_object[0]) == list:
for i in range(len(list_object)):
duplication = self.create_polyline(
position_list=list_object[i][1], name=name_wind, matname=material_wind
)
duplication.mirror([0, 0, 0], [-1, 0, 0])
duplication_points = self.get_vertices_of_line(duplication.name)
success = duplication.set_crosssection_properties(
type=section, width=w_dia, num_seg=segment_number
)
list_duplicated_object.append([duplication, duplication_points])
else:
duplication = self.create_polyline(
position_list=list_object[1], name=name_wind, matname=material_wind
)
duplication.mirror([0, 0, 0], [-1, 0, 0])
duplication_points = self.get_vertices_of_line(duplication.name)
success = duplication.set_crosssection_properties(type=section, width=w_dia, num_seg=segment_number)
list_duplicated_object.append([duplication, duplication_points])
else:
if type(list_object[0]) == list:
for j in range(number_duplication - 1):
for i in range(len(list_object)):
duplication = self.create_polyline(
position_list=list_object[i][1], name=name_wind, matname=material_wind
)
duplication.rotate("Z", (j + 1) * 360 / number_duplication)
duplication_points = self.get_vertices_of_line(duplication.name)
success = duplication.set_crosssection_properties(
type=section, width=w_dia, num_seg=segment_number
)
list_duplicated_object.append([duplication, duplication_points])
else:
for j in range(number_duplication - 1):
duplication = self.create_polyline(
position_list=list_object[1], name=name_wind, matname=material_wind
)
duplication.rotate("Z", (j + 1) * 360 / number_duplication)
duplication_points = self.get_vertices_of_line(duplication.name)
success = duplication.set_crosssection_properties(
type=section, width=w_dia, num_seg=segment_number
)
list_duplicated_object.append([duplication, duplication_points])
returned_list = returned_list + list_duplicated_object
returned_list.insert(0, success)
return returned_list
@pyaedt_function_handler()
def _make_winding(self, name, material, in_rad, out_rad, height, teta, turns, chamf, sep_layer):
teta_r = radians(teta)
points_list1 = [
[in_rad * cos(teta_r), -in_rad * sin(teta_r), height / 2 - chamf],
[(in_rad + chamf) * cos(teta_r), -(in_rad + chamf) * sin(teta_r), height / 2],
[out_rad - chamf, 0, height / 2],
[out_rad, 0, height / 2 - chamf],
[out_rad, 0, -height / 2 + chamf],
[out_rad - chamf, 0, -height / 2],
[(in_rad + chamf) * cos(teta_r), (in_rad + chamf) * sin(teta_r), -height / 2],
[in_rad * cos(teta_r), in_rad * sin(teta_r), -height / 2 + chamf],
[in_rad * cos(teta_r), in_rad * sin(teta_r), height / 2 - chamf],
]
polyline = self.create_polyline(position_list=points_list1, name=name, matname=material)
union_polyline1 = [polyline.name]
if turns > 1:
union_polyline2 = polyline.duplicate_around_axis(
cs_axis="Z", angle=2 * teta, nclones=turns, create_new_objects=True
)
else:
union_polyline2 = []
union_polyline = union_polyline1 + union_polyline2
list_positions = []
for i in range(len(union_polyline)):
list_positions = list_positions + self.get_vertices_of_line(union_polyline[i])
self.delete(union_polyline)
if sep_layer:
for i in range(4):
list_positions.pop()
list_positions.insert(0, [list_positions[0][0], list_positions[0][1], -height])
list_positions.append([list_positions[-1][0], list_positions[-1][1], -height])
true_polyline = self.create_polyline(position_list=list_positions, name=name, matname=material)
true_polyline.rotate("Z", 180 - (turns - 1) * teta)
list_positions = self.get_vertices_of_line(true_polyline.name)
return [true_polyline, list_positions]
return list_positions
    @pyaedt_function_handler()
    def _make_double_linked_winding(
        self,
        name,
        material,
        in_rad,
        out_rad,
        height,
        w_dia,
        teta,
        teta_in_wind,
        turns,
        turns_in_wind,
        chamfer,
        chamf_in_wind,
        sr,
    ):
        """Create a double-layer winding whose two layers are linked into one conductor.

        The two layers are first built as raw vertex lists
        (``sep_layer=False``), then the endpoint vertices of each layer are
        trimmed and patched so the outer layer flows into the inner one, and
        a single continuous polyline is drawn through all points.

        Returns
        -------
        list
            ``[polyline_object, vertex_positions]``.
        """
        list_object = self._make_double_winding(
            name,
            material,
            in_rad,
            out_rad,
            height,
            w_dia,
            teta,
            teta_in_wind,
            turns,
            turns_in_wind,
            chamfer,
            chamf_in_wind,
            sr,
            False,
        )
        points_out_wind = list_object[0]
        points_in_wind = list_object[1]
        # Trim the lead-in/lead-out vertices of the outer layer so it can be
        # spliced onto the inner layer (exact counts match _make_winding's
        # vertex layout -- order-sensitive, do not reorder these pops).
        for i in range(2):
            points_out_wind.pop(0)
        points_out_wind.pop()
        points_out_wind.pop()
        # Re-anchor the outer layer's last point straight down to z = -height.
        points_out_wind[-1] = [points_out_wind[-2][0], points_out_wind[-2][1], -height]
        # Give the inner layer a lead-in at z = -height and connect its tail
        # to the outer layer's start points.
        points_in_wind.insert(0, [points_in_wind[0][0], points_in_wind[0][1], -height])
        points_in_wind[-1] = [points_in_wind[-2][0], points_in_wind[-2][1], points_out_wind[1][2]]
        points_in_wind.append([points_in_wind[-3][0], points_in_wind[-3][1], points_out_wind[0][2]])
        outer_polyline = self.create_polyline(position_list=points_out_wind, name=name, matname=material)
        outer_polyline.rotate("Z", 180 - (turns - 1) * teta)
        inner_polyline = self.create_polyline(position_list=points_in_wind, name=name, matname=material)
        inner_polyline.rotate("Z", 180 - (turns_in_wind - 1) * teta_in_wind)
        # Mirror the outer layer so it winds in the opposite direction, then
        # align its angular position with the inner layer.
        outer_polyline.mirror([0, 0, 0], [0, -1, 0])
        outer_polyline.rotate("Z", turns_in_wind * teta_in_wind - turns * teta)
        list_polyline = [inner_polyline.name, outer_polyline.name]
        list_positions = []
        for i in range(len(list_polyline)):
            list_positions = list_positions + self.get_vertices_of_line(list_polyline[i])
        # Replace the two temporary polylines by one continuous conductor.
        self.delete(list_polyline)
        true_polyline = self.create_polyline(position_list=list_positions, name=name, matname=material)
        return [true_polyline, list_positions]
    @pyaedt_function_handler()
    def _make_triple_linked_winding(
        self,
        name,
        material,
        in_rad,
        out_rad,
        height,
        w_dia,
        teta,
        teta_mid_wind,
        teta_in_wind,
        turns,
        turns_mid_wind,
        turns_in_wind,
        chamfer,
        chamf_in_wind,
        sr,
    ):
        """Create a triple-layer winding whose three layers form one conductor.

        Same approach as :func:`_make_double_linked_winding` but with a middle
        layer: the three raw vertex lists are trimmed and patched at their
        endpoints so outer -> mid -> inner flow into each other, then a single
        polyline is drawn through all points.

        Returns
        -------
        list
            ``[polyline_object, vertex_positions]``.
        """
        # NOTE: the outer layer is built with one extra turn (turns + 1);
        # presumably to provide the material removed by the trims below.
        list_object = self._make_triple_winding(
            name,
            material,
            in_rad,
            out_rad,
            height,
            w_dia,
            teta,
            teta_mid_wind,
            teta_in_wind,
            turns + 1,
            turns_mid_wind,
            turns_in_wind,
            chamfer,
            chamf_in_wind,
            sr,
            False,
        )
        points_out_wind = list_object[0]
        points_mid_wind = list_object[1]
        points_in_wind = list_object[2]
        # Endpoint surgery: the pop counts match _make_winding's vertex
        # layout and are order-sensitive -- do not reorder.
        for i in range(3):
            points_out_wind.pop(0)
            points_out_wind.pop(0)
            points_out_wind.pop()
        points_out_wind[-1] = [points_out_wind[-2][0], points_out_wind[-2][1], -height]
        for i in range(2):
            points_mid_wind.pop(0)
        points_mid_wind.pop()
        points_mid_wind.pop()
        # Connect the mid layer's tail to the outer layer's start points.
        points_mid_wind[-1] = [points_mid_wind[-2][0], points_mid_wind[-2][1], points_out_wind[1][2]]
        points_mid_wind.append([points_mid_wind[-4][0], points_mid_wind[-4][1], points_out_wind[0][2]])
        # Lead-in for the inner layer, then connect its tail to the mid layer.
        points_in_wind.insert(0, [points_in_wind[0][0], points_in_wind[0][1], -height])
        points_in_wind[-1] = [points_in_wind[-2][0], points_in_wind[-2][1], points_mid_wind[1][2]]
        points_in_wind.append([points_in_wind[-3][0], points_in_wind[-3][1], points_mid_wind[0][2]])
        outer_polyline = self.create_polyline(position_list=points_out_wind, name=name, matname=material)
        outer_polyline.rotate("Z", 180 - (turns - 1) * teta)
        mid_polyline = self.create_polyline(position_list=points_mid_wind, name=name, matname=material)
        mid_polyline.rotate("Z", 180 - (turns_mid_wind - 1) * teta_mid_wind)
        inner_polyline = self.create_polyline(position_list=points_in_wind, name=name, matname=material)
        inner_polyline.rotate("Z", 180 - (turns_in_wind - 1) * teta_in_wind)
        # Mirror the mid layer so successive layers wind in alternate
        # directions, then bring all layers to the same angular position.
        mid_polyline.mirror([0, 0, 0], [0, -1, 0])
        outer_polyline.rotate("Z", turns * teta - turns_mid_wind * teta_mid_wind)
        mid_polyline.rotate("Z", turns_in_wind * teta_in_wind - turns_mid_wind * teta_mid_wind)
        outer_polyline.rotate("Z", turns_in_wind * teta_in_wind - turns_mid_wind * teta_mid_wind)
        list_polyline = [inner_polyline.name, mid_polyline.name, outer_polyline.name]
        list_positions = []
        for i in range(len(list_polyline)):
            list_positions = list_positions + self.get_vertices_of_line(list_polyline[i])
        # Replace the three temporary polylines by one continuous conductor.
        self.delete(list_polyline)
        true_polyline = self.create_polyline(position_list=list_positions, name=name, matname=material)
        return [true_polyline, list_positions]
@pyaedt_function_handler()
def _make_double_winding(
self,
name,
material,
in_rad,
out_rad,
height,
w_dia,
teta,
teta_in_wind,
turns,
turns_in_wind,
chamfer,
chamf_in_wind,
sr,
sep_layer,
):
chamf = self._make_winding_follow_chamfer(chamfer, sr, w_dia, 3)
in_rad_in_wind = in_rad + sr * w_dia
out_rad_in_wind = out_rad - sr * w_dia
height_in_wind = height - 2 * sr * w_dia
list_object = [
self._make_winding(name, material, in_rad, out_rad, height, teta, turns, chamf, sep_layer),
self._make_winding(
name,
material,
in_rad_in_wind,
out_rad_in_wind,
height_in_wind,
teta_in_wind,
turns_in_wind,
chamf_in_wind,
sep_layer,
),
]
return list_object
@pyaedt_function_handler()
def _make_triple_winding(
self,
name,
material,
in_rad,
out_rad,
height,
w_dia,
teta,
teta_mid_wind,
teta_in_wind,
turns,
turns_mid_wind,
turns_in_wind,
chamfer,
chamf_in_wind,
sr,
sep_layer,
):
chamf = self._make_winding_follow_chamfer(chamfer, sr, w_dia, 5)
chamf_mid_wind = self._make_winding_follow_chamfer(chamfer, sr, w_dia, 3)
in_rad_in_wind = in_rad + 2 * sr * w_dia
in_rad_mid_wind = in_rad + sr * w_dia
out_rad_in_wind = out_rad - 2 * sr * w_dia
out_rad_mid_wind = out_rad - sr * w_dia
height_in_wind = height - 4 * sr * w_dia
height_mid_wind = height - 2 * sr * w_dia
list_object = [
self._make_winding(name, material, in_rad, out_rad, height, teta, turns, chamf, sep_layer),
self._make_winding(
name,
material,
in_rad_mid_wind,
out_rad_mid_wind,
height_mid_wind,
teta_mid_wind,
turns_mid_wind,
chamf_mid_wind,
sep_layer,
),
self._make_winding(
name,
material,
in_rad_in_wind,
out_rad_in_wind,
height_in_wind,
teta_in_wind,
turns_in_wind,
chamf_in_wind,
sep_layer,
),
]
return list_object
@pyaedt_function_handler()
def _make_core(self, name, material, in_rad, out_rad, height, chamfer):
tool = self.create_cylinder("Z", [0, 0, -height / 2], in_rad, height, 0, "Tool", matname=material)
core = self.create_cylinder("Z", [0, 0, -height / 2], out_rad, height, 0, name=name, matname=material)
core.subtract(tool, False)
for n in core.edges:
n.chamfer(chamfer)
return core
@pyaedt_function_handler()
def check_choke_values(self, json_file, create_another_file=True):
"""Verify the values in the json file and create another one with corrected values next to the first one.
Parameters
----------
json_file : str
Full path to json file;
Specific json file containing all the parameters to design your on choke.
create_another_file : bool
Create another file next to the first one in adding _Corrected to the file name if it is True
else truncate the existing file
Returns
-------
List
``True`` when successful, ``False`` when failed.
dictionary : class : 'dict'
Examples
--------
Dictionary of the Json file has to be like the following example :
dictionary = {
"Number of Windings": {"1": True, "2": False, "3": False, "4": False},
"Layer": {"Simple": True, "Double": False, "Triple": False},
"Layer Type": {"Separate": True, "Linked": False},
"Similar Layer": {"Similar": True, "Different": False},
"Mode": {"Differential": True, "Common": False},
"Wire Section": {"None": False, "Hexagon": False, "Octagon": True, "Circle": False},
"Core": {"Name": "Core", "Material": "ferrite", "Inner Radius": 11, "Outer Radius": 17, "Height": 7,
"Chamfer": 0.8},
"Outer Winding": {"Name": "Winding", "Material": "copper", "Inner Radius": 12, "Outer Radius": 16,
"Height": 8, "Wire Diameter": 1, "Turns": 10, "Coil Pit(deg)": 9, "Occupation(%)": 0},
"Mid Winding": {"Turns": 8, "Coil Pit(deg)": 0.1, "Occupation(%)": 0},
"Inner Winding": {"Turns": 12, "Coil Pit(deg)": 0.1, "Occupation(%)": 0}
}
>>> import json
>>> with open("C:/Example/Of/Path/myJsonFile.json", "w") as outfile:
>>> json.dump(dictionary, outfile)
>>> from pyaedt import Hfss
>>> hfss = Hfss()
>>> dictionary_values = hfss.modeler.check_choke_values("C:/Example/Of/Path/myJsonFile.json")
"""
dictionary_model = {
"Number of Windings": {"1": True, "2": False, "3": False, "4": False},
"Layer": {"Simple": True, "Double": False, "Triple": False},
"Layer Type": {"Separate": True, "Linked": False},
"Similar Layer": {"Similar": True, "Different": False},
"Mode": {"Differential": True, "Common": False},
"Wire Section": {"None": False, "Hexagon": False, "Octagon": True, "Circle": False},
"Core": {
"Name": "Core",
"Material": "ferrite",
"Inner Radius": 11,
"Outer Radius": 17,
"Height": 7,
"Chamfer": 0.8,
},
"Outer Winding": {
"Name": "Winding",
"Material": "copper",
"Inner Radius": 12,
"Outer Radius": 16,
"Height": 8,
"Wire Diameter": 1,
"Turns": 10,
"Coil Pit(deg)": 9,
"Occupation(%)": 0,
},
"Mid Winding": {"Turns": 8, "Coil Pit(deg)": 0.1, "Occupation(%)": 0},
"Inner Winding": {"Turns": 12, "Coil Pit(deg)": 0.1, "Occupation(%)": 0},
}
are_inequations_checkable = True
security_factor = 1.1
sr = security_factor
with open(json_file, "r") as read_file:
values = json.load(read_file)
for key, value in dictionary_model.items():
if key not in values:
self.logger.error("Missing or incorrect key {}.".format(key))
return [False, values]
if isinstance(value, dict):
for k, v in value.items():
if k not in values[key]:
self.logger.error("Missing or incorrect key {}.".format(k))
return [False, values]
for f_key in values.keys():
count_true = False
if (
f_key == "Number of Windings"
or f_key == "Layer"
or f_key == "Layer Type"
or f_key == "Similar Layer"
or f_key == "Mode"
or f_key == "Wire Section"
):
for s_key in values[f_key].keys():
if type(values[f_key][s_key]) == bool:
if count_true:
values[f_key][s_key] = False
if values[f_key][s_key]:
count_true = True
else:
self.logger.error(
"A character entered is invalid. The values of the dictionary %s must be boolean" % f_key
)
are_inequations_checkable = False
break
try:
core_name = str(values["Core"]["Name"])
if len(core_name) > 0:
values["Core"]["Name"] = core_name
except:
self.logger.warning("Core Name must be a non-null string. A default name Core has been set.")
values["Core"]["Name"] = "Core"
try:
core_material = str(values["Core"]["Material"])
if len(core_material) > 0:
if self.materials.checkifmaterialexists(core_material):
values["Core"]["Material"] = self.materials._get_aedt_case_name(core_material)
else:
self.logger.error(
"%s is not in the material library."
" It can be add using the method add_material" % core_material
)
values["Core"]["Material"] = "ferrite"
except:
self.logger.warning("Core Material must be a non-null string. A default material Core has been set.")
values["Core"]["Material"] = "ferrite"
try:
winding_name = str(values["Outer Winding"]["Name"])
if len(winding_name) > 0:
values["Outer Winding"]["Name"] = winding_name
except:
self.logger.warning("Outer Winding Name must be a non-null string. A default name Winding has been set.")
values["Outer Winding"]["Name"] = "Winding"
try:
winding_material = str(values["Outer Winding"]["Material"])
if len(winding_material) > 0:
if self.materials.checkifmaterialexists(winding_material):
values["Outer Winding"]["Material"] = self.materials._get_aedt_case_name(winding_material)
else:
self.logger.error(
"%s is not in the material library."
" It can be add using the method add_material" % winding_material
)
values["Outer Winding"]["Material"] = "copper"
except:
self.logger.warning(
"Outer Winding Material must be a non-null string." " A default material Winding has been set."
)
values["Outer Winding"]["Material"] = "copper"
in_rad_core, are_inequations_checkable = self._check_value_type(
values["Core"]["Inner Radius"],
float,
are_inequations_checkable,
"Inner Radius(Core)",
"a strictly positive float",
)
out_rad_core, are_inequations_checkable = self._check_value_type(
values["Core"]["Outer Radius"],
float,
are_inequations_checkable,
"Outer Radius(Core)",
"a strictly positive float",
)
height_core, are_inequations_checkable = self._check_value_type(
values["Core"]["Height"], float, are_inequations_checkable, "Height(Core)", "a strictly positive float"
)
try:
core_chamfer = float(values["Core"]["Chamfer"])
if core_chamfer < 0:
self.logger.error(
"The character entered is invalid. Chamfer must be a positive float." " It must be changed"
)
are_inequations_checkable = False
except:
self.logger.error(
"The character entered is invalid. Chamfer must be a positive float." " It must be changed"
)
are_inequations_checkable = False
in_rad_wind, are_inequations_checkable = self._check_value_type(
values["Outer Winding"]["Inner Radius"],
float,
are_inequations_checkable,
"Inner Radius(Winding)",
"a strictly positive float",
)
out_rad_wind, are_inequations_checkable = self._check_value_type(
values["Outer Winding"]["Outer Radius"],
float,
are_inequations_checkable,
"Outer Radius(Winding)",
"a strictly positive float",
)
height_wind, are_inequations_checkable = self._check_value_type(
values["Outer Winding"]["Height"],
float,
are_inequations_checkable,
"Height(Winding)",
"a strictly positive float",
)
turns, are_inequations_checkable = self._check_value_type(
values["Outer Winding"]["Turns"],
int,
are_inequations_checkable,
"Turns(Outer Winding)",
"a strictly positive integer",
)
wind_pit, are_inequations_checkable = self._check_value_type(
values["Outer Winding"]["Coil Pit(deg)"],
float,
are_inequations_checkable,
"Coil Pit(Outer Winding)",
"a strictly positive float",
)
dia_wire, are_inequations_checkable = self._check_value_type(
values["Outer Winding"]["Wire Diameter"],
float,
are_inequations_checkable,
"Wire Diameter",
"a strictly positive float",
)
turns2, are_inequations_checkable = self._check_value_type(
values["Mid Winding"]["Turns"],
int,
are_inequations_checkable,
"Turns(Mid Winding)",
"a strictly positive integer",
)
wind_pit2, are_inequations_checkable = self._check_value_type(
values["Mid Winding"]["Coil Pit(deg)"],
float,
are_inequations_checkable,
"Coil Pit(Mid Winding)",
"a strictly positive float",
)
turns3, are_inequations_checkable = self._check_value_type(
values["Inner Winding"]["Turns"],
int,
are_inequations_checkable,
"Turns(Inner Winding)",
"a strictly positive integer",
)
wind_pit3, are_inequations_checkable = self._check_value_type(
values["Inner Winding"]["Coil Pit(deg)"],
float,
are_inequations_checkable,
"Coil Pit(Inner Winding)",
"a strictly positive float",
)
if are_inequations_checkable:
teta = radians(wind_pit)
teta2 = radians(wind_pit2)
teta3 = radians(wind_pit3)
nb_wind = 1
if values["Number of Windings"]["2"]:
nb_wind = 2
if values["Number of Windings"]["3"]:
nb_wind = 3
if values["Number of Windings"]["4"]:
nb_wind = 4
nb_lay = 0
if values["Layer"]["Double"]:
nb_lay = 2
if values["Layer"]["Triple"]:
nb_lay = 4
if in_rad_wind > in_rad_core - (nb_lay + 1) * sr * dia_wire / 2:
in_rad_wind = in_rad_core - (nb_lay + 1) * sr * dia_wire / 2
values["Outer Winding"]["Inner Radius"] = in_rad_wind
self.logger.warning("Inner Radius of the winding is too high. The maximum value has been set instead.")
if out_rad_wind < out_rad_core + (nb_lay + 1) * sr * dia_wire / 2:
out_rad_wind = out_rad_core + (nb_lay + 1) * sr * dia_wire / 2
values["Outer Winding"]["Outer Radius"] = out_rad_wind
self.logger.warning("Outer Radius of the winding is too low. The minimum value has been set instead.")
if height_wind < height_core + (nb_lay + 1) * sr * dia_wire:
height_wind = height_core + (nb_lay + 1) * sr * dia_wire
values["Outer Winding"]["Height"] = height_wind
self.logger.warning("Height of the winding is too low. The minimum value has been set instead.")
if asin((sr * dia_wire / 2) / in_rad_wind) > pi / nb_wind / turns:
turns = int(pi / nb_wind / asin((sr * dia_wire / 2) / in_rad_wind))
values["Outer Winding"]["Turns"] = turns
self.logger.warning(
"Number of turns of the winding is too high. The maximum value has been set instead."
)
if teta > pi / nb_wind / turns:
teta = GeometryOperators.degrees_default_rounded(pi / nb_wind / turns, 3)
values["Outer Winding"]["Coil Pit(deg)"] = teta
self.logger.warning("Winding Pit is too high. The maximum value has been set instead.")
elif teta < asin((sr * dia_wire / 2) / in_rad_wind):
teta = GeometryOperators.degrees_over_rounded(asin((sr * dia_wire / 2) / in_rad_wind), 3)
values["Outer Winding"]["Coil Pit(deg)"] = teta
self.logger.warning("Winding Pit is too low. The minimum value has been set instead.")
else:
teta = degrees(teta)
occ = 100 * turns * teta / (180 / nb_wind)
if occ == 100:
teta = teta - 0.0003
values["Outer Winding"]["Coil Pit(deg)"] = teta
if teta < asin((sr * dia_wire / 2) / in_rad_wind) and turns > 1:
turns = turns - 1
occ = 100 * turns * teta / (180 / nb_wind)
values["Outer Winding"]["Occupation(%)"] = occ
if values["Similar Layer"]["Different"]:
if values["Layer"]["Double"] or values["Layer"]["Triple"]:
if asin((sr * dia_wire / 2) / (in_rad_wind + sr * dia_wire)) > pi / nb_wind / turns2:
turns2 = int(pi / nb_wind / asin((sr * dia_wire / 2) / (in_rad_wind + sr * dia_wire)))
values["Mid Winding"]["Turns"] = turns2
self.logger.warning(
"Number of turns of the winding of the second layer is too high. "
"The maximum value has been set instead."
)
if turns2 < turns:
turns2 = turns
values["Mid Winding"]["Turns"] = turns2
self.logger.warning(
"Number of turns of the winding of the second layer should be "
"at least equal to those of the first layer."
)
if teta2 > pi / nb_wind / turns2:
teta2 = GeometryOperators.degrees_default_rounded(pi / nb_wind / turns2, 3)
values["Mid Winding"]["Coil Pit(deg)"] = teta2
self.logger.warning(
"Winding Pit of the second layer is too high. The maximum value has been set instead."
)
elif teta2 < asin((sr * dia_wire / 2) / (in_rad_wind + sr * dia_wire)):
teta2 = GeometryOperators.degrees_over_rounded(
asin((sr * dia_wire / 2) / (in_rad_wind + sr * dia_wire)), 3
)
values["Mid Winding"]["Coil Pit(deg)"] = teta2
self.logger.warning(
"Winding Pit of the second layer is too low. The minimum value has been set instead."
)
else:
teta2 = degrees(teta2)
values["Mid Winding"]["Coil Pit(deg)"] = teta2
occ2 = 100 * turns2 * teta2 / (180 / nb_wind)
if occ2 < occ:
teta2 = ceil(turns * teta / turns2 * 1000) / 1000
values["Mid Winding"]["Coil Pit(deg)"] = teta2
occ2 = 100 * turns2 * teta2 / (180 / nb_wind)
self.logger.warning(
"Occupation of the second layer should be at least equal to that of the first layer."
)
if occ2 == 100:
teta2 = teta2 - 0.0002
values["Mid Winding"]["Coil Pit(deg)"] = teta2
occ2 = 100 * turns2 * teta2 / (180 / nb_wind)
values["Mid Winding"]["Occupation(%)"] = occ2
# TODO if occ2 == 100: method can be improve
if values["Layer"]["Triple"]:
if asin((sr * dia_wire / 2) / (in_rad_wind + 2 * sr * dia_wire)) > pi / nb_wind / turns3:
turns3 = int(pi / nb_wind / asin((sr * dia_wire / 2) / (in_rad_wind + 2 * sr * dia_wire)))
values["Inner Winding"]["Turns"] = turns3
self.logger.warning(
"Number of turns of the winding of the third layer is too high. "
"The maximum value has been set instead."
)
if turns3 < turns2:
turns3 = turns2
values["Inner Winding"]["Turns"] = turns3
self.logger.warning(
"Number of turns of the winding of the third layer should be "
"at least equal to those of the second layer."
)
if teta3 > pi / nb_wind / turns3:
teta3 = GeometryOperators.degrees_default_rounded(pi / nb_wind / turns3, 3)
values["Inner Winding"]["Coil Pit(deg)"] = teta3
self.logger.warning(
"Winding Pit of the third layer is too high. The maximum value has been set instead."
)
elif teta3 < asin((sr * dia_wire / 2) / (in_rad_wind + 2 * sr * dia_wire)):
teta3 = GeometryOperators.degrees_over_rounded(
asin((sr * dia_wire / 2) / (in_rad_wind + 2 * sr * dia_wire)), 3
)
values["Inner Winding"]["Coil Pit(deg)"] = teta3
self.logger.warning(
"Winding Pit of the third layer is too low. The minimum value has been set instead."
)
else:
teta3 = degrees(teta3)
values["Inner Winding"]["Coil Pit(deg)"] = teta3
occ3 = 100 * turns3 * teta3 / (180 / nb_wind)
if occ3 < occ2:
teta3 = ceil(turns2 * teta2 / turns3 * 1000) / 1000
values["Inner Winding"]["Coil Pit(deg)"] = teta3
occ3 = 100 * turns3 * teta3 / (180 / nb_wind)
if occ3 == 100:
teta3 = teta3 - 0.0001
values["Inner Winding"]["Coil Pit(deg)"] = teta3
occ3 = 100 * turns3 * teta3 / (180 / nb_wind)
values["Inner Winding"]["Occupation(%)"] = occ3
# TODO if occ3 == 100: method can be improve
else:
values["Mid Winding"]["Coil Pit(deg)"] = teta
values["Inner Winding"]["Coil Pit(deg)"] = teta
values["Mid Winding"]["Turns"] = turns
values["Inner Winding"]["Turns"] = turns
values["Mid Winding"]["Occupation(%)"] = occ
values["Inner Winding"]["Occupation(%)"] = occ
if create_another_file:
root_path, extension_path = os.path.splitext(json_file)
new_path = root_path + "_Corrected" + extension_path
with open(new_path, "w") as outfile:
json.dump(values, outfile)
else:
with open(json_file, "w") as outfile:
json.dump(values, outfile)
return [are_inequations_checkable, values]
@pyaedt_function_handler()
def _make_winding_follow_chamfer(self, chamfer, security_factor, wire_diameter, layer_number):
sr = security_factor
w_rad_inc = layer_number * sr * wire_diameter / 2
distance = sqrt(2 * w_rad_inc**2) - w_rad_inc + sqrt(2 * chamfer**2) / 2
return sqrt(2) * distance
@pyaedt_function_handler()
def _check_value_type(self, taken_value, value_type, are_inequations_checkable, part_message1, part_message2):
are_inequations_checkable = are_inequations_checkable
if value_type == int:
try:
receiving_variable = int(taken_value)
if receiving_variable <= 0:
self.logger.error(
"The character entered is invalid. "
+ part_message1
+ " must be "
+ part_message2
+ ". It must be changed"
)
are_inequations_checkable = False
except:
receiving_variable = None
self.logger.error(
"The character entered is invalid. "
+ part_message1
+ " must be "
+ part_message2
+ ". It must be changed"
)
are_inequations_checkable = False
elif value_type == float:
try:
receiving_variable = float(taken_value)
if receiving_variable <= 0:
self.logger.error(
"The character entered is invalid. "
+ part_message1
+ " must be "
+ part_message2
+ ". It must be changed"
)
are_inequations_checkable = False
except:
receiving_variable = None
self.logger.error(
"The character entered is invalid. "
+ part_message1
+ " must be "
+ part_message2
+ ". It must be changed"
)
are_inequations_checkable = False
return receiving_variable, are_inequations_checkable
|
98195
|
from arekit.common.news.objects_parser import SentenceObjectsParserPipelineItem
from arekit.contrib.source.ruattitudes.text_object import TextObject
class RuAttitudesTextEntitiesParser(SentenceObjectsParserPipelineItem):
    """Pipeline item that yields the entities of a RuAttitudes sentence.

    Each ``TextObject`` of the sentence is converted to an entity and paired
    with its bound inside the sentence.
    """
    def __init__(self):
        super(RuAttitudesTextEntitiesParser, self).__init__(
            iter_objs_func=self.__iter_subs_values_with_bounds)
    @staticmethod
    def __iter_subs_values_with_bounds(sentence):
        # Generator over (entity, bound) pairs for every text object.
        for obj in sentence.iter_objects():
            assert(isinstance(obj, TextObject))
            resolve_id = lambda sent_id: sentence.get_doc_level_text_object_id(sent_id)
            yield obj.to_entity(resolve_id), obj.Bound
|
98202
|
from .tresnet import tresnet_m, tresnet_l, tresnet_xl, tresnet_m_448, tresnet_l_448, tresnet_xl_448
# Public API: re-export the six TResNet model constructors.
__all__ = ['tresnet_m', 'tresnet_l', 'tresnet_xl', 'tresnet_m_448', 'tresnet_l_448', 'tresnet_xl_448']
__version__ = '1.0.8'
__authors__ = '<NAME>'  # placeholder left by an anonymization step
|
98205
|
import traceback
import sys
import depgraph
from common import utils
from common.constants import UNKNOWN_LABEL, VOID, LOC_VAR, FUN_ARG, INT
from common.constants import ENUM_DW_FORM_exprloc, ENUM_ABBREV_CODE, TTYPES
from elements.ttype import Ttype
from elements.givs import Node
class Offset(Node):
    """Base element for a memory offset.

    The class-level counters below are shared tallies that ``stat`` (on this
    class and on its subclasses) increments to aggregate statistics across
    all offset instances.
    """
    total = 0  # offsets seen overall
    known = 0  # offsets whose train name is known
    unknown = 0  # offsets whose train name is UNKNOWN_LABEL
    inf = 0  # offsets whose name is inferred
    giv = 0  # offsets whose name is given
    tp_1p = 0  # NOTE(review): presumably true/false positive/negative tallies -- confirm metric semantics
    fp_1p = 0
    tn_1p = 0
    fn_1p = 0
    correct = 0
    def __init__(self, *args, **kwargs):
        super().__init__(*args, **kwargs)
    def __repr__(self):
        return 'Offset'
    def __str__(self):
        return repr(self)
    def stat(self):
        # Count every offset at the base level; subclasses call this via super().
        Offset.total += 1
class GivOffset(Offset):
    """An offset whose value is given (not inferred).

    Expected kwargs: ``binary``, ``offset``, ``access``, ``exp``.
    """
    total = 0  # GivOffset instances seen
    def __init__(self, *args, **kwargs):
        super().__init__(*args, **kwargs)
        self.binary = kwargs['binary']
        self.offset = kwargs['offset']
        self.access = kwargs['access']
        self.exp = kwargs['exp']
        self.name = 'GivOffset'
    def __repr__(self):
        return '[GivOffset {}]'.format(repr(self.offset))
    def __str__(self):
        return repr(self)
    def stat(self):
        # Counted both at the subclass level and as a "given" offset overall.
        super().stat()
        GivOffset.total += 1
        Offset.giv += 1
class TempOffset(Offset):
    """A temporary offset expressed relative to a base pointer.

    Expected kwargs: ``binary``, ``base_pointer``, ``offset``. Program
    counters where the offset is used are accumulated via ``add_pc``.
    """
    total = 0  # TempOffset instances seen
    def __init__(self, *args, **kwargs):
        super().__init__(*args, **kwargs)
        self.binary = kwargs['binary']
        self.base_pointer = kwargs['base_pointer']
        self.offset = kwargs['offset']
        self.pcs = set()  # program counters referencing this offset
    def __repr__(self):
        return '[TempOffset {} {}]'.format(self.base_pointer, self.offset)
    def __str__(self):
        return repr(self)
    def add_pc(self, pc):
        self.pcs.add(pc)
    def stat(self):
        # NOTE(review): temp offsets are tallied under Offset.giv as well --
        # presumably intentional (treated as given, not inferred); confirm.
        super().stat()
        TempOffset.total += 1
        Offset.giv += 1
class DirectOffset(Offset):
    """A direct (absolute-address) offset, e.g. a global variable.

    Class attributes are corpus-wide statistic counters; the ``ttype_*``
    counters are maintained by the associated type predictor.
    """

    total = 0
    known = 0
    unknown = 0
    inf = 0
    giv = 0
    correct = 0
    ttype_total = 0
    ttype_known = 0
    ttype_unknown = 0
    ttype_inf = 0
    ttype_tp_1p = 0
    ttype_fp_1p = 0
    ttype_tn_1p = 0
    ttype_fn_1p = 0
    ttype_correct = 0

    def __init__(self, *args, **kwargs):
        super().__init__(*args, **kwargs)
        self.binary = kwargs['binary']
        self.offset = kwargs['offset']
        self.access = kwargs['access']
        self.name = '@DO'
        self.is_name_given = False
        self.ttype = Ttype(owner=self)
        self.n2p_type = self.binary.config.INF
        self.train_name = UNKNOWN_LABEL  # ground-truth name (from DWARF)
        self.test_name = UNKNOWN_LABEL   # predicted name
        self.var_type = LOC_VAR

    def __repr__(self):
        return '[DirectOffset {} {}]'.format(format(self.offset, '02x'), repr(self.access))

    def __str__(self):
        if self.test_name == self.train_name or self.is_name_given:
            return '[DirectOffset {} {}]'.format(self.train_name, str(self.ttype))
        else:
            if self.train_name == UNKNOWN_LABEL:
                return '[DirectOffset (WRONGU {} {}) {}]'.format(self.train_name, self.test_name, str(self.ttype))
            else:
                return '[DirectOffset (WRONGK {} {}) {}]'.format(self.train_name, self.test_name, str(self.ttype))

    def train_info(self, die, ttype):
        """Record ground-truth name/type for this offset from a DWARF DIE.

        When several DIEs map to the same offset: prefer a known type over an
        unknown/void one; otherwise keep the lexicographically smaller name.
        """
        origin = self.binary.debug_info.get_name_origin(die)
        name_attr = origin.attributes.get('DW_AT_name', None)
        if name_attr is not None:
            name = name_attr.value.decode('ascii')
            if self.train_name == UNKNOWN_LABEL:
                self.ttype.train_info(ttype)
                self.train_name = name
            else:
                if self.ttype.train_name in (UNKNOWN_LABEL, VOID) and ttype != UNKNOWN_LABEL:
                    self.ttype.train_info(ttype)
                    # BUG FIX: was `self.train_name == name`, a no-op comparison;
                    # the intent is to adopt the name that comes with the known type.
                    self.train_name = name
                else:
                    if self.train_name > name:
                        self.train_name = name
                        self.ttype.train_info(ttype)
                    else:
                        pass

    def stat(self):
        """Update the corpus-wide counters for this offset."""
        super().stat()
        DirectOffset.total += 1
        if self.is_name_given:
            DirectOffset.giv += 1
            Offset.giv += 1
        else:
            DirectOffset.inf += 1
            Offset.inf += 1
            # NOTE(review): indentation reconstructed from a whitespace-mangled
            # source — known/unknown and tp/fp are counted for inferred
            # (non-given) offsets only; confirm against the original file.
            if self.train_name != UNKNOWN_LABEL:
                DirectOffset.known += 1
                Offset.known += 1
                Offset.tp_1p += 1
            else:
                DirectOffset.unknown += 1
                Offset.unknown += 1
                Offset.fp_1p += 1

    def debug_info(self):
        """Serialize this offset as a DWARF variable DIE (abbrev, name, location, type)."""
        bs = bytearray()
        bs.append(ENUM_ABBREV_CODE['VARIABLE'])
        # name (NUL-terminated ASCII)
        bs.extend(map(ord, self.test_name))
        bs.append(0x00)
        # Track names we invented (not types, not unknown, not real symbols).
        if self.test_name not in TTYPES \
                and self.test_name != UNKNOWN_LABEL \
                and self.test_name not in self.binary.sections.symbol_names:
            self.binary.predicted.add(self.test_name)
        # location expression: length, DW_OP_addr, absolute address
        bs.append(self.binary.config.ADDRESS_BYTE_SIZE + 1)
        bs.append(ENUM_DW_FORM_exprloc['DW_OP_addr'])
        bs += utils.encode_address(self.offset, self.binary)
        # type reference: fall back to `int` when the predicted type is unusable
        if self.ttype.test_name is None \
                or self.ttype.test_name in (UNKNOWN_LABEL, VOID) \
                or self.ttype.test_name not in TTYPES:
            bs += utils.encode_kbytes(self.binary.types.get_offset(INT), 4)
        else:
            bs += utils.encode_kbytes(self.binary.types.get_offset(self.ttype.test_name), 4)
        return bs
class StringArrayOffset(DirectOffset):
    """A direct offset that points at an array of strings."""

    total = 0
    known = 0
    unknown = 0
    inf = 0
    giv = 0
    correct = 0
    ttype_total = 0
    ttype_known = 0
    ttype_unknown = 0
    ttype_inf = 0
    ttype_tp_1p = 0
    ttype_fp_1p = 0
    ttype_tn_1p = 0
    ttype_fn_1p = 0
    ttype_correct = 0

    def __init__(self, *args, **kwargs):
        super().__init__(*args, **kwargs)
        self.name = '@SA'
        self.strings = kwargs['strings']  # the string literals found at this offset
        self.access = kwargs['access']

    def __repr__(self):
        return '[StringArray {} ({}) {}]'.format(format(self.offset, '02x'), ', '.join(map(repr, self.strings)), str(self.access))

    def __str__(self):
        if self.test_name == self.train_name:
            return '[StringArray {} {}]'.format(self.train_name, str(self.ttype))
        else:
            if self.train_name == UNKNOWN_LABEL:
                return '[StringArray (WRONGU {} {}) {}]'.format(self.train_name, self.test_name, str(self.ttype))
            else:
                return '[StringArray (WRONGK {} {}) {}]'.format(self.train_name, self.test_name, str(self.ttype))

    def stat(self):
        # super().stat() already updates the DirectOffset/Offset counters.
        super().stat()
        StringArrayOffset.total += 1
        if self.is_name_given:
            StringArrayOffset.giv += 1
        else:
            StringArrayOffset.inf += 1
            # NOTE(review): indentation reconstructed — known/unknown counted
            # for inferred offsets only, mirroring DirectOffset.stat; confirm.
            if self.train_name != UNKNOWN_LABEL:
                StringArrayOffset.known += 1
            else:
                StringArrayOffset.unknown += 1
class IndirectOffset(Offset):
    """A base-pointer-relative (stack) offset, i.e. a local variable or argument.

    Class attributes are corpus-wide statistic counters; the ``ttype_*``
    counters are maintained by the associated type predictor.
    """

    total = 0
    known = 0
    unknown = 0
    inf = 0
    tp_1p = 0
    fp_1p = 0
    tn_1p = 0
    fn_1p = 0
    correct = 0
    ttype_total = 0
    ttype_known = 0
    ttype_unknown = 0
    ttype_inf = 0
    ttype_tp_1p = 0
    ttype_fp_1p = 0
    ttype_tn_1p = 0
    ttype_fn_1p = 0
    ttype_correct = 0

    def __init__(self, *args, **kwargs):
        super().__init__(*args, **kwargs)
        self.function = kwargs['function']
        self.binary = self.function.binary
        self.base_pointer = kwargs['base_pointer']
        self.offset = kwargs['offset']
        self.index = kwargs['index']
        self.name = '{}:S:{}'.format(self.base_pointer, self.offset)
        self.ttype = Ttype(owner=self)
        self.n2p_type = self.binary.config.INF
        self.train_name = UNKNOWN_LABEL  # ground-truth name (from DWARF)
        self.test_name = UNKNOWN_LABEL   # predicted name
        self.low_pc = None   # lowest PC that accesses this offset
        self.high_pc = None  # highest PC that accesses this offset
        self.pcs = set()
        self.blks = set()
        self.features = set()
        # Positive offsets off the frame pointer are incoming arguments on
        # x86/x64; ARM locals are always classified as local variables here.
        if self.binary.config.MACHINE_ARCH == 'x86':
            if self.base_pointer == 'EBP' and self.offset >= 0:
                self.var_type = FUN_ARG
            else:
                self.var_type = LOC_VAR
        elif self.binary.config.MACHINE_ARCH == 'x64':
            if self.base_pointer == 'RBP' and self.offset >= 0:
                self.var_type = FUN_ARG
            else:
                self.var_type = LOC_VAR
        elif self.binary.config.MACHINE_ARCH == 'ARM':
            self.var_type = LOC_VAR

    def __repr__(self):
        return '[IndirectOffset {} {}]'.format(self.base_pointer, self.offset)

    def __str__(self):
        if self.test_name == self.train_name:
            return '[IndirectOffset {} {}]'.format(self.train_name, str(self.ttype))
        else:
            if self.train_name == UNKNOWN_LABEL:
                return '[IndirectOffset (WRONGU {} {}) {}]'.format(self.train_name, self.test_name, str(self.ttype))
            else:
                return '[IndirectOffset (WRONGK {} {}) {}]'.format(self.train_name, self.test_name, str(self.ttype))

    def init_features(self):
        """Populate the feature set used by the name predictor."""
        coarse = depgraph.infos.coarse
        fine = depgraph.infos.fine
        self.features.add(coarse(self))
        self.features.add(fine(self))
        self.features.add('blk[{}][{}]'.format(len(self.blks), coarse(self)))
        self.features.add('blk[{}][{}]'.format(len(self.blks), fine(self)))

    def add_pc(self, pc):
        """Record an access at *pc* and widen the [low_pc, high_pc] range."""
        self.pcs.add(pc)
        if self.low_pc is not None:
            self.low_pc = min(pc, self.low_pc)
        else:
            self.low_pc = pc
        if self.high_pc is not None:
            self.high_pc = max(pc, self.high_pc)
        else:
            self.high_pc = pc

    def train_info(self, die, ttype):
        """Record ground-truth name/type for this offset from a DWARF DIE.

        When several DIEs map to the same offset: prefer a known type over an
        unknown/void one; otherwise keep the lexicographically smaller name.
        """
        origin = self.binary.debug_info.get_name_origin(die)
        name_attr = origin.attributes.get('DW_AT_name', None)
        if name_attr is not None:
            name = name_attr.value.decode('ascii')
            if self.train_name == UNKNOWN_LABEL:
                self.ttype.train_info(ttype)
                self.train_name = name
            else:
                if self.ttype.train_name in (UNKNOWN_LABEL, VOID) and ttype != UNKNOWN_LABEL:
                    self.ttype.train_info(ttype)
                    # BUG FIX: was `self.train_name == name`, a no-op comparison;
                    # the intent is to adopt the name that comes with the known type.
                    self.train_name = name
                else:
                    if self.train_name > name:
                        self.train_name = name
                        self.ttype.train_info(ttype)
                    else:
                        pass

    def stat(self):
        """Update the corpus-wide counters for this offset."""
        super().stat()
        IndirectOffset.total += 1
        if self.train_name != UNKNOWN_LABEL:
            IndirectOffset.known += 1
            Offset.known += 1
        else:
            IndirectOffset.unknown += 1
            Offset.unknown += 1
        if self.n2p_type == self.binary.config.INF:
            IndirectOffset.inf += 1
            Offset.inf += 1
            # Classified as "infer": a known ground-truth name makes it a TP.
            if self.train_name == UNKNOWN_LABEL:
                IndirectOffset.fp_1p += 1
                Offset.fp_1p += 1
            else:
                IndirectOffset.tp_1p += 1
                Offset.tp_1p += 1
        elif self.n2p_type == self.binary.config.GIV:
            # Classified as "given": an unknown ground-truth name makes it a TN.
            if self.train_name == UNKNOWN_LABEL:
                IndirectOffset.tn_1p += 1
                Offset.tn_1p += 1
            else:
                IndirectOffset.fn_1p += 1
                Offset.fn_1p += 1

    def debug_info(self):
        """Serialize this offset as a DWARF variable/parameter DIE."""
        bs = bytearray()
        if self.var_type == FUN_ARG:
            bs.append(ENUM_ABBREV_CODE['FUN_ARG'])
        elif self.var_type == LOC_VAR:
            bs.append(ENUM_ABBREV_CODE['VARIABLE'])
        # name (NUL-terminated ASCII)
        bs.extend(map(ord, self.test_name))
        bs.append(0x00)
        if self.test_name not in TTYPES and self.test_name != UNKNOWN_LABEL:
            self.binary.predicted.add(self.test_name)
        # location expression: DW_OP_breg<N> + SLEB128 offset from base pointer
        loc_expr = bytearray()
        loc_expr.append(self.binary.config.REG_MAPPING[self.base_pointer] +
                        ENUM_DW_FORM_exprloc['DW_OP_breg0'])
        loc_expr += utils.encode_sleb128(self.offset)
        bs += utils.encode_uleb128(len(loc_expr))
        bs += loc_expr
        # type reference: fall back to `int` when the predicted type is unusable
        if self.ttype.test_name is None \
                or self.ttype.test_name in (UNKNOWN_LABEL, VOID) \
                or self.ttype.test_name not in TTYPES:
            bs += utils.encode_kbytes(self.binary.types.get_offset(INT), 4)
        else:
            bs += utils.encode_kbytes(self.binary.types.get_offset(self.ttype.test_name), 4)
        return bs
|
98232
|
import cv2
import numpy as np
from paz.backend.image.draw import put_text, draw_rectangle
from paz.backend.image.draw import GREEN
def draw_box(image, coordinates, class_name, score,
             color=GREEN, scale=0.7, weighted=False):
    """Draw a labelled detection box on ``image`` and return it.

    When ``weighted`` is true, the color channels are scaled by ``score``.
    """
    x_min, y_min, x_max, y_max = coordinates
    if weighted:
        scaled_color = []
        for channel in color:
            scaled_color.append(int(channel * score))
        color = scaled_color
    label = '{:0.2f}, {}'.format(score, class_name)
    # Put the label slightly above the top-left corner of the box.
    put_text(image, label, (x_min, y_min - 10), scale, color, 1)
    draw_rectangle(image, (x_min, y_min), (x_max, y_max), color, 2)
    return image
def draw_square(image, center_x, center_y, size, color):
    """Draw a filled square of half-width ``size`` centered at (center_x, center_y)."""
    top_left = (center_x - size, center_y - size)
    bottom_right = (center_x + size, center_y + size)
    cv2.rectangle(image, top_left, bottom_right, color, -1)
    return image
def draw_circle(image, center_x, center_y, size, color):
    """Draw a filled circle of radius ``size`` centered at (center_x, center_y)."""
    cv2.circle(image, (center_x, center_y), size, color, -1)
    return image
def draw_triangle(image, center_x, center_y, size, color):
    """Draw a filled upward-pointing triangle centered at (center_x, center_y)."""
    apex = (center_x, center_y - size)
    bottom_left = (center_x - size, center_y + size)
    bottom_right = (center_x + size, center_y + size)
    vertices = np.array([[apex, bottom_left, bottom_right]], dtype=np.int32)
    cv2.fillPoly(image, vertices, color)
    return image
def resize_image_with_nearest_neighbors(image, size):
    """Resize image using nearest neighbors interpolation.

    # Arguments
        image: Numpy array.
        size: List of two ints.

    # Returns
        Numpy array.

    # Raises
        ValueError: if ``image`` is not a numpy array.
    """
    # Use isinstance for the type check (handles ndarray subclasses) and a
    # guard clause instead of if/else; also fixes the "Recieved" typo in the
    # error message.
    if not isinstance(image, np.ndarray):
        raise ValueError(
            'Received Image is not of type numpy array', type(image))
    return cv2.resize(image, size, interpolation=cv2.INTER_NEAREST)
|
98322
|
import unittest
import os.path
import numpy as np
import numpy.lib.recfunctions as rfn
from geodepy.convert import (hp2dec, dec2hp, rect2polar, polar2rect,
grid2geo, llh2xyz, DMSAngle)
from geodepy.geodesy import vincinv, vincdir, vincinv_utm, vincdir_utm, enu2xyz, xyz2enu
class TestGeodesy(unittest.TestCase):
    """Tests for geodepy.geodesy: Vincenty direct/inverse and ENU<->XYZ transforms.

    Expected values reference the Flinders Peak / Buninyong baseline used in
    the GDA technical documentation; keep the literal constants unchanged.
    """

    def test_enu2xyz(self):
        """Round-trip a local ENU vector through cartesian XYZ and back."""
        MOBS_MGA2020 = (55, 321820.085, 5811181.510, 40.570)
        MOBS_MGA1994 = (55, 321819.594, 5811180.038, 40.659)
        # Convert UTM Projection Coordinates to Geographic Coordinates
        MOBS_GDA2020 = grid2geo(MOBS_MGA2020[0], MOBS_MGA2020[1], MOBS_MGA2020[2])
        MOBS_GDA1994 = grid2geo(MOBS_MGA1994[0], MOBS_MGA1994[1], MOBS_MGA1994[2])
        # Convert Geographic Coordinates to Cartesian XYZ Coordinates
        MOBS_GDA2020_XYZ = llh2xyz(MOBS_GDA2020[0], MOBS_GDA2020[1], MOBS_MGA2020[3])
        MOBS_GDA1994_XYZ = llh2xyz(MOBS_GDA1994[0], MOBS_GDA1994[1], MOBS_MGA1994[3])
        # Generate Vector Between UTM Projection Coordinates
        mga_vector = [MOBS_MGA2020[1] - MOBS_MGA1994[1],
                      MOBS_MGA2020[2] - MOBS_MGA1994[2],
                      MOBS_MGA2020[3] - MOBS_MGA1994[3]]
        # Generate Vector Between Cartesian XYZ Coordinates
        xyz_vector = (MOBS_GDA2020_XYZ[0] - MOBS_GDA1994_XYZ[0],
                      MOBS_GDA2020_XYZ[1] - MOBS_GDA1994_XYZ[1],
                      MOBS_GDA2020_XYZ[2] - MOBS_GDA1994_XYZ[2])
        # Rotate UTM Projection Vector by Grid Convergence
        # (MOBS_GDA2020[3] is presumably the grid convergence returned by
        # grid2geo — TODO confirm against geodepy.convert.grid2geo.)
        grid_dist, grid_brg = rect2polar(mga_vector[0], mga_vector[1])
        local_east, local_north = polar2rect(grid_dist, grid_brg - MOBS_GDA2020[3])
        local_vector = (local_east, local_north, mga_vector[2])
        # Calculate XYZ Vector using Local Vector Components
        x, y, z = enu2xyz(MOBS_GDA2020[0], MOBS_GDA2020[1], *local_vector)
        self.assertAlmostEqual(x, xyz_vector[0], 4)
        self.assertAlmostEqual(y, xyz_vector[1], 4)
        self.assertAlmostEqual(z, xyz_vector[2], 4)
        # Calculate Local Vector using XYZ Vector Components
        e, n, u = xyz2enu(MOBS_GDA2020[0], MOBS_GDA2020[1], *xyz_vector)
        self.assertAlmostEqual(e, local_vector[0], 4)
        self.assertAlmostEqual(n, local_vector[1], 4)
        self.assertAlmostEqual(u, local_vector[2], 4)

    def test_vincinv(self):
        """Vincenty inverse: distance and forward/reverse azimuths for all angle types."""
        # Flinders Peak
        lat1 = hp2dec(-37.57037203)
        lon1 = hp2dec(144.25295244)
        lat1_DMS = DMSAngle(-37, 57, 3.7203)
        lon1_DMS = DMSAngle(144, 25, 29.5244)
        # Buninyong
        lat2 = hp2dec(-37.39101561)
        lon2 = hp2dec(143.55353839)
        lat2_DMS = DMSAngle(-37, 39, 10.1561)
        lon2_DMS = DMSAngle(143, 55, 35.3839)
        # Test Decimal Degrees Input
        ell_dist, azimuth1to2, azimuth2to1 = vincinv(lat1, lon1, lat2, lon2)
        self.assertEqual(round(ell_dist, 3), 54972.271)
        self.assertEqual(round(dec2hp(azimuth1to2), 6), 306.520537)
        self.assertEqual(round(dec2hp(azimuth2to1), 6), 127.102507)
        # additional test case:
        pl1 = (-29.85, 140.71666666666667)
        pl2 = (-29.85, 140.76666666666667)
        ell_dist, azimuth1to2, azimuth2to1 = vincinv(pl1[0], pl1[1], pl2[0], pl2[1])
        self.assertEqual(round(ell_dist, 3), 4831.553)
        self.assertEqual(round(dec2hp(azimuth1to2), 6), 90.004480)
        self.assertEqual(round(dec2hp(azimuth2to1), 6), 269.591520)
        # Coincident points must return zeros, not fail to converge.
        test2 = vincinv(lat1, lon1, lat1, lon1)
        self.assertEqual(test2, (0, 0, 0))
        # Test DMSAngle Input
        ell_dist, azimuth1to2, azimuth2to1 = vincinv(lat1_DMS, lon1_DMS,
                                                     lat2_DMS, lon2_DMS)
        self.assertEqual(round(ell_dist, 3), 54972.271)
        self.assertEqual(round(dec2hp(azimuth1to2), 6), 306.520537)
        self.assertEqual(round(dec2hp(azimuth2to1), 6), 127.102507)
        test2 = vincinv(lat1_DMS, lon1_DMS, lat1_DMS, lon1_DMS)
        self.assertEqual(test2, (0, 0, 0))
        # Test DDMAngle Input
        (ell_dist,
         azimuth1to2,
         azimuth2to1) = vincinv(lat1_DMS.ddm(), lon1_DMS.ddm(),
                                lat2_DMS.ddm(), lon2_DMS.ddm())
        self.assertEqual(round(ell_dist, 3), 54972.271)
        self.assertEqual(round(dec2hp(azimuth1to2), 6), 306.520537)
        self.assertEqual(round(dec2hp(azimuth2to1), 6), 127.102507)
        test2 = vincinv(lat1_DMS.ddm(), lon1_DMS.ddm(),
                        lat1_DMS.ddm(), lon1_DMS.ddm())
        self.assertEqual(test2, (0, 0, 0))

    def test_vincdir(self):
        """Vincenty direct: destination point and reverse azimuth for all angle types."""
        # Flinders Peak
        lat1 = hp2dec(-37.57037203)
        lon1 = hp2dec(144.25295244)
        lat1_DMS = DMSAngle(-37, 57, 3.7203)
        lon1_DMS = DMSAngle(144, 25, 29.5244)
        # To Buninyong
        azimuth1to2 = hp2dec(306.520537)
        azimuth1to2_DMS = DMSAngle(306, 52, 5.37)
        ell_dist = 54972.271
        # Test Decimal Degrees Input
        lat2, lon2, azimuth2to1 = vincdir(lat1, lon1, azimuth1to2, ell_dist)
        self.assertEqual(round(dec2hp(lat2), 8), -37.39101561)
        self.assertEqual(round(dec2hp(lon2), 8), 143.55353839)
        self.assertEqual(round(dec2hp(azimuth2to1), 6), 127.102507)
        # Test DMSAngle Input
        lat2, long2, azimuth2to1 = vincdir(lat1_DMS, lon1_DMS,
                                           azimuth1to2_DMS, ell_dist)
        self.assertEqual(round(dec2hp(lat2), 8), -37.39101561)
        self.assertEqual(round(dec2hp(long2), 8), 143.55353839)
        self.assertEqual(round(dec2hp(azimuth2to1), 6), 127.102507)
        # Test DDMAngle Input
        lat2, long2, azimuth2to1 = vincdir(lat1_DMS.ddm(), lon1_DMS.ddm(),
                                           azimuth1to2_DMS.ddm(), ell_dist)
        self.assertEqual(round(dec2hp(lat2), 8), -37.39101561)
        self.assertEqual(round(dec2hp(long2), 8), 143.55353839)
        self.assertEqual(round(dec2hp(azimuth2to1), 6), 127.102507)

    def test_vincinv_utm(self):
        """Vincenty inverse on UTM input, including a cross-zone pair."""
        # Flinders Peak (UTM 55)
        zone1 = 55
        east1 = 273741.2966
        north1 = 5796489.7769
        # Buninyong (UTM 55)
        zone2 = 55
        east2 = 228854.0513
        north2 = 5828259.0384
        # Buninyong (UTM 54)
        zone3 = 54
        east3 = 758173.7973
        north3 = 5828674.3402
        # Test Coordinates in Zone 55 only
        grid_dist, grid1to2, grid2to1, lsf = vincinv_utm(zone1, east1, north1,
                                                         zone2, east2, north2)
        self.assertAlmostEqual(lsf, 1.00036397, 8)
        self.assertAlmostEqual(grid_dist, 54992.279, 3)
        self.assertAlmostEqual(dec2hp(grid1to2), 305.17017259, 7)
        self.assertAlmostEqual(dec2hp(grid2to1), 125.17418518, 7)
        # Test Coordinates in Different Zones (55 and 54)
        # (Point 2 Grid Bearing Different (Zone 54 Grid Bearing))
        grid_dist, grid1to2, grid2to1, lsf = vincinv_utm(zone1, east1, north1,
                                                         zone3, east3, north3)
        self.assertAlmostEqual(lsf, 1.00036397, 8)
        self.assertAlmostEqual(grid_dist, 54992.279, 3)
        self.assertAlmostEqual(dec2hp(grid1to2), 305.17017259, 7)
        self.assertAlmostEqual(dec2hp(grid2to1), 128.57444307, 7)

    def test_vincdir_utm(self):
        """Vincenty direct on UTM input for both decimal-degree and DMS bearings."""
        # Flinders Peak (UTM 55)
        zone1 = 55
        east1 = 273741.2966
        north1 = 5796489.7769
        # Grid Dimensions to Point 2 (Buninyong)
        grid_dist = 54992.279
        grid1to2 = hp2dec(305.17017259)
        grid1to2_DMS = DMSAngle(305, 17, 1.7259)
        # Test Decimal Degrees Input
        (zone2, east2, north2,
         grid2to1, lsf) = vincdir_utm(zone1, east1, north1,
                                      grid1to2, grid_dist)
        self.assertEqual(zone2, zone1)
        self.assertAlmostEqual(east2, 228854.0513, 3)
        self.assertAlmostEqual(north2, 5828259.0384, 3)
        self.assertAlmostEqual(dec2hp(grid2to1), 125.17418518, 7)
        self.assertAlmostEqual(lsf, 1.00036397, 8)
        # Test DMSAngle Input
        (zone2, east2, north2,
         grid2to1, lsf) = vincdir_utm(zone1, east1, north1,
                                      grid1to2_DMS, grid_dist)
        self.assertEqual(zone2, zone1)
        self.assertAlmostEqual(east2, 228854.0513, 3)
        self.assertAlmostEqual(north2, 5828259.0384, 3)
        self.assertAlmostEqual(dec2hp(grid2to1), 125.17418518, 7)
        self.assertAlmostEqual(lsf, 1.00036397, 8)

    def test_equality_vincentys(self):
        """inverse->direct round-trip over many point pairs from a CSV fixture."""
        # Test multiple point-to-point vincinv calculations
        abs_path = os.path.abspath(os.path.dirname(__file__))
        test_geo_coords =\
            np.genfromtxt(os.path.join(abs_path,
                          'resources/Test_Conversion_Geo.csv'),
                          delimiter=',',
                          dtype='S4,f8,f8',
                          names=['site', 'lat1', 'long1'],
                          usecols=('lat1', 'long1'))
        test_geo_coord2 = \
            np.genfromtxt(os.path.join(abs_path,
                          'resources/Test_Conversion_Geo.csv'),
                          delimiter=',',
                          dtype='S4,f8,f8',
                          names=['site', 'lat2', 'long2'],
                          usecols=('lat2', 'long2'))
        # Form array with point pairs from test file
        # (np.roll pairs each point with the previous one in the file).
        test_pairs = rfn.merge_arrays([test_geo_coords, np.roll(test_geo_coord2, 1)], flatten=True)
        # Calculate Vincenty's Inverse Result using Lat, Long Pairs
        vincinv_result = np.array(list(vincinv(*x) for x in test_pairs[['lat1', 'long1', 'lat2', 'long2']]))
        # Calculate Vincenty's Direct Result using Results from Inverse Function
        vincdir_input = rfn.merge_arrays([test_geo_coords, vincinv_result[:, 1], vincinv_result[:, 0]], flatten=True)
        vincdir_input.dtype.names = ['lat1', 'long1', 'az1to2', 'ell_dist']
        vincdir_result = np.array(list(vincdir(*x) for x in vincdir_input[['lat1', 'long1', 'az1to2', 'ell_dist']]))
        np.testing.assert_almost_equal(test_pairs['lat2'],
                                       vincdir_result[:, 0], decimal=8)
        np.testing.assert_almost_equal(test_pairs['long2'],
                                       vincdir_result[:, 1], decimal=8)
        np.testing.assert_almost_equal(vincinv_result[:, 2],
                                       vincdir_result[:, 2])

    def test_vincinv_edgecases(self):
        """Result must be invariant under a pure longitude shift of both points."""
        lat1 = -32.153892
        lon1 = -15.394827
        lat2 = -31.587369
        lon2 = -13.487739
        gdist, az12, az21 = vincinv(lat1, lon1, lat2, lon2)
        lon1 = lon1 + 14
        lon2 = lon2 + 14
        gdist_2, az12_2, az21_2 = vincinv(lat1, lon1, lat2, lon2)
        self.assertEqual(gdist, gdist_2)
        self.assertEqual(az12, az12_2)
        self.assertEqual(az21, az21_2)
if __name__ == '__main__':
unittest.main()
|
98348
|
import unittest
import warnings
from gmplot.utility import StringIO, _format_LatLng
from gmplot.writer import _Writer
from gmplot.drawables.route import _Route
from gmplot.google_map_plotter import GoogleMapPlotter
class GMPlotTest(unittest.TestCase):
    """Unit tests for gmplot utility helpers."""

    def test_format_LatLng(self):
        # The third argument controls the number of (zero-padded) decimal places.
        self.assertEqual(_format_LatLng(45.123456, -80.987654, 6), 'new google.maps.LatLng(45.123456, -80.987654)')
        self.assertEqual(_format_LatLng(45.123456, -80.987654, 4), 'new google.maps.LatLng(45.1235, -80.9877)')
        self.assertEqual(_format_LatLng(45.1, -80.9, 3), 'new google.maps.LatLng(45.100, -80.900)')
# Note: This test only ensures that Route's functions can be called without failing,
# it doesn't test if the resulting output can actually be rendered properly in a browser.
class RouteTest(unittest.TestCase):
    """Smoke tests: _Route.write() must not raise (output is not rendered)."""

    def test_write(self):
        route = _Route((37.770776,-122.461689), (37.780776,-122.461689), 6)
        with StringIO() as f:
            with _Writer(f) as writer:
                route.write(writer)

    def test_write_waypoints(self):
        route = _Route((37.770776,-122.461689), (37.780776,-122.461689), 6, waypoints=[(37.431257,-122.133121)])
        with StringIO() as f:
            with _Writer(f) as writer:
                route.write(writer)
# Note: This test only ensures that GoogleMapPlotter's functions can be called without failing,
# it doesn't test if the resulting map can actually be rendered properly in a browser.
class GoogleMapPlotterTest(unittest.TestCase):
    """Smoke tests: exercise every GoogleMapPlotter drawing API without rendering."""

    # Shared (lats, lngs) fixtures used by the drawing tests below.
    PATH_1 = [(37.429, 37.428, 37.427, 37.427, 37.427),
              (-122.145, -122.145, -122.145, -122.146, -122.146)]
    PATH_2 = [[i+.01 for i in PATH_1[0]], [i+.02 for i in PATH_1[1]]]
    PATH_3 = [(37.433302, 37.431257, 37.427644, 37.430303), (-122.14488, -122.133121, -122.137799, -122.148743)]
    PATH_4 = [(37.423074, 37.422700, 37.422410, 37.422188, 37.422274, 37.422495, 37.422962, 37.423552, 37.424387, 37.425920, 37.425937),
              (-122.150288, -122.149794, -122.148936, -122.148142, -122.146747, -122.14561, -122.144773, -122.143936, -122.142992, -122.147863, -122.145953)]

    def test_get(self):
        """Draw every supported shape on one map, then render it with get()."""
        bounds = {'north':37.832285, 'south': 37.637336, 'west': -122.520364, 'east': -122.346922}
        map = GoogleMapPlotter(37.428, -122.145, 16, fit_bounds=bounds)
        # Test marker:
        map.marker(37.427, -122.145, color="yellow")
        map.marker(37.428, -122.146, color="cornflowerblue")
        map.marker(37.429, -122.144, color="k", title='Here')
        map.marker(37.430, -122.142, color="red", label='A')
        # Test circle:
        map.circle(37.429, -122.145, 100, color="#FF0000", ew=2)
        # Test plot:
        map.plot(self.PATH_1[0], self.PATH_1[1], color="plum", edge_width=10)
        map.plot(self.PATH_2[0], self.PATH_2[1], color="red")
        # Test directions:
        map.directions((37.770776,-122.461689), (37.780776,-122.461689), waypoints=[(37.431257,-122.133121)])
        # Test polygon:
        map.polygon(self.PATH_3[0], self.PATH_3[1], edge_color="cyan", edge_width=5, face_color="blue", face_alpha=0.1)
        # Test heatmap:
        map.heatmap(self.PATH_4[0], self.PATH_4[1], radius=40, weights=[1, 1, 1, 0.5, 0.5, 0.5, 1, 1, 1, 2, 2])
        map.heatmap(self.PATH_3[0], self.PATH_3[1], radius=40, dissipating=False, gradient=[(30,30,30,0), (30,30,30,1), (50, 50, 50, 1)])
        # Test scatter:
        map.scatter(self.PATH_3[0], self.PATH_3[1], c='r', marker=[True, False, False, True])
        map.scatter(self.PATH_4[0], self.PATH_4[1], size=[1, 1, 1, 1, 1, 1, 1, 1, 1, 2, 2], symbol='x')
        map.scatter(self.PATH_4[0], self.PATH_4[1], s=90, marker=False, alpha=0.9, symbol='+', c='red', edge_width=4)
        map.scatter(self.PATH_3[0], self.PATH_3[1],
                    color=['r','g','b','k'],
                    precision=[1,2,3,4],
                    marker=[True, True, False, True],
                    title=['First', 'Second', 'Third', 'Fourth'],
                    label=['A','B','C','D'],
                    size=[10,20,30,40],
                    symbol=['+','o','x','x']
                    )
        # Test ground overlay:
        bounds_dict = {'north':37.832285, 'south': 37.637336, 'west': -122.520364, 'east': -122.346922}
        map.ground_overlay('http://explore.museumca.org/creeks/images/TopoSFCreeks.jpg', bounds_dict, opacity=0.5)
        map.get()

    def test_scatter_length_mismatch(self):
        """Per-point settings lists of differing lengths must raise ValueError."""
        map = GoogleMapPlotter(37.428, -122.145, 16)
        with self.assertRaises(ValueError):
            map.scatter(self.PATH_3[0], self.PATH_3[1],
                        color=['r','g','b'],
                        precision=[1,2],
                        marker=[True],
                        title=['First', 'Second'],
                        label=['A','B','C','D','E'],
                        size=[10,20],
                        symbol=['+','o','x','x','o']
                        )

    def test_invalid_symbol(self):
        """An unknown scatter symbol must raise KeyError."""
        map = GoogleMapPlotter(37.428, -122.145, 16)
        with self.assertRaises(KeyError):
            map.scatter(self.PATH_4[0], self.PATH_4[1], s=90, marker=False, alpha=0.9, symbol='z', c='red', edge_width=4)
            map.get()

    def test_grid(self):
        map = GoogleMapPlotter(37.428, -122.145, 16)
        bounds = {'north': 37.43, 'south': 37.42, 'east': -122.14, 'west': -122.15}
        map.grid(bounds, 0.001, 0.001)
        map.get()

    def test_map_styles(self):
        map_styles = [
            {
                'featureType': 'all',
                'stylers': [
                    {'saturation': -80},
                    {'lightness': 60},
                ]
            }
        ]
        map = GoogleMapPlotter(37.428, -122.145, 16, map_type='satellite', map_styles=map_styles, tilt=0, scale_control=True)
        map.get()

    def test_unsupported_marker_color(self):
        """A valid-but-unsupported marker color must emit exactly one UserWarning."""
        map = GoogleMapPlotter(37.428, -122.145, 16)
        with warnings.catch_warnings(record=True) as w:
            warnings.simplefilter("always")
            map.marker(37.428, -122.146, color="#123456") # (valid but unsupported color)
            self.assertEqual(len(w), 1, "'get()' should raise a single warning")
            self.assertTrue(issubclass(w[-1].category, UserWarning), "'get()' should raise a 'UserWarning'")
        map.get()
|
98368
|
import json
from datetime import datetime
from sqlalchemy import create_engine
from sqlalchemy.ext.declarative import declarative_base
from sqlalchemy.orm import sessionmaker
import financespy.account as account
from financespy.memory_backend import MemoryBackend
from financespy.sql_backend import SQLBackend
from financespy.sql_backend import db_object
from financespy.sql_backend import read_account_metadata
from financespy.sql_backend import transaction_class, account_class
from financespy.transaction import parse_transaction
from tests.test_utils import get_categories_as_list
# Raw fixture data: one transaction per line, "YYYY-MM-DD;<value>, <category>".
records_ = """2019-09-04;20.0, withdrawal
2019-09-05;20.58, rewe
2019-09-06;49.28, aldi
2019-09-08;17.05, müller
2019-09-08;97.2, monthly_ticket
2019-09-11;50.0, withdrawal
2019-09-13;50.0, lidl
2019-09-19;40.0, h_&_m
2019-09-20;55.58, lidl
2019-09-21;50.0, withdrawal
2019-09-21;25.0, train_ticket"""

# SQLAlchemy model classes bound to a fresh declarative base for this module.
base = declarative_base()
db = db_object(base)
Transaction = transaction_class(db)
Account = account_class(db)

# Category tree shared by all tests in this module.
categories = get_categories_as_list()
def parse_date(dt):
    """Parse an ISO ``YYYY-MM-DD`` string into a ``datetime.date``."""
    parsed = datetime.strptime(dt, "%Y-%m-%d")
    return parsed.date()
def records(cats):
    """Parse the module-level ``records_`` fixture into (date, transaction) pairs."""
    parsed = []
    for line in records_.split("\n"):
        date, trans = tuple(line.split(";"))
        parsed.append((parse_date(date), parse_transaction(trans, cats)))
    return parsed
def open_sql_account():
    """Create an in-memory SQLite account named "savings" and return it.

    Builds the schema, inserts one Account row, then wraps it in a
    financespy Account backed by SQLBackend.
    """
    engine = create_engine('sqlite:///:memory:', echo=True)
    base.metadata.create_all(engine)
    session_factory = sessionmaker(bind=engine)
    session = session_factory()
    test_account = Account(
        name="savings",
        currency="eur",
        categories=json.dumps(categories),
        user_id=1
    )
    session.add(test_account)
    session.commit()
    # account_id=1 matches the single row inserted above.
    account_data = read_account_metadata(session, 1, Account)
    backend = SQLBackend(
        session=session,
        account_id=1,
        transaction_class=Transaction
    )
    return account.Account(backend, account_data)
def total_iterator(iterator):
    """Return, for each element of *iterator*, the sum of its records' values."""
    totals = []
    for element in iterator:
        subtotal = 0
        for transaction in element.records():
            subtotal += transaction.value
        totals.append(subtotal)
    return totals
def test_month_iterator():
    """SQL backend and in-memory backend must agree on weekly and daily totals."""
    # NOTE: local `account` shadows the imported financespy.account module here.
    account = open_sql_account()
    backend = account.backend
    cats = backend.categories
    memory_backend = MemoryBackend(cats)
    # Insert the same fixture records into both backends.
    for date, rec in records(cats):
        backend.insert_record(date, rec)
        memory_backend.insert_record(date, rec)
    weeks1 = backend.month("sep", 2019).weeks()
    weeks2 = memory_backend.month("sep", 2019).weeks()
    assert total_iterator(weeks1) == total_iterator(weeks2)
    month1 = backend.month("sep", 2019).days()
    month2 = memory_backend.month("sep", 2019).days()
    assert total_iterator(month1) == total_iterator(month2)
|
98371
|
# Fit the baseline and the two resampled classifiers on the same training
# split, then compare their ROC and precision-recall curves.
base_rf.fit(X_train, y_train)
under_rf.fit(X_train, y_train)
over_rf.fit(X_train, y_train)

plot_roc_and_precision_recall_curves([
    ("original", base_rf),
    ("undersampling", under_rf),
    ("oversampling", over_rf),
])
|
98397
|
import numpy as np
from mlflow import log_metric, log_param, set_experiment, active_run, log_metrics
from yaaf import mkdir, flatten_dict
class MLFlowLogger:
    """Logs run parameters and per-timestep metrics to MLflow.

    Instances are callable: pass a timestep to log one step's metrics.
    """

    def __init__(self, params, metrics=None, experiment_name=None):
        if experiment_name is not None: set_experiment(f"{experiment_name}")
        self._step = 0     # total timesteps logged
        self._episode = 0  # completed (terminal) episodes seen
        for param in params:
            log_param(param, params[param])
        self._metrics = metrics or []
        run = active_run()
        self._runid = run.info.run_id
        self._session = run.info.experiment_id
        # Local artifact directory for this run (default MLflow layout).
        self.directory = f"mlruns/{self._session}/{self._runid}"

    def __call__(self, timestep):
        """Log step/episode counters, registered metrics and timestep info."""
        self._step += 1
        self._episode += 1 if timestep.is_terminal else 0
        log_metric("timestep", self._step, step=self._step)
        log_metric("episode", self._episode, step=self._step)
        [log_metric(metric.name, metric(timestep), step=self._step) for metric in self._metrics]
        # Flatten nested info dicts so each leaf becomes a metric name.
        info = flatten_dict(timestep.info, separator=" ")
        log_metrics(info, step=self._step)

    def reset(self):
        """Reset counters and every registered metric."""
        self._step = 0
        self._episode = 0
        [metric.reset() for metric in self._metrics]

    def save_numpy(self):
        """Dump each metric's accumulated result as a .npy file under the run."""
        mkdir(f"mlruns/{self._session}/{self._runid}/numpy")
        [np.save(f"mlruns/{self._session}/{self._runid}/numpy/{metric.name.lower().replace(' ', '_')}", metric.result()) for metric in self._metrics]
|
98409
|
import unittest
import os
import logging
from flask_testing import LiveServerTestCase
from selenium import webdriver
from selenium.webdriver.common.keys import Keys
import selenium.webdriver.support.ui as ui
from webapp.routes import create_app
class TestBase(LiveServerTestCase):
    """Boots the Flask app on a live server and drives it with Selenium/Firefox."""

    def create_app(self):
        logging.basicConfig(level=logging.ERROR)
        try:
            # Reset the mock provider's persisted install state so each run starts clean.
            workdir = os.path.dirname(os.path.realpath(__file__))
            os.remove(os.path.join(workdir, '..', 'providers', 'mock', 'scoop-installed-apps.txt'))
        except OSError:
            pass
        config_name = 'testing'
        app = create_app(config_name)
        app.config.update(
            # Change the port that the liveserver listens on
            LIVESERVER_PORT=8943
        )
        return app

    def setUp(self):
        """Setup the test driver and create test users"""
        self.driver = webdriver.Firefox()
        #self.driver = webdriver.Firefox(executable_path = './tests/geckodriver.exe')
        self.driver.get(self.get_server_url())

    def tearDown(self):
        self.driver.quit()
class SimpleTests(TestBase):
    """UI smoke tests against the live test server."""

    def test_search(self):
        """Searching for an app name filters the visible result list."""
        driver = self.driver
        wait = ui.WebDriverWait(driver, 10)
        self.assertIn("Scoop", driver.title)
        wait.until(lambda driver: "Loading" not in driver.page_source)
        elem = driver.find_element_by_css_selector("#navbarSupportedContent > div > input")
        elem.clear()
        # BUG FIX: the search term had been replaced by an anonymization
        # placeholder ("<PASSWORD>-app-02"); the assertions below expect the
        # filter results for "example-app-02".
        elem.send_keys("example-app-02")
        elem.send_keys(Keys.RETURN)
        wait.until(lambda driver: "Loading" not in driver.page_source)
        self.assertTrue("example-app-01" not in driver.page_source)
        self.assertTrue("example-app-02" in driver.page_source)

    def test_install(self):
        """Clicking an app's install button flips its state to Installed."""
        driver = self.driver
        wait = ui.WebDriverWait(driver, 10)
        wait.until(lambda driver: "Loading" not in driver.page_source)
        elem = driver.find_element_by_css_selector("#app > div > div > div > main > div > div > div:nth-child(1) > div > div > div:nth-child(1) > div > div.col-sm-3.text-right > button")
        elem.click()
        wait.until(lambda driver: "Installed" in driver.page_source)
        self.assertTrue("Installed" in driver.page_source)
if __name__ == '__main__':
unittest.main()
|
98428
|
import json
import boto3
import tempfile
import zipfile
import uuid
import time
sc = boto3.client("servicecatalog")
code_pipeline = boto3.client('codepipeline')
s3 = boto3.client("s3")
sts = boto3.client("sts")
ssm = boto3.client("ssm")
def get_file(artifact, f_name):
    """Download a CodePipeline artifact zip from S3 and return ``f_name`` parsed as JSON."""
    s3_location = artifact["location"]["s3Location"]
    bucket = s3_location["bucketName"]
    key = s3_location["objectKey"]
    print(f"{bucket}/{key}")
    with tempfile.NamedTemporaryFile() as tmp_file:
        s3.download_file(bucket, key, tmp_file.name)
        with zipfile.ZipFile(tmp_file.name, 'r') as z:
            return json.loads(z.read(f_name))
def get_role_arn():
    """Return the IAM role ARN of the current caller.

    STS returns an assumed-role ARN (arn:aws:sts::<acct>:assumed-role/<role>/<session>);
    this rewrites it to the underlying IAM role ARN by swapping the
    service/resource tokens and dropping the session-name suffix.
    """
    return "/".join(sts.get_caller_identity()["Arn"].replace("assumed-role", "role").replace("sts", "iam").split("/")[0:-1])
def associated_role(portfolio_id):
    """Associate this Lambda's execution role with a Service Catalog portfolio."""
    role_arn = get_role_arn()
    print(f"associating the lambda execution role {role_arn} with the portfolio {portfolio_id}")
    r = sc.associate_principal_with_portfolio(
        PortfolioId=portfolio_id,
        PrincipalARN=role_arn,
        PrincipalType='IAM'
    )
    print(r)
def provision_product(product_id, product_name, provisioning_artifact_id, provisioning_parameters):
    """Launch a Service Catalog product and persist its provisioned-product id in SSM."""
    print(f"launching the product {product_id}")
    r = sc.provision_product(
        ProductId=product_id,
        ProvisioningArtifactId=provisioning_artifact_id,
        # Unique name: normalized product name + short random uuid suffix.
        ProvisionedProductName=product_name.replace(" ", "_").lower() + "-" + str(uuid.uuid4()).split("-")[0],
        ProvisioningParameters=provisioning_parameters
    )
    print(r)
    print(f"ProvisionedProductId: {r['RecordDetail']['ProvisionedProductId']}")
    # Store the id so downstream pipeline stages can find the provisioned product.
    ssm.put_parameter(
        Name=f"/ds-product-catalog/{product_id}/provisioned-product-id",
        Description=f"Provisioned product id for product_id: {product_id}",
        Value=r['RecordDetail']['ProvisionedProductId'],
        Type="String",
        Overwrite=True
    )
def lambda_handler(event, context):
    """CodePipeline custom-action entry point.

    Reads UserParameters to decide between associating the execution role with
    a portfolio and provisioning a product, then reports success/failure back
    to CodePipeline.
    """
    try:
        job_id = event['CodePipeline.job']['id']
        job_data = event['CodePipeline.job']['data']
        user_param = json.loads(job_data["actionConfiguration"]["configuration"]["UserParameters"])
        # The input artifact zip contains a JSON file with the product details.
        data = get_file(job_data["inputArtifacts"][0], user_param.get("FileName"))
        print(user_param)
        if user_param.get("Operation") == "associate-role":
            associated_role(data["PortfolioId"])
        else:
            provision_product(
                data["ProductId"],
                data["ProductName"],
                # NOTE(review): the key is plural ("ProvisioningArtifactIds")
                # but it is passed as a single provisioning_artifact_id —
                # confirm the artifact JSON stores a scalar here.
                data["ProvisioningArtifactIds"],
                user_param["ProvisioningParameters"]
            )
        code_pipeline.put_job_success_result(jobId=job_id)
    except Exception as e:
        # Any failure is reported back to CodePipeline so the stage fails visibly.
        print(f"exception: {str(e)}")
        code_pipeline.put_job_failure_result(jobId=job_id, failureDetails={'message': str(e), 'type': 'JobFailed'})
|
98430
|
import pytest
from django.core.cache import cache
from rest_framework.test import APIClient
from environments.identities.models import Identity
from environments.identities.traits.models import Trait
from environments.models import Environment
from features.feature_types import MULTIVARIATE
from features.models import Feature
from features.multivariate.models import MultivariateFeatureOption
from features.value_types import STRING
from organisations.models import Organisation, OrganisationRole
from projects.models import Project
from segments.models import EQUAL, Condition, Segment, SegmentRule
from users.models import FFAdminUser
# Trait key/value shared by the `trait` fixture and the tests that assert on it.
trait_key = "key1"
trait_value = "value1"
@pytest.fixture()
def admin_client(admin_user):
    """DRF API client pre-authenticated as the admin user."""
    authed_client = APIClient()
    authed_client.force_authenticate(user=admin_user)
    return authed_client
@pytest.fixture()
def organisation(db, admin_user):
    """Organisation with admin_user registered as an ADMIN member."""
    test_org = Organisation.objects.create(name="Test Org")
    admin_user.add_organisation(test_org, role=OrganisationRole.ADMIN)
    return test_org
@pytest.fixture()
def project(organisation):
    """Project belonging to the test organisation."""
    return Project.objects.create(name="Test Project", organisation=organisation)
@pytest.fixture()
def environment(project):
    """Environment inside the test project."""
    return Environment.objects.create(name="Test Environment", project=project)
@pytest.fixture()
def identity(environment):
    """Identity registered in the test environment."""
    return Identity.objects.create(identifier="test_identity", environment=environment)
@pytest.fixture()
def trait(identity):
    """String trait (module-level trait_key/trait_value) on the test identity."""
    return Trait.objects.create(
        identity=identity, trait_key=trait_key, string_value=trait_value
    )
@pytest.fixture()
def multivariate_feature(project):
    """Multivariate feature with a 'control' value and three string options
    whose default allocations are 30%, 30% and 40%."""
    feature = Feature.objects.create(
        name="feature", project=project, type=MULTIVARIATE, initial_value="control"
    )
    allocations = (30, 30, 40)
    for allocation in allocations:
        MultivariateFeatureOption.objects.create(
            feature=feature,
            default_percentage_allocation=allocation,
            type=STRING,
            string_value=f"multivariate option for {allocation}% of users.",
        )
    return feature
@pytest.fixture()
def identity_matching_segment(project, trait):
    """Segment with a single ALL rule whose condition matches the test
    identity's trait key/value."""
    segment = Segment.objects.create(name="Matching segment", project=project)
    matching_rule = SegmentRule.objects.create(
        segment=segment, type=SegmentRule.ALL_RULE
    )
    Condition.objects.create(
        rule=matching_rule,
        property=trait.trait_key,
        operator=EQUAL,
        value=trait.trait_value,
    )
    return segment
@pytest.fixture()
def api_client():
    """Unauthenticated DRF API client."""
    return APIClient()
@pytest.fixture()
def feature(project, environment):
    """Standard feature in the test project.

    NOTE(review): the environment fixture appears to be requested only to
    ensure an environment exists — confirm before removing the parameter.
    """
    return Feature.objects.create(name="Test Feature1", project=project)
@pytest.fixture()
def user_password():
    """Random password string generated by the user model's manager."""
    return FFAdminUser.objects.make_random_password()
@pytest.fixture()
def reset_cache():
    """Clear the Django cache before and after the test."""
    # https://groups.google.com/g/django-developers/c/zlaPsP13dUY
    # TL;DR: Use this if your test interacts with cache since django
    # does not clear cache after every test
    cache.clear()
    yield
    cache.clear()
|
98451
|
from django.http import HttpResponse
from django.middleware.csrf import get_token
from django.template import Context, RequestContext, Template
from django.template.context_processors import csrf
from django.views.decorators.csrf import ensure_csrf_cookie
def post_form_view(request):
    """Return a POST form (without a token)."""
    # HttpResponse dropped the legacy ``mimetype`` keyword in Django 1.7;
    # ``content_type`` is the supported spelling of the same argument.
    return HttpResponse(content="""
<html><body><h1>\u00a1Unicode!<form method="post"><input type="text"></form></body></html>
""", content_type='text/html')
@ensure_csrf_cookie
def ensure_csrf_cookie_view(request):
    """Empty response; the decorator forces the CSRF cookie to be sent."""
    # Doesn't insert a token or anything.
    return HttpResponse()
def token_view(request):
    """Render only the CSRF token, via the csrf context processor."""
    ctx = RequestContext(request, processors=[csrf])
    rendered = Template('{% csrf_token %}').render(ctx)
    return HttpResponse(rendered)
def non_token_view_using_request_processor(request):
    """Use the csrf view processor instead of the token."""
    rendered = Template('').render(RequestContext(request, processors=[csrf]))
    return HttpResponse(rendered)
def csrf_token_error_handler(request, **kwargs):
    """This error handler accesses the CSRF token."""
    # NOTE(review): 599 looks like a deliberately non-standard sentinel status
    # so tests can recognise this handler — confirm against the test suite.
    template = Template(get_token(request))
    return HttpResponse(template.render(Context()), status=599)
|
98463
|
import json
from os.path import join
from tarfile import TarFile
from enot.action import action_factory
from enot.action.release import Release
from enot.compiler.compiler_type import Compiler
from enot.packages.config.config import ConfigFile, get_dep_info_from_hex
from enot.packages.dep import Dep
from enot.utils.file_utils import read_file
def parse_deps(deps: list) -> dict:
    """Turn a list of dependency dicts into a name -> Dep mapping.

    Entries with a 'url' become git-style Dep objects; entries without a url
    are resolved through the hex package index by name and tag.
    """
    def _to_dep(entry):
        # one-line helper: build the right dependency object for one entry
        if 'url' in entry:
            return Dep(entry['url'], entry.get('branch', None), tag=entry.get('tag', None))
        return get_dep_info_from_hex(entry['name'], entry['tag'])

    return {entry['name']: _to_dep(entry) for entry in deps}
class EnotConfig(ConfigFile):
    """Enot project configuration parsed from ``enot_config.json``."""

    def __init__(self, config: dict, url=None, name=None):
        """Populate the config from a parsed JSON dict.

        :param config: parsed enot_config.json content
        :param url: fallback repository url when absent from the config
        :param name: fallback project name when absent from the config
        """
        super().__init__()
        self._name = config.get('name', name)
        self._drop_unknown = config.get('drop_unknown_deps', True)
        self._with_source = config.get('with_source', True)
        self.__parse_build_vars(config)
        # deps/test_deps are JSON *lists* of dep dicts; default to [] instead
        # of {} (the old dict default only worked because it iterates empty).
        self._deps = parse_deps(config.get('deps', []))
        self._test_deps = parse_deps(config.get('test_deps', []))
        self._conf_vsn = config.get('app_vsn', None)
        self._git_tag = config.get('tag', None)
        self._git_branch = config.get('branch', None)
        # fall back to values inherited from ConfigFile when not overridden
        self._link_all = config.get('link_all', self.link_all)
        self._rescan_deps = config.get('rescan_deps', self.rescan_deps)
        self._url = config.get('url', url)
        self._erlang_versions = config.get('erlang', [])
        self._auto_build_order = config.get('auto_build_order', True)
        self._override_conf = config.get('override', False)
        self._disable_prebuild = config.get('disable_prebuild', False)
        self._fullname = config.get('fullname', None)
        self._compare_versions = config.get('compare_versions', True)
        self._prebuild = EnotConfig.parse_steps(config.get('prebuild', []))
        self._install = EnotConfig.parse_steps(config.get('install', []))
        # a package is a release if any install step is a Release action
        self._is_release = False
        for action in self.install:
            if isinstance(action, Release):
                self._is_release = True
        self._uninstall = EnotConfig.parse_steps(config.get('uninstall', []))

    @property
    def is_release(self) -> bool:
        """True when the install steps contain a Release action."""
        return self._is_release

    @classmethod
    def from_path(cls, path: str, url=None) -> 'EnotConfig':
        """Load the config from ``<path>/enot_config.json``."""
        content = read_file(join(path, 'enot_config.json'))
        # use project dir name as a name if not set in config
        # (fixed: [-1], not [-1:] — the slice produced a one-element list
        # instead of the directory-name string)
        name = path.split('/')[-1]
        return cls(json.loads(content), url=url, name=name)

    @classmethod
    def from_package(cls, package: TarFile, url: str, config: ConfigFile) -> 'EnotConfig':
        """Load the config embedded in an enot package tarball."""
        f = package.extractfile('enot_config.json')
        content = f.read()
        conf = cls(json.loads(content.decode('utf-8')), url=url)
        if config is not None:
            if config.fullname:  # overwrite fullname by package's fullname (from dep.config).
                conf.fullname = config.fullname
        return conf

    def need_enotify(self):
        """Always False for enot-configured projects."""
        return False

    def get_compiler(self):
        """Return the enot compiler type."""
        return Compiler.ENOT

    def __parse_build_vars(self, parsed):
        # build_vars feed the Erlang build; c_build_vars feed C source builds
        self._build_vars = parsed.get('build_vars', [])
        self._c_build_vars = parsed.get('c_build_vars', [])

    @staticmethod
    def parse_steps(steps: list) -> list:
        """Turn a list of one-entry {action_type: params} dicts into actions."""
        actions = []
        for step in steps:
            [(action_type, params)] = step.items()
            actions.append(action_factory.get_action(action_type, params))
        return actions
|
98473
|
import json
import pytest
from custom_components.hacs.validate.brands import Validator
from tests.sample_data import response_rate_limit_header
@pytest.mark.asyncio
async def test_added_to_brands(repository, aresponses):
    """Validation passes when the repository domain is listed in brands."""
    aresponses.add(
        "brands.home-assistant.io",
        "/domains.json",
        "get",
        aresponses.Response(
            body=json.dumps({"custom": ["test"]}),
            headers=response_rate_limit_header,
        ),
    )
    repository.data.domain = "test"
    check = Validator(repository)
    await check.execute_validation()
    assert not check.failed
@pytest.mark.asyncio
async def test_not_added_to_brands(repository, aresponses):
    """Validation fails when the repository domain is missing from brands."""
    aresponses.add(
        "brands.home-assistant.io",
        "/domains.json",
        "get",
        aresponses.Response(
            body=json.dumps({"custom": []}),
            headers=response_rate_limit_header,
        ),
    )
    repository.data.domain = "test"
    check = Validator(repository)
    await check.execute_validation()
    assert check.failed
|
98483
|
import urllib
import urllib2
import json
import sys
import urlparse
import os
import difflib
import shutil
import hashlib
import guido_utilities
from optparse import OptionParser
from filecmp import dircmp
'''
BELOW YOU CAN SET CERTAIN SCRIPT VARIABLES
THAT CANNOT BE SET FROM THE COMMAND LINE.
IN THEORY, THESE ARE THE ONLY VARIABLES THAT SHOULD
EVER NEED TO CHANGE TO MAKE THIS WORK.
'''
# The directory holding the GUIDO examples
EXAMPLE_DIRS = ['../../gmn-examples','../../regression-tests']
# THE EXTENSION OF A GMN FILE FOUND IN THIS DIRECTORY
# OR ITS SUB-DIRECTORIES
GMN_FILE_EXTENSION = 'gmn'
# PATH TO A LIST OF URLS THAT ARE TESTED FOR SCORES
SCORE_URLS_PATH = 'score_urls.txt'
# PATH TO A LIST OF URLS THAT ARE TESTED FOR THE SERVER
SERVER_URLS_PATH = 'server_urls.txt'
# FUNCTION FOR CREATING FILE-SYSTEM FRIENDLY URLS
# (maps a URL string to a token safe to use as a directory name)
#URL_TRANLSATION_FUNCTION = hashlib.sha1
URL_TRANSLATION_FUNCTION = guido_utilities.my_url_translation_function
####################################################
# Command-line interface: one positional command (`baseline' or `check')
# plus optional server url, output directories, verbosity and log file.
parser = OptionParser(usage = "Runs regtests on the guido server.\n"
"Must be invoked with either the command `baseline' or `check'. Meaning...\n"
" python test.py baseline\n"
"or\n"
" python test.py check\n"
"More detailed information should be in the README that came with this code.")
parser.add_option("-u", "--url", dest="url", help="url of the server [default: %default]", default = "http://localhost:8000")
parser.add_option("-b", "--baselinedir", dest="baselinedir", help="directory to stash the baseline results in [default: %default]", default = "baseline")
parser.add_option("-c", "--checkdir", dest="checkdir", help="directory to stash the check results in [default: %default]", default = "check")
parser.add_option("-v", "--verbose", dest="verbose", help="show URLs being checked", action = "store_true", default = False)
parser.add_option("-l", "--log", dest="log", help="Filename to write log to. If not specified, log will be printed to STDOUT.", default = None)
(OPTIONS, ARGS) = parser.parse_args()
# Usage message printed when the positional command is missing or invalid.
MSG = '''You must call this script as either:
python test.py baseline
or
python test.py check
There are additional options that you can specify,
but at a minimum, you need one of these two commands.
To learn more about these options, run the command:
python test.py -h
'''
if len(ARGS) != 1 :
    print MSG
    sys.exit(1)
if ARGS[0] not in ['baseline', 'check'] :
    print MSG
    sys.exit(1)
# BASELINE: True when recording a new baseline, False when checking one.
BASELINE = {'baseline':True,'check':False}[ARGS[0]]
URL = OPTIONS.url
# results go to the baseline dir in baseline mode, the check dir otherwise
MYDIR = OPTIONS.baselinedir if BASELINE else OPTIONS.checkdir
print "Welcome to the GUIDO server regtests. Run this script with -h for help."
print "Running regtest in {0} mode.".format("baseline" if BASELINE else "check")
print " If you meant to run this test in {0} mode instead,".format("baseline" if not BASELINE else "check")
print " please run this script with the word `{0}' afterwards".format("baseline" if not BASELINE else "check")
print " (i.e. python test.py {0}).".format("baseline" if not BASELINE else "check")
print " "
if (not BASELINE) and (not os.path.exists(OPTIONS.baselinedir)) :
    print "Cowardly refusing to check the regtests without a baseline."
    print "Run:"
    print " python test.py baseline"
    print "first."
    sys.exit(1)
###########
## TESTS ##
###########
# checking to see if directories exist
if (not os.path.exists(MYDIR)) :
    os.makedirs(MYDIR)
else :
    print "Cowardly refusing to overwrite the directory `{0}'.".format(MYDIR)
    print "There are two options:"
    print " 1) Stash this directory somewhere and then run this script again."
    print " 2) Specify a different directory for the regtest {0}. This can be done with the {1} option.".format('baseline' if BASELINE else 'check', '-b' if BASELINE else '-c')
    print " "
    print "For a full list of options, run this script with the flag -h."
    sys.exit(1)
# refuse to clobber an existing log file in check mode
if not BASELINE :
    if (OPTIONS.log) :
        if os.path.exists(OPTIONS.log) :
            print "Cowardly refusing to do the check because the logfile {0} exists already.".format(OPTIONS.log)
            print "Please stash it somewhere and run again."
            sys.exit(1)
# smoke-test the server with a trivial GMN snippet before doing any work
try :
    urllib2.urlopen(URL, 'data={0}'.format(urllib.quote_plus('[e f g a]')))
except urllib2.URLError :
    print "Could not compile a simple test on the server with url {0}. Run this script with the -h flag to see how to specify a URL of the server.".format(URL)
    sys.exit(1)
def guidourl(end='', url=URL) :
    '''Join a path fragment onto the server base URL.'''
    return urlparse.urljoin(url, end)
def gulp(fn) :
    '''Read and return the entire contents of file *fn*.'''
    # NOTE(review): the handle is never closed; relies on GC (Python 2 style).
    infile = file(fn, 'r')
    s = infile.read()
    return s
def my_urlopen(url, data=None) :
    '''urlopen that returns the HTTPError object instead of raising it.

    HTTPError is itself a file-like response, so the caller can still read
    the status code and body of error responses.'''
    res = None
    try :
        res = urllib2.urlopen(url, data)
    except urllib2.HTTPError as e :
        res = e
    return res
def get_extension(tp):
    '''Map a server Content-Type to the file extension used for result dumps.

    Raises ValueError for any type the server is not supposed to return.'''
    known_extensions = {
        'image/xml+svg': 'svg',
        'image/png': 'png',
        'image/gif': 'gif',
        'image/jpeg': 'jpg',
        'application/json': 'json',
    }
    if tp in known_extensions:
        return known_extensions[tp]
    raise ValueError('Server is not supposed to return {0}. This is bad.'.format(tp))
def startscomment(x):
    '''True when *x* is blank or a comment line, after stripping spaces/tabs.'''
    stripped = x.replace(' ', '').replace('\t', '')
    return stripped == '' or stripped.startswith('#')
def remove_extraneous(l) :
    '''Drop empty lines and comment lines from a list of strings.'''
    return filter(lambda x : (x != '') & (not startscomment(x)), l)
# Load the two URL lists, dropping blank and comment lines, and precompute
# their file-system-friendly directory tokens.
SCORE_URLS = remove_extraneous(gulp(SCORE_URLS_PATH).split('\n'))
SERVER_URLS = remove_extraneous(gulp(SERVER_URLS_PATH).split('\n'))
SCORE_URLHEXS = [URL_TRANSLATION_FUNCTION(URL) for URL in SCORE_URLS]
SERVER_URLHEXS = [URL_TRANSLATION_FUNCTION(URL) for URL in SERVER_URLS]
# Recursively collect every GMN example file under the example directories.
TESTFILES = []
for DIR in EXAMPLE_DIRS :
    TESTTREE = os.walk(DIR)
    for entry in TESTTREE :
        for filename in entry[2] :
            if filename.split('.')[-1] == GMN_FILE_EXTENSION :
                TESTFILES.append(os.path.join(entry[0], filename))
class Result(object):
    '''Value object for one server response: url, content type, status, body.'''
    def __init__(self, url, tp, code, data):
        self.url, self.type, self.code, self.data = url, tp, code, data
# Maps each local result directory to the URL that produced it, so the
# report can show which request generated a differing file.
PATH_TO_URL = {}
# SERVER TEST
# Fetch every server URL once and dump each Result attribute to its own
# file under <MYDIR>/server-test/<HEX>/.
for i in range(len(SERVER_URLS)) :
    URL = SERVER_URLS[i]
    HEX = SERVER_URLHEXS[i]
    localpath = os.path.join(MYDIR, 'server-test', HEX)
    os.makedirs(localpath)
    myurl = URL
    PATH_TO_URL[localpath] = myurl
    if OPTIONS.verbose :
        print "CHECKING", guidourl(myurl)
    res = my_urlopen(guidourl(myurl))
    kres = Result(myurl, res.info().gettype(), res.getcode(), res.read())
    res.close()
    ext = get_extension(kres.type)
    # write the info to files in the file tree
    for key in kres.__dict__.keys() :
        F = file(os.path.join(localpath, HEX+'.'+key), 'w')
        '''
        # too much...
        if OPTIONS.verbose :
            print key, str(getattr(kres, key))
        '''
        F.write(str(getattr(kres, key)))
        F.close()
# SCORE TEST
# Upload each GMN example, read back its server-assigned ID, then request
# every score URL for that ID and dump each Result attribute to a file.
for FILE in TESTFILES :
    naked = '.'.join(FILE.replace('../../','').split('.')[:-1])
    mypath = os.path.join(MYDIR, naked)
    os.makedirs(mypath)
    shutil.copy(FILE, mypath)
    '''
    for the initial test, we assume that the mime type is JSON and we get
    an identifier.
    a lack of identifier means that the GMN is bad or the server is broken.
    '''
    res = my_urlopen(guidourl(),
                     'data={0}'.format(urllib.quote_plus(gulp(FILE))))
    SHA = ''
    try :
        SHA = str(json.loads(res.read())['ID'])
    except :
        # no ID in the response: skip this example entirely
        continue
    # we loop through the tests, running them and writing info
    BASE = os.path.split(naked)[1]
    for i in range(len(SCORE_URLS)) :
        URL = SCORE_URLS[i]
        HEX = SCORE_URLHEXS[i]
        localpath = os.path.join(mypath, HEX)
        os.makedirs(localpath)
        myurl = SHA + '/' + URL
        PATH_TO_URL[localpath] = myurl
        if OPTIONS.verbose :
            print "CHECKING", guidourl(myurl)
        res = my_urlopen(guidourl(myurl))
        kres = Result(myurl, res.info().gettype(), res.getcode(), res.read())
        res.close()
        ext = get_extension(kres.type)
        # write the info to files in the file tree
        for key in kres.__dict__.keys() :
            F = file(os.path.join(localpath, BASE+'_'+HEX+'.'+key), 'w')
            '''
            # too much
            if OPTIONS.verbose :
                print key, str(getattr(kres, key))
            '''
            F.write(str(getattr(kres, key)))
            F.close()
def identical_dirs(dcmp):
    '''Recursively check that a dircmp result has no one-sided entries.'''
    if dcmp.left_only or dcmp.right_only:
        return False
    return all(identical_dirs(sub) for sub in dcmp.subdirs.values())
def make_report(dcmp, baselinedir, checkdir, currentdir = '') :
    '''Recursively build a unified-diff report of every file that differs
    between the baseline and check trees.

    Directories whose name is one of the URL hex tokens are prefixed with
    the URL that produced their contents (looked up in PATH_TO_URL).'''
    UD = ''
    for FILE in dcmp.diff_files :
        old_fn = os.path.join(baselinedir, currentdir, FILE)
        new_fn = os.path.join(checkdir, currentdir, FILE)
        maybehex = currentdir.rpartition('/')[-1]
        # ugh .. code dup
        if maybehex in (SCORE_URLHEXS+SERVER_URLHEXS) :
            UD += 'URL sent to the server: {0}\n'.format(PATH_TO_URL[os.path.join(checkdir,currentdir)])
        UD += ''.join([line+'\n' for line in difflib.unified_diff(gulp(old_fn).split('\n'), gulp(new_fn).split('\n'), old_fn, new_fn)])
    for key in dcmp.subdirs.keys():
        UD += make_report(dcmp.subdirs[key], baselinedir, checkdir, os.path.join(currentdir, key))
    return UD
# In check mode, diff the check tree against the baseline tree and emit a
# report to stdout or to the requested log file.
if not BASELINE :
    dcmp = dircmp(OPTIONS.baselinedir, OPTIONS.checkdir)
    if not identical_dirs(dcmp) :
        print "WARNING: The structure of your baseline directory (called `{0}') differs from that of your check (called `{1}') directory.".format(OPTIONS.baselinedir, OPTIONS.checkdir)
        print "Files that are present in one but not the other will not be checked against each other."
    report = make_report(dcmp, OPTIONS.baselinedir, OPTIONS.checkdir)
    if report != "" :
        report = "Below are diffs between the baseline and the check.\n" + report
    else :
        report = "There were no differences between the baseline and the check."
    if not OPTIONS.log :
        print report
    else :
        F = file(OPTIONS.log, 'w')
        F.write(report)
        F.close()
        print "Log written to `{0}'.".format(OPTIONS.log)
if BASELINE :
    print "REGTEST BASELINE COMPLETED"
else :
    print "REGTEST CHECK COMPLETED"
|
98503
|
import os
import sys
import pickle
from tqdm import tqdm
import numpy as np
import cv2
from fsgan.utils.bbox_utils import scale_bbox, crop_img
from fsgan.utils.video_utils import Sequence
def main(input_path, output_dir=None, cache_path=None, seq_postfix='_dsfd_seq.pkl', resolution=256, crop_scale=2.0,
         select='all', disable_tqdm=False, encoder_codec='avc1'):
    """Crop the cached face sequences of a video into square per-sequence videos.

    For every sequence in the cache file, writes a resolution x resolution
    cropped video plus a pickle with the detections (and landmarks, when
    present) remapped into the cropped coordinate system.

    Args:
        input_path: path to the input video.
        output_dir: output directory (defaults to the video path sans extension).
        cache_path: sequence pickle path (defaults to input path + seq_postfix).
        seq_postfix: postfix of the input/output sequence cache files.
        resolution: side length of the output videos in pixels.
        crop_scale: crop size relative to the detection bounding box.
        select: 'all' keeps every sequence, 'longest' keeps only the longest.
        disable_tqdm: disable the progress bar when True.
        encoder_codec: fourcc code for the output video encoder.
    """
    cache_path = os.path.splitext(input_path)[0] + seq_postfix if cache_path is None else cache_path
    if output_dir is None:
        output_dir = os.path.splitext(input_path)[0]
        if not os.path.isdir(output_dir):
            os.mkdir(output_dir)

    # Verification
    if not os.path.isfile(input_path):
        raise RuntimeError('Input video does not exist: ' + input_path)
    if not os.path.isfile(cache_path):
        raise RuntimeError('Cache file does not exist: ' + cache_path)
    if not os.path.isdir(output_dir):
        raise RuntimeError('Output directory does not exist: ' + output_dir)

    print('=> Cropping video sequences from video: "%s"...' % os.path.basename(input_path))

    # Load sequences from file
    with open(cache_path, "rb") as fp:  # Unpickling
        seq_list = pickle.load(fp)

    # Select sequences
    if select == 'longest':
        selected_seq_index = np.argmax([len(s) for s in seq_list])
        seq = seq_list[selected_seq_index]
        seq.id = 0
        seq_list = [seq]

    # Open input video file
    cap = cv2.VideoCapture(input_path)
    if not cap.isOpened():
        raise RuntimeError('Failed to read video: ' + input_path)
    total_frames = int(cap.get(cv2.CAP_PROP_FRAME_COUNT))
    fps = cap.get(cv2.CAP_PROP_FPS)
    input_vid_width = int(cap.get(cv2.CAP_PROP_FRAME_WIDTH))
    input_vid_height = int(cap.get(cv2.CAP_PROP_FRAME_HEIGHT))

    # For each sequence initialize output video file
    out_vids = []
    fourcc = cv2.VideoWriter_fourcc(*encoder_codec)
    for seq in seq_list:
        curr_vid_name = os.path.splitext(os.path.basename(input_path))[0] + '_seq%02d.mp4' % seq.id
        curr_vid_path = os.path.join(output_dir, curr_vid_name)
        out_vids.append(cv2.VideoWriter(curr_vid_path, fourcc, fps, (resolution, resolution)))

    # For each frame in the target video
    cropped_detections = [[] for seq in seq_list]
    cropped_landmarks = [[] for seq in seq_list]
    pbar = range(total_frames) if disable_tqdm else tqdm(range(total_frames), file=sys.stdout)
    for i in pbar:
        ret, frame = cap.read()
        if frame is None:
            continue

        # For each sequence overlapping this frame index
        for s, seq in enumerate(seq_list):
            if i < seq.start_index or (seq.start_index + len(seq) - 1) < i:
                continue
            det = seq[i - seq.start_index]

            # Crop frame
            bbox = np.concatenate((det[:2], det[2:] - det[:2]))
            bbox = scale_bbox(bbox, crop_scale)
            frame_cropped = crop_img(frame, bbox)
            frame_cropped = cv2.resize(frame_cropped, (resolution, resolution), interpolation=cv2.INTER_CUBIC)

            # Write cropped frame to output video
            out_vids[s].write(frame_cropped)

            # Remap the detection into cropped coordinates.
            # NOTE(review): this mutates `det` (and therefore the cached
            # sequence's detections) in place — confirm the cache is not
            # reused after this call.
            orig_size = bbox[2:]
            axes_scale = np.array([resolution, resolution]) / orig_size
            det[:2] -= bbox[:2]
            det[2:] -= bbox[:2]
            det[:2] *= axes_scale
            det[2:] *= axes_scale
            cropped_detections[s].append(det)

            # Remap the landmarks into cropped coordinates
            if hasattr(seq, 'landmarks'):
                curr_landmarks = seq.landmarks[i - seq.start_index]
                curr_landmarks[:, :2] -= bbox[:2]

                # 3D landmarks case: scale the depth axis by the mean scale
                if curr_landmarks.shape[1] == 3:
                    axes_scale = np.append(axes_scale, axes_scale.mean())
                curr_landmarks *= axes_scale
                cropped_landmarks[s].append(curr_landmarks)

    # Release the capture and finalize the output videos; without
    # VideoWriter.release() the encoded files may be left truncated.
    cap.release()
    for out_vid in out_vids:
        out_vid.release()

    # For each sequence write cropped sequence to file
    for s, seq in enumerate(seq_list):
        # TODO: this is a hack to change class type (remove this later)
        out_seq = Sequence(0)
        out_seq.detections = np.array(cropped_detections[s])
        if hasattr(seq, 'landmarks'):
            out_seq.landmarks = np.array(cropped_landmarks[s])
        out_seq.id, out_seq.obj_id, out_seq.size_avg = seq.id, seq.obj_id, seq.size_avg

        # Write to file
        curr_out_name = os.path.splitext(os.path.basename(input_path))[0] + '_seq%02d%s' % (out_seq.id, seq_postfix)
        curr_out_path = os.path.join(output_dir, curr_out_name)
        with open(curr_out_path, "wb") as fp:  # Pickling
            pickle.dump([out_seq], fp)
if __name__ == "__main__":
    # Parse program arguments
    # CLI entry point: mirrors main()'s signature argument-for-argument.
    import argparse
    parser = argparse.ArgumentParser('crop_video_sequences')
    parser.add_argument('input', metavar='VIDEO',
                        help='path to input video')
    parser.add_argument('-o', '--output', metavar='DIR',
                        help='output directory')
    parser.add_argument('-c', '--cache', metavar='PATH',
                        help='path to sequence cache file')
    parser.add_argument('-sp', '--seq_postfix', default='_dsfd_seq.pkl', metavar='POSTFIX',
                        help='input sequence file postfix')
    parser.add_argument('-r', '--resolution', default=256, type=int, metavar='N',
                        help='output video resolution (default: 256)')
    parser.add_argument('-cs', '--crop_scale', default=2.0, type=float, metavar='F',
                        help='crop scale relative to bounding box (default: 2.0)')
    parser.add_argument('-s', '--select', default='all', metavar='STR',
                        help='selection method [all|longest]')
    parser.add_argument('-dt', '--disable_tqdm', dest='disable_tqdm', action='store_true',
                        help='if specified disables tqdm progress bar')
    parser.add_argument('-ec', '--encoder_codec', default='avc1', metavar='STR',
                        help='encoder codec code')
    args = parser.parse_args()
    main(args.input, args.output, args.cache, args.seq_postfix, args.resolution, args.crop_scale, args.select,
         args.disable_tqdm, args.encoder_codec)
|
98508
|
import click
from picomc.cli.utils import pass_global_config
# Click group hosting the `config` subcommands (show/set/get/delete).
@click.group()
def config_cli():
    """Configure picomc."""
    pass
@config_cli.command()
@pass_global_config
def show(cfg):
    """Print the current config."""
    # Values only present in the fallback layer are shown as defaults first.
    defaulted = (item for item in cfg.bottom.items() if item[0] not in cfg)
    for key, value in defaulted:
        print("[default] {}: {}".format(key, value))
    for key, value in cfg.items():
        print("{}: {}".format(key, value))
@config_cli.command("set")
@click.argument("key")
@click.argument("value")
@pass_global_config
def _set(cfg, key, value):
    """Set a global config value."""
    # registered on the CLI as `set`; underscore avoids shadowing the builtin
    cfg[key] = value
@config_cli.command()
@click.argument("key")
@pass_global_config
def get(cfg, key):
    """Print a global config value."""
    try:
        value = cfg[key]
    except KeyError:
        print("No such item.")
    else:
        print(value)
@config_cli.command()
@click.argument("key")
@pass_global_config
def delete(cfg, key):
    """Delete a key from the global config."""
    # EAFP: attempt the delete and report a missing key to the user.
    try:
        del cfg[key]
    except KeyError:
        print("No such item.")
def register_config_cli(picomc_cli):
    """Attach the config command group to the root picomc CLI."""
    picomc_cli.add_command(config_cli, name="config")
|
98522
|
from __future__ import (absolute_import, division,
print_function, unicode_literals)
import pty, os, tty, termios, time, sys, base64, struct, signal
from fcntl import fcntl, F_GETFL, F_SETFL, ioctl
class TerminalServer:
    """A bash shell running in a pseudo-terminal.

    Data crossing the channel is base64 encoded in both directions so that
    control characters survive transport.
    """

    def __init__(self):
        self.closed = False
        self.pid, self.fd = pty.fork()
        if self.pid == pty.CHILD:
            # we are in the forked process
            # blow it away and replace with a shell
            os.execvp('bash', ['bash'])
        else:
            tty.setraw(self.fd, termios.TCSANOW)
            # open the shell process file descriptor as read-write
            if sys.version_info >= (3, 0):
                self.file = os.fdopen(self.fd, 'wb+', buffering=0)
            else:
                # python 2 compatible code
                self.file = os.fdopen(self.fd, 'wb+', 0)
            # set the file reads to be nonblocking
            flags = fcntl(self.file, F_GETFL)
            fcntl(self.file, F_SETFL, flags | os.O_NONBLOCK)

    def transmit(self, data):
        # data in the "channel" is b64 encoded so that control characters
        # don't get lost
        os.write(self.fd, base64.b64decode(data))
        self.receive()

    def receive(self):
        """Read pending shell output and write it base64-encoded to stdout."""
        try:
            data = os.read(self.fd, 8192)
        except OSError:
            # nonblocking read with nothing pending
            data = b''
        # b64encode returns bytes on Python 3 and sys.stdout.write there only
        # accepts str, so decode; on Python 2 this yields a writable unicode.
        sys.stdout.write(base64.b64encode(data).decode('ascii'))

    def update_window_size(self, rows, cols):
        # notify that the pty size should change to match xterm.js
        TIOCSWINSZ = getattr(termios, 'TIOCSWINSZ', -2146929561)
        s = struct.pack('HHHH', rows, cols, 0, 0)
        ioctl(self.fd, TIOCSWINSZ, s)
        self.receive()

    def close(self):
        if not self.closed:
            # send hang up to bash since the xterm is closing
            os.kill(self.pid, signal.SIGHUP)
            # mark closed so __del__ does not signal the child a second time
            self.closed = True

    def __del__(self):
        self.close()
|
98548
|
from sqlalchemy import (create_engine, Column, Integer, String, DateTime,
Boolean, BigInteger)
from sqlalchemy.ext.declarative import declarative_base
from sqlalchemy.orm import sessionmaker, scoped_session
from datetime import datetime
import os
"""A cache for storing account details as they are fetched to help deduplicate
results.
"""
# DB_PATH may point at any SQLAlchemy-supported database; defaults to a
# local SQLite file.
engine = create_engine(os.environ.get('DB_PATH', 'sqlite:///twitter.db'))
Base = declarative_base()
# scoped_session hands each thread its own session from a shared factory.
session_factory = sessionmaker(bind=engine)
Session = scoped_session(session_factory)
class Account(Base):
    """A minimal representation of a Twitter account.

    This model is used to store account ID's as they are found to help make
    sure we don't request details for the same account twice.
    """
    __tablename__ = 'accounts'

    # Twitter snowflake ids exceed 32 bits, hence BigInteger.
    id = Column(BigInteger, primary_key=True)
    id_str = Column(String(255))
    screen_name = Column(String(255))
    created_date = Column(DateTime)
    found_date = Column(DateTime, default=datetime.now)
    fetched_tweets_date = Column(DateTime)
    fetched_tweets = Column(Boolean, default=False)
    protected = Column(Boolean)
    tweet_count = Column(Integer)
    source = Column(String(1024))
    language = Column(String(32))

    @classmethod
    def from_dict(cls, account):
        """Loads an account from a valid JSON dict returned from the Twitter API

        Arguments:
            account {dict} -- The JSON formatted User object from the Twitter
                API

        Returns:
            cache.Account -- The Account instance representing this user
        """
        return Account(
            id=account.get('id'),
            id_str=account.get('id_str'),
            screen_name=account.get('screen_name'),
            created_date=datetime.strptime(
                account.get('created_at'), '%a %b %d %H:%M:%S %z %Y'),
            protected=account.get('protected'),
            tweet_count=account.get('statuses_count'),
            language=account.get('lang'),
            source=account.get('_tbsource'))

    @classmethod
    def from_tweepy(cls, account):
        """Build an Account from a tweepy User object."""
        return Account(
            id=account.id,
            id_str=account.id_str,
            screen_name=account.screen_name,
            # fixed: the column is `created_date`; passing `created_at=` is
            # not a valid keyword for this model and raised TypeError.
            created_date=account.created_at,
            protected=account.protected,
            tweet_count=account.statuses_count,
            language=account.lang,
            # NOTE(review): _tbsource is presumably attached to the tweepy
            # object by this project before calling — confirm at call sites.
            source=account._tbsource)

    @classmethod
    def exists(cls, id):
        """Return True when an account with this id is already cached."""
        return Session.query(Account).get(id) is not None

    def summary_dict(self):
        """Return a small dict with only the identifying fields."""
        return {
            'id': self.id,
            'id_str': self.id_str,
            'screen_name': self.screen_name
        }

    def save(self, commit=True):
        """Saves an account to the database

        Keyword Arguments:
            commit {bool} -- Whether or not to commit immediately
                (default: {True})

        Returns:
            Account -- self, for chaining
        """
        Session.add(self)
        if commit:
            Session.commit()
        return self
# Create the accounts table on import if it does not already exist.
Base.metadata.create_all(engine)
|
98569
|
import sys
import numpy as np
import pandas as pd
from sklearn.cluster import KMeans
from sklearn.linear_model import LinearRegression
from sklearn.model_selection import train_test_split
from sklearn.utils import check_random_state
from sklearn.decomposition import PCA
import keras
class LIMESimpleModel:
    """Piecewise-linear surrogate model: clusters the input space and fits
    one linear regressor per cluster (LIME-style approximation)."""

    def __init__(self, cluster_num, cluster_method=KMeans, random_state=None):
        # cluster_method is a class; it is instantiated with n_clusters here
        self.random_state = check_random_state(random_state)
        self.cluster_num = cluster_num
        self.cluster_method = cluster_method(
            n_clusters=cluster_num, random_state=self.random_state)
        self.models = []  # one (coef, intercept) pair per cluster

    def fit(self, X, y, predict_fn, labels_num):
        """Cluster X, then fit one LinearRegression per cluster on (X, y).

        predict_fn is accepted for API compatibility but is not used here.
        """
        self.cluster_labels = self.cluster_method.fit_predict(X)
        self.labels_num = labels_num
        for i in range(self.cluster_num):
            inds = np.where(self.cluster_labels == i)
            simplified_models = LinearRegression()
            simplified_models.fit(X[inds], y[inds])
            coef_ = simplified_models.coef_.T
            intercept_ = simplified_models.intercept_
            self.models.append((coef_, intercept_))

    def predict(self, x):
        """Return the argmax class index per row, using each row's cluster model."""
        cluster_result = self.cluster_method.predict(x)
        prediction_result = np.zeros(x.shape[0])
        for i in range(self.cluster_num):
            inds = np.where(cluster_result == i)
            if not len(inds[0]):
                # no rows assigned to this cluster
                continue
            predict_values = np.dot(x[inds],
                                    self.models[i][0]) + self.models[i][1]
            prediction_result[inds] = np.argmax(predict_values, axis=1)
        return prediction_result

    def predict_reg(self, x):
        """Return the raw per-label linear outputs (shape: n_rows x labels_num)."""
        cluster_result = self.cluster_method.predict(x)
        predict_values = np.zeros((x.shape[0], self.labels_num))
        for i in range(self.cluster_num):
            inds = np.where(cluster_result == i)
            if not len(inds[0]):
                continue
            predict_values[inds] = np.dot(
                x[inds], self.models[i][0]) + self.models[i][1]
        return predict_values
def _create_long_network():
    """Keras MLP: 143 inputs -> 300 sigmoid units + batch norm -> 36-way softmax."""
    in_layer = keras.Input(shape=(143, ))
    curr_layer = keras.layers.Dense(
        300, kernel_initializer="glorot_uniform",
        activation="sigmoid")(in_layer)
    curr_layer_bn = keras.layers.BatchNormalization()(curr_layer)
    # output layer
    out_layer = keras.layers.Dense(
        36, kernel_initializer="glorot_uniform",
        activation="softmax")(curr_layer_bn)
    # return an instance of the Model class
    return keras.Model(inputs=in_layer, outputs=out_layer)
if __name__ == "__main__":
    long_features = pd.read_csv(
        "./features/long_features.csv", header=None, nrows=160000)
    long_features = long_features.dropna().values
    with open(f"./lime_extended_performance_{sys.argv[1]}.csv", "w") as FILE:
        # 20 repetitions for each cluster count 1..50
        for j in range(20):
            for i in range(0, 50, 1):
                model = _create_long_network()
                model.load_weights(
                    f"./weights/tmp_long_rla_weights")
                feature_train, feature_test = train_test_split(
                    long_features, test_size=0.2)
                action_train = model.predict(feature_train)
                action_test = model.predict(feature_test)
                pca = PCA(n_components=48)
                feature_train = pca.fit_transform(feature_train)
                feature_test = pca.transform(feature_test)
                lime_model = LIMESimpleModel(cluster_num=i + 1)
                lime_model.fit(
                    X=feature_train,
                    y=action_train,
                    predict_fn=model.predict,
                    labels_num=36)
                lime_action_train = lime_model.predict(feature_train)
                lime_action_test = lime_model.predict(feature_test)
                # Accuracy = fraction of matching argmax labels; RMSE = root
                # mean squared error of the predicted label indices.
                # (The original assignments of test_accuracy and train_rmse
                # were swapped, which scrambled the CSV columns.)
                train_accuracy = np.mean(
                    np.int32(
                        lime_action_train == np.argmax(action_train, axis=1)))
                test_accuracy = np.mean(
                    np.int32(
                        lime_action_test == np.argmax(action_test, axis=1)))
                train_rmse = np.sqrt(
                    np.mean(
                        np.square(lime_action_train -
                                  np.argmax(action_train, axis=1))))
                test_rmse = np.sqrt(
                    np.mean(
                        np.square(lime_action_test -
                                  np.argmax(action_test, axis=1))))
                FILE.write(f"{i + 1}, {train_accuracy}, {test_accuracy}"
                           f", {train_rmse}, {test_rmse}\n")
|
98595
|
class BatmanQuotes(object):
    """Pick the right quote and speaker prefix for a hero string."""

    @staticmethod
    def get_quote(quotes, hero):
        # The digit embedded in `hero` selects the quote; digits sort before
        # letters, so the minimum character of the string is that digit.
        speaker = {'B': 'Batman: ', 'R': 'Robin: ', 'J': 'Joker: '}[hero[0]]
        return speaker + quotes[int(min(hero))]
|
98640
|
from mtree.tests.fixtures.generator import ADD, REMOVE, QUERY
"""
actions = '12a12r12a12r'
dimensions = 4
remove_chance = 0.1
"""
# Dimensionality of the points in this generated fixture.
DIMENSIONS = 4
def PERFORM(callback):
    """Replay a generated sequence of ADD/REMOVE operations on an M-tree.

    Each operation carries the point to add/remove plus a QUERY
    (query point, radius, limit) the harness runs after applying it.
    This file is generated — do not edit the data by hand.
    """
    callback(ADD((98, 6, 62, 83), QUERY((76, 5, 88, 66), 26.243784265073067, 3)))
    callback(ADD((50, 28, 1, 72), QUERY((90, 40, 27, 38), 47.355142939013646, 5)))
    callback(ADD((83, 39, 36, 21), QUERY((95, 65, 84, 65), 78.02485346868268, 5)))
    callback(ADD((21, 7, 20, 64), QUERY((61, 44, 96, 83), 64.53253290079154, 8)))
    callback(ADD((79, 25, 9, 34), QUERY((91, 94, 1, 48), 17.84898946852568, 5)))
    callback(ADD((45, 37, 47, 43), QUERY((89, 11, 94, 52), 30.248761285953847, 10)))
    callback(ADD((55, 44, 87, 68), QUERY((48, 46, 27, 61), 19.147531561226543, 5)))
    callback(ADD((40, 33, 32, 64), QUERY((15, 54, 44, 38), 28.412904157476582, 13)))
    callback(ADD((14, 48, 98, 26), QUERY((57, 27, 58, 39), 63.33140377696694, 9)))
    callback(ADD((99, 72, 71, 27), QUERY((23, 45, 44, 100), 73.42898604393157, 14)))
    callback(ADD((99, 71, 30, 67), QUERY((83, 16, 41, 83), 17.71043842471812, 8)))
    callback(ADD((83, 17, 15, 67), QUERY((26, 40, 75, 20), 63.40651543806896, 12)))
    callback(REMOVE((45, 37, 47, 43), QUERY((100, 88, 1, 5), 33.904351925689305, 8)))
    callback(REMOVE((50, 28, 1, 72), QUERY((100, 34, 36, 34), 10.242668328439963, 13)))
    callback(REMOVE((21, 7, 20, 64), QUERY((18, 82, 61, 54), 20.616666250047288, 3)))
    callback(REMOVE((99, 72, 71, 27), QUERY((66, 11, 93, 35), 59.410243323955534, 4)))
    callback(REMOVE((83, 39, 36, 21), QUERY((46, 12, 13, 28), 79.24075397007265, 4)))
    callback(REMOVE((98, 6, 62, 83), QUERY((100, 56, 82, 9), 7.8183440455728626, 8)))
    callback(REMOVE((99, 71, 30, 67), QUERY((53, 42, 7, 90), 62.723078948044034, 11)))
    callback(REMOVE((14, 48, 98, 26), QUERY((53, 39, 27, 71), 40.93889368459419, 4)))
    callback(REMOVE((55, 44, 87, 68), QUERY((51, 63, 53, 64), 73.74902370719796, 8)))
    callback(REMOVE((79, 25, 9, 34), QUERY((93, 79, 64, 35), 19.19760269418014, 4)))
    callback(REMOVE((83, 17, 15, 67), QUERY((2, 12, 40, 5), 11.605814870121787, 6)))
    callback(REMOVE((40, 33, 32, 64), QUERY((2, 76, 19, 68), 33.00626548642569, 4)))
    callback(ADD((86, 89, 93, 13), QUERY((54, 55, 70, 10), 45.87164734218477, 6)))
    callback(ADD((17, 14, 75, 14), QUERY((47, 45, 50, 30), 8.693289174369454, 5)))
    callback(ADD((44, 78, 90, 82), QUERY((21, 30, 64, 98), 1.1800345643200583, 6)))
    callback(ADD((46, 34, 32, 68), QUERY((13, 44, 89, 55), 20.20050348392597, 3)))
    callback(ADD((48, 2, 78, 53), QUERY((86, 22, 17, 100), 13.012457699744244, 2)))
    callback(ADD((12, 96, 67, 93), QUERY((2, 26, 11, 38), 33.92528287095654, 1)))
    callback(ADD((27, 1, 86, 66), QUERY((0, 29, 36, 65), 40.12302243206612, 6)))
    callback(ADD((45, 48, 5, 57), QUERY((69, 90, 8, 31), 55.50293178441844, 6)))
    callback(ADD((3, 56, 75, 40), QUERY((57, 68, 35, 58), 51.85734456476708, 13)))
    callback(ADD((25, 9, 42, 33), QUERY((55, 60, 9, 7), 48.82086535780479, 5)))
    callback(ADD((51, 10, 62, 13), QUERY((31, 67, 100, 63), 39.79281561802228, 16)))
    callback(ADD((81, 76, 54, 33), QUERY((37, 13, 54, 48), 47.107623428345, 8)))
    callback(REMOVE((45, 48, 5, 57), QUERY((58, 58, 1, 45), 76.02731211909581, 10)))
    callback(REMOVE((3, 56, 75, 40), QUERY((55, 18, 70, 76), 1.3579996852625253, 9)))
    callback(REMOVE((81, 76, 54, 33), QUERY((86, 94, 10, 63), 39.880370702624006, 14)))
    callback(REMOVE((86, 89, 93, 13), QUERY((39, 14, 15, 41), 31.040906656941154, 5)))
    callback(REMOVE((51, 10, 62, 13), QUERY((81, 58, 3, 8), 71.22765784979687, 11)))
    callback(REMOVE((25, 9, 42, 33), QUERY((45, 9, 18, 17), 33.54120155100786, 2)))
    callback(REMOVE((27, 1, 86, 66), QUERY((9, 55, 98, 33), 20.60150481705851, 11)))
    callback(REMOVE((48, 2, 78, 53), QUERY((29, 83, 5, 78), 37.70513355852923, 8)))
    callback(REMOVE((46, 34, 32, 68), QUERY((73, 96, 56, 10), 62.073279532426554, 6)))
    callback(REMOVE((44, 78, 90, 82), QUERY((32, 78, 34, 46), 52.87506545618484, 6)))
    callback(REMOVE((17, 14, 75, 14), QUERY((27, 15, 6, 72), 64.54547466103696, 5)))
    callback(REMOVE((12, 96, 67, 93), QUERY((45, 61, 53, 56), 52.776587554699134, 3)))
|
98687
|
import os
import tempfile
import unittest
import logging
from pyidf import ValidationLevel
import pyidf
from pyidf.idf import IDF
from pyidf.pumps import PumpVariableSpeedCondensate
log = logging.getLogger(__name__)
class TestPumpVariableSpeedCondensate(unittest.TestCase):
    """Round-trip test: write a fully-populated PumpVariableSpeedCondensate
    to an IDF file and verify every field survives re-parsing."""
    def setUp(self):
        # Fresh temp file per test; only self.path is handed to the IDF API.
        self.fd, self.path = tempfile.mkstemp()
    def tearDown(self):
        os.remove(self.path)
    def test_create_pumpvariablespeedcondensate(self):
        """Set every field, save, reload, and compare field by field.

        String fields use assertEqual; real fields use assertAlmostEqual
        to tolerate formatting round-trips through the IDF text format.
        """
        pyidf.validation_level = ValidationLevel.error
        obj = PumpVariableSpeedCondensate()
        # alpha
        var_name = "Name"
        obj.name = var_name
        # node
        var_inlet_node_name = "node|Inlet Node Name"
        obj.inlet_node_name = var_inlet_node_name
        # node
        var_outlet_node_name = "node|Outlet Node Name"
        obj.outlet_node_name = var_outlet_node_name
        # real
        var_rated_flow_rate = 0.0001
        obj.rated_flow_rate = var_rated_flow_rate
        # real
        var_rated_pump_head = 5.5
        obj.rated_pump_head = var_rated_pump_head
        # real
        var_rated_power_consumption = 6.6
        obj.rated_power_consumption = var_rated_power_consumption
        # real
        var_motor_efficiency = 0.50005
        obj.motor_efficiency = var_motor_efficiency
        # real
        var_fraction_of_motor_inefficiencies_to_fluid_stream = 0.5
        obj.fraction_of_motor_inefficiencies_to_fluid_stream = var_fraction_of_motor_inefficiencies_to_fluid_stream
        # real
        var_coefficient_1_of_the_part_load_performance_curve = 9.9
        obj.coefficient_1_of_the_part_load_performance_curve = var_coefficient_1_of_the_part_load_performance_curve
        # real
        var_coefficient_2_of_the_part_load_performance_curve = 10.1
        obj.coefficient_2_of_the_part_load_performance_curve = var_coefficient_2_of_the_part_load_performance_curve
        # real
        var_coefficient_3_of_the_part_load_performance_curve = 11.11
        obj.coefficient_3_of_the_part_load_performance_curve = var_coefficient_3_of_the_part_load_performance_curve
        # real
        var_coefficient_4_of_the_part_load_performance_curve = 12.12
        obj.coefficient_4_of_the_part_load_performance_curve = var_coefficient_4_of_the_part_load_performance_curve
        # object-list
        var_pump_flow_rate_schedule_name = "object-list|Pump Flow Rate Schedule Name"
        obj.pump_flow_rate_schedule_name = var_pump_flow_rate_schedule_name
        # object-list
        var_zone_name = "object-list|Zone Name"
        obj.zone_name = var_zone_name
        # real
        var_skin_loss_radiative_fraction = 0.5
        obj.skin_loss_radiative_fraction = var_skin_loss_radiative_fraction
        # Save without validation, then log what was written for debugging.
        idf = IDF()
        idf.add(obj)
        idf.save(self.path, check=False)
        with open(self.path, mode='r') as f:
            for line in f:
                log.debug(line.strip())
        # Reload and verify the round trip.
        idf2 = IDF(self.path)
        self.assertEqual(idf2.pumpvariablespeedcondensates[0].name, var_name)
        self.assertEqual(idf2.pumpvariablespeedcondensates[0].inlet_node_name, var_inlet_node_name)
        self.assertEqual(idf2.pumpvariablespeedcondensates[0].outlet_node_name, var_outlet_node_name)
        self.assertAlmostEqual(idf2.pumpvariablespeedcondensates[0].rated_flow_rate, var_rated_flow_rate)
        self.assertAlmostEqual(idf2.pumpvariablespeedcondensates[0].rated_pump_head, var_rated_pump_head)
        self.assertAlmostEqual(idf2.pumpvariablespeedcondensates[0].rated_power_consumption, var_rated_power_consumption)
        self.assertAlmostEqual(idf2.pumpvariablespeedcondensates[0].motor_efficiency, var_motor_efficiency)
        self.assertAlmostEqual(idf2.pumpvariablespeedcondensates[0].fraction_of_motor_inefficiencies_to_fluid_stream, var_fraction_of_motor_inefficiencies_to_fluid_stream)
        self.assertAlmostEqual(idf2.pumpvariablespeedcondensates[0].coefficient_1_of_the_part_load_performance_curve, var_coefficient_1_of_the_part_load_performance_curve)
        self.assertAlmostEqual(idf2.pumpvariablespeedcondensates[0].coefficient_2_of_the_part_load_performance_curve, var_coefficient_2_of_the_part_load_performance_curve)
        self.assertAlmostEqual(idf2.pumpvariablespeedcondensates[0].coefficient_3_of_the_part_load_performance_curve, var_coefficient_3_of_the_part_load_performance_curve)
        self.assertAlmostEqual(idf2.pumpvariablespeedcondensates[0].coefficient_4_of_the_part_load_performance_curve, var_coefficient_4_of_the_part_load_performance_curve)
        self.assertEqual(idf2.pumpvariablespeedcondensates[0].pump_flow_rate_schedule_name, var_pump_flow_rate_schedule_name)
        self.assertEqual(idf2.pumpvariablespeedcondensates[0].zone_name, var_zone_name)
        self.assertAlmostEqual(idf2.pumpvariablespeedcondensates[0].skin_loss_radiative_fraction, var_skin_loss_radiative_fraction)
|
98694
|
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import os, torch
from tqdm import tqdm
from silence_tensorflow import silence_tensorflow
silence_tensorflow()
import tensorflow.compat.v1 as tf
from metrics.FVD.FVD import Embedder, preprocess, calculate_fvd
import numpy as np
def compute_fvd(real_videos, fake_videos, device=0):
    """Compute the Frechet Video Distance (FVD) between two video sets.

    Args:
        real_videos / fake_videos: sequences of torch video batches in
            (batch, time, channels, height, width) layout with values in
            [-1, 1] (they are rescaled to [0, 255] and permuted to NHWC).
        device: numeric suffix of the GPU device name to run on.

    Returns:
        The scalar FVD score.
    """
    # Restrict TF to the requested GPU; it is then addressed as /gpu:0.
    devs = tf.config.experimental.get_visible_devices("GPU")
    target_dev = [d for d in devs if d.name.endswith(str(device))][0]
    tf.config.experimental.set_visible_devices(target_dev, 'GPU')
    with tf.device("/gpu:0"):
        with tf.Graph().as_default():
            # construct graph
            sess = tf.Session()
            input_real = tf.placeholder(dtype=tf.float32, shape=(*real_videos[0].shape[:2], real_videos[0].shape[3],
                                                                 real_videos[0].shape[4], real_videos[0].shape[2]))
            input_fake = tf.placeholder(dtype=tf.float32, shape=(*real_videos[0].shape[:2], real_videos[0].shape[3],
                                                                 real_videos[0].shape[4], real_videos[0].shape[2]))
            real_pre = preprocess(input_real, (224, 224))
            emb_real = Embedder(real_pre)
            embed_real = emb_real.create_id3_embedding(real_pre)
            fake_pre = preprocess(input_fake, (224, 224))
            emb_fake = Embedder(fake_pre)
            embed_fake = emb_fake.create_id3_embedding(fake_pre)
            sess.run(tf.global_variables_initializer())
            sess.run(tf.tables_initializer())
            real, fake = [], []
            for rv, fv in tqdm(zip(real_videos, fake_videos)):
                # Map [-1, 1] -> [0, 255] and NCHW -> NHWC per frame.
                real_batch = ((rv + 1.) * 127.5).permute(0, 1, 3, 4, 2).cpu().numpy()
                fake_batch = ((fv + 1.) * 127.5).permute(0, 1, 3, 4, 2).cpu().numpy()
                feed_dict = {input_real: real_batch, input_fake: fake_batch}
                # BUG FIX: the fetch list used to be [embed_fake, embed_real],
                # so fake embeddings landed in ``real`` and vice versa. FVD is
                # symmetric so the score survived, but the labels were wrong;
                # fetch in name order.
                r, f = sess.run([embed_real, embed_fake], feed_dict)
                real.append(r); fake.append(f)
            print('Compute FVD score')
            real = np.concatenate(real, axis=0)
            fake = np.concatenate(fake, axis=0)
            embed_real = tf.placeholder(dtype=tf.float32, shape=(real.shape[0], 400))
            embed_fake = tf.placeholder(dtype=tf.float32, shape=(real.shape[0], 400))
            result = calculate_fvd(embed_real, embed_fake)
            feed_dict = {embed_real: real, embed_fake: fake}
            result = sess.run(result, feed_dict)
            sess.close()
            tf.reset_default_graph()
            return result
def get_embeddings(fake_videos, device=0):
    """Return I3D embeddings for *fake_videos* as an (N, 400) array.

    Args:
        fake_videos: sequence of torch video batches in
            (batch, time, channels, height, width) layout, values in [-1, 1].
        device: numeric suffix of the GPU device name to run on.
    """
    # Restrict TF to the requested GPU; it is then addressed as /gpu:0.
    devs = tf.config.experimental.get_visible_devices("GPU")
    target_dev = [d for d in devs if d.name.endswith(str(device))][0]
    tf.config.experimental.set_visible_devices(target_dev, 'GPU')
    with tf.device("/gpu:0"):
        with tf.Graph().as_default():
            # construct graph
            sess = tf.Session()
            input_fake = tf.placeholder(dtype=tf.float32, shape=(*fake_videos[0].shape[:2], fake_videos[0].shape[3],
                                                                 fake_videos[0].shape[4], fake_videos[0].shape[2]))
            fake_pre = preprocess(input_fake, (224, 224))
            emb_fake = Embedder(fake_pre)
            embed_fake = emb_fake.create_id3_embedding(fake_pre)
            sess.run(tf.global_variables_initializer())
            sess.run(tf.tables_initializer())
            fake = []
            for fv in tqdm(fake_videos):
                # Map [-1, 1] -> [0, 255] and NCHW -> NHWC per frame.
                fake_batch = ((fv + 1.) * 127.5).permute(0, 1, 3, 4, 2).cpu().numpy()
                feed_dict = {input_fake: fake_batch}
                # BUG FIX: fetching ``[embed_fake]`` returned a one-element
                # list per batch, so the final concatenate produced an array
                # of shape (num_batches, batch, 400) instead of (N, 400).
                # Fetch the tensor directly.
                f = sess.run(embed_fake, feed_dict)
                fake.append(f)
            fake = np.concatenate(fake, axis=0)
            sess.close()
            tf.reset_default_graph()
            return fake
|
98716
|
import torch
import torch.nn as nn
import torch.nn.functional as F
from Regressor import Regressor
from utils import *;
class Model(nn.Module):
    """Sequence model: MLP embedding -> LSTM -> MLP head -> mixture regressor.

    ``forward`` returns the negative masked log-likelihood of the targets
    under the regressor's 'mul-Gaussian@20' output distribution.
    """

    def __init__(self, input_dim, embed_dim, output_dim, data=None):
        super(Model, self).__init__()
        self.input_dim = input_dim
        self.embed_dim = embed_dim
        self.output_dim = output_dim
        # Two-layer input embedding.
        self.embedding1 = nn.Linear(input_dim, embed_dim)
        self.embedding2 = nn.Linear(embed_dim, embed_dim)
        self.rnn = nn.LSTM(embed_dim, output_dim)
        self.loss = 'mul-Gaussian@20'
        self.regressor = Regressor(self.loss, embed_dim, input_dim)
        # Two-layer head mapping LSTM output back to embed_dim.
        self.final1 = nn.Linear(output_dim, embed_dim)
        self.final2 = nn.Linear(embed_dim, embed_dim)
        self.dropout = nn.Dropout(0.)  # p=0: currently a no-op placeholder

    def forward(self, inputs):
        x, y, mask = inputs
        hidden = F.relu(self.embedding2(F.relu(self.embedding1(x))))
        rnn_out, _ = self.rnn(hidden)
        rnn_out = self.dropout(rnn_out)
        head = F.relu(self.final2(F.relu(self.final1(rnn_out))))
        outputs = self.regressor(head)
        # Per-step log-likelihood, summed over features, masked over time,
        # then averaged over the batch.
        ll = LogLikelihood(y, outputs, self.loss).sum(-1)
        ll = (ll * mask).sum(0).mean()
        return -ll
|
98725
|
from .utils import *
from .mobject.geometry import *
from .scene import SceneGL
from manim_express.backend.manimgl.express.eager import EagerModeScene
|
98730
|
from flatdata.generator.tree.nodes.node import Node
from flatdata.generator.tree.nodes.references import ResourceReference, FieldReference, StructureReference
class ExplicitReference(Node):
    """Tree node modelling an explicit reference between resources.

    It always carries exactly three children: a ResourceReference (the
    destination), a FieldReference (the source field) and a
    StructureReference (the source type).
    """

    def __init__(self, name, properties=None):
        super().__init__(name=name, properties=properties)

    @staticmethod
    def create(properties):
        """Build an ExplicitReference (and its children) from *properties*."""
        destination = properties.destination
        field = Node.jointwo(properties.source_type, properties.source_field)
        # Node names must not contain the path separator, so flatten paths.
        node_name = "er_{}_{}".format(
            field.replace(Node.PATH_SEPARATOR, '_'),
            destination.replace(Node.PATH_SEPARATOR, '_'))
        node = ExplicitReference(name=node_name, properties=properties)
        for child in (ResourceReference(name=destination),
                      FieldReference(name=field),
                      StructureReference(name=properties.source_type)):
            node.insert(child)
        return node

    @property
    def destination(self):
        """The single ResourceReference child."""
        matches = self.children_like(ResourceReference)
        assert len(matches) == 1
        return matches[0]

    @property
    def field(self):
        """The single FieldReference child."""
        matches = self.children_like(FieldReference)
        assert len(matches) == 1
        return matches[0]

    @property
    def structure(self):
        """The single StructureReference child."""
        matches = self.children_like(StructureReference)
        assert len(matches) == 1
        return matches[0]
|
98752
|
from sklearn.model_selection import StratifiedKFold, KFold
from skopt.utils import use_named_args
from .pipes_and_transformers import MidasEnsembleClassifiersWithPipeline, wrap_pipeline, get_metadata_fit, _MidasIdentity
from imblearn.pipeline import Pipeline
from sklearn.calibration import CalibratedClassifierCV
from copy import deepcopy
from .metrics import evaluate_metrics, average_metrics
from .reporting import (
write_train_report,
prop_minority_to_rest_class,
MetadataFit,
averaged_metadata_list, create_report_dfs,
)
import numpy as np
from collections import Counter
def has_resampler(pipeline):
    """Return True if any step of *pipeline* is a resampler.

    A step counts as a resampler when its estimator (second element of
    the step tuple) exposes a ``fit_resample`` method, as imblearn
    samplers do.
    """
    return any(hasattr(step[1], "fit_resample") for step in pipeline.steps)
def train_model_without_undersampling(model, X, y, exists_resampler):
    """Fit a deep copy of *model* on the full data and collect fit metadata.

    Returns the fitted copy plus a MetadataFit: taken from the fitted
    pipeline when it contains a resampler (it records pre/post-resampling
    stats during fit), otherwise computed from the class balance of *y*.
    """
    fitted = deepcopy(model)
    fitted.fit(X, y)
    if exists_resampler:  # model is a pipeline containing a resampler
        return fitted, get_metadata_fit(fitted)
    return fitted, MetadataFit(len(y), prop_minority_to_rest_class(Counter(y)))
def train_ensemble_model_with_undersampling(model, X, y, exists_resampler, max_k_undersampling):
    """Train one copy of *model* per majority-class fold and ensemble them.

    The majority ("rest") class is split into K folds, where K is the
    imbalance ratio rounded to int and clamped to [2, max_k_undersampling];
    each model copy is fitted on the full minority class plus one majority
    fold. Returns the ensemble and the averaged fit metadata.
    """
    # See https://github.com/w4k2/umce/blob/master/method.py
    # Firstly we analyze the training set to find majority class and to
    # establish the imbalance ratio
    counter_classes = Counter(y)
    minority_class_key = counter_classes.most_common()[-1][0]
    minority_class_idxs = np.where(y == minority_class_key)[0]
    rest_class_idxs = np.where(y != minority_class_key)[0]
    # K is the imbalanced ratio round to int (with a minimum of 2 and a max of max_k_undersamling)
    imbalance_ratio = (
        len(rest_class_idxs) / len(minority_class_idxs)
    )
    k_majority_class = int(np.around(imbalance_ratio))
    # Clamp order matters: the max-clamp runs first, then the min of 2 wins.
    k_majority_class = k_majority_class if k_majority_class < max_k_undersampling else max_k_undersampling
    k_majority_class = k_majority_class if k_majority_class > 2 else 2
    fold_models = []
    list_metadata = []
    kf = KFold(n_splits=k_majority_class)
    for _, index in kf.split(rest_class_idxs):
        fold_model = deepcopy(model)
        # Minority samples are reused in every fold; each majority sample
        # appears in exactly one fold.
        fold_idx = np.concatenate([minority_class_idxs, rest_class_idxs[index]])
        X_train_f, y_train_f = X[fold_idx], y[fold_idx]
        fold_model.fit(X_train_f, y_train_f)
        fold_models.append(fold_model)
        if exists_resampler:  # In this case, model is a pipeline with a resampler.
            list_metadata.append(get_metadata_fit(fold_model))
        else:
            list_metadata.append(
                MetadataFit(len(y_train_f), prop_minority_to_rest_class(Counter(y_train_f)))
            )
    ensemble_model = MidasEnsembleClassifiersWithPipeline(None, fold_models)
    return ensemble_model, averaged_metadata_list(list_metadata)
def ensemble_model_with_resampling(X, y, pipeline_post_process,
                                   model, loss_metric, peeking_metrics,
                                   k_inner_fold, skip_inner_folds, undersampling_majority_class,
                                   max_k_undersampling, calibrated
                                   ):
    """Inner-CV training used when the pipeline contains a resampler.

    The full resampling pipeline plus model is refitted inside every inner
    fold (resampling must only ever see training data). Returns the fold
    ensemble, the averaged fold metrics, and a comments dict for reporting.
    """
    pipeline_post_process = wrap_pipeline(pipeline_post_process)
    complete_steps = pipeline_post_process.steps + [("model", model)]
    complete_pipeline = Pipeline(complete_steps)
    fold_models = []
    fold_metrics = []
    list_metadata = []
    comments = {}
    comments["option"] = "build model with resampling"
    inner_cv = StratifiedKFold(n_splits=k_inner_fold)
    for k, (train_index, test_index) in enumerate(inner_cv.split(X, y)):
        if k not in skip_inner_folds:
            X_train, y_train, X_test, y_test = (
                X[train_index],
                y[train_index],
                X[test_index],
                y[test_index],
            )
            if undersampling_majority_class:
                (
                    fold_base_model,
                    fold_metadata,
                ) = train_ensemble_model_with_undersampling(
                    complete_pipeline, X_train, y_train, True, max_k_undersampling
                )
            else:
                fold_base_model, fold_metadata = train_model_without_undersampling(
                    complete_pipeline, X_train, y_train, True
                )
            list_metadata.append(fold_metadata)
            fold_final_model = fold_base_model
            if calibrated:
                # Calibrate on the held-out fold; the base model stays frozen.
                fold_final_model = CalibratedClassifierCV(
                    fold_base_model, method="isotonic", cv="prefit"
                )
                fold_final_model.fit(X_test, y_test)
            fold_models.append(fold_final_model)
            # Metrics use the probability of the positive class.
            y_proba = fold_final_model.predict_proba(X_test)[:, 1]
            fold_metrics.append(
                evaluate_metrics(y_test, y_proba, loss_metric, peeking_metrics)
            )
    averaged_metrics = average_metrics(fold_metrics)
    complete_model = MidasEnsembleClassifiersWithPipeline(None, fold_models)
    metadata = averaged_metadata_list(list_metadata)
    comments["number of folds"] = len(fold_models)
    comments[
        "average size of training set before resampling"
    ] = metadata.get_num_init_samples_bf()
    comments[
        "average prop of minority class before resampling"
    ] = metadata.get_prop_minority_class_bf()
    comments[
        "average size of training set after resampling"
    ] = metadata.get_num_init_samples_af()
    comments[
        "average prop of minority class after resampling"
    ] = metadata.get_prop_minority_class_af()
    return complete_model, averaged_metrics, comments
def ensemble_model_without_resampling(X, y, pipeline_post_process,
                                      model, loss_metric, peeking_metrics,
                                      k_inner_fold, skip_inner_folds, undersampling_majority_class,
                                      max_k_undersampling, calibrated
                                      ):
    """Inner-CV training used when the pipeline has no resampler.

    The (deep-copied) post-process pipeline is fitted once on all the data
    and only the model is refitted per fold. Returns the fold ensemble, the
    averaged fold metrics, and a comments dict for reporting.
    """
    list_metadata = []
    fold_models = []  # List of all models builded in this k-fold
    fold_metrics = []  # List of all metrics
    comments = {}  # Dict of comments, used for reporting
    comments["option"] = "build model without resampling"
    inner_cv = StratifiedKFold(n_splits=k_inner_fold)
    pipeline_post_process = deepcopy(pipeline_post_process)
    # For efficiency we transform the data once, for all folds
    X_t = pipeline_post_process.fit_transform(X, y)
    y_t = y
    for k, (train_index, test_index) in enumerate(inner_cv.split(X_t, y_t)):
        if k not in skip_inner_folds:
            X_train, y_train, X_test, y_test = (
                X_t[train_index],
                y_t[train_index],
                X_t[test_index],
                y_t[test_index],
            )
            if undersampling_majority_class:
                fold_base_model, fold_metadata = train_ensemble_model_with_undersampling(
                    model, X_train, y_train, False, max_k_undersampling
                )
            else:
                fold_base_model, fold_metadata = train_model_without_undersampling(
                    model, X_train, y_train, False
                )
            list_metadata.append(fold_metadata)
            fold_final_model = fold_base_model
            if calibrated:
                # Calibrate on the held-out fold; the base model stays frozen.
                fold_final_model = CalibratedClassifierCV(
                    fold_base_model, method="isotonic", cv="prefit"
                )
                fold_final_model.fit(X_test, y_test)
            fold_models.append(fold_final_model)
            # Metrics use the probability of the positive class.
            y_proba = fold_final_model.predict_proba(X_test)[:, 1]
            fold_metrics.append(
                evaluate_metrics(y_test, y_proba, loss_metric, peeking_metrics)
            )
    averaged_metrics = average_metrics(fold_metrics)
    metadata = averaged_metadata_list(list_metadata)
    complete_model = MidasEnsembleClassifiersWithPipeline(
        pipeline_post_process, fold_models
    )
    comments["number of folds"] = len(fold_models)
    comments["average size of training set"] = metadata.get_num_init_samples_bf()
    comments["average prop of minority class"] = metadata.get_prop_minority_class_bf()
    return complete_model, averaged_metrics, comments
def find_best_model(list_models, list_metrics):
    """Return the model with the lowest ``loss_metric`` and its index.

    Ties are broken in favour of the earliest entry.
    """
    losses = [metrics["loss_metric"] for metrics in list_metrics]
    best_index = min(range(len(losses)), key=losses.__getitem__)
    return list_models[best_index], best_index
def train_inner_model(X, y, model_search_spaces,
                      X_hold_out, y_hold_out, k_inner_fold,
                      skip_inner_folds, n_initial_points, n_calls, ensemble,
                      calibrated, loss_metric, peeking_metrics,
                      skopt_func, verbose, report_doc):
    """Bayesian-optimize every candidate model space and return the best model.

    For each entry of *model_search_spaces* a skopt objective is built that
    trains an inner-CV ensemble (with or without resampling, depending on
    whether the pipeline has a resampler) and returns its loss metric.
    After optimization the best model is optionally refitted on the full
    data (when *ensemble* is False) and a training report is written.

    Returns (best_model, best_params, best_comments, report_dfs).
    """
    # These lists are filled as a side effect of every objective evaluation;
    # their indices line up across params/models/metrics/comments.
    list_params = []
    list_models = []
    list_metrics = []
    list_holdout_metrics = []
    list_comments = []
    for key in model_search_spaces.keys():
        pipeline_post_process = model_search_spaces[key]["pipeline_post_process"]
        if not pipeline_post_process:
            # No preprocessing requested: use a pass-through pipeline.
            pipeline_post_process = Pipeline([("identity", _MidasIdentity())])
        model_name = key
        model = model_search_spaces[key]["model"]
        complete_steps = pipeline_post_process.steps + [("model", model)]
        complete_pipeline = Pipeline(complete_steps)
        search_space = model_search_spaces[key]["search_space"]
        exists_resampler = has_resampler(pipeline_post_process)
        @use_named_args(search_space)
        def func_to_minimize(**params):
            # skopt objective: train with the sampled params, record the
            # resulting model/metrics, and return the loss to minimize.
            copy_params = params.copy()
            # These two pseudo-hyperparameters steer training, not the
            # estimator, so pop them before set_params.
            undersampling_majority_class = copy_params.pop(
                "undersampling_majority_class", False
            )
            max_k_undersampling = copy_params.pop(
                "max_k_undersampling", 0
            )
            complete_pipeline.set_params(**copy_params)
            list_params.append({**params, **{"model": model_name}})
            if verbose:
                print(f"Optimizing model {model_name}\n")
                print(f"With parameters {params}\n")
            if exists_resampler:
                ensemble_model, metrics, comments = ensemble_model_with_resampling(
                    X=X, y=y, pipeline_post_process=pipeline_post_process,
                    model=model, loss_metric=loss_metric, peeking_metrics=peeking_metrics,
                    k_inner_fold=k_inner_fold, skip_inner_folds=skip_inner_folds,
                    undersampling_majority_class=undersampling_majority_class,
                    max_k_undersampling=max_k_undersampling, calibrated=calibrated
                )
            else:
                ensemble_model, metrics, comments = ensemble_model_without_resampling(
                    X=X, y=y, pipeline_post_process=pipeline_post_process,
                    model=model, loss_metric=loss_metric, peeking_metrics=peeking_metrics,
                    k_inner_fold=k_inner_fold, skip_inner_folds=skip_inner_folds,
                    undersampling_majority_class=undersampling_majority_class,
                    max_k_undersampling=max_k_undersampling, calibrated=calibrated
                )
            list_models.append(ensemble_model)
            list_metrics.append(metrics)
            list_comments.append(comments)
            if verbose:
                print(f"Metric is {metrics['loss_metric']}\n")
            if len(y_hold_out) > 0:
                # Optional hold-out evaluation, for reporting only.
                y_hold_out_proba = ensemble_model.predict_proba(X_hold_out)[:, 1]
                list_holdout_metrics.append(
                    evaluate_metrics(
                        y_hold_out, y_hold_out_proba, loss_metric, peeking_metrics
                    )
                )
            return metrics["loss_metric"]
        # perform optimization
        skopt_func(
            func=func_to_minimize, dimensions=search_space,
            n_initial_points=n_initial_points,
            n_calls=n_calls,
        )
    best_model, index_best_model = find_best_model(list_models, list_metrics)
    undersampling = list_params[index_best_model].get('undersampling_majority_class', False)
    if not ensemble and undersampling:
        if verbose:
            print("Training final model with undersampling technique")
        exists_resampler = has_resampler(best_model.get_complete_pipeline_to_fit())
        max_k_undersampling = list_params[index_best_model].get('max_k_undersampling', 0)
        best_model, _ = train_ensemble_model_with_undersampling(best_model.get_complete_pipeline_to_fit(),
                                                                X, y, exists_resampler, max_k_undersampling)
    if not ensemble and not undersampling:
        if verbose:
            print("Training final model")
        # Refit a single pipeline on all the data instead of keeping folds.
        best_model = best_model.get_complete_pipeline_to_fit()
        best_model.fit(X, y)
    if verbose:
        print("Best model found")
    report_dfs = create_report_dfs(list_params, list_metrics, loss_metric)
    if report_doc:
        write_train_report(
            report_doc=report_doc, list_params=list_params, list_metrics=list_metrics,
            list_holdout_metrics=list_holdout_metrics, peeking_metrics=peeking_metrics,
            list_comments=list_comments
        )
    return best_model, list_params[index_best_model], list_comments[index_best_model], report_dfs
|
98800
|
from importlib.util import find_spec
def module_available(module_path: str) -> bool:
    """Check whether the package is available or not.

    Args:
        module_path: Dotted name of the module, e.g. ``"os.path"``.

    Returns:
        True if an import spec exists for the module, False otherwise
        (including when probing the spec itself fails).
    """
    try:
        spec = find_spec(module_path)
    except (AttributeError, ModuleNotFoundError):
        # AttributeError: raised on Python 3.6; ModuleNotFoundError: raised
        # on 3.7+ when a parent package of ``module_path`` does not exist.
        return False
    return spec is not None
|
98823
|
import errno
import os
import requests
from messagebuf import MessageBuf
class ResException(Exception):
    """Raised when a resource cannot be fetched, parsed, or cached."""
    pass
class ResLayer(object):
    """A single typed layer extracted from a resource file."""
    def __init__(self, ltype, ldata):
        # ltype: layer type string; ldata: raw layer payload.
        self.ltype = ltype
        self.ldata = ldata
class Resource(object):
    """Parsed 'Haven Resource 1' file: signature/version check plus layers."""
    def __init__(self, resname, resver, rawinfo):
        self.resname = resname
        self.resver = resver
        buf = MessageBuf(rawinfo)
        canon_sig = 'Haven Resource 1'
        sig = buf.get_bytes(len(canon_sig))
        # NOTE(review): this compares get_bytes() output against a str
        # literal — on Python 3 that is always unequal if get_bytes returns
        # bytes. Presumably MessageBuf returns str here (or this targets
        # Python 2); confirm against messagebuf.MessageBuf.
        if sig != canon_sig:
            raise ResException('Wrong signature')
        ver = buf.get_uint16()
        if ver != self.resver:
            raise ResException('Wrong version')
        self.layers = []
        # Remaining payload is a sequence of (type, length, data) layers.
        while not buf.eom():
            layer_type = buf.get_string()
            layer_len = buf.get_int32()
            layer_data = buf.get_bytes(layer_len)
            self.layers.append(ResLayer(layer_type, layer_data))
class ResLoader(object):
    """Fetches resources by id, with an on-disk cache beside this module."""
    # Maps resource id -> {'resname': ..., 'resver': ...}; filled via add_map.
    res_map = {}
    @staticmethod
    def get(resid):
        """Return the Resource for *resid*, from the disk cache or the server.

        Raises ResException when no mapping is registered, the download
        fails, or the cache directory cannot be created.
        """
        resinfo = ResLoader.get_map(resid)
        if resinfo is None:
            raise ResException('No mapping found')
        resname = resinfo['resname']
        resver = resinfo['resver']
        # Fast path: a previously cached copy on disk (fall through to the
        # network on any failure, including signature/version mismatch).
        try:
            with open(ResLoader.__get_res_path(resname, resver), 'rb') as f:
                return Resource(resname, resver, f.read())
        except Exception:
            pass
        # Cache miss: download from the official resource server.
        r = requests.get('http://game.havenandhearth.com/hres/' + resname + '.res')
        if r.status_code != requests.codes.ok:
            raise ResException('Unable to fetch resource')
        res = Resource(resname, resver, r.content)
        # Create intermediate directories for slash-separated resource names.
        try:
            os.makedirs(resname[:resname.rfind('/') + 1])
        except OSError as exc:
            if exc.errno != errno.EEXIST:
                raise ResException('Unable to create directories')
            pass
        # Persist the downloaded bytes for next time.
        with open(ResLoader.__get_res_path(resname, resver), 'wb') as f:
            f.write(r.content)
        return res
    @staticmethod
    def add_map(resid, resname, resver):
        """Register the name/version pair used to resolve *resid*."""
        ResLoader.res_map[resid] = {
            'resname': resname,
            'resver': resver
        }
    @staticmethod
    def get_map(resid):
        """Return the mapping dict for *resid*, or None if unregistered."""
        return ResLoader.res_map.get(resid)
    @staticmethod
    def __get_res_path(resname, resver):
        # Cache file lives next to this module as '<resname>_<resver>.res'.
        program_path = os.path.dirname(os.path.abspath(__file__))
        return os.path.join(program_path, '{}_{}.res'.format(resname, resver))
|
98841
|
import sys
from benchmark_paths import get_result_path
from benchmark_tools import run
# Benchmark driver: run the "edits" benchmark once without threading and
# once per thread count in ``num_threads``.
assert len(sys.argv) == 3, "Usage: python script.py scene_name scene_depth"
scene = sys.argv[1]
scene_depth = int(sys.argv[2])
replay_name = "small_edits"
num_threads = ["2", "4", "6"]
base_defines = [
    ("SCENE", "\"{}\"".format(scene)),
    ("SCENE_DEPTH", "{}".format(scene_depth)),
    ("REPLAY_NAME", "\"{}\"".format(replay_name)),
    ("USE_BLOOM_FILTER", "0"),
    ("EDITS_ENABLE_COLORS", "0"),
    ("EDITS_COUNTERS", "1"),
]
path = get_result_path("edits")
# Baseline: threading disabled.
unthreaded = base_defines + [
    ("THREADED_EDITS", "0")
]
run(unthreaded, "scene={}_depth={}_nothread".format(scene, scene_depth), path)
# Threaded runs. The original iterated indices with range(len(...)) and
# bound an unused loop variable; iterate the thread counts directly.
for nt in num_threads:
    threaded = base_defines + [
        ("THREADED_EDITS", "1"),
        ("NUM_THREADS", nt)
    ]
    run(threaded, "scene={}_depth={}_thread{}".format(scene, scene_depth, nt), path)
|
98972
|
from functools import wraps
import inspect
from typing import Any, Callable, Union
from requests.adapters import Response
from mstrio import config
from mstrio.api.exceptions import MstrException, PartialSuccess, Success
from mstrio.utils.helper import get_default_args_from_func, response_handler
def get_args_and_bind_values(func: Callable[[Any], Any], *args, **kwargs):
    """Map *args*/*kwargs* onto *func*'s parameter names.

    Returns the ``arguments`` mapping of the bound signature, i.e. parameter
    name -> supplied value (unsupplied defaults are not filled in).
    """
    bound = inspect.signature(func).bind(*args, **kwargs)
    return bound.arguments
class ErrorHandler:
    """Decorator that standardizes error reporting in API wrapper functions.

    It replaces the boilerplate::

        if not response.ok:
            if error_msg is None:
                error_msg = f'Error deleting Datasource Login with ID {id}'
            response_handler(response, error_msg)
        return response

    with::

        @ErrorHandler(err_msg='Error deleting Datasource Login with ID {id}')
        def func(connection, id):
            ...

    Strings in curly braces are substituted with the values of the
    decorated function's arguments of the same name.

    Attributes:
        err_msg(str): error message to be displayed in case of error
    """

    def __init__(self, err_msg: str):
        self._err_msg = err_msg

    def __call__(self, func: Callable[[Any], Any]):
        @wraps(func)
        def inner(*args, **kwargs):
            response = func(*args, **kwargs)
            # A truthy caller-supplied error_msg overrides the template.
            error_msg = kwargs.get("error_msg") or self._err_msg
            if not response.ok:
                handler_kwargs = self._get_resp_handler_kwargs(kwargs)
                error_msg = self._replace_with_values(error_msg, func, *args, **kwargs)
                response_handler(response, error_msg, **handler_kwargs)
            return response
        return inner

    @staticmethod
    def _replace_with_values(err_msg: str, func: Callable[[Any], Any], *args, **kwargs):
        """Substitute ``{name}`` placeholders with the bound argument values."""
        bound = get_args_and_bind_values(func, *args, **kwargs)
        for name in bound:
            err_msg = err_msg.replace(f'{{{name}}}', str(bound[name]))
        return err_msg

    @staticmethod
    def _get_resp_handler_kwargs(decorated_func_kwargs):
        """Return response_handler's defaults, overridden by caller kwargs."""
        defaults = get_default_args_from_func(response_handler)
        return {
            name: decorated_func_kwargs.get(name, default)
            for name, default in defaults.items()
        }
def bulk_operation_response_handler(
        response: Response,
        unpack_value: str = None) -> Union[PartialSuccess, Success, MstrException]:
    """Handle partial success and other statuses from bulk operation."""
    response_body = response.json()
    # Optionally unwrap the interesting part of a successful payload.
    if response.ok and unpack_value:
        response_body = response_body[unpack_value]
    # 200 -> full success, 207 -> partial success, anything else -> error.
    if response.status_code == 200:
        err = Success(response_body)
    elif response.status_code == 207:
        err = PartialSuccess(response_body)
    else:
        err = MstrException(response_body)
    if config.verbose:
        print(err)
    return err
|
98973
|
#!python
import os, sys, string, time, BaseHTTPServer, getopt, re, subprocess, webbrowser
from operator import itemgetter
from utils import *
from preprocess import Preprocess
from assemble import Assemble
sys.path.append(INITIAL_UTILS)
from ruffus import *
# Module-level pipeline state, populated by init().
_readlibs = []
_skipsteps = []
_settings = Settings()
_asm = None
_mapper = "bowtie"


def init(reads, skipsteps, asm, mapper):
    """Initialize the module-level pipeline state.

    BUG FIX: ``_mapper`` was missing from the ``global`` declarations, so
    assigning it only created a local variable and the module default was
    never replaced.
    """
    global _readlibs
    global _asm
    global _skipsteps
    global _mapper
    _mapper = mapper
    _readlibs = reads
    _skipsteps = skipsteps
    _asm = asm
@files("%s/Assemble/out/%s.bout"%(_settings.rundir,_settings.PREFIX))
#@posttask(create_symlink,touch_file("completed.flag"))
@follows(MapReads)
def CalcDist(input,output):
    # Ruffus task: consumes the assembler's .bout mapping after MapReads.
    # Skippable via the (case-varying) "CalcDist" entry in _skipsteps.
    if "CalcDist" in _skipsteps or "calcdist" in _skipsteps:
        return 0
    #given read pairs mapped to contigs, calc insert length
    # NOTE(review): no further statements are visible here — the task body
    # appears truncated in this copy of the file; confirm upstream.
|
98981
|
import torch
import torch.fx as fx
from torch.utils._pytree import tree_flatten
aten = torch.ops.aten

# Ops that produce randomness: two calls with identical arguments are not
# interchangeable, so CSE must never merge them.
rand_ops = [aten.dropout, aten._fused_dropout, aten._standard_gamma,
            aten.bernoulli, aten.multinomial, aten.native_dropout,
            aten.normal, aten.poisson, aten.binomial, aten.rrelu,
            aten.rand_like, aten.rand, aten.randint, aten.randn, aten.randperm]


def fx_graph_cse(fx_g: torch.fx.graph.Graph):
    """Return a new copy of *fx_g* with common subexpression elimination.

    Two call nodes are merged when they share the same target and the same
    (env-remapped) args/kwargs. Placeholder, output, and get_attr nodes,
    as well as random ops, are always copied verbatim.
    """
    out_graph = fx.Graph()
    env = {}        # old-graph node -> corresponding node in out_graph
    canonical = {}  # hash key -> canonical new-graph node
    tokens = {}     # hash key -> full token, guards against hash collisions

    for node in fx_g.nodes:
        if node.op in ('placeholder', 'output', 'get_attr') or node.target in rand_ops:
            # Structural nodes and random ops are never deduplicated.
            env[node] = out_graph.node_copy(node, lambda n: env[n])
            continue

        # Here node.op == 'call_function' ('call_module'/'call_method' are
        # not expected in these graphs).
        def remap(tree):
            # Flatten nested args and rewrite old-graph nodes to their new
            # counterparts; the spec distinguishes different nestings.
            flat, spec = tree_flatten(tree)
            flat = [env[leaf] if isinstance(leaf, torch.fx.node.Node) and leaf in env else leaf
                    for leaf in flat]
            return tuple(flat), spec

        args, args_spec = remap(node.args)
        kwargs, kwargs_spec = remap(node.kwargs)
        token = {"target": node.target, "args": args, "args_spec": args_spec,
                 "kwargs": kwargs, "kwargs_spec": kwargs_spec}
        # Specs are not hashable, so the hash key covers only the target and
        # substituted args; ``tokens`` provides the exact equality check.
        key = (node.target, hash((args, kwargs)))

        if key in canonical and tokens[key] == token:
            # True duplicate: alias it to the canonical node.
            env[node] = canonical[key]
            continue

        new_node = out_graph.node_copy(node, lambda n: env[n])
        env[node] = new_node
        if key not in canonical:
            canonical[key] = new_node
            tokens[key] = token
    return out_graph
|
99031
|
from rpython.jit.backend.llsupport.test.ztranslation_test import TranslationTestCallAssembler
class TestTranslationCallAssemblerPPC(TranslationTestCallAssembler):
    # PPC instantiation of the shared call-assembler translation test; all
    # behaviour is inherited from TranslationTestCallAssembler.
    pass
|
99034
|
from __future__ import division, print_function
import CoolProp
from CoolProp import unit_systems_constants
from CoolProp.CoolProp import Props, get_REFPROPname, IsFluidType, set_standard_unit_system
import CoolProp.CoolProp as CP
import numpy as np
# Backend modes to exercise; 'pseudo-pure' and 'REFPROP' are currently disabled.
modes = []
modes.append('pure')
#modes.append('pseudo-pure')
# Check if REFPROP is supported, the Props call should work without throwing exception if it is supported
## try:
##     Props('D','T',300,'Q',1,'REFPROP-Propane')
##     modes.append('REFPROP')
## except ValueError:
##     pass
# Property pairs accepted as inputs inside the two-phase dome.
twophase_inputs = [('T','D'),('T','Q'),('P','Q'),('P','H'),('P','S'),('P','D'),('T','S'),('H','S')] #
# Property pairs valid for single-phase state points.
singlephase_inputs = [('T','D'),('T','P'),('P','H'),('P','S'),('P','D'),('H','S'),('T','S')]
# Output keys exercised by the (disabled) single-phase consistency checks.
singlephase_outputs = ['T','P','H','S','A','O','C','G','V','L','C0','U']
## def test_subcrit_singlephase_consistency():
## for Fluid in sorted(CoolProp.__fluids__):
## T = (Props(Fluid,'Tmin')+Props(Fluid,'Tcrit'))/2.0
## for mode in modes:
## rhoL = Props('D','T',T,'Q',0,Fluid)
## rhoV = Props('D','T',T,'Q',1,Fluid)
## for rho in [rhoL+0.1, rhoV*0.9]:
## for inputs in singlephase_inputs:
## for unit_system in ['SI','kSI']:
## yield check_consistency,Fluid,mode,unit_system,T,rho,inputs
def test_subcrit_twophase_consistency():
    """Nose generator: yield a consistency check for every fluid, backend
    mode, temperature, vapour quality, input pair and unit system inside
    the two-phase region."""
    for fluid in reversed(sorted(CoolProp.__fluids__)):
        t_min = Props(fluid, 'Tmin')
        t_crit = Props(fluid, 'Tcrit')
        # Near the triple point, mid-range, and just below critical.
        for T in (t_min + 1, (t_min + t_crit) / 2.0, 0.95 * t_crit):
            for mode in modes:
                rho_liq = Props('D', 'T', T, 'Q', 0, fluid)
                rho_vap = Props('D', 'T', T, 'Q', 1, fluid)
                for quality in (0.0, 0.5, 1.0):
                    # Two-phase density from the quality-weighted specific volumes.
                    rho = 1 / ((1 - quality) / rho_liq + quality / rho_vap)
                    for inputs in twophase_inputs:
                        for unit_system in ('kSI', 'SI'):
                            yield check_consistency, fluid, mode, unit_system, T, rho, inputs
def check_consistency(Fluid,mode,unit_system,T,rho,inputs):
    """Verify Props round-trips a (T, rho) state through the given input pair.

    The input pair (e.g. ('P','Q')) is evaluated from (T, rho), then fed back
    to recover T and rho; the pair must agree within loose tolerances.
    NOTE(review): switching the standard unit system is a global side effect
    shared by all subsequent Props calls.
    """
    if unit_system == 'SI':
        set_standard_unit_system(unit_systems_constants.UNIT_SYSTEM_SI)
    elif unit_system == 'kSI':
        set_standard_unit_system(unit_systems_constants.UNIT_SYSTEM_KSI)
    else:
        raise ValueError('invalid unit_system:'+str(unit_system))
    # Skip fluids with no REFPROP equivalent, and non-pure fluids in 'pure' mode.
    if get_REFPROPname(Fluid) == 'N/A':
        return
    if mode == 'REFPROP':
        Fluid = 'REFPROP-' + get_REFPROPname(Fluid)
    if mode == 'pure' and not IsFluidType(Fluid,'PureFluid'):
        return
    # Evaluate the inputs; if inputs is ('T','P'), calculate the temperature and the pressure
    Input1 = Props(inputs[0],'T',T,'D',rho,Fluid)
    Input2 = Props(inputs[1],'T',T,'D',rho,Fluid)
    # Evaluate using the inputs given --> back to T,rho
    TEOS = Props('T',inputs[0],Input1,inputs[1],Input2,Fluid)
    DEOS = Props('D',inputs[0],Input1,inputs[1],Input2,Fluid)
    print('T',inputs[0],Input1,inputs[1],Input2,Fluid)
    # Check they are consistent: 0.1 K on temperature, 5% on density.
    if abs(TEOS -T) > 1e-1 or abs(DEOS/rho-1) > 0.05:
        raise AssertionError("{T:g} K {D:g} kg/m^3 inputs: \"D\",'{ins1:s}',{in1:.12g},'{ins2:s}',{in2:.12g},\"{fluid:s}\" || T: {TEOS:g} D: {DEOS:g}".format(T = T,
                                                                                                                                                            D = rho,
                                                                                                                                                            TEOS = TEOS,
                                                                                                                                                            DEOS = DEOS,
                                                                                                                                                            inputs = str(inputs),
                                                                                                                                                            in1 = Input1,
                                                                                                                                                            in2 = Input2,
                                                                                                                                                            ins1 = inputs[0],
                                                                                                                                                            ins2 = inputs[1],
                                                                                                                                                            fluid = Fluid)
                             )
if __name__=='__main__':
    # Run this module's generator tests under nose when executed directly.
    import nose
    nose.runmodule()
|
99079
|
import numpy as np
import matplotlib.pyplot as plt
import time
def completeRank1Matrix(observations, mask, PLOT=False):
    """Complete a noise-free rank-1 matrix from partial observations.

    observations and mask are 2D numpy arrays of the same shape: observations
    holds the known entry values and mask is a boolean array marking which
    entries are observed.  Both arrays are modified in place and also returned
    as [observations, mask].  If PLOT is true, the progress is animated with
    matplotlib (imported at module level).

    A missing entry (r, c) is solvable when the bipartite row/column graph has
    a length-3 observed path between row r and column c; the rank-1 identity
    M[r, c] = M[i, c] * M[r, j] / M[i, j] then fills it in.
    """
    if PLOT:
        f, (ax1, ax2) = plt.subplots(2, sharex=True, sharey=True)
        f.show()
    done = False
    while not done:
        # Entries reachable by a length-3 observed path but not yet observed.
        maskInt = mask.astype(int)
        Q = np.logical_and(np.logical_not(mask),
                           np.greater(np.dot(np.dot(maskInt, np.transpose(maskInt)), maskInt), 0))
        if not np.any(Q):
            done = True
            continue
        # For each solvable entry, find a concrete length-3 path and fill it.
        for fillPt in np.argwhere(Q):
            # BUG FIX: use plain ints for indexing; the original indexed with
            # size-1 ndarrays, and assigning a size-1 array to a scalar element
            # is deprecated (an error on modern NumPy).
            r, c = int(fillPt[0]), int(fillPt[1])
            # Walk one observed edge from the row and one from the column,
            # then look for the closing edge between them.
            rowConnected = np.flatnonzero(mask[r, :])
            columnConnected = np.flatnonzero(mask[:, c])
            pathFound = False
            for j in rowConnected:
                for i in columnConnected:
                    if mask[i, j]:
                        pathFound = True
                        break
                if pathFound:
                    break
            # Q guarantees such a path exists; (i, c), (r, j), (i, j) observed.
            assert pathFound and mask[i, c] and mask[r, j] and mask[i, j] and not mask[r, c]
            # Rank-1 cross-ratio: the 2x2 minor on rows (r, i), cols (c, j) is singular.
            observations[r, c] = observations[i, c] * observations[r, j] / observations[i, j]
            mask[r, c] = True
            if PLOT:
                ax1.imshow(observations, interpolation="nearest")
                ax2.imshow(mask, interpolation="nearest")
                f.canvas.draw()
                time.sleep(0.5)
    # BUG FIX: only close the figure when one was created; the original called
    # plt.close(f) unconditionally, raising NameError whenever PLOT was False.
    if PLOT:
        plt.close(f)
    return [observations, mask]
|
99106
|
import asyncio
from asynctest import TestCase, mock
from aiohttp import client_exceptions, WSMsgType
from aiocometd.transports.websocket import WebSocketTransport, WebSocketFactory
from aiocometd.constants import ConnectionType
from aiocometd.exceptions import TransportConnectionClosed, TransportError
class TestWebSocketFactory(TestCase):
    """Unit tests for WebSocketFactory: lazy creation, reuse and teardown of
    a websocket obtained from an aiohttp session's ws_connect context."""
    def setUp(self):
        # Fresh factory around a mocked aiohttp session for every test.
        self.session = mock.CoroutineMock()
        self.url = "http://example.com/"
        self.factory = WebSocketFactory(self.session)
    async def test_enter(self):
        """_enter opens ws_connect, stores the context and returns the socket."""
        socket = object()
        context = mock.MagicMock()
        context.__aenter__ = mock.CoroutineMock(return_value=socket)
        self.session.ws_connect.return_value = context
        args = [object()]
        kwargs = {"key": "value"}
        result = await self.factory._enter(*args, **kwargs)
        self.session.ws_connect.assert_called_with(*args, **kwargs)
        self.assertEqual(self.factory._context, context)
        self.assertEqual(result, socket)
    async def test_exit(self):
        """_exit leaves the stored context and clears context and socket."""
        socket = object()
        context = mock.MagicMock()
        context.__aexit__ = mock.CoroutineMock(return_value=socket)
        self.factory._context = context
        self.factory._socket = socket
        await self.factory._exit()
        context.__aexit__.assert_called()
        self.assertIsNone(self.factory._context)
        self.assertIsNone(self.factory._socket)
    async def test_exit_none_context(self):
        """_exit is a no-op when no context was ever entered."""
        self.factory._context = None
        await self.factory._exit()
    async def test_close(self):
        """close delegates to _exit."""
        self.factory._exit = mock.CoroutineMock()
        await self.factory.close()
        self.factory._exit.assert_called()
    async def test_close_supresses_errors(self):
        """close swallows exceptions raised during teardown."""
        self.factory._exit = mock.CoroutineMock(side_effect=AttributeError())
        await self.factory.close()
        self.factory._exit.assert_called()
    async def test_call_socket_creates_socket(self):
        """Calling the factory with no cached socket creates one via _enter."""
        self.factory._enter = mock.CoroutineMock()
        args = [object()]
        kwargs = {"key": "value"}
        await self.factory(*args, **kwargs)
        self.factory._enter.assert_called_with(*args, **kwargs)
        self.assertEqual(self.factory._socket,
                         self.factory._enter.return_value)
    async def test_call_socket_returns_open_socket(self):
        """Calling the factory reuses a cached socket that is still open."""
        self.factory._enter = mock.CoroutineMock()
        socket = mock.MagicMock()
        socket.closed = False
        self.factory._socket = socket
        result = await self.factory()
        self.assertEqual(result, socket)
        self.factory._enter.assert_not_called()
    async def test_call_socket_creates_new_if_closed(self):
        """Calling the factory discards a closed cached socket via _exit."""
        self.factory._exit = mock.CoroutineMock()
        socket = mock.MagicMock()
        socket.closed = True
        self.factory._socket = socket
        await self.factory()
        self.factory._exit.assert_called()
class TestWebSocketTransport(TestCase):
    """Unit tests for WebSocketTransport: socket acquisition, payload
    send/receive plumbing, pending-exchange bookkeeping and shutdown.
    NOTE: self.loop is provided by the asynctest TestCase base class."""
    def setUp(self):
        # Transport under test with a dummy session; queue/loop unused here.
        self.http_session = object()
        self.transport = WebSocketTransport(url="example.com/cometd",
                                            incoming_queue=None,
                                            http_session=self.http_session,
                                            loop=None)
    def test_connection_type(self):
        """The transport advertises the websocket connection type."""
        self.assertEqual(self.transport.connection_type,
                         ConnectionType.WEBSOCKET)
    async def test_get_socket(self):
        """_get_socket asks the factory with url, ssl, headers and timeouts."""
        expected_socket = object()
        self.transport._socket_factory = mock.CoroutineMock(
            return_value=expected_socket
        )
        headers = object()
        result = await self.transport._get_socket(headers)
        self.assertIs(result, expected_socket)
        self.transport._socket_factory.assert_called_with(
            self.transport._url,
            ssl=self.transport.ssl,
            headers=headers,
            receive_timeout=self.transport.request_timeout,
            autoping=True
        )
    @mock.patch("aiocometd.transports.websocket.asyncio")
    async def test_close(self, asyncio_obj):
        """close cancels a pending receive task and closes the factories."""
        self.transport._socket_factory_short = mock.MagicMock()
        self.transport._socket_factory_short.close = mock.CoroutineMock()
        self.transport._socket_factory = mock.MagicMock()
        self.transport._socket_factory.close = mock.CoroutineMock()
        self.transport._close_http_session = mock.CoroutineMock()
        self.transport._receive_task = mock.MagicMock()
        self.transport._receive_task.done.return_value = False
        asyncio_obj.wait = mock.CoroutineMock()
        await self.transport.close()
        self.transport._receive_task.cancel.assert_called()
        asyncio_obj.wait.assert_called_with([self.transport._receive_task])
        self.transport._socket_factory.close.assert_called()
    @mock.patch("aiocometd.transports.websocket.asyncio")
    async def test_close_on_done_receive_task(self, asyncio_obj):
        """close does not cancel a receive task that already finished."""
        self.transport._socket_factory_short = mock.MagicMock()
        self.transport._socket_factory_short.close = mock.CoroutineMock()
        self.transport._socket_factory = mock.MagicMock()
        self.transport._socket_factory.close = mock.CoroutineMock()
        self.transport._close_http_session = mock.CoroutineMock()
        self.transport._receive_task = mock.MagicMock()
        self.transport._receive_task.done.return_value = True
        asyncio_obj.wait = mock.CoroutineMock()
        await self.transport.close()
        self.transport._receive_task.cancel.assert_not_called()
        asyncio_obj.wait.assert_not_called()
        self.transport._socket_factory.close.assert_called()
    @mock.patch("aiocometd.transports.websocket.asyncio")
    async def test_close_on_no_receive_task(self, asyncio_obj):
        """close works when no receive task was ever started."""
        self.transport._socket_factory_short = mock.MagicMock()
        self.transport._socket_factory_short.close = mock.CoroutineMock()
        self.transport._socket_factory = mock.MagicMock()
        self.transport._socket_factory.close = mock.CoroutineMock()
        self.transport._close_http_session = mock.CoroutineMock()
        self.transport._receive_task = None
        await self.transport.close()
        self.transport._socket_factory.close.assert_called()
    async def test_send_socket_payload(self):
        """_send_socket_payload sends JSON and awaits the exchange future."""
        payload = object()
        socket = mock.MagicMock()
        socket.send_json = mock.CoroutineMock()
        expected_result = object()
        future = asyncio.Future(loop=self.loop)
        future.set_result(expected_result)
        exchange_result = future
        self.transport._create_exhange_future = mock.MagicMock(
            return_value=exchange_result
        )
        self.transport._start_receive_task = mock.MagicMock()
        result = await self.transport._send_socket_payload(socket, payload)
        self.transport._create_exhange_future.assert_called_with(payload)
        socket.send_json.assert_called_with(payload,
                                            dumps=self.transport._json_dumps)
        self.transport._start_receive_task.assert_called()
        self.assertEqual(result, expected_result)
    async def test_send_socket_payload_creates_receive_task(self):
        """NOTE(review): byte-for-byte identical to test_send_socket_payload
        above; it does not assert task *creation* specifically — likely an
        unfinished copy. Confirm intent before relying on its name."""
        payload = object()
        socket = mock.MagicMock()
        socket.send_json = mock.CoroutineMock()
        expected_result = object()
        future = asyncio.Future(loop=self.loop)
        future.set_result(expected_result)
        exchange_result = future
        self.transport._create_exhange_future = mock.MagicMock(
            return_value=exchange_result
        )
        self.transport._start_receive_task = mock.MagicMock()
        result = await self.transport._send_socket_payload(socket, payload)
        self.transport._create_exhange_future.assert_called_with(payload)
        socket.send_json.assert_called_with(payload,
                                            dumps=self.transport._json_dumps)
        self.transport._start_receive_task.assert_called()
        self.assertEqual(result, expected_result)
    async def test_send_socket_payload_on_send_error(self):
        """A send_json failure propagates and fails pending exchanges."""
        payload = [{"id": 0}]
        socket = mock.MagicMock()
        error = ValueError()
        socket.send_json = mock.CoroutineMock(side_effect=error)
        future = asyncio.Future(loop=self.loop)
        exchange_result = asyncio.Future(loop=self.loop)
        exchange_result.set_result(future)
        self.transport._create_exhange_future = mock.MagicMock(
            return_value=exchange_result
        )
        self.transport._start_receive_task = mock.MagicMock()
        self.transport._set_exchange_errors = mock.MagicMock()
        with self.assertRaises(ValueError):
            await self.transport._send_socket_payload(socket, payload)
        self.transport._create_exhange_future.assert_called_with(payload)
        socket.send_json.assert_called_with(payload,
                                            dumps=self.transport._json_dumps)
        self.transport._set_exchange_errors.assert_called_with(error)
        self.transport._start_receive_task.assert_not_called()
    def test_start_receive_task_if_doesnt_exists(self):
        """_start_receive_task creates and registers the task once."""
        socket = object()
        receive_task = mock.MagicMock()
        self.transport._loop = mock.MagicMock()
        self.transport._loop.create_task = mock.MagicMock(
            return_value=receive_task
        )
        self.transport._receive = mock.MagicMock()
        self.transport._receive_task = None
        self.transport._start_receive_task(socket)
        self.transport._loop.create_task.assert_called_with(
            self.transport._receive.return_value
        )
        self.transport._receive.assert_called_with(socket)
        receive_task.add_done_callback.assert_called_with(
            self.transport._receive_done
        )
        self.assertEqual(self.transport._receive_task, receive_task)
    def test_start_receive_task_if_exists(self):
        """_start_receive_task leaves an existing task untouched."""
        socket = object()
        existing_receive_task = object()
        receive_task = mock.MagicMock()
        self.transport._loop = mock.MagicMock()
        self.transport._loop.create_task = mock.MagicMock(
            return_value=receive_task
        )
        self.transport._receive = mock.MagicMock()
        self.transport._receive_task = existing_receive_task
        self.transport._start_receive_task(socket)
        self.transport._loop.create_task.assert_not_called()
        self.transport._receive.assert_not_called()
        receive_task.add_done_callback.assert_not_called()
        self.assertEqual(self.transport._receive_task, existing_receive_task)
    async def test_send_final_payload(self):
        """_send_final_payload acquires a socket and sends the payload."""
        payload = object()
        socket = object()
        response = object()
        self.transport._get_socket = mock.CoroutineMock(return_value=socket)
        self.transport._send_socket_payload = \
            mock.CoroutineMock(return_value=response)
        headers = object()
        result = await self.transport._send_final_payload(payload,
                                                          headers=headers)
        self.assertEqual(result, response)
        self.transport._get_socket.assert_called_with(headers)
        self.transport._send_socket_payload.assert_called_with(socket,
                                                               payload)
    async def test_send_final_payload_transport_error(self):
        """Client errors are logged and re-raised as TransportError."""
        payload = object()
        socket = object()
        exception = client_exceptions.ClientError("message")
        self.transport._get_socket = mock.CoroutineMock(return_value=socket)
        self.transport._send_socket_payload = \
            mock.CoroutineMock(side_effect=exception)
        headers = object()
        with self.assertLogs(WebSocketTransport.__module__,
                             level="DEBUG") as log:
            with self.assertRaisesRegex(TransportError, str(exception)):
                await self.transport._send_final_payload(payload,
                                                         headers=headers)
        log_message = "WARNING:{}:Failed to send payload, {}"\
            .format(WebSocketTransport.__module__, exception)
        self.assertEqual(log.output, [log_message])
        self.transport._get_socket.assert_called_with(headers)
        self.transport._send_socket_payload.assert_called_with(socket,
                                                               payload)
    async def test_send_final_payload_connection_closed_error(self):
        """A closed-connection error triggers one retry on a fresh socket."""
        payload = object()
        socket = object()
        socket2 = object()
        response = object()
        self.transport._get_socket = mock.CoroutineMock(
            side_effect=[socket, socket2])
        error = TransportConnectionClosed()
        self.transport._send_socket_payload = \
            mock.CoroutineMock(side_effect=[error, response])
        headers = object()
        result = await self.transport._send_final_payload(payload,
                                                          headers=headers)
        self.assertEqual(result, response)
        self.transport._get_socket.assert_has_calls([
            mock.call(headers), mock.call(headers)])
        self.transport._send_socket_payload.assert_has_calls([
            mock.call(socket, payload), mock.call(socket2, payload)
        ])
    async def test_send_final_payload_connection_timeout_error(self):
        """A timeout propagates and resets the socket."""
        payload = object()
        socket = object()
        self.transport._get_socket = mock.CoroutineMock(return_value=socket)
        error = asyncio.TimeoutError()
        self.transport._send_socket_payload = \
            mock.CoroutineMock(side_effect=error)
        headers = object()
        self.transport._reset_socket = mock.CoroutineMock()
        with self.assertRaises(asyncio.TimeoutError):
            await self.transport._send_final_payload(payload, headers=headers)
        self.transport._get_socket.assert_called_with(headers)
        self.transport._send_socket_payload.assert_called_with(socket, payload)
        self.transport._reset_socket.assert_called()
    @mock.patch("aiocometd.transports.websocket.WebSocketFactory")
    async def test_reset_socket(self, ws_factory_cls):
        """_reset_socket closes the old factory and installs a new one."""
        socket_factory = object()
        ws_factory_cls.return_value = socket_factory
        old_factory = mock.MagicMock()
        old_factory.close = mock.CoroutineMock()
        self.transport._socket_factory = old_factory
        await self.transport._reset_socket()
        old_factory.close.assert_called()
        self.assertIs(self.transport._socket_factory, socket_factory)
        ws_factory_cls.assert_called_with(self.http_session)
    @mock.patch("aiocometd.transports.websocket.asyncio.Future")
    async def test_create_exhange_future(self, future_cls):
        """_create_exhange_future registers a future keyed by message id."""
        future = object()
        future_cls.return_value = future
        payload = [{"id": 42}]
        result = self.transport._create_exhange_future(payload)
        self.assertEqual(result, future)
        future_cls.assert_called_with(loop=self.transport._loop)
        self.assertEqual(self.transport._pending_exhanges, {42: future})
    async def test_receive_done_with_result(self):
        """_receive_done logs the receive task's normal result."""
        future = mock.MagicMock()
        result = object()
        future.result.return_value = result
        self.transport._receive_task = object()
        with self.assertLogs("aiocometd.transports.websocket", "DEBUG") as log:
            self.transport._receive_done(future)
        self.transport._receive_task = None
        self.assertEqual(log.output, [
            f"DEBUG:aiocometd.transports.websocket:"
            f"Recevie task finished with: {result!r}"
        ])
    async def test_receive_done_with_error(self):
        """_receive_done logs the receive task's exception the same way."""
        future = mock.MagicMock()
        result = ValueError()
        future.result.side_effect = result
        self.transport._receive_task = object()
        with self.assertLogs("aiocometd.transports.websocket", "DEBUG") as log:
            self.transport._receive_done(future)
        self.transport._receive_task = None
        self.assertEqual(log.output, [
            f"DEBUG:aiocometd.transports.websocket:"
            f"Recevie task finished with: {result!r}"
        ])
    def test_set_exchange_errors(self):
        """_set_exchange_errors fails pending futures and empties the map."""
        error = ValueError()
        future = asyncio.Future(loop=self.loop)
        self.transport._pending_exhanges = {0: future}
        self.transport._set_exchange_errors(error)
        self.assertEqual(future.exception(), error)
        self.assertEqual(self.transport._pending_exhanges, dict())
    def test_set_exchange_errors_skips_completed_futures(self):
        """Already-completed futures keep their results on error."""
        error = ValueError()
        result = object()
        future = asyncio.Future(loop=self.loop)
        future.set_result(result)
        self.transport._pending_exhanges = {0: future}
        self.transport._set_exchange_errors(error)
        self.assertEqual(future.result(), result)
        self.assertEqual(self.transport._pending_exhanges, dict())
    def test_set_exchange_results(self):
        """_set_exchange_results resolves futures matched by message id."""
        future1 = asyncio.Future(loop=self.loop)
        future2 = asyncio.Future(loop=self.loop)
        future2_result = object()
        future2.set_result(future2_result)
        future3 = asyncio.Future(loop=self.loop)
        self.transport._pending_exhanges = {0: future1, 1: future2, 3: future3}
        payload = [{"id": 0}, {"id": 1}, {"id": 2}, {}]
        self.transport._set_exchange_results(payload)
        self.assertEqual(future1.result(), payload[0])
        self.assertEqual(future2.result(), future2_result)
        self.assertEqual(self.transport._pending_exhanges, {3: future3})
    async def test_receive(self):
        """_receive consumes messages until the task is cancelled."""
        response = mock.MagicMock()
        response_payload = object()
        response.json.return_value = response_payload
        socket = mock.MagicMock()
        socket.receive = mock.CoroutineMock(
            side_effect=[response, asyncio.CancelledError()]
        )
        self.transport._consume_payload = mock.CoroutineMock()
        self.transport._set_exchange_results = mock.CoroutineMock()
        with self.assertRaises(asyncio.CancelledError):
            await self.transport._receive(socket)
        socket.receive.assert_called()
        response.json.assert_called_with(loads=self.transport._json_loads)
        self.transport._consume_payload.assert_called_with(response_payload)
        self.transport._set_exchange_results.assert_called_with(
            response_payload
        )
    async def test_receive_socket_closed(self):
        """A CLOSE frame raises TransportConnectionClosed before parsing."""
        response = mock.MagicMock()
        response.type = WSMsgType.CLOSE
        response_payload = object()
        response.json.return_value = response_payload
        socket = mock.MagicMock()
        socket.receive = mock.CoroutineMock(
            return_value=response
        )
        self.transport._consume_payload = mock.CoroutineMock()
        self.transport._set_exchange_results = mock.CoroutineMock()
        with self.assertRaisesRegex(TransportConnectionClosed,
                                    "Received CLOSE message on "
                                    "the factory."):
            await self.transport._receive(socket)
        socket.receive.assert_called()
        response.json.assert_not_called()
        self.transport._consume_payload.assert_not_called()
        self.transport._set_exchange_results.assert_not_called()
    async def test_receive_parse_type_error(self):
        """Unparseable JSON raises TransportError and consumes nothing."""
        response = mock.MagicMock()
        response.json.side_effect = TypeError()
        socket = mock.MagicMock()
        socket.receive = mock.CoroutineMock(
            side_effect=[response, asyncio.CancelledError()]
        )
        self.transport._consume_payload = mock.CoroutineMock()
        self.transport._set_exchange_results = mock.CoroutineMock()
        with self.assertRaisesRegex(TransportError,
                                    "Received invalid response from the "
                                    "server."):
            await self.transport._receive(socket)
        socket.receive.assert_called()
        response.json.assert_called_with(loads=self.transport._json_loads)
        self.transport._consume_payload.assert_not_called()
        self.transport._set_exchange_results.assert_not_called()
    async def test_receive_any_error(self):
        """Unexpected receive errors propagate and fail pending exchanges."""
        response = mock.MagicMock()
        response_payload = object()
        response.json.return_value = response_payload
        socket = mock.MagicMock()
        error = ValueError()
        socket.receive = mock.CoroutineMock(
            side_effect=error
        )
        self.transport._consume_payload = mock.CoroutineMock()
        self.transport._set_exchange_results = mock.CoroutineMock()
        self.transport._set_exchange_errors = mock.CoroutineMock()
        with self.assertRaises(ValueError):
            await self.transport._receive(socket)
        socket.receive.assert_called()
        response.json.assert_not_called()
        self.transport._consume_payload.assert_not_called()
        self.transport._set_exchange_results.assert_not_called()
        self.transport._set_exchange_errors.assert_called_with(error)
|
99116
|
import click
import pyperclip
from ..utils.logging import logger
from ..utils.exceptions import handle_exceptions
from ..utils.load import get_default_code_name
import os
@click.command(short_help='Copies code from file to clipboard.')
@click.argument('code_file',
                type=click.Path(exists=True, dir_okay=False),
                required=False)
@handle_exceptions(BaseException)
def main(code_file):
    '''
    Copies code from CODE_FILE to the clipboard.
    If CODE_FILE is not passed, a default file is suggested based
    on current directory.
    The suggested file is the most recently edited code file
    recognized by termicoder.
    '''
    if code_file is None:
        # Suggest the most recently edited recognized code file, if it exists.
        default_file = get_default_code_name()
        if not os.path.exists(default_file):
            default_file = None
        code_file = click.prompt(
            "Please enter the file to copy",
            default=default_file,
            type=click.Path(readable=True, exists=True)
        )
    # BUG FIX: use a context manager so the file handle is closed promptly;
    # the original open(...).read() left the handle for the GC to collect.
    with open(code_file, 'r') as f:
        pyperclip.copy(f.read())
    logger.info("copied %s to clipboard" % code_file)
|
99139
|
from ..factory import Type
class inputMessageLocation(Type):
    """Schema type: a message payload carrying a location, optionally live
    for live_period seconds (fields are filled in by the Type machinery)."""
    location = None # type: "location"
    live_period = None # type: "int32"
|
99162
|
# Decode a sampled bitstream from packet.csv (one 0/1 sample per line,
# sampling_rate samples per symbol), Manchester-decode it, and print the
# recovered ASCII message.
sampling_rate = 31  # samples per transmitted symbol

# Collapse runs of identical samples into symbols: a value is accepted
# once it has been seen sampling_rate times in a row.
val_bins = []
count = 1
pre_val = 0
current = 0
# BUG FIX: the original opened packet.csv without ever closing it; the
# context manager guarantees the handle is released.
with open("./packet.csv", "r") as fp:
    for line in fp:
        pre_val = current
        current = int(line)
        if current == pre_val:
            count = count + 1
        else:
            count = 1
        if count == sampling_rate:
            val_bins.append(pre_val)
            count = 1
            pre_val = 0
            current = 0

# Manchester decode: a (1, 0) symbol pair is a 1 bit, (0, 1) is a 0 bit;
# other pairs (malformed) are dropped.
next_bins = []
for i in range(len(val_bins) // 2):
    b0 = val_bins[i * 2]
    b1 = val_bins[i * 2 + 1]
    if b0 == 1 and b1 == 0:
        next_bins.append(1)
    elif b0 == 0 and b1 == 1:
        next_bins.append(0)

# Drop the 3 leading sync bits, then pack the remaining bits MSB-first
# into bytes and print each byte as a character.
val_bins = next_bins[3:]
c = 0
for i, bit in enumerate(val_bins):
    c = (c << 1) | bit
    if i % 8 == 7:
        print(chr(c), end="")
        c = 0
print("")
|
99165
|
import sys
from oletools import olevba
class OfficeParser():
    """Extract and analyze VBA macros from an Office document via oletools."""

    def __init__(self, sample):
        # sample: path to the document to analyze.
        self.sample = sample
        self.results = {}

    def extract_macro(self):
        """Return {"analysis": ..., "code": ...} when macros are found,
        otherwise False.

        BUG FIX: the parser is now closed in a finally block, so it is no
        longer leaked when extraction or analysis raises (the original only
        closed it on the two normal return paths).
        """
        vba = olevba.VBA_Parser(self.sample)
        try:
            macro_code = ""
            if vba.detect_vba_macros():
                for (filename, stream_path, vba_filename, vba_code) in vba.extract_macros():
                    macro_code += olevba.filter_vba(vba_code)
                self.results["analysis"] = vba.analyze_macros()
                self.results["code"] = macro_code
                return self.results
            return False
        finally:
            vba.close()

    def analysis(self):
        """Convenience wrapper around extract_macro()."""
        return self.extract_macro()
if __name__ == '__main__':
    # CLI entry point: analyze the document given as the first argument.
    # Parenthesized single-argument print works under both Python 2 and 3
    # (the original used py2-only print statements).
    obj = OfficeParser(sys.argv[1])
    results = obj.analysis()
    # NOTE(review): if no macros are found, analysis() returns False and the
    # subscription below raises, exactly as the original did.
    for r in results["analysis"]:
        print(r)
    print("code: %s" % results["code"])
|
99179
|
from ScrollText import ScrollText
from Tkinter import *
from PrologFrame import PrologFrame
from Prolog import PrologException
from FastIndex import FastIndex
import ErrDialog
import re
import AnnotationColours
#import Colour
import parser
def startCompare(x, y):
    """cmp-style comparator for "line.col" Tk text indices stored at x[1]/y[1].

    Returns a negative/zero/positive int like old-style cmp: the line number
    is the primary key, the column the secondary key.
    """
    x_line, x_col = x[1].split('.')
    y_line, y_col = y[1].split('.')
    if x_line != y_line:
        return int(x_line) - int(y_line)
    return int(x_col) - int(y_col)
class SourceFrame(PrologFrame):
def __init__(self, master=None, text="", readonly=True, app=None):
PrologFrame.__init__(self, master=master, text=text,
readonly=readonly, app=app)
self.app = app
app.pref.register_listener('annotation highlights',
self.annotation_colours_changed)
self.colours_changed(app.colours.ann_colour)
app.pref.register_listener('filter highlights', self.colours_changed)
self.colours_changed(app.colours.fil_colour)
for fTag in ['dynamic', 'static']:
self.text.tag_bind(fTag, "<Motion>",
lambda _, fTag=fTag: self.mouse_over_filter_tag(fTag))
self.text.tag_bind(fTag, "<Leave>",
lambda _, fTag=fTag: self.mouse_leave_filter_tag(fTag))
self.text.tag_configure("unsafe", background="red")
self.text.tag_configure("hide_nf", background="yellow")
self.text.tag_configure("errorTag", background="red")
self.last_annotation = ""
self.annotation_tags = AnnotationColours.annotations
self.annotation_colour = self.app.colours.ann_colour
self.menu = {}
self.annotation_menu = {"unfold":"call", "call":"call", "memo":"call",
"rescall":"call", "ucall":"call",
"mcall":"call", "unknown":"call",
"semicall":"call", "online":"call",
"if":"if", "resif":"if", "semif":"if",
"logif":"logif", "reslogif":"logif",
"findall":"findall", "resfindall":"findall",
"resnot":"not", "not":"not",
";":"or", "resdisj":"or",
"pp_cll":"pp_cll",
"pp_mnf":"pp_mnf",
"time_out":"time_out",
"mnf":"mnf",
"when":"when", "reswhen":"when",
"semiwhen":"when",
"gxspec":"module", "gx":"module",
#"spec":"module",
}
self.text.bind("<Alt-n>", self.keyb_next_ann)
self.text.bind("<Alt-p>", self.keyb_prev_ann)
self.commands = []
if (sys.platform == "win32"):
Menu_Key = "<Button-3>"
else:
Menu_Key = "<Button-1>"
self.annotation_colours_changed(self.annotation_colour)
for tag in self.annotation_tags:
self.text.tag_bind(tag, "<Motion>", self.mouse_over_tag)
self.text.tag_bind(tag, "<Leave>", self.mouse_leave_tag)
self.text.tag_bind(tag, Menu_Key, self.mouse_click_tag)
#self.text.tag_bind("hide_nf", Menu_Key, self.hidenf_click)
self.text.tag_configure("unknown", background="black",
foreground="white")
#make call and rescall stand out as requested by mv
self.text.tag_configure("call", underline=True)
self.text.tag_configure("rescall", underline=True)
self.text.tag_configure("unsafe", background="red", foreground="white")
    def annotation_colours_changed(self, map):
        """Rebuild the per-group annotation context menus using the colour
        mapping *map* (annotation name -> colour string)."""
        # reset menus
        for m in ['call', 'if', 'logif', 'findall', 'not', 'or', 'pp_cll',
                  'pp_mnf', 'time_out', 'mnf', 'when', 'module']:
            self.menu[m] = Menu(self.app, tearoff=0)
        self.commands = []
        menus = {}
        # Tracks, per menu group, the index of the "Remove hide_nf" entry
        # so highlight_tag() can enable/disable it later.
        self.hide_nf_menu_pos = {}
        for tag in self.annotation_tags:
            if self.hide_nf_menu_pos.get(self.annotation_menu[tag]) == None:
                # start at 2 because item 0, there will be a separator
                self.hide_nf_menu_pos[self.annotation_menu[tag]] = 2
            else:
                self.hide_nf_menu_pos[self.annotation_menu[tag]] += 1
            # Default-arg binding captures the current tag for the callback.
            self.commands.append(lambda tag=tag: self.change_ann(tag))
            menus[self.annotation_menu[tag]] = 1
            self.menu[self.annotation_menu[tag]].add_command(label=tag,
                                   foreground=map[tag],
                                   command=self.commands[-1],
                                   underline=0)
        # STEVE : should perhaps to be restricted to only a few menus...
        for m in menus:
            menu = self.menu[m]
            menu.add_separator()
            menu.add_command(label="Remove hide_nf",
                             command=self.remove_hidenf)
        self.colours_changed(map)
    def new_hidenf(self):
        """Apply a hide_nf tag over the current selection, expanded to cover
        whole annotated regions; refuses selections that cross a clause head
        or contain no annotation."""
        sel = self.text.tag_ranges("sel")
        if sel != ():
            (start, stop) = sel
            #h2 = self.get_tag_position("head", stop)
            h2 = self.text.tag_prevrange("head", stop)
            if h2 == ():
                # attempting to annotate before any head tags, which means
                # nothing of use is being annotated!
                print "annotation is pointless as before all heads"
                return
            elif self.text.compare(h2[1], ">", start):
                print "annotation encompasses head"
                return
            # Snap the end of the region to the last annotation before stop.
            h1 = self.get_prev_ann(stop, self.annotation_tags)
            hidenf_stop = h1[2]
            # Snap the start: either inside a previous annotation, or at the
            # next annotation after the selection start.
            (_, s1, e1) = self.get_prev_ann(start, self.annotation_tags)
            if self.text.compare(start, ">=", s1) and \
               self.text.compare(start, "<", e1):
                hidenf_start = s1
            else:
                (_, hidenf_start, _) = self.get_next_ann(start, self.annotation_tags)
            if self.text.compare(hidenf_start, ">", hidenf_stop) or \
                   hidenf_start == 0.0:
                print "no clauses selected"
                return
            #print hidenf_start, hidenf_stop
            self.text.tag_add("hide_nf", hidenf_start, hidenf_stop)
            self.text.tag_remove("sel", start, stop)
            # NOTE(review): sets text.ann_changed, while change_ann() sets
            # self.ann_changed -- one of the two is probably wrong; confirm.
            self.text.ann_changed = True
    def remove_hidenf(self):
        """Remove the hide_nf region covering the currently selected annotation."""
        # should never be called if there is no hide_nf tag or error will occur
        (_, (start, _)) = self.selectedAnn
        (start, stop) = self.get_tag_position("hide_nf", start)
        self.text.tag_remove("hide_nf", start, stop)
    def change_ann(self, new_ann, selected=None):
        """Replace the annotation of *selected* (defaults to the annotation
        picked via the context menu) with *new_ann*."""
        if selected is None:
            (ann, (start, stop)) = self.selectedAnn
        else:
            (ann, (start, stop)) = selected
        if ann != new_ann:
            self.text.tag_remove(ann, start, stop)
            self.text.tag_add(new_ann, start, stop)
            # NOTE(review): sets self.ann_changed, while new_hidenf() sets
            # self.text.ann_changed -- confirm which attribute is consumed.
            self.ann_changed = True
    def keyb_change_ann(self,next=True):
        """Cycle the annotation under the cursor to the next/previous
        alternative within its menu group and report it on the status bar."""
        (ann,(start,end)) = self.get_annotation_at(index="insert")
        if ann in self.annotation_tags:
            group = self.annotation_menu[ann]
            # Collect all annotations that share this group.
            poss = []
            for i in self.annotation_menu:
                if self.annotation_menu[i] == group:
                    poss += [i]
            if next:
                next = self.get_next_from_list(ann,poss)
            else:
                next = self.get_prev_from_list(ann,poss)
            self.change_ann(next,(ann,(start,end)))
            self.app.status.message('help', next)
    def keyb_next_ann(self, unused):
        # Alt-n binding: cycle the annotation under the cursor forward.
        self.keyb_change_ann(next=True)
    def keyb_prev_ann(self, unused):
        # Alt-p binding: cycle the annotation under the cursor backward.
        self.keyb_change_ann(next=False)
def get_next_from_list(self, item, list):
for i in xrange(0, len(list)):
if list[i] == item:
if i < len(list) - 1:
return list[i + 1]
else:
return list[0]
#if not found just give first
return list[0]
def get_prev_from_list(self, item, list):
for i in xrange(0, len(list)):
if list[i] == item:
if i == 0:
return list[-1]
else:
return list[i - 1]
#if not found just give first
return list[0]
    def mouse_over_tag(self, event):
        # Hover over an annotation: highlight it without opening the menu.
        self.highlight_tag(event, False)
    def mouse_leave_tag(self, unused):
        # Pointer left the annotation: drop the highlight and status text.
        self.text.tag_remove("hilite", "1.0", "end")
        self.last_annotation = ""
        self.app.status.clear()
    def mouse_click_tag(self, event):
        # Click on an annotation: highlight it and pop up its context menu.
        self.highlight_tag(event, True)
    def hidenf_click(self, event):
        """Debug handler for clicks on hide_nf regions (binding is commented
        out in __init__); prints the annotation under the pointer."""
        #not used.
        # if want to remove hide_nf by clicking on just hide_nf annotated code
        # (eg no unfold) then add code here
        print "hidenf_click"
        (ann, (start, stop)) = self.get_annotation_at(event.x, event.y)
        print ann, start, stop
        return "break"
    def highlight_tag(self, event, show_menu):
        """Highlight the annotation under the mouse, report it in the status
        bar and, when `show_menu` is set, pop up the annotation's menu.

        The menu's hide_nf entry is disabled when no "hide_nf" range covers
        the annotation start.
        """
        (ann, (start, stop)) = self.get_annotation_at(event.x, event.y)
        # clear the previous hover highlight, if any
        if self.last_annotation != "":
            (s,e) = self.last_annotation
            self.text.tag_remove("hilite", s, e)
        self.text.tag_add("hilite", start, stop)
        self.last_annotation = (start, stop)
        # prefer showing an attached error message when one exists for start
        if self.ErrMsg != None and start in self.ErrMsg:
            self.app.status.set(self.ErrMsg[start] + ":::" + ann + " - " + str(start) + " -> " + str(stop))
        else:
            self.app.status.set(ann + " - " + str(start) + " -> " + str(stop))
        if show_menu:
            self.selectedAnn = (ann, (start, stop))
            menu = self.menu[self.annotation_menu[ann]]
            menu.tk_popup(event.x_root, event.y_root)
            # enable the hide_nf entry only if a hide_nf range covers start
            hide_nf = self.get_tag_position("hide_nf", start)
            state = NORMAL
            if hide_nf == ():
                state = DISABLED
            else:
                (hstart, hend) = hide_nf
                if self.text.compare(hend, "<", start):
                    state = DISABLED
            menu.entryconfig(self.hide_nf_menu_pos[self.annotation_menu[ann]],
                             state=state)
    def get_annotation_at(self, x=None, y=None,index=None):
        """Return (tag, (start, stop)) for the annotation at pixel (x, y),
        or at the given text `index` when one is supplied.

        Returns ("", (0.0, 0.0)) when no annotation tag covers the spot.
        """
        if index is None:
            index = self.text.index("@" + str(x) + "," + str(y))
        curann = self.text.tag_names(index)
        for ann in curann:
            # only report tags that are registered annotation tags
            if self.annotation_tags.count(ann) > 0:
                return (ann, self.get_tag_position(ann, index))
        return ("", (0.0, 0.0))
    def get_tag_position(self, ann, index):
        """Return the (start, stop) range of tag `ann` covering `index`.

        Looks one character past `index` so a range starting exactly at
        `index` is still found; returns () when no such range exists.
        """
        newindex = self.text.index(index + " + 1 char")
        return self.text.tag_prevrange(ann, newindex)
def mouse_over_filter_tag(self, filterTag):
self.app.status.set("Argument annotated as %s" % filterTag)
    def mouse_leave_filter_tag(self, unused):
        """Clear the status bar when the pointer leaves a filter tag."""
        self.app.status.clear()
def getHtmlTag(self, text, ann):
if text.tag_cget(ann,"underline") != "":
under = "text-decoration:underline;"
else:
under = ""
openTag = '<p class="code" style="color:%s;%s">'%(self.convertColour(text.tag_cget(ann,"foreground")),under)
closeTag = '</p>'
if ann in self.annotation_tags:
openTag+= '<a onmouseover="window.status=\'%s\';" onmouseout="window.status=\'\';" >' % ann
closeTag = '</a>' +closeTag
return (openTag,closeTag)
|
99185
|
import SCons.Builder
import os
import shutil
from subprocess import Popen
def nmAction(target, source, env):
    '''
    set up notmuch test db in target directory

    Regenerates test/mail/test_config from its template, wipes the old
    notmuch database, runs 'notmuch new' against the test maildir and
    finally writes a marker file to the build target.

    Fixes: file handles are now closed via context managers (the marker
    file was previously never closed), and print uses call syntax.
    '''
    config = os.path.abspath(os.path.join(os.path.curdir, 'test/mail/test_config'))
    env['ENV']['NOTMUCH_CONFIG'] = config
    # run notmuch with the test config, not the user's real one
    myenv = os.environ.copy()
    myenv['NOTMUCH_CONFIG'] = config
    # remove old db
    print("Remove test/mail/.notmuch..")
    shutil.rmtree('test/mail/test_mail/.notmuch', ignore_errors=True)
    # fill in the absolute maildir path while copying the config template
    with open('test/mail/test_config.template', 'r') as template:
        with open('test/mail/test_config', 'w') as out:
            for line in template:
                if line == 'path=\n':
                    out.write("path=" + os.path.abspath(os.path.join(os.path.curdir, 'test/mail/test_mail')) + "\n")
                else:
                    out.write(line)
    p = Popen("notmuch new", env=myenv, shell=True)
    p.wait()
    # marker file tells SCons the setup step succeeded
    with open(str(target[0]), 'w') as marker:
        marker.write("SETUP\n")
def nmActionString(target, source, env):
    '''
    Return output string which will be seen when setting up test db
    '''
    location = str(source[0])
    return 'Setting up test database in ' + location
def generate (env):
    """SCons tool entry point: register the NotmuchTestDb builder."""
    env['BUILDERS']['NotmuchTestDb'] = env.Builder(
        action = env.Action(nmAction, nmActionString),
        suffix='.setup')
# Warning class raised when the notmuch binary cannot be located.
# NOTE(review): only SCons.Builder is imported above -- confirm that
# SCons.Warnings is importable at this point, or import it explicitly.
class NotmuchNotFound (SCons.Warnings.Warning):
    pass
def _detect (env):
""" Try to detect notmuch """
# from http://www.scons.org/wiki/ToolsForFools
try:
return env['notmuch']
except KeyError:
pass
nm = env.WhereIs('notmuch')
if nm:
return nm
raise SCons.Errors.StopError(
NotmuchNotFound,
"Could not find notmuch binary")
return None
def exists (env):
    """SCons tool hook: report whether the notmuch binary is available."""
    return _detect (env)
|
99189
|
from pyxie.model.pynodes.values import ProfilePyNode
import pyxie.model.functions
def initialise_external_function_definitions():
    """Register the Arduino profile's external functions and types with
    pyxie's global profile function/type tables."""
    # Inside <Servo.h>
    function_calls = {
        "Servo": {
            "iterator": False,
            "return_ctype": "Servo",  # C type of the returned value
        },
        "Adafruit_NeoPixel": {
            "iterator": False,
            "return_ctype": "Adafruit_NeoPixel",  # C type of the returned value
        }
    }
    types = {
        "Servo": {},
        "Adafruit_NeoPixel": {}
    }
    # Update the actual profile functions/types
    for name, definition in function_calls.items():
        pyxie.model.functions.profile_funcs[name] = definition
    for name, definition in types.items():
        pyxie.model.functions.profile_types[name] = definition
def populate_profile_context(context):
    """Pre-load the compile context with the Arduino profile's built-in
    names: analog pins A0..A7 plus the common pin and NeoPixel constants.

    Bug fix: NEO_GRB and NEO_KHZ800 previously stored the node created
    for OUTPUT (``a_pin``) instead of their own nodes (``a_def``).
    """
    # analog input pins A0..A7
    for i in range(8):
        a_pin_name = "A" + str(i)
        a_pin = ProfilePyNode(a_pin_name, "integer")
        context.store(a_pin_name, a_pin)
    # digital pin level / mode constants
    a_pin = ProfilePyNode("HIGH", "integer")
    context.store("HIGH", a_pin)
    a_pin = ProfilePyNode("LOW", "integer")
    context.store("LOW", a_pin)
    a_pin = ProfilePyNode("INPUT", "integer")
    context.store("INPUT", a_pin)
    a_pin = ProfilePyNode("OUTPUT", "integer")
    context.store("OUTPUT", a_pin)
    # NeoPixel constants (store the node actually created for each name)
    a_def = ProfilePyNode("NEO_GRB", "integer")
    context.store("NEO_GRB", a_def)
    a_def = ProfilePyNode("NEO_KHZ800", "integer")
    context.store("NEO_KHZ800", a_def)
def initialise_profile(context):
    """Entry point: tag the context as the Arduino profile and install
    its built-in names and external function definitions."""
    context.tag = "PROFILE:arduino"
    populate_profile_context(context)
    initialise_external_function_definitions()
|
99190
|
"""
PySQL
<NAME>, 2021

Packaging script for the pysql-cli distribution.
"""
# Explicit imports instead of the previous `from setuptools import *`.
from setuptools import setup, find_packages
from os import path

this_dir = path.abspath(path.dirname(__file__))

# Long description for PyPI comes straight from the README.
with open(path.join(this_dir, "README.md"), encoding = "utf-8") as file:
    long_description = file.read()

# One requirement per line; strip the trailing newlines/whitespace that
# readlines() used to leave in each entry, and drop blank lines.
with open(path.join(this_dir, "requirements.txt"), encoding = "utf-8") as file:
    requirements = [line.strip() for line in file if line.strip()]

setup(
    name = "pysql-cli",
    version = "1.1.2",
    author = "<NAME>",
    author_email = "<EMAIL>",
    url = "https://github.com/Devansh3712/PySQL",
    description = "CLI for making MySQL queries easier",
    long_description = long_description,
    long_description_content_type = "text/markdown",
    license = "MIT",
    packages = find_packages(),
    include_package_data = True,
    entry_points = {
        "console_scripts": [
            "pysql=pysql.main:cli",
            "cpysql=pysql.main_c:cli"
        ]
    },
    classifiers = [
        "Programming Language :: Python :: 3",
        "License :: OSI Approved :: MIT License",
        "Operating System :: OS Independent",
    ],
    install_requires = requirements,
)
|
99256
|
from .parser import Parser
# This class was used in the paper for translating, all the translating logic is now implemented in Parser
# So this class is a wrapper for that one.
class Translator:
    """Thin wrapper kept for backward compatibility: all translating
    logic now lives in Parser (see the note above)."""
    def __init__(self, database):
        # Parser owns all database-aware translation state.
        self.parser = Parser(database)
    def translate(self, query):
        """Translate `query` by delegating to the underlying Parser."""
        return self.parser.parse(query)
|
99285
|
from typing import List, Tuple, Dict
from copy import deepcopy
from sqlite3 import Cursor
import os
from parsimonious import Grammar
from parsimonious.exceptions import ParseError
from allennlp.common.checks import ConfigurationError
from allennlp.semparse.contexts.sql_context_utils import SqlVisitor
from allennlp.semparse.contexts.sql_context_utils import format_grammar_string, initialize_valid_actions
from allennlp.data.dataset_readers.dataset_utils.text2sql_utils import read_dataset_schema
from text2sql.semparse.contexts.text2sql_table_context_v3 import GRAMMAR_DICTIONARY
from text2sql.semparse.contexts.text2sql_table_context_v3 import update_grammar_with_table_values
from text2sql.semparse.contexts.text2sql_table_context_v3 import update_grammar_with_tables
from text2sql.semparse.contexts.text2sql_table_context_v3 import update_grammar_with_global_values
from text2sql.semparse.contexts.text2sql_table_context_v3 import update_grammar_to_be_variable_free
from text2sql.semparse.contexts.text2sql_table_context_v3 import update_grammar_with_untyped_entities
from text2sql.semparse.contexts.text2sql_table_context_v3 import update_grammar_values_with_variables
from text2sql.semparse.contexts.text2sql_table_context_v3 import update_grammar_numbers_and_strings_with_variables, \
update_grammar_with_derived_tabs_and_cols
class AttnSupGrammarBasedWorld:
    """
    World representation for any of the Text2Sql datasets.
    Parameters
    ----------
    schema_path: ``str``
        A path to a schema file which we read into a dictionary
        representing the SQL tables in the dataset, the keys are the
        names of the tables that map to lists of the table's column names.
    cursor : ``Cursor``, optional (default = None)
        An optional cursor for a database, which is used to add
        database values to the grammar.
    use_prelinked_entities : ``bool``, (default = True)
        Whether or not to use the pre-linked entities from the text2sql data.
        We take this parameter here because it effects whether we need to add
        table values to the grammar.
    variable_free : ``bool``, optional (default = True)
        Denotes whether the data being parsed by the grammar is variable free.
        If it is, the grammar is modified to be less expressive by removing
        elements which are not necessary if the data is variable free.
    use_untyped_entities : ``bool``, optional (default = False)
        Whether or not to try to infer the types of prelinked variables.
        If not, they are added as untyped values to the grammar instead.
    """
    def __init__(self,
                 schema_path: str,
                 cursor: Cursor = None,
                 use_prelinked_entities: bool = True,
                 variable_free: bool = False,
                 use_untyped_entities: bool = True) -> None:
        self.cursor = cursor
        self.schema = read_dataset_schema(schema_path)
        self.columns = {column.name: column for table in self.schema.values() for column in table}
        self.dataset_name = os.path.basename(schema_path).split("-")[0]
        self.use_prelinked_entities = use_prelinked_entities
        self.variable_free = variable_free
        self.use_untyped_entities = use_untyped_entities
        # NOTE: This base dictionary should not be modified.
        self.base_grammar_dictionary = self._initialize_grammar_dictionary(deepcopy(GRAMMAR_DICTIONARY))
    def get_action_sequence_and_all_actions(self,
                                            query: List[str] = None,
                                            derived_cols: List[Tuple[str, str]] = None,
                                            derived_tables: List[str] = None,
                                            prelinked_entities: Dict[str, Dict[str, str]] = None) -> Tuple[List[str], List[str]]:  # pylint: disable=line-too-long
        """Build a query-specific grammar and parse ``query`` with it.

        Returns (action_sequence, sorted list of all actions); the action
        sequence is None when the query cannot be parsed.

        Fix: ``derived_cols``/``derived_tables`` previously used mutable
        ``[]`` defaults; they now default to None and are replaced with
        fresh lists per call (backward compatible for all callers).
        """
        derived_cols = [] if derived_cols is None else derived_cols
        derived_tables = [] if derived_tables is None else derived_tables
        grammar_with_context = deepcopy(self.base_grammar_dictionary)
        if not self.use_prelinked_entities and prelinked_entities is not None:
            raise ConfigurationError("The Text2SqlWorld was specified to not use prelinked "
                                     "entities, but prelinked entities were passed.")
        prelinked_entities = prelinked_entities or {}
        if self.use_untyped_entities:
            update_grammar_values_with_variables(grammar_with_context, prelinked_entities, self.dataset_name)
        else:
            update_grammar_numbers_and_strings_with_variables(grammar_with_context,
                                                              prelinked_entities,
                                                              self.columns)
        update_grammar_with_derived_tabs_and_cols(grammar_with_context, derived_tables, derived_cols)
        grammar = Grammar(format_grammar_string(grammar_with_context))
        valid_actions = initialize_valid_actions(grammar)
        all_actions = set()
        for action_list in valid_actions.values():
            all_actions.update(action_list)
        sorted_actions = sorted(all_actions)
        sql_visitor = SqlVisitor(grammar)
        try:
            action_sequence = sql_visitor.parse(" ".join(query)) if query else []
        except ParseError:
            action_sequence = None
        return action_sequence, sorted_actions
    @staticmethod
    def modify_alignment(action_sequence: List[str], alignment: List[str]) -> List[str]:
        """
        modifies alignment between input tokens to sql query tokens, to an alignment between
        input tokens and the action sequence that produces the same sql query
        :param action_sequence: list of strings that represent production rules
        :param alignment: tokens from the input, where the i'th token is alignment to the i'th sql query token when
         tokenized with text2sql.data.tokenizer.whitespace_tokenizers.StandardTokenizer
        :return: List[str], the new alignment of length len(action_sequence)
        """
        # Replay the derivation to recover the terminal string, remembering
        # which action produced each terminal token.
        query = []
        for action_index, action in enumerate(action_sequence):
            nonterminal, right_hand_side = action.split(' -> ')
            right_hand_side_tokens = right_hand_side[1:-1].split(', ')
            right_hand_side_tokens = [(t, action_index) for t in right_hand_side_tokens]
            if nonterminal == 'statement':
                query.extend(right_hand_side_tokens)
            else:
                for query_index, token_tuple in list(enumerate(query)):
                    token, _ = token_tuple
                    if token == nonterminal:
                        query = query[:query_index] + \
                                right_hand_side_tokens + \
                                query[query_index + 1:]
                        break
        # Map each whitespace token of the original query to the action that
        # produced it (-1 marks positions with no corresponding action).
        alignment_to_action_map = []
        for t, action_index in query:
            t = t.strip('"')
            if '.' in t and t != '.':  # TABLEalias0.COLUMN
                alignment_to_action_map.extend([-1, -1, action_index])
            elif t[0] == t[-1] == '\'':  # string value
                alignment_to_action_map.extend([-1, action_index, -1])
            elif t == 'YEAR ( CURDATE ( ))':  # special case
                t = t.replace(')', ' )')
                t_len = len(t.split()) - 1
                alignment_to_action_map.extend([action_index]+t_len * [-1])
            elif t == 'N / A':
                # NOTE(review): this extends the map with the *strings*
                # 'N', '/', 'A'; the indexing loop below expects ints --
                # confirm whether this branch is ever reached.
                alignment_to_action_map.extend(t.split())
            else:  # irreducible
                alignment_to_action_map.append(action_index)
        new_alignment = ['NO_ALIGN'] * len(action_sequence)
        if len(alignment_to_action_map) != len(alignment):
            # there are 4 sql queries that are manually fixed to be parsed by the grammar,
            # hence they can't be modified
            print([ent[0].strip('"') for ent in query])
            return new_alignment
        for i in range(len(alignment_to_action_map)):
            if alignment_to_action_map[i] == -1:
                continue
            if new_alignment[alignment_to_action_map[i]] != 'NO_ALIGN':
                continue
            # if alignment[i] == '?': (I leave it for now to match the seq2seq alignment)
            #     alignment[i] = 'NO_ALIGN'
            new_alignment[alignment_to_action_map[i]] = alignment[i]
        return new_alignment
    def _initialize_grammar_dictionary(self, grammar_dictionary: Dict[str, List[str]]) -> Dict[str, List[str]]:
        """Specialise the base grammar for this dataset's schema."""
        # Add all the table and column names to the grammar.
        update_grammar_with_tables(grammar_dictionary, self.schema, self.dataset_name)
        if self.cursor is not None and not self.use_prelinked_entities:
            # Now if we have strings in the table, we need to be able to
            # produce them, so we find all of the strings in the tables here
            # and create production rules from them. We only do this if
            # we haven't pre-linked entities, because if we have, we don't
            # need to be able to generate the values - just the placeholder
            # symbols which link to them.
            grammar_dictionary["number"] = []
            grammar_dictionary["string"] = []
            update_grammar_with_table_values(grammar_dictionary, self.schema, self.cursor)
        # Finally, update the grammar with global, non-variable values
        # found in the dataset, if present.
        update_grammar_with_global_values(grammar_dictionary, self.dataset_name)
        if self.variable_free:
            update_grammar_to_be_variable_free(grammar_dictionary)
        if self.use_untyped_entities:
            update_grammar_with_untyped_entities(grammar_dictionary)
        return grammar_dictionary
    def is_global_rule(self, production_rule: str) -> bool:
        """False only for prelinked value placeholders like 'city_name0'."""
        if self.use_prelinked_entities:
            # we are checking -4 as is not a global rule if we
            # see the 0 in the a rule like 'value -> ["\'city_name0\'"]'
            if "value" in production_rule and production_rule[-4].isnumeric():
                return False
        return True
|
99295
|
r"""
Commutative algebras
"""
#*****************************************************************************
# Copyright (C) 2005 <NAME> <<EMAIL>>
# <NAME> <<EMAIL>>
# 2008-2009 <NAME> <nthiery at users.sf.net>
#
# Distributed under the terms of the GNU General Public License (GPL)
# http://www.gnu.org/licenses/
#******************************************************************************
from sage.categories.category_with_axiom import CategoryWithAxiom_over_base_ring
from sage.categories.algebras import Algebras
class CommutativeAlgebras(CategoryWithAxiom_over_base_ring):
    """
    The category of commutative algebras with unit over a given base ring.
    EXAMPLES::
        sage: M = CommutativeAlgebras(GF(19))
        sage: M
        Category of commutative algebras over Finite Field of size 19
        sage: CommutativeAlgebras(QQ).super_categories()
        [Category of algebras over Rational Field, Category of commutative rings]
    This is just a shortcut for::
        sage: Algebras(QQ).Commutative()
        Category of commutative algebras over Rational Field
    TESTS::
        sage: Algebras(QQ).Commutative() is CommutativeAlgebras(QQ)
        True
        sage: TestSuite(CommutativeAlgebras(ZZ)).run()
    Todo:
     - product ( = Cartesian product)
     - coproduct ( = tensor product over base ring)
    """
    def __contains__(self, A):
        """
        EXAMPLES::
            sage: QQ['a'] in CommutativeAlgebras(QQ)
            True
            sage: QQ['a,b'] in CommutativeAlgebras(QQ)
            True
            sage: FreeAlgebra(QQ,2,'a,b') in CommutativeAlgebras(QQ)
            False
        TODO: get rid of this method once all commutative algebras in
        Sage declare themselves in this category
        """
        # Membership declared through the category framework wins outright.
        if super(CommutativeAlgebras, self).__contains__(A):
            return True
        # Fallback: an algebra over our base ring that knows it is commutative.
        if A not in Algebras(self.base_ring()):
            return False
        return hasattr(A, "is_commutative") and A.is_commutative()
99329
|
import pytest
from sdk.data.Mappable import NoneAsMappable, StringAsMappable
def test_none_as_mappable():
    """NoneAsMappable serializes to an empty string and compares equal."""
    instance = NoneAsMappable()
    assert instance.to_json() == ''
    assert instance == NoneAsMappable()
@pytest.mark.parametrize('string', [
    'asdfg',
    'test-test',
    'dunnoLol'
])
def test_string_as_mappable(string):
    """StringAsMappable round-trips through to_json and from_str."""
    mapped = StringAsMappable(string)
    assert mapped.to_json() == string
    assert StringAsMappable.from_str(string) == mapped
def test_string_as_mappable_equality_fail():
    """Comparison against an unrelated type must not report equality."""
    mapped = StringAsMappable('lol')
    assert not (mapped == {})
|
99354
|
def condensate_to_gas_equivalence(api, stb):
    """Convert `stb` stock-tank barrels of condensate of gravity `api`
    to its gas-equivalent volume (derivation from the real gas equation)."""
    Tsc = 519.57    # standard temp in Rankine
    psc = 14.7      # standard pressure in psi
    R = 10.732
    rho_w = 350.16  # water density in lbm/STB
    # so: specific gravity of oil (dimensionless); Mo: molecular weight of oil
    so = 141.5 / (api + 131.5)
    Mo = 5854 / (api - 8.811)
    moles_per_stb = (rho_w * so) / Mo
    volume_per_stb = moles_per_stb * R * Tsc / psc
    return volume_per_stb * stb
def general_equivalence(gamma, M):
    """Calculate equivalence of 1 STB of water/condensate to scf of gas.

    gamma: specific gravity of condensate/water (oil: 141.5/(api+131.5);
           water: 1).
    M: molecular weight of condensate/water (oil: 5854/(api-8.811);
       water: 18).
    """
    return 132849 * (gamma / M)
|
99376
|
class Node:
    """Binary tree node holding `data` plus left/right child links."""
    def __init__(self, data):
        self.data = data
        self.left = self.right = None

    def __repr__(self):
        return str(self.data)
def count_unival_trees(root):
    """Count the unival subtrees of the binary tree rooted at `root`.

    A subtree is "unival" when every node in it holds the same value.
    Fixes the original implementation, which compared ``root.data`` with
    ``root.left.data`` twice (the right child was never checked) and did
    not verify that children were themselves unival.
    """
    count, _ = _count_and_is_unival(root)
    return count

def _count_and_is_unival(node):
    """Return (unival-subtree count, whether `node`'s subtree is unival)."""
    if node is None:
        # an empty subtree contributes nothing and never blocks its parent
        return 0, True
    left_count, left_unival = _count_and_is_unival(node.left)
    right_count, right_unival = _count_and_is_unival(node.right)
    is_unival = (
        left_unival and right_unival
        and (node.left is None or node.left.data == node.data)
        and (node.right is None or node.right.data == node.data)
    )
    total = left_count + right_count + (1 if is_unival else 0)
    return total, is_unival
# Example tree (values in parentheses):
#         a(0)
#        /    \
#      b(1)   c(0)
#            /    \
#          d(1)   e(0)
#         /    \
#       f(1)   g(1)
node_a = Node('0')
node_b = Node('1')
node_c = Node('0')
node_d = Node('1')
node_e = Node('0')
node_f = Node('1')
node_g = Node('1')
node_a.left = node_b
node_a.right = node_c
node_c.left = node_d
node_c.right = node_e
node_d.left = node_f
node_d.right = node_g
# Sanity checks: counts for the whole tree and for several subtrees.
assert count_unival_trees(None) == 0
assert count_unival_trees(node_a) == 5
assert count_unival_trees(node_c) == 4
assert count_unival_trees(node_g) == 1
assert count_unival_trees(node_d) == 3
|
99383
|
# Demonstrates pyco's user_input with various prompt/input colour styles.
from pyco import user_input
from pyco.color import Fore, Back, Style
# Default prompt, no colouring.
user_input("Plain prompt: ")
user_input(Fore.GREEN + "Prompt in green: ")
user_input(Fore.BRIGHT_RED + "Prompt in bright red, user input in cyan: ", input_color=Fore.CYAN)
user_input(Fore.BLUE + Back.BRIGHT_WHITE + "Prompt in blue on a bright white background, user input in bright magenta with an underline: ", input_color=Fore.BRIGHT_MAGENTA + Style.UNDERLINE)
# log=True: the prompt and the user's reply are logged (per the prompt text).
user_input("This prompt and the following user input has been logged: ", log=True)
|
99386
|
import binascii
import datetime
import hashlib
import hmac
import os
import uuid
from typing import Dict, List, Optional, Type, Union
from fastapi import HTTPException, Query, status
from pydantic import BaseModel, Extra, validator
from ..config import settings
from .auth import TYPING_AUTH
from . import EBaseModel, logger, md
class User(EBaseModel):
    """CTF platform user: identity, score, solved tasks and auth info."""
    # Fields visible to everyone.
    __public_fields__ = {
        "user_id",
        "username",
        "score",
        "solved_tasks",
        "affilation",
        "country",
        "profile_pic",
    }
    # Fields visible to admins only.
    __admin_only_fields__ = {
        "is_admin",
        "oauth_id",
    }
    # Fields never exposed.
    # Fix: was `{}` (an empty *dict*); the sibling field sets above are
    # sets, so use an explicit empty set for consistency.
    __private_fields__ = set()
    user_id: uuid.UUID = None  # filled in by the set_id validator below
    username: str = "unknown"
    score: int = 0
    solved_tasks: Dict[uuid.UUID, datetime.datetime] = {}  # task id -> solve time
    is_admin: bool = False
    affilation: str = ""
    country: str = ""
    profile_pic: Optional[str] = None
    auth_source: TYPING_AUTH
    def __init__(self, *args, **kwargs):
        super().__init__(*args, **kwargs)
        # The username always comes from the auth backend, never the payload.
        self.username = self.auth_source.generate_username()
        if self.admin_checker():
            logger.warning(f"Promoting {self} to admin")
            self.is_admin = True
    def admin_checker(self):
        """True when the auth backend marks this account as an admin."""
        return self.auth_source.is_admin()
    @validator("user_id", pre=True, always=True)
    def set_id(cls, v):
        # Generate a fresh UUID when none was supplied.
        return v or uuid.uuid4()
    def get_last_solve_time(self):
        """Return the (task_id, datetime) of the most recent solve, or
        ("", epoch) when nothing has been solved yet."""
        if len(self.solved_tasks) > 0:
            return max(self.solved_tasks.items(), key=lambda x: x[1])
        else:
            return ("", datetime.datetime.fromtimestamp(0))
    def short_desc(self):
        """One-line human-readable summary of the user."""
        return f"id={self.user_id}; name={self.username}; src={self.auth_source.classtype}"
|
99392
|
from hashlib import sha256
from hmac import new as hmac
from random import randint
from re import fullmatch
from socket import socket
from struct import pack as encode
from subprocess import PIPE, STDOUT, Popen
from crcmod.predefined import mkCrcFun
from goodix import FLAGS_TRANSPORT_LAYER_SECURITY_DATA, Device
from protocol import USBProtocol
from tool import connect_device, decode_image, warning, write_pgm
TARGET_FIRMWARE: str = "GF3268_RTSEC_APP_10041"
IAP_FIRMWARE: str = "MILAN_RTSEC_IAP_10027"
VALID_FIRMWARE: str = "GF32[0-9]{2}_RTSEC_APP_100[0-9]{2}"
PSK: bytes = bytes.fromhex(
"0000000000000000000000000000000000000000000000000000000000000000")
PSK_WHITE_BOX: bytes = bytes.fromhex(
"ec35ae3abb45ed3f12c4751f1e5c2cc05b3c5452e9104d9f2a3118644f37a04b"
"6fd66b1d97cf80f1345f76c84f03ff30bb51bf308f2a9875c41e6592cd2a2f9e"
"60809b17b5316037b69bb2fa5d4c8ac31edb3394046ec06bbdacc57da6a756c5")
PMK_HASH: bytes = bytes.fromhex(
"81b8ff490612022a121a9449ee3aad2792f32b9f3141182cd01019945ee50361")
DEVICE_CONFIG: bytes = bytes.fromhex(
"6011607124952cc114d510e500e514f9030402000008001111ba000180ca0007"
"008400c0b38600bbc48800baba8a00b2b28c00aaaa8e00c1c19000bbbb9200b1"
"b1940000a8960000b6980000bf9a0000ba50000105d000000070000000720078"
"56740034122600001220001040120003042a0102002200012024003200800001"
"005c008000560008205800010032002c028200800cba000180ca0007002a0182"
"03200010402200012024001400800005005c0000015600082058000300820080"
"142a0108005c0080006200090364001800220000202a0108005c000001520008"
"0054000001000000000000000000000000000000000000000000000000009a69")
SENSOR_WIDTH = 88
SENSOR_HEIGHT = 108
def init_device(product: int) -> Device:
    """Open the fingerprint device for `product` over USB and ping it."""
    device = Device(product, USBProtocol)
    device.nop()
    return device
def check_psk(device: Device) -> bool:
    """Read back the PSK slot and report whether it matches PMK_HASH."""
    reply = device.preset_psk_read(0xbb020007)
    success, flags, payload = reply[0], reply[1], reply[2]
    if not success:
        raise ValueError("Failed to read PSK")
    if flags != 0xbb020007:
        raise ValueError("Invalid flags")
    return payload == PMK_HASH
def write_psk(device: Device) -> bool:
    """Write the white-boxed PSK to the device and verify it took effect."""
    written = device.preset_psk_write(0xbb010003, PSK_WHITE_BOX)
    return bool(written) and check_psk(device)
def erase_firmware(device: Device) -> None:
    """Erase the application firmware and drop the device connection."""
    device.mcu_erase_app(50, False)
    device.disconnect()
def update_firmware(device: Device) -> None:
    """Flash TARGET_FIRMWARE onto the device and verify it by CRC + HMAC.

    On any failure the firmware is erased again so the device is not left
    with a half-written image, and the error is re-raised.
    """
    # Read the image with a context manager (the handle was previously
    # closed manually).
    with open(f"firmware/55x4/{TARGET_FIRMWARE}.bin", "rb") as firmware_file:
        firmware = firmware_file.read()
    # HMAC key derivation: PSK -> PMK -> PMK-HMAC over the bytes 1..64
    # (bytes(range(1, 65)) replaces the per-byte pack loop).
    mod = bytes(range(1, 65))
    raw_pmk = (encode(">H", len(PSK)) + PSK) * 2
    pmk = sha256(raw_pmk).digest()
    pmk_hmac = hmac(pmk, mod, sha256).digest()
    firmware_hmac = hmac(pmk_hmac, firmware, sha256).digest()
    try:
        length = len(firmware)
        # write the image in 256-byte pages
        for i in range(0, length, 256):
            if not device.write_firmware(i, firmware[i:i + 256]):
                raise ValueError("Failed to write firmware")
        if not device.check_firmware(0, length,
                                     mkCrcFun("crc-32-mpeg")(firmware),
                                     firmware_hmac):
            raise ValueError("Failed to check firmware")
    except Exception as error:
        print(
            warning(f"The program went into serious problems while trying to "
                    f"update the firmware: {error}"))
        erase_firmware(device)
        raise error
    device.reset(False, True, 100)
    device.disconnect()
def run_driver(device: Device):
    """Drive one capture session: set up a local TLS endpoint, configure the
    sensor, grab two calibration frames and one fingerprint frame, and write
    them out as PGM images.
    """
    # Local openssl s_server acts as the PSK-TLS peer the sensor streams to.
    tls_server = Popen([
        "openssl", "s_server", "-nocert", "-psk",
        PSK.hex(), "-port", "4433", "-quiet"
    ],
                       stdout=PIPE,
                       stderr=STDOUT)
    try:
        if not device.reset(True, False, 20)[0]:
            raise ValueError("Reset failed")
        device.read_sensor_register(0x0000, 4)  # Read chip ID (0x00a1)
        device.read_otp()
        # OTP: 0867860a12cc02faa65d2b4b0204e20cc20c9664087bf80706000000c02d431d
        tls_client = socket()
        tls_client.connect(("localhost", 4433))
        try:
            connect_device(device, tls_client)
            if not device.upload_config_mcu(DEVICE_CONFIG):
                raise ValueError("Failed to upload config")
            # First calibration frame (FDT mode, TLS-encrypted payload).
            device.mcu_switch_to_fdt_mode(
                b"\x0d\x01\x80\x12\x80\x12\x80\x98"
                b"\x80\x82\x80\x12\x80\xa0\x80\x99"
                b"\x80\x7f\x80\x12\x80\x9f\x80\x93"
                b"\x80\x7e", True)
            tls_client.sendall(
                device.mcu_get_image(b"\x01\x00",
                                     FLAGS_TRANSPORT_LAYER_SECURITY_DATA)[9:])
            write_pgm(decode_image(tls_server.stdout.read(14260)[:-4]),
                      SENSOR_WIDTH, SENSOR_HEIGHT, "clear-0.pgm")
            # Second calibration frame after an idle cycle.
            device.mcu_switch_to_fdt_mode(
                b"\x0d\x01\x80\x12\x80\x12\x80\x98"
                b"\x80\x82\x80\x12\x80\xa0\x80\x99"
                b"\x80\x7f\x80\x12\x80\x9f\x80\x93"
                b"\x80\x7e", True)
            device.mcu_switch_to_idle_mode(20)
            device.read_sensor_register(0x0082, 2)
            tls_client.sendall(
                device.mcu_get_image(b"\x01\x00",
                                     FLAGS_TRANSPORT_LAYER_SECURITY_DATA)[9:])
            write_pgm(decode_image(tls_server.stdout.read(14260)[:-4]),
                      SENSOR_WIDTH, SENSOR_HEIGHT, "clear-1.pgm")
            device.mcu_switch_to_fdt_mode(
                b"\x0d\x01\x80\x12\x80\x12\x80\x98"
                b"\x80\x82\x80\x12\x80\xa0\x80\x99"
                b"\x80\x7f\x80\x12\x80\x9f\x80\x93"
                b"\x80\x7e", True)
            if not device.switch_to_sleep_mode(0x6c):
                raise ValueError("Failed to switch to sleep mode")
            # Block until a finger-down event, then capture the print.
            print("Waiting for finger...")
            device.mcu_switch_to_fdt_down(
                b"\x0c\x01\x80\xb0\x80\xc4\x80\xba"
                b"\x80\xa6\x80\xb7\x80\xc7\x80\xc0"
                b"\x80\xaa\x80\xb4\x80\xc4\x80\xba"
                b"\x80\xa6", True)
            tls_client.sendall(
                device.mcu_get_image(b"\x01\x00",
                                     FLAGS_TRANSPORT_LAYER_SECURITY_DATA)[9:])
            write_pgm(decode_image(tls_server.stdout.read(14260)[:-4]),
                      SENSOR_WIDTH, SENSOR_HEIGHT, "fingerprint.pgm")
        finally:
            tls_client.close()
    finally:
        tls_server.terminate()
def main(product: int) -> None:
    """Interactive entry point: confirm with the user, then bring the device
    to TARGET_FIRMWARE (erasing/flashing as needed), ensure the PSK is set,
    and finally run one capture session.
    """
    print(
        warning("This program might break your device.\n"
                "Consider that it may flash the device firmware.\n"
                "Continue at your own risk.\n"
                "But don't hold us responsible if your device is broken!\n"
                "Don't run this program as part of a regular process."))
    # Random confirmation code keeps automated runs from flashing devices.
    code = randint(0, 9999)
    if input(f"Type {code} to continue and confirm that you are not a bot: "
             ) != str(code):
        print("Abort")
        return
    previous_firmware = None
    device = init_device(product)
    # State machine: loop until the target firmware is running, reconnecting
    # after every erase/flash step.
    while True:
        firmware = device.firmware_version()
        print(f"Firmware: {firmware}")
        valid_psk = check_psk(device)
        print(f"Valid PSK: {valid_psk}")
        if firmware == IAP_FIRMWARE:
            iap = IAP_FIRMWARE
        else:
            iap = device.get_iap_version(25)
        print(f"IAP: {iap}")
        if iap != IAP_FIRMWARE:
            raise ValueError(
                "Invalid IAP\n" +
                warning("Please consider that removing this security "
                        "is a very bad idea!"))
        # If the firmware did not change since the last pass, we are stuck.
        if firmware == previous_firmware:
            raise ValueError("Unchanged firmware")
        previous_firmware = firmware
        if fullmatch(TARGET_FIRMWARE, firmware):
            # Target firmware running: ensure PSK, then capture.
            if not valid_psk:
                if not write_psk(device):
                    raise ValueError("Failed to write PSK")
            run_driver(device)
            return
        if fullmatch(VALID_FIRMWARE, firmware):
            # Known application firmware: erase it to fall back to IAP.
            erase_firmware(device)
            device = init_device(product)
            continue
        if fullmatch(IAP_FIRMWARE, firmware):
            # IAP bootloader running: flash the target firmware.
            if not valid_psk:
                if not write_psk(device):
                    raise ValueError("Failed to write PSK")
            update_firmware(device)
            device = init_device(product)
            continue
        raise ValueError("Invalid firmware\n" +
                         warning("Please consider that removing this security "
                                 "is a very bad idea!"))
|
99402
|
import argparse
import json
import re
from typing import List
quiet = False

def print_message(message: str):
    """
    Print message to STDOUT if the quiet option is set to False (this is the default).
    :param message: message to print
    :return: None
    """
    # Reading a module-level name needs no `global` declaration.
    if not quiet:
        print(message)
def get_matches(log_file: str, regex):
    """
    Generator object to generically parse a given log file using a compiled regex pattern.
    :param log_file: file to read logs from
    :param regex: compiled regex pattern
    :return: a generator, which when iterated returns tuples of captured groups
    """
    with open(log_file, 'r') as f:
        # Iterate the file directly instead of a manual readline() loop.
        for raw_line in f:
            stripped = raw_line.strip()
            match = re.match(regex, stripped)
            if match:
                yield match.groups()
            else:
                print_message('WARNING, unable to parse log message: {}'.format(stripped))
def parse_apache_error_logs(log_file: str) -> List:
    """
    Parse an apache error log file.
    :param log_file: log file to read from
    :return: list of dictionaries of fields parsed from logs
    """
    regex = re.compile(r'^\[(.+)\] \[(\w+)\] \[client (\d{1,3}(?:\.\d{1,3}){3})\] ([\w\s]+): (\S+)$', re.IGNORECASE)
    field_names = ('datetime', 'log_level', 'client_ip', 'message', 'request_path')
    logs = []
    for groups in get_matches(log_file, regex):
        # skip entries originating from the local host
        if groups[2] == '127.0.0.1':
            continue
        logs.append(dict(zip(field_names, groups)))
    return logs
def parse_apache_logs(log_file: str) -> List:
    """
    Parse an apache access log file.
    :param log_file: log file to read from
    :return: list of dictionaries of fields parsed from logs
    """
    regex = re.compile(
        r'^(\d{1,3}(?:\.\d{1,3}){3}) \- \- \[(.+)\] "(\w+) (\S+) (\S+)" (\d+) ([\d\-]+) "(\S+)"(?: "(.+)")?$',
        re.IGNORECASE)
    field_names = ('client_ip', 'datetime', 'request_method', 'request_path',
                   'protocol', 'response_code', 'response_size', 'referer',
                   'user_agent')
    logs = []
    for groups in get_matches(log_file, regex):
        # skip entries originating from the local host
        if groups[0] == '127.0.0.1':
            continue
        logs.append(dict(zip(field_names, groups)))
    return logs
def main():
    """CLI entry point: parse the chosen log file and emit the result as JSON
    (to a file when -o is given, otherwise to stdout)."""
    global quiet
    parser = argparse.ArgumentParser(description='Generic log file parser application.')
    parser.add_argument('-i', '--input', required=True, help='Log file to read from')
    parser.add_argument('-l', '--log-format', required=True, choices=['apache', 'apache_error'],
                        help='Type of log to parse')
    parser.add_argument('-o', '--output', help='Output file to write to')
    parser.add_argument('-q', '--quiet', help='Do not print informative messages', action='store_true')
    args = parser.parse_args()
    input_file = args.input
    log_format = args.log_format
    output = args.output
    quiet = args.quiet
    # Dispatch to parse_apache_logs / parse_apache_error_logs by name.
    parse_function = globals()['parse_{}_logs'.format(log_format)]
    parsed_logs = parse_function(input_file)
    if output:
        with open(output, 'w') as of:
            json.dump(parsed_logs, of, indent=2)
    else:
        print(json.dumps(parsed_logs, indent=2))
if __name__ == '__main__':
main()
|
99406
|
from .ReportDaily import *
# Lists which git version was used by how many users yesterday
class ReportGitVersionsNew(ReportDaily):
    """Daily report: which git version was used by how many users yesterday."""
    def name(self):
        # Report identifier used for output naming.
        return "git-versions-new"
    def updateDailyData(self):
        """Append yesterday's git-version counts (from git-versions.sh),
        then trim and re-sort the accumulated data."""
        newHeader, newData = self.parseData(
            self.executeScript(self.scriptPath("git-versions.sh")))
        self.header = ["date"] + newHeader
        # prefix each row with yesterday's date
        newData = [[str(self.yesterday())] + row for row in newData]
        self.data.extend(newData)
        self.truncateData(self.timeRangeTotal())
        self.sortDataByDate()
|
99453
|
import FWCore.ParameterSet.Config as cms
from CalibMuon.DTCalibration.dtSegmentSelection_cfi import dtSegmentSelection
# EDAnalyzer that fills the per-chamber vDrift calibration histograms
# from reconstructed 4D DT segments into the configured ROOT file.
dtVDriftSegmentCalibration = cms.EDAnalyzer("DTVDriftSegmentCalibration",
    # Segment selection
    dtSegmentSelection,
    recHits4DLabel = cms.InputTag('dt4DSegments'),
    rootFileName = cms.untracked.string('DTVDriftHistos.root'),
    # Choose the chamber you want to calibrate (default = "All"), specify the chosen chamber
    # in the format "wheel station sector" (i.e. "-1 3 10")
    calibChamber = cms.untracked.string('All')
)
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.