hexsha
string | size
int64 | ext
string | lang
string | max_stars_repo_path
string | max_stars_repo_name
string | max_stars_repo_head_hexsha
string | max_stars_repo_licenses
list | max_stars_count
int64 | max_stars_repo_stars_event_min_datetime
string | max_stars_repo_stars_event_max_datetime
string | max_issues_repo_path
string | max_issues_repo_name
string | max_issues_repo_head_hexsha
string | max_issues_repo_licenses
list | max_issues_count
int64 | max_issues_repo_issues_event_min_datetime
string | max_issues_repo_issues_event_max_datetime
string | max_forks_repo_path
string | max_forks_repo_name
string | max_forks_repo_head_hexsha
string | max_forks_repo_licenses
list | max_forks_count
int64 | max_forks_repo_forks_event_min_datetime
string | max_forks_repo_forks_event_max_datetime
string | content
string | avg_line_length
float64 | max_line_length
int64 | alphanum_fraction
float64 | qsc_code_num_words_quality_signal
int64 | qsc_code_num_chars_quality_signal
float64 | qsc_code_mean_word_length_quality_signal
float64 | qsc_code_frac_words_unique_quality_signal
float64 | qsc_code_frac_chars_top_2grams_quality_signal
float64 | qsc_code_frac_chars_top_3grams_quality_signal
float64 | qsc_code_frac_chars_top_4grams_quality_signal
float64 | qsc_code_frac_chars_dupe_5grams_quality_signal
float64 | qsc_code_frac_chars_dupe_6grams_quality_signal
float64 | qsc_code_frac_chars_dupe_7grams_quality_signal
float64 | qsc_code_frac_chars_dupe_8grams_quality_signal
float64 | qsc_code_frac_chars_dupe_9grams_quality_signal
float64 | qsc_code_frac_chars_dupe_10grams_quality_signal
float64 | qsc_code_frac_chars_replacement_symbols_quality_signal
float64 | qsc_code_frac_chars_digital_quality_signal
float64 | qsc_code_frac_chars_whitespace_quality_signal
float64 | qsc_code_size_file_byte_quality_signal
float64 | qsc_code_num_lines_quality_signal
float64 | qsc_code_num_chars_line_max_quality_signal
float64 | qsc_code_num_chars_line_mean_quality_signal
float64 | qsc_code_frac_chars_alphabet_quality_signal
float64 | qsc_code_frac_chars_comments_quality_signal
float64 | qsc_code_cate_xml_start_quality_signal
float64 | qsc_code_frac_lines_dupe_lines_quality_signal
float64 | qsc_code_cate_autogen_quality_signal
float64 | qsc_code_frac_lines_long_string_quality_signal
float64 | qsc_code_frac_chars_string_length_quality_signal
float64 | qsc_code_frac_chars_long_word_length_quality_signal
float64 | qsc_code_frac_lines_string_concat_quality_signal
float64 | qsc_code_cate_encoded_data_quality_signal
float64 | qsc_code_frac_chars_hex_words_quality_signal
float64 | qsc_code_frac_lines_prompt_comments_quality_signal
float64 | qsc_code_frac_lines_assert_quality_signal
float64 | qsc_codepython_cate_ast_quality_signal
float64 | qsc_codepython_frac_lines_func_ratio_quality_signal
float64 | qsc_codepython_cate_var_zero_quality_signal
bool | qsc_codepython_frac_lines_pass_quality_signal
float64 | qsc_codepython_frac_lines_import_quality_signal
float64 | qsc_codepython_frac_lines_simplefunc_quality_signal
float64 | qsc_codepython_score_lines_no_logic_quality_signal
float64 | qsc_codepython_frac_lines_print_quality_signal
float64 | qsc_code_num_words
int64 | qsc_code_num_chars
int64 | qsc_code_mean_word_length
int64 | qsc_code_frac_words_unique
null | qsc_code_frac_chars_top_2grams
int64 | qsc_code_frac_chars_top_3grams
int64 | qsc_code_frac_chars_top_4grams
int64 | qsc_code_frac_chars_dupe_5grams
int64 | qsc_code_frac_chars_dupe_6grams
int64 | qsc_code_frac_chars_dupe_7grams
int64 | qsc_code_frac_chars_dupe_8grams
int64 | qsc_code_frac_chars_dupe_9grams
int64 | qsc_code_frac_chars_dupe_10grams
int64 | qsc_code_frac_chars_replacement_symbols
int64 | qsc_code_frac_chars_digital
int64 | qsc_code_frac_chars_whitespace
int64 | qsc_code_size_file_byte
int64 | qsc_code_num_lines
int64 | qsc_code_num_chars_line_max
int64 | qsc_code_num_chars_line_mean
int64 | qsc_code_frac_chars_alphabet
int64 | qsc_code_frac_chars_comments
int64 | qsc_code_cate_xml_start
int64 | qsc_code_frac_lines_dupe_lines
int64 | qsc_code_cate_autogen
int64 | qsc_code_frac_lines_long_string
int64 | qsc_code_frac_chars_string_length
int64 | qsc_code_frac_chars_long_word_length
int64 | qsc_code_frac_lines_string_concat
null | qsc_code_cate_encoded_data
int64 | qsc_code_frac_chars_hex_words
int64 | qsc_code_frac_lines_prompt_comments
int64 | qsc_code_frac_lines_assert
int64 | qsc_codepython_cate_ast
int64 | qsc_codepython_frac_lines_func_ratio
int64 | qsc_codepython_cate_var_zero
int64 | qsc_codepython_frac_lines_pass
int64 | qsc_codepython_frac_lines_import
int64 | qsc_codepython_frac_lines_simplefunc
int64 | qsc_codepython_score_lines_no_logic
int64 | qsc_codepython_frac_lines_print
int64 | effective
string | hits
int64 |
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
b90e1563532d8f5a9bd0112e0c1e612d450caec6
| 120
|
py
|
Python
|
chickpea/__init__.py
|
ENPH-2113-Silicon-Photonics/legume
|
45c64b78f09b7d8779f7a4e12f204f5778b08b0b
|
[
"MIT"
] | 2
|
2021-04-13T22:43:04.000Z
|
2021-04-15T12:19:00.000Z
|
chickpea/__init__.py
|
ENPH-2113-Silicon-Photonics/legume
|
45c64b78f09b7d8779f7a4e12f204f5778b08b0b
|
[
"MIT"
] | null | null | null |
chickpea/__init__.py
|
ENPH-2113-Silicon-Photonics/legume
|
45c64b78f09b7d8779f7a4e12f204f5778b08b0b
|
[
"MIT"
] | 1
|
2022-02-20T07:04:34.000Z
|
2022-02-20T07:04:34.000Z
|
from .ctl import PhotonicCrystalCavity
from .ctl import NanoBeamCavity
from .ctl import PhotonicCrystalTopologyBuilder
| 24
| 47
| 0.866667
| 12
| 120
| 8.666667
| 0.5
| 0.201923
| 0.375
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.108333
| 120
| 4
| 48
| 30
| 0.971963
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
f8deab487cb0a79486a84144e652a41c00369e53
| 7,886
|
py
|
Python
|
loadbalanceRL/lib/algorithm/Qlearning/agents/regression.py
|
fqzhou/LoadBalanceControl-RL
|
689eec3b3b27e121aa45d2793e411f1863f6fc0b
|
[
"MIT"
] | 11
|
2018-10-29T06:50:43.000Z
|
2022-03-28T14:26:09.000Z
|
loadbalanceRL/lib/algorithm/Qlearning/agents/regression.py
|
fqzhou/LoadBalanceControl-RL
|
689eec3b3b27e121aa45d2793e411f1863f6fc0b
|
[
"MIT"
] | 1
|
2022-03-01T13:46:25.000Z
|
2022-03-01T13:46:25.000Z
|
loadbalanceRL/lib/algorithm/Qlearning/agents/regression.py
|
fqzhou/LoadBalanceControl-RL
|
689eec3b3b27e121aa45d2793e411f1863f6fc0b
|
[
"MIT"
] | 6
|
2019-02-05T20:01:53.000Z
|
2020-09-04T12:30:00.000Z
|
#! /usr/bin/env python3
# -*- coding: utf-8 -*-
"""
This module stores implementation of various regression models.
"""
import logging
import numpy as np
import tensorflow as tf
from loadbalanceRL.lib.algorithm.Qlearning.agents import agent_template
__author__ = 'Ari Saha (arisaha@icloud.com), Mingyang Liu(liux3941@umn.edu)'
class QLinearRegressionAgent(agent_template.Base):
    """Q-learning agent whose Q-values come from a linear regression model.

    The model ``y = State @ W + b`` is built with the TensorFlow 1.x graph
    API and trained by gradient descent on a squared-error cost.

    NOTE(review): ``__init__`` asserts ``self.n_actions``/``self.state_dim``
    and reads several hyperparameters without calling ``super().__init__``;
    these attributes are presumably populated by ``agent_template.Base``
    from the configs — confirm against that module.
    """

    def __init__(self, alg_config, agent_config):
        # The environment must supply the action count and state dimension.
        assert self.n_actions
        assert self.state_dim

        self.logger = logging.getLogger(self.__class__.__name__)
        self.logger.info("Linear regression instance is created!")

        # Echo every hyperparameter so each run's configuration is logged.
        self.logger.info("Configuration used for the Agent:")
        for param in ("episodes", "alpha", "gamma", "epsilon",
                      "epsilon_decay", "epsilon_min"):
            self.logger.info("{}: {}".format(param, getattr(self, param)))

        self._build_model()

    def _build_model(self):
        """Assemble the TF1 regression graph and start a live session."""
        self.State = tf.placeholder(tf.float32, [None, self.state_dim])
        self.Target = tf.placeholder(tf.float32, [None, 1])
        # Weights and bias both start at one.
        self.W = tf.Variable(tf.ones([self.state_dim, 1]))
        self.b = tf.Variable(tf.ones([1]))
        self.y_ = tf.add(tf.matmul(self.State, self.W), self.b)
        # Mean squared error against the supplied target.
        self.cost = tf.reduce_mean(tf.square(self.y_ - self.Target))
        self.training_step = tf.train.GradientDescentOptimizer(
            self.alpha).minimize(self.cost)
        # Start a TensorFlow session and initialize the variables.
        self.sess = tf.Session()
        self.sess.run(tf.global_variables_initializer())

    def predict(self, state):
        """Return the model's output for ``state``."""
        return self.sess.run(self.y_, feed_dict={self.State: state})

    def train(self, state, target):
        """Run one gradient-descent step on ``(state, target)``."""
        feed = {self.State: state, self.Target: target}
        self.sess.run(self.training_step, feed_dict=feed)

    def model_cost(self, state, target):
        """Return the current squared-error cost for ``(state, target)``."""
        feed = {self.State: state, self.Target: target}
        return self.sess.run(self.cost, feed_dict=feed)

    def model_error(self, pred_y, test_y):
        """Return the mean squared error between predictions and truth."""
        return tf.reduce_mean(tf.square(pred_y - test_y))

    def _take_action(self, state):
        """Pick an action for ``state`` with an epsilon-greedy policy.

        Args:
            state: (tuple)
        Returns:
            action: (float)
        """
        # Explore with probability epsilon, otherwise exploit the model.
        if np.random.rand() < self.epsilon:
            return np.random.choice(list(range(self.n_actions)))
        q_values = self.predict(np.reshape(state, (1, self.state_dim)))
        return np.argmax(q_values)

    def _learn(self, state, action, reward, next_state):
        """Update the model after one environment transition.

        Args:
            state: (tuple) current state of the environment.
            action: (float) action taken by the agent.
            reward: (float) reward produced by the environment.
            next_state: (tuple) next state of the environment.

        NOTE(review): the model is trained on its own prediction, so
        ``action``/``reward``/``next_state`` never influence the update —
        looks like a placeholder implementation; confirm intent.
        """
        # Decay exploration until the floor is reached.
        if self.epsilon > self.epsilon_min:
            self.epsilon *= self.epsilon_decay
        reshaped = np.reshape(state, (1, self.state_dim))
        self.train(reshaped, self.predict(reshaped))
class QCellularLinearRegressionAgent(agent_template.Base):
    """Linear-regression Q-learning agent for the cellular network case.

    NOTE(review): this class duplicates ``QLinearRegressionAgent`` apart
    from one log message — a shared base class would remove the copy.

    NOTE(review): ``__init__`` asserts ``self.n_actions``/``self.state_dim``
    and reads hyperparameters without calling ``super().__init__``; these
    are presumably populated by ``agent_template.Base`` — confirm.
    """

    def __init__(self, alg_config, agent_config):
        # The environment must supply the action count and state dimension.
        assert self.n_actions
        assert self.state_dim

        self.logger = logging.getLogger(self.__class__.__name__)
        self.logger.info(
            "Linear regression instance for cellular network is created!")

        # Echo every hyperparameter so each run's configuration is logged.
        self.logger.info("Configuration used for the Agent:")
        for param in ("episodes", "alpha", "gamma", "epsilon",
                      "epsilon_decay", "epsilon_min"):
            self.logger.info("{}: {}".format(param, getattr(self, param)))

        self._build_model()

    def _build_model(self):
        """Assemble the TF1 regression graph and start a live session."""
        self.State = tf.placeholder(tf.float32, [None, self.state_dim])
        self.Target = tf.placeholder(tf.float32, [None, 1])
        # Weights and bias both start at one.
        self.W = tf.Variable(tf.ones([self.state_dim, 1]))
        self.b = tf.Variable(tf.ones([1]))
        self.y_ = tf.add(tf.matmul(self.State, self.W), self.b)
        # Mean squared error against the supplied target.
        self.cost = tf.reduce_mean(tf.square(self.y_ - self.Target))
        self.training_step = tf.train.GradientDescentOptimizer(
            self.alpha).minimize(self.cost)
        # Start a TensorFlow session and initialize the variables.
        self.sess = tf.Session()
        self.sess.run(tf.global_variables_initializer())

    def predict(self, state):
        """Return the model's output for ``state``."""
        return self.sess.run(self.y_, feed_dict={self.State: state})

    def train(self, state, target):
        """Run one gradient-descent step on ``(state, target)``."""
        feed = {self.State: state, self.Target: target}
        self.sess.run(self.training_step, feed_dict=feed)

    def model_cost(self, state, target):
        """Return the current squared-error cost for ``(state, target)``."""
        feed = {self.State: state, self.Target: target}
        return self.sess.run(self.cost, feed_dict=feed)

    def model_error(self, pred_y, test_y):
        """Return the mean squared error between predictions and truth."""
        return tf.reduce_mean(tf.square(pred_y - test_y))

    def _take_action(self, state):
        """Pick an action for ``state`` with an epsilon-greedy policy.

        Args:
            state: (tuple)
        Returns:
            action: (float)
        """
        # Explore with probability epsilon, otherwise exploit the model.
        if np.random.rand() < self.epsilon:
            return np.random.choice(list(range(self.n_actions)))
        q_values = self.predict(np.reshape(state, (1, self.state_dim)))
        return np.argmax(q_values)

    def _learn(self, state, action, reward, next_state):
        """Update the model after one environment transition.

        Args:
            state: (tuple) current state of the environment.
            action: (float) action taken by the agent.
            reward: (float) reward produced by the environment.
            next_state: (tuple) next state of the environment.

        NOTE(review): the model is trained on its own prediction, so
        ``action``/``reward``/``next_state`` never influence the update —
        looks like a placeholder implementation; confirm intent.
        """
        # Decay exploration until the floor is reached.
        if self.epsilon > self.epsilon_min:
            self.epsilon *= self.epsilon_decay
        reshaped = np.reshape(state, (1, self.state_dim))
        self.train(reshaped, self.predict(reshaped))
| 32.452675
| 78
| 0.598909
| 958
| 7,886
| 4.807933
| 0.179541
| 0.058619
| 0.048632
| 0.027356
| 0.929223
| 0.929223
| 0.929223
| 0.929223
| 0.929223
| 0.929223
| 0
| 0.005708
| 0.28912
| 7,886
| 242
| 79
| 32.586777
| 0.815912
| 0.248034
| 0
| 0.903846
| 0
| 0
| 0.069069
| 0.00784
| 0
| 0
| 0
| 0
| 0.038462
| 1
| 0.153846
| false
| 0
| 0.038462
| 0
| 0.307692
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
5d256101942915858612916efddc55c863dbaab1
| 12,262
|
py
|
Python
|
tests/test_observable/test_pausablebuffered.py
|
AlexMost/RxPY
|
05cb14c72806dc41e243789c05f498dede11cebd
|
[
"ECL-2.0",
"Apache-2.0"
] | null | null | null |
tests/test_observable/test_pausablebuffered.py
|
AlexMost/RxPY
|
05cb14c72806dc41e243789c05f498dede11cebd
|
[
"ECL-2.0",
"Apache-2.0"
] | null | null | null |
tests/test_observable/test_pausablebuffered.py
|
AlexMost/RxPY
|
05cb14c72806dc41e243789c05f498dede11cebd
|
[
"ECL-2.0",
"Apache-2.0"
] | 1
|
2021-11-04T11:13:49.000Z
|
2021-11-04T11:13:49.000Z
|
import unittest
from rx import Observable
from rx.testing import TestScheduler, ReactiveTest, is_prime, MockDisposable
from rx.disposables import Disposable, SerialDisposable
from rx.subjects import Subject
# Convenience aliases for the ReactiveTest factory methods and time markers,
# so the tests below can write e.g. on_next(210, 2) instead of the full path.
on_next = ReactiveTest.on_next
on_completed = ReactiveTest.on_completed
on_error = ReactiveTest.on_error
subscribe = ReactiveTest.subscribe
subscribed = ReactiveTest.subscribed
disposed = ReactiveTest.disposed
created = ReactiveTest.created
class TestPausable_buffered(unittest.TestCase):
    """Tests for the ``pausable_buffered`` operator on a virtual scheduler.

    Each test drives a hot observable and a pause controller at fixed
    virtual times, then checks exactly which (time, value) messages the
    observer recorded.
    """

    def test_paused_no_skip(self):
        """A pause window (205-209) before any element arrives drops nothing."""
        subscription = [None]
        scheduler = TestScheduler()
        controller = Subject()
        results = scheduler.create_observer()
        xs = scheduler.create_hot_observable(
            on_next(150, 1),
            on_next(210, 2),
            on_next(230, 3),
            on_next(301, 4),
            on_next(350, 5),
            on_next(399, 6),
            on_completed(500)
        )

        def action0(scheduler, state):
            subscription[0] = xs.pausable_buffered(controller).subscribe(results)
            controller.on_next(True)
        scheduler.schedule_absolute(200, action0)

        def action1(scheduler, state):
            controller.on_next(False)
        scheduler.schedule_absolute(205, action1)

        def action2(scheduler, state):
            controller.on_next(True)
        scheduler.schedule_absolute(209, action2)

        def action3(scheduler, state):
            subscription[0].dispose()
        scheduler.schedule_absolute(1000, action3)

        scheduler.start()
        results.messages.assert_equal(
            on_next(210, 2),
            on_next(230, 3),
            on_next(301, 4),
            on_next(350, 5),
            on_next(399, 6),
            on_completed(500)
        )

    def test_paused_skips(self):
        """Elements arriving during the 300-400 pause are replayed at 400."""
        subscription = [None]
        scheduler = TestScheduler()
        controller = Subject()
        results = scheduler.create_observer()
        xs = scheduler.create_hot_observable(
            on_next(150, 1),
            on_next(210, 2),
            on_next(230, 3),
            on_next(301, 4),
            on_next(350, 5),
            on_next(399, 6),
            on_completed(500)
        )

        def action0(scheduler, state):
            subscription[0] = xs.pausable_buffered(controller).subscribe(results)
            controller.on_next(True)
        scheduler.schedule_absolute(200, action0)

        def action1(scheduler, state):
            controller.on_next(False)
        scheduler.schedule_absolute(300, action1)

        def action2(scheduler, state):
            controller.on_next(True)
        scheduler.schedule_absolute(400, action2)

        def action3(scheduler, state):
            subscription[0].dispose()
        scheduler.schedule_absolute(1000, action3)

        scheduler.start()
        results.messages.assert_equal(
            on_next(210, 2),
            on_next(230, 3),
            on_next(400, 4),
            on_next(400, 5),
            on_next(400, 6),
            on_completed(500)
        )

    def test_paused_error(self):
        """An error propagates immediately and terminates the sequence."""
        subscription = [None]
        err = Exception()
        scheduler = TestScheduler()
        controller = Subject()
        results = scheduler.create_observer()
        xs = scheduler.create_hot_observable(
            on_next(150, 1),
            on_next(210, 2),
            on_error(230, err),
            on_next(301, 4),
            on_next(350, 5),
            on_next(399, 6),
            on_completed(500)
        )

        def action0(scheduler, state):
            subscription[0] = xs.pausable_buffered(controller).subscribe(results)
            controller.on_next(True)
        scheduler.schedule_absolute(200, action0)

        def action1(scheduler, state):
            controller.on_next(False)
        scheduler.schedule_absolute(300, action1)

        def action2(scheduler, state):
            controller.on_next(True)
        scheduler.schedule_absolute(400, action2)

        def action3(scheduler, state):
            subscription[0].dispose()
        scheduler.schedule_absolute(1000, action3)

        scheduler.start()
        results.messages.assert_equal(
            on_next(210, 2),
            on_error(230, err)
        )

    def test_paused_skip_initial_elements(self):
        """Starting paused buffers elements until the first resume at 280."""
        subscription = [None]
        scheduler = TestScheduler()
        controller = Subject()
        results = scheduler.create_observer()
        xs = scheduler.create_hot_observable(
            on_next(150, 1),
            on_next(230, 2),
            on_next(270, 3),
            on_completed(400)
        )

        def action1(scheduler, state):
            subscription[0] = xs.pausable_buffered(controller).subscribe(results)
            controller.on_next(False)
        scheduler.schedule_absolute(200, action1)

        def action2(scheduler, state):
            controller.on_next(True)
        scheduler.schedule_absolute(280, action2)

        def action3(scheduler, state):
            subscription[0].dispose()
        scheduler.schedule_absolute(1000, action3)

        scheduler.start()
        results.messages.assert_equal(
            on_next(280, 2),
            on_next(280, 3),
            on_completed(400)
        )

    def test_paused_with_observable_controller_and_pause_and_unpause(self):
        """An observable controller and explicit pause()/resume() compose."""
        subscription = [None]
        scheduler = TestScheduler()
        results = scheduler.create_observer()
        xs = scheduler.create_hot_observable(
            on_next(150, 1),
            on_next(210, 2),
            on_next(230, 3),
            on_next(301, 4),
            on_next(350, 5),
            on_next(399, 6),
            on_next(450, 7),
            on_next(470, 8),
            on_completed(500)
        )
        controller = scheduler.create_hot_observable(
            on_next(201, True),
            on_next(300, False),
            on_next(400, True)
        )
        pausable_buffered = xs.pausable_buffered(controller)

        def action1(scheduler, state):
            subscription[0] = pausable_buffered.subscribe(results)
        scheduler.schedule_absolute(200, action1)

        def action2(scheduler, state):
            pausable_buffered.pause()
        scheduler.schedule_absolute(460, action2)

        def action3(scheduler, state):
            pausable_buffered.resume()
        scheduler.schedule_absolute(480, action3)

        def action4(scheduler, state):
            subscription[0].dispose()
        scheduler.schedule_absolute(1000, action4)

        scheduler.start()
        results.messages.assert_equal(
            on_next(210, 2),
            on_next(230, 3),
            on_next(400, 4),
            on_next(400, 5),
            on_next(400, 6),
            on_next(450, 7),
            on_next(480, 8),
            on_completed(500)
        )

    def test_paused_with_immediate_unpause(self):
        """A controller that is already True passes everything through."""
        subscription = [None]
        scheduler = TestScheduler()
        results = scheduler.create_observer()
        xs = scheduler.create_hot_observable(
            on_next(150, 1),
            on_next(210, 2),
            on_completed(500)
        )
        controller = Observable.just(True)
        pausable_buffered = xs.pausable_buffered(controller)

        def action1(scheduler, state):
            subscription[0] = pausable_buffered.subscribe(results)
        scheduler.schedule_absolute(200, action1)

        scheduler.start()
        results.messages.assert_equal(
            on_next(210, 2),
            on_completed(500)
        )

    def test_paused_when_finishing(self):
        """Pausing at 460 and never resuming suppresses the tail and completion."""
        subscription = [None]
        scheduler = TestScheduler()
        results = scheduler.create_observer()
        xs = scheduler.create_hot_observable(
            on_next(150, 1),
            on_next(210, 2),
            on_next(230, 3),
            on_next(301, 4),
            on_next(350, 5),
            on_next(399, 6),
            on_next(450, 7),
            on_next(470, 8),
            on_completed(500)
        )
        controller = scheduler.create_hot_observable(
            on_next(201, True),
            on_next(300, False),
            on_next(400, True)
        )
        pausable_buffered = xs.pausable_buffered(controller)

        def action1(scheduler, state):
            subscription[0] = pausable_buffered.subscribe(results)
        scheduler.schedule_absolute(200, action1)

        def action2(scheduler, state):
            pausable_buffered.pause()
        scheduler.schedule_absolute(460, action2)

        def action3(scheduler, state):
            subscription[0].dispose()
        scheduler.schedule_absolute(1000, action3)

        scheduler.start()
        results.messages.assert_equal(
            on_next(210, 2),
            on_next(230, 3),
            on_next(400, 4),
            on_next(400, 5),
            on_next(400, 6),
            on_next(450, 7)
        )

    def test_paused_with_observable_controller_and_pause_and_unpause_after_end(self):
        """Buffered elements flush when the resume arrives after completion."""
        scheduler = TestScheduler()
        results = scheduler.create_observer()
        xs = scheduler.create_hot_observable(
            on_next(150, 1),
            on_next(210, 2),
            on_next(230, 3),
            on_next(301, 4),
            on_next(350, 5),
            on_next(399, 6),
            on_next(450, 7),
            on_next(470, 8),
            on_completed(500)
        )
        controller = scheduler.create_hot_observable(
            on_next(201, True),
            on_next(300, False),
            on_next(600, True)
        )

        def create():
            return xs.pausable_buffered(controller)
        results = scheduler.start(create)

        results.messages.assert_equal(
            on_next(210, 2),
            on_next(230, 3),
            on_next(600, 4),
            on_next(600, 5),
            on_next(600, 6),
            on_next(600, 7),
            on_next(600, 8),
            on_completed(600)
        )

    def test_paused_with_observable_controller_and_pause_and_unpause_after_error(self):
        """Buffered elements flush, then the buffered error is delivered."""
        error = Exception()
        scheduler = TestScheduler()
        results = scheduler.create_observer()
        xs = scheduler.create_hot_observable(
            on_next(150, 1),
            on_next(210, 2),
            on_next(230, 3),
            on_next(301, 4),
            on_next(350, 5),
            on_next(399, 6),
            on_next(450, 7),
            on_next(470, 8),
            on_error(500, error)
        )
        controller = scheduler.create_hot_observable(
            on_next(201, True),
            on_next(300, False),
            on_next(600, True)
        )

        def create():
            return xs.pausable_buffered(controller)
        results = scheduler.start(create=create)

        results.messages.assert_equal(
            on_next(210, 2),
            on_next(230, 3),
            on_next(600, 4),
            on_next(600, 5),
            on_next(600, 6),
            on_next(600, 7),
            on_next(600, 8),
            on_error(600, error)
        )

    def test_paused_with_state_change_in_subscriber(self):
        """Pausing from inside the subscriber defers the following elements."""
        scheduler = TestScheduler()
        results = scheduler.create_observer()
        xs = scheduler.create_hot_observable(
            on_next(150, 1),
            on_next(210, 2),
            on_next(250, 3),
            on_next(270, 4),
            on_next(330, 5),
            on_completed(500)
        )
        controller = Subject()
        pausable_buffered = xs.pausable_buffered(controller)

        def action1(scheduler, state):
            def on_next(value):
                # Forward the value, then pause and schedule a resume
                # 100 virtual ticks later.
                results.on_next(value)
                controller.on_next(False)

                def action2(scheduler, state):
                    controller.on_next(True)
                scheduler.schedule_relative(100, action2)

            subscription = pausable_buffered.subscribe(
                on_next, results.on_error, results.on_completed)
            controller.on_next(True)
        scheduler.schedule_absolute(200, action1)

        scheduler.start()
        results.messages.assert_equal(
            on_next(210, 2),
            on_next(310, 3),
            on_next(310, 4),
            on_next(410, 5),
            on_completed(500)
        )
| 27.617117
| 103
| 0.566955
| 1,311
| 12,262
| 5.067124
| 0.089245
| 0.121933
| 0.090321
| 0.027096
| 0.845702
| 0.841186
| 0.833659
| 0.823122
| 0.8168
| 0.803101
| 0
| 0.080659
| 0.336731
| 12,262
| 443
| 104
| 27.679458
| 0.736137
| 0
| 0
| 0.763848
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.029155
| 1
| 0.110787
| false
| 0
| 0.014577
| 0.005831
| 0.134111
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
5d41df2846a991e51bcc6a6fb0422413c25dbf51
| 27,090
|
py
|
Python
|
catkin_ws/install/lib/python2.7/dist-packages/sick_safetyscanners/msg/_ApplicationDataMsg.py
|
JMur2/MarquetteRMC2021
|
3f139b3278927e82c988306b4b75449f0f111027
|
[
"Apache-2.0"
] | 1
|
2022-02-25T22:14:27.000Z
|
2022-02-25T22:14:27.000Z
|
catkin_ws/install/lib/python2.7/dist-packages/sick_safetyscanners/msg/_ApplicationDataMsg.py
|
JMur2/MarquetteRMC2021
|
3f139b3278927e82c988306b4b75449f0f111027
|
[
"Apache-2.0"
] | null | null | null |
catkin_ws/install/lib/python2.7/dist-packages/sick_safetyscanners/msg/_ApplicationDataMsg.py
|
JMur2/MarquetteRMC2021
|
3f139b3278927e82c988306b4b75449f0f111027
|
[
"Apache-2.0"
] | null | null | null |
# This Python file uses the following encoding: utf-8
"""autogenerated by genpy from sick_safetyscanners/ApplicationDataMsg.msg. Do not edit."""
import codecs
import sys
python3 = True if sys.hexversion > 0x03000000 else False
import genpy
import struct
import sick_safetyscanners.msg
class ApplicationDataMsg(genpy.Message):
_md5sum = "e2c1cd2050a13f8b7c83b35c17583a56"
_type = "sick_safetyscanners/ApplicationDataMsg"
_has_header = False # flag to mark the presence of a Header object
_full_text = """ApplicationInputsMsg inputs
ApplicationOutputsMsg outputs
================================================================================
MSG: sick_safetyscanners/ApplicationInputsMsg
bool[] unsafe_inputs_input_sources
bool[] unsafe_inputs_flags
uint16[] monitoring_case_number_inputs
bool[] monitoring_case_number_inputs_flags
int16 linear_velocity_inputs_velocity_0
bool linear_velocity_inputs_velocity_0_valid
bool linear_velocity_inputs_velocity_0_transmitted_safely
int16 linear_velocity_inputs_velocity_1
bool linear_velocity_inputs_velocity_1_valid
bool linear_velocity_inputs_velocity_1_transmitted_safely
uint8 sleep_mode_input
================================================================================
MSG: sick_safetyscanners/ApplicationOutputsMsg
bool[] evaluation_path_outputs_eval_out
bool[] evaluation_path_outputs_is_safe
bool[] evaluation_path_outputs_is_valid
uint16[] monitoring_case_number_outputs
bool[] monitoring_case_number_outputs_flags
uint8 sleep_mode_output
bool sleep_mode_output_valid
bool error_flag_contamination_warning
bool error_flag_contamination_error
bool error_flag_manipulation_error
bool error_flag_glare
bool error_flag_reference_contour_intruded
bool error_flag_critical_error
bool error_flags_are_valid
int16 linear_velocity_outputs_velocity_0
bool linear_velocity_outputs_velocity_0_valid
bool linear_velocity_outputs_velocity_0_transmitted_safely
int16 linear_velocity_outputs_velocity_1
bool linear_velocity_outputs_velocity_1_valid
bool linear_velocity_outputs_velocity_1_transmitted_safely
int16[] resulting_velocity
bool[] resulting_velocity_flags
"""
__slots__ = ['inputs','outputs']
_slot_types = ['sick_safetyscanners/ApplicationInputsMsg','sick_safetyscanners/ApplicationOutputsMsg']
def __init__(self, *args, **kwds):
"""
Constructor. Any message fields that are implicitly/explicitly
set to None will be assigned a default value. The recommend
use is keyword arguments as this is more robust to future message
changes. You cannot mix in-order arguments and keyword arguments.
The available fields are:
inputs,outputs
:param args: complete set of field values, in .msg order
:param kwds: use keyword arguments corresponding to message field names
to set specific fields.
"""
if args or kwds:
super(ApplicationDataMsg, self).__init__(*args, **kwds)
# message fields cannot be None, assign default values for those that are
if self.inputs is None:
self.inputs = sick_safetyscanners.msg.ApplicationInputsMsg()
if self.outputs is None:
self.outputs = sick_safetyscanners.msg.ApplicationOutputsMsg()
else:
self.inputs = sick_safetyscanners.msg.ApplicationInputsMsg()
self.outputs = sick_safetyscanners.msg.ApplicationOutputsMsg()
def _get_types(self):
"""
internal API method
"""
return self._slot_types
def serialize(self, buff):
"""
serialize message into buffer
:param buff: buffer, ``StringIO``
"""
try:
length = len(self.inputs.unsafe_inputs_input_sources)
buff.write(_struct_I.pack(length))
pattern = '<%sB'%length
buff.write(struct.Struct(pattern).pack(*self.inputs.unsafe_inputs_input_sources))
length = len(self.inputs.unsafe_inputs_flags)
buff.write(_struct_I.pack(length))
pattern = '<%sB'%length
buff.write(struct.Struct(pattern).pack(*self.inputs.unsafe_inputs_flags))
length = len(self.inputs.monitoring_case_number_inputs)
buff.write(_struct_I.pack(length))
pattern = '<%sH'%length
buff.write(struct.Struct(pattern).pack(*self.inputs.monitoring_case_number_inputs))
length = len(self.inputs.monitoring_case_number_inputs_flags)
buff.write(_struct_I.pack(length))
pattern = '<%sB'%length
buff.write(struct.Struct(pattern).pack(*self.inputs.monitoring_case_number_inputs_flags))
_x = self
buff.write(_get_struct_h2Bh3B().pack(_x.inputs.linear_velocity_inputs_velocity_0, _x.inputs.linear_velocity_inputs_velocity_0_valid, _x.inputs.linear_velocity_inputs_velocity_0_transmitted_safely, _x.inputs.linear_velocity_inputs_velocity_1, _x.inputs.linear_velocity_inputs_velocity_1_valid, _x.inputs.linear_velocity_inputs_velocity_1_transmitted_safely, _x.inputs.sleep_mode_input))
length = len(self.outputs.evaluation_path_outputs_eval_out)
buff.write(_struct_I.pack(length))
pattern = '<%sB'%length
buff.write(struct.Struct(pattern).pack(*self.outputs.evaluation_path_outputs_eval_out))
length = len(self.outputs.evaluation_path_outputs_is_safe)
buff.write(_struct_I.pack(length))
pattern = '<%sB'%length
buff.write(struct.Struct(pattern).pack(*self.outputs.evaluation_path_outputs_is_safe))
length = len(self.outputs.evaluation_path_outputs_is_valid)
buff.write(_struct_I.pack(length))
pattern = '<%sB'%length
buff.write(struct.Struct(pattern).pack(*self.outputs.evaluation_path_outputs_is_valid))
length = len(self.outputs.monitoring_case_number_outputs)
buff.write(_struct_I.pack(length))
pattern = '<%sH'%length
buff.write(struct.Struct(pattern).pack(*self.outputs.monitoring_case_number_outputs))
length = len(self.outputs.monitoring_case_number_outputs_flags)
buff.write(_struct_I.pack(length))
pattern = '<%sB'%length
buff.write(struct.Struct(pattern).pack(*self.outputs.monitoring_case_number_outputs_flags))
_x = self
buff.write(_get_struct_9Bh2Bh2B().pack(_x.outputs.sleep_mode_output, _x.outputs.sleep_mode_output_valid, _x.outputs.error_flag_contamination_warning, _x.outputs.error_flag_contamination_error, _x.outputs.error_flag_manipulation_error, _x.outputs.error_flag_glare, _x.outputs.error_flag_reference_contour_intruded, _x.outputs.error_flag_critical_error, _x.outputs.error_flags_are_valid, _x.outputs.linear_velocity_outputs_velocity_0, _x.outputs.linear_velocity_outputs_velocity_0_valid, _x.outputs.linear_velocity_outputs_velocity_0_transmitted_safely, _x.outputs.linear_velocity_outputs_velocity_1, _x.outputs.linear_velocity_outputs_velocity_1_valid, _x.outputs.linear_velocity_outputs_velocity_1_transmitted_safely))
length = len(self.outputs.resulting_velocity)
buff.write(_struct_I.pack(length))
pattern = '<%sh'%length
buff.write(struct.Struct(pattern).pack(*self.outputs.resulting_velocity))
length = len(self.outputs.resulting_velocity_flags)
buff.write(_struct_I.pack(length))
pattern = '<%sB'%length
buff.write(struct.Struct(pattern).pack(*self.outputs.resulting_velocity_flags))
except struct.error as se: self._check_types(struct.error("%s: '%s' when writing '%s'" % (type(se), str(se), str(locals().get('_x', self)))))
except TypeError as te: self._check_types(ValueError("%s: '%s' when writing '%s'" % (type(te), str(te), str(locals().get('_x', self)))))
def deserialize(self, str):
  """
  unpack serialized message in str into this message instance
  :param str: byte array of serialized message, ``str``

  Wire layout (little-endian): every variable-length array is a uint32
  element count followed by its packed elements; fixed-width scalar runs
  use the pre-compiled Structs from _get_struct_h2Bh3B / _get_struct_9Bh2Bh2B.
  """
  # Tag decode errors with this message's type so failures report context.
  codecs.lookup_error("rosmsg").msg_type = self._type
  try:
    # Lazily create the nested sub-messages if the caller left them unset.
    if self.inputs is None:
      self.inputs = sick_safetyscanners.msg.ApplicationInputsMsg()
    if self.outputs is None:
      self.outputs = sick_safetyscanners.msg.ApplicationOutputsMsg()
    end = 0
    # inputs.unsafe_inputs_input_sources: uint8 array, coerced to bools.
    start = end
    end += 4
    (length,) = _struct_I.unpack(str[start:end])
    pattern = '<%sB'%length
    start = end
    s = struct.Struct(pattern)
    end += s.size
    self.inputs.unsafe_inputs_input_sources = s.unpack(str[start:end])
    self.inputs.unsafe_inputs_input_sources = list(map(bool, self.inputs.unsafe_inputs_input_sources))
    # inputs.unsafe_inputs_flags: uint8 array, coerced to bools.
    start = end
    end += 4
    (length,) = _struct_I.unpack(str[start:end])
    pattern = '<%sB'%length
    start = end
    s = struct.Struct(pattern)
    end += s.size
    self.inputs.unsafe_inputs_flags = s.unpack(str[start:end])
    self.inputs.unsafe_inputs_flags = list(map(bool, self.inputs.unsafe_inputs_flags))
    # inputs.monitoring_case_number_inputs: uint16 array.
    start = end
    end += 4
    (length,) = _struct_I.unpack(str[start:end])
    pattern = '<%sH'%length
    start = end
    s = struct.Struct(pattern)
    end += s.size
    self.inputs.monitoring_case_number_inputs = s.unpack(str[start:end])
    # inputs.monitoring_case_number_inputs_flags: uint8 array, coerced to bools.
    start = end
    end += 4
    (length,) = _struct_I.unpack(str[start:end])
    pattern = '<%sB'%length
    start = end
    s = struct.Struct(pattern)
    end += s.size
    self.inputs.monitoring_case_number_inputs_flags = s.unpack(str[start:end])
    self.inputs.monitoring_case_number_inputs_flags = list(map(bool, self.inputs.monitoring_case_number_inputs_flags))
    # Fixed 9-byte run: two int16 velocities with two flag bytes each,
    # plus the sleep-mode byte.
    _x = self
    start = end
    end += 9
    (_x.inputs.linear_velocity_inputs_velocity_0, _x.inputs.linear_velocity_inputs_velocity_0_valid, _x.inputs.linear_velocity_inputs_velocity_0_transmitted_safely, _x.inputs.linear_velocity_inputs_velocity_1, _x.inputs.linear_velocity_inputs_velocity_1_valid, _x.inputs.linear_velocity_inputs_velocity_1_transmitted_safely, _x.inputs.sleep_mode_input,) = _get_struct_h2Bh3B().unpack(str[start:end])
    self.inputs.linear_velocity_inputs_velocity_0_valid = bool(self.inputs.linear_velocity_inputs_velocity_0_valid)
    self.inputs.linear_velocity_inputs_velocity_0_transmitted_safely = bool(self.inputs.linear_velocity_inputs_velocity_0_transmitted_safely)
    self.inputs.linear_velocity_inputs_velocity_1_valid = bool(self.inputs.linear_velocity_inputs_velocity_1_valid)
    self.inputs.linear_velocity_inputs_velocity_1_transmitted_safely = bool(self.inputs.linear_velocity_inputs_velocity_1_transmitted_safely)
    # outputs.evaluation_path_outputs_eval_out: uint8 array, coerced to bools.
    start = end
    end += 4
    (length,) = _struct_I.unpack(str[start:end])
    pattern = '<%sB'%length
    start = end
    s = struct.Struct(pattern)
    end += s.size
    self.outputs.evaluation_path_outputs_eval_out = s.unpack(str[start:end])
    self.outputs.evaluation_path_outputs_eval_out = list(map(bool, self.outputs.evaluation_path_outputs_eval_out))
    # outputs.evaluation_path_outputs_is_safe: uint8 array, coerced to bools.
    start = end
    end += 4
    (length,) = _struct_I.unpack(str[start:end])
    pattern = '<%sB'%length
    start = end
    s = struct.Struct(pattern)
    end += s.size
    self.outputs.evaluation_path_outputs_is_safe = s.unpack(str[start:end])
    self.outputs.evaluation_path_outputs_is_safe = list(map(bool, self.outputs.evaluation_path_outputs_is_safe))
    # outputs.evaluation_path_outputs_is_valid: uint8 array, coerced to bools.
    start = end
    end += 4
    (length,) = _struct_I.unpack(str[start:end])
    pattern = '<%sB'%length
    start = end
    s = struct.Struct(pattern)
    end += s.size
    self.outputs.evaluation_path_outputs_is_valid = s.unpack(str[start:end])
    self.outputs.evaluation_path_outputs_is_valid = list(map(bool, self.outputs.evaluation_path_outputs_is_valid))
    # outputs.monitoring_case_number_outputs: uint16 array.
    start = end
    end += 4
    (length,) = _struct_I.unpack(str[start:end])
    pattern = '<%sH'%length
    start = end
    s = struct.Struct(pattern)
    end += s.size
    self.outputs.monitoring_case_number_outputs = s.unpack(str[start:end])
    # outputs.monitoring_case_number_outputs_flags: uint8 array, coerced to bools.
    start = end
    end += 4
    (length,) = _struct_I.unpack(str[start:end])
    pattern = '<%sB'%length
    start = end
    s = struct.Struct(pattern)
    end += s.size
    self.outputs.monitoring_case_number_outputs_flags = s.unpack(str[start:end])
    self.outputs.monitoring_case_number_outputs_flags = list(map(bool, self.outputs.monitoring_case_number_outputs_flags))
    # Fixed 17-byte run: sleep-mode byte + error flags + output velocities.
    _x = self
    start = end
    end += 17
    (_x.outputs.sleep_mode_output, _x.outputs.sleep_mode_output_valid, _x.outputs.error_flag_contamination_warning, _x.outputs.error_flag_contamination_error, _x.outputs.error_flag_manipulation_error, _x.outputs.error_flag_glare, _x.outputs.error_flag_reference_contour_intruded, _x.outputs.error_flag_critical_error, _x.outputs.error_flags_are_valid, _x.outputs.linear_velocity_outputs_velocity_0, _x.outputs.linear_velocity_outputs_velocity_0_valid, _x.outputs.linear_velocity_outputs_velocity_0_transmitted_safely, _x.outputs.linear_velocity_outputs_velocity_1, _x.outputs.linear_velocity_outputs_velocity_1_valid, _x.outputs.linear_velocity_outputs_velocity_1_transmitted_safely,) = _get_struct_9Bh2Bh2B().unpack(str[start:end])
    self.outputs.sleep_mode_output_valid = bool(self.outputs.sleep_mode_output_valid)
    self.outputs.error_flag_contamination_warning = bool(self.outputs.error_flag_contamination_warning)
    self.outputs.error_flag_contamination_error = bool(self.outputs.error_flag_contamination_error)
    self.outputs.error_flag_manipulation_error = bool(self.outputs.error_flag_manipulation_error)
    self.outputs.error_flag_glare = bool(self.outputs.error_flag_glare)
    self.outputs.error_flag_reference_contour_intruded = bool(self.outputs.error_flag_reference_contour_intruded)
    self.outputs.error_flag_critical_error = bool(self.outputs.error_flag_critical_error)
    self.outputs.error_flags_are_valid = bool(self.outputs.error_flags_are_valid)
    self.outputs.linear_velocity_outputs_velocity_0_valid = bool(self.outputs.linear_velocity_outputs_velocity_0_valid)
    self.outputs.linear_velocity_outputs_velocity_0_transmitted_safely = bool(self.outputs.linear_velocity_outputs_velocity_0_transmitted_safely)
    self.outputs.linear_velocity_outputs_velocity_1_valid = bool(self.outputs.linear_velocity_outputs_velocity_1_valid)
    self.outputs.linear_velocity_outputs_velocity_1_transmitted_safely = bool(self.outputs.linear_velocity_outputs_velocity_1_transmitted_safely)
    # outputs.resulting_velocity: int16 array.
    start = end
    end += 4
    (length,) = _struct_I.unpack(str[start:end])
    pattern = '<%sh'%length
    start = end
    s = struct.Struct(pattern)
    end += s.size
    self.outputs.resulting_velocity = s.unpack(str[start:end])
    # outputs.resulting_velocity_flags: uint8 array, coerced to bools.
    start = end
    end += 4
    (length,) = _struct_I.unpack(str[start:end])
    pattern = '<%sB'%length
    start = end
    s = struct.Struct(pattern)
    end += s.size
    self.outputs.resulting_velocity_flags = s.unpack(str[start:end])
    self.outputs.resulting_velocity_flags = list(map(bool, self.outputs.resulting_velocity_flags))
    return self
  except struct.error as e:
    raise genpy.DeserializationError(e) # most likely buffer underfill
def serialize_numpy(self, buff, numpy):
  """
  serialize message with numpy array types into buffer
  :param buff: buffer, ``StringIO``
  :param numpy: numpy python module

  Each variable-length array is written as a uint32 element count followed
  by the raw little-endian bytes of the numpy array; fixed-width scalar
  runs are packed with the pre-compiled Structs.

  Fix: ``ndarray.tostring()`` was deprecated in NumPy 1.19 and removed in
  1.23 -- ``tobytes()`` (available since NumPy 1.9) is the byte-identical
  replacement.  The dead ``pattern = ...`` locals emitted by the message
  generator were removed; the pattern is never used on the numpy path.
  """
  try:
    length = len(self.inputs.unsafe_inputs_input_sources)
    buff.write(_struct_I.pack(length))
    buff.write(self.inputs.unsafe_inputs_input_sources.tobytes())
    length = len(self.inputs.unsafe_inputs_flags)
    buff.write(_struct_I.pack(length))
    buff.write(self.inputs.unsafe_inputs_flags.tobytes())
    length = len(self.inputs.monitoring_case_number_inputs)
    buff.write(_struct_I.pack(length))
    buff.write(self.inputs.monitoring_case_number_inputs.tobytes())
    length = len(self.inputs.monitoring_case_number_inputs_flags)
    buff.write(_struct_I.pack(length))
    buff.write(self.inputs.monitoring_case_number_inputs_flags.tobytes())
    # Fixed 9-byte run: input velocities, their flags and the sleep mode.
    _x = self
    buff.write(_get_struct_h2Bh3B().pack(_x.inputs.linear_velocity_inputs_velocity_0, _x.inputs.linear_velocity_inputs_velocity_0_valid, _x.inputs.linear_velocity_inputs_velocity_0_transmitted_safely, _x.inputs.linear_velocity_inputs_velocity_1, _x.inputs.linear_velocity_inputs_velocity_1_valid, _x.inputs.linear_velocity_inputs_velocity_1_transmitted_safely, _x.inputs.sleep_mode_input))
    length = len(self.outputs.evaluation_path_outputs_eval_out)
    buff.write(_struct_I.pack(length))
    buff.write(self.outputs.evaluation_path_outputs_eval_out.tobytes())
    length = len(self.outputs.evaluation_path_outputs_is_safe)
    buff.write(_struct_I.pack(length))
    buff.write(self.outputs.evaluation_path_outputs_is_safe.tobytes())
    length = len(self.outputs.evaluation_path_outputs_is_valid)
    buff.write(_struct_I.pack(length))
    buff.write(self.outputs.evaluation_path_outputs_is_valid.tobytes())
    length = len(self.outputs.monitoring_case_number_outputs)
    buff.write(_struct_I.pack(length))
    buff.write(self.outputs.monitoring_case_number_outputs.tobytes())
    length = len(self.outputs.monitoring_case_number_outputs_flags)
    buff.write(_struct_I.pack(length))
    buff.write(self.outputs.monitoring_case_number_outputs_flags.tobytes())
    # Fixed 17-byte run: sleep-mode byte, error flags and output velocities.
    _x = self
    buff.write(_get_struct_9Bh2Bh2B().pack(_x.outputs.sleep_mode_output, _x.outputs.sleep_mode_output_valid, _x.outputs.error_flag_contamination_warning, _x.outputs.error_flag_contamination_error, _x.outputs.error_flag_manipulation_error, _x.outputs.error_flag_glare, _x.outputs.error_flag_reference_contour_intruded, _x.outputs.error_flag_critical_error, _x.outputs.error_flags_are_valid, _x.outputs.linear_velocity_outputs_velocity_0, _x.outputs.linear_velocity_outputs_velocity_0_valid, _x.outputs.linear_velocity_outputs_velocity_0_transmitted_safely, _x.outputs.linear_velocity_outputs_velocity_1, _x.outputs.linear_velocity_outputs_velocity_1_valid, _x.outputs.linear_velocity_outputs_velocity_1_transmitted_safely))
    length = len(self.outputs.resulting_velocity)
    buff.write(_struct_I.pack(length))
    buff.write(self.outputs.resulting_velocity.tobytes())
    length = len(self.outputs.resulting_velocity_flags)
    buff.write(_struct_I.pack(length))
    buff.write(self.outputs.resulting_velocity_flags.tobytes())
  except struct.error as se: self._check_types(struct.error("%s: '%s' when writing '%s'" % (type(se), str(se), str(locals().get('_x', self)))))
  except TypeError as te: self._check_types(ValueError("%s: '%s' when writing '%s'" % (type(te), str(te), str(locals().get('_x', self)))))
def deserialize_numpy(self, str, numpy):
  """
  unpack serialized message in str into this message instance using numpy for array types
  :param str: byte array of serialized message, ``str``
  :param numpy: numpy python module

  Fix: the ``numpy.bool`` alias was deprecated in NumPy 1.20 and removed
  in 1.24; ``numpy.bool_`` is the equivalent 1-byte scalar dtype and has
  existed throughout, so this change is backward compatible.
  """
  # Tag decode errors with this message's type so failures report context.
  codecs.lookup_error("rosmsg").msg_type = self._type
  try:
    # Lazily create the nested sub-messages if the caller left them unset.
    if self.inputs is None:
      self.inputs = sick_safetyscanners.msg.ApplicationInputsMsg()
    if self.outputs is None:
      self.outputs = sick_safetyscanners.msg.ApplicationOutputsMsg()
    end = 0
    # inputs.unsafe_inputs_input_sources: uint8 array, coerced to bools.
    start = end
    end += 4
    (length,) = _struct_I.unpack(str[start:end])
    pattern = '<%sB'%length
    start = end
    s = struct.Struct(pattern)
    end += s.size
    self.inputs.unsafe_inputs_input_sources = numpy.frombuffer(str[start:end], dtype=numpy.bool_, count=length)
    self.inputs.unsafe_inputs_input_sources = list(map(bool, self.inputs.unsafe_inputs_input_sources))
    # inputs.unsafe_inputs_flags: uint8 array, coerced to bools.
    start = end
    end += 4
    (length,) = _struct_I.unpack(str[start:end])
    pattern = '<%sB'%length
    start = end
    s = struct.Struct(pattern)
    end += s.size
    self.inputs.unsafe_inputs_flags = numpy.frombuffer(str[start:end], dtype=numpy.bool_, count=length)
    self.inputs.unsafe_inputs_flags = list(map(bool, self.inputs.unsafe_inputs_flags))
    # inputs.monitoring_case_number_inputs: uint16 array.
    start = end
    end += 4
    (length,) = _struct_I.unpack(str[start:end])
    pattern = '<%sH'%length
    start = end
    s = struct.Struct(pattern)
    end += s.size
    self.inputs.monitoring_case_number_inputs = numpy.frombuffer(str[start:end], dtype=numpy.uint16, count=length)
    # inputs.monitoring_case_number_inputs_flags: uint8 array, coerced to bools.
    start = end
    end += 4
    (length,) = _struct_I.unpack(str[start:end])
    pattern = '<%sB'%length
    start = end
    s = struct.Struct(pattern)
    end += s.size
    self.inputs.monitoring_case_number_inputs_flags = numpy.frombuffer(str[start:end], dtype=numpy.bool_, count=length)
    self.inputs.monitoring_case_number_inputs_flags = list(map(bool, self.inputs.monitoring_case_number_inputs_flags))
    # Fixed 9-byte run: two int16 velocities with two flag bytes each,
    # plus the sleep-mode byte.
    _x = self
    start = end
    end += 9
    (_x.inputs.linear_velocity_inputs_velocity_0, _x.inputs.linear_velocity_inputs_velocity_0_valid, _x.inputs.linear_velocity_inputs_velocity_0_transmitted_safely, _x.inputs.linear_velocity_inputs_velocity_1, _x.inputs.linear_velocity_inputs_velocity_1_valid, _x.inputs.linear_velocity_inputs_velocity_1_transmitted_safely, _x.inputs.sleep_mode_input,) = _get_struct_h2Bh3B().unpack(str[start:end])
    self.inputs.linear_velocity_inputs_velocity_0_valid = bool(self.inputs.linear_velocity_inputs_velocity_0_valid)
    self.inputs.linear_velocity_inputs_velocity_0_transmitted_safely = bool(self.inputs.linear_velocity_inputs_velocity_0_transmitted_safely)
    self.inputs.linear_velocity_inputs_velocity_1_valid = bool(self.inputs.linear_velocity_inputs_velocity_1_valid)
    self.inputs.linear_velocity_inputs_velocity_1_transmitted_safely = bool(self.inputs.linear_velocity_inputs_velocity_1_transmitted_safely)
    # outputs.evaluation_path_outputs_eval_out: uint8 array, coerced to bools.
    start = end
    end += 4
    (length,) = _struct_I.unpack(str[start:end])
    pattern = '<%sB'%length
    start = end
    s = struct.Struct(pattern)
    end += s.size
    self.outputs.evaluation_path_outputs_eval_out = numpy.frombuffer(str[start:end], dtype=numpy.bool_, count=length)
    self.outputs.evaluation_path_outputs_eval_out = list(map(bool, self.outputs.evaluation_path_outputs_eval_out))
    # outputs.evaluation_path_outputs_is_safe: uint8 array, coerced to bools.
    start = end
    end += 4
    (length,) = _struct_I.unpack(str[start:end])
    pattern = '<%sB'%length
    start = end
    s = struct.Struct(pattern)
    end += s.size
    self.outputs.evaluation_path_outputs_is_safe = numpy.frombuffer(str[start:end], dtype=numpy.bool_, count=length)
    self.outputs.evaluation_path_outputs_is_safe = list(map(bool, self.outputs.evaluation_path_outputs_is_safe))
    # outputs.evaluation_path_outputs_is_valid: uint8 array, coerced to bools.
    start = end
    end += 4
    (length,) = _struct_I.unpack(str[start:end])
    pattern = '<%sB'%length
    start = end
    s = struct.Struct(pattern)
    end += s.size
    self.outputs.evaluation_path_outputs_is_valid = numpy.frombuffer(str[start:end], dtype=numpy.bool_, count=length)
    self.outputs.evaluation_path_outputs_is_valid = list(map(bool, self.outputs.evaluation_path_outputs_is_valid))
    # outputs.monitoring_case_number_outputs: uint16 array.
    start = end
    end += 4
    (length,) = _struct_I.unpack(str[start:end])
    pattern = '<%sH'%length
    start = end
    s = struct.Struct(pattern)
    end += s.size
    self.outputs.monitoring_case_number_outputs = numpy.frombuffer(str[start:end], dtype=numpy.uint16, count=length)
    # outputs.monitoring_case_number_outputs_flags: uint8 array, coerced to bools.
    start = end
    end += 4
    (length,) = _struct_I.unpack(str[start:end])
    pattern = '<%sB'%length
    start = end
    s = struct.Struct(pattern)
    end += s.size
    self.outputs.monitoring_case_number_outputs_flags = numpy.frombuffer(str[start:end], dtype=numpy.bool_, count=length)
    self.outputs.monitoring_case_number_outputs_flags = list(map(bool, self.outputs.monitoring_case_number_outputs_flags))
    # Fixed 17-byte run: sleep-mode byte, error flags and output velocities.
    _x = self
    start = end
    end += 17
    (_x.outputs.sleep_mode_output, _x.outputs.sleep_mode_output_valid, _x.outputs.error_flag_contamination_warning, _x.outputs.error_flag_contamination_error, _x.outputs.error_flag_manipulation_error, _x.outputs.error_flag_glare, _x.outputs.error_flag_reference_contour_intruded, _x.outputs.error_flag_critical_error, _x.outputs.error_flags_are_valid, _x.outputs.linear_velocity_outputs_velocity_0, _x.outputs.linear_velocity_outputs_velocity_0_valid, _x.outputs.linear_velocity_outputs_velocity_0_transmitted_safely, _x.outputs.linear_velocity_outputs_velocity_1, _x.outputs.linear_velocity_outputs_velocity_1_valid, _x.outputs.linear_velocity_outputs_velocity_1_transmitted_safely,) = _get_struct_9Bh2Bh2B().unpack(str[start:end])
    self.outputs.sleep_mode_output_valid = bool(self.outputs.sleep_mode_output_valid)
    self.outputs.error_flag_contamination_warning = bool(self.outputs.error_flag_contamination_warning)
    self.outputs.error_flag_contamination_error = bool(self.outputs.error_flag_contamination_error)
    self.outputs.error_flag_manipulation_error = bool(self.outputs.error_flag_manipulation_error)
    self.outputs.error_flag_glare = bool(self.outputs.error_flag_glare)
    self.outputs.error_flag_reference_contour_intruded = bool(self.outputs.error_flag_reference_contour_intruded)
    self.outputs.error_flag_critical_error = bool(self.outputs.error_flag_critical_error)
    self.outputs.error_flags_are_valid = bool(self.outputs.error_flags_are_valid)
    self.outputs.linear_velocity_outputs_velocity_0_valid = bool(self.outputs.linear_velocity_outputs_velocity_0_valid)
    self.outputs.linear_velocity_outputs_velocity_0_transmitted_safely = bool(self.outputs.linear_velocity_outputs_velocity_0_transmitted_safely)
    self.outputs.linear_velocity_outputs_velocity_1_valid = bool(self.outputs.linear_velocity_outputs_velocity_1_valid)
    self.outputs.linear_velocity_outputs_velocity_1_transmitted_safely = bool(self.outputs.linear_velocity_outputs_velocity_1_transmitted_safely)
    # outputs.resulting_velocity: int16 array.
    start = end
    end += 4
    (length,) = _struct_I.unpack(str[start:end])
    pattern = '<%sh'%length
    start = end
    s = struct.Struct(pattern)
    end += s.size
    self.outputs.resulting_velocity = numpy.frombuffer(str[start:end], dtype=numpy.int16, count=length)
    # outputs.resulting_velocity_flags: uint8 array, coerced to bools.
    start = end
    end += 4
    (length,) = _struct_I.unpack(str[start:end])
    pattern = '<%sB'%length
    start = end
    s = struct.Struct(pattern)
    end += s.size
    self.outputs.resulting_velocity_flags = numpy.frombuffer(str[start:end], dtype=numpy.bool_, count=length)
    self.outputs.resulting_velocity_flags = list(map(bool, self.outputs.resulting_velocity_flags))
    return self
  except struct.error as e:
    raise genpy.DeserializationError(e) # most likely buffer underfill
# Shared Struct for the 4-byte little-endian uint32 array-length prefix.
_struct_I = genpy.struct_I
def _get_struct_I():
    """Return the shared Struct used to (un)pack uint32 length prefixes."""
    # Read-only access: no ``global`` declaration is needed just to read it.
    return _struct_I
_struct_9Bh2Bh2B = None
def _get_struct_9Bh2Bh2B():
global _struct_9Bh2Bh2B
if _struct_9Bh2Bh2B is None:
_struct_9Bh2Bh2B = struct.Struct("<9Bh2Bh2B")
return _struct_9Bh2Bh2B
_struct_h2Bh3B = None
def _get_struct_h2Bh3B():
global _struct_h2Bh3B
if _struct_h2Bh3B is None:
_struct_h2Bh3B = struct.Struct("<h2Bh3B")
return _struct_h2Bh3B
| 53.117647
| 734
| 0.747988
| 3,621
| 27,090
| 5.220105
| 0.055786
| 0.068088
| 0.040631
| 0.068141
| 0.898529
| 0.872712
| 0.859644
| 0.83425
| 0.826844
| 0.809597
| 0
| 0.010052
| 0.148062
| 27,090
| 509
| 735
| 53.222004
| 0.808961
| 0.046585
| 0
| 0.730942
| 1
| 0
| 0.081918
| 0.054067
| 0
| 0
| 0.00039
| 0
| 0
| 1
| 0.020179
| false
| 0
| 0.011211
| 0
| 0.060538
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
5d69f1ae33ddaafe5e475c83fe16b7e8e2e750b9
| 1,805
|
py
|
Python
|
aitlas/models/alexnet.py
|
biasvariancelabs/aitlas
|
e36913c44d5a8393566b7271607ba839f9be0df3
|
[
"MIT"
] | 32
|
2020-12-04T19:48:19.000Z
|
2022-03-16T18:18:05.000Z
|
aitlas/models/alexnet.py
|
biasvariancelabs/aitlas
|
e36913c44d5a8393566b7271607ba839f9be0df3
|
[
"MIT"
] | 2
|
2021-04-11T17:09:14.000Z
|
2021-05-14T13:22:41.000Z
|
aitlas/models/alexnet.py
|
biasvariancelabs/aitlas
|
e36913c44d5a8393566b7271607ba839f9be0df3
|
[
"MIT"
] | 8
|
2021-04-06T22:06:27.000Z
|
2022-01-30T06:01:39.000Z
|
import torch.nn as nn
import torchvision.models as models
from ..base import BaseMulticlassClassifier, BaseMultilabelClassifier
class AlexNet(BaseMulticlassClassifier):
    """Multiclass image classifier wrapping torchvision's AlexNet."""

    def __init__(self, config):
        super().__init__(config)
        if self.config.pretrained:
            # Start from pretrained weights, drop the final linear layer and
            # attach a fresh one sized for the configured class count.
            net = models.alexnet(self.config.pretrained, False)
            net.classifier = net.classifier[:-1]
            net.classifier.add_module(
                "6", nn.Linear(4096, self.config.num_classes, bias=True)
            )
            self.model = net
        else:
            # No pretrained weights: let torchvision build the head directly.
            self.model = models.alexnet(
                self.config.pretrained, False, num_classes=self.config.num_classes
            )

    def forward(self, x):
        """Run a forward pass through the wrapped network."""
        return self.model(x)

    def extract_features(self):
        """Remove final layers if we only need to extract features."""
        self.model.classifier = self.model.classifier[:-3]
        return self.model
class AlexNetMultiLabel(BaseMultilabelClassifier):
    """Multilabel variant of the AlexNet-backed classifier."""

    def __init__(self, config):
        super().__init__(config)
        if not self.config.pretrained:
            # Build from scratch with the requested output size.
            self.model = models.alexnet(
                self.config.pretrained, False, num_classes=self.config.num_classes
            )
        else:
            # Pretrained backbone: swap the last classifier layer for one
            # matching the configured number of classes.
            backbone = models.alexnet(self.config.pretrained, False)
            backbone.classifier = backbone.classifier[:-1]
            backbone.classifier.add_module(
                "6", nn.Linear(4096, self.config.num_classes, bias=True)
            )
            self.model = backbone

    def forward(self, x):
        """Run a forward pass through the wrapped network."""
        return self.model(x)

    def extract_features(self):
        """Drop the classifier head so the model yields feature vectors."""
        self.model.classifier = self.model.classifier[:-3]
        return self.model
| 31.666667
| 84
| 0.629363
| 203
| 1,805
| 5.46798
| 0.236453
| 0.145946
| 0.171171
| 0.079279
| 0.827027
| 0.827027
| 0.827027
| 0.827027
| 0.827027
| 0.827027
| 0
| 0.01063
| 0.27036
| 1,805
| 56
| 85
| 32.232143
| 0.832194
| 0.084211
| 0
| 0.769231
| 0
| 0
| 0.00122
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.153846
| false
| 0
| 0.076923
| 0.051282
| 0.384615
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
5377d2b13a0b95a52bd8837d251fe769cd9ece23
| 149,637
|
py
|
Python
|
pyke/krb_compiler/compiler_bc.py
|
alimon/pyke3
|
fc02c50c1c658dce0dc4b6ffa33cb819be03f6e2
|
[
"MIT"
] | 5
|
2021-05-30T19:15:21.000Z
|
2022-02-09T20:13:17.000Z
|
pyke/krb_compiler/compiler_bc.py
|
alimon/pyke3
|
fc02c50c1c658dce0dc4b6ffa33cb819be03f6e2
|
[
"MIT"
] | 1
|
2021-05-30T16:38:09.000Z
|
2021-05-30T16:38:09.000Z
|
pyke/krb_compiler/compiler_bc.py
|
alimon/pyke3
|
fc02c50c1c658dce0dc4b6ffa33cb819be03f6e2
|
[
"MIT"
] | 2
|
2020-06-25T21:33:34.000Z
|
2020-06-26T14:45:43.000Z
|
# compiler_bc.py
from pyke import contexts, pattern, bc_rule
# Version stamps written by the krb compiler that generated this module.
# NOTE(review): presumably compared against the running pyke version when
# compiled rule bases are loaded -- confirm against pyke's loader.
pyke_version = '1.1.1'
compiler_version = 1
def file(rule, arg_patterns, arg_context):
  # Generated backward-chaining rule (pyke krb compiler output).  Proves
  # the 'file' goal: unify the goal arguments, prove the 'rule_decl',
  # 'fc_rules' and 'bc_rules' sub-goals, then bind the assembled fc-file,
  # plan-file and bc-file line tuples.  Yields once per proof; all
  # bindings are rolled back via context marks on backtracking.
  # NOTE(review): 'helpers' is not imported in this chunk -- presumably
  # brought into scope earlier in the file; confirm.
  engine = rule.rule_base.engine
  patterns = rule.goal_arg_patterns()
  if len(arg_patterns) == len(patterns):
    context = contexts.bc_context(rule)
    try:
      # Unify caller-supplied goal arguments with this rule's goal patterns.
      if all(map(lambda pat, arg:
                   pat.match_pattern(context, context,
                                     arg, arg_context),
                 patterns,
                 arg_patterns)):
        rule.rule_base.num_bc_rules_matched += 1
        mark1 = context.mark(True)
        if rule.pattern(0).match_data(context, context,
              helpers.fc_head(context.lookup_data('rb_name'))):
          context.end_save_all_undo()
          mark2 = context.mark(True)
          if rule.pattern(1).match_data(context, context,
                helpers.bc_head(context.lookup_data('rb_name'))):
            context.end_save_all_undo()
            mark3 = context.mark(True)
            if rule.pattern(2).match_data(context, context,
                  helpers.plan_head(context.lookup_data('rb_name'))):
              context.end_save_all_undo()
              # 'when' clause 4: rule_decl must succeed (flag checked below).
              flag_4 = False
              with engine.prove(rule.rule_base.root_name, 'rule_decl', context,
                                (rule.pattern(3),
                                 rule.pattern(4),
                                 rule.pattern(5),)) \
                as gen_4:
                for x_4 in gen_4:
                  flag_4 = True
                  assert x_4 is None, \
                    "compiler.file: got unexpected plan from when clause 4"
                  # 'when' clause 5: collect the forward-chaining rules.
                  flag_5 = False
                  with engine.prove(rule.rule_base.root_name, 'fc_rules', context,
                                    (rule.pattern(6),
                                     rule.pattern(7),
                                     rule.pattern(8),)) \
                    as gen_5:
                    for x_5 in gen_5:
                      flag_5 = True
                      assert x_5 is None, \
                        "compiler.file: got unexpected plan from when clause 5"
                      # 'when' clause 6: collect the backward-chaining rules.
                      flag_6 = False
                      with engine.prove(rule.rule_base.root_name, 'bc_rules', context,
                                        (rule.pattern(3),
                                         rule.pattern(9),
                                         rule.pattern(10),
                                         rule.pattern(11),
                                         rule.pattern(12),)) \
                        as gen_6:
                        for x_6 in gen_6:
                          flag_6 = True
                          assert x_6 is None, \
                            "compiler.file: got unexpected plan from when clause 6"
                          # Bind fc-file contents (empty tuple when no fc rules).
                          mark7 = context.mark(True)
                          if rule.pattern(13).match_data(context, context,
                                (context.lookup_data('fc_head'),
                                 context.lookup_data('fc_fun_lines'),
                                 "",
                                 "def populate(engine):",
                                 ('INDENT', 2),
                                 context.lookup_data('decl_line'),
                                 context.lookup_data('fc_init_lines'),
                                 'POPINDENT',
                                 "",
                                 context.lookup_data('fc_extra_lines'),
                                ) \
                                if context.lookup_data('fc_fun_lines') \
                                else ()):
                            context.end_save_all_undo()
                            # Bind plan-file contents.
                            mark8 = context.mark(True)
                            if rule.pattern(14).match_data(context, context,
                                  (context.lookup_data('plan_head'),
                                   context.lookup_data('bc_plan_lines'),
                                   "",
                                   context.lookup_data('plan_extra_lines')) \
                                  if context.lookup_data('bc_plan_lines') \
                                  else ()):
                              context.end_save_all_undo()
                              # Bind bc-file contents.
                              mark9 = context.mark(True)
                              if rule.pattern(15).match_data(context, context,
                                    (context.lookup_data('bc_head'),
                                     ("from %s import %s_plans" %
                                        (context.lookup_data('generated_root_pkg'), context.lookup_data('rb_name'))
                                      if context.lookup_data('bc_plan_lines')
                                      else ()),
                                     context.lookup_data('bc_bc_fun_lines'),
                                     "",
                                     "def populate(engine):",
                                     ('INDENT', 2),
                                     context.lookup_data('decl_line'),
                                     context.lookup_data('bc_bc_init_lines'),
                                     'POPINDENT',
                                     "",
                                     context.lookup_data('bc_extra_lines')) \
                                    if context.lookup_data('bc_bc_fun_lines') \
                                    else ()):
                                context.end_save_all_undo()
                                rule.rule_base.num_bc_rule_successes += 1
                                yield
                              else: context.end_save_all_undo()
                              context.undo_to_mark(mark9)
                            else: context.end_save_all_undo()
                            context.undo_to_mark(mark8)
                          else: context.end_save_all_undo()
                          context.undo_to_mark(mark7)
                      if not flag_6:
                        raise AssertionError("compiler.file: 'when' clause 6 failed")
                  if not flag_5:
                    raise AssertionError("compiler.file: 'when' clause 5 failed")
              if not flag_4:
                raise AssertionError("compiler.file: 'when' clause 4 failed")
            else: context.end_save_all_undo()
            context.undo_to_mark(mark3)
          else: context.end_save_all_undo()
          context.undo_to_mark(mark2)
        else: context.end_save_all_undo()
        context.undo_to_mark(mark1)
        rule.rule_base.num_bc_rule_failures += 1
    finally:
      context.done()
def rule_decl(rule, arg_patterns, arg_context):
  # Generated bc rule: binds the engine.get_create(...) declaration line
  # for a rule base with no parent.  Yields once per proof.
  engine = rule.rule_base.engine
  patterns = rule.goal_arg_patterns()
  if len(arg_patterns) == len(patterns):
    context = contexts.bc_context(rule)
    try:
      # Unify caller-supplied goal arguments with this rule's goal patterns.
      if all(map(lambda pat, arg:
                   pat.match_pattern(context, context,
                                     arg, arg_context),
                 patterns,
                 arg_patterns)):
        rule.rule_base.num_bc_rules_matched += 1
        mark1 = context.mark(True)
        if rule.pattern(0).match_data(context, context,
              "This_rule_base = engine.get_create(%r)" % context.lookup_data('rb_name')):
          context.end_save_all_undo()
          rule.rule_base.num_bc_rule_successes += 1
          yield
        else: context.end_save_all_undo()
        context.undo_to_mark(mark1)
        rule.rule_base.num_bc_rule_failures += 1
    finally:
      context.done()
def rule_decl_with_parent(rule, arg_patterns, arg_context):
  # Generated bc rule: binds the engine.get_create(...) declaration line
  # for a rule base that extends a parent, including excluded symbols.
  engine = rule.rule_base.engine
  patterns = rule.goal_arg_patterns()
  if len(arg_patterns) == len(patterns):
    context = contexts.bc_context(rule)
    try:
      # Unify caller-supplied goal arguments with this rule's goal patterns.
      if all(map(lambda pat, arg:
                   pat.match_pattern(context, context,
                                     arg, arg_context),
                 patterns,
                 arg_patterns)):
        rule.rule_base.num_bc_rules_matched += 1
        mark1 = context.mark(True)
        if rule.pattern(0).match_data(context, context,
              "This_rule_base = engine.get_create(%r, %r, %s)" % \
                (context.lookup_data('rb_name'), context.lookup_data('parent'),
                 tuple(repr(sym) for sym in context.lookup_data('excluded_symbols')))):
          context.end_save_all_undo()
          rule.rule_base.num_bc_rule_successes += 1
          yield
        else: context.end_save_all_undo()
        context.undo_to_mark(mark1)
        rule.rule_base.num_bc_rule_failures += 1
    finally:
      context.done()
def fc_rules(rule, arg_patterns, arg_context):
  # Generated bc rule implementing a 'forall' over the parsed fc rules:
  # for each entry in 'fc_rules' it proves the 'fc_rule' sub-goal,
  # accumulating the generated function and init lines, then binds the
  # collected tuples.  The forallNN_worked flag tracks whether every
  # iteration succeeded; a failed iteration aborts the whole forall.
  engine = rule.rule_base.engine
  patterns = rule.goal_arg_patterns()
  if len(arg_patterns) == len(patterns):
    context = contexts.bc_context(rule)
    try:
      # Unify caller-supplied goal arguments with this rule's goal patterns.
      if all(map(lambda pat, arg:
                   pat.match_pattern(context, context,
                                     arg, arg_context),
                 patterns,
                 arg_patterns)):
        rule.rule_base.num_bc_rules_matched += 1
        fc_funs = []
        fc_init = []
        forall91_worked = True
        for python_ans in \
            context.lookup_data('fc_rules'):
          mark2 = context.mark(True)
          if rule.pattern(0).match_data(context, context, python_ans):
            context.end_save_all_undo()
            forall91_worked = False
            flag_3 = False
            with engine.prove(rule.rule_base.root_name, 'fc_rule', context,
                              (rule.pattern(0),
                               rule.pattern(1),
                               rule.pattern(2),)) \
              as gen_3:
              for x_3 in gen_3:
                flag_3 = True
                assert x_3 is None, \
                  "compiler.fc_rules: got unexpected plan from when clause 3"
                # Record this fc rule's generated code and init line.
                fc_funs.append(context.lookup_data('fc_fun_1'))
                fc_init.append(context.lookup_data('fc_init_1'))
                forall91_worked = True
                if forall91_worked: break
            if not flag_3:
              raise AssertionError("compiler.fc_rules: 'when' clause 3 failed")
            if not forall91_worked:
              context.undo_to_mark(mark2)
              break
          else: context.end_save_all_undo()
          context.undo_to_mark(mark2)
        if forall91_worked:
          mark5 = context.mark(True)
          if rule.pattern(3).match_data(context, context,
                tuple(fc_funs)):
            context.end_save_all_undo()
            mark6 = context.mark(True)
            if rule.pattern(4).match_data(context, context,
                  tuple(fc_init)):
              context.end_save_all_undo()
              rule.rule_base.num_bc_rule_successes += 1
              yield
            else: context.end_save_all_undo()
            context.undo_to_mark(mark6)
          else: context.end_save_all_undo()
          context.undo_to_mark(mark5)
        rule.rule_base.num_bc_rule_failures += 1
    finally:
      context.done()
def fc_rule_(rule, arg_patterns, arg_context):
  # Generated bc rule: compiles a single forward-chaining rule.  Proves
  # 'fc_premises' and 'assertions', then binds the generated rule function
  # body (pattern 14) and its fc_rule registration lines (pattern 15).
  # The bound tuples contain source lines plus INDENT/POPINDENT layout
  # directives consumed by the code emitter.
  engine = rule.rule_base.engine
  patterns = rule.goal_arg_patterns()
  if len(arg_patterns) == len(patterns):
    context = contexts.bc_context(rule)
    try:
      # Unify caller-supplied goal arguments with this rule's goal patterns.
      if all(map(lambda pat, arg:
                   pat.match_pattern(context, context,
                                     arg, arg_context),
                 patterns,
                 arg_patterns)):
        rule.rule_base.num_bc_rules_matched += 1
        flag_1 = False
        with engine.prove(rule.rule_base.root_name, 'fc_premises', context,
                          (rule.pattern(0),
                           rule.pattern(1),
                           rule.pattern(2),
                           rule.pattern(3),
                           rule.pattern(4),
                           rule.pattern(5),
                           rule.pattern(6),
                           rule.pattern(7),
                           rule.pattern(1),
                           rule.pattern(2),
                           rule.pattern(8),
                           rule.pattern(9),
                           rule.pattern(10),)) \
          as gen_1:
          for x_1 in gen_1:
            flag_1 = True
            assert x_1 is None, \
              "compiler.fc_rule_: got unexpected plan from when clause 1"
            flag_2 = False
            with engine.prove(rule.rule_base.root_name, 'assertions', context,
                              (rule.pattern(11),
                               rule.pattern(12),
                               rule.pattern(10),
                               rule.pattern(13),)) \
              as gen_2:
              for x_2 in gen_2:
                flag_2 = True
                assert x_2 is None, \
                  "compiler.fc_rule_: got unexpected plan from when clause 2"
                # Bind the generated fc rule function's source lines.
                mark3 = context.mark(True)
                if rule.pattern(14).match_data(context, context,
                      ("",
                       "def %s(rule, context = None, index = None):" % context.lookup_data('rule_name'),
                       ("INDENT", 2),
                       "engine = rule.rule_base.engine",
                       "if context is None: context = contexts.simple_context()",
                       "try:",
                       ("INDENT", 2),
                       context.lookup_data('prem_fn_head'),
                       context.lookup_data('asserts_fn_lines'),
                       "rule.rule_base.num_fc_rules_triggered += 1",
                       context.lookup_data('prem_fn_tail'),
                       "POPINDENT",
                       "finally:",
                       ("INDENT", 2),
                       "context.done()",
                       "POPINDENT",
                       "POPINDENT",
                      )):
                  context.end_save_all_undo()
                  # Bind the fc_rule registration (populate) lines.
                  mark4 = context.mark(True)
                  if rule.pattern(15).match_data(context, context,
                        ("",
                         "fc_rule.fc_rule('%(name)s', This_rule_base, %(name)s," %
                           {'name': context.lookup_data('rule_name')},
                         ("INDENT", 2),
                         helpers.add_brackets(context.lookup_data('prem_decl_lines'), '(', '),'),
                         helpers.list_format(context.lookup_data('patterns_out'), '(', '))'),
                         "POPINDENT",
                        )):
                    context.end_save_all_undo()
                    rule.rule_base.num_bc_rule_successes += 1
                    yield
                  else: context.end_save_all_undo()
                  context.undo_to_mark(mark4)
                else: context.end_save_all_undo()
                context.undo_to_mark(mark3)
            if not flag_2:
              raise AssertionError("compiler.fc_rule_: 'when' clause 2 failed")
        if not flag_1:
          raise AssertionError("compiler.fc_rule_: 'when' clause 1 failed")
        rule.rule_base.num_bc_rule_failures += 1
    finally:
      context.done()
def fc_premises0(rule, arg_patterns, arg_context):
    # Generated bc rule for an empty premise list: if the caller's goal
    # arguments unify with this rule's goal patterns, succeed exactly once.
    engine = rule.rule_base.engine
    goal_patterns = rule.goal_arg_patterns()
    if len(arg_patterns) == len(goal_patterns):
        context = contexts.bc_context(rule)
        try:
            unified = all(map(
                lambda pat, arg: pat.match_pattern(context, context, arg, arg_context),
                goal_patterns,
                arg_patterns))
            if unified:
                rule.rule_base.num_bc_rules_matched += 1
                rule.rule_base.num_bc_rule_successes += 1
                yield
                # Resumed only if the caller backtracks past our single answer.
                rule.rule_base.num_bc_rule_failures += 1
        finally:
            context.done()
def fc_premises1(rule, arg_patterns, arg_context):
  # Generated bc rule for a non-empty premise list: proves 'fc_premise'
  # for the head premise, recurses via 'fc_premises' for the rest, then
  # binds the concatenation of both declaration line tuples.
  engine = rule.rule_base.engine
  patterns = rule.goal_arg_patterns()
  if len(arg_patterns) == len(patterns):
    context = contexts.bc_context(rule)
    try:
      # Unify caller-supplied goal arguments with this rule's goal patterns.
      if all(map(lambda pat, arg:
                   pat.match_pattern(context, context,
                                     arg, arg_context),
                 patterns,
                 arg_patterns)):
        rule.rule_base.num_bc_rules_matched += 1
        flag_1 = False
        with engine.prove(rule.rule_base.root_name, 'fc_premise', context,
                          (rule.pattern(0),
                           rule.pattern(1),
                           rule.pattern(2),
                           rule.pattern(3),
                           rule.pattern(4),
                           rule.pattern(5),
                           rule.pattern(6),
                           rule.pattern(7),
                           rule.pattern(8),
                           rule.pattern(9),
                           rule.pattern(10),
                           rule.pattern(11),
                           rule.pattern(12),)) \
          as gen_1:
          for x_1 in gen_1:
            flag_1 = True
            assert x_1 is None, \
              "compiler.fc_premises1: got unexpected plan from when clause 1"
            # Recurse on the remaining premises.
            flag_2 = False
            with engine.prove(rule.rule_base.root_name, 'fc_premises', context,
                              (rule.pattern(0),
                               rule.pattern(2),
                               rule.pattern(13),
                               rule.pattern(14),
                               rule.pattern(4),
                               rule.pattern(5),
                               rule.pattern(15),
                               rule.pattern(16),
                               rule.pattern(9),
                               rule.pattern(17),
                               rule.pattern(18),
                               rule.pattern(12),
                               rule.pattern(19),)) \
              as gen_2:
              for x_2 in gen_2:
                flag_2 = True
                assert x_2 is None, \
                  "compiler.fc_premises1: got unexpected plan from when clause 2"
                # Bind the combined declaration lines for head + rest.
                mark3 = context.mark(True)
                if rule.pattern(20).match_data(context, context,
                      context.lookup_data('decl_lines1') + context.lookup_data('decl_lines2')):
                  context.end_save_all_undo()
                  rule.rule_base.num_bc_rule_successes += 1
                  yield
                else: context.end_save_all_undo()
                context.undo_to_mark(mark3)
            if not flag_2:
              raise AssertionError("compiler.fc_premises1: 'when' clause 2 failed")
        if not flag_1:
          raise AssertionError("compiler.fc_premises1: 'when' clause 1 failed")
        rule.rule_base.num_bc_rule_failures += 1
    finally:
      context.done()
def fc_premise(rule, arg_patterns, arg_context):
  """Backward-chaining proof generator for goal 'fc_premise'.

  Pyke machine-generated rule (rule base 'compiler').  Builds forward-
  chaining premise code lines via nested match_data calls; yields once
  per successful proof.  Do not hand-edit -- regenerate from source.
  """
  engine = rule.rule_base.engine
  patterns = rule.goal_arg_patterns()
  if len(arg_patterns) == len(patterns):
    context = contexts.bc_context(rule)
    try:
      if all(map(lambda pat, arg:
                   pat.match_pattern(context, context,
                                       arg, arg_context),
                 patterns,
                 arg_patterns)):
        rule.rule_base.num_bc_rules_matched += 1
        with engine.prove(rule.rule_base.root_name, 'gen_fc_for', context,
                          (rule.pattern(0),
                           rule.pattern(1),
                           rule.pattern(2),
                           rule.pattern(3),
                           rule.pattern(4),
                           rule.pattern(5),
                           rule.pattern(6),)) \
          as gen_1:
          for x_1 in gen_1:
            assert x_1 is None, \
              "compiler.fc_premise: got unexpected plan from when clause 1"
            # Each mark/match_data/undo triple is one 'with ... as' step
            # of the rule body; failure unwinds to the matching mark.
            mark2 = context.mark(True)
            if rule.pattern(7).match_data(context, context,
                  (() if context.lookup_data('break_cond') is None
                   else "if %s: break" % context.lookup_data('break_cond'),
                   'POPINDENT',
                   'POPINDENT',),):
              context.end_save_all_undo()
              mark3 = context.mark(True)
              if rule.pattern(8).match_data(context, context,
                    context.lookup_data('clause_num') + 1):
                context.end_save_all_undo()
                mark4 = context.mark(True)
                if rule.pattern(9).match_data(context, context,
                      context.lookup_data('decl_num_in') + 1):
                  context.end_save_all_undo()
                  mark5 = context.mark(True)
                  if rule.pattern(10).match_data(context, context,
                        ("(%r, %r," % (context.lookup_data('kb_name'), context.lookup_data('entity_name')),
                         ('INDENT', 1),
                         helpers.list_format(context.lookup_data('arg_patterns'), '(', '),'),
                         "%s)," % context.lookup_data('multi_match'),
                         "POPINDENT",
                        )):
                    context.end_save_all_undo()
                    rule.rule_base.num_bc_rule_successes += 1
                    yield
                  else: context.end_save_all_undo()
                  context.undo_to_mark(mark5)
                else: context.end_save_all_undo()
                context.undo_to_mark(mark4)
              else: context.end_save_all_undo()
              context.undo_to_mark(mark3)
            else: context.end_save_all_undo()
            context.undo_to_mark(mark2)
      rule.rule_base.num_bc_rule_failures += 1
    finally:
      context.done()
def gen_fc_for_false(rule, arg_patterns, arg_context):
  """Backward-chaining rule 'gen_fc_for_false' (rule base 'compiler').

  Matches a literal template of output code lines (the multi-match
  lookup form using knowledge_base.Gen_once) against pattern 0; yields
  once when the match succeeds.  Machine-generated -- do not hand-edit.
  """
  engine = rule.rule_base.engine
  patterns = rule.goal_arg_patterns()
  if len(arg_patterns) == len(patterns):
    context = contexts.bc_context(rule)
    try:
      if all(map(lambda pat, arg:
                   pat.match_pattern(context, context,
                                       arg, arg_context),
                 patterns,
                 arg_patterns)):
        rule.rule_base.num_bc_rules_matched += 1
        mark1 = context.mark(True)
        # The tuple below is a template of generated source lines with
        # indent-control markers ('INDENT', n) / 'POPINDENT'.
        if rule.pattern(0).match_data(context, context,
              (('STARTING_LINENO', context.lookup_data('start_lineno')),
               "with knowledge_base.Gen_once if index == %d \\" % \
                 context.lookup_data('decl_num'),
               ('INDENT', 9),
               "else engine.lookup(%r, %r, context," % \
                 (context.lookup_data('kb_name'), context.lookup_data('entity_name')),
               ('INDENT', 19),
               "rule.foreach_patterns(%d)) \\" % context.lookup_data('decl_num'),
               'POPINDENT',
               'POPINDENT',
               ('INDENT', 2),
               "as gen_%d:" % context.lookup_data('decl_num'),
               "for dummy in gen_%d:" % context.lookup_data('decl_num'),
               ('ENDING_LINENO', context.lookup_data('end_lineno')),
               ('INDENT', 2),
              )):
          context.end_save_all_undo()
          rule.rule_base.num_bc_rule_successes += 1
          yield
        else: context.end_save_all_undo()
        context.undo_to_mark(mark1)
      rule.rule_base.num_bc_rule_failures += 1
    finally:
      context.done()
def gen_fc_for_true(rule, arg_patterns, arg_context):
  """Backward-chaining rule 'gen_fc_for_true' (rule base 'compiler').

  Like gen_fc_for_false but emits the plain engine.lookup template
  (no Gen_once guard).  Machine-generated -- do not hand-edit.
  """
  engine = rule.rule_base.engine
  patterns = rule.goal_arg_patterns()
  if len(arg_patterns) == len(patterns):
    context = contexts.bc_context(rule)
    try:
      if all(map(lambda pat, arg:
                   pat.match_pattern(context, context,
                                       arg, arg_context),
                 patterns,
                 arg_patterns)):
        rule.rule_base.num_bc_rules_matched += 1
        mark1 = context.mark(True)
        if rule.pattern(0).match_data(context, context,
              (('STARTING_LINENO', context.lookup_data('start_lineno')),
               "with engine.lookup(%r, %r, context, \\" % \
                 (context.lookup_data('kb_name'), context.lookup_data('entity_name')),
               ('INDENT', 19),
               "rule.foreach_patterns(%d)) \\" % context.lookup_data('decl_num'),
               'POPINDENT',
               ('INDENT', 2),
               "as gen_%d:" % context.lookup_data('decl_num'),
               "for dummy in gen_%d:" % context.lookup_data('decl_num'),
               ('ENDING_LINENO', context.lookup_data('end_lineno')),
               ('INDENT', 2))):
          context.end_save_all_undo()
          rule.rule_base.num_bc_rule_successes += 1
          yield
        else: context.end_save_all_undo()
        context.undo_to_mark(mark1)
      rule.rule_base.num_bc_rule_failures += 1
    finally:
      context.done()
def fc_first(rule, arg_patterns, arg_context):
  """Backward-chaining rule 'fc_first' (rule base 'compiler').

  Wraps a forward-chaining premise list in a "firstN_worked" break
  condition; yields once per successful proof.  Machine-generated.
  """
  engine = rule.rule_base.engine
  patterns = rule.goal_arg_patterns()
  if len(arg_patterns) == len(patterns):
    context = contexts.bc_context(rule)
    try:
      if all(map(lambda pat, arg:
                   pat.match_pattern(context, context,
                                       arg, arg_context),
                 patterns,
                 arg_patterns)):
        rule.rule_base.num_bc_rules_matched += 1
        mark1 = context.mark(True)
        # Bind the break-condition variable name for this clause.
        if rule.pattern(0).match_data(context, context,
              "first%d_worked" % context.lookup_data('clause_num')):
          context.end_save_all_undo()
          flag_2 = False
          with engine.prove(rule.rule_base.root_name, 'fc_premises', context,
                            (rule.pattern(1),
                             rule.pattern(2),
                             rule.pattern(3),
                             rule.pattern(4),
                             rule.pattern(0),
                             rule.pattern(5),
                             rule.pattern(6),
                             rule.pattern(7),
                             rule.pattern(8),
                             rule.pattern(9),
                             rule.pattern(10),
                             rule.pattern(11),
                             rule.pattern(12),)) \
            as gen_2:
            for x_2 in gen_2:
              flag_2 = True
              assert x_2 is None, \
                "compiler.fc_first: got unexpected plan from when clause 2"
              mark3 = context.mark(True)
              if rule.pattern(13).match_data(context, context,
                    "%s = False" % context.lookup_data('break_cond')):
                context.end_save_all_undo()
                mark4 = context.mark(True)
                if rule.pattern(14).match_data(context, context,
                      "%s = True" % context.lookup_data('break_cond')):
                  context.end_save_all_undo()
                  rule.rule_base.num_bc_rule_successes += 1
                  yield
                else: context.end_save_all_undo()
                context.undo_to_mark(mark4)
              else: context.end_save_all_undo()
              context.undo_to_mark(mark3)
          if not flag_2:
            raise AssertionError("compiler.fc_first: 'when' clause 2 failed")
        else: context.end_save_all_undo()
        context.undo_to_mark(mark1)
      rule.rule_base.num_bc_rule_failures += 1
    finally:
      context.done()
def fc_forall_None(rule, arg_patterns, arg_context):
  """Backward-chaining rule 'fc_forall_None' (rule base 'compiler').

  Handles a forall premise with no 'require' part: proves the inner
  fc_premises and concatenates its head/tail code lines.  Generated.
  """
  engine = rule.rule_base.engine
  patterns = rule.goal_arg_patterns()
  if len(arg_patterns) == len(patterns):
    context = contexts.bc_context(rule)
    try:
      if all(map(lambda pat, arg:
                   pat.match_pattern(context, context,
                                       arg, arg_context),
                 patterns,
                 arg_patterns)):
        rule.rule_base.num_bc_rules_matched += 1
        flag_1 = False
        with engine.prove(rule.rule_base.root_name, 'fc_premises', context,
                          (rule.pattern(0),
                           rule.pattern(1),
                           rule.pattern(2),
                           rule.pattern(3),
                           rule.pattern(4),
                           rule.pattern(5),
                           rule.pattern(6),
                           rule.pattern(7),
                           rule.pattern(8),
                           rule.pattern(9),
                           rule.pattern(10),
                           rule.pattern(11),
                           rule.pattern(12),)) \
          as gen_1:
          for x_1 in gen_1:
            flag_1 = True
            assert x_1 is None, \
              "compiler.fc_forall_None: got unexpected plan from when clause 1"
            mark2 = context.mark(True)
            if rule.pattern(13).match_data(context, context,
                  context.lookup_data('fn_head1') + context.lookup_data('fn_tail1')):
              context.end_save_all_undo()
              rule.rule_base.num_bc_rule_successes += 1
              yield
            else: context.end_save_all_undo()
            context.undo_to_mark(mark2)
        if not flag_1:
          raise AssertionError("compiler.fc_forall_None: 'when' clause 1 failed")
      rule.rule_base.num_bc_rule_failures += 1
    finally:
      context.done()
def fc_forall_require(rule, arg_patterns, arg_context):
  """Backward-chaining rule 'fc_forall_require' (rule base 'compiler').

  Compiles a forall-with-require premise into "forallN_worked"
  bracketed code; proves two nested fc_premises clauses and assembles
  the combined head/tail line template.  Machine-generated.
  """
  engine = rule.rule_base.engine
  patterns = rule.goal_arg_patterns()
  if len(arg_patterns) == len(patterns):
    context = contexts.bc_context(rule)
    try:
      if all(map(lambda pat, arg:
                   pat.match_pattern(context, context,
                                       arg, arg_context),
                 patterns,
                 arg_patterns)):
        rule.rule_base.num_bc_rules_matched += 1
        mark1 = context.mark(True)
        # Positive and negated break-condition names for this forall.
        if rule.pattern(0).match_data(context, context,
              "forall%d_worked" % context.lookup_data('start_lineno')):
          context.end_save_all_undo()
          mark2 = context.mark(True)
          if rule.pattern(1).match_data(context, context,
                "not forall%d_worked" % context.lookup_data('start_lineno')):
            context.end_save_all_undo()
            flag_3 = False
            with engine.prove(rule.rule_base.root_name, 'fc_premises', context,
                              (rule.pattern(2),
                               rule.pattern(3),
                               rule.pattern(4),
                               rule.pattern(5),
                               rule.pattern(1),
                               rule.pattern(6),
                               rule.pattern(7),
                               rule.pattern(8),
                               rule.pattern(9),
                               rule.pattern(10),
                               rule.pattern(11),
                               rule.pattern(12),
                               rule.pattern(13),)) \
              as gen_3:
              for x_3 in gen_3:
                flag_3 = True
                assert x_3 is None, \
                  "compiler.fc_forall_require: got unexpected plan from when clause 3"
                flag_4 = False
                with engine.prove(rule.rule_base.root_name, 'fc_premises', context,
                                  (rule.pattern(2),
                                   rule.pattern(4),
                                   rule.pattern(14),
                                   rule.pattern(15),
                                   rule.pattern(0),
                                   rule.pattern(6),
                                   rule.pattern(16),
                                   rule.pattern(17),
                                   rule.pattern(10),
                                   rule.pattern(18),
                                   rule.pattern(19),
                                   rule.pattern(13),
                                   rule.pattern(20),)) \
                  as gen_4:
                  for x_4 in gen_4:
                    flag_4 = True
                    assert x_4 is None, \
                      "compiler.fc_forall_require: got unexpected plan from when clause 4"
                    mark5 = context.mark(True)
                    if rule.pattern(21).match_data(context, context,
                          ("forall%d_worked = True" % context.lookup_data('start_lineno'),
                           context.lookup_data('fn_head1'),
                           "forall%d_worked = False" % context.lookup_data('start_lineno'),
                           context.lookup_data('fn_head2'),
                           "forall%d_worked = True" % context.lookup_data('start_lineno'),
                           context.lookup_data('fn_tail2'),
                           context.lookup_data('fn_tail1'),
                           "if forall%d_worked:" % context.lookup_data('start_lineno'),
                           ("INDENT", 2))):
                      context.end_save_all_undo()
                      mark6 = context.mark(True)
                      if rule.pattern(22).match_data(context, context,
                            context.lookup_data('decl_lines1') + context.lookup_data('decl_lines2')):
                        context.end_save_all_undo()
                        rule.rule_base.num_bc_rule_successes += 1
                        yield
                      else: context.end_save_all_undo()
                      context.undo_to_mark(mark6)
                    else: context.end_save_all_undo()
                    context.undo_to_mark(mark5)
                if not flag_4:
                  raise AssertionError("compiler.fc_forall_require: 'when' clause 4 failed")
            if not flag_3:
              raise AssertionError("compiler.fc_forall_require: 'when' clause 3 failed")
          else: context.end_save_all_undo()
          context.undo_to_mark(mark2)
        else: context.end_save_all_undo()
        context.undo_to_mark(mark1)
      rule.rule_base.num_bc_rule_failures += 1
    finally:
      context.done()
def fc_notany(rule, arg_patterns, arg_context):
  """Backward-chaining rule 'fc_notany' (rule base 'compiler').

  Compiles a notany premise into "notanyN_worked" bracketed code;
  yields once per successful proof.  Machine-generated.
  """
  engine = rule.rule_base.engine
  patterns = rule.goal_arg_patterns()
  if len(arg_patterns) == len(patterns):
    context = contexts.bc_context(rule)
    try:
      if all(map(lambda pat, arg:
                   pat.match_pattern(context, context,
                                       arg, arg_context),
                 patterns,
                 arg_patterns)):
        rule.rule_base.num_bc_rules_matched += 1
        mark1 = context.mark(True)
        if rule.pattern(0).match_data(context, context,
              "notany%d_worked" % context.lookup_data('start_lineno')):
          context.end_save_all_undo()
          mark2 = context.mark(True)
          if rule.pattern(1).match_data(context, context,
                "not notany%d_worked" % context.lookup_data('start_lineno')):
            context.end_save_all_undo()
            flag_3 = False
            with engine.prove(rule.rule_base.root_name, 'fc_premises', context,
                              (rule.pattern(2),
                               rule.pattern(3),
                               rule.pattern(4),
                               rule.pattern(5),
                               rule.pattern(1),
                               rule.pattern(6),
                               rule.pattern(7),
                               rule.pattern(8),
                               rule.pattern(9),
                               rule.pattern(10),
                               rule.pattern(11),
                               rule.pattern(12),
                               rule.pattern(13),)) \
              as gen_3:
              for x_3 in gen_3:
                flag_3 = True
                assert x_3 is None, \
                  "compiler.fc_notany: got unexpected plan from when clause 3"
                mark4 = context.mark(True)
                if rule.pattern(14).match_data(context, context,
                      ("notany%d_worked = True" % context.lookup_data('start_lineno'),
                       context.lookup_data('fn_head1'),
                       "notany%d_worked = False" % context.lookup_data('start_lineno'),
                       context.lookup_data('fn_tail1'),
                       "if notany%d_worked:" % context.lookup_data('start_lineno'),
                       ("INDENT", 2))):
                  context.end_save_all_undo()
                  rule.rule_base.num_bc_rule_successes += 1
                  yield
                else: context.end_save_all_undo()
                context.undo_to_mark(mark4)
            if not flag_3:
              raise AssertionError("compiler.fc_notany: 'when' clause 3 failed")
          else: context.end_save_all_undo()
          context.undo_to_mark(mark2)
        else: context.end_save_all_undo()
        context.undo_to_mark(mark1)
      rule.rule_base.num_bc_rule_failures += 1
    finally:
      context.done()
def fc_python_premise(rule, arg_patterns, arg_context):
  """Backward-chaining rule 'fc_python_premise' (rule base 'compiler').

  Bumps the clause number and delegates to the shared 'python_premise'
  goal.  Machine-generated -- do not hand-edit.
  """
  engine = rule.rule_base.engine
  patterns = rule.goal_arg_patterns()
  if len(arg_patterns) == len(patterns):
    context = contexts.bc_context(rule)
    try:
      if all(map(lambda pat, arg:
                   pat.match_pattern(context, context,
                                       arg, arg_context),
                 patterns,
                 arg_patterns)):
        rule.rule_base.num_bc_rules_matched += 1
        mark1 = context.mark(True)
        if rule.pattern(0).match_data(context, context,
              context.lookup_data('clause_num') + 1):
          context.end_save_all_undo()
          with engine.prove(rule.rule_base.root_name, 'python_premise', context,
                            (rule.pattern(1),
                             rule.pattern(2),
                             rule.pattern(3),
                             rule.pattern(4),
                             rule.pattern(5),
                             rule.pattern(6),
                             rule.pattern(7),)) \
            as gen_2:
            for x_2 in gen_2:
              assert x_2 is None, \
                "compiler.fc_python_premise: got unexpected plan from when clause 2"
              rule.rule_base.num_bc_rule_successes += 1
              yield
        else: context.end_save_all_undo()
        context.undo_to_mark(mark1)
      rule.rule_base.num_bc_rule_failures += 1
    finally:
      context.done()
def assertions_0(rule, arg_patterns, arg_context):
  """Backward-chaining base case for 'assertions': one trivial proof.

  Succeeds exactly once when the goal arity matches this rule's head
  and every head pattern unifies with the caller's arguments.
  """
  engine = rule.rule_base.engine
  patterns = rule.goal_arg_patterns()
  if len(arg_patterns) != len(patterns):
    return
  context = contexts.bc_context(rule)
  try:
    matched = all(
      pattern.match_pattern(context, context, argument, arg_context)
      for pattern, argument in zip(patterns, arg_patterns))
    if matched:
      stats = rule.rule_base
      stats.num_bc_rules_matched += 1
      stats.num_bc_rule_successes += 1
      yield
      # Resumed after the single proof: the search is exhausted.
      stats.num_bc_rule_failures += 1
  finally:
    context.done()
def assertions_n(rule, arg_patterns, arg_context):
  """Backward-chaining recursive case for 'assertions'.

  Proves one 'assertion' then recurses on the remaining 'assertions'.
  Machine-generated (rule base 'compiler').
  """
  engine = rule.rule_base.engine
  patterns = rule.goal_arg_patterns()
  if len(arg_patterns) == len(patterns):
    context = contexts.bc_context(rule)
    try:
      if all(map(lambda pat, arg:
                   pat.match_pattern(context, context,
                                       arg, arg_context),
                 patterns,
                 arg_patterns)):
        rule.rule_base.num_bc_rules_matched += 1
        flag_1 = False
        with engine.prove(rule.rule_base.root_name, 'assertion', context,
                          (rule.pattern(0),
                           rule.pattern(1),
                           rule.pattern(2),
                           rule.pattern(3),)) \
          as gen_1:
          for x_1 in gen_1:
            flag_1 = True
            assert x_1 is None, \
              "compiler.assertions_n: got unexpected plan from when clause 1"
            flag_2 = False
            with engine.prove(rule.rule_base.root_name, 'assertions', context,
                              (rule.pattern(4),
                               rule.pattern(5),
                               rule.pattern(3),
                               rule.pattern(6),)) \
              as gen_2:
              for x_2 in gen_2:
                flag_2 = True
                assert x_2 is None, \
                  "compiler.assertions_n: got unexpected plan from when clause 2"
                rule.rule_base.num_bc_rule_successes += 1
                yield
            if not flag_2:
              raise AssertionError("compiler.assertions_n: 'when' clause 2 failed")
        if not flag_1:
          raise AssertionError("compiler.assertions_n: 'when' clause 1 failed")
      rule.rule_base.num_bc_rule_failures += 1
    finally:
      context.done()
def assertion(rule, arg_patterns, arg_context):
  """Backward-chaining rule 'assertion' (rule base 'compiler').

  Merges the assertion's patterns and emits the engine.assert_ code
  template.  Machine-generated -- do not hand-edit.
  """
  engine = rule.rule_base.engine
  patterns = rule.goal_arg_patterns()
  if len(arg_patterns) == len(patterns):
    context = contexts.bc_context(rule)
    try:
      if all(map(lambda pat, arg:
                   pat.match_pattern(context, context,
                                       arg, arg_context),
                 patterns,
                 arg_patterns)):
        rule.rule_base.num_bc_rules_matched += 1
        mark1 = context.mark(True)
        if rule.pattern(0).match_data(context, context,
              \
  helpers.merge_patterns(context.lookup_data('patterns'), context.lookup_data('patterns_in'))):
          context.end_save_all_undo()
          mark2 = context.mark(True)
          if rule.pattern(1).match_data(context, context,
                (('STARTING_LINENO', context.lookup_data('start_lineno')),
                 "engine.assert_(%r, %r," % (context.lookup_data('kb_name'), context.lookup_data('entity_name')),
                 ('INDENT', 15),
                 helpers.list_format(
                   ("rule.pattern(%d).as_data(context)" % pat_num
                    for pat_num in context.lookup_data('pat_nums')),
                   '(', ')),'),
                 ('ENDING_LINENO', context.lookup_data('end_lineno')),
                 "POPINDENT",
                )):
            context.end_save_all_undo()
            rule.rule_base.num_bc_rule_successes += 1
            yield
          else: context.end_save_all_undo()
          context.undo_to_mark(mark2)
        else: context.end_save_all_undo()
        context.undo_to_mark(mark1)
      rule.rule_base.num_bc_rule_failures += 1
    finally:
      context.done()
def python_assertion(rule, arg_patterns, arg_context):
  """Backward-chaining rule 'python_assertion': trivially provable.

  Yields a single (plan-less) proof when the goal arity matches and
  the head patterns unify with the caller's arguments.
  """
  engine = rule.rule_base.engine
  patterns = rule.goal_arg_patterns()
  if len(arg_patterns) == len(patterns):
    context = contexts.bc_context(rule)
    try:
      # Short-circuit on the first pattern that fails to unify.
      for pat, arg in zip(patterns, arg_patterns):
        if not pat.match_pattern(context, context, arg, arg_context):
          break
      else:
        rule.rule_base.num_bc_rules_matched += 1
        rule.rule_base.num_bc_rule_successes += 1
        yield
        rule.rule_base.num_bc_rule_failures += 1
    finally:
      context.done()
def bc_rules(rule, arg_patterns, arg_context):
  """Backward-chaining rule 'bc_rules' (rule base 'compiler').

  forall-style accumulator: proves 'bc_rule' for each element of the
  bound 'bc_rules' list, collecting plan lines, bc functions, and init
  lines, then binds the collected tuples.  Machine-generated.
  """
  engine = rule.rule_base.engine
  patterns = rule.goal_arg_patterns()
  if len(arg_patterns) == len(patterns):
    context = contexts.bc_context(rule)
    try:
      if all(map(lambda pat, arg:
                   pat.match_pattern(context, context,
                                       arg, arg_context),
                 patterns,
                 arg_patterns)):
        rule.rule_base.num_bc_rules_matched += 1
        # Accumulators for the forall over 'bc_rules'.
        bc_plan_lines = []
        bc_bc_funs = []
        bc_bc_init = []
        # forall356_worked: True iff every element was proven so far.
        forall356_worked = True
        for python_ans in \
            context.lookup_data('bc_rules'):
          mark2 = context.mark(True)
          if rule.pattern(0).match_data(context, context, python_ans):
            context.end_save_all_undo()
            forall356_worked = False
            flag_3 = False
            with engine.prove(rule.rule_base.root_name, 'bc_rule', context,
                              (rule.pattern(1),
                               rule.pattern(0),
                               rule.pattern(2),
                               rule.pattern(3),
                               rule.pattern(4),)) \
              as gen_3:
              for x_3 in gen_3:
                flag_3 = True
                assert x_3 is None, \
                  "compiler.bc_rules: got unexpected plan from when clause 3"
                bc_plan_lines.extend(context.lookup_data('bc_plan1'))
                bc_bc_funs.append(context.lookup_data('bc_bc_fun1'))
                bc_bc_init.append(context.lookup_data('bc_bc_init1'))
                forall356_worked = True
                # Only the first proof per element is needed.
                if forall356_worked: break
            if not flag_3:
              raise AssertionError("compiler.bc_rules: 'when' clause 3 failed")
            if not forall356_worked:
              context.undo_to_mark(mark2)
              break
          else: context.end_save_all_undo()
          context.undo_to_mark(mark2)
        if forall356_worked:
          mark5 = context.mark(True)
          if rule.pattern(5).match_data(context, context,
                tuple(bc_plan_lines)):
            context.end_save_all_undo()
            mark6 = context.mark(True)
            if rule.pattern(6).match_data(context, context,
                  tuple(bc_bc_funs)):
              context.end_save_all_undo()
              mark7 = context.mark(True)
              if rule.pattern(7).match_data(context, context,
                    tuple(bc_bc_init)):
                context.end_save_all_undo()
                rule.rule_base.num_bc_rule_successes += 1
                yield
              else: context.end_save_all_undo()
              context.undo_to_mark(mark7)
            else: context.end_save_all_undo()
            context.undo_to_mark(mark6)
          else: context.end_save_all_undo()
          context.undo_to_mark(mark5)
      rule.rule_base.num_bc_rule_failures += 1
    finally:
      context.done()
def bc_rule_(rule, arg_patterns, arg_context):
  """Backward-chaining rule 'bc_rule_' (rule base 'compiler').

  Proves 'bc_premises' for one bc rule, then assembles the goal
  function skeleton (head, premise code, success/failure counters,
  tail) and its declaration lines.  Machine-generated.
  """
  engine = rule.rule_base.engine
  patterns = rule.goal_arg_patterns()
  if len(arg_patterns) == len(patterns):
    context = contexts.bc_context(rule)
    try:
      if all(map(lambda pat, arg:
                   pat.match_pattern(context, context,
                                       arg, arg_context),
                 patterns,
                 arg_patterns)):
        rule.rule_base.num_bc_rules_matched += 1
        flag_1 = False
        with engine.prove(rule.rule_base.root_name, 'bc_premises', context,
                          (rule.pattern(0),
                           rule.pattern(1),
                           rule.pattern(2),
                           rule.pattern(3),
                           rule.pattern(4),
                           rule.pattern(5),
                           rule.pattern(6),
                           rule.pattern(7),)) \
          as gen_1:
          for x_1 in gen_1:
            flag_1 = True
            assert x_1 is None, \
              "compiler.bc_rule_: got unexpected plan from when clause 1"
            mark2 = context.mark(True)
            if rule.pattern(8).match_data(context, context,
                  \
  helpers.goal(context.lookup_data('rb_name'), context.lookup_data('name'), context.lookup_data('goal'),
               context.lookup_data('prem_plan_lines'), context.lookup_data('python_lines'))):
              context.end_save_all_undo()
              mark3 = context.mark(True)
              if rule.pattern(9).match_data(context, context,
                    (context.lookup_data('goal_fn_head'),
                     context.lookup_data('prem_fn_head'),
                     'rule.rule_base.num_bc_rule_successes += 1',
                     'yield context' if context.lookup_data('plan_lines') else 'yield',
                     context.lookup_data('prem_fn_tail'),
                     'rule.rule_base.num_bc_rule_failures += 1',
                     context.lookup_data('goal_fn_tail'),
                    )):
                context.end_save_all_undo()
                mark4 = context.mark(True)
                if rule.pattern(10).match_data(context, context,
                      (context.lookup_data('goal_decl_lines'),
                       context.lookup_data('prem_decl_lines'),
                       "POPINDENT",
                      )):
                  context.end_save_all_undo()
                  rule.rule_base.num_bc_rule_successes += 1
                  yield
                else: context.end_save_all_undo()
                context.undo_to_mark(mark4)
              else: context.end_save_all_undo()
              context.undo_to_mark(mark3)
            else: context.end_save_all_undo()
            context.undo_to_mark(mark2)
        if not flag_1:
          raise AssertionError("compiler.bc_rule_: 'when' clause 1 failed")
      rule.rule_base.num_bc_rule_failures += 1
    finally:
      context.done()
def bc_premises(rule, arg_patterns, arg_context):
  """Backward-chaining rule 'bc_premises' (rule base 'compiler').

  Delegates to 'bc_premises1', then formats the pattern list, the plan
  variable names, and the plan lines (unordered-first, then sorted by
  step number).  Machine-generated.
  """
  engine = rule.rule_base.engine
  patterns = rule.goal_arg_patterns()
  if len(arg_patterns) == len(patterns):
    context = contexts.bc_context(rule)
    try:
      if all(map(lambda pat, arg:
                   pat.match_pattern(context, context,
                                       arg, arg_context),
                 patterns,
                 arg_patterns)):
        rule.rule_base.num_bc_rules_matched += 1
        flag_1 = False
        with engine.prove(rule.rule_base.root_name, 'bc_premises1', context,
                          (rule.pattern(0),
                           rule.pattern(1),
                           rule.pattern(2),
                           rule.pattern(3),
                           rule.pattern(4),
                           rule.pattern(5),
                           rule.pattern(6),
                           rule.pattern(7),
                           rule.pattern(8),
                           rule.pattern(9),
                           rule.pattern(10),
                           rule.pattern(11),
                           rule.pattern(12),
                           rule.pattern(13),)) \
          as gen_1:
          for x_1 in gen_1:
            flag_1 = True
            assert x_1 is None, \
              "compiler.bc_premises: got unexpected plan from when clause 1"
            mark2 = context.mark(True)
            if rule.pattern(14).match_data(context, context,
                  helpers.list_format(context.lookup_data('patterns'), '(', '))')):
              context.end_save_all_undo()
              mark3 = context.mark(True)
              if rule.pattern(15).match_data(context, context,
                    ('(' + ' '.join(tuple(repr(plan_var_name) + ','
                                          for plan_var_name
                                          in context.lookup_data('plan_var_names'))) +
                     '),',) + context.lookup_data('pat_lines')):
                context.end_save_all_undo()
                mark4 = context.mark(True)
                # Plan lines with step None come first, then the rest
                # ordered by their numeric step.
                if rule.pattern(16).match_data(context, context,
                      tuple(itertools.chain.from_iterable(itertools.chain(
                        (lines for step, lines in context.lookup_data('plan_lines1') if step is None),
                        (lines for step, lines
                         in sorted(((step, lines) for step, lines in context.lookup_data('plan_lines1')
                                    if step is not None),
                                   key=lambda t: t[0])))))):
                  context.end_save_all_undo()
                  rule.rule_base.num_bc_rule_successes += 1
                  yield
                else: context.end_save_all_undo()
                context.undo_to_mark(mark4)
              else: context.end_save_all_undo()
              context.undo_to_mark(mark3)
            else: context.end_save_all_undo()
            context.undo_to_mark(mark2)
        if not flag_1:
          raise AssertionError("compiler.bc_premises: 'when' clause 1 failed")
      rule.rule_base.num_bc_rule_failures += 1
    finally:
      context.done()
def bc_premises1_0(rule, arg_patterns, arg_context):
  """Backward-chaining base case for 'bc_premises1' (empty premise list).

  Proves the goal exactly once when the arity matches and all head
  patterns unify with the caller's arguments.
  """
  engine = rule.rule_base.engine
  patterns = rule.goal_arg_patterns()
  if len(arg_patterns) != len(patterns):
    return
  context = contexts.bc_context(rule)
  try:
    def unifies(pair):
      pat, arg = pair
      return pat.match_pattern(context, context, arg, arg_context)
    if all(map(unifies, zip(patterns, arg_patterns))):
      rule.rule_base.num_bc_rules_matched += 1
      rule.rule_base.num_bc_rule_successes += 1
      yield
      rule.rule_base.num_bc_rule_failures += 1
  finally:
    context.done()
def bc_premises1_n(rule, arg_patterns, arg_context):
  """Backward-chaining recursive case for 'bc_premises1'.

  Proves one 'bc_premise', recurses on the rest, then concatenates the
  plan lines, fn heads (in order), and fn tails (in reverse order).
  Machine-generated (rule base 'compiler').
  """
  engine = rule.rule_base.engine
  patterns = rule.goal_arg_patterns()
  if len(arg_patterns) == len(patterns):
    context = contexts.bc_context(rule)
    try:
      if all(map(lambda pat, arg:
                   pat.match_pattern(context, context,
                                       arg, arg_context),
                 patterns,
                 arg_patterns)):
        rule.rule_base.num_bc_rules_matched += 1
        flag_1 = False
        with engine.prove(rule.rule_base.root_name, 'bc_premise', context,
                          (rule.pattern(0),
                           rule.pattern(1),
                           rule.pattern(2),
                           rule.pattern(3),
                           rule.pattern(4),
                           rule.pattern(5),
                           rule.pattern(6),
                           rule.pattern(7),
                           rule.pattern(8),
                           rule.pattern(9),
                           rule.pattern(10),
                           rule.pattern(11),
                           rule.pattern(12),
                           rule.pattern(13),)) \
          as gen_1:
          for x_1 in gen_1:
            flag_1 = True
            assert x_1 is None, \
              "compiler.bc_premises1_n: got unexpected plan from when clause 1"
            flag_2 = False
            with engine.prove(rule.rule_base.root_name, 'bc_premises1', context,
                              (rule.pattern(0),
                               rule.pattern(1),
                               rule.pattern(3),
                               rule.pattern(14),
                               rule.pattern(15),
                               rule.pattern(5),
                               rule.pattern(6),
                               rule.pattern(8),
                               rule.pattern(16),
                               rule.pattern(10),
                               rule.pattern(17),
                               rule.pattern(18),
                               rule.pattern(19),
                               rule.pattern(20),)) \
              as gen_2:
              for x_2 in gen_2:
                flag_2 = True
                assert x_2 is None, \
                  "compiler.bc_premises1_n: got unexpected plan from when clause 2"
                mark3 = context.mark(True)
                if rule.pattern(21).match_data(context, context,
                      context.lookup_data('plan_lines1') + context.lookup_data('plan_lines2')):
                  context.end_save_all_undo()
                  mark4 = context.mark(True)
                  if rule.pattern(22).match_data(context, context,
                        context.lookup_data('fn_head1') + context.lookup_data('fn_head2')):
                    context.end_save_all_undo()
                    mark5 = context.mark(True)
                    # Note: tails are concatenated inner-first (2 then 1).
                    if rule.pattern(23).match_data(context, context,
                          context.lookup_data('fn_tail2') + context.lookup_data('fn_tail1')):
                      context.end_save_all_undo()
                      rule.rule_base.num_bc_rule_successes += 1
                      yield
                    else: context.end_save_all_undo()
                    context.undo_to_mark(mark5)
                  else: context.end_save_all_undo()
                  context.undo_to_mark(mark4)
                else: context.end_save_all_undo()
                context.undo_to_mark(mark3)
            if not flag_2:
              raise AssertionError("compiler.bc_premises1_n: 'when' clause 2 failed")
        if not flag_1:
          raise AssertionError("compiler.bc_premises1_n: 'when' clause 1 failed")
      rule.rule_base.num_bc_rule_failures += 1
    finally:
      context.done()
def bc_premise(rule, arg_patterns, arg_context):
  """Backward-chaining rule 'bc_premise' (rule base 'compiler').

  Compiles one backward-chaining premise: bumps the clause number,
  defaults the kb name, merges patterns, emits the engine.prove code
  template, then proves 'add_required' and 'gen_plan_lines' before
  binding the plan variables and the fn head/tail line tuples.
  Machine-generated -- regenerate rather than hand-edit.
  """
  engine = rule.rule_base.engine
  patterns = rule.goal_arg_patterns()
  if len(arg_patterns) == len(patterns):
    context = contexts.bc_context(rule)
    try:
      if all(map(lambda pat, arg:
                   pat.match_pattern(context, context,
                                       arg, arg_context),
                 patterns,
                 arg_patterns)):
        rule.rule_base.num_bc_rules_matched += 1
        mark1 = context.mark(True)
        if rule.pattern(0).match_data(context, context,
              context.lookup_data('clause_num') + 1):
          context.end_save_all_undo()
          mark2 = context.mark(True)
          # Fall back to the rule base's own root name when no kb name
          # was given.
          if rule.pattern(1).match_data(context, context,
                context.lookup_data('kb_name') or "rule.rule_base.root_name"):
            context.end_save_all_undo()
            mark3 = context.mark(True)
            if rule.pattern(2).match_data(context, context,
                  \
  helpers.merge_patterns(context.lookup_data('arg_patterns'), context.lookup_data('patterns_in'))):
              context.end_save_all_undo()
              mark4 = context.mark(True)
              if rule.pattern(3).match_data(context, context,
                    (('STARTING_LINENO', context.lookup_data('start_lineno')),
                     "with engine.prove(%s, %s, context," %
                       (context.lookup_data('kb_name2'), context.lookup_data('entity_name')),
                     ('INDENT', 2),
                     ('INDENT', 16),
                     helpers.list_format(('rule.pattern(%d)' % pat_num
                                          for pat_num in context.lookup_data('pat_nums')),
                                         '(', ')) \\'),
                     'POPINDENT',
                     "as gen_%d:" % context.lookup_data('clause_num'),
                     "for x_%d in gen_%d:" % (context.lookup_data('clause_num'), context.lookup_data('clause_num')),
                     ('INDENT', 2),
                    )):
                context.end_save_all_undo()
                flag_5 = False
                with engine.prove(rule.rule_base.root_name, 'add_required', context,
                                  (rule.pattern(4),
                                   rule.pattern(5),
                                   rule.pattern(6),
                                   rule.pattern(7),
                                   rule.pattern(3),
                                   rule.pattern(8),
                                   rule.pattern(9),
                                   rule.pattern(10),)) \
                  as gen_5:
                  for x_5 in gen_5:
                    flag_5 = True
                    assert x_5 is None, \
                      "compiler.bc_premise: got unexpected plan from when clause 5"
                    flag_6 = False
                    with engine.prove(rule.rule_base.root_name, 'gen_plan_lines', context,
                                      (rule.pattern(5),
                                       rule.pattern(6),
                                       rule.pattern(7),
                                       rule.pattern(11),
                                       rule.pattern(12),
                                       rule.pattern(13),
                                       rule.pattern(14),
                                       rule.pattern(15),
                                       rule.pattern(16),
                                       rule.pattern(17),
                                       rule.pattern(18),)) \
                      as gen_6:
                      for x_6 in gen_6:
                        flag_6 = True
                        assert x_6 is None, \
                          "compiler.bc_premise: got unexpected plan from when clause 6"
                        mark7 = context.mark(True)
                        if rule.pattern(19).match_data(context, context,
                              helpers.merge_patterns(context.lookup_data('plan_vars_needed'),
                                                     context.lookup_data('plan_var_names_in'))):
                          context.end_save_all_undo()
                          mark8 = context.mark(True)
                          if rule.pattern(20).match_data(context, context,
                                context.lookup_data('fn_head2') + context.lookup_data('fn_head3') + (('ENDING_LINENO', context.lookup_data('end_lineno')),)):
                            context.end_save_all_undo()
                            mark9 = context.mark(True)
                            if rule.pattern(21).match_data(context, context,
                                  (context.lookup_data('fn_tail3'),
                                   () if context.lookup_data('break_cond') is None
                                   else "if %s: break" % context.lookup_data('break_cond'),
                                   context.lookup_data('fn_tail2'))):
                              context.end_save_all_undo()
                              rule.rule_base.num_bc_rule_successes += 1
                              yield
                            else: context.end_save_all_undo()
                            context.undo_to_mark(mark9)
                          else: context.end_save_all_undo()
                          context.undo_to_mark(mark8)
                        else: context.end_save_all_undo()
                        context.undo_to_mark(mark7)
                    if not flag_6:
                      raise AssertionError("compiler.bc_premise: 'when' clause 6 failed")
                if not flag_5:
                  raise AssertionError("compiler.bc_premise: 'when' clause 5 failed")
              else: context.end_save_all_undo()
              context.undo_to_mark(mark4)
            else: context.end_save_all_undo()
            context.undo_to_mark(mark3)
          else: context.end_save_all_undo()
          context.undo_to_mark(mark2)
        else: context.end_save_all_undo()
        context.undo_to_mark(mark1)
      rule.rule_base.num_bc_rule_failures += 1
    finally:
      context.done()
def bc_first(rule, arg_patterns, arg_context):
  """Backward-chaining rule 'bc_first' (rule base 'compiler').

  Wraps a bc premise list in a "firstN_worked" break condition via
  'bc_premises1' and 'add_required'.  Machine-generated.
  """
  engine = rule.rule_base.engine
  patterns = rule.goal_arg_patterns()
  if len(arg_patterns) == len(patterns):
    context = contexts.bc_context(rule)
    try:
      if all(map(lambda pat, arg:
                   pat.match_pattern(context, context,
                                       arg, arg_context),
                 patterns,
                 arg_patterns)):
        rule.rule_base.num_bc_rules_matched += 1
        mark1 = context.mark(True)
        if rule.pattern(0).match_data(context, context,
              "first%d_worked" % context.lookup_data('clause_num')):
          context.end_save_all_undo()
          flag_2 = False
          with engine.prove(rule.rule_base.root_name, 'bc_premises1', context,
                            (rule.pattern(1),
                             rule.pattern(2),
                             rule.pattern(3),
                             rule.pattern(4),
                             rule.pattern(5),
                             rule.pattern(0),
                             rule.pattern(6),
                             rule.pattern(7),
                             rule.pattern(8),
                             rule.pattern(9),
                             rule.pattern(10),
                             rule.pattern(11),
                             rule.pattern(12),
                             rule.pattern(13),)) \
            as gen_2:
            for x_2 in gen_2:
              flag_2 = True
              assert x_2 is None, \
                "compiler.bc_first: got unexpected plan from when clause 2"
              flag_3 = False
              with engine.prove(rule.rule_base.root_name, 'add_required', context,
                                (rule.pattern(14),
                                 rule.pattern(1),
                                 rule.pattern(2),
                                 rule.pattern(3),
                                 rule.pattern(12),
                                 rule.pattern(13),
                                 rule.pattern(15),
                                 rule.pattern(16),)) \
                as gen_3:
                for x_3 in gen_3:
                  flag_3 = True
                  assert x_3 is None, \
                    "compiler.bc_first: got unexpected plan from when clause 3"
                  mark4 = context.mark(True)
                  if rule.pattern(17).match_data(context, context,
                        "%s = False" % context.lookup_data('break_cond')):
                    context.end_save_all_undo()
                    mark5 = context.mark(True)
                    if rule.pattern(18).match_data(context, context,
                          "%s = True" % context.lookup_data('break_cond')):
                      context.end_save_all_undo()
                      rule.rule_base.num_bc_rule_successes += 1
                      yield
                    else: context.end_save_all_undo()
                    context.undo_to_mark(mark5)
                  else: context.end_save_all_undo()
                  context.undo_to_mark(mark4)
              if not flag_3:
                raise AssertionError("compiler.bc_first: 'when' clause 3 failed")
          if not flag_2:
            raise AssertionError("compiler.bc_first: 'when' clause 2 failed")
        else: context.end_save_all_undo()
        context.undo_to_mark(mark1)
      rule.rule_base.num_bc_rule_failures += 1
    finally:
      context.done()
def bc_forall_None(rule, arg_patterns, arg_context):
  """Backward-chaining rule 'bc_forall_None' (rule base 'compiler').

  Handles a bc forall premise with no 'require' part: proves
  'bc_premises1' and concatenates its head/tail lines.  Generated.
  """
  engine = rule.rule_base.engine
  patterns = rule.goal_arg_patterns()
  if len(arg_patterns) == len(patterns):
    context = contexts.bc_context(rule)
    try:
      if all(map(lambda pat, arg:
                   pat.match_pattern(context, context,
                                       arg, arg_context),
                 patterns,
                 arg_patterns)):
        rule.rule_base.num_bc_rules_matched += 1
        flag_1 = False
        with engine.prove(rule.rule_base.root_name, 'bc_premises1', context,
                          (rule.pattern(0),
                           rule.pattern(1),
                           rule.pattern(2),
                           rule.pattern(3),
                           rule.pattern(4),
                           rule.pattern(5),
                           rule.pattern(6),
                           rule.pattern(7),
                           rule.pattern(8),
                           rule.pattern(9),
                           rule.pattern(10),
                           rule.pattern(11),
                           rule.pattern(12),
                           rule.pattern(13),)) \
          as gen_1:
          for x_1 in gen_1:
            flag_1 = True
            assert x_1 is None, \
              "compiler.bc_forall_None: got unexpected plan from when clause 1"
            mark2 = context.mark(True)
            if rule.pattern(14).match_data(context, context,
                  context.lookup_data('fn_head1') + context.lookup_data('fn_tail')):
              context.end_save_all_undo()
              rule.rule_base.num_bc_rule_successes += 1
              yield
            else: context.end_save_all_undo()
            context.undo_to_mark(mark2)
        if not flag_1:
          raise AssertionError("compiler.bc_forall_None: 'when' clause 1 failed")
      rule.rule_base.num_bc_rule_failures += 1
    finally:
      context.done()
def bc_forall_require(rule, arg_patterns, arg_context):
  """Backward-chaining proof generator for rule 'bc_forall_require'.

  Generates the output-code skeleton for a 'forall ... require' premise:
  a 'forall<N>_worked' flag, the nested premise bodies, and a final
  'if forall<N>_worked:' guard (see the tuple matched against
  rule.pattern(21)).  Yields once per successful proof; backtracks via
  context mark/undo.  NOTE(review): appears to be Pyke-generated code.
  """
  engine = rule.rule_base.engine
  patterns = rule.goal_arg_patterns()
  if len(arg_patterns) == len(patterns):
    context = contexts.bc_context(rule)
    try:
      # Unify goal argument patterns with the caller's arguments.
      if all(map(lambda pat, arg:
                   pat.match_pattern(context, context,
                                     arg, arg_context),
                 patterns,
                 arg_patterns)):
        rule.rule_base.num_bc_rules_matched += 1
        # Bind the break conditions used inside the generated loop code.
        mark1 = context.mark(True)
        if rule.pattern(0).match_data(context, context,
              "forall%d_worked" % context.lookup_data('start_lineno')):
          context.end_save_all_undo()
          mark2 = context.mark(True)
          if rule.pattern(1).match_data(context, context,
                "not forall%d_worked" % context.lookup_data('start_lineno')):
            context.end_save_all_undo()
            flag_3 = False
            # 'when' clause 3: compile the forall's own premises.
            with engine.prove(rule.rule_base.root_name, 'bc_premises1', context,
                              (rule.pattern(2),
                               rule.pattern(3),
                               rule.pattern(4),
                               rule.pattern(5),
                               rule.pattern(6),
                               rule.pattern(1),
                               rule.pattern(7),
                               rule.pattern(8),
                               rule.pattern(9),
                               rule.pattern(10),
                               rule.pattern(11),
                               rule.pattern(12),
                               rule.pattern(13),
                               rule.pattern(14),)) \
              as gen_3:
              for x_3 in gen_3:
                flag_3 = True
                assert x_3 is None, \
                  "compiler.bc_forall_require: got unexpected plan from when clause 3"
                flag_4 = False
                # 'when' clause 4: compile the 'require' premises.
                with engine.prove(rule.rule_base.root_name, 'bc_premises1', context,
                                  (rule.pattern(2),
                                   rule.pattern(3),
                                   rule.pattern(5),
                                   rule.pattern(15),
                                   rule.pattern(16),
                                   rule.pattern(0),
                                   rule.pattern(7),
                                   rule.pattern(9),
                                   rule.pattern(17),
                                   rule.pattern(11),
                                   rule.pattern(18),
                                   rule.pattern(12),
                                   rule.pattern(19),
                                   rule.pattern(20),)) \
                  as gen_4:
                  for x_4 in gen_4:
                    flag_4 = True
                    assert x_4 is None, \
                      "compiler.bc_forall_require: got unexpected plan from when clause 4"
                    # Assemble the emitted function-body lines.
                    mark5 = context.mark(True)
                    if rule.pattern(21).match_data(context, context,
                          ("forall%d_worked = True" % context.lookup_data('start_lineno'),
                           context.lookup_data('fn_head1'),
                           "forall%d_worked = False" % context.lookup_data('start_lineno'),
                           context.lookup_data('fn_head2'),
                           "forall%d_worked = True" % context.lookup_data('start_lineno'),
                           context.lookup_data('fn_tail2'),
                           context.lookup_data('fn_tail1'),
                           "if forall%d_worked:" % context.lookup_data('start_lineno'),
                           ("INDENT", 2))):
                      context.end_save_all_undo()
                      rule.rule_base.num_bc_rule_successes += 1
                      yield
                    else: context.end_save_all_undo()
                    context.undo_to_mark(mark5)
                  if not flag_4:
                    raise AssertionError("compiler.bc_forall_require: 'when' clause 4 failed")
              if not flag_3:
                raise AssertionError("compiler.bc_forall_require: 'when' clause 3 failed")
          else: context.end_save_all_undo()
          context.undo_to_mark(mark2)
        else: context.end_save_all_undo()
        context.undo_to_mark(mark1)
        rule.rule_base.num_bc_rule_failures += 1
    finally:
      context.done()
def bc_notany(rule, arg_patterns, arg_context):
  """Backward-chaining proof generator for rule 'bc_notany'.

  Generates the output-code skeleton for a 'notany' premise: a
  'notany<N>_worked' flag around the nested premises and a final
  'if notany<N>_worked:' guard (tuple matched against rule.pattern(15)).
  Yields once per proof; NOTE(review): appears Pyke-generated.
  """
  engine = rule.rule_base.engine
  patterns = rule.goal_arg_patterns()
  if len(arg_patterns) == len(patterns):
    context = contexts.bc_context(rule)
    try:
      # Unify goal argument patterns with the caller's arguments.
      if all(map(lambda pat, arg:
                   pat.match_pattern(context, context,
                                     arg, arg_context),
                 patterns,
                 arg_patterns)):
        rule.rule_base.num_bc_rules_matched += 1
        # Break conditions for the generated code.
        mark1 = context.mark(True)
        if rule.pattern(0).match_data(context, context,
              "notany%d_worked" % context.lookup_data('start_lineno')):
          context.end_save_all_undo()
          mark2 = context.mark(True)
          if rule.pattern(1).match_data(context, context,
                "not notany%d_worked" % context.lookup_data('start_lineno')):
            context.end_save_all_undo()
            flag_3 = False
            # 'when' clause 3: compile the notany's nested premises.
            with engine.prove(rule.rule_base.root_name, 'bc_premises1', context,
                              (rule.pattern(2),
                               rule.pattern(3),
                               rule.pattern(4),
                               rule.pattern(5),
                               rule.pattern(6),
                               rule.pattern(1),
                               rule.pattern(7),
                               rule.pattern(8),
                               rule.pattern(9),
                               rule.pattern(10),
                               rule.pattern(11),
                               rule.pattern(12),
                               rule.pattern(13),
                               rule.pattern(14),)) \
              as gen_3:
              for x_3 in gen_3:
                flag_3 = True
                assert x_3 is None, \
                  "compiler.bc_notany: got unexpected plan from when clause 3"
                # Assemble the emitted function-body lines.
                mark4 = context.mark(True)
                if rule.pattern(15).match_data(context, context,
                      ("notany%d_worked = True" % context.lookup_data('start_lineno'),
                       context.lookup_data('fn_head1'),
                       "notany%d_worked = False" % context.lookup_data('start_lineno'),
                       context.lookup_data('fn_tail1'),
                       "if notany%d_worked:" % context.lookup_data('start_lineno'),
                       ("INDENT", 2)) ):
                  context.end_save_all_undo()
                  rule.rule_base.num_bc_rule_successes += 1
                  yield
                else: context.end_save_all_undo()
                context.undo_to_mark(mark4)
              if not flag_3:
                raise AssertionError("compiler.bc_notany: 'when' clause 3 failed")
          else: context.end_save_all_undo()
          context.undo_to_mark(mark2)
        else: context.end_save_all_undo()
        context.undo_to_mark(mark1)
        rule.rule_base.num_bc_rule_failures += 1
    finally:
      context.done()
def no_plan(rule, arg_patterns, arg_context):
  """Backward-chaining proof generator for rule 'no_plan'.

  Binds the lines of generated code that assert a 'when' clause produced
  no plan: 'assert x_<n> is None, ...' plus INDENT/POPINDENT markers.
  Yields once on success.  NOTE(review): appears Pyke-generated.
  """
  engine = rule.rule_base.engine
  patterns = rule.goal_arg_patterns()
  if len(arg_patterns) == len(patterns):
    context = contexts.bc_context(rule)
    try:
      # Unify goal argument patterns with the caller's arguments.
      if all(map(lambda pat, arg:
                   pat.match_pattern(context, context,
                                     arg, arg_context),
                 patterns,
                 arg_patterns)):
        rule.rule_base.num_bc_rules_matched += 1
        # mark/undo pair so bindings roll back on backtrack.
        mark1 = context.mark(True)
        if rule.pattern(0).match_data(context, context,
              ('assert x_%d is None, \\' % context.lookup_data('clause_num'),
               ('INDENT', 2),
               '"%(rb_name)s.%(rule_name)s: got unexpected plan from '
               'when clause %(clause_num)d"' %
                 {'clause_num': context.lookup_data('clause_num'),
                  'rb_name': context.lookup_data('rb_name'),
                  'rule_name': context.lookup_data('rule_name')},
               'POPINDENT',)):
          context.end_save_all_undo()
          rule.rule_base.num_bc_rule_successes += 1
          yield
        else: context.end_save_all_undo()
        context.undo_to_mark(mark1)
        rule.rule_base.num_bc_rule_failures += 1
    finally:
      context.done()
def as_plan(rule, arg_patterns, arg_context):
  """Backward-chaining proof generator for rule 'as_plan'.

  Merges a 'contexts.variable(<pat_var_name>)' pattern into the pattern
  list, then proves subgoal 'plan_bindings' to generate the plan-binding
  code.  Yields once per proof.  NOTE(review): appears Pyke-generated.
  """
  engine = rule.rule_base.engine
  patterns = rule.goal_arg_patterns()
  if len(arg_patterns) == len(patterns):
    context = contexts.bc_context(rule)
    try:
      # Unify goal argument patterns with the caller's arguments.
      if all(map(lambda pat, arg:
                   pat.match_pattern(context, context,
                                     arg, arg_context),
                 patterns,
                 arg_patterns)):
        rule.rule_base.num_bc_rules_matched += 1
        mark1 = context.mark(True)
        if rule.pattern(0).match_data(context, context,
                \
  helpers.merge_pattern("contexts.variable(%r)" % context.lookup_data('pat_var_name'),
                        context.lookup_data('patterns_in'))):
          context.end_save_all_undo()
          flag_2 = False
          # 'when' clause 2: prove subgoal 'plan_bindings'.
          with engine.prove(rule.rule_base.root_name, 'plan_bindings', context,
                            (rule.pattern(1),
                             rule.pattern(2),
                             rule.pattern(3),
                             rule.pattern(4),
                             rule.pattern(5),
                             rule.pattern(6),
                             rule.pattern(7),)) \
            as gen_2:
            for x_2 in gen_2:
              flag_2 = True
              assert x_2 is None, \
                "compiler.as_plan: got unexpected plan from when clause 2"
              rule.rule_base.num_bc_rule_successes += 1
              yield
            # Required clause: must succeed at least once.
            if not flag_2:
              raise AssertionError("compiler.as_plan: 'when' clause 2 failed")
        else: context.end_save_all_undo()
        context.undo_to_mark(mark1)
        rule.rule_base.num_bc_rule_failures += 1
    finally:
      context.done()
def plan_spec(rule, arg_patterns, arg_context):
  """Backward-chaining proof generator for rule 'plan_spec'.

  Parallel to as_plan but keyed on 'plan_var_name': merges a
  'contexts.variable(<plan_var_name>)' pattern, then proves subgoal
  'plan_bindings'.  NOTE(review): appears Pyke-generated.
  """
  engine = rule.rule_base.engine
  patterns = rule.goal_arg_patterns()
  if len(arg_patterns) == len(patterns):
    context = contexts.bc_context(rule)
    try:
      # Unify goal argument patterns with the caller's arguments.
      if all(map(lambda pat, arg:
                   pat.match_pattern(context, context,
                                     arg, arg_context),
                 patterns,
                 arg_patterns)):
        rule.rule_base.num_bc_rules_matched += 1
        mark1 = context.mark(True)
        if rule.pattern(0).match_data(context, context,
                \
  helpers.merge_pattern("contexts.variable(%r)" % context.lookup_data('plan_var_name'),
                        context.lookup_data('patterns_in'))):
          context.end_save_all_undo()
          flag_2 = False
          # 'when' clause 2: prove subgoal 'plan_bindings'.
          with engine.prove(rule.rule_base.root_name, 'plan_bindings', context,
                            (rule.pattern(1),
                             rule.pattern(2),
                             rule.pattern(3),
                             rule.pattern(4),
                             rule.pattern(5),
                             rule.pattern(6),
                             rule.pattern(7),)) \
            as gen_2:
            for x_2 in gen_2:
              flag_2 = True
              assert x_2 is None, \
                "compiler.plan_spec: got unexpected plan from when clause 2"
              rule.rule_base.num_bc_rule_successes += 1
              yield
            # Required clause: must succeed at least once.
            if not flag_2:
              raise AssertionError("compiler.plan_spec: 'when' clause 2 failed")
        else: context.end_save_all_undo()
        context.undo_to_mark(mark1)
        rule.rule_base.num_bc_rule_failures += 1
    finally:
      context.done()
def illegal_plan_spec(rule, arg_patterns, arg_context):
  """Backward-chaining proof generator for rule 'illegal_plan_spec'.

  Error path: binds the result of helpers.syntax_error(...) reporting
  "illegal plan_spec in forall" at the recorded lineno/lexpos.
  NOTE(review): appears Pyke-generated.
  """
  engine = rule.rule_base.engine
  patterns = rule.goal_arg_patterns()
  if len(arg_patterns) == len(patterns):
    context = contexts.bc_context(rule)
    try:
      # Unify goal argument patterns with the caller's arguments.
      if all(map(lambda pat, arg:
                   pat.match_pattern(context, context,
                                     arg, arg_context),
                 patterns,
                 arg_patterns)):
        rule.rule_base.num_bc_rules_matched += 1
        mark1 = context.mark(True)
        if rule.pattern(0).match_data(context, context,
              helpers.syntax_error("illegal plan_spec in forall",
                                   context.lookup_data('lineno'), context.lookup_data('lexpos'))):
          context.end_save_all_undo()
          rule.rule_base.num_bc_rule_successes += 1
          yield
        else: context.end_save_all_undo()
        context.undo_to_mark(mark1)
        rule.rule_base.num_bc_rule_failures += 1
    finally:
      context.done()
def plan_bindings(rule, arg_patterns, arg_context):
  """Backward-chaining proof generator for rule 'plan_bindings'.

  Binds the generated code lines that (a) assert a plan WAS returned
  ('assert x_<n> is not None'), match it against a pattern, and raise
  on mismatch (fn-head lines, rule.pattern(0)); and (b) undo to the
  clause's mark (fn-tail line, rule.pattern(1)).  Yields once.
  NOTE(review): appears Pyke-generated.
  """
  engine = rule.rule_base.engine
  patterns = rule.goal_arg_patterns()
  if len(arg_patterns) == len(patterns):
    context = contexts.bc_context(rule)
    try:
      # Unify goal argument patterns with the caller's arguments.
      if all(map(lambda pat, arg:
                   pat.match_pattern(context, context,
                                     arg, arg_context),
                 patterns,
                 arg_patterns)):
        rule.rule_base.num_bc_rules_matched += 1
        # Head lines of the generated plan-check code.
        mark1 = context.mark(True)
        if rule.pattern(0).match_data(context, context,
              ('assert x_%d is not None, \\' % context.lookup_data('clause_num'),
               ('INDENT', 2),
               '"%(rb_name)s.%(rule_name)s: expected plan from '
               'when clause %(clause_num)d"' %
                 {'clause_num': context.lookup_data('clause_num'),
                  'rb_name': context.lookup_data('rb_name'),
                  'rule_name': context.lookup_data('rule_name')},
               'POPINDENT',
               "mark%d = context.mark(True)" % context.lookup_data('clause_num'),
               "if not rule.pattern(%d).match_data(context, context, "
                 "x_%d):" % (context.lookup_data('pat_num'), context.lookup_data('clause_num')),
               ('INDENT', 2),
               'raise AssertionError("%(rb_name)s.%(rule_name)s: '
               'plan match to $%(plan_var_name)s failed in '
               'when clause %(clause_num)d")' %
                 {'clause_num': context.lookup_data('clause_num'),
                  'plan_var_name': context.lookup_data('plan_var_name'),
                  'rb_name': context.lookup_data('rb_name'),
                  'rule_name': context.lookup_data('rule_name')},
               'POPINDENT',
               "context.end_save_all_undo()")):
          context.end_save_all_undo()
          # Tail line of the generated plan-check code.
          mark2 = context.mark(True)
          if rule.pattern(1).match_data(context, context,
                ("context.undo_to_mark(mark%d)" % context.lookup_data('clause_num'),)):
            context.end_save_all_undo()
            rule.rule_base.num_bc_rule_successes += 1
            yield
          else: context.end_save_all_undo()
          context.undo_to_mark(mark2)
        else: context.end_save_all_undo()
        context.undo_to_mark(mark1)
        rule.rule_base.num_bc_rule_failures += 1
    finally:
      context.done()
def not_required(rule, arg_patterns, arg_context):
  """Premise-free backward-chaining rule: succeeds exactly once.

  Unifies each goal argument pattern against the caller's argument
  patterns; on a full match it bumps the matched/success counters and
  yields once (no plan).  If the caller resumes the generator looking
  for another answer, the failure counter is bumped instead.
  """
  engine = rule.rule_base.engine
  patterns = rule.goal_arg_patterns()
  if len(arg_patterns) == len(patterns):
    context = contexts.bc_context(rule)
    try:
      unified = all(
          goal_pat.match_pattern(context, context, call_pat, arg_context)
          for goal_pat, call_pat in zip(patterns, arg_patterns))
      if unified:
        stats = rule.rule_base
        stats.num_bc_rules_matched += 1
        stats.num_bc_rule_successes += 1
        yield
        # Resumed after the single success: record the exhausted search.
        stats.num_bc_rule_failures += 1
    finally:
      # Always release the context, even if the generator is closed early.
      context.done()
def required(rule, arg_patterns, arg_context):
  """Backward-chaining proof generator for rule 'required'.

  Binds the generated code lines wrapping a required 'when' clause:
  head 'flag_<n> = False' ... 'flag_<n> = True' (rule.pattern(0)) and
  tail 'if not flag_<n>: raise AssertionError(...)' (rule.pattern(1)).
  Yields once.  NOTE(review): appears Pyke-generated.
  """
  engine = rule.rule_base.engine
  patterns = rule.goal_arg_patterns()
  if len(arg_patterns) == len(patterns):
    context = contexts.bc_context(rule)
    try:
      # Unify goal argument patterns with the caller's arguments.
      if all(map(lambda pat, arg:
                   pat.match_pattern(context, context,
                                     arg, arg_context),
                 patterns,
                 arg_patterns)):
        rule.rule_base.num_bc_rules_matched += 1
        # Head lines: flag initialization around the clause body.
        mark1 = context.mark(True)
        if rule.pattern(0).match_data(context, context,
              ("flag_%d = False" % context.lookup_data('clause_num'),
               context.lookup_data('fn_head1'),
               "flag_%d = True" % context.lookup_data('clause_num'),
              )):
          context.end_save_all_undo()
          # Tail lines: raise if the clause never succeeded.
          mark2 = context.mark(True)
          if rule.pattern(1).match_data(context, context,
                (context.lookup_data('fn_tail1'),
                 "if not flag_%d:" % context.lookup_data('clause_num'),
                 ("INDENT", 2),
                 "raise AssertionError(\"%s.%s: 'when' clause %d failed\")"
                   % (context.lookup_data('rb_name'), context.lookup_data('rule_name'), context.lookup_data('clause_num')),
                 "POPINDENT",
                )):
            context.end_save_all_undo()
            rule.rule_base.num_bc_rule_successes += 1
            yield
          else: context.end_save_all_undo()
          context.undo_to_mark(mark2)
        else: context.end_save_all_undo()
        context.undo_to_mark(mark1)
        rule.rule_base.num_bc_rule_failures += 1
    finally:
      context.done()
def bc_python_premise(rule, arg_patterns, arg_context):
  """Backward-chaining proof generator for rule 'bc_python_premise'.

  Binds next_clause_num = clause_num + 1, then proves subgoal
  'python_premise' to compile an embedded-Python premise.  Yields once
  per proof.  NOTE(review): appears Pyke-generated.
  """
  engine = rule.rule_base.engine
  patterns = rule.goal_arg_patterns()
  if len(arg_patterns) == len(patterns):
    context = contexts.bc_context(rule)
    try:
      # Unify goal argument patterns with the caller's arguments.
      if all(map(lambda pat, arg:
                   pat.match_pattern(context, context,
                                     arg, arg_context),
                 patterns,
                 arg_patterns)):
        rule.rule_base.num_bc_rules_matched += 1
        # Bind the incremented clause number.
        mark1 = context.mark(True)
        if rule.pattern(0).match_data(context, context,
              context.lookup_data('clause_num') + 1):
          context.end_save_all_undo()
          # 'when' clause 2: prove subgoal 'python_premise'.
          with engine.prove(rule.rule_base.root_name, 'python_premise', context,
                            (rule.pattern(1),
                             rule.pattern(2),
                             rule.pattern(3),
                             rule.pattern(4),
                             rule.pattern(5),
                             rule.pattern(6),
                             rule.pattern(7),)) \
            as gen_2:
            for x_2 in gen_2:
              assert x_2 is None, \
                "compiler.bc_python_premise: got unexpected plan from when clause 2"
              rule.rule_base.num_bc_rule_successes += 1
              yield
        else: context.end_save_all_undo()
        context.undo_to_mark(mark1)
        rule.rule_base.num_bc_rule_failures += 1
    finally:
      context.done()
def python_eq(rule, arg_patterns, arg_context):
  """Backward-chaining proof generator for rule 'python_eq'.

  Compiles a '$var = <python expression>' premise: merges the target
  pattern into patterns_in, rewrites the last python_code line to end
  with '):', and binds the generated match_data head/tail code lines.
  Yields once.  NOTE(review): appears Pyke-generated.
  """
  engine = rule.rule_base.engine
  patterns = rule.goal_arg_patterns()
  if len(arg_patterns) == len(patterns):
    context = contexts.bc_context(rule)
    try:
      # Unify goal argument patterns with the caller's arguments.
      if all(map(lambda pat, arg:
                   pat.match_pattern(context, context,
                                     arg, arg_context),
                 patterns,
                 arg_patterns)):
        rule.rule_base.num_bc_rules_matched += 1
        # Merge the assignment-target pattern into the pattern list.
        mark1 = context.mark(True)
        if rule.pattern(0).match_data(context, context,
                \
  helpers.merge_pattern(context.lookup_data('pattern'), context.lookup_data('patterns_in'))):
          context.end_save_all_undo()
          # Close the embedded expression with '):' on its last line.
          mark2 = context.mark(True)
          if rule.pattern(1).match_data(context, context,
                context.lookup_data('python_code')[:-1] + (context.lookup_data('python_code')[-1] + '):',)):
            context.end_save_all_undo()
            # Head lines of the generated match_data call.
            mark3 = context.mark(True)
            if rule.pattern(2).match_data(context, context,
                  ("mark%d = context.mark(True)" % context.lookup_data('clause_num'),
                   "if rule.pattern(%d).match_data(context, context," %
                     context.lookup_data('pat_num'),
                   ('INDENT', 2),
                   ('INDENT', 5),
                   ('STARTING_LINENO', context.lookup_data('start_lineno')),
                   context.lookup_data('python_code2'),
                   ('ENDING_LINENO', context.lookup_data('end_lineno')),
                   "POPINDENT",
                   "context.end_save_all_undo()",
                  )):
              context.end_save_all_undo()
              # Tail lines: else-branch and undo-to-mark.
              mark4 = context.mark(True)
              if rule.pattern(3).match_data(context, context,
                    ('POPINDENT',
                     "else: context.end_save_all_undo()",
                     "context.undo_to_mark(mark%d)" % context.lookup_data('clause_num'),)):
                context.end_save_all_undo()
                rule.rule_base.num_bc_rule_successes += 1
                yield
              else: context.end_save_all_undo()
              context.undo_to_mark(mark4)
            else: context.end_save_all_undo()
            context.undo_to_mark(mark3)
          else: context.end_save_all_undo()
          context.undo_to_mark(mark2)
        else: context.end_save_all_undo()
        context.undo_to_mark(mark1)
        rule.rule_base.num_bc_rule_failures += 1
    finally:
      context.done()
def python_in(rule, arg_patterns, arg_context):
  """Backward-chaining proof generator for rule 'python_in'.

  Compiles a '$var in <python expression>' premise: merges the loop
  variable pattern, rewrites the last python_code line to end with ':',
  and binds the generated 'for python_ans in ...' loop code including
  the optional break_cond early-exit.  Yields once.
  NOTE(review): appears Pyke-generated.
  """
  engine = rule.rule_base.engine
  patterns = rule.goal_arg_patterns()
  if len(arg_patterns) == len(patterns):
    context = contexts.bc_context(rule)
    try:
      # Unify goal argument patterns with the caller's arguments.
      if all(map(lambda pat, arg:
                   pat.match_pattern(context, context,
                                     arg, arg_context),
                 patterns,
                 arg_patterns)):
        rule.rule_base.num_bc_rules_matched += 1
        # Merge the iteration-target pattern into the pattern list.
        mark1 = context.mark(True)
        if rule.pattern(0).match_data(context, context,
                \
  helpers.merge_pattern(context.lookup_data('pattern'), context.lookup_data('patterns_in'))):
          context.end_save_all_undo()
          # Close the embedded expression with ':' on its last line.
          mark2 = context.mark(True)
          if rule.pattern(1).match_data(context, context,
                context.lookup_data('python_code')[:-1] + (context.lookup_data('python_code')[-1] + ':',)):
            context.end_save_all_undo()
            # Head lines of the generated iteration loop.
            mark3 = context.mark(True)
            if rule.pattern(2).match_data(context, context,
                  ("for python_ans in \\",
                   ('INDENT', 2),
                   ('INDENT', 2),
                   ('STARTING_LINENO', context.lookup_data('start_lineno')),
                   context.lookup_data('python_code2'),
                   ('ENDING_LINENO', context.lookup_data('end_lineno')),
                   'POPINDENT',
                   "mark%d = context.mark(True)" % context.lookup_data('clause_num'),
                   "if rule.pattern(%d).match_data(context, context, "
                     "python_ans):" % context.lookup_data('pat_num'),
                   ('INDENT', 2),
                   "context.end_save_all_undo()",
                  )):
              context.end_save_all_undo()
              # Tail lines: optional break_cond early-exit plus cleanup.
              mark4 = context.mark(True)
              if rule.pattern(3).match_data(context, context,
                    ( () if context.lookup_data('break_cond') is None
                         else ("if %s:" % context.lookup_data('break_cond'),
                               ('INDENT', 2),
                               "context.undo_to_mark(mark%d)" % context.lookup_data('clause_num'),
                               "break",
                               'POPINDENT',),
                      'POPINDENT',
                      "else: context.end_save_all_undo()",
                      "context.undo_to_mark(mark%d)" % context.lookup_data('clause_num'),
                      'POPINDENT',)):
                context.end_save_all_undo()
                rule.rule_base.num_bc_rule_successes += 1
                yield
              else: context.end_save_all_undo()
              context.undo_to_mark(mark4)
            else: context.end_save_all_undo()
            context.undo_to_mark(mark3)
          else: context.end_save_all_undo()
          context.undo_to_mark(mark2)
        else: context.end_save_all_undo()
        context.undo_to_mark(mark1)
        rule.rule_base.num_bc_rule_failures += 1
    finally:
      context.done()
def python_check(rule, arg_patterns, arg_context):
  """Backward-chaining proof generator for rule 'python_check'.

  Compiles a 'check <python expression>' premise: rewrites the last
  python_code line to end with ':', then binds generated lines of the
  form 'if <first code line>:' with the remaining code indented.
  Yields once.  NOTE(review): appears Pyke-generated.
  """
  engine = rule.rule_base.engine
  patterns = rule.goal_arg_patterns()
  if len(arg_patterns) == len(patterns):
    context = contexts.bc_context(rule)
    try:
      # Unify goal argument patterns with the caller's arguments.
      if all(map(lambda pat, arg:
                   pat.match_pattern(context, context,
                                     arg, arg_context),
                 patterns,
                 arg_patterns)):
        rule.rule_base.num_bc_rules_matched += 1
        # Close the embedded expression with ':' on its last line.
        mark1 = context.mark(True)
        if rule.pattern(0).match_data(context, context,
              context.lookup_data('python_code')[:-1] + (context.lookup_data('python_code')[-1] + ':',)):
          context.end_save_all_undo()
          # Generated 'if <expr>:' guard lines.
          mark2 = context.mark(True)
          if rule.pattern(1).match_data(context, context,
                (('STARTING_LINENO', context.lookup_data('start_lineno')),
                 "if " + context.lookup_data('python_code2')[0].strip(),
                 ('INDENT', 3),
                 context.lookup_data('python_code2')[1:],
                 'POPINDENT',
                 ('ENDING_LINENO', context.lookup_data('end_lineno')),
                 ('INDENT', 2),
                )):
            context.end_save_all_undo()
            rule.rule_base.num_bc_rule_successes += 1
            yield
          else: context.end_save_all_undo()
          context.undo_to_mark(mark2)
        else: context.end_save_all_undo()
        context.undo_to_mark(mark1)
        rule.rule_base.num_bc_rule_failures += 1
    finally:
      context.done()
def python_block(rule, arg_patterns, arg_context):
  """Premise-free backward-chaining rule: one unconditional success.

  If the caller supplies the right number of arguments and every goal
  argument pattern unifies with the corresponding caller pattern, this
  counts a match plus a success and yields once (no plan); resuming the
  generator afterwards records a failure.
  """
  engine = rule.rule_base.engine
  patterns = rule.goal_arg_patterns()
  if len(arg_patterns) == len(patterns):
    context = contexts.bc_context(rule)
    try:
      pairs = zip(patterns, arg_patterns)
      if all(goal_pat.match_pattern(context, context, call_pat, arg_context)
             for goal_pat, call_pat in pairs):
        counters = rule.rule_base
        counters.num_bc_rules_matched += 1
        counters.num_bc_rule_successes += 1
        yield
        # Caller asked for a second answer: there is none.
        counters.num_bc_rule_failures += 1
    finally:
      # Release the binding context unconditionally.
      context.done()
def populate(engine):
This_rule_base = engine.get_create('compiler')
bc_rule.bc_rule('file', This_rule_base, 'compile',
file, None,
(contexts.variable('generated_root_pkg'),
contexts.variable('rb_name'),
pattern.pattern_tuple((pattern.pattern_literal('file'), contexts.variable('parent'), pattern.pattern_tuple((contexts.variable('fc_rules'), contexts.variable('fc_extra_lines'),), None), pattern.pattern_tuple((contexts.variable('bc_rules'), contexts.variable('bc_extra_lines'), contexts.variable('plan_extra_lines'),), None),), None),
contexts.variable('fc_lines'),
contexts.variable('bc_lines'),
contexts.variable('plan_lines'),),
(),
(contexts.variable('fc_head'),
contexts.variable('bc_head'),
contexts.variable('plan_head'),
contexts.variable('rb_name'),
contexts.variable('parent'),
contexts.variable('decl_line'),
contexts.variable('fc_rules'),
contexts.variable('fc_fun_lines'),
contexts.variable('fc_init_lines'),
contexts.variable('bc_rules'),
contexts.variable('bc_plan_lines'),
contexts.variable('bc_bc_fun_lines'),
contexts.variable('bc_bc_init_lines'),
contexts.variable('fc_lines'),
contexts.variable('plan_lines'),
contexts.variable('bc_lines'),))
bc_rule.bc_rule('rule_decl', This_rule_base, 'rule_decl',
rule_decl, None,
(contexts.variable('rb_name'),
pattern.pattern_literal(None),
contexts.variable('decl_line'),),
(),
(contexts.variable('decl_line'),))
bc_rule.bc_rule('rule_decl_with_parent', This_rule_base, 'rule_decl',
rule_decl_with_parent, None,
(contexts.variable('rb_name'),
pattern.pattern_tuple((pattern.pattern_literal('parent'), contexts.variable('parent'), contexts.variable('excluded_symbols'),), None),
contexts.variable('decl_line'),),
(),
(contexts.variable('decl_line'),))
bc_rule.bc_rule('fc_rules', This_rule_base, 'fc_rules',
fc_rules, None,
(contexts.variable('fc_rules'),
contexts.variable('fc_funs'),
contexts.variable('fc_init'),),
(),
(contexts.variable('fc_rule'),
contexts.variable('fc_fun_1'),
contexts.variable('fc_init_1'),
contexts.variable('fc_funs'),
contexts.variable('fc_init'),))
bc_rule.bc_rule('fc_rule_', This_rule_base, 'fc_rule',
fc_rule_, None,
(pattern.pattern_tuple((pattern.pattern_literal('fc_rule'), contexts.variable('rule_name'), contexts.variable('fc_premises'), contexts.variable('assertions'),), None),
contexts.variable('fc_fun'),
contexts.variable('fc_init'),),
(),
(contexts.variable('rule_name'),
pattern.pattern_literal(0),
contexts.anonymous('_'),
contexts.variable('fc_premises'),
pattern.pattern_literal(None),
pattern.pattern_literal(False),
contexts.variable('prem_fn_head'),
contexts.variable('prem_fn_tail'),
contexts.variable('prem_decl_lines'),
pattern.pattern_literal(()),
contexts.variable('patterns_out1'),
contexts.variable('assertions'),
contexts.variable('asserts_fn_lines'),
contexts.variable('patterns_out'),
contexts.variable('fc_fun'),
contexts.variable('fc_init'),))
bc_rule.bc_rule('fc_premises0', This_rule_base, 'fc_premises',
fc_premises0, None,
(contexts.anonymous('_'),
contexts.variable('clause_num'),
contexts.variable('clause_num'),
pattern.pattern_literal(()),
contexts.anonymous('_'),
contexts.anonymous('_'),
pattern.pattern_literal(()),
pattern.pattern_literal(()),
contexts.variable('decl_num_in'),
contexts.variable('decl_num_in'),
pattern.pattern_literal(()),
contexts.variable('patterns_in'),
contexts.variable('patterns_in'),),
(),
())
bc_rule.bc_rule('fc_premises1', This_rule_base, 'fc_premises',
fc_premises1, None,
(contexts.variable('rule_name'),
contexts.variable('clause_num'),
contexts.variable('next_clause_num'),
pattern.pattern_tuple((contexts.variable('first_prem'),), contexts.variable('rest_prems')),
contexts.variable('break_cond'),
contexts.variable('multi_match'),
pattern.pattern_tuple((contexts.variable('fn_head1'),), contexts.variable('fn_head2')),
pattern.pattern_tuple((contexts.variable('fn_tail2'),), contexts.variable('fn_tail1')),
contexts.variable('decl_num_in'),
contexts.variable('decl_num_out'),
contexts.variable('decl_lines'),
contexts.variable('patterns_in'),
contexts.variable('patterns_out'),),
(),
(contexts.variable('rule_name'),
contexts.variable('clause_num'),
contexts.variable('next_clause_num1'),
contexts.variable('first_prem'),
contexts.variable('break_cond'),
contexts.variable('multi_match'),
contexts.variable('fn_head1'),
contexts.variable('fn_tail1'),
contexts.variable('decl_num_in'),
contexts.variable('decl_num_out1'),
contexts.variable('decl_lines1'),
contexts.variable('patterns_in'),
contexts.variable('patterns_out1'),
contexts.variable('next_clause_num'),
contexts.variable('rest_prems'),
contexts.variable('fn_head2'),
contexts.variable('fn_tail2'),
contexts.variable('decl_num_out'),
contexts.variable('decl_lines2'),
contexts.variable('patterns_out'),
contexts.variable('decl_lines'),))
bc_rule.bc_rule('fc_premise', This_rule_base, 'fc_premise',
fc_premise, None,
(contexts.variable('rule_name'),
contexts.variable('clause_num'),
contexts.variable('next_clause_num'),
pattern.pattern_tuple((pattern.pattern_literal('fc_premise'), contexts.variable('kb_name'), contexts.variable('entity_name'), contexts.variable('arg_patterns'), contexts.variable('start_lineno'), contexts.variable('end_lineno'),), None),
contexts.variable('break_cond'),
contexts.variable('multi_match'),
contexts.variable('fn_head'),
contexts.variable('fn_tail'),
contexts.variable('decl_num_in'),
contexts.variable('decl_num_out'),
contexts.variable('decl_lines'),
contexts.variable('patterns_in'),
contexts.variable('patterns_in'),),
(),
(contexts.variable('kb_name'),
contexts.variable('entity_name'),
contexts.variable('start_lineno'),
contexts.variable('end_lineno'),
contexts.variable('multi_match'),
contexts.variable('decl_num_in'),
contexts.variable('fn_head'),
contexts.variable('fn_tail'),
contexts.variable('next_clause_num'),
contexts.variable('decl_num_out'),
contexts.variable('decl_lines'),))
bc_rule.bc_rule('gen_fc_for_false', This_rule_base, 'gen_fc_for',
gen_fc_for_false, None,
(contexts.variable('kb_name'),
contexts.variable('entity_name'),
contexts.variable('start_lineno'),
contexts.variable('end_lineno'),
pattern.pattern_literal(False),
contexts.variable('decl_num'),
contexts.variable('fn_head'),),
(),
(contexts.variable('fn_head'),))
bc_rule.bc_rule('gen_fc_for_true', This_rule_base, 'gen_fc_for',
gen_fc_for_true, None,
(contexts.variable('kb_name'),
contexts.variable('entity_name'),
contexts.variable('start_lineno'),
contexts.variable('end_lineno'),
pattern.pattern_literal(True),
contexts.variable('decl_num'),
contexts.variable('fn_head'),),
(),
(contexts.variable('fn_head'),))
bc_rule.bc_rule('fc_first', This_rule_base, 'fc_premise',
fc_first, None,
(contexts.variable('rule_name'),
contexts.variable('clause_num'),
contexts.variable('next_clause_num'),
pattern.pattern_tuple((pattern.pattern_literal('fc_first'), contexts.variable('premises1'), contexts.anonymous('_'),), None),
contexts.anonymous('_'),
contexts.anonymous('_'),
pattern.pattern_tuple((contexts.variable('init_worked'), contexts.variable('fn_head'), contexts.variable('set_worked'),), None),
contexts.variable('fn_tail'),
contexts.variable('decl_num_in'),
contexts.variable('decl_num_out'),
contexts.variable('decl_lines'),
contexts.variable('patterns_in'),
contexts.variable('patterns_out'),),
(),
(contexts.variable('break_cond'),
contexts.variable('rule_name'),
contexts.variable('clause_num'),
contexts.variable('next_clause_num'),
contexts.variable('premises1'),
pattern.pattern_literal(True),
contexts.variable('fn_head'),
contexts.variable('fn_tail'),
contexts.variable('decl_num_in'),
contexts.variable('decl_num_out'),
contexts.variable('decl_lines'),
contexts.variable('patterns_in'),
contexts.variable('patterns_out'),
contexts.variable('init_worked'),
contexts.variable('set_worked'),))
bc_rule.bc_rule('fc_forall_None', This_rule_base, 'fc_premise',
fc_forall_None, None,
(contexts.variable('rule_name'),
contexts.variable('clause_num'),
contexts.variable('next_clause_num'),
pattern.pattern_tuple((pattern.pattern_literal('fc_forall'), contexts.variable('premises1'), pattern.pattern_literal(None), contexts.anonymous('_'), contexts.anonymous('_'),), None),
contexts.anonymous('_'),
contexts.anonymous('_'),
contexts.variable('fn_head'),
pattern.pattern_literal(()),
contexts.variable('decl_num_in'),
contexts.variable('decl_num_out'),
contexts.variable('decl_lines'),
contexts.variable('patterns_in'),
contexts.variable('patterns_out'),),
(),
(contexts.variable('rule_name'),
contexts.variable('clause_num'),
contexts.variable('next_clause_num'),
contexts.variable('premises1'),
pattern.pattern_literal(None),
pattern.pattern_literal(True),
contexts.variable('fn_head1'),
contexts.variable('fn_tail1'),
contexts.variable('decl_num_in'),
contexts.variable('decl_num_out'),
contexts.variable('decl_lines'),
contexts.variable('patterns_in'),
contexts.variable('patterns_out'),
contexts.variable('fn_head'),))
bc_rule.bc_rule('fc_forall_require', This_rule_base, 'fc_premise',
fc_forall_require, None,
(contexts.variable('rule_name'),
contexts.variable('clause_num'),
contexts.variable('next_clause_num'),
pattern.pattern_tuple((pattern.pattern_literal('fc_forall'), contexts.variable('premises1'), contexts.variable('require'), contexts.variable('start_lineno'), contexts.anonymous('_'),), None),
contexts.anonymous('_'),
contexts.anonymous('_'),
contexts.variable('fn_head'),
pattern.pattern_literal(("POPINDENT",)),
contexts.variable('decl_num_in'),
contexts.variable('decl_num_out'),
contexts.variable('decl_lines'),
contexts.variable('patterns_in'),
contexts.variable('patterns_out'),),
(),
(contexts.variable('break_true'),
contexts.variable('break_false'),
contexts.variable('rule_name'),
contexts.variable('clause_num'),
contexts.variable('next_clause_num1'),
contexts.variable('premises1'),
pattern.pattern_literal(True),
contexts.variable('fn_head1'),
contexts.variable('fn_tail1'),
contexts.variable('decl_num_in'),
contexts.variable('decl_num_out1'),
contexts.variable('decl_lines1'),
contexts.variable('patterns_in'),
contexts.variable('patterns_out1'),
contexts.variable('next_clause_num'),
contexts.variable('require'),
contexts.variable('fn_head2'),
contexts.variable('fn_tail2'),
contexts.variable('decl_num_out'),
contexts.variable('decl_lines2'),
contexts.variable('patterns_out'),
contexts.variable('fn_head'),
contexts.variable('decl_lines'),))
bc_rule.bc_rule('fc_notany', This_rule_base, 'fc_premise',
fc_notany, None,
(contexts.variable('rule_name'),
contexts.variable('clause_num'),
contexts.variable('next_clause_num'),
pattern.pattern_tuple((pattern.pattern_literal('fc_notany'), contexts.variable('premises'), contexts.variable('start_lineno'),), None),
contexts.anonymous('_'),
contexts.anonymous('_'),
contexts.variable('fn_head'),
pattern.pattern_literal(("POPINDENT",)),
contexts.variable('decl_num_in'),
contexts.variable('decl_num_out'),
contexts.variable('decl_lines'),
contexts.variable('patterns_in'),
contexts.variable('patterns_out'),),
(),
(contexts.variable('break_true'),
contexts.variable('break_false'),
contexts.variable('rule_name'),
contexts.variable('clause_num'),
contexts.variable('next_clause_num'),
contexts.variable('premises'),
pattern.pattern_literal(True),
contexts.variable('fn_head1'),
contexts.variable('fn_tail1'),
contexts.variable('decl_num_in'),
contexts.variable('decl_num_out'),
contexts.variable('decl_lines'),
contexts.variable('patterns_in'),
contexts.variable('patterns_out'),
contexts.variable('fn_head'),))
bc_rule.bc_rule('fc_python_premise', This_rule_base, 'fc_premise',
fc_python_premise, None,
(contexts.variable('rule_name'),
contexts.variable('clause_num'),
contexts.variable('next_clause_num'),
contexts.variable('python_premise'),
contexts.variable('break_cond'),
contexts.anonymous('_'),
contexts.variable('fn_head'),
contexts.variable('fn_tail'),
contexts.variable('decl_num_in'),
contexts.variable('decl_num_in'),
pattern.pattern_literal(()),
contexts.variable('patterns_in'),
contexts.variable('patterns_out'),),
(),
(contexts.variable('next_clause_num'),
contexts.variable('clause_num'),
contexts.variable('python_premise'),
contexts.variable('break_cond'),
contexts.variable('patterns_in'),
contexts.variable('patterns_out'),
contexts.variable('fn_head'),
contexts.variable('fn_tail'),))
bc_rule.bc_rule('assertions_0', This_rule_base, 'assertions',
assertions_0, None,
(pattern.pattern_literal(()),
pattern.pattern_literal(()),
contexts.variable('patterns_in'),
contexts.variable('patterns_in'),),
(),
())
bc_rule.bc_rule('assertions_n', This_rule_base, 'assertions',
assertions_n, None,
(pattern.pattern_tuple((contexts.variable('first_assertion'),), contexts.variable('rest_assertions')),
pattern.pattern_tuple((contexts.variable('fn_lines1'),), contexts.variable('fn_lines2')),
contexts.variable('patterns_in'),
contexts.variable('patterns_out'),),
(),
(contexts.variable('first_assertion'),
contexts.variable('fn_lines1'),
contexts.variable('patterns_in'),
contexts.variable('patterns_out1'),
contexts.variable('rest_assertions'),
contexts.variable('fn_lines2'),
contexts.variable('patterns_out'),))
bc_rule.bc_rule('assertion', This_rule_base, 'assertion',
assertion, None,
(pattern.pattern_tuple((pattern.pattern_literal('assert'), contexts.variable('kb_name'), contexts.variable('entity_name'), contexts.variable('patterns'), contexts.variable('start_lineno'), contexts.variable('end_lineno'),), None),
contexts.variable('fn_lines'),
contexts.variable('patterns_in'),
contexts.variable('patterns_out'),),
(),
(pattern.pattern_tuple((contexts.variable('pat_nums'), contexts.variable('patterns_out'),), None),
contexts.variable('fn_lines'),))
bc_rule.bc_rule('python_assertion', This_rule_base, 'assertion',
python_assertion, None,
(pattern.pattern_tuple((pattern.pattern_literal('python_assertion'), pattern.pattern_tuple((contexts.variable('python_code'), contexts.anonymous('_'), contexts.anonymous('_'), contexts.anonymous('_'),), None), contexts.variable('start_lineno'), contexts.variable('end_lineno'),), None),
pattern.pattern_tuple((pattern.pattern_tuple((pattern.pattern_literal('STARTING_LINENO'), contexts.variable('start_lineno'),), None), contexts.variable('python_code'), pattern.pattern_tuple((pattern.pattern_literal('ENDING_LINENO'), contexts.variable('end_lineno'),), None),), None),
contexts.variable('patterns_in'),
contexts.variable('patterns_in'),),
(),
())
bc_rule.bc_rule('bc_rules', This_rule_base, 'bc_rules',
bc_rules, None,
(contexts.variable('rb_name'),
contexts.variable('bc_rules'),
contexts.variable('bc_plan_lines'),
contexts.variable('bc_bc_funs'),
contexts.variable('bc_bc_init'),),
(),
(contexts.variable('bc_rule'),
contexts.variable('rb_name'),
contexts.variable('bc_plan1'),
contexts.variable('bc_bc_fun1'),
contexts.variable('bc_bc_init1'),
contexts.variable('bc_plan_lines'),
contexts.variable('bc_bc_funs'),
contexts.variable('bc_bc_init'),))
bc_rule.bc_rule('bc_rule_', This_rule_base, 'bc_rule',
bc_rule_, None,
(contexts.variable('rb_name'),
pattern.pattern_tuple((pattern.pattern_literal('bc_rule'), contexts.variable('name'), contexts.variable('goal'), contexts.variable('bc_premises'), contexts.variable('python_lines'), contexts.variable('plan_vars_needed'),), None),
contexts.variable('plan_lines'),
contexts.variable('bc_fun_lines'),
contexts.variable('bc_init_lines'),),
(),
(contexts.variable('rb_name'),
contexts.variable('name'),
contexts.variable('bc_premises'),
contexts.variable('plan_vars_needed'),
contexts.variable('prem_plan_lines'),
contexts.variable('prem_fn_head'),
contexts.variable('prem_fn_tail'),
contexts.variable('prem_decl_lines'),
pattern.pattern_tuple((contexts.variable('plan_lines'), contexts.variable('goal_fn_head'), contexts.variable('goal_fn_tail'), contexts.variable('goal_decl_lines'),), None),
contexts.variable('bc_fun_lines'),
contexts.variable('bc_init_lines'),))
bc_rule.bc_rule('bc_premises', This_rule_base, 'bc_premises',
bc_premises, None,
(contexts.variable('rb_name'),
contexts.variable('rule_name'),
contexts.variable('bc_premises'),
contexts.variable('plan_vars_needed'),
contexts.variable('plan_lines'),
contexts.variable('fn_head'),
contexts.variable('fn_tail'),
contexts.variable('decl_lines'),),
(),
(contexts.variable('rb_name'),
contexts.variable('rule_name'),
pattern.pattern_literal(1),
contexts.anonymous('_'),
contexts.variable('bc_premises'),
pattern.pattern_literal(None),
pattern.pattern_literal(True),
pattern.pattern_literal(()),
contexts.variable('patterns'),
contexts.variable('plan_vars_needed'),
contexts.variable('plan_var_names'),
contexts.variable('plan_lines1'),
contexts.variable('fn_head'),
contexts.variable('fn_tail'),
contexts.variable('pat_lines'),
contexts.variable('decl_lines'),
contexts.variable('plan_lines'),))
bc_rule.bc_rule('bc_premises1_0', This_rule_base, 'bc_premises1',
bc_premises1_0, None,
(contexts.anonymous('_'),
contexts.anonymous('_'),
contexts.variable('clause_num'),
contexts.variable('clause_num'),
pattern.pattern_literal(()),
contexts.anonymous('_'),
contexts.anonymous('_'),
contexts.variable('patterns'),
contexts.variable('patterns'),
contexts.variable('plan_var_names'),
contexts.variable('plan_var_names'),
pattern.pattern_literal(()),
pattern.pattern_literal(()),
pattern.pattern_literal(()),),
(),
())
bc_rule.bc_rule('bc_premises1_n', This_rule_base, 'bc_premises1',
bc_premises1_n, None,
(contexts.variable('rb_name'),
contexts.variable('rule_name'),
contexts.variable('clause_num'),
contexts.variable('next_clause_num'),
pattern.pattern_tuple((contexts.variable('first_prem'),), contexts.variable('rest_prems')),
contexts.variable('break_cond'),
contexts.variable('allow_plan'),
contexts.variable('patterns_in'),
contexts.variable('patterns_out'),
contexts.variable('plan_var_names_in'),
contexts.variable('plan_var_names_out'),
contexts.variable('plan_lines'),
contexts.variable('fn_head'),
contexts.variable('fn_tail'),),
(),
(contexts.variable('rb_name'),
contexts.variable('rule_name'),
contexts.variable('clause_num'),
contexts.variable('next_clause_num1'),
contexts.variable('first_prem'),
contexts.variable('break_cond'),
contexts.variable('allow_plan'),
contexts.variable('patterns_in'),
contexts.variable('patterns_out1'),
contexts.variable('plan_var_names_in'),
contexts.variable('plan_var_names_out1'),
contexts.variable('plan_lines1'),
contexts.variable('fn_head1'),
contexts.variable('fn_tail1'),
contexts.variable('next_clause_num'),
contexts.variable('rest_prems'),
contexts.variable('patterns_out'),
contexts.variable('plan_var_names_out'),
contexts.variable('plan_lines2'),
contexts.variable('fn_head2'),
contexts.variable('fn_tail2'),
contexts.variable('plan_lines'),
contexts.variable('fn_head'),
contexts.variable('fn_tail'),))
bc_rule.bc_rule('bc_premise', This_rule_base, 'bc_premise',
bc_premise, None,
(contexts.variable('rb_name'),
contexts.variable('rule_name'),
contexts.variable('clause_num'),
contexts.variable('next_clause_num'),
pattern.pattern_tuple((pattern.pattern_literal('bc_premise'), contexts.variable('required'), contexts.variable('kb_name'), contexts.variable('entity_name'), contexts.variable('arg_patterns'), contexts.variable('plan_spec'), contexts.variable('start_lineno'), contexts.variable('end_lineno'),), None),
contexts.variable('break_cond'),
contexts.variable('allow_plan'),
contexts.variable('patterns_in'),
contexts.variable('patterns_out'),
contexts.variable('plan_var_names_in'),
contexts.variable('plan_var_names_out'),
contexts.variable('plan_lines'),
contexts.variable('fn_head'),
contexts.variable('fn_tail'),),
(),
(contexts.variable('next_clause_num'),
contexts.variable('kb_name2'),
pattern.pattern_tuple((contexts.variable('pat_nums'), contexts.variable('patterns_out1'),), None),
contexts.variable('fn_head1'),
contexts.variable('required'),
contexts.variable('rb_name'),
contexts.variable('rule_name'),
contexts.variable('clause_num'),
pattern.pattern_literal(('POPINDENT', 'POPINDENT',)),
contexts.variable('fn_head2'),
contexts.variable('fn_tail2'),
contexts.variable('plan_spec'),
contexts.variable('allow_plan'),
contexts.variable('patterns_out1'),
contexts.variable('patterns_out'),
contexts.variable('fn_head3'),
contexts.variable('fn_tail3'),
contexts.variable('plan_lines'),
contexts.variable('plan_vars_needed'),
pattern.pattern_tuple((contexts.anonymous('_'), contexts.variable('plan_var_names_out'),), None),
contexts.variable('fn_head'),
contexts.variable('fn_tail'),))
bc_rule.bc_rule('bc_first', This_rule_base, 'bc_premise',
bc_first, None,
(contexts.variable('rb_name'),
contexts.variable('rule_name'),
contexts.variable('clause_num'),
contexts.variable('next_clause_num'),
pattern.pattern_tuple((pattern.pattern_literal('bc_first'), contexts.variable('required'), contexts.variable('bc_premises'), contexts.anonymous('_'),), None),
contexts.anonymous('_'),
contexts.variable('allow_plan'),
contexts.variable('patterns_in'),
contexts.variable('patterns_out'),
contexts.variable('plan_var_names_in'),
contexts.variable('plan_var_names_out'),
contexts.variable('plan_lines'),
pattern.pattern_tuple((contexts.variable('init_worked'), contexts.variable('fn_head'), contexts.variable('set_worked'),), None),
contexts.variable('fn_tail'),),
(),
(contexts.variable('break_cond'),
contexts.variable('rb_name'),
contexts.variable('rule_name'),
contexts.variable('clause_num'),
contexts.variable('next_clause_num'),
contexts.variable('bc_premises'),
contexts.variable('allow_plan'),
contexts.variable('patterns_in'),
contexts.variable('patterns_out'),
contexts.variable('plan_var_names_in'),
contexts.variable('plan_var_names_out'),
contexts.variable('plan_lines'),
contexts.variable('fn_head1'),
contexts.variable('fn_tail1'),
contexts.variable('required'),
contexts.variable('fn_head'),
contexts.variable('fn_tail'),
contexts.variable('init_worked'),
contexts.variable('set_worked'),))
bc_rule.bc_rule('bc_forall_None', This_rule_base, 'bc_premise',
bc_forall_None, None,
(contexts.variable('rb_name'),
contexts.variable('rule_name'),
contexts.variable('clause_num'),
contexts.variable('next_clause_num'),
pattern.pattern_tuple((pattern.pattern_literal('bc_forall'), contexts.variable('bc_premises'), pattern.pattern_literal(None), contexts.anonymous('_'), contexts.anonymous('_'),), None),
contexts.anonymous('_'),
contexts.anonymous('_'),
contexts.variable('patterns_in'),
contexts.variable('patterns_out'),
contexts.variable('plan_var_names_in'),
contexts.variable('plan_var_names_out'),
contexts.variable('plan_lines'),
contexts.variable('fn_head'),
pattern.pattern_literal(()),),
(),
(contexts.variable('rb_name'),
contexts.variable('rule_name'),
contexts.variable('clause_num'),
contexts.variable('next_clause_num'),
contexts.variable('bc_premises'),
pattern.pattern_literal(None),
pattern.pattern_literal(False),
contexts.variable('patterns_in'),
contexts.variable('patterns_out'),
contexts.variable('plan_var_names_in'),
contexts.variable('plan_var_names_out'),
contexts.variable('plan_lines'),
contexts.variable('fn_head1'),
contexts.variable('fn_tail'),
contexts.variable('fn_head'),))
bc_rule.bc_rule('bc_forall_require', This_rule_base, 'bc_premise',
bc_forall_require, None,
(contexts.variable('rb_name'),
contexts.variable('rule_name'),
contexts.variable('clause_num'),
contexts.variable('next_clause_num'),
pattern.pattern_tuple((pattern.pattern_literal('bc_forall'), contexts.variable('premises1'), contexts.variable('require'), contexts.variable('start_lineno'), contexts.anonymous('_'),), None),
contexts.anonymous('_'),
contexts.anonymous('_'),
contexts.variable('patterns_in'),
contexts.variable('patterns_out'),
contexts.variable('plan_var_names_in'),
contexts.variable('plan_var_names_out'),
pattern.pattern_literal(()),
contexts.variable('fn_head'),
pattern.pattern_literal(("POPINDENT",)),),
(),
(contexts.variable('break_true'),
contexts.variable('break_false'),
contexts.variable('rb_name'),
contexts.variable('rule_name'),
contexts.variable('clause_num'),
contexts.variable('next_clause_num1'),
contexts.variable('premises1'),
pattern.pattern_literal(False),
contexts.variable('patterns_in'),
contexts.variable('patterns_out1'),
contexts.variable('plan_var_names_in'),
contexts.variable('plan_var_names_out1'),
pattern.pattern_literal(()),
contexts.variable('fn_head1'),
contexts.variable('fn_tail1'),
contexts.variable('next_clause_num'),
contexts.variable('require'),
contexts.variable('patterns_out'),
contexts.variable('plan_var_names_out'),
contexts.variable('fn_head2'),
contexts.variable('fn_tail2'),
contexts.variable('fn_head'),))
bc_rule.bc_rule('bc_notany', This_rule_base, 'bc_premise',
bc_notany, None,
(contexts.variable('rb_name'),
contexts.variable('rule_name'),
contexts.variable('clause_num'),
contexts.variable('next_clause_num'),
pattern.pattern_tuple((pattern.pattern_literal('bc_notany'), contexts.variable('bc_premises'), contexts.variable('start_lineno'),), None),
contexts.anonymous('_'),
contexts.anonymous('_'),
contexts.variable('patterns_in'),
contexts.variable('patterns_out'),
contexts.variable('plan_var_in'),
contexts.variable('plan_var_out'),
pattern.pattern_literal(()),
contexts.variable('fn_head'),
pattern.pattern_literal(("POPINDENT",)),),
(),
(contexts.variable('break_true'),
contexts.variable('break_false'),
contexts.variable('rb_name'),
contexts.variable('rule_name'),
contexts.variable('clause_num'),
contexts.variable('next_clause_num'),
contexts.variable('bc_premises'),
pattern.pattern_literal(False),
contexts.variable('patterns_in'),
contexts.variable('patterns_out'),
contexts.variable('plan_var_in'),
contexts.variable('plan_var_out'),
pattern.pattern_literal(()),
contexts.variable('fn_head1'),
contexts.variable('fn_tail1'),
contexts.variable('fn_head'),))
bc_rule.bc_rule('no_plan', This_rule_base, 'gen_plan_lines',
no_plan, None,
(contexts.variable('rb_name'),
contexts.variable('rule_name'),
contexts.variable('clause_num'),
pattern.pattern_literal(None),
contexts.anonymous('_'),
contexts.variable('patterns_in'),
contexts.variable('patterns_in'),
contexts.variable('fn_head'),
pattern.pattern_literal(()),
pattern.pattern_literal(()),
pattern.pattern_literal(()),),
(),
(contexts.variable('fn_head'),))
bc_rule.bc_rule('as_plan', This_rule_base, 'gen_plan_lines',
as_plan, None,
(contexts.variable('rb_name'),
contexts.variable('rule_name'),
contexts.variable('clause_num'),
pattern.pattern_tuple((pattern.pattern_literal('as'), contexts.variable('pat_var_name'),), None),
contexts.anonymous('_'),
contexts.variable('patterns_in'),
contexts.variable('patterns_out'),
contexts.variable('fn_head'),
contexts.variable('fn_tail'),
pattern.pattern_literal(()),
pattern.pattern_literal(()),),
(),
(pattern.pattern_tuple((contexts.variable('pat_num'), contexts.variable('patterns_out'),), None),
contexts.variable('rb_name'),
contexts.variable('rule_name'),
contexts.variable('clause_num'),
contexts.variable('pat_var_name'),
contexts.variable('pat_num'),
contexts.variable('fn_head'),
contexts.variable('fn_tail'),))
bc_rule.bc_rule('plan_spec', This_rule_base, 'gen_plan_lines',
plan_spec, None,
(contexts.variable('rb_name'),
contexts.variable('rule_name'),
contexts.variable('clause_num'),
pattern.pattern_tuple((pattern.pattern_literal('plan_spec'), contexts.variable('step_num'), contexts.variable('plan_var_name'), contexts.variable('python_code'), contexts.variable('plan_vars_needed'), contexts.anonymous('_'), contexts.anonymous('_'),), None),
pattern.pattern_literal(True),
contexts.variable('patterns_in'),
contexts.variable('patterns_out'),
contexts.variable('fn_head'),
contexts.variable('fn_tail'),
pattern.pattern_tuple((pattern.pattern_tuple((contexts.variable('step_num'), contexts.variable('python_code'),), None),), None),
contexts.variable('plan_vars_needed'),),
(),
(pattern.pattern_tuple((contexts.variable('pat_num'), contexts.variable('patterns_out'),), None),
contexts.variable('rb_name'),
contexts.variable('rule_name'),
contexts.variable('clause_num'),
contexts.variable('plan_var_name'),
contexts.variable('pat_num'),
contexts.variable('fn_head'),
contexts.variable('fn_tail'),))
bc_rule.bc_rule('illegal_plan_spec', This_rule_base, 'gen_plan_lines',
illegal_plan_spec, None,
(contexts.anonymous('_'),
contexts.anonymous('_'),
contexts.anonymous('_'),
pattern.pattern_tuple((pattern.pattern_literal('plan_spec'), contexts.anonymous('_'), contexts.anonymous('_'), contexts.anonymous('_'), contexts.anonymous('_'), contexts.variable('lineno'), contexts.variable('lexpos'),), None),
pattern.pattern_literal(False),
contexts.anonymous('_'),
contexts.anonymous('_'),
contexts.anonymous('_'),
contexts.anonymous('_'),
contexts.anonymous('_'),
contexts.anonymous('_'),),
(),
(contexts.anonymous('_'),))
bc_rule.bc_rule('plan_bindings', This_rule_base, 'plan_bindings',
plan_bindings, None,
(contexts.variable('rb_name'),
contexts.variable('rule_name'),
contexts.variable('clause_num'),
contexts.variable('plan_var_name'),
contexts.variable('pat_num'),
contexts.variable('fn_head'),
contexts.variable('fn_tail'),),
(),
(contexts.variable('fn_head'),
contexts.variable('fn_tail'),))
bc_rule.bc_rule('not_required', This_rule_base, 'add_required',
not_required, None,
(pattern.pattern_literal(False),
contexts.anonymous('_'),
contexts.anonymous('_'),
contexts.anonymous('_'),
contexts.variable('fn_head'),
contexts.variable('fn_tail'),
contexts.variable('fn_head'),
contexts.variable('fn_tail'),),
(),
())
bc_rule.bc_rule('required', This_rule_base, 'add_required',
required, None,
(pattern.pattern_literal(True),
contexts.variable('rb_name'),
contexts.variable('rule_name'),
contexts.variable('clause_num'),
contexts.variable('fn_head1'),
contexts.variable('fn_tail1'),
contexts.variable('fn_head'),
contexts.variable('fn_tail'),),
(),
(contexts.variable('fn_head'),
contexts.variable('fn_tail'),))
bc_rule.bc_rule('bc_python_premise', This_rule_base, 'bc_premise',
bc_python_premise, None,
(contexts.variable('rb_name'),
contexts.variable('rule_name'),
contexts.variable('clause_num'),
contexts.variable('next_clause_num'),
contexts.variable('python_premise'),
contexts.variable('break_cond'),
contexts.anonymous('_'),
contexts.variable('patterns_in'),
contexts.variable('patterns_out'),
contexts.variable('plan_var_names'),
contexts.variable('plan_var_names'),
pattern.pattern_literal(()),
contexts.variable('fn_head'),
contexts.variable('fn_tail'),),
(),
(contexts.variable('next_clause_num'),
contexts.variable('clause_num'),
contexts.variable('python_premise'),
contexts.variable('break_cond'),
contexts.variable('patterns_in'),
contexts.variable('patterns_out'),
contexts.variable('fn_head'),
contexts.variable('fn_tail'),))
bc_rule.bc_rule('python_eq', This_rule_base, 'python_premise',
python_eq, None,
(contexts.variable('clause_num'),
pattern.pattern_tuple((pattern.pattern_literal('python_eq'), contexts.variable('pattern'), pattern.pattern_tuple((contexts.variable('python_code'), contexts.anonymous('_'), contexts.anonymous('_'), contexts.anonymous('_'),), None), contexts.variable('start_lineno'), contexts.variable('end_lineno'),), None),
contexts.anonymous('_'),
contexts.variable('patterns_in'),
contexts.variable('patterns_out'),
contexts.variable('fn_head'),
contexts.variable('fn_tail'),),
(),
(pattern.pattern_tuple((contexts.variable('pat_num'), contexts.variable('patterns_out'),), None),
contexts.variable('python_code2'),
contexts.variable('fn_head'),
contexts.variable('fn_tail'),))
bc_rule.bc_rule('python_in', This_rule_base, 'python_premise',
python_in, None,
(contexts.variable('clause_num'),
pattern.pattern_tuple((pattern.pattern_literal('python_in'), contexts.variable('pattern'), pattern.pattern_tuple((contexts.variable('python_code'), contexts.anonymous('_'), contexts.anonymous('_'), contexts.anonymous('_'),), None), contexts.variable('start_lineno'), contexts.variable('end_lineno'),), None),
contexts.variable('break_cond'),
contexts.variable('patterns_in'),
contexts.variable('patterns_out'),
contexts.variable('fn_head'),
contexts.variable('fn_tail'),),
(),
(pattern.pattern_tuple((contexts.variable('pat_num'), contexts.variable('patterns_out'),), None),
contexts.variable('python_code2'),
contexts.variable('fn_head'),
contexts.variable('fn_tail'),))
bc_rule.bc_rule('python_check', This_rule_base, 'python_premise',
python_check, None,
(contexts.variable('clause_num'),
pattern.pattern_tuple((pattern.pattern_literal('python_check'), pattern.pattern_tuple((contexts.variable('python_code'), contexts.anonymous('_'), contexts.anonymous('_'), contexts.anonymous('_'),), None), contexts.variable('start_lineno'), contexts.variable('end_lineno'),), None),
contexts.anonymous('_'),
contexts.variable('patterns_in'),
contexts.variable('patterns_in'),
contexts.variable('fn_head'),
pattern.pattern_literal(('POPINDENT',)),),
(),
(contexts.variable('python_code2'),
contexts.variable('fn_head'),))
bc_rule.bc_rule('python_block', This_rule_base, 'python_premise',
python_block, None,
(contexts.variable('clause_num'),
pattern.pattern_tuple((pattern.pattern_literal('python_block'), pattern.pattern_tuple((contexts.variable('python_code'), contexts.anonymous('_'), contexts.anonymous('_'), contexts.anonymous('_'),), None), contexts.variable('start_lineno'), contexts.variable('end_lineno'),), None),
contexts.anonymous('_'),
contexts.variable('patterns_in'),
contexts.variable('patterns_in'),
pattern.pattern_tuple((pattern.pattern_tuple((pattern.pattern_literal('STARTING_LINENO'), contexts.variable('start_lineno'),), None), contexts.variable('python_code'), pattern.pattern_tuple((pattern.pattern_literal('ENDING_LINENO'), contexts.variable('end_lineno'),), None),), None),
pattern.pattern_literal(()),),
(),
())
# Runtime support imports used by the generated rule functions.
import itertools
from pyke.krb_compiler import helpers
# Path (relative to the compiled_krb directory) of the .krb source file
# this module was compiled from.
Krb_filename = '../compiler.krb'
# NOTE(review): pyke's generated line-number map.  Each entry appears to be
# ((start, end) line range in this generated .py file,
#  (start, end) line range in the source .krb file), used to translate
# tracebacks back to .krb line numbers -- confirm against pyke's krb_compiler.
# Machine-generated data: do not edit by hand.
Krb_lineno_map = (
((14, 18), (24, 28)),
((22, 22), (30, 30)),
((26, 26), (31, 31)),
((30, 30), (32, 32)),
((33, 41), (33, 33)),
((43, 51), (34, 34)),
((53, 63), (35, 36)),
((66, 78), (37, 49)),
((82, 87), (50, 55)),
((91, 106), (56, 71)),
((138, 142), (74, 74)),
((146, 146), (76, 76)),
((162, 166), (79, 79)),
((170, 172), (81, 83)),
((188, 192), (86, 86)),
((194, 195), (88, 90)),
((198, 198), (92, 92)),
((204, 212), (94, 94)),
((213, 214), (95, 97)),
((227, 227), (98, 98)),
((231, 231), (99, 99)),
((249, 253), (102, 103)),
((256, 274), (105, 107)),
((276, 285), (108, 109)),
((288, 305), (110, 127)),
((309, 316), (128, 135)),
((338, 342), (138, 139)),
((356, 360), (142, 146)),
((363, 381), (148, 152)),
((383, 401), (153, 157)),
((404, 404), (158, 158)),
((424, 428), (161, 167)),
((430, 441), (169, 170)),
((444, 447), (171, 174)),
((451, 451), (175, 175)),
((455, 455), (176, 176)),
((459, 464), (177, 182)),
((486, 490), (185, 186)),
((494, 509), (188, 203)),
((525, 529), (207, 208)),
((533, 543), (210, 220)),
((559, 563), (223, 227)),
((567, 567), (229, 229)),
((570, 588), (230, 234)),
((591, 591), (235, 235)),
((595, 595), (236, 236)),
((617, 621), (239, 242)),
((624, 642), (244, 248)),
((645, 645), (249, 249)),
((663, 667), (252, 256)),
((671, 671), (258, 258)),
((675, 675), (259, 259)),
((678, 696), (260, 264)),
((698, 716), (265, 269)),
((719, 727), (270, 278)),
((731, 731), (279, 279)),
((757, 761), (282, 286)),
((765, 765), (288, 288)),
((769, 769), (289, 289)),
((772, 790), (290, 294)),
((793, 798), (295, 300)),
((820, 824), (303, 306)),
((828, 828), (308, 308)),
((830, 841), (309, 311)),
((856, 860), (314, 314)),
((874, 878), (317, 318)),
((881, 890), (320, 320)),
((892, 901), (321, 321)),
((918, 922), (324, 326)),
((926, 927), (328, 329)),
((931, 940), (330, 339)),
((958, 962), (342, 347)),
((976, 980), (350, 350)),
((982, 984), (352, 355)),
((987, 987), (357, 357)),
((993, 1003), (359, 359)),
((1004, 1006), (360, 363)),
((1019, 1019), (364, 364)),
((1023, 1023), (365, 365)),
((1027, 1027), (366, 366)),
((1047, 1051), (369, 371)),
((1054, 1067), (373, 375)),
((1070, 1072), (376, 378)),
((1076, 1083), (379, 386)),
((1087, 1090), (387, 390)),
((1112, 1116), (393, 395)),
((1119, 1138), (397, 400)),
((1141, 1141), (401, 401)),
((1145, 1148), (402, 405)),
((1152, 1157), (406, 411)),
((1179, 1183), (414, 416)),
((1197, 1201), (419, 423)),
((1204, 1223), (425, 429)),
((1225, 1244), (430, 434)),
((1247, 1247), (435, 435)),
((1251, 1251), (436, 436)),
((1255, 1255), (437, 437)),
((1279, 1283), (440, 446)),
((1287, 1287), (448, 448)),
((1291, 1291), (449, 449)),
((1295, 1296), (450, 451)),
((1300, 1312), (452, 464)),
((1315, 1328), (465, 466)),
((1330, 1346), (467, 470)),
((1349, 1350), (471, 472)),
((1354, 1354), (473, 473)),
((1358, 1361), (474, 477)),
((1393, 1397), (480, 484)),
((1401, 1401), (486, 486)),
((1404, 1423), (487, 491)),
((1425, 1438), (492, 493)),
((1441, 1441), (494, 494)),
((1445, 1445), (495, 495)),
((1469, 1473), (498, 502)),
((1476, 1495), (504, 508)),
((1498, 1498), (509, 509)),
((1516, 1520), (512, 516)),
((1524, 1524), (518, 518)),
((1528, 1528), (519, 519)),
((1531, 1550), (520, 524)),
((1552, 1571), (525, 529)),
((1574, 1582), (530, 538)),
((1606, 1610), (541, 545)),
((1614, 1614), (548, 548)),
((1618, 1618), (549, 549)),
((1621, 1640), (550, 554)),
((1643, 1648), (555, 560)),
((1670, 1674), (563, 565)),
((1678, 1685), (567, 574)),
((1701, 1705), (577, 581)),
((1709, 1711), (583, 585)),
((1714, 1726), (586, 587)),
((1743, 1747), (590, 595)),
((1751, 1753), (597, 599)),
((1756, 1768), (600, 601)),
((1785, 1789), (604, 606)),
((1793, 1794), (608, 609)),
((1810, 1814), (612, 613)),
((1818, 1838), (615, 635)),
((1842, 1842), (636, 636)),
((1860, 1864), (639, 640)),
((1878, 1882), (643, 644)),
((1886, 1889), (646, 649)),
((1893, 1899), (650, 656)),
((1917, 1921), (659, 663)),
((1925, 1925), (665, 665)),
((1927, 1938), (666, 668)),
((1953, 1957), (671, 675)),
((1961, 1962), (677, 678)),
((1966, 1966), (679, 679)),
((1970, 1980), (680, 690)),
((1984, 1986), (691, 693)),
((2008, 2012), (696, 700)),
((2016, 2017), (702, 703)),
((2021, 2021), (704, 704)),
((2025, 2037), (705, 717)),
((2041, 2050), (718, 727)),
((2072, 2076), (730, 735)),
((2080, 2080), (737, 737)),
((2084, 2091), (738, 745)),
((2109, 2113), (748, 756)),
)
| 46.92286
| 351
| 0.505837
| 15,032
| 149,637
| 4.75765
| 0.048896
| 0.157501
| 0.052771
| 0.040648
| 0.929891
| 0.913559
| 0.878868
| 0.848665
| 0.824727
| 0.806074
| 0
| 0.036543
| 0.379498
| 149,637
| 3,188
| 352
| 46.937578
| 0.7337
| 0.000094
| 0
| 0.791935
| 0
| 0
| 0.117731
| 0.010353
| 0
| 0
| 0
| 0
| 0.030323
| 1
| 0.013548
| false
| 0
| 0.00129
| 0
| 0.014839
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
53f6e9c9734461eb65967c07b5b9cf506e96cb70
| 18,591
|
py
|
Python
|
lib/genpyblocking/snowflake/Snowflake.py
|
michaelmontano/snowflakepy
|
5ffd0931df7ce2ba2e1d19d17d8d447f77e39060
|
[
"BSD-3-Clause"
] | 2
|
2015-07-11T20:35:15.000Z
|
2019-04-22T02:38:04.000Z
|
lib/genpyblocking/snowflake/Snowflake.py
|
michaelmontano/snowflakepy
|
5ffd0931df7ce2ba2e1d19d17d8d447f77e39060
|
[
"BSD-3-Clause"
] | null | null | null |
lib/genpyblocking/snowflake/Snowflake.py
|
michaelmontano/snowflakepy
|
5ffd0931df7ce2ba2e1d19d17d8d447f77e39060
|
[
"BSD-3-Clause"
] | null | null | null |
#
# Autogenerated by Thrift
#
# DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING
#
from thrift.Thrift import *
from ttypes import *
from thrift.Thrift import TProcessor
from thrift.transport import TTransport
from thrift.protocol import TBinaryProtocol
# The C-accelerated (de)serializer is optional; fall back to the pure-Python
# protocol implementation when it is not built/installed.
# Fix: catch only ImportError -- the original bare `except:` also swallowed
# unrelated errors (KeyboardInterrupt, SystemExit, typos inside the module).
try:
    from thrift.protocol import fastbinary
except ImportError:
    fastbinary = None
class Iface:
    """Abstract interface for the Snowflake service.

    Concrete behavior is provided by `Client` (remote calls) or by a
    user-supplied handler passed to `Processor`; these stubs do nothing.
    """

    def get_worker_id(self):
        """Service method stub; implemented by clients/handlers."""
        pass

    def get_datacenter_id(self):
        """Service method stub; implemented by clients/handlers."""
        pass

    def get_timestamp(self):
        """Service method stub; implemented by clients/handlers."""
        pass

    def get_id(self):
        """Service method stub; implemented by clients/handlers."""
        pass
class Client(Iface):
    """Synchronous Thrift client for the Snowflake service.

    Each call writes a `<name>_args` struct to the output protocol, flushes
    the transport, then reads a `<name>_result` struct (or a
    TApplicationException) back from the input protocol.

    Fixes over the generated original: `is not None` instead of `!= None`,
    trailing semicolons removed, formatting restored.
    """

    def __init__(self, iprot, oprot=None):
        # With a single protocol argument the same protocol is used for both
        # sending and receiving.
        self._iprot = self._oprot = iprot
        if oprot is not None:
            self._oprot = oprot
        self._seqid = 0

    def get_worker_id(self):
        """Return the remote service's worker id."""
        self.send_get_worker_id()
        return self.recv_get_worker_id()

    def send_get_worker_id(self):
        self._oprot.writeMessageBegin('get_worker_id', TMessageType.CALL, self._seqid)
        args = get_worker_id_args()
        args.write(self._oprot)
        self._oprot.writeMessageEnd()
        self._oprot.trans.flush()

    def recv_get_worker_id(self):
        (fname, mtype, rseqid) = self._iprot.readMessageBegin()
        if mtype == TMessageType.EXCEPTION:
            # Server reported a failure; surface it as an exception.
            x = TApplicationException()
            x.read(self._iprot)
            self._iprot.readMessageEnd()
            raise x
        result = get_worker_id_result()
        result.read(self._iprot)
        self._iprot.readMessageEnd()
        if result.success is not None:
            return result.success
        raise TApplicationException(TApplicationException.MISSING_RESULT, "get_worker_id failed: unknown result")

    def get_datacenter_id(self):
        """Return the remote service's datacenter id."""
        self.send_get_datacenter_id()
        return self.recv_get_datacenter_id()

    def send_get_datacenter_id(self):
        self._oprot.writeMessageBegin('get_datacenter_id', TMessageType.CALL, self._seqid)
        args = get_datacenter_id_args()
        args.write(self._oprot)
        self._oprot.writeMessageEnd()
        self._oprot.trans.flush()

    def recv_get_datacenter_id(self):
        (fname, mtype, rseqid) = self._iprot.readMessageBegin()
        if mtype == TMessageType.EXCEPTION:
            x = TApplicationException()
            x.read(self._iprot)
            self._iprot.readMessageEnd()
            raise x
        result = get_datacenter_id_result()
        result.read(self._iprot)
        self._iprot.readMessageEnd()
        if result.success is not None:
            return result.success
        raise TApplicationException(TApplicationException.MISSING_RESULT, "get_datacenter_id failed: unknown result")

    def get_timestamp(self):
        """Return the remote service's current timestamp."""
        self.send_get_timestamp()
        return self.recv_get_timestamp()

    def send_get_timestamp(self):
        self._oprot.writeMessageBegin('get_timestamp', TMessageType.CALL, self._seqid)
        args = get_timestamp_args()
        args.write(self._oprot)
        self._oprot.writeMessageEnd()
        self._oprot.trans.flush()

    def recv_get_timestamp(self):
        (fname, mtype, rseqid) = self._iprot.readMessageBegin()
        if mtype == TMessageType.EXCEPTION:
            x = TApplicationException()
            x.read(self._iprot)
            self._iprot.readMessageEnd()
            raise x
        result = get_timestamp_result()
        result.read(self._iprot)
        self._iprot.readMessageEnd()
        if result.success is not None:
            return result.success
        raise TApplicationException(TApplicationException.MISSING_RESULT, "get_timestamp failed: unknown result")

    def get_id(self):
        """Return a newly generated id from the remote service."""
        self.send_get_id()
        return self.recv_get_id()

    def send_get_id(self):
        self._oprot.writeMessageBegin('get_id', TMessageType.CALL, self._seqid)
        args = get_id_args()
        args.write(self._oprot)
        self._oprot.writeMessageEnd()
        self._oprot.trans.flush()

    def recv_get_id(self):
        (fname, mtype, rseqid) = self._iprot.readMessageBegin()
        if mtype == TMessageType.EXCEPTION:
            x = TApplicationException()
            x.read(self._iprot)
            self._iprot.readMessageEnd()
            raise x
        result = get_id_result()
        result.read(self._iprot)
        self._iprot.readMessageEnd()
        if result.success is not None:
            return result.success
        raise TApplicationException(TApplicationException.MISSING_RESULT, "get_id failed: unknown result")
class Processor(Iface, TProcessor):
    """Server-side dispatcher mapping incoming message names to handler calls.

    Fixes over the generated original: the local previously named `type`
    (shadowing the builtin) is renamed `mtype`, and the four identical
    `process_*` bodies are factored into `_process_call`.
    """

    def __init__(self, handler):
        # `handler` implements Iface; it performs the actual work.
        self._handler = handler
        self._processMap = {}
        self._processMap["get_worker_id"] = Processor.process_get_worker_id
        self._processMap["get_datacenter_id"] = Processor.process_get_datacenter_id
        self._processMap["get_timestamp"] = Processor.process_get_timestamp
        self._processMap["get_id"] = Processor.process_get_id

    def process(self, iprot, oprot):
        """Read one message from *iprot* and dispatch it.

        Returns True after a successful dispatch; returns None (falling
        through after replying with UNKNOWN_METHOD) for unrecognized
        method names -- preserved from the generated original.
        """
        (name, mtype, seqid) = iprot.readMessageBegin()
        if name not in self._processMap:
            # Unknown method: drain the request, then report the error back.
            iprot.skip(TType.STRUCT)
            iprot.readMessageEnd()
            x = TApplicationException(TApplicationException.UNKNOWN_METHOD, 'Unknown function %s' % (name))
            oprot.writeMessageBegin(name, TMessageType.EXCEPTION, seqid)
            x.write(oprot)
            oprot.writeMessageEnd()
            oprot.trans.flush()
            return
        else:
            self._processMap[name](self, seqid, iprot, oprot)
        return True

    def _process_call(self, method_name, args_cls, result_cls, handler_fn, seqid, iprot, oprot):
        # Shared body for the four generated process_* methods: read the
        # (empty) args struct, invoke the handler, write the REPLY.
        args = args_cls()
        args.read(iprot)
        iprot.readMessageEnd()
        result = result_cls()
        result.success = handler_fn()
        oprot.writeMessageBegin(method_name, TMessageType.REPLY, seqid)
        result.write(oprot)
        oprot.writeMessageEnd()
        oprot.trans.flush()

    def process_get_worker_id(self, seqid, iprot, oprot):
        self._process_call("get_worker_id", get_worker_id_args, get_worker_id_result,
                           self._handler.get_worker_id, seqid, iprot, oprot)

    def process_get_datacenter_id(self, seqid, iprot, oprot):
        self._process_call("get_datacenter_id", get_datacenter_id_args, get_datacenter_id_result,
                           self._handler.get_datacenter_id, seqid, iprot, oprot)

    def process_get_timestamp(self, seqid, iprot, oprot):
        self._process_call("get_timestamp", get_timestamp_args, get_timestamp_result,
                           self._handler.get_timestamp, seqid, iprot, oprot)

    def process_get_id(self, seqid, iprot, oprot):
        self._process_call("get_id", get_id_args, get_id_result,
                           self._handler.get_id, seqid, iprot, oprot)
# HELPER FUNCTIONS AND STRUCTURES
class get_worker_id_args:
    """Argument struct for `get_worker_id` -- the call takes no arguments.

    Fixes over the generated original: `iteritems()` replaced with
    `items()` (works on both Python 2 and 3), trailing semicolon-free,
    formatting restored.
    """

    # Empty field spec: this struct carries no fields.
    thrift_spec = (
    )

    def read(self, iprot):
        """Deserialize this (empty) struct from *iprot*, skipping any fields."""
        if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
            # Fast path: let the C extension decode directly from the transport.
            fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
            return
        iprot.readStructBegin()
        while True:
            (fname, ftype, fid) = iprot.readFieldBegin()
            if ftype == TType.STOP:
                break
            else:
                # No declared fields, so anything present is skipped.
                iprot.skip(ftype)
            iprot.readFieldEnd()
        iprot.readStructEnd()

    def write(self, oprot):
        """Serialize this (empty) struct to *oprot*."""
        if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
            oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
            return
        oprot.writeStructBegin('get_worker_id_args')
        oprot.writeFieldStop()
        oprot.writeStructEnd()

    def __repr__(self):
        fields = ['%s=%r' % (key, value)
                  for key, value in self.__dict__.items()]
        return '%s(%s)' % (self.__class__.__name__, ', '.join(fields))

    def __eq__(self, other):
        return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

    def __ne__(self, other):
        return not (self == other)
class get_worker_id_result:
    """
    Result struct for the get_worker_id RPC.

    Attributes:
     - success: the i64 worker id returned by the handler
    """

    thrift_spec = (
        (0, TType.I64, 'success', None, None, ),  # 0
    )

    def __init__(self, success=None,):
        self.success = success

    def read(self, iprot):
        """Deserialize this struct from iprot (C fast path via fastbinary when available)."""
        if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
            fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
            return
        iprot.readStructBegin()
        while True:
            (fname, ftype, fid) = iprot.readFieldBegin()
            if ftype == TType.STOP:
                break
            if fid == 0:
                if ftype == TType.I64:
                    self.success = iprot.readI64()
                else:
                    iprot.skip(ftype)
            else:
                iprot.skip(ftype)
            iprot.readFieldEnd()
        iprot.readStructEnd()

    def write(self, oprot):
        """Serialize this struct to oprot (C fast path via fastbinary when available)."""
        if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
            oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
            return
        oprot.writeStructBegin('get_worker_id_result')
        # Identity comparison is the correct idiom for None (was `!= None`).
        if self.success is not None:
            oprot.writeFieldBegin('success', TType.I64, 0)
            oprot.writeI64(self.success)
            oprot.writeFieldEnd()
        oprot.writeFieldStop()
        oprot.writeStructEnd()

    def __repr__(self):
        # .items() instead of py2-only .iteritems() so repr works on Python 3 too.
        L = ['%s=%r' % (key, value) for key, value in self.__dict__.items()]
        return '%s(%s)' % (self.__class__.__name__, ', '.join(L))

    def __eq__(self, other):
        return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

    def __ne__(self, other):
        return not (self == other)
class get_datacenter_id_args:
    """Argument struct for the get_datacenter_id RPC (the call takes no arguments)."""

    thrift_spec = (
    )

    def read(self, iprot):
        """Deserialize this struct from iprot (C fast path via fastbinary when available)."""
        if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
            fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
            return
        iprot.readStructBegin()
        while True:
            (fname, ftype, fid) = iprot.readFieldBegin()
            if ftype == TType.STOP:
                break
            else:
                # No declared fields: skip anything a newer peer may send.
                iprot.skip(ftype)
            iprot.readFieldEnd()
        iprot.readStructEnd()

    def write(self, oprot):
        """Serialize this struct to oprot (C fast path via fastbinary when available)."""
        if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
            oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
            return
        oprot.writeStructBegin('get_datacenter_id_args')
        oprot.writeFieldStop()
        oprot.writeStructEnd()

    def __repr__(self):
        # .items() instead of py2-only .iteritems() so repr works on Python 3 too.
        L = ['%s=%r' % (key, value) for key, value in self.__dict__.items()]
        return '%s(%s)' % (self.__class__.__name__, ', '.join(L))

    def __eq__(self, other):
        return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

    def __ne__(self, other):
        return not (self == other)
class get_datacenter_id_result:
    """
    Result struct for the get_datacenter_id RPC.

    Attributes:
     - success: the i64 datacenter id returned by the handler
    """

    thrift_spec = (
        (0, TType.I64, 'success', None, None, ),  # 0
    )

    def __init__(self, success=None,):
        self.success = success

    def read(self, iprot):
        """Deserialize this struct from iprot (C fast path via fastbinary when available)."""
        if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
            fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
            return
        iprot.readStructBegin()
        while True:
            (fname, ftype, fid) = iprot.readFieldBegin()
            if ftype == TType.STOP:
                break
            if fid == 0:
                if ftype == TType.I64:
                    self.success = iprot.readI64()
                else:
                    iprot.skip(ftype)
            else:
                iprot.skip(ftype)
            iprot.readFieldEnd()
        iprot.readStructEnd()

    def write(self, oprot):
        """Serialize this struct to oprot (C fast path via fastbinary when available)."""
        if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
            oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
            return
        oprot.writeStructBegin('get_datacenter_id_result')
        # Identity comparison is the correct idiom for None (was `!= None`).
        if self.success is not None:
            oprot.writeFieldBegin('success', TType.I64, 0)
            oprot.writeI64(self.success)
            oprot.writeFieldEnd()
        oprot.writeFieldStop()
        oprot.writeStructEnd()

    def __repr__(self):
        # .items() instead of py2-only .iteritems() so repr works on Python 3 too.
        L = ['%s=%r' % (key, value) for key, value in self.__dict__.items()]
        return '%s(%s)' % (self.__class__.__name__, ', '.join(L))

    def __eq__(self, other):
        return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

    def __ne__(self, other):
        return not (self == other)
class get_timestamp_args:
    """Argument struct for the get_timestamp RPC (the call takes no arguments)."""

    thrift_spec = (
    )

    def read(self, iprot):
        """Deserialize this struct from iprot (C fast path via fastbinary when available)."""
        if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
            fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
            return
        iprot.readStructBegin()
        while True:
            (fname, ftype, fid) = iprot.readFieldBegin()
            if ftype == TType.STOP:
                break
            else:
                # No declared fields: skip anything a newer peer may send.
                iprot.skip(ftype)
            iprot.readFieldEnd()
        iprot.readStructEnd()

    def write(self, oprot):
        """Serialize this struct to oprot (C fast path via fastbinary when available)."""
        if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
            oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
            return
        oprot.writeStructBegin('get_timestamp_args')
        oprot.writeFieldStop()
        oprot.writeStructEnd()

    def __repr__(self):
        # .items() instead of py2-only .iteritems() so repr works on Python 3 too.
        L = ['%s=%r' % (key, value) for key, value in self.__dict__.items()]
        return '%s(%s)' % (self.__class__.__name__, ', '.join(L))

    def __eq__(self, other):
        return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

    def __ne__(self, other):
        return not (self == other)
class get_timestamp_result:
    """
    Result struct for the get_timestamp RPC.

    Attributes:
     - success: the i64 timestamp returned by the handler
    """

    thrift_spec = (
        (0, TType.I64, 'success', None, None, ),  # 0
    )

    def __init__(self, success=None,):
        self.success = success

    def read(self, iprot):
        """Deserialize this struct from iprot (C fast path via fastbinary when available)."""
        if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
            fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
            return
        iprot.readStructBegin()
        while True:
            (fname, ftype, fid) = iprot.readFieldBegin()
            if ftype == TType.STOP:
                break
            if fid == 0:
                if ftype == TType.I64:
                    self.success = iprot.readI64()
                else:
                    iprot.skip(ftype)
            else:
                iprot.skip(ftype)
            iprot.readFieldEnd()
        iprot.readStructEnd()

    def write(self, oprot):
        """Serialize this struct to oprot (C fast path via fastbinary when available)."""
        if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
            oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
            return
        oprot.writeStructBegin('get_timestamp_result')
        # Identity comparison is the correct idiom for None (was `!= None`).
        if self.success is not None:
            oprot.writeFieldBegin('success', TType.I64, 0)
            oprot.writeI64(self.success)
            oprot.writeFieldEnd()
        oprot.writeFieldStop()
        oprot.writeStructEnd()

    def __repr__(self):
        # .items() instead of py2-only .iteritems() so repr works on Python 3 too.
        L = ['%s=%r' % (key, value) for key, value in self.__dict__.items()]
        return '%s(%s)' % (self.__class__.__name__, ', '.join(L))

    def __eq__(self, other):
        return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

    def __ne__(self, other):
        return not (self == other)
class get_id_args:
    """Argument struct for the get_id RPC (the call takes no arguments)."""

    thrift_spec = (
    )

    def read(self, iprot):
        """Deserialize this struct from iprot (C fast path via fastbinary when available)."""
        if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
            fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
            return
        iprot.readStructBegin()
        while True:
            (fname, ftype, fid) = iprot.readFieldBegin()
            if ftype == TType.STOP:
                break
            else:
                # No declared fields: skip anything a newer peer may send.
                iprot.skip(ftype)
            iprot.readFieldEnd()
        iprot.readStructEnd()

    def write(self, oprot):
        """Serialize this struct to oprot (C fast path via fastbinary when available)."""
        if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
            oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
            return
        oprot.writeStructBegin('get_id_args')
        oprot.writeFieldStop()
        oprot.writeStructEnd()

    def __repr__(self):
        # .items() instead of py2-only .iteritems() so repr works on Python 3 too.
        L = ['%s=%r' % (key, value) for key, value in self.__dict__.items()]
        return '%s(%s)' % (self.__class__.__name__, ', '.join(L))

    def __eq__(self, other):
        return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

    def __ne__(self, other):
        return not (self == other)
class get_id_result:
    """
    Result struct for the get_id RPC.

    Attributes:
     - success: the i64 id returned by the handler
    """

    thrift_spec = (
        (0, TType.I64, 'success', None, None, ),  # 0
    )

    def __init__(self, success=None,):
        self.success = success

    def read(self, iprot):
        """Deserialize this struct from iprot (C fast path via fastbinary when available)."""
        if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
            fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
            return
        iprot.readStructBegin()
        while True:
            (fname, ftype, fid) = iprot.readFieldBegin()
            if ftype == TType.STOP:
                break
            if fid == 0:
                if ftype == TType.I64:
                    self.success = iprot.readI64()
                else:
                    iprot.skip(ftype)
            else:
                iprot.skip(ftype)
            iprot.readFieldEnd()
        iprot.readStructEnd()

    def write(self, oprot):
        """Serialize this struct to oprot (C fast path via fastbinary when available)."""
        if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
            oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
            return
        oprot.writeStructBegin('get_id_result')
        # Identity comparison is the correct idiom for None (was `!= None`).
        if self.success is not None:
            oprot.writeFieldBegin('success', TType.I64, 0)
            oprot.writeI64(self.success)
            oprot.writeFieldEnd()
        oprot.writeFieldStop()
        oprot.writeStructEnd()

    def __repr__(self):
        # .items() instead of py2-only .iteritems() so repr works on Python 3 too.
        L = ['%s=%r' % (key, value) for key, value in self.__dict__.items()]
        return '%s(%s)' % (self.__class__.__name__, ', '.join(L))

    def __eq__(self, other):
        return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

    def __ne__(self, other):
        return not (self == other)
| 31.833904
| 188
| 0.689581
| 2,224
| 18,591
| 5.432104
| 0.057104
| 0.03311
| 0.037083
| 0.064895
| 0.895538
| 0.858952
| 0.826173
| 0.798361
| 0.794636
| 0.782965
| 0
| 0.003813
| 0.195901
| 18,591
| 583
| 189
| 31.888508
| 0.804335
| 0.011887
| 0
| 0.802128
| 1
| 0
| 0.033486
| 0.002513
| 0
| 0
| 0
| 0
| 0
| 1
| 0.142553
| false
| 0.008511
| 0.012766
| 0.034043
| 0.302128
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
07005da9c63f2e446abbd0de2bbc9a8673abc1c9
| 42,239
|
py
|
Python
|
sdk/python/pulumi_aws/appstream/image_builder.py
|
chivandikwa/pulumi-aws
|
19c08bf9dcb90544450ffa4eec7bf6751058fde2
|
[
"ECL-2.0",
"Apache-2.0"
] | null | null | null |
sdk/python/pulumi_aws/appstream/image_builder.py
|
chivandikwa/pulumi-aws
|
19c08bf9dcb90544450ffa4eec7bf6751058fde2
|
[
"ECL-2.0",
"Apache-2.0"
] | null | null | null |
sdk/python/pulumi_aws/appstream/image_builder.py
|
chivandikwa/pulumi-aws
|
19c08bf9dcb90544450ffa4eec7bf6751058fde2
|
[
"ECL-2.0",
"Apache-2.0"
] | null | null | null |
# coding=utf-8
# *** WARNING: this file was generated by the Pulumi Terraform Bridge (tfgen) Tool. ***
# *** Do not edit by hand unless you're certain you know what you are doing! ***
import warnings
import pulumi
import pulumi.runtime
from typing import Any, Mapping, Optional, Sequence, Union, overload
from .. import _utilities
from . import outputs
from ._inputs import *
__all__ = ['ImageBuilderArgs', 'ImageBuilder']
@pulumi.input_type
class ImageBuilderArgs:
    """Input argument set for creating an `ImageBuilder` resource.

    Generated by the Pulumi Terraform Bridge; `instance_type` is the only
    required property, all others are optional and recorded only when a
    non-None value is supplied.
    """

    def __init__(__self__, *,
                 instance_type: pulumi.Input[str],
                 access_endpoints: Optional[pulumi.Input[Sequence[pulumi.Input['ImageBuilderAccessEndpointArgs']]]] = None,
                 appstream_agent_version: Optional[pulumi.Input[str]] = None,
                 description: Optional[pulumi.Input[str]] = None,
                 display_name: Optional[pulumi.Input[str]] = None,
                 domain_join_info: Optional[pulumi.Input['ImageBuilderDomainJoinInfoArgs']] = None,
                 enable_default_internet_access: Optional[pulumi.Input[bool]] = None,
                 iam_role_arn: Optional[pulumi.Input[str]] = None,
                 image_arn: Optional[pulumi.Input[str]] = None,
                 image_name: Optional[pulumi.Input[str]] = None,
                 name: Optional[pulumi.Input[str]] = None,
                 tags: Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]] = None,
                 vpc_config: Optional[pulumi.Input['ImageBuilderVpcConfigArgs']] = None):
        """
        The set of arguments for constructing a ImageBuilder resource.
        :param pulumi.Input[str] instance_type: The instance type to use when launching the image builder.
        :param pulumi.Input[Sequence[pulumi.Input['ImageBuilderAccessEndpointArgs']]] access_endpoints: Set of interface VPC endpoint (interface endpoint) objects. Maximum of 4. See below.
        :param pulumi.Input[str] appstream_agent_version: The version of the AppStream 2.0 agent to use for this image builder.
        :param pulumi.Input[str] description: Description to display.
        :param pulumi.Input[str] display_name: Human-readable friendly name for the AppStream image builder.
        :param pulumi.Input['ImageBuilderDomainJoinInfoArgs'] domain_join_info: Configuration block for the name of the directory and organizational unit (OU) to use to join the image builder to a Microsoft Active Directory domain. See below.
        :param pulumi.Input[bool] enable_default_internet_access: Enables or disables default internet access for the image builder.
        :param pulumi.Input[str] iam_role_arn: ARN of the IAM role to apply to the image builder.
        :param pulumi.Input[str] image_arn: ARN of the public, private, or shared image to use.
        :param pulumi.Input[str] image_name: Name of the image used to create the image builder.
        :param pulumi.Input[str] name: Unique name for the image builder.
        :param pulumi.Input[Mapping[str, pulumi.Input[str]]] tags: A map of tags to assign to the instance. If configured with a provider `default_tags` configuration block present, tags with matching keys will overwrite those defined at the provider-level.
        :param pulumi.Input['ImageBuilderVpcConfigArgs'] vpc_config: Configuration block for the VPC configuration for the image builder. See below.
        """
        pulumi.set(__self__, "instance_type", instance_type)
        # Optional properties are stored only when explicitly provided.
        if access_endpoints is not None:
            pulumi.set(__self__, "access_endpoints", access_endpoints)
        if appstream_agent_version is not None:
            pulumi.set(__self__, "appstream_agent_version", appstream_agent_version)
        if description is not None:
            pulumi.set(__self__, "description", description)
        if display_name is not None:
            pulumi.set(__self__, "display_name", display_name)
        if domain_join_info is not None:
            pulumi.set(__self__, "domain_join_info", domain_join_info)
        if enable_default_internet_access is not None:
            pulumi.set(__self__, "enable_default_internet_access", enable_default_internet_access)
        if iam_role_arn is not None:
            pulumi.set(__self__, "iam_role_arn", iam_role_arn)
        if image_arn is not None:
            pulumi.set(__self__, "image_arn", image_arn)
        if image_name is not None:
            pulumi.set(__self__, "image_name", image_name)
        if name is not None:
            pulumi.set(__self__, "name", name)
        if tags is not None:
            pulumi.set(__self__, "tags", tags)
        if vpc_config is not None:
            pulumi.set(__self__, "vpc_config", vpc_config)

    @property
    @pulumi.getter(name="instanceType")
    def instance_type(self) -> pulumi.Input[str]:
        """
        The instance type to use when launching the image builder.
        """
        return pulumi.get(self, "instance_type")

    @instance_type.setter
    def instance_type(self, value: pulumi.Input[str]):
        pulumi.set(self, "instance_type", value)

    @property
    @pulumi.getter(name="accessEndpoints")
    def access_endpoints(self) -> Optional[pulumi.Input[Sequence[pulumi.Input['ImageBuilderAccessEndpointArgs']]]]:
        """
        Set of interface VPC endpoint (interface endpoint) objects. Maximum of 4. See below.
        """
        return pulumi.get(self, "access_endpoints")

    @access_endpoints.setter
    def access_endpoints(self, value: Optional[pulumi.Input[Sequence[pulumi.Input['ImageBuilderAccessEndpointArgs']]]]):
        pulumi.set(self, "access_endpoints", value)

    @property
    @pulumi.getter(name="appstreamAgentVersion")
    def appstream_agent_version(self) -> Optional[pulumi.Input[str]]:
        """
        The version of the AppStream 2.0 agent to use for this image builder.
        """
        return pulumi.get(self, "appstream_agent_version")

    @appstream_agent_version.setter
    def appstream_agent_version(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "appstream_agent_version", value)

    @property
    @pulumi.getter
    def description(self) -> Optional[pulumi.Input[str]]:
        """
        Description to display.
        """
        return pulumi.get(self, "description")

    @description.setter
    def description(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "description", value)

    @property
    @pulumi.getter(name="displayName")
    def display_name(self) -> Optional[pulumi.Input[str]]:
        """
        Human-readable friendly name for the AppStream image builder.
        """
        return pulumi.get(self, "display_name")

    @display_name.setter
    def display_name(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "display_name", value)

    @property
    @pulumi.getter(name="domainJoinInfo")
    def domain_join_info(self) -> Optional[pulumi.Input['ImageBuilderDomainJoinInfoArgs']]:
        """
        Configuration block for the name of the directory and organizational unit (OU) to use to join the image builder to a Microsoft Active Directory domain. See below.
        """
        return pulumi.get(self, "domain_join_info")

    @domain_join_info.setter
    def domain_join_info(self, value: Optional[pulumi.Input['ImageBuilderDomainJoinInfoArgs']]):
        pulumi.set(self, "domain_join_info", value)

    @property
    @pulumi.getter(name="enableDefaultInternetAccess")
    def enable_default_internet_access(self) -> Optional[pulumi.Input[bool]]:
        """
        Enables or disables default internet access for the image builder.
        """
        return pulumi.get(self, "enable_default_internet_access")

    @enable_default_internet_access.setter
    def enable_default_internet_access(self, value: Optional[pulumi.Input[bool]]):
        pulumi.set(self, "enable_default_internet_access", value)

    @property
    @pulumi.getter(name="iamRoleArn")
    def iam_role_arn(self) -> Optional[pulumi.Input[str]]:
        """
        ARN of the IAM role to apply to the image builder.
        """
        return pulumi.get(self, "iam_role_arn")

    @iam_role_arn.setter
    def iam_role_arn(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "iam_role_arn", value)

    @property
    @pulumi.getter(name="imageArn")
    def image_arn(self) -> Optional[pulumi.Input[str]]:
        """
        ARN of the public, private, or shared image to use.
        """
        return pulumi.get(self, "image_arn")

    @image_arn.setter
    def image_arn(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "image_arn", value)

    @property
    @pulumi.getter(name="imageName")
    def image_name(self) -> Optional[pulumi.Input[str]]:
        """
        Name of the image used to create the image builder.
        """
        return pulumi.get(self, "image_name")

    @image_name.setter
    def image_name(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "image_name", value)

    @property
    @pulumi.getter
    def name(self) -> Optional[pulumi.Input[str]]:
        """
        Unique name for the image builder.
        """
        return pulumi.get(self, "name")

    @name.setter
    def name(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "name", value)

    @property
    @pulumi.getter
    def tags(self) -> Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]]:
        """
        A map of tags to assign to the instance. If configured with a provider `default_tags` configuration block present, tags with matching keys will overwrite those defined at the provider-level.
        """
        return pulumi.get(self, "tags")

    @tags.setter
    def tags(self, value: Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]]):
        pulumi.set(self, "tags", value)

    @property
    @pulumi.getter(name="vpcConfig")
    def vpc_config(self) -> Optional[pulumi.Input['ImageBuilderVpcConfigArgs']]:
        """
        Configuration block for the VPC configuration for the image builder. See below.
        """
        return pulumi.get(self, "vpc_config")

    @vpc_config.setter
    def vpc_config(self, value: Optional[pulumi.Input['ImageBuilderVpcConfigArgs']]):
        pulumi.set(self, "vpc_config", value)
@pulumi.input_type
class _ImageBuilderState:
    """State properties used when looking up or filtering `ImageBuilder` resources.

    Generated by the Pulumi Terraform Bridge; every property is optional and is
    recorded only when a non-None value is supplied. Unlike `ImageBuilderArgs`
    this also carries provider-computed outputs (`arn`, `created_time`,
    `state`, `tags_all`).
    """

    def __init__(__self__, *,
                 access_endpoints: Optional[pulumi.Input[Sequence[pulumi.Input['ImageBuilderAccessEndpointArgs']]]] = None,
                 appstream_agent_version: Optional[pulumi.Input[str]] = None,
                 arn: Optional[pulumi.Input[str]] = None,
                 created_time: Optional[pulumi.Input[str]] = None,
                 description: Optional[pulumi.Input[str]] = None,
                 display_name: Optional[pulumi.Input[str]] = None,
                 domain_join_info: Optional[pulumi.Input['ImageBuilderDomainJoinInfoArgs']] = None,
                 enable_default_internet_access: Optional[pulumi.Input[bool]] = None,
                 iam_role_arn: Optional[pulumi.Input[str]] = None,
                 image_arn: Optional[pulumi.Input[str]] = None,
                 image_name: Optional[pulumi.Input[str]] = None,
                 instance_type: Optional[pulumi.Input[str]] = None,
                 name: Optional[pulumi.Input[str]] = None,
                 state: Optional[pulumi.Input[str]] = None,
                 tags: Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]] = None,
                 tags_all: Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]] = None,
                 vpc_config: Optional[pulumi.Input['ImageBuilderVpcConfigArgs']] = None):
        """
        Input properties used for looking up and filtering ImageBuilder resources.
        :param pulumi.Input[Sequence[pulumi.Input['ImageBuilderAccessEndpointArgs']]] access_endpoints: Set of interface VPC endpoint (interface endpoint) objects. Maximum of 4. See below.
        :param pulumi.Input[str] appstream_agent_version: The version of the AppStream 2.0 agent to use for this image builder.
        :param pulumi.Input[str] arn: ARN of the appstream image builder.
        :param pulumi.Input[str] created_time: Date and time, in UTC and extended RFC 3339 format, when the image builder was created.
        :param pulumi.Input[str] description: Description to display.
        :param pulumi.Input[str] display_name: Human-readable friendly name for the AppStream image builder.
        :param pulumi.Input['ImageBuilderDomainJoinInfoArgs'] domain_join_info: Configuration block for the name of the directory and organizational unit (OU) to use to join the image builder to a Microsoft Active Directory domain. See below.
        :param pulumi.Input[bool] enable_default_internet_access: Enables or disables default internet access for the image builder.
        :param pulumi.Input[str] iam_role_arn: ARN of the IAM role to apply to the image builder.
        :param pulumi.Input[str] image_arn: ARN of the public, private, or shared image to use.
        :param pulumi.Input[str] image_name: Name of the image used to create the image builder.
        :param pulumi.Input[str] instance_type: The instance type to use when launching the image builder.
        :param pulumi.Input[str] name: Unique name for the image builder.
        :param pulumi.Input[str] state: State of the image builder. Can be: `PENDING`, `UPDATING_AGENT`, `RUNNING`, `STOPPING`, `STOPPED`, `REBOOTING`, `SNAPSHOTTING`, `DELETING`, `FAILED`, `UPDATING`, `PENDING_QUALIFICATION`
        :param pulumi.Input[Mapping[str, pulumi.Input[str]]] tags: A map of tags to assign to the instance. If configured with a provider `default_tags` configuration block present, tags with matching keys will overwrite those defined at the provider-level.
        :param pulumi.Input[Mapping[str, pulumi.Input[str]]] tags_all: A map of tags assigned to the resource, including those inherited from the provider `default_tags` configuration block.
        :param pulumi.Input['ImageBuilderVpcConfigArgs'] vpc_config: Configuration block for the VPC configuration for the image builder. See below.
        """
        # Properties are stored only when explicitly provided.
        if access_endpoints is not None:
            pulumi.set(__self__, "access_endpoints", access_endpoints)
        if appstream_agent_version is not None:
            pulumi.set(__self__, "appstream_agent_version", appstream_agent_version)
        if arn is not None:
            pulumi.set(__self__, "arn", arn)
        if created_time is not None:
            pulumi.set(__self__, "created_time", created_time)
        if description is not None:
            pulumi.set(__self__, "description", description)
        if display_name is not None:
            pulumi.set(__self__, "display_name", display_name)
        if domain_join_info is not None:
            pulumi.set(__self__, "domain_join_info", domain_join_info)
        if enable_default_internet_access is not None:
            pulumi.set(__self__, "enable_default_internet_access", enable_default_internet_access)
        if iam_role_arn is not None:
            pulumi.set(__self__, "iam_role_arn", iam_role_arn)
        if image_arn is not None:
            pulumi.set(__self__, "image_arn", image_arn)
        if image_name is not None:
            pulumi.set(__self__, "image_name", image_name)
        if instance_type is not None:
            pulumi.set(__self__, "instance_type", instance_type)
        if name is not None:
            pulumi.set(__self__, "name", name)
        if state is not None:
            pulumi.set(__self__, "state", state)
        if tags is not None:
            pulumi.set(__self__, "tags", tags)
        if tags_all is not None:
            pulumi.set(__self__, "tags_all", tags_all)
        if vpc_config is not None:
            pulumi.set(__self__, "vpc_config", vpc_config)

    @property
    @pulumi.getter(name="accessEndpoints")
    def access_endpoints(self) -> Optional[pulumi.Input[Sequence[pulumi.Input['ImageBuilderAccessEndpointArgs']]]]:
        """
        Set of interface VPC endpoint (interface endpoint) objects. Maximum of 4. See below.
        """
        return pulumi.get(self, "access_endpoints")

    @access_endpoints.setter
    def access_endpoints(self, value: Optional[pulumi.Input[Sequence[pulumi.Input['ImageBuilderAccessEndpointArgs']]]]):
        pulumi.set(self, "access_endpoints", value)

    @property
    @pulumi.getter(name="appstreamAgentVersion")
    def appstream_agent_version(self) -> Optional[pulumi.Input[str]]:
        """
        The version of the AppStream 2.0 agent to use for this image builder.
        """
        return pulumi.get(self, "appstream_agent_version")

    @appstream_agent_version.setter
    def appstream_agent_version(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "appstream_agent_version", value)

    @property
    @pulumi.getter
    def arn(self) -> Optional[pulumi.Input[str]]:
        """
        ARN of the appstream image builder.
        """
        return pulumi.get(self, "arn")

    @arn.setter
    def arn(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "arn", value)

    @property
    @pulumi.getter(name="createdTime")
    def created_time(self) -> Optional[pulumi.Input[str]]:
        """
        Date and time, in UTC and extended RFC 3339 format, when the image builder was created.
        """
        return pulumi.get(self, "created_time")

    @created_time.setter
    def created_time(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "created_time", value)

    @property
    @pulumi.getter
    def description(self) -> Optional[pulumi.Input[str]]:
        """
        Description to display.
        """
        return pulumi.get(self, "description")

    @description.setter
    def description(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "description", value)

    @property
    @pulumi.getter(name="displayName")
    def display_name(self) -> Optional[pulumi.Input[str]]:
        """
        Human-readable friendly name for the AppStream image builder.
        """
        return pulumi.get(self, "display_name")

    @display_name.setter
    def display_name(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "display_name", value)

    @property
    @pulumi.getter(name="domainJoinInfo")
    def domain_join_info(self) -> Optional[pulumi.Input['ImageBuilderDomainJoinInfoArgs']]:
        """
        Configuration block for the name of the directory and organizational unit (OU) to use to join the image builder to a Microsoft Active Directory domain. See below.
        """
        return pulumi.get(self, "domain_join_info")

    @domain_join_info.setter
    def domain_join_info(self, value: Optional[pulumi.Input['ImageBuilderDomainJoinInfoArgs']]):
        pulumi.set(self, "domain_join_info", value)

    @property
    @pulumi.getter(name="enableDefaultInternetAccess")
    def enable_default_internet_access(self) -> Optional[pulumi.Input[bool]]:
        """
        Enables or disables default internet access for the image builder.
        """
        return pulumi.get(self, "enable_default_internet_access")

    @enable_default_internet_access.setter
    def enable_default_internet_access(self, value: Optional[pulumi.Input[bool]]):
        pulumi.set(self, "enable_default_internet_access", value)

    @property
    @pulumi.getter(name="iamRoleArn")
    def iam_role_arn(self) -> Optional[pulumi.Input[str]]:
        """
        ARN of the IAM role to apply to the image builder.
        """
        return pulumi.get(self, "iam_role_arn")

    @iam_role_arn.setter
    def iam_role_arn(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "iam_role_arn", value)

    @property
    @pulumi.getter(name="imageArn")
    def image_arn(self) -> Optional[pulumi.Input[str]]:
        """
        ARN of the public, private, or shared image to use.
        """
        return pulumi.get(self, "image_arn")

    @image_arn.setter
    def image_arn(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "image_arn", value)

    @property
    @pulumi.getter(name="imageName")
    def image_name(self) -> Optional[pulumi.Input[str]]:
        """
        Name of the image used to create the image builder.
        """
        return pulumi.get(self, "image_name")

    @image_name.setter
    def image_name(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "image_name", value)

    @property
    @pulumi.getter(name="instanceType")
    def instance_type(self) -> Optional[pulumi.Input[str]]:
        """
        The instance type to use when launching the image builder.
        """
        return pulumi.get(self, "instance_type")

    @instance_type.setter
    def instance_type(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "instance_type", value)

    @property
    @pulumi.getter
    def name(self) -> Optional[pulumi.Input[str]]:
        """
        Unique name for the image builder.
        """
        return pulumi.get(self, "name")

    @name.setter
    def name(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "name", value)

    @property
    @pulumi.getter
    def state(self) -> Optional[pulumi.Input[str]]:
        """
        State of the image builder. Can be: `PENDING`, `UPDATING_AGENT`, `RUNNING`, `STOPPING`, `STOPPED`, `REBOOTING`, `SNAPSHOTTING`, `DELETING`, `FAILED`, `UPDATING`, `PENDING_QUALIFICATION`
        """
        return pulumi.get(self, "state")

    @state.setter
    def state(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "state", value)

    @property
    @pulumi.getter
    def tags(self) -> Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]]:
        """
        A map of tags to assign to the instance. If configured with a provider `default_tags` configuration block present, tags with matching keys will overwrite those defined at the provider-level.
        """
        return pulumi.get(self, "tags")

    @tags.setter
    def tags(self, value: Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]]):
        pulumi.set(self, "tags", value)

    @property
    @pulumi.getter(name="tagsAll")
    def tags_all(self) -> Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]]:
        """
        A map of tags assigned to the resource, including those inherited from the provider `default_tags` configuration block.
        """
        return pulumi.get(self, "tags_all")

    @tags_all.setter
    def tags_all(self, value: Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]]):
        pulumi.set(self, "tags_all", value)

    @property
    @pulumi.getter(name="vpcConfig")
    def vpc_config(self) -> Optional[pulumi.Input['ImageBuilderVpcConfigArgs']]:
        """
        Configuration block for the VPC configuration for the image builder. See below.
        """
        return pulumi.get(self, "vpc_config")

    @vpc_config.setter
    def vpc_config(self, value: Optional[pulumi.Input['ImageBuilderVpcConfigArgs']]):
        pulumi.set(self, "vpc_config", value)
class ImageBuilder(pulumi.CustomResource):
    """An AppStream image builder resource (pulumi provider codegen layout).

    The two ``@overload`` ``__init__`` signatures below exist only for type
    checkers; the real constructor dispatches to ``_internal_init``.
    """

    @overload
    def __init__(__self__,
                 resource_name: str,
                 opts: Optional[pulumi.ResourceOptions] = None,
                 access_endpoints: Optional[pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['ImageBuilderAccessEndpointArgs']]]]] = None,
                 appstream_agent_version: Optional[pulumi.Input[str]] = None,
                 description: Optional[pulumi.Input[str]] = None,
                 display_name: Optional[pulumi.Input[str]] = None,
                 domain_join_info: Optional[pulumi.Input[pulumi.InputType['ImageBuilderDomainJoinInfoArgs']]] = None,
                 enable_default_internet_access: Optional[pulumi.Input[bool]] = None,
                 iam_role_arn: Optional[pulumi.Input[str]] = None,
                 image_arn: Optional[pulumi.Input[str]] = None,
                 image_name: Optional[pulumi.Input[str]] = None,
                 instance_type: Optional[pulumi.Input[str]] = None,
                 name: Optional[pulumi.Input[str]] = None,
                 tags: Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]] = None,
                 vpc_config: Optional[pulumi.Input[pulumi.InputType['ImageBuilderVpcConfigArgs']]] = None,
                 __props__=None):
        """
        Provides an AppStream image builder.

        ## Example Usage
        ```python
        import pulumi
        import pulumi_aws as aws

        test_fleet = aws.appstream.ImageBuilder("testFleet",
            description="Description of a ImageBuilder",
            display_name="Display name of a ImageBuilder",
            enable_default_internet_access=False,
            image_name="AppStream-WinServer2012R2-07-19-2021",
            instance_type="stream.standard.large",
            vpc_config=aws.appstream.ImageBuilderVpcConfigArgs(
                subnet_ids=[aws_subnet["example"]["id"]],
            ),
            tags={
                "Name": "Example Image Builder",
            })
        ```

        ## Import

        `aws_appstream_image_builder` can be imported using the `name`, e.g.,

        ```sh
         $ pulumi import aws:appstream/imageBuilder:ImageBuilder example imageBuilderExample
        ```

        :param str resource_name: The name of the resource.
        :param pulumi.ResourceOptions opts: Options for the resource.
        :param pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['ImageBuilderAccessEndpointArgs']]]] access_endpoints: Set of interface VPC endpoint (interface endpoint) objects. Maximum of 4. See below.
        :param pulumi.Input[str] appstream_agent_version: The version of the AppStream 2.0 agent to use for this image builder.
        :param pulumi.Input[str] description: Description to display.
        :param pulumi.Input[str] display_name: Human-readable friendly name for the AppStream image builder.
        :param pulumi.Input[pulumi.InputType['ImageBuilderDomainJoinInfoArgs']] domain_join_info: Configuration block for the name of the directory and organizational unit (OU) to use to join the image builder to a Microsoft Active Directory domain. See below.
        :param pulumi.Input[bool] enable_default_internet_access: Enables or disables default internet access for the image builder.
        :param pulumi.Input[str] iam_role_arn: ARN of the IAM role to apply to the image builder.
        :param pulumi.Input[str] image_arn: ARN of the public, private, or shared image to use.
        :param pulumi.Input[str] image_name: Name of the image used to create the image builder.
        :param pulumi.Input[str] instance_type: The instance type to use when launching the image builder.
        :param pulumi.Input[str] name: Unique name for the image builder.
        :param pulumi.Input[Mapping[str, pulumi.Input[str]]] tags: A map of tags to assign to the instance. If configured with a provider `default_tags` configuration block present, tags with matching keys will overwrite those defined at the provider-level.
        :param pulumi.Input[pulumi.InputType['ImageBuilderVpcConfigArgs']] vpc_config: Configuration block for the VPC configuration for the image builder. See below.
        """
        ...
    @overload
    def __init__(__self__,
                 resource_name: str,
                 args: ImageBuilderArgs,
                 opts: Optional[pulumi.ResourceOptions] = None):
        """
        Provides an AppStream image builder.

        ## Example Usage
        ```python
        import pulumi
        import pulumi_aws as aws

        test_fleet = aws.appstream.ImageBuilder("testFleet",
            description="Description of a ImageBuilder",
            display_name="Display name of a ImageBuilder",
            enable_default_internet_access=False,
            image_name="AppStream-WinServer2012R2-07-19-2021",
            instance_type="stream.standard.large",
            vpc_config=aws.appstream.ImageBuilderVpcConfigArgs(
                subnet_ids=[aws_subnet["example"]["id"]],
            ),
            tags={
                "Name": "Example Image Builder",
            })
        ```

        ## Import

        `aws_appstream_image_builder` can be imported using the `name`, e.g.,

        ```sh
         $ pulumi import aws:appstream/imageBuilder:ImageBuilder example imageBuilderExample
        ```

        :param str resource_name: The name of the resource.
        :param ImageBuilderArgs args: The arguments to use to populate this resource's properties.
        :param pulumi.ResourceOptions opts: Options for the resource.
        """
        ...
    def __init__(__self__, resource_name: str, *args, **kwargs):
        # Dispatch between the two overloads: a fully-built ImageBuilderArgs
        # bag, or individual keyword arguments.
        resource_args, opts = _utilities.get_resource_args_opts(ImageBuilderArgs, pulumi.ResourceOptions, *args, **kwargs)
        if resource_args is not None:
            __self__._internal_init(resource_name, opts, **resource_args.__dict__)
        else:
            __self__._internal_init(resource_name, *args, **kwargs)

    def _internal_init(__self__,
                 resource_name: str,
                 opts: Optional[pulumi.ResourceOptions] = None,
                 access_endpoints: Optional[pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['ImageBuilderAccessEndpointArgs']]]]] = None,
                 appstream_agent_version: Optional[pulumi.Input[str]] = None,
                 description: Optional[pulumi.Input[str]] = None,
                 display_name: Optional[pulumi.Input[str]] = None,
                 domain_join_info: Optional[pulumi.Input[pulumi.InputType['ImageBuilderDomainJoinInfoArgs']]] = None,
                 enable_default_internet_access: Optional[pulumi.Input[bool]] = None,
                 iam_role_arn: Optional[pulumi.Input[str]] = None,
                 image_arn: Optional[pulumi.Input[str]] = None,
                 image_name: Optional[pulumi.Input[str]] = None,
                 instance_type: Optional[pulumi.Input[str]] = None,
                 name: Optional[pulumi.Input[str]] = None,
                 tags: Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]] = None,
                 vpc_config: Optional[pulumi.Input[pulumi.InputType['ImageBuilderVpcConfigArgs']]] = None,
                 __props__=None):
        # Normalize and validate resource options (standard codegen boilerplate).
        if opts is None:
            opts = pulumi.ResourceOptions()
        if not isinstance(opts, pulumi.ResourceOptions):
            raise TypeError('Expected resource options to be a ResourceOptions instance')
        if opts.version is None:
            opts.version = _utilities.get_version()
        if opts.id is None:
            # Creating a new resource: build the input property bag here.
            # (With opts.id set, a prebuilt __props__ is used to look up state.)
            if __props__ is not None:
                raise TypeError('__props__ is only valid when passed in combination with a valid opts.id to get an existing resource')
            __props__ = ImageBuilderArgs.__new__(ImageBuilderArgs)
            __props__.__dict__["access_endpoints"] = access_endpoints
            __props__.__dict__["appstream_agent_version"] = appstream_agent_version
            __props__.__dict__["description"] = description
            __props__.__dict__["display_name"] = display_name
            __props__.__dict__["domain_join_info"] = domain_join_info
            __props__.__dict__["enable_default_internet_access"] = enable_default_internet_access
            __props__.__dict__["iam_role_arn"] = iam_role_arn
            __props__.__dict__["image_arn"] = image_arn
            __props__.__dict__["image_name"] = image_name
            # instance_type is the only required input (unless resolving by URN).
            if instance_type is None and not opts.urn:
                raise TypeError("Missing required property 'instance_type'")
            __props__.__dict__["instance_type"] = instance_type
            __props__.__dict__["name"] = name
            __props__.__dict__["tags"] = tags
            __props__.__dict__["vpc_config"] = vpc_config
            # Output-only properties start as None; the engine fills them in.
            __props__.__dict__["arn"] = None
            __props__.__dict__["created_time"] = None
            __props__.__dict__["state"] = None
            __props__.__dict__["tags_all"] = None
        super(ImageBuilder, __self__).__init__(
            'aws:appstream/imageBuilder:ImageBuilder',
            resource_name,
            __props__,
            opts)

    @staticmethod
    def get(resource_name: str,
            id: pulumi.Input[str],
            opts: Optional[pulumi.ResourceOptions] = None,
            access_endpoints: Optional[pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['ImageBuilderAccessEndpointArgs']]]]] = None,
            appstream_agent_version: Optional[pulumi.Input[str]] = None,
            arn: Optional[pulumi.Input[str]] = None,
            created_time: Optional[pulumi.Input[str]] = None,
            description: Optional[pulumi.Input[str]] = None,
            display_name: Optional[pulumi.Input[str]] = None,
            domain_join_info: Optional[pulumi.Input[pulumi.InputType['ImageBuilderDomainJoinInfoArgs']]] = None,
            enable_default_internet_access: Optional[pulumi.Input[bool]] = None,
            iam_role_arn: Optional[pulumi.Input[str]] = None,
            image_arn: Optional[pulumi.Input[str]] = None,
            image_name: Optional[pulumi.Input[str]] = None,
            instance_type: Optional[pulumi.Input[str]] = None,
            name: Optional[pulumi.Input[str]] = None,
            state: Optional[pulumi.Input[str]] = None,
            tags: Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]] = None,
            tags_all: Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]] = None,
            vpc_config: Optional[pulumi.Input[pulumi.InputType['ImageBuilderVpcConfigArgs']]] = None) -> 'ImageBuilder':
        """
        Get an existing ImageBuilder resource's state with the given name, id, and optional extra
        properties used to qualify the lookup.

        :param str resource_name: The unique name of the resulting resource.
        :param pulumi.Input[str] id: The unique provider ID of the resource to lookup.
        :param pulumi.ResourceOptions opts: Options for the resource.
        :param pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['ImageBuilderAccessEndpointArgs']]]] access_endpoints: Set of interface VPC endpoint (interface endpoint) objects. Maximum of 4. See below.
        :param pulumi.Input[str] appstream_agent_version: The version of the AppStream 2.0 agent to use for this image builder.
        :param pulumi.Input[str] arn: ARN of the appstream image builder.
        :param pulumi.Input[str] created_time: Date and time, in UTC and extended RFC 3339 format, when the image builder was created.
        :param pulumi.Input[str] description: Description to display.
        :param pulumi.Input[str] display_name: Human-readable friendly name for the AppStream image builder.
        :param pulumi.Input[pulumi.InputType['ImageBuilderDomainJoinInfoArgs']] domain_join_info: Configuration block for the name of the directory and organizational unit (OU) to use to join the image builder to a Microsoft Active Directory domain. See below.
        :param pulumi.Input[bool] enable_default_internet_access: Enables or disables default internet access for the image builder.
        :param pulumi.Input[str] iam_role_arn: ARN of the IAM role to apply to the image builder.
        :param pulumi.Input[str] image_arn: ARN of the public, private, or shared image to use.
        :param pulumi.Input[str] image_name: Name of the image used to create the image builder.
        :param pulumi.Input[str] instance_type: The instance type to use when launching the image builder.
        :param pulumi.Input[str] name: Unique name for the image builder.
        :param pulumi.Input[str] state: State of the image builder. Can be: `PENDING`, `UPDATING_AGENT`, `RUNNING`, `STOPPING`, `STOPPED`, `REBOOTING`, `SNAPSHOTTING`, `DELETING`, `FAILED`, `UPDATING`, `PENDING_QUALIFICATION`
        :param pulumi.Input[Mapping[str, pulumi.Input[str]]] tags: A map of tags to assign to the instance. If configured with a provider `default_tags` configuration block present, tags with matching keys will overwrite those defined at the provider-level.
        :param pulumi.Input[Mapping[str, pulumi.Input[str]]] tags_all: A map of tags assigned to the resource, including those inherited from the provider `default_tags` configuration block.
        :param pulumi.Input[pulumi.InputType['ImageBuilderVpcConfigArgs']] vpc_config: Configuration block for the VPC configuration for the image builder. See below.
        """
        # Merging id into opts tells _internal_init to look up existing state
        # instead of creating a new resource.
        opts = pulumi.ResourceOptions.merge(opts, pulumi.ResourceOptions(id=id))
        __props__ = _ImageBuilderState.__new__(_ImageBuilderState)
        __props__.__dict__["access_endpoints"] = access_endpoints
        __props__.__dict__["appstream_agent_version"] = appstream_agent_version
        __props__.__dict__["arn"] = arn
        __props__.__dict__["created_time"] = created_time
        __props__.__dict__["description"] = description
        __props__.__dict__["display_name"] = display_name
        __props__.__dict__["domain_join_info"] = domain_join_info
        __props__.__dict__["enable_default_internet_access"] = enable_default_internet_access
        __props__.__dict__["iam_role_arn"] = iam_role_arn
        __props__.__dict__["image_arn"] = image_arn
        __props__.__dict__["image_name"] = image_name
        __props__.__dict__["instance_type"] = instance_type
        __props__.__dict__["name"] = name
        __props__.__dict__["state"] = state
        __props__.__dict__["tags"] = tags
        __props__.__dict__["tags_all"] = tags_all
        __props__.__dict__["vpc_config"] = vpc_config
        return ImageBuilder(resource_name, opts=opts, __props__=__props__)

    # --- Output properties (read-only views over engine-resolved state) ---

    @property
    @pulumi.getter(name="accessEndpoints")
    def access_endpoints(self) -> pulumi.Output[Optional[Sequence['outputs.ImageBuilderAccessEndpoint']]]:
        """
        Set of interface VPC endpoint (interface endpoint) objects. Maximum of 4. See below.
        """
        return pulumi.get(self, "access_endpoints")

    @property
    @pulumi.getter(name="appstreamAgentVersion")
    def appstream_agent_version(self) -> pulumi.Output[str]:
        """
        The version of the AppStream 2.0 agent to use for this image builder.
        """
        return pulumi.get(self, "appstream_agent_version")

    @property
    @pulumi.getter
    def arn(self) -> pulumi.Output[str]:
        """
        ARN of the appstream image builder.
        """
        return pulumi.get(self, "arn")

    @property
    @pulumi.getter(name="createdTime")
    def created_time(self) -> pulumi.Output[str]:
        """
        Date and time, in UTC and extended RFC 3339 format, when the image builder was created.
        """
        return pulumi.get(self, "created_time")

    @property
    @pulumi.getter
    def description(self) -> pulumi.Output[str]:
        """
        Description to display.
        """
        return pulumi.get(self, "description")

    @property
    @pulumi.getter(name="displayName")
    def display_name(self) -> pulumi.Output[str]:
        """
        Human-readable friendly name for the AppStream image builder.
        """
        return pulumi.get(self, "display_name")

    @property
    @pulumi.getter(name="domainJoinInfo")
    def domain_join_info(self) -> pulumi.Output['outputs.ImageBuilderDomainJoinInfo']:
        """
        Configuration block for the name of the directory and organizational unit (OU) to use to join the image builder to a Microsoft Active Directory domain. See below.
        """
        return pulumi.get(self, "domain_join_info")

    @property
    @pulumi.getter(name="enableDefaultInternetAccess")
    def enable_default_internet_access(self) -> pulumi.Output[bool]:
        """
        Enables or disables default internet access for the image builder.
        """
        return pulumi.get(self, "enable_default_internet_access")

    @property
    @pulumi.getter(name="iamRoleArn")
    def iam_role_arn(self) -> pulumi.Output[str]:
        """
        ARN of the IAM role to apply to the image builder.
        """
        return pulumi.get(self, "iam_role_arn")

    @property
    @pulumi.getter(name="imageArn")
    def image_arn(self) -> pulumi.Output[str]:
        """
        ARN of the public, private, or shared image to use.
        """
        return pulumi.get(self, "image_arn")

    @property
    @pulumi.getter(name="imageName")
    def image_name(self) -> pulumi.Output[str]:
        """
        Name of the image used to create the image builder.
        """
        return pulumi.get(self, "image_name")

    @property
    @pulumi.getter(name="instanceType")
    def instance_type(self) -> pulumi.Output[str]:
        """
        The instance type to use when launching the image builder.
        """
        return pulumi.get(self, "instance_type")

    @property
    @pulumi.getter
    def name(self) -> pulumi.Output[str]:
        """
        Unique name for the image builder.
        """
        return pulumi.get(self, "name")

    @property
    @pulumi.getter
    def state(self) -> pulumi.Output[str]:
        """
        State of the image builder. Can be: `PENDING`, `UPDATING_AGENT`, `RUNNING`, `STOPPING`, `STOPPED`, `REBOOTING`, `SNAPSHOTTING`, `DELETING`, `FAILED`, `UPDATING`, `PENDING_QUALIFICATION`
        """
        return pulumi.get(self, "state")

    @property
    @pulumi.getter
    def tags(self) -> pulumi.Output[Optional[Mapping[str, str]]]:
        """
        A map of tags to assign to the instance. If configured with a provider `default_tags` configuration block present, tags with matching keys will overwrite those defined at the provider-level.
        """
        return pulumi.get(self, "tags")

    @property
    @pulumi.getter(name="tagsAll")
    def tags_all(self) -> pulumi.Output[Mapping[str, str]]:
        """
        A map of tags assigned to the resource, including those inherited from the provider `default_tags` configuration block.
        """
        return pulumi.get(self, "tags_all")

    @property
    @pulumi.getter(name="vpcConfig")
    def vpc_config(self) -> pulumi.Output['outputs.ImageBuilderVpcConfig']:
        """
        Configuration block for the VPC configuration for the image builder. See below.
        """
        return pulumi.get(self, "vpc_config")
| 47.835787
| 260
| 0.664528
| 5,018
| 42,239
| 5.377441
| 0.050219
| 0.093352
| 0.074192
| 0.066039
| 0.919026
| 0.907797
| 0.895975
| 0.890417
| 0.883079
| 0.859472
| 0
| 0.001972
| 0.231563
| 42,239
| 882
| 261
| 47.890023
| 0.82938
| 0.336182
| 0
| 0.80396
| 1
| 0
| 0.126509
| 0.058685
| 0
| 0
| 0
| 0
| 0
| 1
| 0.166337
| false
| 0.00198
| 0.013861
| 0
| 0.281188
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
075fcbc2f8128ddad85d7cbc134e8f326ab70b05
| 3,842
|
py
|
Python
|
tests/unittests/test_tokenizer.py
|
mj-kh/speechbrain
|
9351f61cc057ddf3f8a0b7074a9c3c857dec84ed
|
[
"Apache-2.0"
] | 3,913
|
2021-03-14T13:54:52.000Z
|
2022-03-30T05:09:55.000Z
|
tests/unittests/test_tokenizer.py
|
mj-kh/speechbrain
|
9351f61cc057ddf3f8a0b7074a9c3c857dec84ed
|
[
"Apache-2.0"
] | 667
|
2021-03-14T20:11:17.000Z
|
2022-03-31T04:07:17.000Z
|
tests/unittests/test_tokenizer.py
|
mj-kh/speechbrain
|
9351f61cc057ddf3f8a0b7074a9c3c857dec84ed
|
[
"Apache-2.0"
] | 785
|
2021-03-14T13:20:57.000Z
|
2022-03-31T03:26:03.000Z
|
import os
import torch
def test_tokenizer():
    """Train small BPE SentencePiece tokenizers and verify encode/decode.

    Covers two input formats (word-level and char-level labels) and, for
    each, checks that decoding — both from padded (batch, batch_lens)
    tensors and from a list of unpadded id sequences — reproduces the
    expected word sequences.  Requires the dev-clean.csv fixture on disk.
    """
    from speechbrain.tokenizers.SentencePiece import SentencePiece

    # Expected decoded word sequences for both batch items.
    gt = [
        ["HELLO", "MORNING", "MORNING", "HELLO"],
        ["HELLO", "MORNING", "HELLO"],
    ]

    def _check_decoding(spm, encoded_seq_ids, encoded_seq_pieces):
        # Shared assertion sequence for both tokenizers: decode from the
        # padded tensors and from an unpadded list; both must equal `gt`.
        lens = (encoded_seq_pieces * encoded_seq_ids.shape[1]).round().int()
        # decode from torch tensors (batch, batch_lens)
        words_seq = spm(encoded_seq_ids, encoded_seq_pieces, task="decode")
        assert words_seq == gt, "output not the same"
        # decode from a list of bpe sequence (without padding)
        hyps_list = [
            encoded_seq_ids[0].int().tolist(),
            encoded_seq_ids[1][: lens[1]].int().tolist(),
        ]
        words_seq = spm(hyps_list, task="decode_from_list")
        assert words_seq == gt, "output not the same"

    # Word-level input test
    dict_int2lab = {1: "HELLO", 2: "MORNING"}
    spm = SentencePiece(
        os.path.abspath("tokenizer_data/"),
        100,
        annotation_train=os.path.abspath(
            "tests/unittests/tokenizer_data/dev-clean.csv"
        ),
        annotation_read="wrd",
        model_type="bpe",
    )
    encoded_seq_ids, encoded_seq_pieces = spm(
        torch.Tensor([[1, 2, 2, 1], [1, 2, 1, 0]]),
        torch.Tensor([1.0, 0.75]),
        dict_int2lab,
        task="encode",
    )
    _check_decoding(spm, encoded_seq_ids, encoded_seq_pieces)

    # Char-level input test
    dict_int2lab = {
        1: "H",
        2: "E",
        3: "L",
        4: "O",
        5: "M",
        6: "R",
        7: "N",
        8: "I",
        9: "G",
        10: "_",
    }
    spm = SentencePiece(
        os.path.abspath("tokenizer_data/"),
        100,
        annotation_train=os.path.abspath(
            "tests/unittests/tokenizer_data/dev-clean.csv"
        ),
        annotation_read="char",
        char_format_input=True,
        model_type="bpe",
    )
    # Characters spell "HELLO_MORNING_MORNING_HELLO" and (padded with 0s)
    # "HELLO_MORNING_HELLO"; relative length 19/27 = 0.7037...
    encoded_seq_ids, encoded_seq_pieces = spm(
        torch.Tensor(
            [
                [1, 2, 3, 3, 4, 10, 5, 4, 6, 7, 8, 7, 9, 10,
                 5, 4, 6, 7, 8, 7, 9, 10, 1, 2, 3, 3, 4],
                [1, 2, 3, 3, 4, 10, 5, 4, 6, 7, 8, 7, 9, 10,
                 1, 2, 3, 3, 4, 0, 0, 0, 0, 0, 0, 0, 0],
            ]
        ),
        torch.Tensor([1.0, 0.7037037037037037]),
        dict_int2lab,
        task="encode",
    )
    _check_decoding(spm, encoded_seq_ids, encoded_seq_pieces)
| 26.496552
| 72
| 0.391983
| 370
| 3,842
| 3.886486
| 0.237838
| 0.111266
| 0.090403
| 0.013908
| 0.844228
| 0.844228
| 0.7879
| 0.782337
| 0.782337
| 0.782337
| 0
| 0.063377
| 0.498959
| 3,842
| 144
| 73
| 26.680556
| 0.683636
| 0.062728
| 0
| 0.709924
| 0
| 0
| 0.090707
| 0.024485
| 0
| 0
| 0
| 0
| 0.030534
| 1
| 0.007634
| false
| 0
| 0.022901
| 0
| 0.030534
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
4ae888ee135fab0e05dd441d010f2b15c93d4f0a
| 1,349
|
py
|
Python
|
tests/lib/test_fixp.py
|
bogdanvuk/pygears
|
a0b21d445e1d5c89ad66751447b8253536b835ee
|
[
"MIT"
] | 120
|
2018-04-23T08:29:04.000Z
|
2022-03-30T14:41:52.000Z
|
tests/lib/test_fixp.py
|
FZP1607152286/pygears
|
a0b21d445e1d5c89ad66751447b8253536b835ee
|
[
"MIT"
] | 12
|
2019-07-09T17:12:58.000Z
|
2022-03-18T09:05:10.000Z
|
tests/lib/test_fixp.py
|
FZP1607152286/pygears
|
a0b21d445e1d5c89ad66751447b8253536b835ee
|
[
"MIT"
] | 12
|
2019-05-10T19:42:08.000Z
|
2022-03-28T18:26:44.000Z
|
from pygears import gear, Intf
from pygears.typing import Fixp, Int
from pygears.sim import cosim
from pygears.sim import call
def test_le():
    """Check the `<=` comparator gear on Fixp vs. Int operands."""

    @gear
    def test(a: Fixp, b: Int):
        return a <= b

    # Instantiate the gear on 16-bit interfaces.
    test(Intf(Fixp[1, 16]), Intf(Int[16]))

    # (lhs value, expected result of lhs <= 0)
    for lhs, expected in ((0, 1), (-0.01, 1), (0.01, 0)):
        assert call(test, Fixp[1, 16](lhs), Int[16](0))[0] == expected
def test_lt():
    """Check the `<` comparator gear on Fixp vs. Int operands."""

    @gear
    def test(a: Fixp, b: Int):
        return a < b

    # Instantiate the gear on 16-bit interfaces.
    test(Intf(Fixp[1, 16]), Intf(Int[16]))

    # (lhs value, expected result of lhs < 0)
    for lhs, expected in ((0, 0), (-0.01, 1), (0.01, 0)):
        assert call(test, Fixp[1, 16](lhs), Int[16](0))[0] == expected
def test_ge():
    """Check the `>=` comparator gear on Fixp vs. Int operands."""

    @gear
    def test(a: Fixp, b: Int):
        return a >= b

    # Instantiate the gear on 16-bit interfaces.
    test(Intf(Fixp[1, 16]), Intf(Int[16]))

    # (lhs value, expected result of lhs >= 0)
    for lhs, expected in ((0, 1), (-0.01, 0), (0.01, 1)):
        assert call(test, Fixp[1, 16](lhs), Int[16](0))[0] == expected
def test_gt():
    """Check the `>` comparator gear on Fixp vs. Int operands."""

    @gear
    def test(a: Fixp, b: Int):
        return a > b

    # Instantiate the gear on 16-bit interfaces.
    test(Intf(Fixp[1, 16]), Intf(Int[16]))

    # (lhs value, expected result of lhs > 0)
    for lhs, expected in ((0, 0), (-0.01, 0), (0.01, 1)):
        assert call(test, Fixp[1, 16](lhs), Int[16](0))[0] == expected
| 25.45283
| 61
| 0.539659
| 257
| 1,349
| 2.817121
| 0.097276
| 0.099448
| 0.154696
| 0.298343
| 0.825967
| 0.825967
| 0.825967
| 0.825967
| 0.825967
| 0.825967
| 0
| 0.138063
| 0.226835
| 1,349
| 52
| 62
| 25.942308
| 0.556088
| 0
| 0
| 0.666667
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.333333
| 1
| 0.222222
| false
| 0
| 0.111111
| 0.111111
| 0.444444
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 0
| 0
|
0
| 9
|
4af8de6e470b9bbfb723a5d86b9140014850a5bc
| 1,145
|
py
|
Python
|
Leetcode/Python/_1122.py
|
Xrenya/algorithms
|
aded82cacde2f4f2114241907861251e0e2e5638
|
[
"MIT"
] | 1
|
2021-11-28T15:03:32.000Z
|
2021-11-28T15:03:32.000Z
|
Leetcode/Python/_1122.py
|
Xrenya/algorithms
|
aded82cacde2f4f2114241907861251e0e2e5638
|
[
"MIT"
] | null | null | null |
Leetcode/Python/_1122.py
|
Xrenya/algorithms
|
aded82cacde2f4f2114241907861251e0e2e5638
|
[
"MIT"
] | null | null | null |
class Solution:
    def relativeSortArray(self, arr1: List[int], arr2: List[int]) -> List[int]:
        """Sort arr1 so that elements follow the order given by arr2.

        Elements of arr1 absent from arr2 are appended in ascending order.
        Assumes every element of arr2 occurs in arr1 (LeetCode 1122 constraint).
        """
        counts = collections.Counter(arr1)
        ordered = []
        for value in arr2:
            # pop() both emits the copies and removes the key from the leftovers
            ordered.extend([value] * counts.pop(value))
        # Counter.elements() repeats each remaining value by its count
        return ordered + sorted(counts.elements())
class Solution:
    def relativeSortArray(self, arr1: List[int], arr2: List[int]) -> List[int]:
        """Sort arr1 so that elements follow the order given by arr2.

        Variant that appends the leftover values by iterating their sorted
        keys and repeating each by its remaining count.
        """
        remaining = collections.Counter(arr1)
        result = []
        for value in arr2:
            result.extend([value] * remaining.pop(value))
        # Whatever was not consumed by arr2 goes last, ascending.
        for value in sorted(remaining):
            result.extend([value] * remaining[value])
        return result
class Solution:
    def relativeSortArray(self, arr1: List[int], arr2: List[int]) -> List[int]:
        """Sort arr1 so that elements follow the order given by arr2.

        Counter-free variant: builds the frequency map by hand, then emits
        arr2's values in order followed by the sorted leftovers.
        Assumes every element of arr2 occurs in arr1 (LeetCode 1122 constraint).
        """
        hashMap = {}
        for num in arr1:
            # Idiomatic counting: dict.get with a default replaces the
            # original `if num not in hashMap / else` branch pair.
            hashMap[num] = hashMap.get(num, 0) + 1
        array = []
        for num in arr2:
            # pop() both emits the copies and removes the key from the leftovers
            array += [num] * hashMap.pop(num)
        for num in sorted(hashMap.keys()):
            array += [num] * hashMap[num]
        return array
| 30.131579
| 79
| 0.499563
| 123
| 1,145
| 4.650407
| 0.203252
| 0.11014
| 0.083916
| 0.173077
| 0.847902
| 0.847902
| 0.847902
| 0.847902
| 0.847902
| 0.847902
| 0
| 0.019746
| 0.380786
| 1,145
| 37
| 80
| 30.945946
| 0.787024
| 0
| 0
| 0.766667
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.1
| false
| 0
| 0
| 0
| 0.3
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
ab5150f51734a90985f4d011aeb89f7d575dd887
| 310,230
|
py
|
Python
|
venv/Lib/site-packages/wcwidth/table_zero.py
|
gilbertekalea/booking.com_crawler
|
71e52c87cd72a77f80a3e5fc0af0e1a68a5712ae
|
[
"MIT"
] | 6,989
|
2017-07-18T06:23:18.000Z
|
2022-03-31T15:58:36.000Z
|
venv/Lib/site-packages/wcwidth/table_zero.py
|
gilbertekalea/booking.com_crawler
|
71e52c87cd72a77f80a3e5fc0af0e1a68a5712ae
|
[
"MIT"
] | 1,978
|
2017-07-18T09:17:58.000Z
|
2022-03-31T14:28:43.000Z
|
venv/Lib/site-packages/wcwidth/table_zero.py
|
gilbertekalea/booking.com_crawler
|
71e52c87cd72a77f80a3e5fc0af0e1a68a5712ae
|
[
"MIT"
] | 1,228
|
2017-07-18T09:03:13.000Z
|
2022-03-29T05:57:40.000Z
|
"""Zero_Width table, created by bin/update-tables.py."""
# Generated: 2020-06-23T16:03:21.187024
ZERO_WIDTH = {
'4.1.0': (
# Source: DerivedGeneralCategory-4.1.0.txt
# Date: 2005-02-26, 02:35:50 GMT [MD]
#
(0x00300, 0x0036f,), # Combining Grave Accent ..Combining Latin Small Le
(0x00483, 0x00486,), # Combining Cyrillic Titlo..Combining Cyrillic Psili
(0x00488, 0x00489,), # Combining Cyrillic Hundr..Combining Cyrillic Milli
(0x00591, 0x005b9,), # Hebrew Accent Etnahta ..Hebrew Point Holam
(0x005bb, 0x005bd,), # Hebrew Point Qubuts ..Hebrew Point Meteg
(0x005bf, 0x005bf,), # Hebrew Point Rafe ..Hebrew Point Rafe
(0x005c1, 0x005c2,), # Hebrew Point Shin Dot ..Hebrew Point Sin Dot
(0x005c4, 0x005c5,), # Hebrew Mark Upper Dot ..Hebrew Mark Lower Dot
(0x005c7, 0x005c7,), # Hebrew Point Qamats Qata..Hebrew Point Qamats Qata
(0x00610, 0x00615,), # Arabic Sign Sallallahou ..Arabic Small High Tah
(0x0064b, 0x0065e,), # Arabic Fathatan ..Arabic Fatha With Two Do
(0x00670, 0x00670,), # Arabic Letter Superscrip..Arabic Letter Superscrip
(0x006d6, 0x006dc,), # Arabic Small High Ligatu..Arabic Small High Seen
(0x006de, 0x006e4,), # Arabic Start Of Rub El H..Arabic Small High Madda
(0x006e7, 0x006e8,), # Arabic Small High Yeh ..Arabic Small High Noon
(0x006ea, 0x006ed,), # Arabic Empty Centre Low ..Arabic Small Low Meem
(0x00711, 0x00711,), # Syriac Letter Superscrip..Syriac Letter Superscrip
(0x00730, 0x0074a,), # Syriac Pthaha Above ..Syriac Barrekh
(0x007a6, 0x007b0,), # Thaana Abafili ..Thaana Sukun
(0x00901, 0x00902,), # Devanagari Sign Candrabi..Devanagari Sign Anusvara
(0x0093c, 0x0093c,), # Devanagari Sign Nukta ..Devanagari Sign Nukta
(0x00941, 0x00948,), # Devanagari Vowel Sign U ..Devanagari Vowel Sign Ai
(0x0094d, 0x0094d,), # Devanagari Sign Virama ..Devanagari Sign Virama
(0x00951, 0x00954,), # Devanagari Stress Sign U..Devanagari Acute Accent
(0x00962, 0x00963,), # Devanagari Vowel Sign Vo..Devanagari Vowel Sign Vo
(0x00981, 0x00981,), # Bengali Sign Candrabindu..Bengali Sign Candrabindu
(0x009bc, 0x009bc,), # Bengali Sign Nukta ..Bengali Sign Nukta
(0x009c1, 0x009c4,), # Bengali Vowel Sign U ..Bengali Vowel Sign Vocal
(0x009cd, 0x009cd,), # Bengali Sign Virama ..Bengali Sign Virama
(0x009e2, 0x009e3,), # Bengali Vowel Sign Vocal..Bengali Vowel Sign Vocal
(0x00a01, 0x00a02,), # Gurmukhi Sign Adak Bindi..Gurmukhi Sign Bindi
(0x00a3c, 0x00a3c,), # Gurmukhi Sign Nukta ..Gurmukhi Sign Nukta
(0x00a41, 0x00a42,), # Gurmukhi Vowel Sign U ..Gurmukhi Vowel Sign Uu
(0x00a47, 0x00a48,), # Gurmukhi Vowel Sign Ee ..Gurmukhi Vowel Sign Ai
(0x00a4b, 0x00a4d,), # Gurmukhi Vowel Sign Oo ..Gurmukhi Sign Virama
(0x00a70, 0x00a71,), # Gurmukhi Tippi ..Gurmukhi Addak
(0x00a81, 0x00a82,), # Gujarati Sign Candrabind..Gujarati Sign Anusvara
(0x00abc, 0x00abc,), # Gujarati Sign Nukta ..Gujarati Sign Nukta
(0x00ac1, 0x00ac5,), # Gujarati Vowel Sign U ..Gujarati Vowel Sign Cand
(0x00ac7, 0x00ac8,), # Gujarati Vowel Sign E ..Gujarati Vowel Sign Ai
(0x00acd, 0x00acd,), # Gujarati Sign Virama ..Gujarati Sign Virama
(0x00ae2, 0x00ae3,), # Gujarati Vowel Sign Voca..Gujarati Vowel Sign Voca
(0x00b01, 0x00b01,), # Oriya Sign Candrabindu ..Oriya Sign Candrabindu
(0x00b3c, 0x00b3c,), # Oriya Sign Nukta ..Oriya Sign Nukta
(0x00b3f, 0x00b3f,), # Oriya Vowel Sign I ..Oriya Vowel Sign I
(0x00b41, 0x00b43,), # Oriya Vowel Sign U ..Oriya Vowel Sign Vocalic
(0x00b4d, 0x00b4d,), # Oriya Sign Virama ..Oriya Sign Virama
(0x00b56, 0x00b56,), # Oriya Ai Length Mark ..Oriya Ai Length Mark
(0x00b82, 0x00b82,), # Tamil Sign Anusvara ..Tamil Sign Anusvara
(0x00bc0, 0x00bc0,), # Tamil Vowel Sign Ii ..Tamil Vowel Sign Ii
(0x00bcd, 0x00bcd,), # Tamil Sign Virama ..Tamil Sign Virama
(0x00c3e, 0x00c40,), # Telugu Vowel Sign Aa ..Telugu Vowel Sign Ii
(0x00c46, 0x00c48,), # Telugu Vowel Sign E ..Telugu Vowel Sign Ai
(0x00c4a, 0x00c4d,), # Telugu Vowel Sign O ..Telugu Sign Virama
(0x00c55, 0x00c56,), # Telugu Length Mark ..Telugu Ai Length Mark
(0x00cbc, 0x00cbc,), # Kannada Sign Nukta ..Kannada Sign Nukta
(0x00cbf, 0x00cbf,), # Kannada Vowel Sign I ..Kannada Vowel Sign I
(0x00cc6, 0x00cc6,), # Kannada Vowel Sign E ..Kannada Vowel Sign E
(0x00ccc, 0x00ccd,), # Kannada Vowel Sign Au ..Kannada Sign Virama
(0x00d41, 0x00d43,), # Malayalam Vowel Sign U ..Malayalam Vowel Sign Voc
(0x00d4d, 0x00d4d,), # Malayalam Sign Virama ..Malayalam Sign Virama
(0x00dca, 0x00dca,), # Sinhala Sign Al-lakuna ..Sinhala Sign Al-lakuna
(0x00dd2, 0x00dd4,), # Sinhala Vowel Sign Ketti..Sinhala Vowel Sign Ketti
(0x00dd6, 0x00dd6,), # Sinhala Vowel Sign Diga ..Sinhala Vowel Sign Diga
(0x00e31, 0x00e31,), # Thai Character Mai Han-a..Thai Character Mai Han-a
(0x00e34, 0x00e3a,), # Thai Character Sara I ..Thai Character Phinthu
(0x00e47, 0x00e4e,), # Thai Character Maitaikhu..Thai Character Yamakkan
(0x00eb1, 0x00eb1,), # Lao Vowel Sign Mai Kan ..Lao Vowel Sign Mai Kan
(0x00eb4, 0x00eb9,), # Lao Vowel Sign I ..Lao Vowel Sign Uu
(0x00ebb, 0x00ebc,), # Lao Vowel Sign Mai Kon ..Lao Semivowel Sign Lo
(0x00ec8, 0x00ecd,), # Lao Tone Mai Ek ..Lao Niggahita
(0x00f18, 0x00f19,), # Tibetan Astrological Sig..Tibetan Astrological Sig
(0x00f35, 0x00f35,), # Tibetan Mark Ngas Bzung ..Tibetan Mark Ngas Bzung
(0x00f37, 0x00f37,), # Tibetan Mark Ngas Bzung ..Tibetan Mark Ngas Bzung
(0x00f39, 0x00f39,), # Tibetan Mark Tsa -phru ..Tibetan Mark Tsa -phru
(0x00f71, 0x00f7e,), # Tibetan Vowel Sign Aa ..Tibetan Sign Rjes Su Nga
(0x00f80, 0x00f84,), # Tibetan Vowel Sign Rever..Tibetan Mark Halanta
(0x00f86, 0x00f87,), # Tibetan Sign Lci Rtags ..Tibetan Sign Yang Rtags
(0x00f90, 0x00f97,), # Tibetan Subjoined Letter..Tibetan Subjoined Letter
(0x00f99, 0x00fbc,), # Tibetan Subjoined Letter..Tibetan Subjoined Letter
(0x00fc6, 0x00fc6,), # Tibetan Symbol Padma Gda..Tibetan Symbol Padma Gda
(0x0102d, 0x01030,), # Myanmar Vowel Sign I ..Myanmar Vowel Sign Uu
(0x01032, 0x01032,), # Myanmar Vowel Sign Ai ..Myanmar Vowel Sign Ai
(0x01036, 0x01037,), # Myanmar Sign Anusvara ..Myanmar Sign Dot Below
(0x01039, 0x01039,), # Myanmar Sign Virama ..Myanmar Sign Virama
(0x01058, 0x01059,), # Myanmar Vowel Sign Vocal..Myanmar Vowel Sign Vocal
(0x0135f, 0x0135f,), # Ethiopic Combining Gemin..Ethiopic Combining Gemin
(0x01712, 0x01714,), # Tagalog Vowel Sign I ..Tagalog Sign Virama
(0x01732, 0x01734,), # Hanunoo Vowel Sign I ..Hanunoo Sign Pamudpod
(0x01752, 0x01753,), # Buhid Vowel Sign I ..Buhid Vowel Sign U
(0x01772, 0x01773,), # Tagbanwa Vowel Sign I ..Tagbanwa Vowel Sign U
(0x017b7, 0x017bd,), # Khmer Vowel Sign I ..Khmer Vowel Sign Ua
(0x017c6, 0x017c6,), # Khmer Sign Nikahit ..Khmer Sign Nikahit
(0x017c9, 0x017d3,), # Khmer Sign Muusikatoan ..Khmer Sign Bathamasat
(0x017dd, 0x017dd,), # Khmer Sign Atthacan ..Khmer Sign Atthacan
(0x0180b, 0x0180d,), # Mongolian Free Variation..Mongolian Free Variation
(0x018a9, 0x018a9,), # Mongolian Letter Ali Gal..Mongolian Letter Ali Gal
(0x01920, 0x01922,), # Limbu Vowel Sign A ..Limbu Vowel Sign U
(0x01927, 0x01928,), # Limbu Vowel Sign E ..Limbu Vowel Sign O
(0x01932, 0x01932,), # Limbu Small Letter Anusv..Limbu Small Letter Anusv
(0x01939, 0x0193b,), # Limbu Sign Mukphreng ..Limbu Sign Sa-i
(0x01a17, 0x01a18,), # Buginese Vowel Sign I ..Buginese Vowel Sign U
(0x01dc0, 0x01dc3,), # Combining Dotted Grave A..Combining Suspension Mar
(0x020d0, 0x020eb,), # Combining Left Harpoon A..Combining Long Double So
(0x0302a, 0x0302f,), # Ideographic Level Tone M..Hangul Double Dot Tone M
(0x03099, 0x0309a,), # Combining Katakana-hirag..Combining Katakana-hirag
(0x0a806, 0x0a806,), # Syloti Nagri Sign Hasant..Syloti Nagri Sign Hasant
(0x0a80b, 0x0a80b,), # Syloti Nagri Sign Anusva..Syloti Nagri Sign Anusva
(0x0a825, 0x0a826,), # Syloti Nagri Vowel Sign ..Syloti Nagri Vowel Sign
(0x0fb1e, 0x0fb1e,), # Hebrew Point Judeo-spani..Hebrew Point Judeo-spani
(0x0fe00, 0x0fe0f,), # Variation Selector-1 ..Variation Selector-16
(0x0fe20, 0x0fe23,), # Combining Ligature Left ..Combining Double Tilde R
(0x10a01, 0x10a03,), # Kharoshthi Vowel Sign I ..Kharoshthi Vowel Sign Vo
(0x10a05, 0x10a06,), # Kharoshthi Vowel Sign E ..Kharoshthi Vowel Sign O
(0x10a0c, 0x10a0f,), # Kharoshthi Vowel Length ..Kharoshthi Sign Visarga
(0x10a38, 0x10a3a,), # Kharoshthi Sign Bar Abov..Kharoshthi Sign Dot Belo
(0x10a3f, 0x10a3f,), # Kharoshthi Virama ..Kharoshthi Virama
(0x1d167, 0x1d169,), # Musical Symbol Combining..Musical Symbol Combining
(0x1d17b, 0x1d182,), # Musical Symbol Combining..Musical Symbol Combining
(0x1d185, 0x1d18b,), # Musical Symbol Combining..Musical Symbol Combining
(0x1d1aa, 0x1d1ad,), # Musical Symbol Combining..Musical Symbol Combining
(0x1d242, 0x1d244,), # Combining Greek Musical ..Combining Greek Musical
(0xe0100, 0xe01ef,), # Variation Selector-17 ..Variation Selector-256
),
'5.0.0': (
# Source: DerivedGeneralCategory-5.0.0.txt
# Date: 2006-02-27, 23:41:27 GMT [MD]
#
(0x00300, 0x0036f,), # Combining Grave Accent ..Combining Latin Small Le
(0x00483, 0x00486,), # Combining Cyrillic Titlo..Combining Cyrillic Psili
(0x00488, 0x00489,), # Combining Cyrillic Hundr..Combining Cyrillic Milli
(0x00591, 0x005bd,), # Hebrew Accent Etnahta ..Hebrew Point Meteg
(0x005bf, 0x005bf,), # Hebrew Point Rafe ..Hebrew Point Rafe
(0x005c1, 0x005c2,), # Hebrew Point Shin Dot ..Hebrew Point Sin Dot
(0x005c4, 0x005c5,), # Hebrew Mark Upper Dot ..Hebrew Mark Lower Dot
(0x005c7, 0x005c7,), # Hebrew Point Qamats Qata..Hebrew Point Qamats Qata
(0x00610, 0x00615,), # Arabic Sign Sallallahou ..Arabic Small High Tah
(0x0064b, 0x0065e,), # Arabic Fathatan ..Arabic Fatha With Two Do
(0x00670, 0x00670,), # Arabic Letter Superscrip..Arabic Letter Superscrip
(0x006d6, 0x006dc,), # Arabic Small High Ligatu..Arabic Small High Seen
(0x006de, 0x006e4,), # Arabic Start Of Rub El H..Arabic Small High Madda
(0x006e7, 0x006e8,), # Arabic Small High Yeh ..Arabic Small High Noon
(0x006ea, 0x006ed,), # Arabic Empty Centre Low ..Arabic Small Low Meem
(0x00711, 0x00711,), # Syriac Letter Superscrip..Syriac Letter Superscrip
(0x00730, 0x0074a,), # Syriac Pthaha Above ..Syriac Barrekh
(0x007a6, 0x007b0,), # Thaana Abafili ..Thaana Sukun
(0x007eb, 0x007f3,), # Nko Combining Short High..Nko Combining Double Dot
(0x00901, 0x00902,), # Devanagari Sign Candrabi..Devanagari Sign Anusvara
(0x0093c, 0x0093c,), # Devanagari Sign Nukta ..Devanagari Sign Nukta
(0x00941, 0x00948,), # Devanagari Vowel Sign U ..Devanagari Vowel Sign Ai
(0x0094d, 0x0094d,), # Devanagari Sign Virama ..Devanagari Sign Virama
(0x00951, 0x00954,), # Devanagari Stress Sign U..Devanagari Acute Accent
(0x00962, 0x00963,), # Devanagari Vowel Sign Vo..Devanagari Vowel Sign Vo
(0x00981, 0x00981,), # Bengali Sign Candrabindu..Bengali Sign Candrabindu
(0x009bc, 0x009bc,), # Bengali Sign Nukta ..Bengali Sign Nukta
(0x009c1, 0x009c4,), # Bengali Vowel Sign U ..Bengali Vowel Sign Vocal
(0x009cd, 0x009cd,), # Bengali Sign Virama ..Bengali Sign Virama
(0x009e2, 0x009e3,), # Bengali Vowel Sign Vocal..Bengali Vowel Sign Vocal
(0x00a01, 0x00a02,), # Gurmukhi Sign Adak Bindi..Gurmukhi Sign Bindi
(0x00a3c, 0x00a3c,), # Gurmukhi Sign Nukta ..Gurmukhi Sign Nukta
(0x00a41, 0x00a42,), # Gurmukhi Vowel Sign U ..Gurmukhi Vowel Sign Uu
(0x00a47, 0x00a48,), # Gurmukhi Vowel Sign Ee ..Gurmukhi Vowel Sign Ai
(0x00a4b, 0x00a4d,), # Gurmukhi Vowel Sign Oo ..Gurmukhi Sign Virama
(0x00a70, 0x00a71,), # Gurmukhi Tippi ..Gurmukhi Addak
(0x00a81, 0x00a82,), # Gujarati Sign Candrabind..Gujarati Sign Anusvara
(0x00abc, 0x00abc,), # Gujarati Sign Nukta ..Gujarati Sign Nukta
(0x00ac1, 0x00ac5,), # Gujarati Vowel Sign U ..Gujarati Vowel Sign Cand
(0x00ac7, 0x00ac8,), # Gujarati Vowel Sign E ..Gujarati Vowel Sign Ai
(0x00acd, 0x00acd,), # Gujarati Sign Virama ..Gujarati Sign Virama
(0x00ae2, 0x00ae3,), # Gujarati Vowel Sign Voca..Gujarati Vowel Sign Voca
(0x00b01, 0x00b01,), # Oriya Sign Candrabindu ..Oriya Sign Candrabindu
(0x00b3c, 0x00b3c,), # Oriya Sign Nukta ..Oriya Sign Nukta
(0x00b3f, 0x00b3f,), # Oriya Vowel Sign I ..Oriya Vowel Sign I
(0x00b41, 0x00b43,), # Oriya Vowel Sign U ..Oriya Vowel Sign Vocalic
(0x00b4d, 0x00b4d,), # Oriya Sign Virama ..Oriya Sign Virama
(0x00b56, 0x00b56,), # Oriya Ai Length Mark ..Oriya Ai Length Mark
(0x00b82, 0x00b82,), # Tamil Sign Anusvara ..Tamil Sign Anusvara
(0x00bc0, 0x00bc0,), # Tamil Vowel Sign Ii ..Tamil Vowel Sign Ii
(0x00bcd, 0x00bcd,), # Tamil Sign Virama ..Tamil Sign Virama
(0x00c3e, 0x00c40,), # Telugu Vowel Sign Aa ..Telugu Vowel Sign Ii
(0x00c46, 0x00c48,), # Telugu Vowel Sign E ..Telugu Vowel Sign Ai
(0x00c4a, 0x00c4d,), # Telugu Vowel Sign O ..Telugu Sign Virama
(0x00c55, 0x00c56,), # Telugu Length Mark ..Telugu Ai Length Mark
(0x00cbc, 0x00cbc,), # Kannada Sign Nukta ..Kannada Sign Nukta
(0x00cbf, 0x00cbf,), # Kannada Vowel Sign I ..Kannada Vowel Sign I
(0x00cc6, 0x00cc6,), # Kannada Vowel Sign E ..Kannada Vowel Sign E
(0x00ccc, 0x00ccd,), # Kannada Vowel Sign Au ..Kannada Sign Virama
(0x00ce2, 0x00ce3,), # Kannada Vowel Sign Vocal..Kannada Vowel Sign Vocal
(0x00d41, 0x00d43,), # Malayalam Vowel Sign U ..Malayalam Vowel Sign Voc
(0x00d4d, 0x00d4d,), # Malayalam Sign Virama ..Malayalam Sign Virama
(0x00dca, 0x00dca,), # Sinhala Sign Al-lakuna ..Sinhala Sign Al-lakuna
(0x00dd2, 0x00dd4,), # Sinhala Vowel Sign Ketti..Sinhala Vowel Sign Ketti
(0x00dd6, 0x00dd6,), # Sinhala Vowel Sign Diga ..Sinhala Vowel Sign Diga
(0x00e31, 0x00e31,), # Thai Character Mai Han-a..Thai Character Mai Han-a
(0x00e34, 0x00e3a,), # Thai Character Sara I ..Thai Character Phinthu
(0x00e47, 0x00e4e,), # Thai Character Maitaikhu..Thai Character Yamakkan
(0x00eb1, 0x00eb1,), # Lao Vowel Sign Mai Kan ..Lao Vowel Sign Mai Kan
(0x00eb4, 0x00eb9,), # Lao Vowel Sign I ..Lao Vowel Sign Uu
(0x00ebb, 0x00ebc,), # Lao Vowel Sign Mai Kon ..Lao Semivowel Sign Lo
(0x00ec8, 0x00ecd,), # Lao Tone Mai Ek ..Lao Niggahita
(0x00f18, 0x00f19,), # Tibetan Astrological Sig..Tibetan Astrological Sig
(0x00f35, 0x00f35,), # Tibetan Mark Ngas Bzung ..Tibetan Mark Ngas Bzung
(0x00f37, 0x00f37,), # Tibetan Mark Ngas Bzung ..Tibetan Mark Ngas Bzung
(0x00f39, 0x00f39,), # Tibetan Mark Tsa -phru ..Tibetan Mark Tsa -phru
(0x00f71, 0x00f7e,), # Tibetan Vowel Sign Aa ..Tibetan Sign Rjes Su Nga
(0x00f80, 0x00f84,), # Tibetan Vowel Sign Rever..Tibetan Mark Halanta
(0x00f86, 0x00f87,), # Tibetan Sign Lci Rtags ..Tibetan Sign Yang Rtags
(0x00f90, 0x00f97,), # Tibetan Subjoined Letter..Tibetan Subjoined Letter
(0x00f99, 0x00fbc,), # Tibetan Subjoined Letter..Tibetan Subjoined Letter
(0x00fc6, 0x00fc6,), # Tibetan Symbol Padma Gda..Tibetan Symbol Padma Gda
(0x0102d, 0x01030,), # Myanmar Vowel Sign I ..Myanmar Vowel Sign Uu
(0x01032, 0x01032,), # Myanmar Vowel Sign Ai ..Myanmar Vowel Sign Ai
(0x01036, 0x01037,), # Myanmar Sign Anusvara ..Myanmar Sign Dot Below
(0x01039, 0x01039,), # Myanmar Sign Virama ..Myanmar Sign Virama
(0x01058, 0x01059,), # Myanmar Vowel Sign Vocal..Myanmar Vowel Sign Vocal
(0x0135f, 0x0135f,), # Ethiopic Combining Gemin..Ethiopic Combining Gemin
(0x01712, 0x01714,), # Tagalog Vowel Sign I ..Tagalog Sign Virama
(0x01732, 0x01734,), # Hanunoo Vowel Sign I ..Hanunoo Sign Pamudpod
(0x01752, 0x01753,), # Buhid Vowel Sign I ..Buhid Vowel Sign U
(0x01772, 0x01773,), # Tagbanwa Vowel Sign I ..Tagbanwa Vowel Sign U
(0x017b7, 0x017bd,), # Khmer Vowel Sign I ..Khmer Vowel Sign Ua
(0x017c6, 0x017c6,), # Khmer Sign Nikahit ..Khmer Sign Nikahit
(0x017c9, 0x017d3,), # Khmer Sign Muusikatoan ..Khmer Sign Bathamasat
(0x017dd, 0x017dd,), # Khmer Sign Atthacan ..Khmer Sign Atthacan
(0x0180b, 0x0180d,), # Mongolian Free Variation..Mongolian Free Variation
(0x018a9, 0x018a9,), # Mongolian Letter Ali Gal..Mongolian Letter Ali Gal
(0x01920, 0x01922,), # Limbu Vowel Sign A ..Limbu Vowel Sign U
(0x01927, 0x01928,), # Limbu Vowel Sign E ..Limbu Vowel Sign O
(0x01932, 0x01932,), # Limbu Small Letter Anusv..Limbu Small Letter Anusv
(0x01939, 0x0193b,), # Limbu Sign Mukphreng ..Limbu Sign Sa-i
(0x01a17, 0x01a18,), # Buginese Vowel Sign I ..Buginese Vowel Sign U
(0x01b00, 0x01b03,), # Balinese Sign Ulu Ricem ..Balinese Sign Surang
(0x01b34, 0x01b34,), # Balinese Sign Rerekan ..Balinese Sign Rerekan
(0x01b36, 0x01b3a,), # Balinese Vowel Sign Ulu ..Balinese Vowel Sign Ra R
(0x01b3c, 0x01b3c,), # Balinese Vowel Sign La L..Balinese Vowel Sign La L
(0x01b42, 0x01b42,), # Balinese Vowel Sign Pepe..Balinese Vowel Sign Pepe
(0x01b6b, 0x01b73,), # Balinese Musical Symbol ..Balinese Musical Symbol
(0x01dc0, 0x01dca,), # Combining Dotted Grave A..Combining Latin Small Le
(0x01dfe, 0x01dff,), # Combining Left Arrowhead..Combining Right Arrowhea
(0x020d0, 0x020ef,), # Combining Left Harpoon A..Combining Right Arrow Be
(0x0302a, 0x0302f,), # Ideographic Level Tone M..Hangul Double Dot Tone M
(0x03099, 0x0309a,), # Combining Katakana-hirag..Combining Katakana-hirag
(0x0a806, 0x0a806,), # Syloti Nagri Sign Hasant..Syloti Nagri Sign Hasant
(0x0a80b, 0x0a80b,), # Syloti Nagri Sign Anusva..Syloti Nagri Sign Anusva
(0x0a825, 0x0a826,), # Syloti Nagri Vowel Sign ..Syloti Nagri Vowel Sign
(0x0fb1e, 0x0fb1e,), # Hebrew Point Judeo-spani..Hebrew Point Judeo-spani
(0x0fe00, 0x0fe0f,), # Variation Selector-1 ..Variation Selector-16
(0x0fe20, 0x0fe23,), # Combining Ligature Left ..Combining Double Tilde R
(0x10a01, 0x10a03,), # Kharoshthi Vowel Sign I ..Kharoshthi Vowel Sign Vo
(0x10a05, 0x10a06,), # Kharoshthi Vowel Sign E ..Kharoshthi Vowel Sign O
(0x10a0c, 0x10a0f,), # Kharoshthi Vowel Length ..Kharoshthi Sign Visarga
(0x10a38, 0x10a3a,), # Kharoshthi Sign Bar Abov..Kharoshthi Sign Dot Belo
(0x10a3f, 0x10a3f,), # Kharoshthi Virama ..Kharoshthi Virama
(0x1d167, 0x1d169,), # Musical Symbol Combining..Musical Symbol Combining
(0x1d17b, 0x1d182,), # Musical Symbol Combining..Musical Symbol Combining
(0x1d185, 0x1d18b,), # Musical Symbol Combining..Musical Symbol Combining
(0x1d1aa, 0x1d1ad,), # Musical Symbol Combining..Musical Symbol Combining
(0x1d242, 0x1d244,), # Combining Greek Musical ..Combining Greek Musical
(0xe0100, 0xe01ef,), # Variation Selector-17 ..Variation Selector-256
),
'5.1.0': (
# Source: DerivedGeneralCategory-5.1.0.txt
# Date: 2008-03-20, 17:54:57 GMT [MD]
#
(0x00300, 0x0036f,), # Combining Grave Accent ..Combining Latin Small Le
(0x00483, 0x00489,), # Combining Cyrillic Titlo..Combining Cyrillic Milli
(0x00591, 0x005bd,), # Hebrew Accent Etnahta ..Hebrew Point Meteg
(0x005bf, 0x005bf,), # Hebrew Point Rafe ..Hebrew Point Rafe
(0x005c1, 0x005c2,), # Hebrew Point Shin Dot ..Hebrew Point Sin Dot
(0x005c4, 0x005c5,), # Hebrew Mark Upper Dot ..Hebrew Mark Lower Dot
(0x005c7, 0x005c7,), # Hebrew Point Qamats Qata..Hebrew Point Qamats Qata
(0x00610, 0x0061a,), # Arabic Sign Sallallahou ..Arabic Small Kasra
(0x0064b, 0x0065e,), # Arabic Fathatan ..Arabic Fatha With Two Do
(0x00670, 0x00670,), # Arabic Letter Superscrip..Arabic Letter Superscrip
(0x006d6, 0x006dc,), # Arabic Small High Ligatu..Arabic Small High Seen
(0x006de, 0x006e4,), # Arabic Start Of Rub El H..Arabic Small High Madda
(0x006e7, 0x006e8,), # Arabic Small High Yeh ..Arabic Small High Noon
(0x006ea, 0x006ed,), # Arabic Empty Centre Low ..Arabic Small Low Meem
(0x00711, 0x00711,), # Syriac Letter Superscrip..Syriac Letter Superscrip
(0x00730, 0x0074a,), # Syriac Pthaha Above ..Syriac Barrekh
(0x007a6, 0x007b0,), # Thaana Abafili ..Thaana Sukun
(0x007eb, 0x007f3,), # Nko Combining Short High..Nko Combining Double Dot
(0x00901, 0x00902,), # Devanagari Sign Candrabi..Devanagari Sign Anusvara
(0x0093c, 0x0093c,), # Devanagari Sign Nukta ..Devanagari Sign Nukta
(0x00941, 0x00948,), # Devanagari Vowel Sign U ..Devanagari Vowel Sign Ai
(0x0094d, 0x0094d,), # Devanagari Sign Virama ..Devanagari Sign Virama
(0x00951, 0x00954,), # Devanagari Stress Sign U..Devanagari Acute Accent
(0x00962, 0x00963,), # Devanagari Vowel Sign Vo..Devanagari Vowel Sign Vo
(0x00981, 0x00981,), # Bengali Sign Candrabindu..Bengali Sign Candrabindu
(0x009bc, 0x009bc,), # Bengali Sign Nukta ..Bengali Sign Nukta
(0x009c1, 0x009c4,), # Bengali Vowel Sign U ..Bengali Vowel Sign Vocal
(0x009cd, 0x009cd,), # Bengali Sign Virama ..Bengali Sign Virama
(0x009e2, 0x009e3,), # Bengali Vowel Sign Vocal..Bengali Vowel Sign Vocal
(0x00a01, 0x00a02,), # Gurmukhi Sign Adak Bindi..Gurmukhi Sign Bindi
(0x00a3c, 0x00a3c,), # Gurmukhi Sign Nukta ..Gurmukhi Sign Nukta
(0x00a41, 0x00a42,), # Gurmukhi Vowel Sign U ..Gurmukhi Vowel Sign Uu
(0x00a47, 0x00a48,), # Gurmukhi Vowel Sign Ee ..Gurmukhi Vowel Sign Ai
(0x00a4b, 0x00a4d,), # Gurmukhi Vowel Sign Oo ..Gurmukhi Sign Virama
(0x00a51, 0x00a51,), # Gurmukhi Sign Udaat ..Gurmukhi Sign Udaat
(0x00a70, 0x00a71,), # Gurmukhi Tippi ..Gurmukhi Addak
(0x00a75, 0x00a75,), # Gurmukhi Sign Yakash ..Gurmukhi Sign Yakash
(0x00a81, 0x00a82,), # Gujarati Sign Candrabind..Gujarati Sign Anusvara
(0x00abc, 0x00abc,), # Gujarati Sign Nukta ..Gujarati Sign Nukta
(0x00ac1, 0x00ac5,), # Gujarati Vowel Sign U ..Gujarati Vowel Sign Cand
(0x00ac7, 0x00ac8,), # Gujarati Vowel Sign E ..Gujarati Vowel Sign Ai
(0x00acd, 0x00acd,), # Gujarati Sign Virama ..Gujarati Sign Virama
(0x00ae2, 0x00ae3,), # Gujarati Vowel Sign Voca..Gujarati Vowel Sign Voca
(0x00b01, 0x00b01,), # Oriya Sign Candrabindu ..Oriya Sign Candrabindu
(0x00b3c, 0x00b3c,), # Oriya Sign Nukta ..Oriya Sign Nukta
(0x00b3f, 0x00b3f,), # Oriya Vowel Sign I ..Oriya Vowel Sign I
(0x00b41, 0x00b44,), # Oriya Vowel Sign U ..Oriya Vowel Sign Vocalic
(0x00b4d, 0x00b4d,), # Oriya Sign Virama ..Oriya Sign Virama
(0x00b56, 0x00b56,), # Oriya Ai Length Mark ..Oriya Ai Length Mark
(0x00b62, 0x00b63,), # Oriya Vowel Sign Vocalic..Oriya Vowel Sign Vocalic
(0x00b82, 0x00b82,), # Tamil Sign Anusvara ..Tamil Sign Anusvara
(0x00bc0, 0x00bc0,), # Tamil Vowel Sign Ii ..Tamil Vowel Sign Ii
(0x00bcd, 0x00bcd,), # Tamil Sign Virama ..Tamil Sign Virama
(0x00c3e, 0x00c40,), # Telugu Vowel Sign Aa ..Telugu Vowel Sign Ii
(0x00c46, 0x00c48,), # Telugu Vowel Sign E ..Telugu Vowel Sign Ai
(0x00c4a, 0x00c4d,), # Telugu Vowel Sign O ..Telugu Sign Virama
(0x00c55, 0x00c56,), # Telugu Length Mark ..Telugu Ai Length Mark
(0x00c62, 0x00c63,), # Telugu Vowel Sign Vocali..Telugu Vowel Sign Vocali
(0x00cbc, 0x00cbc,), # Kannada Sign Nukta ..Kannada Sign Nukta
(0x00cbf, 0x00cbf,), # Kannada Vowel Sign I ..Kannada Vowel Sign I
(0x00cc6, 0x00cc6,), # Kannada Vowel Sign E ..Kannada Vowel Sign E
(0x00ccc, 0x00ccd,), # Kannada Vowel Sign Au ..Kannada Sign Virama
(0x00ce2, 0x00ce3,), # Kannada Vowel Sign Vocal..Kannada Vowel Sign Vocal
(0x00d41, 0x00d44,), # Malayalam Vowel Sign U ..Malayalam Vowel Sign Voc
(0x00d4d, 0x00d4d,), # Malayalam Sign Virama ..Malayalam Sign Virama
(0x00d62, 0x00d63,), # Malayalam Vowel Sign Voc..Malayalam Vowel Sign Voc
(0x00dca, 0x00dca,), # Sinhala Sign Al-lakuna ..Sinhala Sign Al-lakuna
(0x00dd2, 0x00dd4,), # Sinhala Vowel Sign Ketti..Sinhala Vowel Sign Ketti
(0x00dd6, 0x00dd6,), # Sinhala Vowel Sign Diga ..Sinhala Vowel Sign Diga
(0x00e31, 0x00e31,), # Thai Character Mai Han-a..Thai Character Mai Han-a
(0x00e34, 0x00e3a,), # Thai Character Sara I ..Thai Character Phinthu
(0x00e47, 0x00e4e,), # Thai Character Maitaikhu..Thai Character Yamakkan
(0x00eb1, 0x00eb1,), # Lao Vowel Sign Mai Kan ..Lao Vowel Sign Mai Kan
(0x00eb4, 0x00eb9,), # Lao Vowel Sign I ..Lao Vowel Sign Uu
(0x00ebb, 0x00ebc,), # Lao Vowel Sign Mai Kon ..Lao Semivowel Sign Lo
(0x00ec8, 0x00ecd,), # Lao Tone Mai Ek ..Lao Niggahita
(0x00f18, 0x00f19,), # Tibetan Astrological Sig..Tibetan Astrological Sig
(0x00f35, 0x00f35,), # Tibetan Mark Ngas Bzung ..Tibetan Mark Ngas Bzung
(0x00f37, 0x00f37,), # Tibetan Mark Ngas Bzung ..Tibetan Mark Ngas Bzung
(0x00f39, 0x00f39,), # Tibetan Mark Tsa -phru ..Tibetan Mark Tsa -phru
(0x00f71, 0x00f7e,), # Tibetan Vowel Sign Aa ..Tibetan Sign Rjes Su Nga
(0x00f80, 0x00f84,), # Tibetan Vowel Sign Rever..Tibetan Mark Halanta
(0x00f86, 0x00f87,), # Tibetan Sign Lci Rtags ..Tibetan Sign Yang Rtags
(0x00f90, 0x00f97,), # Tibetan Subjoined Letter..Tibetan Subjoined Letter
(0x00f99, 0x00fbc,), # Tibetan Subjoined Letter..Tibetan Subjoined Letter
(0x00fc6, 0x00fc6,), # Tibetan Symbol Padma Gda..Tibetan Symbol Padma Gda
(0x0102d, 0x01030,), # Myanmar Vowel Sign I ..Myanmar Vowel Sign Uu
(0x01032, 0x01037,), # Myanmar Vowel Sign Ai ..Myanmar Sign Dot Below
(0x01039, 0x0103a,), # Myanmar Sign Virama ..Myanmar Sign Asat
(0x0103d, 0x0103e,), # Myanmar Consonant Sign M..Myanmar Consonant Sign M
(0x01058, 0x01059,), # Myanmar Vowel Sign Vocal..Myanmar Vowel Sign Vocal
(0x0105e, 0x01060,), # Myanmar Consonant Sign M..Myanmar Consonant Sign M
(0x01071, 0x01074,), # Myanmar Vowel Sign Geba ..Myanmar Vowel Sign Kayah
(0x01082, 0x01082,), # Myanmar Consonant Sign S..Myanmar Consonant Sign S
(0x01085, 0x01086,), # Myanmar Vowel Sign Shan ..Myanmar Vowel Sign Shan
(0x0108d, 0x0108d,), # Myanmar Sign Shan Counci..Myanmar Sign Shan Counci
(0x0135f, 0x0135f,), # Ethiopic Combining Gemin..Ethiopic Combining Gemin
(0x01712, 0x01714,), # Tagalog Vowel Sign I ..Tagalog Sign Virama
(0x01732, 0x01734,), # Hanunoo Vowel Sign I ..Hanunoo Sign Pamudpod
(0x01752, 0x01753,), # Buhid Vowel Sign I ..Buhid Vowel Sign U
(0x01772, 0x01773,), # Tagbanwa Vowel Sign I ..Tagbanwa Vowel Sign U
(0x017b7, 0x017bd,), # Khmer Vowel Sign I ..Khmer Vowel Sign Ua
(0x017c6, 0x017c6,), # Khmer Sign Nikahit ..Khmer Sign Nikahit
(0x017c9, 0x017d3,), # Khmer Sign Muusikatoan ..Khmer Sign Bathamasat
(0x017dd, 0x017dd,), # Khmer Sign Atthacan ..Khmer Sign Atthacan
(0x0180b, 0x0180d,), # Mongolian Free Variation..Mongolian Free Variation
(0x018a9, 0x018a9,), # Mongolian Letter Ali Gal..Mongolian Letter Ali Gal
(0x01920, 0x01922,), # Limbu Vowel Sign A ..Limbu Vowel Sign U
(0x01927, 0x01928,), # Limbu Vowel Sign E ..Limbu Vowel Sign O
(0x01932, 0x01932,), # Limbu Small Letter Anusv..Limbu Small Letter Anusv
(0x01939, 0x0193b,), # Limbu Sign Mukphreng ..Limbu Sign Sa-i
(0x01a17, 0x01a18,), # Buginese Vowel Sign I ..Buginese Vowel Sign U
(0x01b00, 0x01b03,), # Balinese Sign Ulu Ricem ..Balinese Sign Surang
(0x01b34, 0x01b34,), # Balinese Sign Rerekan ..Balinese Sign Rerekan
(0x01b36, 0x01b3a,), # Balinese Vowel Sign Ulu ..Balinese Vowel Sign Ra R
(0x01b3c, 0x01b3c,), # Balinese Vowel Sign La L..Balinese Vowel Sign La L
(0x01b42, 0x01b42,), # Balinese Vowel Sign Pepe..Balinese Vowel Sign Pepe
(0x01b6b, 0x01b73,), # Balinese Musical Symbol ..Balinese Musical Symbol
(0x01b80, 0x01b81,), # Sundanese Sign Panyecek ..Sundanese Sign Panglayar
(0x01ba2, 0x01ba5,), # Sundanese Consonant Sign..Sundanese Vowel Sign Pan
(0x01ba8, 0x01ba9,), # Sundanese Vowel Sign Pam..Sundanese Vowel Sign Pan
(0x01c2c, 0x01c33,), # Lepcha Vowel Sign E ..Lepcha Consonant Sign T
(0x01c36, 0x01c37,), # Lepcha Sign Ran ..Lepcha Sign Nukta
(0x01dc0, 0x01de6,), # Combining Dotted Grave A..Combining Latin Small Le
(0x01dfe, 0x01dff,), # Combining Left Arrowhead..Combining Right Arrowhea
(0x020d0, 0x020f0,), # Combining Left Harpoon A..Combining Asterisk Above
(0x02de0, 0x02dff,), # Combining Cyrillic Lette..Combining Cyrillic Lette
(0x0302a, 0x0302f,), # Ideographic Level Tone M..Hangul Double Dot Tone M
(0x03099, 0x0309a,), # Combining Katakana-hirag..Combining Katakana-hirag
(0x0a66f, 0x0a672,), # Combining Cyrillic Vzmet..Combining Cyrillic Thous
(0x0a67c, 0x0a67d,), # Combining Cyrillic Kavyk..Combining Cyrillic Payer
(0x0a802, 0x0a802,), # Syloti Nagri Sign Dvisva..Syloti Nagri Sign Dvisva
(0x0a806, 0x0a806,), # Syloti Nagri Sign Hasant..Syloti Nagri Sign Hasant
(0x0a80b, 0x0a80b,), # Syloti Nagri Sign Anusva..Syloti Nagri Sign Anusva
(0x0a825, 0x0a826,), # Syloti Nagri Vowel Sign ..Syloti Nagri Vowel Sign
(0x0a8c4, 0x0a8c4,), # Saurashtra Sign Virama ..Saurashtra Sign Virama
(0x0a926, 0x0a92d,), # Kayah Li Vowel Ue ..Kayah Li Tone Calya Plop
(0x0a947, 0x0a951,), # Rejang Vowel Sign I ..Rejang Consonant Sign R
(0x0aa29, 0x0aa2e,), # Cham Vowel Sign Aa ..Cham Vowel Sign Oe
(0x0aa31, 0x0aa32,), # Cham Vowel Sign Au ..Cham Vowel Sign Ue
(0x0aa35, 0x0aa36,), # Cham Consonant Sign La ..Cham Consonant Sign Wa
(0x0aa43, 0x0aa43,), # Cham Consonant Sign Fina..Cham Consonant Sign Fina
(0x0aa4c, 0x0aa4c,), # Cham Consonant Sign Fina..Cham Consonant Sign Fina
(0x0fb1e, 0x0fb1e,), # Hebrew Point Judeo-spani..Hebrew Point Judeo-spani
(0x0fe00, 0x0fe0f,), # Variation Selector-1 ..Variation Selector-16
(0x0fe20, 0x0fe26,), # Combining Ligature Left ..Combining Conjoining Mac
(0x101fd, 0x101fd,), # Phaistos Disc Sign Combi..Phaistos Disc Sign Combi
(0x10a01, 0x10a03,), # Kharoshthi Vowel Sign I ..Kharoshthi Vowel Sign Vo
(0x10a05, 0x10a06,), # Kharoshthi Vowel Sign E ..Kharoshthi Vowel Sign O
(0x10a0c, 0x10a0f,), # Kharoshthi Vowel Length ..Kharoshthi Sign Visarga
(0x10a38, 0x10a3a,), # Kharoshthi Sign Bar Abov..Kharoshthi Sign Dot Belo
(0x10a3f, 0x10a3f,), # Kharoshthi Virama ..Kharoshthi Virama
(0x1d167, 0x1d169,), # Musical Symbol Combining..Musical Symbol Combining
(0x1d17b, 0x1d182,), # Musical Symbol Combining..Musical Symbol Combining
(0x1d185, 0x1d18b,), # Musical Symbol Combining..Musical Symbol Combining
(0x1d1aa, 0x1d1ad,), # Musical Symbol Combining..Musical Symbol Combining
(0x1d242, 0x1d244,), # Combining Greek Musical ..Combining Greek Musical
(0xe0100, 0xe01ef,), # Variation Selector-17 ..Variation Selector-256
),
'5.2.0': (
# Source: DerivedGeneralCategory-5.2.0.txt
# Date: 2009-08-22, 04:58:21 GMT [MD]
#
(0x00300, 0x0036f,), # Combining Grave Accent ..Combining Latin Small Le
(0x00483, 0x00489,), # Combining Cyrillic Titlo..Combining Cyrillic Milli
(0x00591, 0x005bd,), # Hebrew Accent Etnahta ..Hebrew Point Meteg
(0x005bf, 0x005bf,), # Hebrew Point Rafe ..Hebrew Point Rafe
(0x005c1, 0x005c2,), # Hebrew Point Shin Dot ..Hebrew Point Sin Dot
(0x005c4, 0x005c5,), # Hebrew Mark Upper Dot ..Hebrew Mark Lower Dot
(0x005c7, 0x005c7,), # Hebrew Point Qamats Qata..Hebrew Point Qamats Qata
(0x00610, 0x0061a,), # Arabic Sign Sallallahou ..Arabic Small Kasra
(0x0064b, 0x0065e,), # Arabic Fathatan ..Arabic Fatha With Two Do
(0x00670, 0x00670,), # Arabic Letter Superscrip..Arabic Letter Superscrip
(0x006d6, 0x006dc,), # Arabic Small High Ligatu..Arabic Small High Seen
(0x006de, 0x006e4,), # Arabic Start Of Rub El H..Arabic Small High Madda
(0x006e7, 0x006e8,), # Arabic Small High Yeh ..Arabic Small High Noon
(0x006ea, 0x006ed,), # Arabic Empty Centre Low ..Arabic Small Low Meem
(0x00711, 0x00711,), # Syriac Letter Superscrip..Syriac Letter Superscrip
(0x00730, 0x0074a,), # Syriac Pthaha Above ..Syriac Barrekh
(0x007a6, 0x007b0,), # Thaana Abafili ..Thaana Sukun
(0x007eb, 0x007f3,), # Nko Combining Short High..Nko Combining Double Dot
(0x00816, 0x00819,), # Samaritan Mark In ..Samaritan Mark Dagesh
(0x0081b, 0x00823,), # Samaritan Mark Epentheti..Samaritan Vowel Sign A
(0x00825, 0x00827,), # Samaritan Vowel Sign Sho..Samaritan Vowel Sign U
(0x00829, 0x0082d,), # Samaritan Vowel Sign Lon..Samaritan Mark Nequdaa
(0x00900, 0x00902,), # Devanagari Sign Inverted..Devanagari Sign Anusvara
(0x0093c, 0x0093c,), # Devanagari Sign Nukta ..Devanagari Sign Nukta
(0x00941, 0x00948,), # Devanagari Vowel Sign U ..Devanagari Vowel Sign Ai
(0x0094d, 0x0094d,), # Devanagari Sign Virama ..Devanagari Sign Virama
(0x00951, 0x00955,), # Devanagari Stress Sign U..Devanagari Vowel Sign Ca
(0x00962, 0x00963,), # Devanagari Vowel Sign Vo..Devanagari Vowel Sign Vo
(0x00981, 0x00981,), # Bengali Sign Candrabindu..Bengali Sign Candrabindu
(0x009bc, 0x009bc,), # Bengali Sign Nukta ..Bengali Sign Nukta
(0x009c1, 0x009c4,), # Bengali Vowel Sign U ..Bengali Vowel Sign Vocal
(0x009cd, 0x009cd,), # Bengali Sign Virama ..Bengali Sign Virama
(0x009e2, 0x009e3,), # Bengali Vowel Sign Vocal..Bengali Vowel Sign Vocal
(0x00a01, 0x00a02,), # Gurmukhi Sign Adak Bindi..Gurmukhi Sign Bindi
(0x00a3c, 0x00a3c,), # Gurmukhi Sign Nukta ..Gurmukhi Sign Nukta
(0x00a41, 0x00a42,), # Gurmukhi Vowel Sign U ..Gurmukhi Vowel Sign Uu
(0x00a47, 0x00a48,), # Gurmukhi Vowel Sign Ee ..Gurmukhi Vowel Sign Ai
(0x00a4b, 0x00a4d,), # Gurmukhi Vowel Sign Oo ..Gurmukhi Sign Virama
(0x00a51, 0x00a51,), # Gurmukhi Sign Udaat ..Gurmukhi Sign Udaat
(0x00a70, 0x00a71,), # Gurmukhi Tippi ..Gurmukhi Addak
(0x00a75, 0x00a75,), # Gurmukhi Sign Yakash ..Gurmukhi Sign Yakash
(0x00a81, 0x00a82,), # Gujarati Sign Candrabind..Gujarati Sign Anusvara
(0x00abc, 0x00abc,), # Gujarati Sign Nukta ..Gujarati Sign Nukta
(0x00ac1, 0x00ac5,), # Gujarati Vowel Sign U ..Gujarati Vowel Sign Cand
(0x00ac7, 0x00ac8,), # Gujarati Vowel Sign E ..Gujarati Vowel Sign Ai
(0x00acd, 0x00acd,), # Gujarati Sign Virama ..Gujarati Sign Virama
(0x00ae2, 0x00ae3,), # Gujarati Vowel Sign Voca..Gujarati Vowel Sign Voca
(0x00b01, 0x00b01,), # Oriya Sign Candrabindu ..Oriya Sign Candrabindu
(0x00b3c, 0x00b3c,), # Oriya Sign Nukta ..Oriya Sign Nukta
(0x00b3f, 0x00b3f,), # Oriya Vowel Sign I ..Oriya Vowel Sign I
(0x00b41, 0x00b44,), # Oriya Vowel Sign U ..Oriya Vowel Sign Vocalic
(0x00b4d, 0x00b4d,), # Oriya Sign Virama ..Oriya Sign Virama
(0x00b56, 0x00b56,), # Oriya Ai Length Mark ..Oriya Ai Length Mark
(0x00b62, 0x00b63,), # Oriya Vowel Sign Vocalic..Oriya Vowel Sign Vocalic
(0x00b82, 0x00b82,), # Tamil Sign Anusvara ..Tamil Sign Anusvara
(0x00bc0, 0x00bc0,), # Tamil Vowel Sign Ii ..Tamil Vowel Sign Ii
(0x00bcd, 0x00bcd,), # Tamil Sign Virama ..Tamil Sign Virama
(0x00c3e, 0x00c40,), # Telugu Vowel Sign Aa ..Telugu Vowel Sign Ii
(0x00c46, 0x00c48,), # Telugu Vowel Sign E ..Telugu Vowel Sign Ai
(0x00c4a, 0x00c4d,), # Telugu Vowel Sign O ..Telugu Sign Virama
(0x00c55, 0x00c56,), # Telugu Length Mark ..Telugu Ai Length Mark
(0x00c62, 0x00c63,), # Telugu Vowel Sign Vocali..Telugu Vowel Sign Vocali
(0x00cbc, 0x00cbc,), # Kannada Sign Nukta ..Kannada Sign Nukta
(0x00cbf, 0x00cbf,), # Kannada Vowel Sign I ..Kannada Vowel Sign I
(0x00cc6, 0x00cc6,), # Kannada Vowel Sign E ..Kannada Vowel Sign E
(0x00ccc, 0x00ccd,), # Kannada Vowel Sign Au ..Kannada Sign Virama
(0x00ce2, 0x00ce3,), # Kannada Vowel Sign Vocal..Kannada Vowel Sign Vocal
(0x00d41, 0x00d44,), # Malayalam Vowel Sign U ..Malayalam Vowel Sign Voc
(0x00d4d, 0x00d4d,), # Malayalam Sign Virama ..Malayalam Sign Virama
(0x00d62, 0x00d63,), # Malayalam Vowel Sign Voc..Malayalam Vowel Sign Voc
(0x00dca, 0x00dca,), # Sinhala Sign Al-lakuna ..Sinhala Sign Al-lakuna
(0x00dd2, 0x00dd4,), # Sinhala Vowel Sign Ketti..Sinhala Vowel Sign Ketti
(0x00dd6, 0x00dd6,), # Sinhala Vowel Sign Diga ..Sinhala Vowel Sign Diga
(0x00e31, 0x00e31,), # Thai Character Mai Han-a..Thai Character Mai Han-a
(0x00e34, 0x00e3a,), # Thai Character Sara I ..Thai Character Phinthu
(0x00e47, 0x00e4e,), # Thai Character Maitaikhu..Thai Character Yamakkan
(0x00eb1, 0x00eb1,), # Lao Vowel Sign Mai Kan ..Lao Vowel Sign Mai Kan
(0x00eb4, 0x00eb9,), # Lao Vowel Sign I ..Lao Vowel Sign Uu
(0x00ebb, 0x00ebc,), # Lao Vowel Sign Mai Kon ..Lao Semivowel Sign Lo
(0x00ec8, 0x00ecd,), # Lao Tone Mai Ek ..Lao Niggahita
(0x00f18, 0x00f19,), # Tibetan Astrological Sig..Tibetan Astrological Sig
(0x00f35, 0x00f35,), # Tibetan Mark Ngas Bzung ..Tibetan Mark Ngas Bzung
(0x00f37, 0x00f37,), # Tibetan Mark Ngas Bzung ..Tibetan Mark Ngas Bzung
(0x00f39, 0x00f39,), # Tibetan Mark Tsa -phru ..Tibetan Mark Tsa -phru
(0x00f71, 0x00f7e,), # Tibetan Vowel Sign Aa ..Tibetan Sign Rjes Su Nga
(0x00f80, 0x00f84,), # Tibetan Vowel Sign Rever..Tibetan Mark Halanta
(0x00f86, 0x00f87,), # Tibetan Sign Lci Rtags ..Tibetan Sign Yang Rtags
(0x00f90, 0x00f97,), # Tibetan Subjoined Letter..Tibetan Subjoined Letter
(0x00f99, 0x00fbc,), # Tibetan Subjoined Letter..Tibetan Subjoined Letter
(0x00fc6, 0x00fc6,), # Tibetan Symbol Padma Gda..Tibetan Symbol Padma Gda
(0x0102d, 0x01030,), # Myanmar Vowel Sign I ..Myanmar Vowel Sign Uu
(0x01032, 0x01037,), # Myanmar Vowel Sign Ai ..Myanmar Sign Dot Below
(0x01039, 0x0103a,), # Myanmar Sign Virama ..Myanmar Sign Asat
(0x0103d, 0x0103e,), # Myanmar Consonant Sign M..Myanmar Consonant Sign M
(0x01058, 0x01059,), # Myanmar Vowel Sign Vocal..Myanmar Vowel Sign Vocal
(0x0105e, 0x01060,), # Myanmar Consonant Sign M..Myanmar Consonant Sign M
(0x01071, 0x01074,), # Myanmar Vowel Sign Geba ..Myanmar Vowel Sign Kayah
(0x01082, 0x01082,), # Myanmar Consonant Sign S..Myanmar Consonant Sign S
(0x01085, 0x01086,), # Myanmar Vowel Sign Shan ..Myanmar Vowel Sign Shan
(0x0108d, 0x0108d,), # Myanmar Sign Shan Counci..Myanmar Sign Shan Counci
(0x0109d, 0x0109d,), # Myanmar Vowel Sign Aiton..Myanmar Vowel Sign Aiton
(0x0135f, 0x0135f,), # Ethiopic Combining Gemin..Ethiopic Combining Gemin
(0x01712, 0x01714,), # Tagalog Vowel Sign I ..Tagalog Sign Virama
(0x01732, 0x01734,), # Hanunoo Vowel Sign I ..Hanunoo Sign Pamudpod
(0x01752, 0x01753,), # Buhid Vowel Sign I ..Buhid Vowel Sign U
(0x01772, 0x01773,), # Tagbanwa Vowel Sign I ..Tagbanwa Vowel Sign U
(0x017b7, 0x017bd,), # Khmer Vowel Sign I ..Khmer Vowel Sign Ua
(0x017c6, 0x017c6,), # Khmer Sign Nikahit ..Khmer Sign Nikahit
(0x017c9, 0x017d3,), # Khmer Sign Muusikatoan ..Khmer Sign Bathamasat
(0x017dd, 0x017dd,), # Khmer Sign Atthacan ..Khmer Sign Atthacan
(0x0180b, 0x0180d,), # Mongolian Free Variation..Mongolian Free Variation
(0x018a9, 0x018a9,), # Mongolian Letter Ali Gal..Mongolian Letter Ali Gal
(0x01920, 0x01922,), # Limbu Vowel Sign A ..Limbu Vowel Sign U
(0x01927, 0x01928,), # Limbu Vowel Sign E ..Limbu Vowel Sign O
(0x01932, 0x01932,), # Limbu Small Letter Anusv..Limbu Small Letter Anusv
(0x01939, 0x0193b,), # Limbu Sign Mukphreng ..Limbu Sign Sa-i
(0x01a17, 0x01a18,), # Buginese Vowel Sign I ..Buginese Vowel Sign U
(0x01a56, 0x01a56,), # Tai Tham Consonant Sign ..Tai Tham Consonant Sign
(0x01a58, 0x01a5e,), # Tai Tham Sign Mai Kang L..Tai Tham Consonant Sign
(0x01a60, 0x01a60,), # Tai Tham Sign Sakot ..Tai Tham Sign Sakot
(0x01a62, 0x01a62,), # Tai Tham Vowel Sign Mai ..Tai Tham Vowel Sign Mai
(0x01a65, 0x01a6c,), # Tai Tham Vowel Sign I ..Tai Tham Vowel Sign Oa B
(0x01a73, 0x01a7c,), # Tai Tham Vowel Sign Oa A..Tai Tham Sign Khuen-lue
(0x01a7f, 0x01a7f,), # Tai Tham Combining Crypt..Tai Tham Combining Crypt
(0x01b00, 0x01b03,), # Balinese Sign Ulu Ricem ..Balinese Sign Surang
(0x01b34, 0x01b34,), # Balinese Sign Rerekan ..Balinese Sign Rerekan
(0x01b36, 0x01b3a,), # Balinese Vowel Sign Ulu ..Balinese Vowel Sign Ra R
(0x01b3c, 0x01b3c,), # Balinese Vowel Sign La L..Balinese Vowel Sign La L
(0x01b42, 0x01b42,), # Balinese Vowel Sign Pepe..Balinese Vowel Sign Pepe
(0x01b6b, 0x01b73,), # Balinese Musical Symbol ..Balinese Musical Symbol
(0x01b80, 0x01b81,), # Sundanese Sign Panyecek ..Sundanese Sign Panglayar
(0x01ba2, 0x01ba5,), # Sundanese Consonant Sign..Sundanese Vowel Sign Pan
(0x01ba8, 0x01ba9,), # Sundanese Vowel Sign Pam..Sundanese Vowel Sign Pan
(0x01c2c, 0x01c33,), # Lepcha Vowel Sign E ..Lepcha Consonant Sign T
(0x01c36, 0x01c37,), # Lepcha Sign Ran ..Lepcha Sign Nukta
(0x01cd0, 0x01cd2,), # Vedic Tone Karshana ..Vedic Tone Prenkha
(0x01cd4, 0x01ce0,), # Vedic Sign Yajurvedic Mi..Vedic Tone Rigvedic Kash
(0x01ce2, 0x01ce8,), # Vedic Sign Visarga Svari..Vedic Sign Visarga Anuda
(0x01ced, 0x01ced,), # Vedic Sign Tiryak ..Vedic Sign Tiryak
(0x01dc0, 0x01de6,), # Combining Dotted Grave A..Combining Latin Small Le
(0x01dfd, 0x01dff,), # Combining Almost Equal T..Combining Right Arrowhea
(0x020d0, 0x020f0,), # Combining Left Harpoon A..Combining Asterisk Above
(0x02cef, 0x02cf1,), # Coptic Combining Ni Abov..Coptic Combining Spiritu
(0x02de0, 0x02dff,), # Combining Cyrillic Lette..Combining Cyrillic Lette
(0x0302a, 0x0302f,), # Ideographic Level Tone M..Hangul Double Dot Tone M
(0x03099, 0x0309a,), # Combining Katakana-hirag..Combining Katakana-hirag
(0x0a66f, 0x0a672,), # Combining Cyrillic Vzmet..Combining Cyrillic Thous
(0x0a67c, 0x0a67d,), # Combining Cyrillic Kavyk..Combining Cyrillic Payer
(0x0a6f0, 0x0a6f1,), # Bamum Combining Mark Koq..Bamum Combining Mark Tuk
(0x0a802, 0x0a802,), # Syloti Nagri Sign Dvisva..Syloti Nagri Sign Dvisva
(0x0a806, 0x0a806,), # Syloti Nagri Sign Hasant..Syloti Nagri Sign Hasant
(0x0a80b, 0x0a80b,), # Syloti Nagri Sign Anusva..Syloti Nagri Sign Anusva
(0x0a825, 0x0a826,), # Syloti Nagri Vowel Sign ..Syloti Nagri Vowel Sign
(0x0a8c4, 0x0a8c4,), # Saurashtra Sign Virama ..Saurashtra Sign Virama
(0x0a8e0, 0x0a8f1,), # Combining Devanagari Dig..Combining Devanagari Sig
(0x0a926, 0x0a92d,), # Kayah Li Vowel Ue ..Kayah Li Tone Calya Plop
(0x0a947, 0x0a951,), # Rejang Vowel Sign I ..Rejang Consonant Sign R
(0x0a980, 0x0a982,), # Javanese Sign Panyangga ..Javanese Sign Layar
(0x0a9b3, 0x0a9b3,), # Javanese Sign Cecak Telu..Javanese Sign Cecak Telu
(0x0a9b6, 0x0a9b9,), # Javanese Vowel Sign Wulu..Javanese Vowel Sign Suku
(0x0a9bc, 0x0a9bc,), # Javanese Vowel Sign Pepe..Javanese Vowel Sign Pepe
(0x0aa29, 0x0aa2e,), # Cham Vowel Sign Aa ..Cham Vowel Sign Oe
(0x0aa31, 0x0aa32,), # Cham Vowel Sign Au ..Cham Vowel Sign Ue
(0x0aa35, 0x0aa36,), # Cham Consonant Sign La ..Cham Consonant Sign Wa
(0x0aa43, 0x0aa43,), # Cham Consonant Sign Fina..Cham Consonant Sign Fina
(0x0aa4c, 0x0aa4c,), # Cham Consonant Sign Fina..Cham Consonant Sign Fina
(0x0aab0, 0x0aab0,), # Tai Viet Mai Kang ..Tai Viet Mai Kang
(0x0aab2, 0x0aab4,), # Tai Viet Vowel I ..Tai Viet Vowel U
(0x0aab7, 0x0aab8,), # Tai Viet Mai Khit ..Tai Viet Vowel Ia
(0x0aabe, 0x0aabf,), # Tai Viet Vowel Am ..Tai Viet Tone Mai Ek
(0x0aac1, 0x0aac1,), # Tai Viet Tone Mai Tho ..Tai Viet Tone Mai Tho
(0x0abe5, 0x0abe5,), # Meetei Mayek Vowel Sign ..Meetei Mayek Vowel Sign
(0x0abe8, 0x0abe8,), # Meetei Mayek Vowel Sign ..Meetei Mayek Vowel Sign
(0x0abed, 0x0abed,), # Meetei Mayek Apun Iyek ..Meetei Mayek Apun Iyek
(0x0fb1e, 0x0fb1e,), # Hebrew Point Judeo-spani..Hebrew Point Judeo-spani
(0x0fe00, 0x0fe0f,), # Variation Selector-1 ..Variation Selector-16
(0x0fe20, 0x0fe26,), # Combining Ligature Left ..Combining Conjoining Mac
(0x101fd, 0x101fd,), # Phaistos Disc Sign Combi..Phaistos Disc Sign Combi
(0x10a01, 0x10a03,), # Kharoshthi Vowel Sign I ..Kharoshthi Vowel Sign Vo
(0x10a05, 0x10a06,), # Kharoshthi Vowel Sign E ..Kharoshthi Vowel Sign O
(0x10a0c, 0x10a0f,), # Kharoshthi Vowel Length ..Kharoshthi Sign Visarga
(0x10a38, 0x10a3a,), # Kharoshthi Sign Bar Abov..Kharoshthi Sign Dot Belo
(0x10a3f, 0x10a3f,), # Kharoshthi Virama ..Kharoshthi Virama
(0x11080, 0x11081,), # Kaithi Sign Candrabindu ..Kaithi Sign Anusvara
(0x110b3, 0x110b6,), # Kaithi Vowel Sign U ..Kaithi Vowel Sign Ai
(0x110b9, 0x110ba,), # Kaithi Sign Virama ..Kaithi Sign Nukta
(0x1d167, 0x1d169,), # Musical Symbol Combining..Musical Symbol Combining
(0x1d17b, 0x1d182,), # Musical Symbol Combining..Musical Symbol Combining
(0x1d185, 0x1d18b,), # Musical Symbol Combining..Musical Symbol Combining
(0x1d1aa, 0x1d1ad,), # Musical Symbol Combining..Musical Symbol Combining
(0x1d242, 0x1d244,), # Combining Greek Musical ..Combining Greek Musical
(0xe0100, 0xe01ef,), # Variation Selector-17 ..Variation Selector-256
),
'6.0.0': (
# Source: DerivedGeneralCategory-6.0.0.txt
# Date: 2010-08-19, 00:48:09 GMT [MD]
#
(0x00300, 0x0036f,), # Combining Grave Accent ..Combining Latin Small Le
(0x00483, 0x00489,), # Combining Cyrillic Titlo..Combining Cyrillic Milli
(0x00591, 0x005bd,), # Hebrew Accent Etnahta ..Hebrew Point Meteg
(0x005bf, 0x005bf,), # Hebrew Point Rafe ..Hebrew Point Rafe
(0x005c1, 0x005c2,), # Hebrew Point Shin Dot ..Hebrew Point Sin Dot
(0x005c4, 0x005c5,), # Hebrew Mark Upper Dot ..Hebrew Mark Lower Dot
(0x005c7, 0x005c7,), # Hebrew Point Qamats Qata..Hebrew Point Qamats Qata
(0x00610, 0x0061a,), # Arabic Sign Sallallahou ..Arabic Small Kasra
(0x0064b, 0x0065f,), # Arabic Fathatan ..Arabic Wavy Hamza Below
(0x00670, 0x00670,), # Arabic Letter Superscrip..Arabic Letter Superscrip
(0x006d6, 0x006dc,), # Arabic Small High Ligatu..Arabic Small High Seen
(0x006df, 0x006e4,), # Arabic Small High Rounde..Arabic Small High Madda
(0x006e7, 0x006e8,), # Arabic Small High Yeh ..Arabic Small High Noon
(0x006ea, 0x006ed,), # Arabic Empty Centre Low ..Arabic Small Low Meem
(0x00711, 0x00711,), # Syriac Letter Superscrip..Syriac Letter Superscrip
(0x00730, 0x0074a,), # Syriac Pthaha Above ..Syriac Barrekh
(0x007a6, 0x007b0,), # Thaana Abafili ..Thaana Sukun
(0x007eb, 0x007f3,), # Nko Combining Short High..Nko Combining Double Dot
(0x00816, 0x00819,), # Samaritan Mark In ..Samaritan Mark Dagesh
(0x0081b, 0x00823,), # Samaritan Mark Epentheti..Samaritan Vowel Sign A
(0x00825, 0x00827,), # Samaritan Vowel Sign Sho..Samaritan Vowel Sign U
(0x00829, 0x0082d,), # Samaritan Vowel Sign Lon..Samaritan Mark Nequdaa
(0x00859, 0x0085b,), # Mandaic Affrication Mark..Mandaic Gemination Mark
(0x00900, 0x00902,), # Devanagari Sign Inverted..Devanagari Sign Anusvara
(0x0093a, 0x0093a,), # Devanagari Vowel Sign Oe..Devanagari Vowel Sign Oe
(0x0093c, 0x0093c,), # Devanagari Sign Nukta ..Devanagari Sign Nukta
(0x00941, 0x00948,), # Devanagari Vowel Sign U ..Devanagari Vowel Sign Ai
(0x0094d, 0x0094d,), # Devanagari Sign Virama ..Devanagari Sign Virama
(0x00951, 0x00957,), # Devanagari Stress Sign U..Devanagari Vowel Sign Uu
(0x00962, 0x00963,), # Devanagari Vowel Sign Vo..Devanagari Vowel Sign Vo
(0x00981, 0x00981,), # Bengali Sign Candrabindu..Bengali Sign Candrabindu
(0x009bc, 0x009bc,), # Bengali Sign Nukta ..Bengali Sign Nukta
(0x009c1, 0x009c4,), # Bengali Vowel Sign U ..Bengali Vowel Sign Vocal
(0x009cd, 0x009cd,), # Bengali Sign Virama ..Bengali Sign Virama
(0x009e2, 0x009e3,), # Bengali Vowel Sign Vocal..Bengali Vowel Sign Vocal
(0x00a01, 0x00a02,), # Gurmukhi Sign Adak Bindi..Gurmukhi Sign Bindi
(0x00a3c, 0x00a3c,), # Gurmukhi Sign Nukta ..Gurmukhi Sign Nukta
(0x00a41, 0x00a42,), # Gurmukhi Vowel Sign U ..Gurmukhi Vowel Sign Uu
(0x00a47, 0x00a48,), # Gurmukhi Vowel Sign Ee ..Gurmukhi Vowel Sign Ai
(0x00a4b, 0x00a4d,), # Gurmukhi Vowel Sign Oo ..Gurmukhi Sign Virama
(0x00a51, 0x00a51,), # Gurmukhi Sign Udaat ..Gurmukhi Sign Udaat
(0x00a70, 0x00a71,), # Gurmukhi Tippi ..Gurmukhi Addak
(0x00a75, 0x00a75,), # Gurmukhi Sign Yakash ..Gurmukhi Sign Yakash
(0x00a81, 0x00a82,), # Gujarati Sign Candrabind..Gujarati Sign Anusvara
(0x00abc, 0x00abc,), # Gujarati Sign Nukta ..Gujarati Sign Nukta
(0x00ac1, 0x00ac5,), # Gujarati Vowel Sign U ..Gujarati Vowel Sign Cand
(0x00ac7, 0x00ac8,), # Gujarati Vowel Sign E ..Gujarati Vowel Sign Ai
(0x00acd, 0x00acd,), # Gujarati Sign Virama ..Gujarati Sign Virama
(0x00ae2, 0x00ae3,), # Gujarati Vowel Sign Voca..Gujarati Vowel Sign Voca
(0x00b01, 0x00b01,), # Oriya Sign Candrabindu ..Oriya Sign Candrabindu
(0x00b3c, 0x00b3c,), # Oriya Sign Nukta ..Oriya Sign Nukta
(0x00b3f, 0x00b3f,), # Oriya Vowel Sign I ..Oriya Vowel Sign I
(0x00b41, 0x00b44,), # Oriya Vowel Sign U ..Oriya Vowel Sign Vocalic
(0x00b4d, 0x00b4d,), # Oriya Sign Virama ..Oriya Sign Virama
(0x00b56, 0x00b56,), # Oriya Ai Length Mark ..Oriya Ai Length Mark
(0x00b62, 0x00b63,), # Oriya Vowel Sign Vocalic..Oriya Vowel Sign Vocalic
(0x00b82, 0x00b82,), # Tamil Sign Anusvara ..Tamil Sign Anusvara
(0x00bc0, 0x00bc0,), # Tamil Vowel Sign Ii ..Tamil Vowel Sign Ii
(0x00bcd, 0x00bcd,), # Tamil Sign Virama ..Tamil Sign Virama
(0x00c3e, 0x00c40,), # Telugu Vowel Sign Aa ..Telugu Vowel Sign Ii
(0x00c46, 0x00c48,), # Telugu Vowel Sign E ..Telugu Vowel Sign Ai
(0x00c4a, 0x00c4d,), # Telugu Vowel Sign O ..Telugu Sign Virama
(0x00c55, 0x00c56,), # Telugu Length Mark ..Telugu Ai Length Mark
(0x00c62, 0x00c63,), # Telugu Vowel Sign Vocali..Telugu Vowel Sign Vocali
(0x00cbc, 0x00cbc,), # Kannada Sign Nukta ..Kannada Sign Nukta
(0x00cbf, 0x00cbf,), # Kannada Vowel Sign I ..Kannada Vowel Sign I
(0x00cc6, 0x00cc6,), # Kannada Vowel Sign E ..Kannada Vowel Sign E
(0x00ccc, 0x00ccd,), # Kannada Vowel Sign Au ..Kannada Sign Virama
(0x00ce2, 0x00ce3,), # Kannada Vowel Sign Vocal..Kannada Vowel Sign Vocal
(0x00d41, 0x00d44,), # Malayalam Vowel Sign U ..Malayalam Vowel Sign Voc
(0x00d4d, 0x00d4d,), # Malayalam Sign Virama ..Malayalam Sign Virama
(0x00d62, 0x00d63,), # Malayalam Vowel Sign Voc..Malayalam Vowel Sign Voc
(0x00dca, 0x00dca,), # Sinhala Sign Al-lakuna ..Sinhala Sign Al-lakuna
(0x00dd2, 0x00dd4,), # Sinhala Vowel Sign Ketti..Sinhala Vowel Sign Ketti
(0x00dd6, 0x00dd6,), # Sinhala Vowel Sign Diga ..Sinhala Vowel Sign Diga
(0x00e31, 0x00e31,), # Thai Character Mai Han-a..Thai Character Mai Han-a
(0x00e34, 0x00e3a,), # Thai Character Sara I ..Thai Character Phinthu
(0x00e47, 0x00e4e,), # Thai Character Maitaikhu..Thai Character Yamakkan
(0x00eb1, 0x00eb1,), # Lao Vowel Sign Mai Kan ..Lao Vowel Sign Mai Kan
(0x00eb4, 0x00eb9,), # Lao Vowel Sign I ..Lao Vowel Sign Uu
(0x00ebb, 0x00ebc,), # Lao Vowel Sign Mai Kon ..Lao Semivowel Sign Lo
(0x00ec8, 0x00ecd,), # Lao Tone Mai Ek ..Lao Niggahita
(0x00f18, 0x00f19,), # Tibetan Astrological Sig..Tibetan Astrological Sig
(0x00f35, 0x00f35,), # Tibetan Mark Ngas Bzung ..Tibetan Mark Ngas Bzung
(0x00f37, 0x00f37,), # Tibetan Mark Ngas Bzung ..Tibetan Mark Ngas Bzung
(0x00f39, 0x00f39,), # Tibetan Mark Tsa -phru ..Tibetan Mark Tsa -phru
(0x00f71, 0x00f7e,), # Tibetan Vowel Sign Aa ..Tibetan Sign Rjes Su Nga
(0x00f80, 0x00f84,), # Tibetan Vowel Sign Rever..Tibetan Mark Halanta
(0x00f86, 0x00f87,), # Tibetan Sign Lci Rtags ..Tibetan Sign Yang Rtags
(0x00f8d, 0x00f97,), # Tibetan Subjoined Sign L..Tibetan Subjoined Letter
(0x00f99, 0x00fbc,), # Tibetan Subjoined Letter..Tibetan Subjoined Letter
(0x00fc6, 0x00fc6,), # Tibetan Symbol Padma Gda..Tibetan Symbol Padma Gda
(0x0102d, 0x01030,), # Myanmar Vowel Sign I ..Myanmar Vowel Sign Uu
(0x01032, 0x01037,), # Myanmar Vowel Sign Ai ..Myanmar Sign Dot Below
(0x01039, 0x0103a,), # Myanmar Sign Virama ..Myanmar Sign Asat
(0x0103d, 0x0103e,), # Myanmar Consonant Sign M..Myanmar Consonant Sign M
(0x01058, 0x01059,), # Myanmar Vowel Sign Vocal..Myanmar Vowel Sign Vocal
(0x0105e, 0x01060,), # Myanmar Consonant Sign M..Myanmar Consonant Sign M
(0x01071, 0x01074,), # Myanmar Vowel Sign Geba ..Myanmar Vowel Sign Kayah
(0x01082, 0x01082,), # Myanmar Consonant Sign S..Myanmar Consonant Sign S
(0x01085, 0x01086,), # Myanmar Vowel Sign Shan ..Myanmar Vowel Sign Shan
(0x0108d, 0x0108d,), # Myanmar Sign Shan Counci..Myanmar Sign Shan Counci
(0x0109d, 0x0109d,), # Myanmar Vowel Sign Aiton..Myanmar Vowel Sign Aiton
(0x0135d, 0x0135f,), # Ethiopic Combining Gemin..Ethiopic Combining Gemin
(0x01712, 0x01714,), # Tagalog Vowel Sign I ..Tagalog Sign Virama
(0x01732, 0x01734,), # Hanunoo Vowel Sign I ..Hanunoo Sign Pamudpod
(0x01752, 0x01753,), # Buhid Vowel Sign I ..Buhid Vowel Sign U
(0x01772, 0x01773,), # Tagbanwa Vowel Sign I ..Tagbanwa Vowel Sign U
(0x017b7, 0x017bd,), # Khmer Vowel Sign I ..Khmer Vowel Sign Ua
(0x017c6, 0x017c6,), # Khmer Sign Nikahit ..Khmer Sign Nikahit
(0x017c9, 0x017d3,), # Khmer Sign Muusikatoan ..Khmer Sign Bathamasat
(0x017dd, 0x017dd,), # Khmer Sign Atthacan ..Khmer Sign Atthacan
(0x0180b, 0x0180d,), # Mongolian Free Variation..Mongolian Free Variation
(0x018a9, 0x018a9,), # Mongolian Letter Ali Gal..Mongolian Letter Ali Gal
(0x01920, 0x01922,), # Limbu Vowel Sign A ..Limbu Vowel Sign U
(0x01927, 0x01928,), # Limbu Vowel Sign E ..Limbu Vowel Sign O
(0x01932, 0x01932,), # Limbu Small Letter Anusv..Limbu Small Letter Anusv
(0x01939, 0x0193b,), # Limbu Sign Mukphreng ..Limbu Sign Sa-i
(0x01a17, 0x01a18,), # Buginese Vowel Sign I ..Buginese Vowel Sign U
(0x01a56, 0x01a56,), # Tai Tham Consonant Sign ..Tai Tham Consonant Sign
(0x01a58, 0x01a5e,), # Tai Tham Sign Mai Kang L..Tai Tham Consonant Sign
(0x01a60, 0x01a60,), # Tai Tham Sign Sakot ..Tai Tham Sign Sakot
(0x01a62, 0x01a62,), # Tai Tham Vowel Sign Mai ..Tai Tham Vowel Sign Mai
(0x01a65, 0x01a6c,), # Tai Tham Vowel Sign I ..Tai Tham Vowel Sign Oa B
(0x01a73, 0x01a7c,), # Tai Tham Vowel Sign Oa A..Tai Tham Sign Khuen-lue
(0x01a7f, 0x01a7f,), # Tai Tham Combining Crypt..Tai Tham Combining Crypt
(0x01b00, 0x01b03,), # Balinese Sign Ulu Ricem ..Balinese Sign Surang
(0x01b34, 0x01b34,), # Balinese Sign Rerekan ..Balinese Sign Rerekan
(0x01b36, 0x01b3a,), # Balinese Vowel Sign Ulu ..Balinese Vowel Sign Ra R
(0x01b3c, 0x01b3c,), # Balinese Vowel Sign La L..Balinese Vowel Sign La L
(0x01b42, 0x01b42,), # Balinese Vowel Sign Pepe..Balinese Vowel Sign Pepe
(0x01b6b, 0x01b73,), # Balinese Musical Symbol ..Balinese Musical Symbol
(0x01b80, 0x01b81,), # Sundanese Sign Panyecek ..Sundanese Sign Panglayar
(0x01ba2, 0x01ba5,), # Sundanese Consonant Sign..Sundanese Vowel Sign Pan
(0x01ba8, 0x01ba9,), # Sundanese Vowel Sign Pam..Sundanese Vowel Sign Pan
(0x01be6, 0x01be6,), # Batak Sign Tompi ..Batak Sign Tompi
(0x01be8, 0x01be9,), # Batak Vowel Sign Pakpak ..Batak Vowel Sign Ee
(0x01bed, 0x01bed,), # Batak Vowel Sign Karo O ..Batak Vowel Sign Karo O
(0x01bef, 0x01bf1,), # Batak Vowel Sign U For S..Batak Consonant Sign H
(0x01c2c, 0x01c33,), # Lepcha Vowel Sign E ..Lepcha Consonant Sign T
(0x01c36, 0x01c37,), # Lepcha Sign Ran ..Lepcha Sign Nukta
(0x01cd0, 0x01cd2,), # Vedic Tone Karshana ..Vedic Tone Prenkha
(0x01cd4, 0x01ce0,), # Vedic Sign Yajurvedic Mi..Vedic Tone Rigvedic Kash
(0x01ce2, 0x01ce8,), # Vedic Sign Visarga Svari..Vedic Sign Visarga Anuda
(0x01ced, 0x01ced,), # Vedic Sign Tiryak ..Vedic Sign Tiryak
(0x01dc0, 0x01de6,), # Combining Dotted Grave A..Combining Latin Small Le
(0x01dfc, 0x01dff,), # Combining Double Inverte..Combining Right Arrowhea
(0x020d0, 0x020f0,), # Combining Left Harpoon A..Combining Asterisk Above
(0x02cef, 0x02cf1,), # Coptic Combining Ni Abov..Coptic Combining Spiritu
(0x02d7f, 0x02d7f,), # Tifinagh Consonant Joine..Tifinagh Consonant Joine
(0x02de0, 0x02dff,), # Combining Cyrillic Lette..Combining Cyrillic Lette
(0x0302a, 0x0302f,), # Ideographic Level Tone M..Hangul Double Dot Tone M
(0x03099, 0x0309a,), # Combining Katakana-hirag..Combining Katakana-hirag
(0x0a66f, 0x0a672,), # Combining Cyrillic Vzmet..Combining Cyrillic Thous
(0x0a67c, 0x0a67d,), # Combining Cyrillic Kavyk..Combining Cyrillic Payer
(0x0a6f0, 0x0a6f1,), # Bamum Combining Mark Koq..Bamum Combining Mark Tuk
(0x0a802, 0x0a802,), # Syloti Nagri Sign Dvisva..Syloti Nagri Sign Dvisva
(0x0a806, 0x0a806,), # Syloti Nagri Sign Hasant..Syloti Nagri Sign Hasant
(0x0a80b, 0x0a80b,), # Syloti Nagri Sign Anusva..Syloti Nagri Sign Anusva
(0x0a825, 0x0a826,), # Syloti Nagri Vowel Sign ..Syloti Nagri Vowel Sign
(0x0a8c4, 0x0a8c4,), # Saurashtra Sign Virama ..Saurashtra Sign Virama
(0x0a8e0, 0x0a8f1,), # Combining Devanagari Dig..Combining Devanagari Sig
(0x0a926, 0x0a92d,), # Kayah Li Vowel Ue ..Kayah Li Tone Calya Plop
(0x0a947, 0x0a951,), # Rejang Vowel Sign I ..Rejang Consonant Sign R
(0x0a980, 0x0a982,), # Javanese Sign Panyangga ..Javanese Sign Layar
(0x0a9b3, 0x0a9b3,), # Javanese Sign Cecak Telu..Javanese Sign Cecak Telu
(0x0a9b6, 0x0a9b9,), # Javanese Vowel Sign Wulu..Javanese Vowel Sign Suku
(0x0a9bc, 0x0a9bc,), # Javanese Vowel Sign Pepe..Javanese Vowel Sign Pepe
(0x0aa29, 0x0aa2e,), # Cham Vowel Sign Aa ..Cham Vowel Sign Oe
(0x0aa31, 0x0aa32,), # Cham Vowel Sign Au ..Cham Vowel Sign Ue
(0x0aa35, 0x0aa36,), # Cham Consonant Sign La ..Cham Consonant Sign Wa
(0x0aa43, 0x0aa43,), # Cham Consonant Sign Fina..Cham Consonant Sign Fina
(0x0aa4c, 0x0aa4c,), # Cham Consonant Sign Fina..Cham Consonant Sign Fina
(0x0aab0, 0x0aab0,), # Tai Viet Mai Kang ..Tai Viet Mai Kang
(0x0aab2, 0x0aab4,), # Tai Viet Vowel I ..Tai Viet Vowel U
(0x0aab7, 0x0aab8,), # Tai Viet Mai Khit ..Tai Viet Vowel Ia
(0x0aabe, 0x0aabf,), # Tai Viet Vowel Am ..Tai Viet Tone Mai Ek
(0x0aac1, 0x0aac1,), # Tai Viet Tone Mai Tho ..Tai Viet Tone Mai Tho
(0x0abe5, 0x0abe5,), # Meetei Mayek Vowel Sign ..Meetei Mayek Vowel Sign
(0x0abe8, 0x0abe8,), # Meetei Mayek Vowel Sign ..Meetei Mayek Vowel Sign
(0x0abed, 0x0abed,), # Meetei Mayek Apun Iyek ..Meetei Mayek Apun Iyek
(0x0fb1e, 0x0fb1e,), # Hebrew Point Judeo-spani..Hebrew Point Judeo-spani
(0x0fe00, 0x0fe0f,), # Variation Selector-1 ..Variation Selector-16
(0x0fe20, 0x0fe26,), # Combining Ligature Left ..Combining Conjoining Mac
(0x101fd, 0x101fd,), # Phaistos Disc Sign Combi..Phaistos Disc Sign Combi
(0x10a01, 0x10a03,), # Kharoshthi Vowel Sign I ..Kharoshthi Vowel Sign Vo
(0x10a05, 0x10a06,), # Kharoshthi Vowel Sign E ..Kharoshthi Vowel Sign O
(0x10a0c, 0x10a0f,), # Kharoshthi Vowel Length ..Kharoshthi Sign Visarga
(0x10a38, 0x10a3a,), # Kharoshthi Sign Bar Abov..Kharoshthi Sign Dot Belo
(0x10a3f, 0x10a3f,), # Kharoshthi Virama ..Kharoshthi Virama
(0x11001, 0x11001,), # Brahmi Sign Anusvara ..Brahmi Sign Anusvara
(0x11038, 0x11046,), # Brahmi Vowel Sign Aa ..Brahmi Virama
(0x11080, 0x11081,), # Kaithi Sign Candrabindu ..Kaithi Sign Anusvara
(0x110b3, 0x110b6,), # Kaithi Vowel Sign U ..Kaithi Vowel Sign Ai
(0x110b9, 0x110ba,), # Kaithi Sign Virama ..Kaithi Sign Nukta
(0x1d167, 0x1d169,), # Musical Symbol Combining..Musical Symbol Combining
(0x1d17b, 0x1d182,), # Musical Symbol Combining..Musical Symbol Combining
(0x1d185, 0x1d18b,), # Musical Symbol Combining..Musical Symbol Combining
(0x1d1aa, 0x1d1ad,), # Musical Symbol Combining..Musical Symbol Combining
(0x1d242, 0x1d244,), # Combining Greek Musical ..Combining Greek Musical
(0xe0100, 0xe01ef,), # Variation Selector-17 ..Variation Selector-256
),
'6.1.0': (
# Source: DerivedGeneralCategory-6.1.0.txt
# Date: 2011-11-27, 05:10:22 GMT [MD]
#
(0x00300, 0x0036f,), # Combining Grave Accent ..Combining Latin Small Le
(0x00483, 0x00489,), # Combining Cyrillic Titlo..Combining Cyrillic Milli
(0x00591, 0x005bd,), # Hebrew Accent Etnahta ..Hebrew Point Meteg
(0x005bf, 0x005bf,), # Hebrew Point Rafe ..Hebrew Point Rafe
(0x005c1, 0x005c2,), # Hebrew Point Shin Dot ..Hebrew Point Sin Dot
(0x005c4, 0x005c5,), # Hebrew Mark Upper Dot ..Hebrew Mark Lower Dot
(0x005c7, 0x005c7,), # Hebrew Point Qamats Qata..Hebrew Point Qamats Qata
(0x00610, 0x0061a,), # Arabic Sign Sallallahou ..Arabic Small Kasra
(0x0064b, 0x0065f,), # Arabic Fathatan ..Arabic Wavy Hamza Below
(0x00670, 0x00670,), # Arabic Letter Superscrip..Arabic Letter Superscrip
(0x006d6, 0x006dc,), # Arabic Small High Ligatu..Arabic Small High Seen
(0x006df, 0x006e4,), # Arabic Small High Rounde..Arabic Small High Madda
(0x006e7, 0x006e8,), # Arabic Small High Yeh ..Arabic Small High Noon
(0x006ea, 0x006ed,), # Arabic Empty Centre Low ..Arabic Small Low Meem
(0x00711, 0x00711,), # Syriac Letter Superscrip..Syriac Letter Superscrip
(0x00730, 0x0074a,), # Syriac Pthaha Above ..Syriac Barrekh
(0x007a6, 0x007b0,), # Thaana Abafili ..Thaana Sukun
(0x007eb, 0x007f3,), # Nko Combining Short High..Nko Combining Double Dot
(0x00816, 0x00819,), # Samaritan Mark In ..Samaritan Mark Dagesh
(0x0081b, 0x00823,), # Samaritan Mark Epentheti..Samaritan Vowel Sign A
(0x00825, 0x00827,), # Samaritan Vowel Sign Sho..Samaritan Vowel Sign U
(0x00829, 0x0082d,), # Samaritan Vowel Sign Lon..Samaritan Mark Nequdaa
(0x00859, 0x0085b,), # Mandaic Affrication Mark..Mandaic Gemination Mark
(0x008e4, 0x008fe,), # Arabic Curly Fatha ..Arabic Damma With Dot
(0x00900, 0x00902,), # Devanagari Sign Inverted..Devanagari Sign Anusvara
(0x0093a, 0x0093a,), # Devanagari Vowel Sign Oe..Devanagari Vowel Sign Oe
(0x0093c, 0x0093c,), # Devanagari Sign Nukta ..Devanagari Sign Nukta
(0x00941, 0x00948,), # Devanagari Vowel Sign U ..Devanagari Vowel Sign Ai
(0x0094d, 0x0094d,), # Devanagari Sign Virama ..Devanagari Sign Virama
(0x00951, 0x00957,), # Devanagari Stress Sign U..Devanagari Vowel Sign Uu
(0x00962, 0x00963,), # Devanagari Vowel Sign Vo..Devanagari Vowel Sign Vo
(0x00981, 0x00981,), # Bengali Sign Candrabindu..Bengali Sign Candrabindu
(0x009bc, 0x009bc,), # Bengali Sign Nukta ..Bengali Sign Nukta
(0x009c1, 0x009c4,), # Bengali Vowel Sign U ..Bengali Vowel Sign Vocal
(0x009cd, 0x009cd,), # Bengali Sign Virama ..Bengali Sign Virama
(0x009e2, 0x009e3,), # Bengali Vowel Sign Vocal..Bengali Vowel Sign Vocal
(0x00a01, 0x00a02,), # Gurmukhi Sign Adak Bindi..Gurmukhi Sign Bindi
(0x00a3c, 0x00a3c,), # Gurmukhi Sign Nukta ..Gurmukhi Sign Nukta
(0x00a41, 0x00a42,), # Gurmukhi Vowel Sign U ..Gurmukhi Vowel Sign Uu
(0x00a47, 0x00a48,), # Gurmukhi Vowel Sign Ee ..Gurmukhi Vowel Sign Ai
(0x00a4b, 0x00a4d,), # Gurmukhi Vowel Sign Oo ..Gurmukhi Sign Virama
(0x00a51, 0x00a51,), # Gurmukhi Sign Udaat ..Gurmukhi Sign Udaat
(0x00a70, 0x00a71,), # Gurmukhi Tippi ..Gurmukhi Addak
(0x00a75, 0x00a75,), # Gurmukhi Sign Yakash ..Gurmukhi Sign Yakash
(0x00a81, 0x00a82,), # Gujarati Sign Candrabind..Gujarati Sign Anusvara
(0x00abc, 0x00abc,), # Gujarati Sign Nukta ..Gujarati Sign Nukta
(0x00ac1, 0x00ac5,), # Gujarati Vowel Sign U ..Gujarati Vowel Sign Cand
(0x00ac7, 0x00ac8,), # Gujarati Vowel Sign E ..Gujarati Vowel Sign Ai
(0x00acd, 0x00acd,), # Gujarati Sign Virama ..Gujarati Sign Virama
(0x00ae2, 0x00ae3,), # Gujarati Vowel Sign Voca..Gujarati Vowel Sign Voca
(0x00b01, 0x00b01,), # Oriya Sign Candrabindu ..Oriya Sign Candrabindu
(0x00b3c, 0x00b3c,), # Oriya Sign Nukta ..Oriya Sign Nukta
(0x00b3f, 0x00b3f,), # Oriya Vowel Sign I ..Oriya Vowel Sign I
(0x00b41, 0x00b44,), # Oriya Vowel Sign U ..Oriya Vowel Sign Vocalic
(0x00b4d, 0x00b4d,), # Oriya Sign Virama ..Oriya Sign Virama
(0x00b56, 0x00b56,), # Oriya Ai Length Mark ..Oriya Ai Length Mark
(0x00b62, 0x00b63,), # Oriya Vowel Sign Vocalic..Oriya Vowel Sign Vocalic
(0x00b82, 0x00b82,), # Tamil Sign Anusvara ..Tamil Sign Anusvara
(0x00bc0, 0x00bc0,), # Tamil Vowel Sign Ii ..Tamil Vowel Sign Ii
(0x00bcd, 0x00bcd,), # Tamil Sign Virama ..Tamil Sign Virama
(0x00c3e, 0x00c40,), # Telugu Vowel Sign Aa ..Telugu Vowel Sign Ii
(0x00c46, 0x00c48,), # Telugu Vowel Sign E ..Telugu Vowel Sign Ai
(0x00c4a, 0x00c4d,), # Telugu Vowel Sign O ..Telugu Sign Virama
(0x00c55, 0x00c56,), # Telugu Length Mark ..Telugu Ai Length Mark
(0x00c62, 0x00c63,), # Telugu Vowel Sign Vocali..Telugu Vowel Sign Vocali
(0x00cbc, 0x00cbc,), # Kannada Sign Nukta ..Kannada Sign Nukta
(0x00cbf, 0x00cbf,), # Kannada Vowel Sign I ..Kannada Vowel Sign I
(0x00cc6, 0x00cc6,), # Kannada Vowel Sign E ..Kannada Vowel Sign E
(0x00ccc, 0x00ccd,), # Kannada Vowel Sign Au ..Kannada Sign Virama
(0x00ce2, 0x00ce3,), # Kannada Vowel Sign Vocal..Kannada Vowel Sign Vocal
(0x00d41, 0x00d44,), # Malayalam Vowel Sign U ..Malayalam Vowel Sign Voc
(0x00d4d, 0x00d4d,), # Malayalam Sign Virama ..Malayalam Sign Virama
(0x00d62, 0x00d63,), # Malayalam Vowel Sign Voc..Malayalam Vowel Sign Voc
(0x00dca, 0x00dca,), # Sinhala Sign Al-lakuna ..Sinhala Sign Al-lakuna
(0x00dd2, 0x00dd4,), # Sinhala Vowel Sign Ketti..Sinhala Vowel Sign Ketti
(0x00dd6, 0x00dd6,), # Sinhala Vowel Sign Diga ..Sinhala Vowel Sign Diga
(0x00e31, 0x00e31,), # Thai Character Mai Han-a..Thai Character Mai Han-a
(0x00e34, 0x00e3a,), # Thai Character Sara I ..Thai Character Phinthu
(0x00e47, 0x00e4e,), # Thai Character Maitaikhu..Thai Character Yamakkan
(0x00eb1, 0x00eb1,), # Lao Vowel Sign Mai Kan ..Lao Vowel Sign Mai Kan
(0x00eb4, 0x00eb9,), # Lao Vowel Sign I ..Lao Vowel Sign Uu
(0x00ebb, 0x00ebc,), # Lao Vowel Sign Mai Kon ..Lao Semivowel Sign Lo
(0x00ec8, 0x00ecd,), # Lao Tone Mai Ek ..Lao Niggahita
(0x00f18, 0x00f19,), # Tibetan Astrological Sig..Tibetan Astrological Sig
(0x00f35, 0x00f35,), # Tibetan Mark Ngas Bzung ..Tibetan Mark Ngas Bzung
(0x00f37, 0x00f37,), # Tibetan Mark Ngas Bzung ..Tibetan Mark Ngas Bzung
(0x00f39, 0x00f39,), # Tibetan Mark Tsa -phru ..Tibetan Mark Tsa -phru
(0x00f71, 0x00f7e,), # Tibetan Vowel Sign Aa ..Tibetan Sign Rjes Su Nga
(0x00f80, 0x00f84,), # Tibetan Vowel Sign Rever..Tibetan Mark Halanta
(0x00f86, 0x00f87,), # Tibetan Sign Lci Rtags ..Tibetan Sign Yang Rtags
(0x00f8d, 0x00f97,), # Tibetan Subjoined Sign L..Tibetan Subjoined Letter
(0x00f99, 0x00fbc,), # Tibetan Subjoined Letter..Tibetan Subjoined Letter
(0x00fc6, 0x00fc6,), # Tibetan Symbol Padma Gda..Tibetan Symbol Padma Gda
(0x0102d, 0x01030,), # Myanmar Vowel Sign I ..Myanmar Vowel Sign Uu
(0x01032, 0x01037,), # Myanmar Vowel Sign Ai ..Myanmar Sign Dot Below
(0x01039, 0x0103a,), # Myanmar Sign Virama ..Myanmar Sign Asat
(0x0103d, 0x0103e,), # Myanmar Consonant Sign M..Myanmar Consonant Sign M
(0x01058, 0x01059,), # Myanmar Vowel Sign Vocal..Myanmar Vowel Sign Vocal
(0x0105e, 0x01060,), # Myanmar Consonant Sign M..Myanmar Consonant Sign M
(0x01071, 0x01074,), # Myanmar Vowel Sign Geba ..Myanmar Vowel Sign Kayah
(0x01082, 0x01082,), # Myanmar Consonant Sign S..Myanmar Consonant Sign S
(0x01085, 0x01086,), # Myanmar Vowel Sign Shan ..Myanmar Vowel Sign Shan
(0x0108d, 0x0108d,), # Myanmar Sign Shan Counci..Myanmar Sign Shan Counci
(0x0109d, 0x0109d,), # Myanmar Vowel Sign Aiton..Myanmar Vowel Sign Aiton
(0x0135d, 0x0135f,), # Ethiopic Combining Gemin..Ethiopic Combining Gemin
(0x01712, 0x01714,), # Tagalog Vowel Sign I ..Tagalog Sign Virama
(0x01732, 0x01734,), # Hanunoo Vowel Sign I ..Hanunoo Sign Pamudpod
(0x01752, 0x01753,), # Buhid Vowel Sign I ..Buhid Vowel Sign U
(0x01772, 0x01773,), # Tagbanwa Vowel Sign I ..Tagbanwa Vowel Sign U
(0x017b4, 0x017b5,), # Khmer Vowel Inherent Aq ..Khmer Vowel Inherent Aa
(0x017b7, 0x017bd,), # Khmer Vowel Sign I ..Khmer Vowel Sign Ua
(0x017c6, 0x017c6,), # Khmer Sign Nikahit ..Khmer Sign Nikahit
(0x017c9, 0x017d3,), # Khmer Sign Muusikatoan ..Khmer Sign Bathamasat
(0x017dd, 0x017dd,), # Khmer Sign Atthacan ..Khmer Sign Atthacan
(0x0180b, 0x0180d,), # Mongolian Free Variation..Mongolian Free Variation
(0x018a9, 0x018a9,), # Mongolian Letter Ali Gal..Mongolian Letter Ali Gal
(0x01920, 0x01922,), # Limbu Vowel Sign A ..Limbu Vowel Sign U
(0x01927, 0x01928,), # Limbu Vowel Sign E ..Limbu Vowel Sign O
(0x01932, 0x01932,), # Limbu Small Letter Anusv..Limbu Small Letter Anusv
(0x01939, 0x0193b,), # Limbu Sign Mukphreng ..Limbu Sign Sa-i
(0x01a17, 0x01a18,), # Buginese Vowel Sign I ..Buginese Vowel Sign U
(0x01a56, 0x01a56,), # Tai Tham Consonant Sign ..Tai Tham Consonant Sign
(0x01a58, 0x01a5e,), # Tai Tham Sign Mai Kang L..Tai Tham Consonant Sign
(0x01a60, 0x01a60,), # Tai Tham Sign Sakot ..Tai Tham Sign Sakot
(0x01a62, 0x01a62,), # Tai Tham Vowel Sign Mai ..Tai Tham Vowel Sign Mai
(0x01a65, 0x01a6c,), # Tai Tham Vowel Sign I ..Tai Tham Vowel Sign Oa B
(0x01a73, 0x01a7c,), # Tai Tham Vowel Sign Oa A..Tai Tham Sign Khuen-lue
(0x01a7f, 0x01a7f,), # Tai Tham Combining Crypt..Tai Tham Combining Crypt
(0x01b00, 0x01b03,), # Balinese Sign Ulu Ricem ..Balinese Sign Surang
(0x01b34, 0x01b34,), # Balinese Sign Rerekan ..Balinese Sign Rerekan
(0x01b36, 0x01b3a,), # Balinese Vowel Sign Ulu ..Balinese Vowel Sign Ra R
(0x01b3c, 0x01b3c,), # Balinese Vowel Sign La L..Balinese Vowel Sign La L
(0x01b42, 0x01b42,), # Balinese Vowel Sign Pepe..Balinese Vowel Sign Pepe
(0x01b6b, 0x01b73,), # Balinese Musical Symbol ..Balinese Musical Symbol
(0x01b80, 0x01b81,), # Sundanese Sign Panyecek ..Sundanese Sign Panglayar
(0x01ba2, 0x01ba5,), # Sundanese Consonant Sign..Sundanese Vowel Sign Pan
(0x01ba8, 0x01ba9,), # Sundanese Vowel Sign Pam..Sundanese Vowel Sign Pan
(0x01bab, 0x01bab,), # Sundanese Sign Virama ..Sundanese Sign Virama
(0x01be6, 0x01be6,), # Batak Sign Tompi ..Batak Sign Tompi
(0x01be8, 0x01be9,), # Batak Vowel Sign Pakpak ..Batak Vowel Sign Ee
(0x01bed, 0x01bed,), # Batak Vowel Sign Karo O ..Batak Vowel Sign Karo O
(0x01bef, 0x01bf1,), # Batak Vowel Sign U For S..Batak Consonant Sign H
(0x01c2c, 0x01c33,), # Lepcha Vowel Sign E ..Lepcha Consonant Sign T
(0x01c36, 0x01c37,), # Lepcha Sign Ran ..Lepcha Sign Nukta
(0x01cd0, 0x01cd2,), # Vedic Tone Karshana ..Vedic Tone Prenkha
(0x01cd4, 0x01ce0,), # Vedic Sign Yajurvedic Mi..Vedic Tone Rigvedic Kash
(0x01ce2, 0x01ce8,), # Vedic Sign Visarga Svari..Vedic Sign Visarga Anuda
(0x01ced, 0x01ced,), # Vedic Sign Tiryak ..Vedic Sign Tiryak
(0x01cf4, 0x01cf4,), # Vedic Tone Candra Above ..Vedic Tone Candra Above
(0x01dc0, 0x01de6,), # Combining Dotted Grave A..Combining Latin Small Le
(0x01dfc, 0x01dff,), # Combining Double Inverte..Combining Right Arrowhea
(0x020d0, 0x020f0,), # Combining Left Harpoon A..Combining Asterisk Above
(0x02cef, 0x02cf1,), # Coptic Combining Ni Abov..Coptic Combining Spiritu
(0x02d7f, 0x02d7f,), # Tifinagh Consonant Joine..Tifinagh Consonant Joine
(0x02de0, 0x02dff,), # Combining Cyrillic Lette..Combining Cyrillic Lette
(0x0302a, 0x0302d,), # Ideographic Level Tone M..Ideographic Entering Ton
(0x03099, 0x0309a,), # Combining Katakana-hirag..Combining Katakana-hirag
(0x0a66f, 0x0a672,), # Combining Cyrillic Vzmet..Combining Cyrillic Thous
(0x0a674, 0x0a67d,), # Combining Cyrillic Lette..Combining Cyrillic Payer
(0x0a69f, 0x0a69f,), # Combining Cyrillic Lette..Combining Cyrillic Lette
(0x0a6f0, 0x0a6f1,), # Bamum Combining Mark Koq..Bamum Combining Mark Tuk
(0x0a802, 0x0a802,), # Syloti Nagri Sign Dvisva..Syloti Nagri Sign Dvisva
(0x0a806, 0x0a806,), # Syloti Nagri Sign Hasant..Syloti Nagri Sign Hasant
(0x0a80b, 0x0a80b,), # Syloti Nagri Sign Anusva..Syloti Nagri Sign Anusva
(0x0a825, 0x0a826,), # Syloti Nagri Vowel Sign ..Syloti Nagri Vowel Sign
(0x0a8c4, 0x0a8c4,), # Saurashtra Sign Virama ..Saurashtra Sign Virama
(0x0a8e0, 0x0a8f1,), # Combining Devanagari Dig..Combining Devanagari Sig
(0x0a926, 0x0a92d,), # Kayah Li Vowel Ue ..Kayah Li Tone Calya Plop
(0x0a947, 0x0a951,), # Rejang Vowel Sign I ..Rejang Consonant Sign R
(0x0a980, 0x0a982,), # Javanese Sign Panyangga ..Javanese Sign Layar
(0x0a9b3, 0x0a9b3,), # Javanese Sign Cecak Telu..Javanese Sign Cecak Telu
(0x0a9b6, 0x0a9b9,), # Javanese Vowel Sign Wulu..Javanese Vowel Sign Suku
(0x0a9bc, 0x0a9bc,), # Javanese Vowel Sign Pepe..Javanese Vowel Sign Pepe
(0x0aa29, 0x0aa2e,), # Cham Vowel Sign Aa ..Cham Vowel Sign Oe
(0x0aa31, 0x0aa32,), # Cham Vowel Sign Au ..Cham Vowel Sign Ue
(0x0aa35, 0x0aa36,), # Cham Consonant Sign La ..Cham Consonant Sign Wa
(0x0aa43, 0x0aa43,), # Cham Consonant Sign Fina..Cham Consonant Sign Fina
(0x0aa4c, 0x0aa4c,), # Cham Consonant Sign Fina..Cham Consonant Sign Fina
(0x0aab0, 0x0aab0,), # Tai Viet Mai Kang ..Tai Viet Mai Kang
(0x0aab2, 0x0aab4,), # Tai Viet Vowel I ..Tai Viet Vowel U
(0x0aab7, 0x0aab8,), # Tai Viet Mai Khit ..Tai Viet Vowel Ia
(0x0aabe, 0x0aabf,), # Tai Viet Vowel Am ..Tai Viet Tone Mai Ek
(0x0aac1, 0x0aac1,), # Tai Viet Tone Mai Tho ..Tai Viet Tone Mai Tho
(0x0aaec, 0x0aaed,), # Meetei Mayek Vowel Sign ..Meetei Mayek Vowel Sign
(0x0aaf6, 0x0aaf6,), # Meetei Mayek Virama ..Meetei Mayek Virama
(0x0abe5, 0x0abe5,), # Meetei Mayek Vowel Sign ..Meetei Mayek Vowel Sign
(0x0abe8, 0x0abe8,), # Meetei Mayek Vowel Sign ..Meetei Mayek Vowel Sign
(0x0abed, 0x0abed,), # Meetei Mayek Apun Iyek ..Meetei Mayek Apun Iyek
(0x0fb1e, 0x0fb1e,), # Hebrew Point Judeo-spani..Hebrew Point Judeo-spani
(0x0fe00, 0x0fe0f,), # Variation Selector-1 ..Variation Selector-16
(0x0fe20, 0x0fe26,), # Combining Ligature Left ..Combining Conjoining Mac
(0x101fd, 0x101fd,), # Phaistos Disc Sign Combi..Phaistos Disc Sign Combi
(0x10a01, 0x10a03,), # Kharoshthi Vowel Sign I ..Kharoshthi Vowel Sign Vo
(0x10a05, 0x10a06,), # Kharoshthi Vowel Sign E ..Kharoshthi Vowel Sign O
(0x10a0c, 0x10a0f,), # Kharoshthi Vowel Length ..Kharoshthi Sign Visarga
(0x10a38, 0x10a3a,), # Kharoshthi Sign Bar Abov..Kharoshthi Sign Dot Belo
(0x10a3f, 0x10a3f,), # Kharoshthi Virama ..Kharoshthi Virama
(0x11001, 0x11001,), # Brahmi Sign Anusvara ..Brahmi Sign Anusvara
(0x11038, 0x11046,), # Brahmi Vowel Sign Aa ..Brahmi Virama
(0x11080, 0x11081,), # Kaithi Sign Candrabindu ..Kaithi Sign Anusvara
(0x110b3, 0x110b6,), # Kaithi Vowel Sign U ..Kaithi Vowel Sign Ai
(0x110b9, 0x110ba,), # Kaithi Sign Virama ..Kaithi Sign Nukta
(0x11100, 0x11102,), # Chakma Sign Candrabindu ..Chakma Sign Visarga
(0x11127, 0x1112b,), # Chakma Vowel Sign A ..Chakma Vowel Sign Uu
(0x1112d, 0x11134,), # Chakma Vowel Sign Ai ..Chakma Maayyaa
(0x11180, 0x11181,), # Sharada Sign Candrabindu..Sharada Sign Anusvara
(0x111b6, 0x111be,), # Sharada Vowel Sign U ..Sharada Vowel Sign O
(0x116ab, 0x116ab,), # Takri Sign Anusvara ..Takri Sign Anusvara
(0x116ad, 0x116ad,), # Takri Vowel Sign Aa ..Takri Vowel Sign Aa
(0x116b0, 0x116b5,), # Takri Vowel Sign U ..Takri Vowel Sign Au
(0x116b7, 0x116b7,), # Takri Sign Nukta ..Takri Sign Nukta
(0x16f8f, 0x16f92,), # Miao Tone Right ..Miao Tone Below
(0x1d167, 0x1d169,), # Musical Symbol Combining..Musical Symbol Combining
(0x1d17b, 0x1d182,), # Musical Symbol Combining..Musical Symbol Combining
(0x1d185, 0x1d18b,), # Musical Symbol Combining..Musical Symbol Combining
(0x1d1aa, 0x1d1ad,), # Musical Symbol Combining..Musical Symbol Combining
(0x1d242, 0x1d244,), # Combining Greek Musical ..Combining Greek Musical
(0xe0100, 0xe01ef,), # Variation Selector-17 ..Variation Selector-256
),
'6.2.0': (
# Source: DerivedGeneralCategory-6.2.0.txt
# Date: 2012-05-20, 00:42:34 GMT [MD]
#
(0x00300, 0x0036f,), # Combining Grave Accent ..Combining Latin Small Le
(0x00483, 0x00489,), # Combining Cyrillic Titlo..Combining Cyrillic Milli
(0x00591, 0x005bd,), # Hebrew Accent Etnahta ..Hebrew Point Meteg
(0x005bf, 0x005bf,), # Hebrew Point Rafe ..Hebrew Point Rafe
(0x005c1, 0x005c2,), # Hebrew Point Shin Dot ..Hebrew Point Sin Dot
(0x005c4, 0x005c5,), # Hebrew Mark Upper Dot ..Hebrew Mark Lower Dot
(0x005c7, 0x005c7,), # Hebrew Point Qamats Qata..Hebrew Point Qamats Qata
(0x00610, 0x0061a,), # Arabic Sign Sallallahou ..Arabic Small Kasra
(0x0064b, 0x0065f,), # Arabic Fathatan ..Arabic Wavy Hamza Below
(0x00670, 0x00670,), # Arabic Letter Superscrip..Arabic Letter Superscrip
(0x006d6, 0x006dc,), # Arabic Small High Ligatu..Arabic Small High Seen
(0x006df, 0x006e4,), # Arabic Small High Rounde..Arabic Small High Madda
(0x006e7, 0x006e8,), # Arabic Small High Yeh ..Arabic Small High Noon
(0x006ea, 0x006ed,), # Arabic Empty Centre Low ..Arabic Small Low Meem
(0x00711, 0x00711,), # Syriac Letter Superscrip..Syriac Letter Superscrip
(0x00730, 0x0074a,), # Syriac Pthaha Above ..Syriac Barrekh
(0x007a6, 0x007b0,), # Thaana Abafili ..Thaana Sukun
(0x007eb, 0x007f3,), # Nko Combining Short High..Nko Combining Double Dot
(0x00816, 0x00819,), # Samaritan Mark In ..Samaritan Mark Dagesh
(0x0081b, 0x00823,), # Samaritan Mark Epentheti..Samaritan Vowel Sign A
(0x00825, 0x00827,), # Samaritan Vowel Sign Sho..Samaritan Vowel Sign U
(0x00829, 0x0082d,), # Samaritan Vowel Sign Lon..Samaritan Mark Nequdaa
(0x00859, 0x0085b,), # Mandaic Affrication Mark..Mandaic Gemination Mark
(0x008e4, 0x008fe,), # Arabic Curly Fatha ..Arabic Damma With Dot
(0x00900, 0x00902,), # Devanagari Sign Inverted..Devanagari Sign Anusvara
(0x0093a, 0x0093a,), # Devanagari Vowel Sign Oe..Devanagari Vowel Sign Oe
(0x0093c, 0x0093c,), # Devanagari Sign Nukta ..Devanagari Sign Nukta
(0x00941, 0x00948,), # Devanagari Vowel Sign U ..Devanagari Vowel Sign Ai
(0x0094d, 0x0094d,), # Devanagari Sign Virama ..Devanagari Sign Virama
(0x00951, 0x00957,), # Devanagari Stress Sign U..Devanagari Vowel Sign Uu
(0x00962, 0x00963,), # Devanagari Vowel Sign Vo..Devanagari Vowel Sign Vo
(0x00981, 0x00981,), # Bengali Sign Candrabindu..Bengali Sign Candrabindu
(0x009bc, 0x009bc,), # Bengali Sign Nukta ..Bengali Sign Nukta
(0x009c1, 0x009c4,), # Bengali Vowel Sign U ..Bengali Vowel Sign Vocal
(0x009cd, 0x009cd,), # Bengali Sign Virama ..Bengali Sign Virama
(0x009e2, 0x009e3,), # Bengali Vowel Sign Vocal..Bengali Vowel Sign Vocal
(0x00a01, 0x00a02,), # Gurmukhi Sign Adak Bindi..Gurmukhi Sign Bindi
(0x00a3c, 0x00a3c,), # Gurmukhi Sign Nukta ..Gurmukhi Sign Nukta
(0x00a41, 0x00a42,), # Gurmukhi Vowel Sign U ..Gurmukhi Vowel Sign Uu
(0x00a47, 0x00a48,), # Gurmukhi Vowel Sign Ee ..Gurmukhi Vowel Sign Ai
(0x00a4b, 0x00a4d,), # Gurmukhi Vowel Sign Oo ..Gurmukhi Sign Virama
(0x00a51, 0x00a51,), # Gurmukhi Sign Udaat ..Gurmukhi Sign Udaat
(0x00a70, 0x00a71,), # Gurmukhi Tippi ..Gurmukhi Addak
(0x00a75, 0x00a75,), # Gurmukhi Sign Yakash ..Gurmukhi Sign Yakash
(0x00a81, 0x00a82,), # Gujarati Sign Candrabind..Gujarati Sign Anusvara
(0x00abc, 0x00abc,), # Gujarati Sign Nukta ..Gujarati Sign Nukta
(0x00ac1, 0x00ac5,), # Gujarati Vowel Sign U ..Gujarati Vowel Sign Cand
(0x00ac7, 0x00ac8,), # Gujarati Vowel Sign E ..Gujarati Vowel Sign Ai
(0x00acd, 0x00acd,), # Gujarati Sign Virama ..Gujarati Sign Virama
(0x00ae2, 0x00ae3,), # Gujarati Vowel Sign Voca..Gujarati Vowel Sign Voca
(0x00b01, 0x00b01,), # Oriya Sign Candrabindu ..Oriya Sign Candrabindu
(0x00b3c, 0x00b3c,), # Oriya Sign Nukta ..Oriya Sign Nukta
(0x00b3f, 0x00b3f,), # Oriya Vowel Sign I ..Oriya Vowel Sign I
(0x00b41, 0x00b44,), # Oriya Vowel Sign U ..Oriya Vowel Sign Vocalic
(0x00b4d, 0x00b4d,), # Oriya Sign Virama ..Oriya Sign Virama
(0x00b56, 0x00b56,), # Oriya Ai Length Mark ..Oriya Ai Length Mark
(0x00b62, 0x00b63,), # Oriya Vowel Sign Vocalic..Oriya Vowel Sign Vocalic
(0x00b82, 0x00b82,), # Tamil Sign Anusvara ..Tamil Sign Anusvara
(0x00bc0, 0x00bc0,), # Tamil Vowel Sign Ii ..Tamil Vowel Sign Ii
(0x00bcd, 0x00bcd,), # Tamil Sign Virama ..Tamil Sign Virama
(0x00c3e, 0x00c40,), # Telugu Vowel Sign Aa ..Telugu Vowel Sign Ii
(0x00c46, 0x00c48,), # Telugu Vowel Sign E ..Telugu Vowel Sign Ai
(0x00c4a, 0x00c4d,), # Telugu Vowel Sign O ..Telugu Sign Virama
(0x00c55, 0x00c56,), # Telugu Length Mark ..Telugu Ai Length Mark
(0x00c62, 0x00c63,), # Telugu Vowel Sign Vocali..Telugu Vowel Sign Vocali
(0x00cbc, 0x00cbc,), # Kannada Sign Nukta ..Kannada Sign Nukta
(0x00cbf, 0x00cbf,), # Kannada Vowel Sign I ..Kannada Vowel Sign I
(0x00cc6, 0x00cc6,), # Kannada Vowel Sign E ..Kannada Vowel Sign E
(0x00ccc, 0x00ccd,), # Kannada Vowel Sign Au ..Kannada Sign Virama
(0x00ce2, 0x00ce3,), # Kannada Vowel Sign Vocal..Kannada Vowel Sign Vocal
(0x00d41, 0x00d44,), # Malayalam Vowel Sign U ..Malayalam Vowel Sign Voc
(0x00d4d, 0x00d4d,), # Malayalam Sign Virama ..Malayalam Sign Virama
(0x00d62, 0x00d63,), # Malayalam Vowel Sign Voc..Malayalam Vowel Sign Voc
(0x00dca, 0x00dca,), # Sinhala Sign Al-lakuna ..Sinhala Sign Al-lakuna
(0x00dd2, 0x00dd4,), # Sinhala Vowel Sign Ketti..Sinhala Vowel Sign Ketti
(0x00dd6, 0x00dd6,), # Sinhala Vowel Sign Diga ..Sinhala Vowel Sign Diga
(0x00e31, 0x00e31,), # Thai Character Mai Han-a..Thai Character Mai Han-a
(0x00e34, 0x00e3a,), # Thai Character Sara I ..Thai Character Phinthu
(0x00e47, 0x00e4e,), # Thai Character Maitaikhu..Thai Character Yamakkan
(0x00eb1, 0x00eb1,), # Lao Vowel Sign Mai Kan ..Lao Vowel Sign Mai Kan
(0x00eb4, 0x00eb9,), # Lao Vowel Sign I ..Lao Vowel Sign Uu
(0x00ebb, 0x00ebc,), # Lao Vowel Sign Mai Kon ..Lao Semivowel Sign Lo
(0x00ec8, 0x00ecd,), # Lao Tone Mai Ek ..Lao Niggahita
(0x00f18, 0x00f19,), # Tibetan Astrological Sig..Tibetan Astrological Sig
(0x00f35, 0x00f35,), # Tibetan Mark Ngas Bzung ..Tibetan Mark Ngas Bzung
(0x00f37, 0x00f37,), # Tibetan Mark Ngas Bzung ..Tibetan Mark Ngas Bzung
(0x00f39, 0x00f39,), # Tibetan Mark Tsa -phru ..Tibetan Mark Tsa -phru
(0x00f71, 0x00f7e,), # Tibetan Vowel Sign Aa ..Tibetan Sign Rjes Su Nga
(0x00f80, 0x00f84,), # Tibetan Vowel Sign Rever..Tibetan Mark Halanta
(0x00f86, 0x00f87,), # Tibetan Sign Lci Rtags ..Tibetan Sign Yang Rtags
(0x00f8d, 0x00f97,), # Tibetan Subjoined Sign L..Tibetan Subjoined Letter
(0x00f99, 0x00fbc,), # Tibetan Subjoined Letter..Tibetan Subjoined Letter
(0x00fc6, 0x00fc6,), # Tibetan Symbol Padma Gda..Tibetan Symbol Padma Gda
(0x0102d, 0x01030,), # Myanmar Vowel Sign I ..Myanmar Vowel Sign Uu
(0x01032, 0x01037,), # Myanmar Vowel Sign Ai ..Myanmar Sign Dot Below
(0x01039, 0x0103a,), # Myanmar Sign Virama ..Myanmar Sign Asat
(0x0103d, 0x0103e,), # Myanmar Consonant Sign M..Myanmar Consonant Sign M
(0x01058, 0x01059,), # Myanmar Vowel Sign Vocal..Myanmar Vowel Sign Vocal
(0x0105e, 0x01060,), # Myanmar Consonant Sign M..Myanmar Consonant Sign M
(0x01071, 0x01074,), # Myanmar Vowel Sign Geba ..Myanmar Vowel Sign Kayah
(0x01082, 0x01082,), # Myanmar Consonant Sign S..Myanmar Consonant Sign S
(0x01085, 0x01086,), # Myanmar Vowel Sign Shan ..Myanmar Vowel Sign Shan
(0x0108d, 0x0108d,), # Myanmar Sign Shan Counci..Myanmar Sign Shan Counci
(0x0109d, 0x0109d,), # Myanmar Vowel Sign Aiton..Myanmar Vowel Sign Aiton
(0x0135d, 0x0135f,), # Ethiopic Combining Gemin..Ethiopic Combining Gemin
(0x01712, 0x01714,), # Tagalog Vowel Sign I ..Tagalog Sign Virama
(0x01732, 0x01734,), # Hanunoo Vowel Sign I ..Hanunoo Sign Pamudpod
(0x01752, 0x01753,), # Buhid Vowel Sign I ..Buhid Vowel Sign U
(0x01772, 0x01773,), # Tagbanwa Vowel Sign I ..Tagbanwa Vowel Sign U
(0x017b4, 0x017b5,), # Khmer Vowel Inherent Aq ..Khmer Vowel Inherent Aa
(0x017b7, 0x017bd,), # Khmer Vowel Sign I ..Khmer Vowel Sign Ua
(0x017c6, 0x017c6,), # Khmer Sign Nikahit ..Khmer Sign Nikahit
(0x017c9, 0x017d3,), # Khmer Sign Muusikatoan ..Khmer Sign Bathamasat
(0x017dd, 0x017dd,), # Khmer Sign Atthacan ..Khmer Sign Atthacan
(0x0180b, 0x0180d,), # Mongolian Free Variation..Mongolian Free Variation
(0x018a9, 0x018a9,), # Mongolian Letter Ali Gal..Mongolian Letter Ali Gal
(0x01920, 0x01922,), # Limbu Vowel Sign A ..Limbu Vowel Sign U
(0x01927, 0x01928,), # Limbu Vowel Sign E ..Limbu Vowel Sign O
(0x01932, 0x01932,), # Limbu Small Letter Anusv..Limbu Small Letter Anusv
(0x01939, 0x0193b,), # Limbu Sign Mukphreng ..Limbu Sign Sa-i
(0x01a17, 0x01a18,), # Buginese Vowel Sign I ..Buginese Vowel Sign U
(0x01a56, 0x01a56,), # Tai Tham Consonant Sign ..Tai Tham Consonant Sign
(0x01a58, 0x01a5e,), # Tai Tham Sign Mai Kang L..Tai Tham Consonant Sign
(0x01a60, 0x01a60,), # Tai Tham Sign Sakot ..Tai Tham Sign Sakot
(0x01a62, 0x01a62,), # Tai Tham Vowel Sign Mai ..Tai Tham Vowel Sign Mai
(0x01a65, 0x01a6c,), # Tai Tham Vowel Sign I ..Tai Tham Vowel Sign Oa B
(0x01a73, 0x01a7c,), # Tai Tham Vowel Sign Oa A..Tai Tham Sign Khuen-lue
(0x01a7f, 0x01a7f,), # Tai Tham Combining Crypt..Tai Tham Combining Crypt
(0x01b00, 0x01b03,), # Balinese Sign Ulu Ricem ..Balinese Sign Surang
(0x01b34, 0x01b34,), # Balinese Sign Rerekan ..Balinese Sign Rerekan
(0x01b36, 0x01b3a,), # Balinese Vowel Sign Ulu ..Balinese Vowel Sign Ra R
(0x01b3c, 0x01b3c,), # Balinese Vowel Sign La L..Balinese Vowel Sign La L
(0x01b42, 0x01b42,), # Balinese Vowel Sign Pepe..Balinese Vowel Sign Pepe
(0x01b6b, 0x01b73,), # Balinese Musical Symbol ..Balinese Musical Symbol
(0x01b80, 0x01b81,), # Sundanese Sign Panyecek ..Sundanese Sign Panglayar
(0x01ba2, 0x01ba5,), # Sundanese Consonant Sign..Sundanese Vowel Sign Pan
(0x01ba8, 0x01ba9,), # Sundanese Vowel Sign Pam..Sundanese Vowel Sign Pan
(0x01bab, 0x01bab,), # Sundanese Sign Virama ..Sundanese Sign Virama
(0x01be6, 0x01be6,), # Batak Sign Tompi ..Batak Sign Tompi
(0x01be8, 0x01be9,), # Batak Vowel Sign Pakpak ..Batak Vowel Sign Ee
(0x01bed, 0x01bed,), # Batak Vowel Sign Karo O ..Batak Vowel Sign Karo O
(0x01bef, 0x01bf1,), # Batak Vowel Sign U For S..Batak Consonant Sign H
(0x01c2c, 0x01c33,), # Lepcha Vowel Sign E ..Lepcha Consonant Sign T
(0x01c36, 0x01c37,), # Lepcha Sign Ran ..Lepcha Sign Nukta
(0x01cd0, 0x01cd2,), # Vedic Tone Karshana ..Vedic Tone Prenkha
(0x01cd4, 0x01ce0,), # Vedic Sign Yajurvedic Mi..Vedic Tone Rigvedic Kash
(0x01ce2, 0x01ce8,), # Vedic Sign Visarga Svari..Vedic Sign Visarga Anuda
(0x01ced, 0x01ced,), # Vedic Sign Tiryak ..Vedic Sign Tiryak
(0x01cf4, 0x01cf4,), # Vedic Tone Candra Above ..Vedic Tone Candra Above
(0x01dc0, 0x01de6,), # Combining Dotted Grave A..Combining Latin Small Le
(0x01dfc, 0x01dff,), # Combining Double Inverte..Combining Right Arrowhea
(0x020d0, 0x020f0,), # Combining Left Harpoon A..Combining Asterisk Above
(0x02cef, 0x02cf1,), # Coptic Combining Ni Abov..Coptic Combining Spiritu
(0x02d7f, 0x02d7f,), # Tifinagh Consonant Joine..Tifinagh Consonant Joine
(0x02de0, 0x02dff,), # Combining Cyrillic Lette..Combining Cyrillic Lette
(0x0302a, 0x0302d,), # Ideographic Level Tone M..Ideographic Entering Ton
(0x03099, 0x0309a,), # Combining Katakana-hirag..Combining Katakana-hirag
(0x0a66f, 0x0a672,), # Combining Cyrillic Vzmet..Combining Cyrillic Thous
(0x0a674, 0x0a67d,), # Combining Cyrillic Lette..Combining Cyrillic Payer
(0x0a69f, 0x0a69f,), # Combining Cyrillic Lette..Combining Cyrillic Lette
(0x0a6f0, 0x0a6f1,), # Bamum Combining Mark Koq..Bamum Combining Mark Tuk
(0x0a802, 0x0a802,), # Syloti Nagri Sign Dvisva..Syloti Nagri Sign Dvisva
(0x0a806, 0x0a806,), # Syloti Nagri Sign Hasant..Syloti Nagri Sign Hasant
(0x0a80b, 0x0a80b,), # Syloti Nagri Sign Anusva..Syloti Nagri Sign Anusva
(0x0a825, 0x0a826,), # Syloti Nagri Vowel Sign ..Syloti Nagri Vowel Sign
(0x0a8c4, 0x0a8c4,), # Saurashtra Sign Virama ..Saurashtra Sign Virama
(0x0a8e0, 0x0a8f1,), # Combining Devanagari Dig..Combining Devanagari Sig
(0x0a926, 0x0a92d,), # Kayah Li Vowel Ue ..Kayah Li Tone Calya Plop
(0x0a947, 0x0a951,), # Rejang Vowel Sign I ..Rejang Consonant Sign R
(0x0a980, 0x0a982,), # Javanese Sign Panyangga ..Javanese Sign Layar
(0x0a9b3, 0x0a9b3,), # Javanese Sign Cecak Telu..Javanese Sign Cecak Telu
(0x0a9b6, 0x0a9b9,), # Javanese Vowel Sign Wulu..Javanese Vowel Sign Suku
(0x0a9bc, 0x0a9bc,), # Javanese Vowel Sign Pepe..Javanese Vowel Sign Pepe
(0x0aa29, 0x0aa2e,), # Cham Vowel Sign Aa ..Cham Vowel Sign Oe
(0x0aa31, 0x0aa32,), # Cham Vowel Sign Au ..Cham Vowel Sign Ue
(0x0aa35, 0x0aa36,), # Cham Consonant Sign La ..Cham Consonant Sign Wa
(0x0aa43, 0x0aa43,), # Cham Consonant Sign Fina..Cham Consonant Sign Fina
(0x0aa4c, 0x0aa4c,), # Cham Consonant Sign Fina..Cham Consonant Sign Fina
(0x0aab0, 0x0aab0,), # Tai Viet Mai Kang ..Tai Viet Mai Kang
(0x0aab2, 0x0aab4,), # Tai Viet Vowel I ..Tai Viet Vowel U
(0x0aab7, 0x0aab8,), # Tai Viet Mai Khit ..Tai Viet Vowel Ia
(0x0aabe, 0x0aabf,), # Tai Viet Vowel Am ..Tai Viet Tone Mai Ek
(0x0aac1, 0x0aac1,), # Tai Viet Tone Mai Tho ..Tai Viet Tone Mai Tho
(0x0aaec, 0x0aaed,), # Meetei Mayek Vowel Sign ..Meetei Mayek Vowel Sign
(0x0aaf6, 0x0aaf6,), # Meetei Mayek Virama ..Meetei Mayek Virama
(0x0abe5, 0x0abe5,), # Meetei Mayek Vowel Sign ..Meetei Mayek Vowel Sign
(0x0abe8, 0x0abe8,), # Meetei Mayek Vowel Sign ..Meetei Mayek Vowel Sign
(0x0abed, 0x0abed,), # Meetei Mayek Apun Iyek ..Meetei Mayek Apun Iyek
(0x0fb1e, 0x0fb1e,), # Hebrew Point Judeo-spani..Hebrew Point Judeo-spani
(0x0fe00, 0x0fe0f,), # Variation Selector-1 ..Variation Selector-16
(0x0fe20, 0x0fe26,), # Combining Ligature Left ..Combining Conjoining Mac
(0x101fd, 0x101fd,), # Phaistos Disc Sign Combi..Phaistos Disc Sign Combi
(0x10a01, 0x10a03,), # Kharoshthi Vowel Sign I ..Kharoshthi Vowel Sign Vo
(0x10a05, 0x10a06,), # Kharoshthi Vowel Sign E ..Kharoshthi Vowel Sign O
(0x10a0c, 0x10a0f,), # Kharoshthi Vowel Length ..Kharoshthi Sign Visarga
(0x10a38, 0x10a3a,), # Kharoshthi Sign Bar Abov..Kharoshthi Sign Dot Belo
(0x10a3f, 0x10a3f,), # Kharoshthi Virama ..Kharoshthi Virama
(0x11001, 0x11001,), # Brahmi Sign Anusvara ..Brahmi Sign Anusvara
(0x11038, 0x11046,), # Brahmi Vowel Sign Aa ..Brahmi Virama
(0x11080, 0x11081,), # Kaithi Sign Candrabindu ..Kaithi Sign Anusvara
(0x110b3, 0x110b6,), # Kaithi Vowel Sign U ..Kaithi Vowel Sign Ai
(0x110b9, 0x110ba,), # Kaithi Sign Virama ..Kaithi Sign Nukta
(0x11100, 0x11102,), # Chakma Sign Candrabindu ..Chakma Sign Visarga
(0x11127, 0x1112b,), # Chakma Vowel Sign A ..Chakma Vowel Sign Uu
(0x1112d, 0x11134,), # Chakma Vowel Sign Ai ..Chakma Maayyaa
(0x11180, 0x11181,), # Sharada Sign Candrabindu..Sharada Sign Anusvara
(0x111b6, 0x111be,), # Sharada Vowel Sign U ..Sharada Vowel Sign O
(0x116ab, 0x116ab,), # Takri Sign Anusvara ..Takri Sign Anusvara
(0x116ad, 0x116ad,), # Takri Vowel Sign Aa ..Takri Vowel Sign Aa
(0x116b0, 0x116b5,), # Takri Vowel Sign U ..Takri Vowel Sign Au
(0x116b7, 0x116b7,), # Takri Sign Nukta ..Takri Sign Nukta
(0x16f8f, 0x16f92,), # Miao Tone Right ..Miao Tone Below
(0x1d167, 0x1d169,), # Musical Symbol Combining..Musical Symbol Combining
(0x1d17b, 0x1d182,), # Musical Symbol Combining..Musical Symbol Combining
(0x1d185, 0x1d18b,), # Musical Symbol Combining..Musical Symbol Combining
(0x1d1aa, 0x1d1ad,), # Musical Symbol Combining..Musical Symbol Combining
(0x1d242, 0x1d244,), # Combining Greek Musical ..Combining Greek Musical
(0xe0100, 0xe01ef,), # Variation Selector-17 ..Variation Selector-256
),
'6.3.0': (
# Source: DerivedGeneralCategory-6.3.0.txt
# Date: 2013-07-05, 14:08:45 GMT [MD]
#
(0x00300, 0x0036f,), # Combining Grave Accent ..Combining Latin Small Le
(0x00483, 0x00489,), # Combining Cyrillic Titlo..Combining Cyrillic Milli
(0x00591, 0x005bd,), # Hebrew Accent Etnahta ..Hebrew Point Meteg
(0x005bf, 0x005bf,), # Hebrew Point Rafe ..Hebrew Point Rafe
(0x005c1, 0x005c2,), # Hebrew Point Shin Dot ..Hebrew Point Sin Dot
(0x005c4, 0x005c5,), # Hebrew Mark Upper Dot ..Hebrew Mark Lower Dot
(0x005c7, 0x005c7,), # Hebrew Point Qamats Qata..Hebrew Point Qamats Qata
(0x00610, 0x0061a,), # Arabic Sign Sallallahou ..Arabic Small Kasra
(0x0064b, 0x0065f,), # Arabic Fathatan ..Arabic Wavy Hamza Below
(0x00670, 0x00670,), # Arabic Letter Superscrip..Arabic Letter Superscrip
(0x006d6, 0x006dc,), # Arabic Small High Ligatu..Arabic Small High Seen
(0x006df, 0x006e4,), # Arabic Small High Rounde..Arabic Small High Madda
(0x006e7, 0x006e8,), # Arabic Small High Yeh ..Arabic Small High Noon
(0x006ea, 0x006ed,), # Arabic Empty Centre Low ..Arabic Small Low Meem
(0x00711, 0x00711,), # Syriac Letter Superscrip..Syriac Letter Superscrip
(0x00730, 0x0074a,), # Syriac Pthaha Above ..Syriac Barrekh
(0x007a6, 0x007b0,), # Thaana Abafili ..Thaana Sukun
(0x007eb, 0x007f3,), # Nko Combining Short High..Nko Combining Double Dot
(0x00816, 0x00819,), # Samaritan Mark In ..Samaritan Mark Dagesh
(0x0081b, 0x00823,), # Samaritan Mark Epentheti..Samaritan Vowel Sign A
(0x00825, 0x00827,), # Samaritan Vowel Sign Sho..Samaritan Vowel Sign U
(0x00829, 0x0082d,), # Samaritan Vowel Sign Lon..Samaritan Mark Nequdaa
(0x00859, 0x0085b,), # Mandaic Affrication Mark..Mandaic Gemination Mark
(0x008e4, 0x008fe,), # Arabic Curly Fatha ..Arabic Damma With Dot
(0x00900, 0x00902,), # Devanagari Sign Inverted..Devanagari Sign Anusvara
(0x0093a, 0x0093a,), # Devanagari Vowel Sign Oe..Devanagari Vowel Sign Oe
(0x0093c, 0x0093c,), # Devanagari Sign Nukta ..Devanagari Sign Nukta
(0x00941, 0x00948,), # Devanagari Vowel Sign U ..Devanagari Vowel Sign Ai
(0x0094d, 0x0094d,), # Devanagari Sign Virama ..Devanagari Sign Virama
(0x00951, 0x00957,), # Devanagari Stress Sign U..Devanagari Vowel Sign Uu
(0x00962, 0x00963,), # Devanagari Vowel Sign Vo..Devanagari Vowel Sign Vo
(0x00981, 0x00981,), # Bengali Sign Candrabindu..Bengali Sign Candrabindu
(0x009bc, 0x009bc,), # Bengali Sign Nukta ..Bengali Sign Nukta
(0x009c1, 0x009c4,), # Bengali Vowel Sign U ..Bengali Vowel Sign Vocal
(0x009cd, 0x009cd,), # Bengali Sign Virama ..Bengali Sign Virama
(0x009e2, 0x009e3,), # Bengali Vowel Sign Vocal..Bengali Vowel Sign Vocal
(0x00a01, 0x00a02,), # Gurmukhi Sign Adak Bindi..Gurmukhi Sign Bindi
(0x00a3c, 0x00a3c,), # Gurmukhi Sign Nukta ..Gurmukhi Sign Nukta
(0x00a41, 0x00a42,), # Gurmukhi Vowel Sign U ..Gurmukhi Vowel Sign Uu
(0x00a47, 0x00a48,), # Gurmukhi Vowel Sign Ee ..Gurmukhi Vowel Sign Ai
(0x00a4b, 0x00a4d,), # Gurmukhi Vowel Sign Oo ..Gurmukhi Sign Virama
(0x00a51, 0x00a51,), # Gurmukhi Sign Udaat ..Gurmukhi Sign Udaat
(0x00a70, 0x00a71,), # Gurmukhi Tippi ..Gurmukhi Addak
(0x00a75, 0x00a75,), # Gurmukhi Sign Yakash ..Gurmukhi Sign Yakash
(0x00a81, 0x00a82,), # Gujarati Sign Candrabind..Gujarati Sign Anusvara
(0x00abc, 0x00abc,), # Gujarati Sign Nukta ..Gujarati Sign Nukta
(0x00ac1, 0x00ac5,), # Gujarati Vowel Sign U ..Gujarati Vowel Sign Cand
(0x00ac7, 0x00ac8,), # Gujarati Vowel Sign E ..Gujarati Vowel Sign Ai
(0x00acd, 0x00acd,), # Gujarati Sign Virama ..Gujarati Sign Virama
(0x00ae2, 0x00ae3,), # Gujarati Vowel Sign Voca..Gujarati Vowel Sign Voca
(0x00b01, 0x00b01,), # Oriya Sign Candrabindu ..Oriya Sign Candrabindu
(0x00b3c, 0x00b3c,), # Oriya Sign Nukta ..Oriya Sign Nukta
(0x00b3f, 0x00b3f,), # Oriya Vowel Sign I ..Oriya Vowel Sign I
(0x00b41, 0x00b44,), # Oriya Vowel Sign U ..Oriya Vowel Sign Vocalic
(0x00b4d, 0x00b4d,), # Oriya Sign Virama ..Oriya Sign Virama
(0x00b56, 0x00b56,), # Oriya Ai Length Mark ..Oriya Ai Length Mark
(0x00b62, 0x00b63,), # Oriya Vowel Sign Vocalic..Oriya Vowel Sign Vocalic
(0x00b82, 0x00b82,), # Tamil Sign Anusvara ..Tamil Sign Anusvara
(0x00bc0, 0x00bc0,), # Tamil Vowel Sign Ii ..Tamil Vowel Sign Ii
(0x00bcd, 0x00bcd,), # Tamil Sign Virama ..Tamil Sign Virama
(0x00c3e, 0x00c40,), # Telugu Vowel Sign Aa ..Telugu Vowel Sign Ii
(0x00c46, 0x00c48,), # Telugu Vowel Sign E ..Telugu Vowel Sign Ai
(0x00c4a, 0x00c4d,), # Telugu Vowel Sign O ..Telugu Sign Virama
(0x00c55, 0x00c56,), # Telugu Length Mark ..Telugu Ai Length Mark
(0x00c62, 0x00c63,), # Telugu Vowel Sign Vocali..Telugu Vowel Sign Vocali
(0x00cbc, 0x00cbc,), # Kannada Sign Nukta ..Kannada Sign Nukta
(0x00cbf, 0x00cbf,), # Kannada Vowel Sign I ..Kannada Vowel Sign I
(0x00cc6, 0x00cc6,), # Kannada Vowel Sign E ..Kannada Vowel Sign E
(0x00ccc, 0x00ccd,), # Kannada Vowel Sign Au ..Kannada Sign Virama
(0x00ce2, 0x00ce3,), # Kannada Vowel Sign Vocal..Kannada Vowel Sign Vocal
(0x00d41, 0x00d44,), # Malayalam Vowel Sign U ..Malayalam Vowel Sign Voc
(0x00d4d, 0x00d4d,), # Malayalam Sign Virama ..Malayalam Sign Virama
(0x00d62, 0x00d63,), # Malayalam Vowel Sign Voc..Malayalam Vowel Sign Voc
(0x00dca, 0x00dca,), # Sinhala Sign Al-lakuna ..Sinhala Sign Al-lakuna
(0x00dd2, 0x00dd4,), # Sinhala Vowel Sign Ketti..Sinhala Vowel Sign Ketti
(0x00dd6, 0x00dd6,), # Sinhala Vowel Sign Diga ..Sinhala Vowel Sign Diga
(0x00e31, 0x00e31,), # Thai Character Mai Han-a..Thai Character Mai Han-a
(0x00e34, 0x00e3a,), # Thai Character Sara I ..Thai Character Phinthu
(0x00e47, 0x00e4e,), # Thai Character Maitaikhu..Thai Character Yamakkan
(0x00eb1, 0x00eb1,), # Lao Vowel Sign Mai Kan ..Lao Vowel Sign Mai Kan
(0x00eb4, 0x00eb9,), # Lao Vowel Sign I ..Lao Vowel Sign Uu
(0x00ebb, 0x00ebc,), # Lao Vowel Sign Mai Kon ..Lao Semivowel Sign Lo
(0x00ec8, 0x00ecd,), # Lao Tone Mai Ek ..Lao Niggahita
(0x00f18, 0x00f19,), # Tibetan Astrological Sig..Tibetan Astrological Sig
(0x00f35, 0x00f35,), # Tibetan Mark Ngas Bzung ..Tibetan Mark Ngas Bzung
(0x00f37, 0x00f37,), # Tibetan Mark Ngas Bzung ..Tibetan Mark Ngas Bzung
(0x00f39, 0x00f39,), # Tibetan Mark Tsa -phru ..Tibetan Mark Tsa -phru
(0x00f71, 0x00f7e,), # Tibetan Vowel Sign Aa ..Tibetan Sign Rjes Su Nga
(0x00f80, 0x00f84,), # Tibetan Vowel Sign Rever..Tibetan Mark Halanta
(0x00f86, 0x00f87,), # Tibetan Sign Lci Rtags ..Tibetan Sign Yang Rtags
(0x00f8d, 0x00f97,), # Tibetan Subjoined Sign L..Tibetan Subjoined Letter
(0x00f99, 0x00fbc,), # Tibetan Subjoined Letter..Tibetan Subjoined Letter
(0x00fc6, 0x00fc6,), # Tibetan Symbol Padma Gda..Tibetan Symbol Padma Gda
(0x0102d, 0x01030,), # Myanmar Vowel Sign I ..Myanmar Vowel Sign Uu
(0x01032, 0x01037,), # Myanmar Vowel Sign Ai ..Myanmar Sign Dot Below
(0x01039, 0x0103a,), # Myanmar Sign Virama ..Myanmar Sign Asat
(0x0103d, 0x0103e,), # Myanmar Consonant Sign M..Myanmar Consonant Sign M
(0x01058, 0x01059,), # Myanmar Vowel Sign Vocal..Myanmar Vowel Sign Vocal
(0x0105e, 0x01060,), # Myanmar Consonant Sign M..Myanmar Consonant Sign M
(0x01071, 0x01074,), # Myanmar Vowel Sign Geba ..Myanmar Vowel Sign Kayah
(0x01082, 0x01082,), # Myanmar Consonant Sign S..Myanmar Consonant Sign S
(0x01085, 0x01086,), # Myanmar Vowel Sign Shan ..Myanmar Vowel Sign Shan
(0x0108d, 0x0108d,), # Myanmar Sign Shan Counci..Myanmar Sign Shan Counci
(0x0109d, 0x0109d,), # Myanmar Vowel Sign Aiton..Myanmar Vowel Sign Aiton
(0x0135d, 0x0135f,), # Ethiopic Combining Gemin..Ethiopic Combining Gemin
(0x01712, 0x01714,), # Tagalog Vowel Sign I ..Tagalog Sign Virama
(0x01732, 0x01734,), # Hanunoo Vowel Sign I ..Hanunoo Sign Pamudpod
(0x01752, 0x01753,), # Buhid Vowel Sign I ..Buhid Vowel Sign U
(0x01772, 0x01773,), # Tagbanwa Vowel Sign I ..Tagbanwa Vowel Sign U
(0x017b4, 0x017b5,), # Khmer Vowel Inherent Aq ..Khmer Vowel Inherent Aa
(0x017b7, 0x017bd,), # Khmer Vowel Sign I ..Khmer Vowel Sign Ua
(0x017c6, 0x017c6,), # Khmer Sign Nikahit ..Khmer Sign Nikahit
(0x017c9, 0x017d3,), # Khmer Sign Muusikatoan ..Khmer Sign Bathamasat
(0x017dd, 0x017dd,), # Khmer Sign Atthacan ..Khmer Sign Atthacan
(0x0180b, 0x0180d,), # Mongolian Free Variation..Mongolian Free Variation
(0x018a9, 0x018a9,), # Mongolian Letter Ali Gal..Mongolian Letter Ali Gal
(0x01920, 0x01922,), # Limbu Vowel Sign A ..Limbu Vowel Sign U
(0x01927, 0x01928,), # Limbu Vowel Sign E ..Limbu Vowel Sign O
(0x01932, 0x01932,), # Limbu Small Letter Anusv..Limbu Small Letter Anusv
(0x01939, 0x0193b,), # Limbu Sign Mukphreng ..Limbu Sign Sa-i
(0x01a17, 0x01a18,), # Buginese Vowel Sign I ..Buginese Vowel Sign U
(0x01a1b, 0x01a1b,), # Buginese Vowel Sign Ae ..Buginese Vowel Sign Ae
(0x01a56, 0x01a56,), # Tai Tham Consonant Sign ..Tai Tham Consonant Sign
(0x01a58, 0x01a5e,), # Tai Tham Sign Mai Kang L..Tai Tham Consonant Sign
(0x01a60, 0x01a60,), # Tai Tham Sign Sakot ..Tai Tham Sign Sakot
(0x01a62, 0x01a62,), # Tai Tham Vowel Sign Mai ..Tai Tham Vowel Sign Mai
(0x01a65, 0x01a6c,), # Tai Tham Vowel Sign I ..Tai Tham Vowel Sign Oa B
(0x01a73, 0x01a7c,), # Tai Tham Vowel Sign Oa A..Tai Tham Sign Khuen-lue
(0x01a7f, 0x01a7f,), # Tai Tham Combining Crypt..Tai Tham Combining Crypt
(0x01b00, 0x01b03,), # Balinese Sign Ulu Ricem ..Balinese Sign Surang
(0x01b34, 0x01b34,), # Balinese Sign Rerekan ..Balinese Sign Rerekan
(0x01b36, 0x01b3a,), # Balinese Vowel Sign Ulu ..Balinese Vowel Sign Ra R
(0x01b3c, 0x01b3c,), # Balinese Vowel Sign La L..Balinese Vowel Sign La L
(0x01b42, 0x01b42,), # Balinese Vowel Sign Pepe..Balinese Vowel Sign Pepe
(0x01b6b, 0x01b73,), # Balinese Musical Symbol ..Balinese Musical Symbol
(0x01b80, 0x01b81,), # Sundanese Sign Panyecek ..Sundanese Sign Panglayar
(0x01ba2, 0x01ba5,), # Sundanese Consonant Sign..Sundanese Vowel Sign Pan
(0x01ba8, 0x01ba9,), # Sundanese Vowel Sign Pam..Sundanese Vowel Sign Pan
(0x01bab, 0x01bab,), # Sundanese Sign Virama ..Sundanese Sign Virama
(0x01be6, 0x01be6,), # Batak Sign Tompi ..Batak Sign Tompi
(0x01be8, 0x01be9,), # Batak Vowel Sign Pakpak ..Batak Vowel Sign Ee
(0x01bed, 0x01bed,), # Batak Vowel Sign Karo O ..Batak Vowel Sign Karo O
(0x01bef, 0x01bf1,), # Batak Vowel Sign U For S..Batak Consonant Sign H
(0x01c2c, 0x01c33,), # Lepcha Vowel Sign E ..Lepcha Consonant Sign T
(0x01c36, 0x01c37,), # Lepcha Sign Ran ..Lepcha Sign Nukta
(0x01cd0, 0x01cd2,), # Vedic Tone Karshana ..Vedic Tone Prenkha
(0x01cd4, 0x01ce0,), # Vedic Sign Yajurvedic Mi..Vedic Tone Rigvedic Kash
(0x01ce2, 0x01ce8,), # Vedic Sign Visarga Svari..Vedic Sign Visarga Anuda
(0x01ced, 0x01ced,), # Vedic Sign Tiryak ..Vedic Sign Tiryak
(0x01cf4, 0x01cf4,), # Vedic Tone Candra Above ..Vedic Tone Candra Above
(0x01dc0, 0x01de6,), # Combining Dotted Grave A..Combining Latin Small Le
(0x01dfc, 0x01dff,), # Combining Double Inverte..Combining Right Arrowhea
(0x020d0, 0x020f0,), # Combining Left Harpoon A..Combining Asterisk Above
(0x02cef, 0x02cf1,), # Coptic Combining Ni Abov..Coptic Combining Spiritu
(0x02d7f, 0x02d7f,), # Tifinagh Consonant Joine..Tifinagh Consonant Joine
(0x02de0, 0x02dff,), # Combining Cyrillic Lette..Combining Cyrillic Lette
(0x0302a, 0x0302d,), # Ideographic Level Tone M..Ideographic Entering Ton
(0x03099, 0x0309a,), # Combining Katakana-hirag..Combining Katakana-hirag
(0x0a66f, 0x0a672,), # Combining Cyrillic Vzmet..Combining Cyrillic Thous
(0x0a674, 0x0a67d,), # Combining Cyrillic Lette..Combining Cyrillic Payer
(0x0a69f, 0x0a69f,), # Combining Cyrillic Lette..Combining Cyrillic Lette
(0x0a6f0, 0x0a6f1,), # Bamum Combining Mark Koq..Bamum Combining Mark Tuk
(0x0a802, 0x0a802,), # Syloti Nagri Sign Dvisva..Syloti Nagri Sign Dvisva
(0x0a806, 0x0a806,), # Syloti Nagri Sign Hasant..Syloti Nagri Sign Hasant
(0x0a80b, 0x0a80b,), # Syloti Nagri Sign Anusva..Syloti Nagri Sign Anusva
(0x0a825, 0x0a826,), # Syloti Nagri Vowel Sign ..Syloti Nagri Vowel Sign
(0x0a8c4, 0x0a8c4,), # Saurashtra Sign Virama ..Saurashtra Sign Virama
(0x0a8e0, 0x0a8f1,), # Combining Devanagari Dig..Combining Devanagari Sig
(0x0a926, 0x0a92d,), # Kayah Li Vowel Ue ..Kayah Li Tone Calya Plop
(0x0a947, 0x0a951,), # Rejang Vowel Sign I ..Rejang Consonant Sign R
(0x0a980, 0x0a982,), # Javanese Sign Panyangga ..Javanese Sign Layar
(0x0a9b3, 0x0a9b3,), # Javanese Sign Cecak Telu..Javanese Sign Cecak Telu
(0x0a9b6, 0x0a9b9,), # Javanese Vowel Sign Wulu..Javanese Vowel Sign Suku
(0x0a9bc, 0x0a9bc,), # Javanese Vowel Sign Pepe..Javanese Vowel Sign Pepe
(0x0aa29, 0x0aa2e,), # Cham Vowel Sign Aa ..Cham Vowel Sign Oe
(0x0aa31, 0x0aa32,), # Cham Vowel Sign Au ..Cham Vowel Sign Ue
(0x0aa35, 0x0aa36,), # Cham Consonant Sign La ..Cham Consonant Sign Wa
(0x0aa43, 0x0aa43,), # Cham Consonant Sign Fina..Cham Consonant Sign Fina
(0x0aa4c, 0x0aa4c,), # Cham Consonant Sign Fina..Cham Consonant Sign Fina
(0x0aab0, 0x0aab0,), # Tai Viet Mai Kang ..Tai Viet Mai Kang
(0x0aab2, 0x0aab4,), # Tai Viet Vowel I ..Tai Viet Vowel U
(0x0aab7, 0x0aab8,), # Tai Viet Mai Khit ..Tai Viet Vowel Ia
(0x0aabe, 0x0aabf,), # Tai Viet Vowel Am ..Tai Viet Tone Mai Ek
(0x0aac1, 0x0aac1,), # Tai Viet Tone Mai Tho ..Tai Viet Tone Mai Tho
(0x0aaec, 0x0aaed,), # Meetei Mayek Vowel Sign ..Meetei Mayek Vowel Sign
(0x0aaf6, 0x0aaf6,), # Meetei Mayek Virama ..Meetei Mayek Virama
(0x0abe5, 0x0abe5,), # Meetei Mayek Vowel Sign ..Meetei Mayek Vowel Sign
(0x0abe8, 0x0abe8,), # Meetei Mayek Vowel Sign ..Meetei Mayek Vowel Sign
(0x0abed, 0x0abed,), # Meetei Mayek Apun Iyek ..Meetei Mayek Apun Iyek
(0x0fb1e, 0x0fb1e,), # Hebrew Point Judeo-spani..Hebrew Point Judeo-spani
(0x0fe00, 0x0fe0f,), # Variation Selector-1 ..Variation Selector-16
(0x0fe20, 0x0fe26,), # Combining Ligature Left ..Combining Conjoining Mac
(0x101fd, 0x101fd,), # Phaistos Disc Sign Combi..Phaistos Disc Sign Combi
(0x10a01, 0x10a03,), # Kharoshthi Vowel Sign I ..Kharoshthi Vowel Sign Vo
(0x10a05, 0x10a06,), # Kharoshthi Vowel Sign E ..Kharoshthi Vowel Sign O
(0x10a0c, 0x10a0f,), # Kharoshthi Vowel Length ..Kharoshthi Sign Visarga
(0x10a38, 0x10a3a,), # Kharoshthi Sign Bar Abov..Kharoshthi Sign Dot Belo
(0x10a3f, 0x10a3f,), # Kharoshthi Virama ..Kharoshthi Virama
(0x11001, 0x11001,), # Brahmi Sign Anusvara ..Brahmi Sign Anusvara
(0x11038, 0x11046,), # Brahmi Vowel Sign Aa ..Brahmi Virama
(0x11080, 0x11081,), # Kaithi Sign Candrabindu ..Kaithi Sign Anusvara
(0x110b3, 0x110b6,), # Kaithi Vowel Sign U ..Kaithi Vowel Sign Ai
(0x110b9, 0x110ba,), # Kaithi Sign Virama ..Kaithi Sign Nukta
(0x11100, 0x11102,), # Chakma Sign Candrabindu ..Chakma Sign Visarga
(0x11127, 0x1112b,), # Chakma Vowel Sign A ..Chakma Vowel Sign Uu
(0x1112d, 0x11134,), # Chakma Vowel Sign Ai ..Chakma Maayyaa
(0x11180, 0x11181,), # Sharada Sign Candrabindu..Sharada Sign Anusvara
(0x111b6, 0x111be,), # Sharada Vowel Sign U ..Sharada Vowel Sign O
(0x116ab, 0x116ab,), # Takri Sign Anusvara ..Takri Sign Anusvara
(0x116ad, 0x116ad,), # Takri Vowel Sign Aa ..Takri Vowel Sign Aa
(0x116b0, 0x116b5,), # Takri Vowel Sign U ..Takri Vowel Sign Au
(0x116b7, 0x116b7,), # Takri Sign Nukta ..Takri Sign Nukta
(0x16f8f, 0x16f92,), # Miao Tone Right ..Miao Tone Below
(0x1d167, 0x1d169,), # Musical Symbol Combining..Musical Symbol Combining
(0x1d17b, 0x1d182,), # Musical Symbol Combining..Musical Symbol Combining
(0x1d185, 0x1d18b,), # Musical Symbol Combining..Musical Symbol Combining
(0x1d1aa, 0x1d1ad,), # Musical Symbol Combining..Musical Symbol Combining
(0x1d242, 0x1d244,), # Combining Greek Musical ..Combining Greek Musical
(0xe0100, 0xe01ef,), # Variation Selector-17 ..Variation Selector-256
),
'7.0.0': (
# Source: DerivedGeneralCategory-7.0.0.txt
# Date: 2014-02-07, 18:42:12 GMT [MD]
#
(0x00300, 0x0036f,), # Combining Grave Accent ..Combining Latin Small Le
(0x00483, 0x00489,), # Combining Cyrillic Titlo..Combining Cyrillic Milli
(0x00591, 0x005bd,), # Hebrew Accent Etnahta ..Hebrew Point Meteg
(0x005bf, 0x005bf,), # Hebrew Point Rafe ..Hebrew Point Rafe
(0x005c1, 0x005c2,), # Hebrew Point Shin Dot ..Hebrew Point Sin Dot
(0x005c4, 0x005c5,), # Hebrew Mark Upper Dot ..Hebrew Mark Lower Dot
(0x005c7, 0x005c7,), # Hebrew Point Qamats Qata..Hebrew Point Qamats Qata
(0x00610, 0x0061a,), # Arabic Sign Sallallahou ..Arabic Small Kasra
(0x0064b, 0x0065f,), # Arabic Fathatan ..Arabic Wavy Hamza Below
(0x00670, 0x00670,), # Arabic Letter Superscrip..Arabic Letter Superscrip
(0x006d6, 0x006dc,), # Arabic Small High Ligatu..Arabic Small High Seen
(0x006df, 0x006e4,), # Arabic Small High Rounde..Arabic Small High Madda
(0x006e7, 0x006e8,), # Arabic Small High Yeh ..Arabic Small High Noon
(0x006ea, 0x006ed,), # Arabic Empty Centre Low ..Arabic Small Low Meem
(0x00711, 0x00711,), # Syriac Letter Superscrip..Syriac Letter Superscrip
(0x00730, 0x0074a,), # Syriac Pthaha Above ..Syriac Barrekh
(0x007a6, 0x007b0,), # Thaana Abafili ..Thaana Sukun
(0x007eb, 0x007f3,), # Nko Combining Short High..Nko Combining Double Dot
(0x00816, 0x00819,), # Samaritan Mark In ..Samaritan Mark Dagesh
(0x0081b, 0x00823,), # Samaritan Mark Epentheti..Samaritan Vowel Sign A
(0x00825, 0x00827,), # Samaritan Vowel Sign Sho..Samaritan Vowel Sign U
(0x00829, 0x0082d,), # Samaritan Vowel Sign Lon..Samaritan Mark Nequdaa
(0x00859, 0x0085b,), # Mandaic Affrication Mark..Mandaic Gemination Mark
(0x008e4, 0x00902,), # Arabic Curly Fatha ..Devanagari Sign Anusvara
(0x0093a, 0x0093a,), # Devanagari Vowel Sign Oe..Devanagari Vowel Sign Oe
(0x0093c, 0x0093c,), # Devanagari Sign Nukta ..Devanagari Sign Nukta
(0x00941, 0x00948,), # Devanagari Vowel Sign U ..Devanagari Vowel Sign Ai
(0x0094d, 0x0094d,), # Devanagari Sign Virama ..Devanagari Sign Virama
(0x00951, 0x00957,), # Devanagari Stress Sign U..Devanagari Vowel Sign Uu
(0x00962, 0x00963,), # Devanagari Vowel Sign Vo..Devanagari Vowel Sign Vo
(0x00981, 0x00981,), # Bengali Sign Candrabindu..Bengali Sign Candrabindu
(0x009bc, 0x009bc,), # Bengali Sign Nukta ..Bengali Sign Nukta
(0x009c1, 0x009c4,), # Bengali Vowel Sign U ..Bengali Vowel Sign Vocal
(0x009cd, 0x009cd,), # Bengali Sign Virama ..Bengali Sign Virama
(0x009e2, 0x009e3,), # Bengali Vowel Sign Vocal..Bengali Vowel Sign Vocal
(0x00a01, 0x00a02,), # Gurmukhi Sign Adak Bindi..Gurmukhi Sign Bindi
(0x00a3c, 0x00a3c,), # Gurmukhi Sign Nukta ..Gurmukhi Sign Nukta
(0x00a41, 0x00a42,), # Gurmukhi Vowel Sign U ..Gurmukhi Vowel Sign Uu
(0x00a47, 0x00a48,), # Gurmukhi Vowel Sign Ee ..Gurmukhi Vowel Sign Ai
(0x00a4b, 0x00a4d,), # Gurmukhi Vowel Sign Oo ..Gurmukhi Sign Virama
(0x00a51, 0x00a51,), # Gurmukhi Sign Udaat ..Gurmukhi Sign Udaat
(0x00a70, 0x00a71,), # Gurmukhi Tippi ..Gurmukhi Addak
(0x00a75, 0x00a75,), # Gurmukhi Sign Yakash ..Gurmukhi Sign Yakash
(0x00a81, 0x00a82,), # Gujarati Sign Candrabind..Gujarati Sign Anusvara
(0x00abc, 0x00abc,), # Gujarati Sign Nukta ..Gujarati Sign Nukta
(0x00ac1, 0x00ac5,), # Gujarati Vowel Sign U ..Gujarati Vowel Sign Cand
(0x00ac7, 0x00ac8,), # Gujarati Vowel Sign E ..Gujarati Vowel Sign Ai
(0x00acd, 0x00acd,), # Gujarati Sign Virama ..Gujarati Sign Virama
(0x00ae2, 0x00ae3,), # Gujarati Vowel Sign Voca..Gujarati Vowel Sign Voca
(0x00b01, 0x00b01,), # Oriya Sign Candrabindu ..Oriya Sign Candrabindu
(0x00b3c, 0x00b3c,), # Oriya Sign Nukta ..Oriya Sign Nukta
(0x00b3f, 0x00b3f,), # Oriya Vowel Sign I ..Oriya Vowel Sign I
(0x00b41, 0x00b44,), # Oriya Vowel Sign U ..Oriya Vowel Sign Vocalic
(0x00b4d, 0x00b4d,), # Oriya Sign Virama ..Oriya Sign Virama
(0x00b56, 0x00b56,), # Oriya Ai Length Mark ..Oriya Ai Length Mark
(0x00b62, 0x00b63,), # Oriya Vowel Sign Vocalic..Oriya Vowel Sign Vocalic
(0x00b82, 0x00b82,), # Tamil Sign Anusvara ..Tamil Sign Anusvara
(0x00bc0, 0x00bc0,), # Tamil Vowel Sign Ii ..Tamil Vowel Sign Ii
(0x00bcd, 0x00bcd,), # Tamil Sign Virama ..Tamil Sign Virama
(0x00c00, 0x00c00,), # Telugu Sign Combining Ca..Telugu Sign Combining Ca
(0x00c3e, 0x00c40,), # Telugu Vowel Sign Aa ..Telugu Vowel Sign Ii
(0x00c46, 0x00c48,), # Telugu Vowel Sign E ..Telugu Vowel Sign Ai
(0x00c4a, 0x00c4d,), # Telugu Vowel Sign O ..Telugu Sign Virama
(0x00c55, 0x00c56,), # Telugu Length Mark ..Telugu Ai Length Mark
(0x00c62, 0x00c63,), # Telugu Vowel Sign Vocali..Telugu Vowel Sign Vocali
(0x00c81, 0x00c81,), # Kannada Sign Candrabindu..Kannada Sign Candrabindu
(0x00cbc, 0x00cbc,), # Kannada Sign Nukta ..Kannada Sign Nukta
(0x00cbf, 0x00cbf,), # Kannada Vowel Sign I ..Kannada Vowel Sign I
(0x00cc6, 0x00cc6,), # Kannada Vowel Sign E ..Kannada Vowel Sign E
(0x00ccc, 0x00ccd,), # Kannada Vowel Sign Au ..Kannada Sign Virama
(0x00ce2, 0x00ce3,), # Kannada Vowel Sign Vocal..Kannada Vowel Sign Vocal
(0x00d01, 0x00d01,), # Malayalam Sign Candrabin..Malayalam Sign Candrabin
(0x00d41, 0x00d44,), # Malayalam Vowel Sign U ..Malayalam Vowel Sign Voc
(0x00d4d, 0x00d4d,), # Malayalam Sign Virama ..Malayalam Sign Virama
(0x00d62, 0x00d63,), # Malayalam Vowel Sign Voc..Malayalam Vowel Sign Voc
(0x00dca, 0x00dca,), # Sinhala Sign Al-lakuna ..Sinhala Sign Al-lakuna
(0x00dd2, 0x00dd4,), # Sinhala Vowel Sign Ketti..Sinhala Vowel Sign Ketti
(0x00dd6, 0x00dd6,), # Sinhala Vowel Sign Diga ..Sinhala Vowel Sign Diga
(0x00e31, 0x00e31,), # Thai Character Mai Han-a..Thai Character Mai Han-a
(0x00e34, 0x00e3a,), # Thai Character Sara I ..Thai Character Phinthu
(0x00e47, 0x00e4e,), # Thai Character Maitaikhu..Thai Character Yamakkan
(0x00eb1, 0x00eb1,), # Lao Vowel Sign Mai Kan ..Lao Vowel Sign Mai Kan
(0x00eb4, 0x00eb9,), # Lao Vowel Sign I ..Lao Vowel Sign Uu
(0x00ebb, 0x00ebc,), # Lao Vowel Sign Mai Kon ..Lao Semivowel Sign Lo
(0x00ec8, 0x00ecd,), # Lao Tone Mai Ek ..Lao Niggahita
(0x00f18, 0x00f19,), # Tibetan Astrological Sig..Tibetan Astrological Sig
(0x00f35, 0x00f35,), # Tibetan Mark Ngas Bzung ..Tibetan Mark Ngas Bzung
(0x00f37, 0x00f37,), # Tibetan Mark Ngas Bzung ..Tibetan Mark Ngas Bzung
(0x00f39, 0x00f39,), # Tibetan Mark Tsa -phru ..Tibetan Mark Tsa -phru
(0x00f71, 0x00f7e,), # Tibetan Vowel Sign Aa ..Tibetan Sign Rjes Su Nga
(0x00f80, 0x00f84,), # Tibetan Vowel Sign Rever..Tibetan Mark Halanta
(0x00f86, 0x00f87,), # Tibetan Sign Lci Rtags ..Tibetan Sign Yang Rtags
(0x00f8d, 0x00f97,), # Tibetan Subjoined Sign L..Tibetan Subjoined Letter
(0x00f99, 0x00fbc,), # Tibetan Subjoined Letter..Tibetan Subjoined Letter
(0x00fc6, 0x00fc6,), # Tibetan Symbol Padma Gda..Tibetan Symbol Padma Gda
(0x0102d, 0x01030,), # Myanmar Vowel Sign I ..Myanmar Vowel Sign Uu
(0x01032, 0x01037,), # Myanmar Vowel Sign Ai ..Myanmar Sign Dot Below
(0x01039, 0x0103a,), # Myanmar Sign Virama ..Myanmar Sign Asat
(0x0103d, 0x0103e,), # Myanmar Consonant Sign M..Myanmar Consonant Sign M
(0x01058, 0x01059,), # Myanmar Vowel Sign Vocal..Myanmar Vowel Sign Vocal
(0x0105e, 0x01060,), # Myanmar Consonant Sign M..Myanmar Consonant Sign M
(0x01071, 0x01074,), # Myanmar Vowel Sign Geba ..Myanmar Vowel Sign Kayah
(0x01082, 0x01082,), # Myanmar Consonant Sign S..Myanmar Consonant Sign S
(0x01085, 0x01086,), # Myanmar Vowel Sign Shan ..Myanmar Vowel Sign Shan
(0x0108d, 0x0108d,), # Myanmar Sign Shan Counci..Myanmar Sign Shan Counci
(0x0109d, 0x0109d,), # Myanmar Vowel Sign Aiton..Myanmar Vowel Sign Aiton
(0x0135d, 0x0135f,), # Ethiopic Combining Gemin..Ethiopic Combining Gemin
(0x01712, 0x01714,), # Tagalog Vowel Sign I ..Tagalog Sign Virama
(0x01732, 0x01734,), # Hanunoo Vowel Sign I ..Hanunoo Sign Pamudpod
(0x01752, 0x01753,), # Buhid Vowel Sign I ..Buhid Vowel Sign U
(0x01772, 0x01773,), # Tagbanwa Vowel Sign I ..Tagbanwa Vowel Sign U
(0x017b4, 0x017b5,), # Khmer Vowel Inherent Aq ..Khmer Vowel Inherent Aa
(0x017b7, 0x017bd,), # Khmer Vowel Sign I ..Khmer Vowel Sign Ua
(0x017c6, 0x017c6,), # Khmer Sign Nikahit ..Khmer Sign Nikahit
(0x017c9, 0x017d3,), # Khmer Sign Muusikatoan ..Khmer Sign Bathamasat
(0x017dd, 0x017dd,), # Khmer Sign Atthacan ..Khmer Sign Atthacan
(0x0180b, 0x0180d,), # Mongolian Free Variation..Mongolian Free Variation
(0x018a9, 0x018a9,), # Mongolian Letter Ali Gal..Mongolian Letter Ali Gal
(0x01920, 0x01922,), # Limbu Vowel Sign A ..Limbu Vowel Sign U
(0x01927, 0x01928,), # Limbu Vowel Sign E ..Limbu Vowel Sign O
(0x01932, 0x01932,), # Limbu Small Letter Anusv..Limbu Small Letter Anusv
(0x01939, 0x0193b,), # Limbu Sign Mukphreng ..Limbu Sign Sa-i
(0x01a17, 0x01a18,), # Buginese Vowel Sign I ..Buginese Vowel Sign U
(0x01a1b, 0x01a1b,), # Buginese Vowel Sign Ae ..Buginese Vowel Sign Ae
(0x01a56, 0x01a56,), # Tai Tham Consonant Sign ..Tai Tham Consonant Sign
(0x01a58, 0x01a5e,), # Tai Tham Sign Mai Kang L..Tai Tham Consonant Sign
(0x01a60, 0x01a60,), # Tai Tham Sign Sakot ..Tai Tham Sign Sakot
(0x01a62, 0x01a62,), # Tai Tham Vowel Sign Mai ..Tai Tham Vowel Sign Mai
(0x01a65, 0x01a6c,), # Tai Tham Vowel Sign I ..Tai Tham Vowel Sign Oa B
(0x01a73, 0x01a7c,), # Tai Tham Vowel Sign Oa A..Tai Tham Sign Khuen-lue
(0x01a7f, 0x01a7f,), # Tai Tham Combining Crypt..Tai Tham Combining Crypt
(0x01ab0, 0x01abe,), # Combining Doubled Circum..Combining Parentheses Ov
(0x01b00, 0x01b03,), # Balinese Sign Ulu Ricem ..Balinese Sign Surang
(0x01b34, 0x01b34,), # Balinese Sign Rerekan ..Balinese Sign Rerekan
(0x01b36, 0x01b3a,), # Balinese Vowel Sign Ulu ..Balinese Vowel Sign Ra R
(0x01b3c, 0x01b3c,), # Balinese Vowel Sign La L..Balinese Vowel Sign La L
(0x01b42, 0x01b42,), # Balinese Vowel Sign Pepe..Balinese Vowel Sign Pepe
(0x01b6b, 0x01b73,), # Balinese Musical Symbol ..Balinese Musical Symbol
(0x01b80, 0x01b81,), # Sundanese Sign Panyecek ..Sundanese Sign Panglayar
(0x01ba2, 0x01ba5,), # Sundanese Consonant Sign..Sundanese Vowel Sign Pan
(0x01ba8, 0x01ba9,), # Sundanese Vowel Sign Pam..Sundanese Vowel Sign Pan
(0x01bab, 0x01bad,), # Sundanese Sign Virama ..Sundanese Consonant Sign
(0x01be6, 0x01be6,), # Batak Sign Tompi ..Batak Sign Tompi
(0x01be8, 0x01be9,), # Batak Vowel Sign Pakpak ..Batak Vowel Sign Ee
(0x01bed, 0x01bed,), # Batak Vowel Sign Karo O ..Batak Vowel Sign Karo O
(0x01bef, 0x01bf1,), # Batak Vowel Sign U For S..Batak Consonant Sign H
(0x01c2c, 0x01c33,), # Lepcha Vowel Sign E ..Lepcha Consonant Sign T
(0x01c36, 0x01c37,), # Lepcha Sign Ran ..Lepcha Sign Nukta
(0x01cd0, 0x01cd2,), # Vedic Tone Karshana ..Vedic Tone Prenkha
(0x01cd4, 0x01ce0,), # Vedic Sign Yajurvedic Mi..Vedic Tone Rigvedic Kash
(0x01ce2, 0x01ce8,), # Vedic Sign Visarga Svari..Vedic Sign Visarga Anuda
(0x01ced, 0x01ced,), # Vedic Sign Tiryak ..Vedic Sign Tiryak
(0x01cf4, 0x01cf4,), # Vedic Tone Candra Above ..Vedic Tone Candra Above
(0x01cf8, 0x01cf9,), # Vedic Tone Ring Above ..Vedic Tone Double Ring A
(0x01dc0, 0x01df5,), # Combining Dotted Grave A..Combining Up Tack Above
(0x01dfc, 0x01dff,), # Combining Double Inverte..Combining Right Arrowhea
(0x020d0, 0x020f0,), # Combining Left Harpoon A..Combining Asterisk Above
(0x02cef, 0x02cf1,), # Coptic Combining Ni Abov..Coptic Combining Spiritu
(0x02d7f, 0x02d7f,), # Tifinagh Consonant Joine..Tifinagh Consonant Joine
(0x02de0, 0x02dff,), # Combining Cyrillic Lette..Combining Cyrillic Lette
(0x0302a, 0x0302d,), # Ideographic Level Tone M..Ideographic Entering Ton
(0x03099, 0x0309a,), # Combining Katakana-hirag..Combining Katakana-hirag
(0x0a66f, 0x0a672,), # Combining Cyrillic Vzmet..Combining Cyrillic Thous
(0x0a674, 0x0a67d,), # Combining Cyrillic Lette..Combining Cyrillic Payer
(0x0a69f, 0x0a69f,), # Combining Cyrillic Lette..Combining Cyrillic Lette
(0x0a6f0, 0x0a6f1,), # Bamum Combining Mark Koq..Bamum Combining Mark Tuk
(0x0a802, 0x0a802,), # Syloti Nagri Sign Dvisva..Syloti Nagri Sign Dvisva
(0x0a806, 0x0a806,), # Syloti Nagri Sign Hasant..Syloti Nagri Sign Hasant
(0x0a80b, 0x0a80b,), # Syloti Nagri Sign Anusva..Syloti Nagri Sign Anusva
(0x0a825, 0x0a826,), # Syloti Nagri Vowel Sign ..Syloti Nagri Vowel Sign
(0x0a8c4, 0x0a8c4,), # Saurashtra Sign Virama ..Saurashtra Sign Virama
(0x0a8e0, 0x0a8f1,), # Combining Devanagari Dig..Combining Devanagari Sig
(0x0a926, 0x0a92d,), # Kayah Li Vowel Ue ..Kayah Li Tone Calya Plop
(0x0a947, 0x0a951,), # Rejang Vowel Sign I ..Rejang Consonant Sign R
(0x0a980, 0x0a982,), # Javanese Sign Panyangga ..Javanese Sign Layar
(0x0a9b3, 0x0a9b3,), # Javanese Sign Cecak Telu..Javanese Sign Cecak Telu
(0x0a9b6, 0x0a9b9,), # Javanese Vowel Sign Wulu..Javanese Vowel Sign Suku
(0x0a9bc, 0x0a9bc,), # Javanese Vowel Sign Pepe..Javanese Vowel Sign Pepe
(0x0a9e5, 0x0a9e5,), # Myanmar Sign Shan Saw ..Myanmar Sign Shan Saw
(0x0aa29, 0x0aa2e,), # Cham Vowel Sign Aa ..Cham Vowel Sign Oe
(0x0aa31, 0x0aa32,), # Cham Vowel Sign Au ..Cham Vowel Sign Ue
(0x0aa35, 0x0aa36,), # Cham Consonant Sign La ..Cham Consonant Sign Wa
(0x0aa43, 0x0aa43,), # Cham Consonant Sign Fina..Cham Consonant Sign Fina
(0x0aa4c, 0x0aa4c,), # Cham Consonant Sign Fina..Cham Consonant Sign Fina
(0x0aa7c, 0x0aa7c,), # Myanmar Sign Tai Laing T..Myanmar Sign Tai Laing T
(0x0aab0, 0x0aab0,), # Tai Viet Mai Kang ..Tai Viet Mai Kang
(0x0aab2, 0x0aab4,), # Tai Viet Vowel I ..Tai Viet Vowel U
(0x0aab7, 0x0aab8,), # Tai Viet Mai Khit ..Tai Viet Vowel Ia
(0x0aabe, 0x0aabf,), # Tai Viet Vowel Am ..Tai Viet Tone Mai Ek
(0x0aac1, 0x0aac1,), # Tai Viet Tone Mai Tho ..Tai Viet Tone Mai Tho
(0x0aaec, 0x0aaed,), # Meetei Mayek Vowel Sign ..Meetei Mayek Vowel Sign
(0x0aaf6, 0x0aaf6,), # Meetei Mayek Virama ..Meetei Mayek Virama
(0x0abe5, 0x0abe5,), # Meetei Mayek Vowel Sign ..Meetei Mayek Vowel Sign
(0x0abe8, 0x0abe8,), # Meetei Mayek Vowel Sign ..Meetei Mayek Vowel Sign
(0x0abed, 0x0abed,), # Meetei Mayek Apun Iyek ..Meetei Mayek Apun Iyek
(0x0fb1e, 0x0fb1e,), # Hebrew Point Judeo-spani..Hebrew Point Judeo-spani
(0x0fe00, 0x0fe0f,), # Variation Selector-1 ..Variation Selector-16
(0x0fe20, 0x0fe2d,), # Combining Ligature Left ..Combining Conjoining Mac
(0x101fd, 0x101fd,), # Phaistos Disc Sign Combi..Phaistos Disc Sign Combi
(0x102e0, 0x102e0,), # Coptic Epact Thousands M..Coptic Epact Thousands M
(0x10376, 0x1037a,), # Combining Old Permic Let..Combining Old Permic Let
(0x10a01, 0x10a03,), # Kharoshthi Vowel Sign I ..Kharoshthi Vowel Sign Vo
(0x10a05, 0x10a06,), # Kharoshthi Vowel Sign E ..Kharoshthi Vowel Sign O
(0x10a0c, 0x10a0f,), # Kharoshthi Vowel Length ..Kharoshthi Sign Visarga
(0x10a38, 0x10a3a,), # Kharoshthi Sign Bar Abov..Kharoshthi Sign Dot Belo
(0x10a3f, 0x10a3f,), # Kharoshthi Virama ..Kharoshthi Virama
(0x10ae5, 0x10ae6,), # Manichaean Abbreviation ..Manichaean Abbreviation
(0x11001, 0x11001,), # Brahmi Sign Anusvara ..Brahmi Sign Anusvara
(0x11038, 0x11046,), # Brahmi Vowel Sign Aa ..Brahmi Virama
(0x1107f, 0x11081,), # Brahmi Number Joiner ..Kaithi Sign Anusvara
(0x110b3, 0x110b6,), # Kaithi Vowel Sign U ..Kaithi Vowel Sign Ai
(0x110b9, 0x110ba,), # Kaithi Sign Virama ..Kaithi Sign Nukta
(0x11100, 0x11102,), # Chakma Sign Candrabindu ..Chakma Sign Visarga
(0x11127, 0x1112b,), # Chakma Vowel Sign A ..Chakma Vowel Sign Uu
(0x1112d, 0x11134,), # Chakma Vowel Sign Ai ..Chakma Maayyaa
(0x11173, 0x11173,), # Mahajani Sign Nukta ..Mahajani Sign Nukta
(0x11180, 0x11181,), # Sharada Sign Candrabindu..Sharada Sign Anusvara
(0x111b6, 0x111be,), # Sharada Vowel Sign U ..Sharada Vowel Sign O
(0x1122f, 0x11231,), # Khojki Vowel Sign U ..Khojki Vowel Sign Ai
(0x11234, 0x11234,), # Khojki Sign Anusvara ..Khojki Sign Anusvara
(0x11236, 0x11237,), # Khojki Sign Nukta ..Khojki Sign Shadda
(0x112df, 0x112df,), # Khudawadi Sign Anusvara ..Khudawadi Sign Anusvara
(0x112e3, 0x112ea,), # Khudawadi Vowel Sign U ..Khudawadi Sign Virama
(0x11301, 0x11301,), # Grantha Sign Candrabindu..Grantha Sign Candrabindu
(0x1133c, 0x1133c,), # Grantha Sign Nukta ..Grantha Sign Nukta
(0x11340, 0x11340,), # Grantha Vowel Sign Ii ..Grantha Vowel Sign Ii
(0x11366, 0x1136c,), # Combining Grantha Digit ..Combining Grantha Digit
(0x11370, 0x11374,), # Combining Grantha Letter..Combining Grantha Letter
(0x114b3, 0x114b8,), # Tirhuta Vowel Sign U ..Tirhuta Vowel Sign Vocal
(0x114ba, 0x114ba,), # Tirhuta Vowel Sign Short..Tirhuta Vowel Sign Short
(0x114bf, 0x114c0,), # Tirhuta Sign Candrabindu..Tirhuta Sign Anusvara
(0x114c2, 0x114c3,), # Tirhuta Sign Virama ..Tirhuta Sign Nukta
(0x115b2, 0x115b5,), # Siddham Vowel Sign U ..Siddham Vowel Sign Vocal
(0x115bc, 0x115bd,), # Siddham Sign Candrabindu..Siddham Sign Anusvara
(0x115bf, 0x115c0,), # Siddham Sign Virama ..Siddham Sign Nukta
(0x11633, 0x1163a,), # Modi Vowel Sign U ..Modi Vowel Sign Ai
(0x1163d, 0x1163d,), # Modi Sign Anusvara ..Modi Sign Anusvara
(0x1163f, 0x11640,), # Modi Sign Virama ..Modi Sign Ardhacandra
(0x116ab, 0x116ab,), # Takri Sign Anusvara ..Takri Sign Anusvara
(0x116ad, 0x116ad,), # Takri Vowel Sign Aa ..Takri Vowel Sign Aa
(0x116b0, 0x116b5,), # Takri Vowel Sign U ..Takri Vowel Sign Au
(0x116b7, 0x116b7,), # Takri Sign Nukta ..Takri Sign Nukta
(0x16af0, 0x16af4,), # Bassa Vah Combining High..Bassa Vah Combining High
(0x16b30, 0x16b36,), # Pahawh Hmong Mark Cim Tu..Pahawh Hmong Mark Cim Ta
(0x16f8f, 0x16f92,), # Miao Tone Right ..Miao Tone Below
(0x1bc9d, 0x1bc9e,), # Duployan Thick Letter Se..Duployan Double Mark
(0x1d167, 0x1d169,), # Musical Symbol Combining..Musical Symbol Combining
(0x1d17b, 0x1d182,), # Musical Symbol Combining..Musical Symbol Combining
(0x1d185, 0x1d18b,), # Musical Symbol Combining..Musical Symbol Combining
(0x1d1aa, 0x1d1ad,), # Musical Symbol Combining..Musical Symbol Combining
(0x1d242, 0x1d244,), # Combining Greek Musical ..Combining Greek Musical
(0x1e8d0, 0x1e8d6,), # Mende Kikakui Combining ..Mende Kikakui Combining
(0xe0100, 0xe01ef,), # Variation Selector-17 ..Variation Selector-256
),
'8.0.0': (
# Source: DerivedGeneralCategory-8.0.0.txt
# Date: 2015-02-13, 13:47:11 GMT [MD]
#
(0x00300, 0x0036f,), # Combining Grave Accent ..Combining Latin Small Le
(0x00483, 0x00489,), # Combining Cyrillic Titlo..Combining Cyrillic Milli
(0x00591, 0x005bd,), # Hebrew Accent Etnahta ..Hebrew Point Meteg
(0x005bf, 0x005bf,), # Hebrew Point Rafe ..Hebrew Point Rafe
(0x005c1, 0x005c2,), # Hebrew Point Shin Dot ..Hebrew Point Sin Dot
(0x005c4, 0x005c5,), # Hebrew Mark Upper Dot ..Hebrew Mark Lower Dot
(0x005c7, 0x005c7,), # Hebrew Point Qamats Qata..Hebrew Point Qamats Qata
(0x00610, 0x0061a,), # Arabic Sign Sallallahou ..Arabic Small Kasra
(0x0064b, 0x0065f,), # Arabic Fathatan ..Arabic Wavy Hamza Below
(0x00670, 0x00670,), # Arabic Letter Superscrip..Arabic Letter Superscrip
(0x006d6, 0x006dc,), # Arabic Small High Ligatu..Arabic Small High Seen
(0x006df, 0x006e4,), # Arabic Small High Rounde..Arabic Small High Madda
(0x006e7, 0x006e8,), # Arabic Small High Yeh ..Arabic Small High Noon
(0x006ea, 0x006ed,), # Arabic Empty Centre Low ..Arabic Small Low Meem
(0x00711, 0x00711,), # Syriac Letter Superscrip..Syriac Letter Superscrip
(0x00730, 0x0074a,), # Syriac Pthaha Above ..Syriac Barrekh
(0x007a6, 0x007b0,), # Thaana Abafili ..Thaana Sukun
(0x007eb, 0x007f3,), # Nko Combining Short High..Nko Combining Double Dot
(0x00816, 0x00819,), # Samaritan Mark In ..Samaritan Mark Dagesh
(0x0081b, 0x00823,), # Samaritan Mark Epentheti..Samaritan Vowel Sign A
(0x00825, 0x00827,), # Samaritan Vowel Sign Sho..Samaritan Vowel Sign U
(0x00829, 0x0082d,), # Samaritan Vowel Sign Lon..Samaritan Mark Nequdaa
(0x00859, 0x0085b,), # Mandaic Affrication Mark..Mandaic Gemination Mark
(0x008e3, 0x00902,), # Arabic Turned Damma Belo..Devanagari Sign Anusvara
(0x0093a, 0x0093a,), # Devanagari Vowel Sign Oe..Devanagari Vowel Sign Oe
(0x0093c, 0x0093c,), # Devanagari Sign Nukta ..Devanagari Sign Nukta
(0x00941, 0x00948,), # Devanagari Vowel Sign U ..Devanagari Vowel Sign Ai
(0x0094d, 0x0094d,), # Devanagari Sign Virama ..Devanagari Sign Virama
(0x00951, 0x00957,), # Devanagari Stress Sign U..Devanagari Vowel Sign Uu
(0x00962, 0x00963,), # Devanagari Vowel Sign Vo..Devanagari Vowel Sign Vo
(0x00981, 0x00981,), # Bengali Sign Candrabindu..Bengali Sign Candrabindu
(0x009bc, 0x009bc,), # Bengali Sign Nukta ..Bengali Sign Nukta
(0x009c1, 0x009c4,), # Bengali Vowel Sign U ..Bengali Vowel Sign Vocal
(0x009cd, 0x009cd,), # Bengali Sign Virama ..Bengali Sign Virama
(0x009e2, 0x009e3,), # Bengali Vowel Sign Vocal..Bengali Vowel Sign Vocal
(0x00a01, 0x00a02,), # Gurmukhi Sign Adak Bindi..Gurmukhi Sign Bindi
(0x00a3c, 0x00a3c,), # Gurmukhi Sign Nukta ..Gurmukhi Sign Nukta
(0x00a41, 0x00a42,), # Gurmukhi Vowel Sign U ..Gurmukhi Vowel Sign Uu
(0x00a47, 0x00a48,), # Gurmukhi Vowel Sign Ee ..Gurmukhi Vowel Sign Ai
(0x00a4b, 0x00a4d,), # Gurmukhi Vowel Sign Oo ..Gurmukhi Sign Virama
(0x00a51, 0x00a51,), # Gurmukhi Sign Udaat ..Gurmukhi Sign Udaat
(0x00a70, 0x00a71,), # Gurmukhi Tippi ..Gurmukhi Addak
(0x00a75, 0x00a75,), # Gurmukhi Sign Yakash ..Gurmukhi Sign Yakash
(0x00a81, 0x00a82,), # Gujarati Sign Candrabind..Gujarati Sign Anusvara
(0x00abc, 0x00abc,), # Gujarati Sign Nukta ..Gujarati Sign Nukta
(0x00ac1, 0x00ac5,), # Gujarati Vowel Sign U ..Gujarati Vowel Sign Cand
(0x00ac7, 0x00ac8,), # Gujarati Vowel Sign E ..Gujarati Vowel Sign Ai
(0x00acd, 0x00acd,), # Gujarati Sign Virama ..Gujarati Sign Virama
(0x00ae2, 0x00ae3,), # Gujarati Vowel Sign Voca..Gujarati Vowel Sign Voca
(0x00b01, 0x00b01,), # Oriya Sign Candrabindu ..Oriya Sign Candrabindu
(0x00b3c, 0x00b3c,), # Oriya Sign Nukta ..Oriya Sign Nukta
(0x00b3f, 0x00b3f,), # Oriya Vowel Sign I ..Oriya Vowel Sign I
(0x00b41, 0x00b44,), # Oriya Vowel Sign U ..Oriya Vowel Sign Vocalic
(0x00b4d, 0x00b4d,), # Oriya Sign Virama ..Oriya Sign Virama
(0x00b56, 0x00b56,), # Oriya Ai Length Mark ..Oriya Ai Length Mark
(0x00b62, 0x00b63,), # Oriya Vowel Sign Vocalic..Oriya Vowel Sign Vocalic
(0x00b82, 0x00b82,), # Tamil Sign Anusvara ..Tamil Sign Anusvara
(0x00bc0, 0x00bc0,), # Tamil Vowel Sign Ii ..Tamil Vowel Sign Ii
(0x00bcd, 0x00bcd,), # Tamil Sign Virama ..Tamil Sign Virama
(0x00c00, 0x00c00,), # Telugu Sign Combining Ca..Telugu Sign Combining Ca
(0x00c3e, 0x00c40,), # Telugu Vowel Sign Aa ..Telugu Vowel Sign Ii
(0x00c46, 0x00c48,), # Telugu Vowel Sign E ..Telugu Vowel Sign Ai
(0x00c4a, 0x00c4d,), # Telugu Vowel Sign O ..Telugu Sign Virama
(0x00c55, 0x00c56,), # Telugu Length Mark ..Telugu Ai Length Mark
(0x00c62, 0x00c63,), # Telugu Vowel Sign Vocali..Telugu Vowel Sign Vocali
(0x00c81, 0x00c81,), # Kannada Sign Candrabindu..Kannada Sign Candrabindu
(0x00cbc, 0x00cbc,), # Kannada Sign Nukta ..Kannada Sign Nukta
(0x00cbf, 0x00cbf,), # Kannada Vowel Sign I ..Kannada Vowel Sign I
(0x00cc6, 0x00cc6,), # Kannada Vowel Sign E ..Kannada Vowel Sign E
(0x00ccc, 0x00ccd,), # Kannada Vowel Sign Au ..Kannada Sign Virama
(0x00ce2, 0x00ce3,), # Kannada Vowel Sign Vocal..Kannada Vowel Sign Vocal
(0x00d01, 0x00d01,), # Malayalam Sign Candrabin..Malayalam Sign Candrabin
(0x00d41, 0x00d44,), # Malayalam Vowel Sign U ..Malayalam Vowel Sign Voc
(0x00d4d, 0x00d4d,), # Malayalam Sign Virama ..Malayalam Sign Virama
(0x00d62, 0x00d63,), # Malayalam Vowel Sign Voc..Malayalam Vowel Sign Voc
(0x00dca, 0x00dca,), # Sinhala Sign Al-lakuna ..Sinhala Sign Al-lakuna
(0x00dd2, 0x00dd4,), # Sinhala Vowel Sign Ketti..Sinhala Vowel Sign Ketti
(0x00dd6, 0x00dd6,), # Sinhala Vowel Sign Diga ..Sinhala Vowel Sign Diga
(0x00e31, 0x00e31,), # Thai Character Mai Han-a..Thai Character Mai Han-a
(0x00e34, 0x00e3a,), # Thai Character Sara I ..Thai Character Phinthu
(0x00e47, 0x00e4e,), # Thai Character Maitaikhu..Thai Character Yamakkan
(0x00eb1, 0x00eb1,), # Lao Vowel Sign Mai Kan ..Lao Vowel Sign Mai Kan
(0x00eb4, 0x00eb9,), # Lao Vowel Sign I ..Lao Vowel Sign Uu
(0x00ebb, 0x00ebc,), # Lao Vowel Sign Mai Kon ..Lao Semivowel Sign Lo
(0x00ec8, 0x00ecd,), # Lao Tone Mai Ek ..Lao Niggahita
(0x00f18, 0x00f19,), # Tibetan Astrological Sig..Tibetan Astrological Sig
(0x00f35, 0x00f35,), # Tibetan Mark Ngas Bzung ..Tibetan Mark Ngas Bzung
(0x00f37, 0x00f37,), # Tibetan Mark Ngas Bzung ..Tibetan Mark Ngas Bzung
(0x00f39, 0x00f39,), # Tibetan Mark Tsa -phru ..Tibetan Mark Tsa -phru
(0x00f71, 0x00f7e,), # Tibetan Vowel Sign Aa ..Tibetan Sign Rjes Su Nga
(0x00f80, 0x00f84,), # Tibetan Vowel Sign Rever..Tibetan Mark Halanta
(0x00f86, 0x00f87,), # Tibetan Sign Lci Rtags ..Tibetan Sign Yang Rtags
(0x00f8d, 0x00f97,), # Tibetan Subjoined Sign L..Tibetan Subjoined Letter
(0x00f99, 0x00fbc,), # Tibetan Subjoined Letter..Tibetan Subjoined Letter
(0x00fc6, 0x00fc6,), # Tibetan Symbol Padma Gda..Tibetan Symbol Padma Gda
(0x0102d, 0x01030,), # Myanmar Vowel Sign I ..Myanmar Vowel Sign Uu
(0x01032, 0x01037,), # Myanmar Vowel Sign Ai ..Myanmar Sign Dot Below
(0x01039, 0x0103a,), # Myanmar Sign Virama ..Myanmar Sign Asat
(0x0103d, 0x0103e,), # Myanmar Consonant Sign M..Myanmar Consonant Sign M
(0x01058, 0x01059,), # Myanmar Vowel Sign Vocal..Myanmar Vowel Sign Vocal
(0x0105e, 0x01060,), # Myanmar Consonant Sign M..Myanmar Consonant Sign M
(0x01071, 0x01074,), # Myanmar Vowel Sign Geba ..Myanmar Vowel Sign Kayah
(0x01082, 0x01082,), # Myanmar Consonant Sign S..Myanmar Consonant Sign S
(0x01085, 0x01086,), # Myanmar Vowel Sign Shan ..Myanmar Vowel Sign Shan
(0x0108d, 0x0108d,), # Myanmar Sign Shan Counci..Myanmar Sign Shan Counci
(0x0109d, 0x0109d,), # Myanmar Vowel Sign Aiton..Myanmar Vowel Sign Aiton
(0x0135d, 0x0135f,), # Ethiopic Combining Gemin..Ethiopic Combining Gemin
(0x01712, 0x01714,), # Tagalog Vowel Sign I ..Tagalog Sign Virama
(0x01732, 0x01734,), # Hanunoo Vowel Sign I ..Hanunoo Sign Pamudpod
(0x01752, 0x01753,), # Buhid Vowel Sign I ..Buhid Vowel Sign U
(0x01772, 0x01773,), # Tagbanwa Vowel Sign I ..Tagbanwa Vowel Sign U
(0x017b4, 0x017b5,), # Khmer Vowel Inherent Aq ..Khmer Vowel Inherent Aa
(0x017b7, 0x017bd,), # Khmer Vowel Sign I ..Khmer Vowel Sign Ua
(0x017c6, 0x017c6,), # Khmer Sign Nikahit ..Khmer Sign Nikahit
(0x017c9, 0x017d3,), # Khmer Sign Muusikatoan ..Khmer Sign Bathamasat
(0x017dd, 0x017dd,), # Khmer Sign Atthacan ..Khmer Sign Atthacan
(0x0180b, 0x0180d,), # Mongolian Free Variation..Mongolian Free Variation
(0x018a9, 0x018a9,), # Mongolian Letter Ali Gal..Mongolian Letter Ali Gal
(0x01920, 0x01922,), # Limbu Vowel Sign A ..Limbu Vowel Sign U
(0x01927, 0x01928,), # Limbu Vowel Sign E ..Limbu Vowel Sign O
(0x01932, 0x01932,), # Limbu Small Letter Anusv..Limbu Small Letter Anusv
(0x01939, 0x0193b,), # Limbu Sign Mukphreng ..Limbu Sign Sa-i
(0x01a17, 0x01a18,), # Buginese Vowel Sign I ..Buginese Vowel Sign U
(0x01a1b, 0x01a1b,), # Buginese Vowel Sign Ae ..Buginese Vowel Sign Ae
(0x01a56, 0x01a56,), # Tai Tham Consonant Sign ..Tai Tham Consonant Sign
(0x01a58, 0x01a5e,), # Tai Tham Sign Mai Kang L..Tai Tham Consonant Sign
(0x01a60, 0x01a60,), # Tai Tham Sign Sakot ..Tai Tham Sign Sakot
(0x01a62, 0x01a62,), # Tai Tham Vowel Sign Mai ..Tai Tham Vowel Sign Mai
(0x01a65, 0x01a6c,), # Tai Tham Vowel Sign I ..Tai Tham Vowel Sign Oa B
(0x01a73, 0x01a7c,), # Tai Tham Vowel Sign Oa A..Tai Tham Sign Khuen-lue
(0x01a7f, 0x01a7f,), # Tai Tham Combining Crypt..Tai Tham Combining Crypt
(0x01ab0, 0x01abe,), # Combining Doubled Circum..Combining Parentheses Ov
(0x01b00, 0x01b03,), # Balinese Sign Ulu Ricem ..Balinese Sign Surang
(0x01b34, 0x01b34,), # Balinese Sign Rerekan ..Balinese Sign Rerekan
(0x01b36, 0x01b3a,), # Balinese Vowel Sign Ulu ..Balinese Vowel Sign Ra R
(0x01b3c, 0x01b3c,), # Balinese Vowel Sign La L..Balinese Vowel Sign La L
(0x01b42, 0x01b42,), # Balinese Vowel Sign Pepe..Balinese Vowel Sign Pepe
(0x01b6b, 0x01b73,), # Balinese Musical Symbol ..Balinese Musical Symbol
(0x01b80, 0x01b81,), # Sundanese Sign Panyecek ..Sundanese Sign Panglayar
(0x01ba2, 0x01ba5,), # Sundanese Consonant Sign..Sundanese Vowel Sign Pan
(0x01ba8, 0x01ba9,), # Sundanese Vowel Sign Pam..Sundanese Vowel Sign Pan
(0x01bab, 0x01bad,), # Sundanese Sign Virama ..Sundanese Consonant Sign
(0x01be6, 0x01be6,), # Batak Sign Tompi ..Batak Sign Tompi
(0x01be8, 0x01be9,), # Batak Vowel Sign Pakpak ..Batak Vowel Sign Ee
(0x01bed, 0x01bed,), # Batak Vowel Sign Karo O ..Batak Vowel Sign Karo O
(0x01bef, 0x01bf1,), # Batak Vowel Sign U For S..Batak Consonant Sign H
(0x01c2c, 0x01c33,), # Lepcha Vowel Sign E ..Lepcha Consonant Sign T
(0x01c36, 0x01c37,), # Lepcha Sign Ran ..Lepcha Sign Nukta
(0x01cd0, 0x01cd2,), # Vedic Tone Karshana ..Vedic Tone Prenkha
(0x01cd4, 0x01ce0,), # Vedic Sign Yajurvedic Mi..Vedic Tone Rigvedic Kash
(0x01ce2, 0x01ce8,), # Vedic Sign Visarga Svari..Vedic Sign Visarga Anuda
(0x01ced, 0x01ced,), # Vedic Sign Tiryak ..Vedic Sign Tiryak
(0x01cf4, 0x01cf4,), # Vedic Tone Candra Above ..Vedic Tone Candra Above
(0x01cf8, 0x01cf9,), # Vedic Tone Ring Above ..Vedic Tone Double Ring A
(0x01dc0, 0x01df5,), # Combining Dotted Grave A..Combining Up Tack Above
(0x01dfc, 0x01dff,), # Combining Double Inverte..Combining Right Arrowhea
(0x020d0, 0x020f0,), # Combining Left Harpoon A..Combining Asterisk Above
(0x02cef, 0x02cf1,), # Coptic Combining Ni Abov..Coptic Combining Spiritu
(0x02d7f, 0x02d7f,), # Tifinagh Consonant Joine..Tifinagh Consonant Joine
(0x02de0, 0x02dff,), # Combining Cyrillic Lette..Combining Cyrillic Lette
(0x0302a, 0x0302d,), # Ideographic Level Tone M..Ideographic Entering Ton
(0x03099, 0x0309a,), # Combining Katakana-hirag..Combining Katakana-hirag
(0x0a66f, 0x0a672,), # Combining Cyrillic Vzmet..Combining Cyrillic Thous
(0x0a674, 0x0a67d,), # Combining Cyrillic Lette..Combining Cyrillic Payer
(0x0a69e, 0x0a69f,), # Combining Cyrillic Lette..Combining Cyrillic Lette
(0x0a6f0, 0x0a6f1,), # Bamum Combining Mark Koq..Bamum Combining Mark Tuk
(0x0a802, 0x0a802,), # Syloti Nagri Sign Dvisva..Syloti Nagri Sign Dvisva
(0x0a806, 0x0a806,), # Syloti Nagri Sign Hasant..Syloti Nagri Sign Hasant
(0x0a80b, 0x0a80b,), # Syloti Nagri Sign Anusva..Syloti Nagri Sign Anusva
(0x0a825, 0x0a826,), # Syloti Nagri Vowel Sign ..Syloti Nagri Vowel Sign
(0x0a8c4, 0x0a8c4,), # Saurashtra Sign Virama ..Saurashtra Sign Virama
(0x0a8e0, 0x0a8f1,), # Combining Devanagari Dig..Combining Devanagari Sig
(0x0a926, 0x0a92d,), # Kayah Li Vowel Ue ..Kayah Li Tone Calya Plop
(0x0a947, 0x0a951,), # Rejang Vowel Sign I ..Rejang Consonant Sign R
(0x0a980, 0x0a982,), # Javanese Sign Panyangga ..Javanese Sign Layar
(0x0a9b3, 0x0a9b3,), # Javanese Sign Cecak Telu..Javanese Sign Cecak Telu
(0x0a9b6, 0x0a9b9,), # Javanese Vowel Sign Wulu..Javanese Vowel Sign Suku
(0x0a9bc, 0x0a9bc,), # Javanese Vowel Sign Pepe..Javanese Vowel Sign Pepe
(0x0a9e5, 0x0a9e5,), # Myanmar Sign Shan Saw ..Myanmar Sign Shan Saw
(0x0aa29, 0x0aa2e,), # Cham Vowel Sign Aa ..Cham Vowel Sign Oe
(0x0aa31, 0x0aa32,), # Cham Vowel Sign Au ..Cham Vowel Sign Ue
(0x0aa35, 0x0aa36,), # Cham Consonant Sign La ..Cham Consonant Sign Wa
(0x0aa43, 0x0aa43,), # Cham Consonant Sign Fina..Cham Consonant Sign Fina
(0x0aa4c, 0x0aa4c,), # Cham Consonant Sign Fina..Cham Consonant Sign Fina
(0x0aa7c, 0x0aa7c,), # Myanmar Sign Tai Laing T..Myanmar Sign Tai Laing T
(0x0aab0, 0x0aab0,), # Tai Viet Mai Kang ..Tai Viet Mai Kang
(0x0aab2, 0x0aab4,), # Tai Viet Vowel I ..Tai Viet Vowel U
(0x0aab7, 0x0aab8,), # Tai Viet Mai Khit ..Tai Viet Vowel Ia
(0x0aabe, 0x0aabf,), # Tai Viet Vowel Am ..Tai Viet Tone Mai Ek
(0x0aac1, 0x0aac1,), # Tai Viet Tone Mai Tho ..Tai Viet Tone Mai Tho
(0x0aaec, 0x0aaed,), # Meetei Mayek Vowel Sign ..Meetei Mayek Vowel Sign
(0x0aaf6, 0x0aaf6,), # Meetei Mayek Virama ..Meetei Mayek Virama
(0x0abe5, 0x0abe5,), # Meetei Mayek Vowel Sign ..Meetei Mayek Vowel Sign
(0x0abe8, 0x0abe8,), # Meetei Mayek Vowel Sign ..Meetei Mayek Vowel Sign
(0x0abed, 0x0abed,), # Meetei Mayek Apun Iyek ..Meetei Mayek Apun Iyek
(0x0fb1e, 0x0fb1e,), # Hebrew Point Judeo-spani..Hebrew Point Judeo-spani
(0x0fe00, 0x0fe0f,), # Variation Selector-1 ..Variation Selector-16
(0x0fe20, 0x0fe2f,), # Combining Ligature Left ..Combining Cyrillic Titlo
(0x101fd, 0x101fd,), # Phaistos Disc Sign Combi..Phaistos Disc Sign Combi
(0x102e0, 0x102e0,), # Coptic Epact Thousands M..Coptic Epact Thousands M
(0x10376, 0x1037a,), # Combining Old Permic Let..Combining Old Permic Let
(0x10a01, 0x10a03,), # Kharoshthi Vowel Sign I ..Kharoshthi Vowel Sign Vo
(0x10a05, 0x10a06,), # Kharoshthi Vowel Sign E ..Kharoshthi Vowel Sign O
(0x10a0c, 0x10a0f,), # Kharoshthi Vowel Length ..Kharoshthi Sign Visarga
(0x10a38, 0x10a3a,), # Kharoshthi Sign Bar Abov..Kharoshthi Sign Dot Belo
(0x10a3f, 0x10a3f,), # Kharoshthi Virama ..Kharoshthi Virama
(0x10ae5, 0x10ae6,), # Manichaean Abbreviation ..Manichaean Abbreviation
(0x11001, 0x11001,), # Brahmi Sign Anusvara ..Brahmi Sign Anusvara
(0x11038, 0x11046,), # Brahmi Vowel Sign Aa ..Brahmi Virama
(0x1107f, 0x11081,), # Brahmi Number Joiner ..Kaithi Sign Anusvara
(0x110b3, 0x110b6,), # Kaithi Vowel Sign U ..Kaithi Vowel Sign Ai
(0x110b9, 0x110ba,), # Kaithi Sign Virama ..Kaithi Sign Nukta
(0x11100, 0x11102,), # Chakma Sign Candrabindu ..Chakma Sign Visarga
(0x11127, 0x1112b,), # Chakma Vowel Sign A ..Chakma Vowel Sign Uu
(0x1112d, 0x11134,), # Chakma Vowel Sign Ai ..Chakma Maayyaa
(0x11173, 0x11173,), # Mahajani Sign Nukta ..Mahajani Sign Nukta
(0x11180, 0x11181,), # Sharada Sign Candrabindu..Sharada Sign Anusvara
(0x111b6, 0x111be,), # Sharada Vowel Sign U ..Sharada Vowel Sign O
(0x111ca, 0x111cc,), # Sharada Sign Nukta ..Sharada Extra Short Vowe
(0x1122f, 0x11231,), # Khojki Vowel Sign U ..Khojki Vowel Sign Ai
(0x11234, 0x11234,), # Khojki Sign Anusvara ..Khojki Sign Anusvara
(0x11236, 0x11237,), # Khojki Sign Nukta ..Khojki Sign Shadda
(0x112df, 0x112df,), # Khudawadi Sign Anusvara ..Khudawadi Sign Anusvara
(0x112e3, 0x112ea,), # Khudawadi Vowel Sign U ..Khudawadi Sign Virama
(0x11300, 0x11301,), # Grantha Sign Combining A..Grantha Sign Candrabindu
(0x1133c, 0x1133c,), # Grantha Sign Nukta ..Grantha Sign Nukta
(0x11340, 0x11340,), # Grantha Vowel Sign Ii ..Grantha Vowel Sign Ii
(0x11366, 0x1136c,), # Combining Grantha Digit ..Combining Grantha Digit
(0x11370, 0x11374,), # Combining Grantha Letter..Combining Grantha Letter
(0x114b3, 0x114b8,), # Tirhuta Vowel Sign U ..Tirhuta Vowel Sign Vocal
(0x114ba, 0x114ba,), # Tirhuta Vowel Sign Short..Tirhuta Vowel Sign Short
(0x114bf, 0x114c0,), # Tirhuta Sign Candrabindu..Tirhuta Sign Anusvara
(0x114c2, 0x114c3,), # Tirhuta Sign Virama ..Tirhuta Sign Nukta
(0x115b2, 0x115b5,), # Siddham Vowel Sign U ..Siddham Vowel Sign Vocal
(0x115bc, 0x115bd,), # Siddham Sign Candrabindu..Siddham Sign Anusvara
(0x115bf, 0x115c0,), # Siddham Sign Virama ..Siddham Sign Nukta
(0x115dc, 0x115dd,), # Siddham Vowel Sign Alter..Siddham Vowel Sign Alter
(0x11633, 0x1163a,), # Modi Vowel Sign U ..Modi Vowel Sign Ai
(0x1163d, 0x1163d,), # Modi Sign Anusvara ..Modi Sign Anusvara
(0x1163f, 0x11640,), # Modi Sign Virama ..Modi Sign Ardhacandra
(0x116ab, 0x116ab,), # Takri Sign Anusvara ..Takri Sign Anusvara
(0x116ad, 0x116ad,), # Takri Vowel Sign Aa ..Takri Vowel Sign Aa
(0x116b0, 0x116b5,), # Takri Vowel Sign U ..Takri Vowel Sign Au
(0x116b7, 0x116b7,), # Takri Sign Nukta ..Takri Sign Nukta
(0x1171d, 0x1171f,), # Ahom Consonant Sign Medi..Ahom Consonant Sign Medi
(0x11722, 0x11725,), # Ahom Vowel Sign I ..Ahom Vowel Sign Uu
(0x11727, 0x1172b,), # Ahom Vowel Sign Aw ..Ahom Sign Killer
(0x16af0, 0x16af4,), # Bassa Vah Combining High..Bassa Vah Combining High
(0x16b30, 0x16b36,), # Pahawh Hmong Mark Cim Tu..Pahawh Hmong Mark Cim Ta
(0x16f8f, 0x16f92,), # Miao Tone Right ..Miao Tone Below
(0x1bc9d, 0x1bc9e,), # Duployan Thick Letter Se..Duployan Double Mark
(0x1d167, 0x1d169,), # Musical Symbol Combining..Musical Symbol Combining
(0x1d17b, 0x1d182,), # Musical Symbol Combining..Musical Symbol Combining
(0x1d185, 0x1d18b,), # Musical Symbol Combining..Musical Symbol Combining
(0x1d1aa, 0x1d1ad,), # Musical Symbol Combining..Musical Symbol Combining
(0x1d242, 0x1d244,), # Combining Greek Musical ..Combining Greek Musical
(0x1da00, 0x1da36,), # Signwriting Head Rim ..Signwriting Air Sucking
(0x1da3b, 0x1da6c,), # Signwriting Mouth Closed..Signwriting Excitement
(0x1da75, 0x1da75,), # Signwriting Upper Body T..Signwriting Upper Body T
(0x1da84, 0x1da84,), # Signwriting Location Hea..Signwriting Location Hea
(0x1da9b, 0x1da9f,), # Signwriting Fill Modifie..Signwriting Fill Modifie
(0x1daa1, 0x1daaf,), # Signwriting Rotation Mod..Signwriting Rotation Mod
(0x1e8d0, 0x1e8d6,), # Mende Kikakui Combining ..Mende Kikakui Combining
(0xe0100, 0xe01ef,), # Variation Selector-17 ..Variation Selector-256
),
'9.0.0': (
# Source: DerivedGeneralCategory-9.0.0.txt
# Date: 2016-06-01, 10:34:26 GMT
#
(0x00300, 0x0036f,), # Combining Grave Accent ..Combining Latin Small Le
(0x00483, 0x00489,), # Combining Cyrillic Titlo..Combining Cyrillic Milli
(0x00591, 0x005bd,), # Hebrew Accent Etnahta ..Hebrew Point Meteg
(0x005bf, 0x005bf,), # Hebrew Point Rafe ..Hebrew Point Rafe
(0x005c1, 0x005c2,), # Hebrew Point Shin Dot ..Hebrew Point Sin Dot
(0x005c4, 0x005c5,), # Hebrew Mark Upper Dot ..Hebrew Mark Lower Dot
(0x005c7, 0x005c7,), # Hebrew Point Qamats Qata..Hebrew Point Qamats Qata
(0x00610, 0x0061a,), # Arabic Sign Sallallahou ..Arabic Small Kasra
(0x0064b, 0x0065f,), # Arabic Fathatan ..Arabic Wavy Hamza Below
(0x00670, 0x00670,), # Arabic Letter Superscrip..Arabic Letter Superscrip
(0x006d6, 0x006dc,), # Arabic Small High Ligatu..Arabic Small High Seen
(0x006df, 0x006e4,), # Arabic Small High Rounde..Arabic Small High Madda
(0x006e7, 0x006e8,), # Arabic Small High Yeh ..Arabic Small High Noon
(0x006ea, 0x006ed,), # Arabic Empty Centre Low ..Arabic Small Low Meem
(0x00711, 0x00711,), # Syriac Letter Superscrip..Syriac Letter Superscrip
(0x00730, 0x0074a,), # Syriac Pthaha Above ..Syriac Barrekh
(0x007a6, 0x007b0,), # Thaana Abafili ..Thaana Sukun
(0x007eb, 0x007f3,), # Nko Combining Short High..Nko Combining Double Dot
(0x00816, 0x00819,), # Samaritan Mark In ..Samaritan Mark Dagesh
(0x0081b, 0x00823,), # Samaritan Mark Epentheti..Samaritan Vowel Sign A
(0x00825, 0x00827,), # Samaritan Vowel Sign Sho..Samaritan Vowel Sign U
(0x00829, 0x0082d,), # Samaritan Vowel Sign Lon..Samaritan Mark Nequdaa
(0x00859, 0x0085b,), # Mandaic Affrication Mark..Mandaic Gemination Mark
(0x008d4, 0x008e1,), # Arabic Small High Word A..Arabic Small High Sign S
(0x008e3, 0x00902,), # Arabic Turned Damma Belo..Devanagari Sign Anusvara
(0x0093a, 0x0093a,), # Devanagari Vowel Sign Oe..Devanagari Vowel Sign Oe
(0x0093c, 0x0093c,), # Devanagari Sign Nukta ..Devanagari Sign Nukta
(0x00941, 0x00948,), # Devanagari Vowel Sign U ..Devanagari Vowel Sign Ai
(0x0094d, 0x0094d,), # Devanagari Sign Virama ..Devanagari Sign Virama
(0x00951, 0x00957,), # Devanagari Stress Sign U..Devanagari Vowel Sign Uu
(0x00962, 0x00963,), # Devanagari Vowel Sign Vo..Devanagari Vowel Sign Vo
(0x00981, 0x00981,), # Bengali Sign Candrabindu..Bengali Sign Candrabindu
(0x009bc, 0x009bc,), # Bengali Sign Nukta ..Bengali Sign Nukta
(0x009c1, 0x009c4,), # Bengali Vowel Sign U ..Bengali Vowel Sign Vocal
(0x009cd, 0x009cd,), # Bengali Sign Virama ..Bengali Sign Virama
(0x009e2, 0x009e3,), # Bengali Vowel Sign Vocal..Bengali Vowel Sign Vocal
(0x00a01, 0x00a02,), # Gurmukhi Sign Adak Bindi..Gurmukhi Sign Bindi
(0x00a3c, 0x00a3c,), # Gurmukhi Sign Nukta ..Gurmukhi Sign Nukta
(0x00a41, 0x00a42,), # Gurmukhi Vowel Sign U ..Gurmukhi Vowel Sign Uu
(0x00a47, 0x00a48,), # Gurmukhi Vowel Sign Ee ..Gurmukhi Vowel Sign Ai
(0x00a4b, 0x00a4d,), # Gurmukhi Vowel Sign Oo ..Gurmukhi Sign Virama
(0x00a51, 0x00a51,), # Gurmukhi Sign Udaat ..Gurmukhi Sign Udaat
(0x00a70, 0x00a71,), # Gurmukhi Tippi ..Gurmukhi Addak
(0x00a75, 0x00a75,), # Gurmukhi Sign Yakash ..Gurmukhi Sign Yakash
(0x00a81, 0x00a82,), # Gujarati Sign Candrabind..Gujarati Sign Anusvara
(0x00abc, 0x00abc,), # Gujarati Sign Nukta ..Gujarati Sign Nukta
(0x00ac1, 0x00ac5,), # Gujarati Vowel Sign U ..Gujarati Vowel Sign Cand
(0x00ac7, 0x00ac8,), # Gujarati Vowel Sign E ..Gujarati Vowel Sign Ai
(0x00acd, 0x00acd,), # Gujarati Sign Virama ..Gujarati Sign Virama
(0x00ae2, 0x00ae3,), # Gujarati Vowel Sign Voca..Gujarati Vowel Sign Voca
(0x00b01, 0x00b01,), # Oriya Sign Candrabindu ..Oriya Sign Candrabindu
(0x00b3c, 0x00b3c,), # Oriya Sign Nukta ..Oriya Sign Nukta
(0x00b3f, 0x00b3f,), # Oriya Vowel Sign I ..Oriya Vowel Sign I
(0x00b41, 0x00b44,), # Oriya Vowel Sign U ..Oriya Vowel Sign Vocalic
(0x00b4d, 0x00b4d,), # Oriya Sign Virama ..Oriya Sign Virama
(0x00b56, 0x00b56,), # Oriya Ai Length Mark ..Oriya Ai Length Mark
(0x00b62, 0x00b63,), # Oriya Vowel Sign Vocalic..Oriya Vowel Sign Vocalic
(0x00b82, 0x00b82,), # Tamil Sign Anusvara ..Tamil Sign Anusvara
(0x00bc0, 0x00bc0,), # Tamil Vowel Sign Ii ..Tamil Vowel Sign Ii
(0x00bcd, 0x00bcd,), # Tamil Sign Virama ..Tamil Sign Virama
(0x00c00, 0x00c00,), # Telugu Sign Combining Ca..Telugu Sign Combining Ca
(0x00c3e, 0x00c40,), # Telugu Vowel Sign Aa ..Telugu Vowel Sign Ii
(0x00c46, 0x00c48,), # Telugu Vowel Sign E ..Telugu Vowel Sign Ai
(0x00c4a, 0x00c4d,), # Telugu Vowel Sign O ..Telugu Sign Virama
(0x00c55, 0x00c56,), # Telugu Length Mark ..Telugu Ai Length Mark
(0x00c62, 0x00c63,), # Telugu Vowel Sign Vocali..Telugu Vowel Sign Vocali
(0x00c81, 0x00c81,), # Kannada Sign Candrabindu..Kannada Sign Candrabindu
(0x00cbc, 0x00cbc,), # Kannada Sign Nukta ..Kannada Sign Nukta
(0x00cbf, 0x00cbf,), # Kannada Vowel Sign I ..Kannada Vowel Sign I
(0x00cc6, 0x00cc6,), # Kannada Vowel Sign E ..Kannada Vowel Sign E
(0x00ccc, 0x00ccd,), # Kannada Vowel Sign Au ..Kannada Sign Virama
(0x00ce2, 0x00ce3,), # Kannada Vowel Sign Vocal..Kannada Vowel Sign Vocal
(0x00d01, 0x00d01,), # Malayalam Sign Candrabin..Malayalam Sign Candrabin
(0x00d41, 0x00d44,), # Malayalam Vowel Sign U ..Malayalam Vowel Sign Voc
(0x00d4d, 0x00d4d,), # Malayalam Sign Virama ..Malayalam Sign Virama
(0x00d62, 0x00d63,), # Malayalam Vowel Sign Voc..Malayalam Vowel Sign Voc
(0x00dca, 0x00dca,), # Sinhala Sign Al-lakuna ..Sinhala Sign Al-lakuna
(0x00dd2, 0x00dd4,), # Sinhala Vowel Sign Ketti..Sinhala Vowel Sign Ketti
(0x00dd6, 0x00dd6,), # Sinhala Vowel Sign Diga ..Sinhala Vowel Sign Diga
(0x00e31, 0x00e31,), # Thai Character Mai Han-a..Thai Character Mai Han-a
(0x00e34, 0x00e3a,), # Thai Character Sara I ..Thai Character Phinthu
(0x00e47, 0x00e4e,), # Thai Character Maitaikhu..Thai Character Yamakkan
(0x00eb1, 0x00eb1,), # Lao Vowel Sign Mai Kan ..Lao Vowel Sign Mai Kan
(0x00eb4, 0x00eb9,), # Lao Vowel Sign I ..Lao Vowel Sign Uu
(0x00ebb, 0x00ebc,), # Lao Vowel Sign Mai Kon ..Lao Semivowel Sign Lo
(0x00ec8, 0x00ecd,), # Lao Tone Mai Ek ..Lao Niggahita
(0x00f18, 0x00f19,), # Tibetan Astrological Sig..Tibetan Astrological Sig
(0x00f35, 0x00f35,), # Tibetan Mark Ngas Bzung ..Tibetan Mark Ngas Bzung
(0x00f37, 0x00f37,), # Tibetan Mark Ngas Bzung ..Tibetan Mark Ngas Bzung
(0x00f39, 0x00f39,), # Tibetan Mark Tsa -phru ..Tibetan Mark Tsa -phru
(0x00f71, 0x00f7e,), # Tibetan Vowel Sign Aa ..Tibetan Sign Rjes Su Nga
(0x00f80, 0x00f84,), # Tibetan Vowel Sign Rever..Tibetan Mark Halanta
(0x00f86, 0x00f87,), # Tibetan Sign Lci Rtags ..Tibetan Sign Yang Rtags
(0x00f8d, 0x00f97,), # Tibetan Subjoined Sign L..Tibetan Subjoined Letter
(0x00f99, 0x00fbc,), # Tibetan Subjoined Letter..Tibetan Subjoined Letter
(0x00fc6, 0x00fc6,), # Tibetan Symbol Padma Gda..Tibetan Symbol Padma Gda
(0x0102d, 0x01030,), # Myanmar Vowel Sign I ..Myanmar Vowel Sign Uu
(0x01032, 0x01037,), # Myanmar Vowel Sign Ai ..Myanmar Sign Dot Below
(0x01039, 0x0103a,), # Myanmar Sign Virama ..Myanmar Sign Asat
(0x0103d, 0x0103e,), # Myanmar Consonant Sign M..Myanmar Consonant Sign M
(0x01058, 0x01059,), # Myanmar Vowel Sign Vocal..Myanmar Vowel Sign Vocal
(0x0105e, 0x01060,), # Myanmar Consonant Sign M..Myanmar Consonant Sign M
(0x01071, 0x01074,), # Myanmar Vowel Sign Geba ..Myanmar Vowel Sign Kayah
(0x01082, 0x01082,), # Myanmar Consonant Sign S..Myanmar Consonant Sign S
(0x01085, 0x01086,), # Myanmar Vowel Sign Shan ..Myanmar Vowel Sign Shan
(0x0108d, 0x0108d,), # Myanmar Sign Shan Counci..Myanmar Sign Shan Counci
(0x0109d, 0x0109d,), # Myanmar Vowel Sign Aiton..Myanmar Vowel Sign Aiton
(0x0135d, 0x0135f,), # Ethiopic Combining Gemin..Ethiopic Combining Gemin
(0x01712, 0x01714,), # Tagalog Vowel Sign I ..Tagalog Sign Virama
(0x01732, 0x01734,), # Hanunoo Vowel Sign I ..Hanunoo Sign Pamudpod
(0x01752, 0x01753,), # Buhid Vowel Sign I ..Buhid Vowel Sign U
(0x01772, 0x01773,), # Tagbanwa Vowel Sign I ..Tagbanwa Vowel Sign U
(0x017b4, 0x017b5,), # Khmer Vowel Inherent Aq ..Khmer Vowel Inherent Aa
(0x017b7, 0x017bd,), # Khmer Vowel Sign I ..Khmer Vowel Sign Ua
(0x017c6, 0x017c6,), # Khmer Sign Nikahit ..Khmer Sign Nikahit
(0x017c9, 0x017d3,), # Khmer Sign Muusikatoan ..Khmer Sign Bathamasat
(0x017dd, 0x017dd,), # Khmer Sign Atthacan ..Khmer Sign Atthacan
(0x0180b, 0x0180d,), # Mongolian Free Variation..Mongolian Free Variation
(0x01885, 0x01886,), # Mongolian Letter Ali Gal..Mongolian Letter Ali Gal
(0x018a9, 0x018a9,), # Mongolian Letter Ali Gal..Mongolian Letter Ali Gal
(0x01920, 0x01922,), # Limbu Vowel Sign A ..Limbu Vowel Sign U
(0x01927, 0x01928,), # Limbu Vowel Sign E ..Limbu Vowel Sign O
(0x01932, 0x01932,), # Limbu Small Letter Anusv..Limbu Small Letter Anusv
(0x01939, 0x0193b,), # Limbu Sign Mukphreng ..Limbu Sign Sa-i
(0x01a17, 0x01a18,), # Buginese Vowel Sign I ..Buginese Vowel Sign U
(0x01a1b, 0x01a1b,), # Buginese Vowel Sign Ae ..Buginese Vowel Sign Ae
(0x01a56, 0x01a56,), # Tai Tham Consonant Sign ..Tai Tham Consonant Sign
(0x01a58, 0x01a5e,), # Tai Tham Sign Mai Kang L..Tai Tham Consonant Sign
(0x01a60, 0x01a60,), # Tai Tham Sign Sakot ..Tai Tham Sign Sakot
(0x01a62, 0x01a62,), # Tai Tham Vowel Sign Mai ..Tai Tham Vowel Sign Mai
(0x01a65, 0x01a6c,), # Tai Tham Vowel Sign I ..Tai Tham Vowel Sign Oa B
(0x01a73, 0x01a7c,), # Tai Tham Vowel Sign Oa A..Tai Tham Sign Khuen-lue
(0x01a7f, 0x01a7f,), # Tai Tham Combining Crypt..Tai Tham Combining Crypt
(0x01ab0, 0x01abe,), # Combining Doubled Circum..Combining Parentheses Ov
(0x01b00, 0x01b03,), # Balinese Sign Ulu Ricem ..Balinese Sign Surang
(0x01b34, 0x01b34,), # Balinese Sign Rerekan ..Balinese Sign Rerekan
(0x01b36, 0x01b3a,), # Balinese Vowel Sign Ulu ..Balinese Vowel Sign Ra R
(0x01b3c, 0x01b3c,), # Balinese Vowel Sign La L..Balinese Vowel Sign La L
(0x01b42, 0x01b42,), # Balinese Vowel Sign Pepe..Balinese Vowel Sign Pepe
(0x01b6b, 0x01b73,), # Balinese Musical Symbol ..Balinese Musical Symbol
(0x01b80, 0x01b81,), # Sundanese Sign Panyecek ..Sundanese Sign Panglayar
(0x01ba2, 0x01ba5,), # Sundanese Consonant Sign..Sundanese Vowel Sign Pan
(0x01ba8, 0x01ba9,), # Sundanese Vowel Sign Pam..Sundanese Vowel Sign Pan
(0x01bab, 0x01bad,), # Sundanese Sign Virama ..Sundanese Consonant Sign
(0x01be6, 0x01be6,), # Batak Sign Tompi ..Batak Sign Tompi
(0x01be8, 0x01be9,), # Batak Vowel Sign Pakpak ..Batak Vowel Sign Ee
(0x01bed, 0x01bed,), # Batak Vowel Sign Karo O ..Batak Vowel Sign Karo O
(0x01bef, 0x01bf1,), # Batak Vowel Sign U For S..Batak Consonant Sign H
(0x01c2c, 0x01c33,), # Lepcha Vowel Sign E ..Lepcha Consonant Sign T
(0x01c36, 0x01c37,), # Lepcha Sign Ran ..Lepcha Sign Nukta
(0x01cd0, 0x01cd2,), # Vedic Tone Karshana ..Vedic Tone Prenkha
(0x01cd4, 0x01ce0,), # Vedic Sign Yajurvedic Mi..Vedic Tone Rigvedic Kash
(0x01ce2, 0x01ce8,), # Vedic Sign Visarga Svari..Vedic Sign Visarga Anuda
(0x01ced, 0x01ced,), # Vedic Sign Tiryak ..Vedic Sign Tiryak
(0x01cf4, 0x01cf4,), # Vedic Tone Candra Above ..Vedic Tone Candra Above
(0x01cf8, 0x01cf9,), # Vedic Tone Ring Above ..Vedic Tone Double Ring A
(0x01dc0, 0x01df5,), # Combining Dotted Grave A..Combining Up Tack Above
(0x01dfb, 0x01dff,), # Combining Deletion Mark ..Combining Right Arrowhea
(0x020d0, 0x020f0,), # Combining Left Harpoon A..Combining Asterisk Above
(0x02cef, 0x02cf1,), # Coptic Combining Ni Abov..Coptic Combining Spiritu
(0x02d7f, 0x02d7f,), # Tifinagh Consonant Joine..Tifinagh Consonant Joine
(0x02de0, 0x02dff,), # Combining Cyrillic Lette..Combining Cyrillic Lette
(0x0302a, 0x0302d,), # Ideographic Level Tone M..Ideographic Entering Ton
(0x03099, 0x0309a,), # Combining Katakana-hirag..Combining Katakana-hirag
(0x0a66f, 0x0a672,), # Combining Cyrillic Vzmet..Combining Cyrillic Thous
(0x0a674, 0x0a67d,), # Combining Cyrillic Lette..Combining Cyrillic Payer
(0x0a69e, 0x0a69f,), # Combining Cyrillic Lette..Combining Cyrillic Lette
(0x0a6f0, 0x0a6f1,), # Bamum Combining Mark Koq..Bamum Combining Mark Tuk
(0x0a802, 0x0a802,), # Syloti Nagri Sign Dvisva..Syloti Nagri Sign Dvisva
(0x0a806, 0x0a806,), # Syloti Nagri Sign Hasant..Syloti Nagri Sign Hasant
(0x0a80b, 0x0a80b,), # Syloti Nagri Sign Anusva..Syloti Nagri Sign Anusva
(0x0a825, 0x0a826,), # Syloti Nagri Vowel Sign ..Syloti Nagri Vowel Sign
(0x0a8c4, 0x0a8c5,), # Saurashtra Sign Virama ..Saurashtra Sign Candrabi
(0x0a8e0, 0x0a8f1,), # Combining Devanagari Dig..Combining Devanagari Sig
(0x0a926, 0x0a92d,), # Kayah Li Vowel Ue ..Kayah Li Tone Calya Plop
(0x0a947, 0x0a951,), # Rejang Vowel Sign I ..Rejang Consonant Sign R
(0x0a980, 0x0a982,), # Javanese Sign Panyangga ..Javanese Sign Layar
(0x0a9b3, 0x0a9b3,), # Javanese Sign Cecak Telu..Javanese Sign Cecak Telu
(0x0a9b6, 0x0a9b9,), # Javanese Vowel Sign Wulu..Javanese Vowel Sign Suku
(0x0a9bc, 0x0a9bc,), # Javanese Vowel Sign Pepe..Javanese Vowel Sign Pepe
(0x0a9e5, 0x0a9e5,), # Myanmar Sign Shan Saw ..Myanmar Sign Shan Saw
(0x0aa29, 0x0aa2e,), # Cham Vowel Sign Aa ..Cham Vowel Sign Oe
(0x0aa31, 0x0aa32,), # Cham Vowel Sign Au ..Cham Vowel Sign Ue
(0x0aa35, 0x0aa36,), # Cham Consonant Sign La ..Cham Consonant Sign Wa
(0x0aa43, 0x0aa43,), # Cham Consonant Sign Fina..Cham Consonant Sign Fina
(0x0aa4c, 0x0aa4c,), # Cham Consonant Sign Fina..Cham Consonant Sign Fina
(0x0aa7c, 0x0aa7c,), # Myanmar Sign Tai Laing T..Myanmar Sign Tai Laing T
(0x0aab0, 0x0aab0,), # Tai Viet Mai Kang ..Tai Viet Mai Kang
(0x0aab2, 0x0aab4,), # Tai Viet Vowel I ..Tai Viet Vowel U
(0x0aab7, 0x0aab8,), # Tai Viet Mai Khit ..Tai Viet Vowel Ia
(0x0aabe, 0x0aabf,), # Tai Viet Vowel Am ..Tai Viet Tone Mai Ek
(0x0aac1, 0x0aac1,), # Tai Viet Tone Mai Tho ..Tai Viet Tone Mai Tho
(0x0aaec, 0x0aaed,), # Meetei Mayek Vowel Sign ..Meetei Mayek Vowel Sign
(0x0aaf6, 0x0aaf6,), # Meetei Mayek Virama ..Meetei Mayek Virama
(0x0abe5, 0x0abe5,), # Meetei Mayek Vowel Sign ..Meetei Mayek Vowel Sign
(0x0abe8, 0x0abe8,), # Meetei Mayek Vowel Sign ..Meetei Mayek Vowel Sign
(0x0abed, 0x0abed,), # Meetei Mayek Apun Iyek ..Meetei Mayek Apun Iyek
(0x0fb1e, 0x0fb1e,), # Hebrew Point Judeo-spani..Hebrew Point Judeo-spani
(0x0fe00, 0x0fe0f,), # Variation Selector-1 ..Variation Selector-16
(0x0fe20, 0x0fe2f,), # Combining Ligature Left ..Combining Cyrillic Titlo
(0x101fd, 0x101fd,), # Phaistos Disc Sign Combi..Phaistos Disc Sign Combi
(0x102e0, 0x102e0,), # Coptic Epact Thousands M..Coptic Epact Thousands M
(0x10376, 0x1037a,), # Combining Old Permic Let..Combining Old Permic Let
(0x10a01, 0x10a03,), # Kharoshthi Vowel Sign I ..Kharoshthi Vowel Sign Vo
(0x10a05, 0x10a06,), # Kharoshthi Vowel Sign E ..Kharoshthi Vowel Sign O
(0x10a0c, 0x10a0f,), # Kharoshthi Vowel Length ..Kharoshthi Sign Visarga
(0x10a38, 0x10a3a,), # Kharoshthi Sign Bar Abov..Kharoshthi Sign Dot Belo
(0x10a3f, 0x10a3f,), # Kharoshthi Virama ..Kharoshthi Virama
(0x10ae5, 0x10ae6,), # Manichaean Abbreviation ..Manichaean Abbreviation
(0x11001, 0x11001,), # Brahmi Sign Anusvara ..Brahmi Sign Anusvara
(0x11038, 0x11046,), # Brahmi Vowel Sign Aa ..Brahmi Virama
(0x1107f, 0x11081,), # Brahmi Number Joiner ..Kaithi Sign Anusvara
(0x110b3, 0x110b6,), # Kaithi Vowel Sign U ..Kaithi Vowel Sign Ai
(0x110b9, 0x110ba,), # Kaithi Sign Virama ..Kaithi Sign Nukta
(0x11100, 0x11102,), # Chakma Sign Candrabindu ..Chakma Sign Visarga
(0x11127, 0x1112b,), # Chakma Vowel Sign A ..Chakma Vowel Sign Uu
(0x1112d, 0x11134,), # Chakma Vowel Sign Ai ..Chakma Maayyaa
(0x11173, 0x11173,), # Mahajani Sign Nukta ..Mahajani Sign Nukta
(0x11180, 0x11181,), # Sharada Sign Candrabindu..Sharada Sign Anusvara
(0x111b6, 0x111be,), # Sharada Vowel Sign U ..Sharada Vowel Sign O
(0x111ca, 0x111cc,), # Sharada Sign Nukta ..Sharada Extra Short Vowe
(0x1122f, 0x11231,), # Khojki Vowel Sign U ..Khojki Vowel Sign Ai
(0x11234, 0x11234,), # Khojki Sign Anusvara ..Khojki Sign Anusvara
(0x11236, 0x11237,), # Khojki Sign Nukta ..Khojki Sign Shadda
(0x1123e, 0x1123e,), # Khojki Sign Sukun ..Khojki Sign Sukun
(0x112df, 0x112df,), # Khudawadi Sign Anusvara ..Khudawadi Sign Anusvara
(0x112e3, 0x112ea,), # Khudawadi Vowel Sign U ..Khudawadi Sign Virama
(0x11300, 0x11301,), # Grantha Sign Combining A..Grantha Sign Candrabindu
(0x1133c, 0x1133c,), # Grantha Sign Nukta ..Grantha Sign Nukta
(0x11340, 0x11340,), # Grantha Vowel Sign Ii ..Grantha Vowel Sign Ii
(0x11366, 0x1136c,), # Combining Grantha Digit ..Combining Grantha Digit
(0x11370, 0x11374,), # Combining Grantha Letter..Combining Grantha Letter
(0x11438, 0x1143f,), # Newa Vowel Sign U ..Newa Vowel Sign Ai
(0x11442, 0x11444,), # Newa Sign Virama ..Newa Sign Anusvara
(0x11446, 0x11446,), # Newa Sign Nukta ..Newa Sign Nukta
(0x114b3, 0x114b8,), # Tirhuta Vowel Sign U ..Tirhuta Vowel Sign Vocal
(0x114ba, 0x114ba,), # Tirhuta Vowel Sign Short..Tirhuta Vowel Sign Short
(0x114bf, 0x114c0,), # Tirhuta Sign Candrabindu..Tirhuta Sign Anusvara
(0x114c2, 0x114c3,), # Tirhuta Sign Virama ..Tirhuta Sign Nukta
(0x115b2, 0x115b5,), # Siddham Vowel Sign U ..Siddham Vowel Sign Vocal
(0x115bc, 0x115bd,), # Siddham Sign Candrabindu..Siddham Sign Anusvara
(0x115bf, 0x115c0,), # Siddham Sign Virama ..Siddham Sign Nukta
(0x115dc, 0x115dd,), # Siddham Vowel Sign Alter..Siddham Vowel Sign Alter
(0x11633, 0x1163a,), # Modi Vowel Sign U ..Modi Vowel Sign Ai
(0x1163d, 0x1163d,), # Modi Sign Anusvara ..Modi Sign Anusvara
(0x1163f, 0x11640,), # Modi Sign Virama ..Modi Sign Ardhacandra
(0x116ab, 0x116ab,), # Takri Sign Anusvara ..Takri Sign Anusvara
(0x116ad, 0x116ad,), # Takri Vowel Sign Aa ..Takri Vowel Sign Aa
(0x116b0, 0x116b5,), # Takri Vowel Sign U ..Takri Vowel Sign Au
(0x116b7, 0x116b7,), # Takri Sign Nukta ..Takri Sign Nukta
(0x1171d, 0x1171f,), # Ahom Consonant Sign Medi..Ahom Consonant Sign Medi
(0x11722, 0x11725,), # Ahom Vowel Sign I ..Ahom Vowel Sign Uu
(0x11727, 0x1172b,), # Ahom Vowel Sign Aw ..Ahom Sign Killer
(0x11c30, 0x11c36,), # Bhaiksuki Vowel Sign I ..Bhaiksuki Vowel Sign Voc
(0x11c38, 0x11c3d,), # Bhaiksuki Vowel Sign E ..Bhaiksuki Sign Anusvara
(0x11c3f, 0x11c3f,), # Bhaiksuki Sign Virama ..Bhaiksuki Sign Virama
(0x11c92, 0x11ca7,), # Marchen Subjoined Letter..Marchen Subjoined Letter
(0x11caa, 0x11cb0,), # Marchen Subjoined Letter..Marchen Vowel Sign Aa
(0x11cb2, 0x11cb3,), # Marchen Vowel Sign U ..Marchen Vowel Sign E
(0x11cb5, 0x11cb6,), # Marchen Sign Anusvara ..Marchen Sign Candrabindu
(0x16af0, 0x16af4,), # Bassa Vah Combining High..Bassa Vah Combining High
(0x16b30, 0x16b36,), # Pahawh Hmong Mark Cim Tu..Pahawh Hmong Mark Cim Ta
(0x16f8f, 0x16f92,), # Miao Tone Right ..Miao Tone Below
(0x1bc9d, 0x1bc9e,), # Duployan Thick Letter Se..Duployan Double Mark
(0x1d167, 0x1d169,), # Musical Symbol Combining..Musical Symbol Combining
(0x1d17b, 0x1d182,), # Musical Symbol Combining..Musical Symbol Combining
(0x1d185, 0x1d18b,), # Musical Symbol Combining..Musical Symbol Combining
(0x1d1aa, 0x1d1ad,), # Musical Symbol Combining..Musical Symbol Combining
(0x1d242, 0x1d244,), # Combining Greek Musical ..Combining Greek Musical
(0x1da00, 0x1da36,), # Signwriting Head Rim ..Signwriting Air Sucking
(0x1da3b, 0x1da6c,), # Signwriting Mouth Closed..Signwriting Excitement
(0x1da75, 0x1da75,), # Signwriting Upper Body T..Signwriting Upper Body T
(0x1da84, 0x1da84,), # Signwriting Location Hea..Signwriting Location Hea
(0x1da9b, 0x1da9f,), # Signwriting Fill Modifie..Signwriting Fill Modifie
(0x1daa1, 0x1daaf,), # Signwriting Rotation Mod..Signwriting Rotation Mod
(0x1e000, 0x1e006,), # Combining Glagolitic Let..Combining Glagolitic Let
(0x1e008, 0x1e018,), # Combining Glagolitic Let..Combining Glagolitic Let
(0x1e01b, 0x1e021,), # Combining Glagolitic Let..Combining Glagolitic Let
(0x1e023, 0x1e024,), # Combining Glagolitic Let..Combining Glagolitic Let
(0x1e026, 0x1e02a,), # Combining Glagolitic Let..Combining Glagolitic Let
(0x1e8d0, 0x1e8d6,), # Mende Kikakui Combining ..Mende Kikakui Combining
(0x1e944, 0x1e94a,), # Adlam Alif Lengthener ..Adlam Nukta
(0xe0100, 0xe01ef,), # Variation Selector-17 ..Variation Selector-256
),
'10.0.0': (
# Source: DerivedGeneralCategory-10.0.0.txt
# Date: 2017-03-08, 08:41:49 GMT
#
(0x00300, 0x0036f,), # Combining Grave Accent ..Combining Latin Small Le
(0x00483, 0x00489,), # Combining Cyrillic Titlo..Combining Cyrillic Milli
(0x00591, 0x005bd,), # Hebrew Accent Etnahta ..Hebrew Point Meteg
(0x005bf, 0x005bf,), # Hebrew Point Rafe ..Hebrew Point Rafe
(0x005c1, 0x005c2,), # Hebrew Point Shin Dot ..Hebrew Point Sin Dot
(0x005c4, 0x005c5,), # Hebrew Mark Upper Dot ..Hebrew Mark Lower Dot
(0x005c7, 0x005c7,), # Hebrew Point Qamats Qata..Hebrew Point Qamats Qata
(0x00610, 0x0061a,), # Arabic Sign Sallallahou ..Arabic Small Kasra
(0x0064b, 0x0065f,), # Arabic Fathatan ..Arabic Wavy Hamza Below
(0x00670, 0x00670,), # Arabic Letter Superscrip..Arabic Letter Superscrip
(0x006d6, 0x006dc,), # Arabic Small High Ligatu..Arabic Small High Seen
(0x006df, 0x006e4,), # Arabic Small High Rounde..Arabic Small High Madda
(0x006e7, 0x006e8,), # Arabic Small High Yeh ..Arabic Small High Noon
(0x006ea, 0x006ed,), # Arabic Empty Centre Low ..Arabic Small Low Meem
(0x00711, 0x00711,), # Syriac Letter Superscrip..Syriac Letter Superscrip
(0x00730, 0x0074a,), # Syriac Pthaha Above ..Syriac Barrekh
(0x007a6, 0x007b0,), # Thaana Abafili ..Thaana Sukun
(0x007eb, 0x007f3,), # Nko Combining Short High..Nko Combining Double Dot
(0x00816, 0x00819,), # Samaritan Mark In ..Samaritan Mark Dagesh
(0x0081b, 0x00823,), # Samaritan Mark Epentheti..Samaritan Vowel Sign A
(0x00825, 0x00827,), # Samaritan Vowel Sign Sho..Samaritan Vowel Sign U
(0x00829, 0x0082d,), # Samaritan Vowel Sign Lon..Samaritan Mark Nequdaa
(0x00859, 0x0085b,), # Mandaic Affrication Mark..Mandaic Gemination Mark
(0x008d4, 0x008e1,), # Arabic Small High Word A..Arabic Small High Sign S
(0x008e3, 0x00902,), # Arabic Turned Damma Belo..Devanagari Sign Anusvara
(0x0093a, 0x0093a,), # Devanagari Vowel Sign Oe..Devanagari Vowel Sign Oe
(0x0093c, 0x0093c,), # Devanagari Sign Nukta ..Devanagari Sign Nukta
(0x00941, 0x00948,), # Devanagari Vowel Sign U ..Devanagari Vowel Sign Ai
(0x0094d, 0x0094d,), # Devanagari Sign Virama ..Devanagari Sign Virama
(0x00951, 0x00957,), # Devanagari Stress Sign U..Devanagari Vowel Sign Uu
(0x00962, 0x00963,), # Devanagari Vowel Sign Vo..Devanagari Vowel Sign Vo
(0x00981, 0x00981,), # Bengali Sign Candrabindu..Bengali Sign Candrabindu
(0x009bc, 0x009bc,), # Bengali Sign Nukta ..Bengali Sign Nukta
(0x009c1, 0x009c4,), # Bengali Vowel Sign U ..Bengali Vowel Sign Vocal
(0x009cd, 0x009cd,), # Bengali Sign Virama ..Bengali Sign Virama
(0x009e2, 0x009e3,), # Bengali Vowel Sign Vocal..Bengali Vowel Sign Vocal
(0x00a01, 0x00a02,), # Gurmukhi Sign Adak Bindi..Gurmukhi Sign Bindi
(0x00a3c, 0x00a3c,), # Gurmukhi Sign Nukta ..Gurmukhi Sign Nukta
(0x00a41, 0x00a42,), # Gurmukhi Vowel Sign U ..Gurmukhi Vowel Sign Uu
(0x00a47, 0x00a48,), # Gurmukhi Vowel Sign Ee ..Gurmukhi Vowel Sign Ai
(0x00a4b, 0x00a4d,), # Gurmukhi Vowel Sign Oo ..Gurmukhi Sign Virama
(0x00a51, 0x00a51,), # Gurmukhi Sign Udaat ..Gurmukhi Sign Udaat
(0x00a70, 0x00a71,), # Gurmukhi Tippi ..Gurmukhi Addak
(0x00a75, 0x00a75,), # Gurmukhi Sign Yakash ..Gurmukhi Sign Yakash
(0x00a81, 0x00a82,), # Gujarati Sign Candrabind..Gujarati Sign Anusvara
(0x00abc, 0x00abc,), # Gujarati Sign Nukta ..Gujarati Sign Nukta
(0x00ac1, 0x00ac5,), # Gujarati Vowel Sign U ..Gujarati Vowel Sign Cand
(0x00ac7, 0x00ac8,), # Gujarati Vowel Sign E ..Gujarati Vowel Sign Ai
(0x00acd, 0x00acd,), # Gujarati Sign Virama ..Gujarati Sign Virama
(0x00ae2, 0x00ae3,), # Gujarati Vowel Sign Voca..Gujarati Vowel Sign Voca
(0x00afa, 0x00aff,), # Gujarati Sign Sukun ..Gujarati Sign Two-circle
(0x00b01, 0x00b01,), # Oriya Sign Candrabindu ..Oriya Sign Candrabindu
(0x00b3c, 0x00b3c,), # Oriya Sign Nukta ..Oriya Sign Nukta
(0x00b3f, 0x00b3f,), # Oriya Vowel Sign I ..Oriya Vowel Sign I
(0x00b41, 0x00b44,), # Oriya Vowel Sign U ..Oriya Vowel Sign Vocalic
(0x00b4d, 0x00b4d,), # Oriya Sign Virama ..Oriya Sign Virama
(0x00b56, 0x00b56,), # Oriya Ai Length Mark ..Oriya Ai Length Mark
(0x00b62, 0x00b63,), # Oriya Vowel Sign Vocalic..Oriya Vowel Sign Vocalic
(0x00b82, 0x00b82,), # Tamil Sign Anusvara ..Tamil Sign Anusvara
(0x00bc0, 0x00bc0,), # Tamil Vowel Sign Ii ..Tamil Vowel Sign Ii
(0x00bcd, 0x00bcd,), # Tamil Sign Virama ..Tamil Sign Virama
(0x00c00, 0x00c00,), # Telugu Sign Combining Ca..Telugu Sign Combining Ca
(0x00c3e, 0x00c40,), # Telugu Vowel Sign Aa ..Telugu Vowel Sign Ii
(0x00c46, 0x00c48,), # Telugu Vowel Sign E ..Telugu Vowel Sign Ai
(0x00c4a, 0x00c4d,), # Telugu Vowel Sign O ..Telugu Sign Virama
(0x00c55, 0x00c56,), # Telugu Length Mark ..Telugu Ai Length Mark
(0x00c62, 0x00c63,), # Telugu Vowel Sign Vocali..Telugu Vowel Sign Vocali
(0x00c81, 0x00c81,), # Kannada Sign Candrabindu..Kannada Sign Candrabindu
(0x00cbc, 0x00cbc,), # Kannada Sign Nukta ..Kannada Sign Nukta
(0x00cbf, 0x00cbf,), # Kannada Vowel Sign I ..Kannada Vowel Sign I
(0x00cc6, 0x00cc6,), # Kannada Vowel Sign E ..Kannada Vowel Sign E
(0x00ccc, 0x00ccd,), # Kannada Vowel Sign Au ..Kannada Sign Virama
(0x00ce2, 0x00ce3,), # Kannada Vowel Sign Vocal..Kannada Vowel Sign Vocal
(0x00d00, 0x00d01,), # Malayalam Sign Combining..Malayalam Sign Candrabin
(0x00d3b, 0x00d3c,), # Malayalam Sign Vertical ..Malayalam Sign Circular
(0x00d41, 0x00d44,), # Malayalam Vowel Sign U ..Malayalam Vowel Sign Voc
(0x00d4d, 0x00d4d,), # Malayalam Sign Virama ..Malayalam Sign Virama
(0x00d62, 0x00d63,), # Malayalam Vowel Sign Voc..Malayalam Vowel Sign Voc
(0x00dca, 0x00dca,), # Sinhala Sign Al-lakuna ..Sinhala Sign Al-lakuna
(0x00dd2, 0x00dd4,), # Sinhala Vowel Sign Ketti..Sinhala Vowel Sign Ketti
(0x00dd6, 0x00dd6,), # Sinhala Vowel Sign Diga ..Sinhala Vowel Sign Diga
(0x00e31, 0x00e31,), # Thai Character Mai Han-a..Thai Character Mai Han-a
(0x00e34, 0x00e3a,), # Thai Character Sara I ..Thai Character Phinthu
(0x00e47, 0x00e4e,), # Thai Character Maitaikhu..Thai Character Yamakkan
(0x00eb1, 0x00eb1,), # Lao Vowel Sign Mai Kan ..Lao Vowel Sign Mai Kan
(0x00eb4, 0x00eb9,), # Lao Vowel Sign I ..Lao Vowel Sign Uu
(0x00ebb, 0x00ebc,), # Lao Vowel Sign Mai Kon ..Lao Semivowel Sign Lo
(0x00ec8, 0x00ecd,), # Lao Tone Mai Ek ..Lao Niggahita
(0x00f18, 0x00f19,), # Tibetan Astrological Sig..Tibetan Astrological Sig
(0x00f35, 0x00f35,), # Tibetan Mark Ngas Bzung ..Tibetan Mark Ngas Bzung
(0x00f37, 0x00f37,), # Tibetan Mark Ngas Bzung ..Tibetan Mark Ngas Bzung
(0x00f39, 0x00f39,), # Tibetan Mark Tsa -phru ..Tibetan Mark Tsa -phru
(0x00f71, 0x00f7e,), # Tibetan Vowel Sign Aa ..Tibetan Sign Rjes Su Nga
(0x00f80, 0x00f84,), # Tibetan Vowel Sign Rever..Tibetan Mark Halanta
(0x00f86, 0x00f87,), # Tibetan Sign Lci Rtags ..Tibetan Sign Yang Rtags
(0x00f8d, 0x00f97,), # Tibetan Subjoined Sign L..Tibetan Subjoined Letter
(0x00f99, 0x00fbc,), # Tibetan Subjoined Letter..Tibetan Subjoined Letter
(0x00fc6, 0x00fc6,), # Tibetan Symbol Padma Gda..Tibetan Symbol Padma Gda
(0x0102d, 0x01030,), # Myanmar Vowel Sign I ..Myanmar Vowel Sign Uu
(0x01032, 0x01037,), # Myanmar Vowel Sign Ai ..Myanmar Sign Dot Below
(0x01039, 0x0103a,), # Myanmar Sign Virama ..Myanmar Sign Asat
(0x0103d, 0x0103e,), # Myanmar Consonant Sign M..Myanmar Consonant Sign M
(0x01058, 0x01059,), # Myanmar Vowel Sign Vocal..Myanmar Vowel Sign Vocal
(0x0105e, 0x01060,), # Myanmar Consonant Sign M..Myanmar Consonant Sign M
(0x01071, 0x01074,), # Myanmar Vowel Sign Geba ..Myanmar Vowel Sign Kayah
(0x01082, 0x01082,), # Myanmar Consonant Sign S..Myanmar Consonant Sign S
(0x01085, 0x01086,), # Myanmar Vowel Sign Shan ..Myanmar Vowel Sign Shan
(0x0108d, 0x0108d,), # Myanmar Sign Shan Counci..Myanmar Sign Shan Counci
(0x0109d, 0x0109d,), # Myanmar Vowel Sign Aiton..Myanmar Vowel Sign Aiton
(0x0135d, 0x0135f,), # Ethiopic Combining Gemin..Ethiopic Combining Gemin
(0x01712, 0x01714,), # Tagalog Vowel Sign I ..Tagalog Sign Virama
(0x01732, 0x01734,), # Hanunoo Vowel Sign I ..Hanunoo Sign Pamudpod
(0x01752, 0x01753,), # Buhid Vowel Sign I ..Buhid Vowel Sign U
(0x01772, 0x01773,), # Tagbanwa Vowel Sign I ..Tagbanwa Vowel Sign U
(0x017b4, 0x017b5,), # Khmer Vowel Inherent Aq ..Khmer Vowel Inherent Aa
(0x017b7, 0x017bd,), # Khmer Vowel Sign I ..Khmer Vowel Sign Ua
(0x017c6, 0x017c6,), # Khmer Sign Nikahit ..Khmer Sign Nikahit
(0x017c9, 0x017d3,), # Khmer Sign Muusikatoan ..Khmer Sign Bathamasat
(0x017dd, 0x017dd,), # Khmer Sign Atthacan ..Khmer Sign Atthacan
(0x0180b, 0x0180d,), # Mongolian Free Variation..Mongolian Free Variation
(0x01885, 0x01886,), # Mongolian Letter Ali Gal..Mongolian Letter Ali Gal
(0x018a9, 0x018a9,), # Mongolian Letter Ali Gal..Mongolian Letter Ali Gal
(0x01920, 0x01922,), # Limbu Vowel Sign A ..Limbu Vowel Sign U
(0x01927, 0x01928,), # Limbu Vowel Sign E ..Limbu Vowel Sign O
(0x01932, 0x01932,), # Limbu Small Letter Anusv..Limbu Small Letter Anusv
(0x01939, 0x0193b,), # Limbu Sign Mukphreng ..Limbu Sign Sa-i
(0x01a17, 0x01a18,), # Buginese Vowel Sign I ..Buginese Vowel Sign U
(0x01a1b, 0x01a1b,), # Buginese Vowel Sign Ae ..Buginese Vowel Sign Ae
(0x01a56, 0x01a56,), # Tai Tham Consonant Sign ..Tai Tham Consonant Sign
(0x01a58, 0x01a5e,), # Tai Tham Sign Mai Kang L..Tai Tham Consonant Sign
(0x01a60, 0x01a60,), # Tai Tham Sign Sakot ..Tai Tham Sign Sakot
(0x01a62, 0x01a62,), # Tai Tham Vowel Sign Mai ..Tai Tham Vowel Sign Mai
(0x01a65, 0x01a6c,), # Tai Tham Vowel Sign I ..Tai Tham Vowel Sign Oa B
(0x01a73, 0x01a7c,), # Tai Tham Vowel Sign Oa A..Tai Tham Sign Khuen-lue
(0x01a7f, 0x01a7f,), # Tai Tham Combining Crypt..Tai Tham Combining Crypt
(0x01ab0, 0x01abe,), # Combining Doubled Circum..Combining Parentheses Ov
(0x01b00, 0x01b03,), # Balinese Sign Ulu Ricem ..Balinese Sign Surang
(0x01b34, 0x01b34,), # Balinese Sign Rerekan ..Balinese Sign Rerekan
(0x01b36, 0x01b3a,), # Balinese Vowel Sign Ulu ..Balinese Vowel Sign Ra R
(0x01b3c, 0x01b3c,), # Balinese Vowel Sign La L..Balinese Vowel Sign La L
(0x01b42, 0x01b42,), # Balinese Vowel Sign Pepe..Balinese Vowel Sign Pepe
(0x01b6b, 0x01b73,), # Balinese Musical Symbol ..Balinese Musical Symbol
(0x01b80, 0x01b81,), # Sundanese Sign Panyecek ..Sundanese Sign Panglayar
(0x01ba2, 0x01ba5,), # Sundanese Consonant Sign..Sundanese Vowel Sign Pan
(0x01ba8, 0x01ba9,), # Sundanese Vowel Sign Pam..Sundanese Vowel Sign Pan
(0x01bab, 0x01bad,), # Sundanese Sign Virama ..Sundanese Consonant Sign
(0x01be6, 0x01be6,), # Batak Sign Tompi ..Batak Sign Tompi
(0x01be8, 0x01be9,), # Batak Vowel Sign Pakpak ..Batak Vowel Sign Ee
(0x01bed, 0x01bed,), # Batak Vowel Sign Karo O ..Batak Vowel Sign Karo O
(0x01bef, 0x01bf1,), # Batak Vowel Sign U For S..Batak Consonant Sign H
(0x01c2c, 0x01c33,), # Lepcha Vowel Sign E ..Lepcha Consonant Sign T
(0x01c36, 0x01c37,), # Lepcha Sign Ran ..Lepcha Sign Nukta
(0x01cd0, 0x01cd2,), # Vedic Tone Karshana ..Vedic Tone Prenkha
(0x01cd4, 0x01ce0,), # Vedic Sign Yajurvedic Mi..Vedic Tone Rigvedic Kash
(0x01ce2, 0x01ce8,), # Vedic Sign Visarga Svari..Vedic Sign Visarga Anuda
(0x01ced, 0x01ced,), # Vedic Sign Tiryak ..Vedic Sign Tiryak
(0x01cf4, 0x01cf4,), # Vedic Tone Candra Above ..Vedic Tone Candra Above
(0x01cf8, 0x01cf9,), # Vedic Tone Ring Above ..Vedic Tone Double Ring A
(0x01dc0, 0x01df9,), # Combining Dotted Grave A..Combining Wide Inverted
(0x01dfb, 0x01dff,), # Combining Deletion Mark ..Combining Right Arrowhea
(0x020d0, 0x020f0,), # Combining Left Harpoon A..Combining Asterisk Above
(0x02cef, 0x02cf1,), # Coptic Combining Ni Abov..Coptic Combining Spiritu
(0x02d7f, 0x02d7f,), # Tifinagh Consonant Joine..Tifinagh Consonant Joine
(0x02de0, 0x02dff,), # Combining Cyrillic Lette..Combining Cyrillic Lette
(0x0302a, 0x0302d,), # Ideographic Level Tone M..Ideographic Entering Ton
(0x03099, 0x0309a,), # Combining Katakana-hirag..Combining Katakana-hirag
(0x0a66f, 0x0a672,), # Combining Cyrillic Vzmet..Combining Cyrillic Thous
(0x0a674, 0x0a67d,), # Combining Cyrillic Lette..Combining Cyrillic Payer
(0x0a69e, 0x0a69f,), # Combining Cyrillic Lette..Combining Cyrillic Lette
(0x0a6f0, 0x0a6f1,), # Bamum Combining Mark Koq..Bamum Combining Mark Tuk
(0x0a802, 0x0a802,), # Syloti Nagri Sign Dvisva..Syloti Nagri Sign Dvisva
(0x0a806, 0x0a806,), # Syloti Nagri Sign Hasant..Syloti Nagri Sign Hasant
(0x0a80b, 0x0a80b,), # Syloti Nagri Sign Anusva..Syloti Nagri Sign Anusva
(0x0a825, 0x0a826,), # Syloti Nagri Vowel Sign ..Syloti Nagri Vowel Sign
(0x0a8c4, 0x0a8c5,), # Saurashtra Sign Virama ..Saurashtra Sign Candrabi
(0x0a8e0, 0x0a8f1,), # Combining Devanagari Dig..Combining Devanagari Sig
(0x0a926, 0x0a92d,), # Kayah Li Vowel Ue ..Kayah Li Tone Calya Plop
(0x0a947, 0x0a951,), # Rejang Vowel Sign I ..Rejang Consonant Sign R
(0x0a980, 0x0a982,), # Javanese Sign Panyangga ..Javanese Sign Layar
(0x0a9b3, 0x0a9b3,), # Javanese Sign Cecak Telu..Javanese Sign Cecak Telu
(0x0a9b6, 0x0a9b9,), # Javanese Vowel Sign Wulu..Javanese Vowel Sign Suku
(0x0a9bc, 0x0a9bc,), # Javanese Vowel Sign Pepe..Javanese Vowel Sign Pepe
(0x0a9e5, 0x0a9e5,), # Myanmar Sign Shan Saw ..Myanmar Sign Shan Saw
(0x0aa29, 0x0aa2e,), # Cham Vowel Sign Aa ..Cham Vowel Sign Oe
(0x0aa31, 0x0aa32,), # Cham Vowel Sign Au ..Cham Vowel Sign Ue
(0x0aa35, 0x0aa36,), # Cham Consonant Sign La ..Cham Consonant Sign Wa
(0x0aa43, 0x0aa43,), # Cham Consonant Sign Fina..Cham Consonant Sign Fina
(0x0aa4c, 0x0aa4c,), # Cham Consonant Sign Fina..Cham Consonant Sign Fina
(0x0aa7c, 0x0aa7c,), # Myanmar Sign Tai Laing T..Myanmar Sign Tai Laing T
(0x0aab0, 0x0aab0,), # Tai Viet Mai Kang ..Tai Viet Mai Kang
(0x0aab2, 0x0aab4,), # Tai Viet Vowel I ..Tai Viet Vowel U
(0x0aab7, 0x0aab8,), # Tai Viet Mai Khit ..Tai Viet Vowel Ia
(0x0aabe, 0x0aabf,), # Tai Viet Vowel Am ..Tai Viet Tone Mai Ek
(0x0aac1, 0x0aac1,), # Tai Viet Tone Mai Tho ..Tai Viet Tone Mai Tho
(0x0aaec, 0x0aaed,), # Meetei Mayek Vowel Sign ..Meetei Mayek Vowel Sign
(0x0aaf6, 0x0aaf6,), # Meetei Mayek Virama ..Meetei Mayek Virama
(0x0abe5, 0x0abe5,), # Meetei Mayek Vowel Sign ..Meetei Mayek Vowel Sign
(0x0abe8, 0x0abe8,), # Meetei Mayek Vowel Sign ..Meetei Mayek Vowel Sign
(0x0abed, 0x0abed,), # Meetei Mayek Apun Iyek ..Meetei Mayek Apun Iyek
(0x0fb1e, 0x0fb1e,), # Hebrew Point Judeo-spani..Hebrew Point Judeo-spani
(0x0fe00, 0x0fe0f,), # Variation Selector-1 ..Variation Selector-16
(0x0fe20, 0x0fe2f,), # Combining Ligature Left ..Combining Cyrillic Titlo
(0x101fd, 0x101fd,), # Phaistos Disc Sign Combi..Phaistos Disc Sign Combi
(0x102e0, 0x102e0,), # Coptic Epact Thousands M..Coptic Epact Thousands M
(0x10376, 0x1037a,), # Combining Old Permic Let..Combining Old Permic Let
(0x10a01, 0x10a03,), # Kharoshthi Vowel Sign I ..Kharoshthi Vowel Sign Vo
(0x10a05, 0x10a06,), # Kharoshthi Vowel Sign E ..Kharoshthi Vowel Sign O
(0x10a0c, 0x10a0f,), # Kharoshthi Vowel Length ..Kharoshthi Sign Visarga
(0x10a38, 0x10a3a,), # Kharoshthi Sign Bar Abov..Kharoshthi Sign Dot Belo
(0x10a3f, 0x10a3f,), # Kharoshthi Virama ..Kharoshthi Virama
(0x10ae5, 0x10ae6,), # Manichaean Abbreviation ..Manichaean Abbreviation
(0x11001, 0x11001,), # Brahmi Sign Anusvara ..Brahmi Sign Anusvara
(0x11038, 0x11046,), # Brahmi Vowel Sign Aa ..Brahmi Virama
(0x1107f, 0x11081,), # Brahmi Number Joiner ..Kaithi Sign Anusvara
(0x110b3, 0x110b6,), # Kaithi Vowel Sign U ..Kaithi Vowel Sign Ai
(0x110b9, 0x110ba,), # Kaithi Sign Virama ..Kaithi Sign Nukta
(0x11100, 0x11102,), # Chakma Sign Candrabindu ..Chakma Sign Visarga
(0x11127, 0x1112b,), # Chakma Vowel Sign A ..Chakma Vowel Sign Uu
(0x1112d, 0x11134,), # Chakma Vowel Sign Ai ..Chakma Maayyaa
(0x11173, 0x11173,), # Mahajani Sign Nukta ..Mahajani Sign Nukta
(0x11180, 0x11181,), # Sharada Sign Candrabindu..Sharada Sign Anusvara
(0x111b6, 0x111be,), # Sharada Vowel Sign U ..Sharada Vowel Sign O
(0x111ca, 0x111cc,), # Sharada Sign Nukta ..Sharada Extra Short Vowe
(0x1122f, 0x11231,), # Khojki Vowel Sign U ..Khojki Vowel Sign Ai
(0x11234, 0x11234,), # Khojki Sign Anusvara ..Khojki Sign Anusvara
(0x11236, 0x11237,), # Khojki Sign Nukta ..Khojki Sign Shadda
(0x1123e, 0x1123e,), # Khojki Sign Sukun ..Khojki Sign Sukun
(0x112df, 0x112df,), # Khudawadi Sign Anusvara ..Khudawadi Sign Anusvara
(0x112e3, 0x112ea,), # Khudawadi Vowel Sign U ..Khudawadi Sign Virama
(0x11300, 0x11301,), # Grantha Sign Combining A..Grantha Sign Candrabindu
(0x1133c, 0x1133c,), # Grantha Sign Nukta ..Grantha Sign Nukta
(0x11340, 0x11340,), # Grantha Vowel Sign Ii ..Grantha Vowel Sign Ii
(0x11366, 0x1136c,), # Combining Grantha Digit ..Combining Grantha Digit
(0x11370, 0x11374,), # Combining Grantha Letter..Combining Grantha Letter
(0x11438, 0x1143f,), # Newa Vowel Sign U ..Newa Vowel Sign Ai
(0x11442, 0x11444,), # Newa Sign Virama ..Newa Sign Anusvara
(0x11446, 0x11446,), # Newa Sign Nukta ..Newa Sign Nukta
(0x114b3, 0x114b8,), # Tirhuta Vowel Sign U ..Tirhuta Vowel Sign Vocal
(0x114ba, 0x114ba,), # Tirhuta Vowel Sign Short..Tirhuta Vowel Sign Short
(0x114bf, 0x114c0,), # Tirhuta Sign Candrabindu..Tirhuta Sign Anusvara
(0x114c2, 0x114c3,), # Tirhuta Sign Virama ..Tirhuta Sign Nukta
(0x115b2, 0x115b5,), # Siddham Vowel Sign U ..Siddham Vowel Sign Vocal
(0x115bc, 0x115bd,), # Siddham Sign Candrabindu..Siddham Sign Anusvara
(0x115bf, 0x115c0,), # Siddham Sign Virama ..Siddham Sign Nukta
(0x115dc, 0x115dd,), # Siddham Vowel Sign Alter..Siddham Vowel Sign Alter
(0x11633, 0x1163a,), # Modi Vowel Sign U ..Modi Vowel Sign Ai
(0x1163d, 0x1163d,), # Modi Sign Anusvara ..Modi Sign Anusvara
(0x1163f, 0x11640,), # Modi Sign Virama ..Modi Sign Ardhacandra
(0x116ab, 0x116ab,), # Takri Sign Anusvara ..Takri Sign Anusvara
(0x116ad, 0x116ad,), # Takri Vowel Sign Aa ..Takri Vowel Sign Aa
(0x116b0, 0x116b5,), # Takri Vowel Sign U ..Takri Vowel Sign Au
(0x116b7, 0x116b7,), # Takri Sign Nukta ..Takri Sign Nukta
(0x1171d, 0x1171f,), # Ahom Consonant Sign Medi..Ahom Consonant Sign Medi
(0x11722, 0x11725,), # Ahom Vowel Sign I ..Ahom Vowel Sign Uu
(0x11727, 0x1172b,), # Ahom Vowel Sign Aw ..Ahom Sign Killer
(0x11a01, 0x11a06,), # Zanabazar Square Vowel S..Zanabazar Square Vowel S
(0x11a09, 0x11a0a,), # Zanabazar Square Vowel S..Zanabazar Square Vowel L
(0x11a33, 0x11a38,), # Zanabazar Square Final C..Zanabazar Square Sign An
(0x11a3b, 0x11a3e,), # Zanabazar Square Cluster..Zanabazar Square Cluster
(0x11a47, 0x11a47,), # Zanabazar Square Subjoin..Zanabazar Square Subjoin
(0x11a51, 0x11a56,), # Soyombo Vowel Sign I ..Soyombo Vowel Sign Oe
(0x11a59, 0x11a5b,), # Soyombo Vowel Sign Vocal..Soyombo Vowel Length Mar
(0x11a8a, 0x11a96,), # Soyombo Final Consonant ..Soyombo Sign Anusvara
(0x11a98, 0x11a99,), # Soyombo Gemination Mark ..Soyombo Subjoiner
(0x11c30, 0x11c36,), # Bhaiksuki Vowel Sign I ..Bhaiksuki Vowel Sign Voc
(0x11c38, 0x11c3d,), # Bhaiksuki Vowel Sign E ..Bhaiksuki Sign Anusvara
(0x11c3f, 0x11c3f,), # Bhaiksuki Sign Virama ..Bhaiksuki Sign Virama
(0x11c92, 0x11ca7,), # Marchen Subjoined Letter..Marchen Subjoined Letter
(0x11caa, 0x11cb0,), # Marchen Subjoined Letter..Marchen Vowel Sign Aa
(0x11cb2, 0x11cb3,), # Marchen Vowel Sign U ..Marchen Vowel Sign E
(0x11cb5, 0x11cb6,), # Marchen Sign Anusvara ..Marchen Sign Candrabindu
(0x11d31, 0x11d36,), # Masaram Gondi Vowel Sign..Masaram Gondi Vowel Sign
(0x11d3a, 0x11d3a,), # Masaram Gondi Vowel Sign..Masaram Gondi Vowel Sign
(0x11d3c, 0x11d3d,), # Masaram Gondi Vowel Sign..Masaram Gondi Vowel Sign
(0x11d3f, 0x11d45,), # Masaram Gondi Vowel Sign..Masaram Gondi Virama
(0x11d47, 0x11d47,), # Masaram Gondi Ra-kara ..Masaram Gondi Ra-kara
(0x16af0, 0x16af4,), # Bassa Vah Combining High..Bassa Vah Combining High
(0x16b30, 0x16b36,), # Pahawh Hmong Mark Cim Tu..Pahawh Hmong Mark Cim Ta
(0x16f8f, 0x16f92,), # Miao Tone Right ..Miao Tone Below
(0x1bc9d, 0x1bc9e,), # Duployan Thick Letter Se..Duployan Double Mark
(0x1d167, 0x1d169,), # Musical Symbol Combining..Musical Symbol Combining
(0x1d17b, 0x1d182,), # Musical Symbol Combining..Musical Symbol Combining
(0x1d185, 0x1d18b,), # Musical Symbol Combining..Musical Symbol Combining
(0x1d1aa, 0x1d1ad,), # Musical Symbol Combining..Musical Symbol Combining
(0x1d242, 0x1d244,), # Combining Greek Musical ..Combining Greek Musical
(0x1da00, 0x1da36,), # Signwriting Head Rim ..Signwriting Air Sucking
(0x1da3b, 0x1da6c,), # Signwriting Mouth Closed..Signwriting Excitement
(0x1da75, 0x1da75,), # Signwriting Upper Body T..Signwriting Upper Body T
(0x1da84, 0x1da84,), # Signwriting Location Hea..Signwriting Location Hea
(0x1da9b, 0x1da9f,), # Signwriting Fill Modifie..Signwriting Fill Modifie
(0x1daa1, 0x1daaf,), # Signwriting Rotation Mod..Signwriting Rotation Mod
(0x1e000, 0x1e006,), # Combining Glagolitic Let..Combining Glagolitic Let
(0x1e008, 0x1e018,), # Combining Glagolitic Let..Combining Glagolitic Let
(0x1e01b, 0x1e021,), # Combining Glagolitic Let..Combining Glagolitic Let
(0x1e023, 0x1e024,), # Combining Glagolitic Let..Combining Glagolitic Let
(0x1e026, 0x1e02a,), # Combining Glagolitic Let..Combining Glagolitic Let
(0x1e8d0, 0x1e8d6,), # Mende Kikakui Combining ..Mende Kikakui Combining
(0x1e944, 0x1e94a,), # Adlam Alif Lengthener ..Adlam Nukta
(0xe0100, 0xe01ef,), # Variation Selector-17 ..Variation Selector-256
),
'11.0.0': (
# Source: DerivedGeneralCategory-11.0.0.txt
# Date: 2018-02-21, 05:34:04 GMT
#
(0x00300, 0x0036f,), # Combining Grave Accent ..Combining Latin Small Le
(0x00483, 0x00489,), # Combining Cyrillic Titlo..Combining Cyrillic Milli
(0x00591, 0x005bd,), # Hebrew Accent Etnahta ..Hebrew Point Meteg
(0x005bf, 0x005bf,), # Hebrew Point Rafe ..Hebrew Point Rafe
(0x005c1, 0x005c2,), # Hebrew Point Shin Dot ..Hebrew Point Sin Dot
(0x005c4, 0x005c5,), # Hebrew Mark Upper Dot ..Hebrew Mark Lower Dot
(0x005c7, 0x005c7,), # Hebrew Point Qamats Qata..Hebrew Point Qamats Qata
(0x00610, 0x0061a,), # Arabic Sign Sallallahou ..Arabic Small Kasra
(0x0064b, 0x0065f,), # Arabic Fathatan ..Arabic Wavy Hamza Below
(0x00670, 0x00670,), # Arabic Letter Superscrip..Arabic Letter Superscrip
(0x006d6, 0x006dc,), # Arabic Small High Ligatu..Arabic Small High Seen
(0x006df, 0x006e4,), # Arabic Small High Rounde..Arabic Small High Madda
(0x006e7, 0x006e8,), # Arabic Small High Yeh ..Arabic Small High Noon
(0x006ea, 0x006ed,), # Arabic Empty Centre Low ..Arabic Small Low Meem
(0x00711, 0x00711,), # Syriac Letter Superscrip..Syriac Letter Superscrip
(0x00730, 0x0074a,), # Syriac Pthaha Above ..Syriac Barrekh
(0x007a6, 0x007b0,), # Thaana Abafili ..Thaana Sukun
(0x007eb, 0x007f3,), # Nko Combining Short High..Nko Combining Double Dot
(0x007fd, 0x007fd,), # Nko Dantayalan ..Nko Dantayalan
(0x00816, 0x00819,), # Samaritan Mark In ..Samaritan Mark Dagesh
(0x0081b, 0x00823,), # Samaritan Mark Epentheti..Samaritan Vowel Sign A
(0x00825, 0x00827,), # Samaritan Vowel Sign Sho..Samaritan Vowel Sign U
(0x00829, 0x0082d,), # Samaritan Vowel Sign Lon..Samaritan Mark Nequdaa
(0x00859, 0x0085b,), # Mandaic Affrication Mark..Mandaic Gemination Mark
(0x008d3, 0x008e1,), # Arabic Small Low Waw ..Arabic Small High Sign S
(0x008e3, 0x00902,), # Arabic Turned Damma Belo..Devanagari Sign Anusvara
(0x0093a, 0x0093a,), # Devanagari Vowel Sign Oe..Devanagari Vowel Sign Oe
(0x0093c, 0x0093c,), # Devanagari Sign Nukta ..Devanagari Sign Nukta
(0x00941, 0x00948,), # Devanagari Vowel Sign U ..Devanagari Vowel Sign Ai
(0x0094d, 0x0094d,), # Devanagari Sign Virama ..Devanagari Sign Virama
(0x00951, 0x00957,), # Devanagari Stress Sign U..Devanagari Vowel Sign Uu
(0x00962, 0x00963,), # Devanagari Vowel Sign Vo..Devanagari Vowel Sign Vo
(0x00981, 0x00981,), # Bengali Sign Candrabindu..Bengali Sign Candrabindu
(0x009bc, 0x009bc,), # Bengali Sign Nukta ..Bengali Sign Nukta
(0x009c1, 0x009c4,), # Bengali Vowel Sign U ..Bengali Vowel Sign Vocal
(0x009cd, 0x009cd,), # Bengali Sign Virama ..Bengali Sign Virama
(0x009e2, 0x009e3,), # Bengali Vowel Sign Vocal..Bengali Vowel Sign Vocal
(0x009fe, 0x009fe,), # Bengali Sandhi Mark ..Bengali Sandhi Mark
(0x00a01, 0x00a02,), # Gurmukhi Sign Adak Bindi..Gurmukhi Sign Bindi
(0x00a3c, 0x00a3c,), # Gurmukhi Sign Nukta ..Gurmukhi Sign Nukta
(0x00a41, 0x00a42,), # Gurmukhi Vowel Sign U ..Gurmukhi Vowel Sign Uu
(0x00a47, 0x00a48,), # Gurmukhi Vowel Sign Ee ..Gurmukhi Vowel Sign Ai
(0x00a4b, 0x00a4d,), # Gurmukhi Vowel Sign Oo ..Gurmukhi Sign Virama
(0x00a51, 0x00a51,), # Gurmukhi Sign Udaat ..Gurmukhi Sign Udaat
(0x00a70, 0x00a71,), # Gurmukhi Tippi ..Gurmukhi Addak
(0x00a75, 0x00a75,), # Gurmukhi Sign Yakash ..Gurmukhi Sign Yakash
(0x00a81, 0x00a82,), # Gujarati Sign Candrabind..Gujarati Sign Anusvara
(0x00abc, 0x00abc,), # Gujarati Sign Nukta ..Gujarati Sign Nukta
(0x00ac1, 0x00ac5,), # Gujarati Vowel Sign U ..Gujarati Vowel Sign Cand
(0x00ac7, 0x00ac8,), # Gujarati Vowel Sign E ..Gujarati Vowel Sign Ai
(0x00acd, 0x00acd,), # Gujarati Sign Virama ..Gujarati Sign Virama
(0x00ae2, 0x00ae3,), # Gujarati Vowel Sign Voca..Gujarati Vowel Sign Voca
(0x00afa, 0x00aff,), # Gujarati Sign Sukun ..Gujarati Sign Two-circle
(0x00b01, 0x00b01,), # Oriya Sign Candrabindu ..Oriya Sign Candrabindu
(0x00b3c, 0x00b3c,), # Oriya Sign Nukta ..Oriya Sign Nukta
(0x00b3f, 0x00b3f,), # Oriya Vowel Sign I ..Oriya Vowel Sign I
(0x00b41, 0x00b44,), # Oriya Vowel Sign U ..Oriya Vowel Sign Vocalic
(0x00b4d, 0x00b4d,), # Oriya Sign Virama ..Oriya Sign Virama
(0x00b56, 0x00b56,), # Oriya Ai Length Mark ..Oriya Ai Length Mark
(0x00b62, 0x00b63,), # Oriya Vowel Sign Vocalic..Oriya Vowel Sign Vocalic
(0x00b82, 0x00b82,), # Tamil Sign Anusvara ..Tamil Sign Anusvara
(0x00bc0, 0x00bc0,), # Tamil Vowel Sign Ii ..Tamil Vowel Sign Ii
(0x00bcd, 0x00bcd,), # Tamil Sign Virama ..Tamil Sign Virama
(0x00c00, 0x00c00,), # Telugu Sign Combining Ca..Telugu Sign Combining Ca
(0x00c04, 0x00c04,), # Telugu Sign Combining An..Telugu Sign Combining An
(0x00c3e, 0x00c40,), # Telugu Vowel Sign Aa ..Telugu Vowel Sign Ii
(0x00c46, 0x00c48,), # Telugu Vowel Sign E ..Telugu Vowel Sign Ai
(0x00c4a, 0x00c4d,), # Telugu Vowel Sign O ..Telugu Sign Virama
(0x00c55, 0x00c56,), # Telugu Length Mark ..Telugu Ai Length Mark
(0x00c62, 0x00c63,), # Telugu Vowel Sign Vocali..Telugu Vowel Sign Vocali
(0x00c81, 0x00c81,), # Kannada Sign Candrabindu..Kannada Sign Candrabindu
(0x00cbc, 0x00cbc,), # Kannada Sign Nukta ..Kannada Sign Nukta
(0x00cbf, 0x00cbf,), # Kannada Vowel Sign I ..Kannada Vowel Sign I
(0x00cc6, 0x00cc6,), # Kannada Vowel Sign E ..Kannada Vowel Sign E
(0x00ccc, 0x00ccd,), # Kannada Vowel Sign Au ..Kannada Sign Virama
(0x00ce2, 0x00ce3,), # Kannada Vowel Sign Vocal..Kannada Vowel Sign Vocal
(0x00d00, 0x00d01,), # Malayalam Sign Combining..Malayalam Sign Candrabin
(0x00d3b, 0x00d3c,), # Malayalam Sign Vertical ..Malayalam Sign Circular
(0x00d41, 0x00d44,), # Malayalam Vowel Sign U ..Malayalam Vowel Sign Voc
(0x00d4d, 0x00d4d,), # Malayalam Sign Virama ..Malayalam Sign Virama
(0x00d62, 0x00d63,), # Malayalam Vowel Sign Voc..Malayalam Vowel Sign Voc
(0x00dca, 0x00dca,), # Sinhala Sign Al-lakuna ..Sinhala Sign Al-lakuna
(0x00dd2, 0x00dd4,), # Sinhala Vowel Sign Ketti..Sinhala Vowel Sign Ketti
(0x00dd6, 0x00dd6,), # Sinhala Vowel Sign Diga ..Sinhala Vowel Sign Diga
(0x00e31, 0x00e31,), # Thai Character Mai Han-a..Thai Character Mai Han-a
(0x00e34, 0x00e3a,), # Thai Character Sara I ..Thai Character Phinthu
(0x00e47, 0x00e4e,), # Thai Character Maitaikhu..Thai Character Yamakkan
(0x00eb1, 0x00eb1,), # Lao Vowel Sign Mai Kan ..Lao Vowel Sign Mai Kan
(0x00eb4, 0x00eb9,), # Lao Vowel Sign I ..Lao Vowel Sign Uu
(0x00ebb, 0x00ebc,), # Lao Vowel Sign Mai Kon ..Lao Semivowel Sign Lo
(0x00ec8, 0x00ecd,), # Lao Tone Mai Ek ..Lao Niggahita
(0x00f18, 0x00f19,), # Tibetan Astrological Sig..Tibetan Astrological Sig
(0x00f35, 0x00f35,), # Tibetan Mark Ngas Bzung ..Tibetan Mark Ngas Bzung
(0x00f37, 0x00f37,), # Tibetan Mark Ngas Bzung ..Tibetan Mark Ngas Bzung
(0x00f39, 0x00f39,), # Tibetan Mark Tsa -phru ..Tibetan Mark Tsa -phru
(0x00f71, 0x00f7e,), # Tibetan Vowel Sign Aa ..Tibetan Sign Rjes Su Nga
(0x00f80, 0x00f84,), # Tibetan Vowel Sign Rever..Tibetan Mark Halanta
(0x00f86, 0x00f87,), # Tibetan Sign Lci Rtags ..Tibetan Sign Yang Rtags
(0x00f8d, 0x00f97,), # Tibetan Subjoined Sign L..Tibetan Subjoined Letter
(0x00f99, 0x00fbc,), # Tibetan Subjoined Letter..Tibetan Subjoined Letter
(0x00fc6, 0x00fc6,), # Tibetan Symbol Padma Gda..Tibetan Symbol Padma Gda
(0x0102d, 0x01030,), # Myanmar Vowel Sign I ..Myanmar Vowel Sign Uu
(0x01032, 0x01037,), # Myanmar Vowel Sign Ai ..Myanmar Sign Dot Below
(0x01039, 0x0103a,), # Myanmar Sign Virama ..Myanmar Sign Asat
(0x0103d, 0x0103e,), # Myanmar Consonant Sign M..Myanmar Consonant Sign M
(0x01058, 0x01059,), # Myanmar Vowel Sign Vocal..Myanmar Vowel Sign Vocal
(0x0105e, 0x01060,), # Myanmar Consonant Sign M..Myanmar Consonant Sign M
(0x01071, 0x01074,), # Myanmar Vowel Sign Geba ..Myanmar Vowel Sign Kayah
(0x01082, 0x01082,), # Myanmar Consonant Sign S..Myanmar Consonant Sign S
(0x01085, 0x01086,), # Myanmar Vowel Sign Shan ..Myanmar Vowel Sign Shan
(0x0108d, 0x0108d,), # Myanmar Sign Shan Counci..Myanmar Sign Shan Counci
(0x0109d, 0x0109d,), # Myanmar Vowel Sign Aiton..Myanmar Vowel Sign Aiton
(0x0135d, 0x0135f,), # Ethiopic Combining Gemin..Ethiopic Combining Gemin
(0x01712, 0x01714,), # Tagalog Vowel Sign I ..Tagalog Sign Virama
(0x01732, 0x01734,), # Hanunoo Vowel Sign I ..Hanunoo Sign Pamudpod
(0x01752, 0x01753,), # Buhid Vowel Sign I ..Buhid Vowel Sign U
(0x01772, 0x01773,), # Tagbanwa Vowel Sign I ..Tagbanwa Vowel Sign U
(0x017b4, 0x017b5,), # Khmer Vowel Inherent Aq ..Khmer Vowel Inherent Aa
(0x017b7, 0x017bd,), # Khmer Vowel Sign I ..Khmer Vowel Sign Ua
(0x017c6, 0x017c6,), # Khmer Sign Nikahit ..Khmer Sign Nikahit
(0x017c9, 0x017d3,), # Khmer Sign Muusikatoan ..Khmer Sign Bathamasat
(0x017dd, 0x017dd,), # Khmer Sign Atthacan ..Khmer Sign Atthacan
(0x0180b, 0x0180d,), # Mongolian Free Variation..Mongolian Free Variation
(0x01885, 0x01886,), # Mongolian Letter Ali Gal..Mongolian Letter Ali Gal
(0x018a9, 0x018a9,), # Mongolian Letter Ali Gal..Mongolian Letter Ali Gal
(0x01920, 0x01922,), # Limbu Vowel Sign A ..Limbu Vowel Sign U
(0x01927, 0x01928,), # Limbu Vowel Sign E ..Limbu Vowel Sign O
(0x01932, 0x01932,), # Limbu Small Letter Anusv..Limbu Small Letter Anusv
(0x01939, 0x0193b,), # Limbu Sign Mukphreng ..Limbu Sign Sa-i
(0x01a17, 0x01a18,), # Buginese Vowel Sign I ..Buginese Vowel Sign U
(0x01a1b, 0x01a1b,), # Buginese Vowel Sign Ae ..Buginese Vowel Sign Ae
(0x01a56, 0x01a56,), # Tai Tham Consonant Sign ..Tai Tham Consonant Sign
(0x01a58, 0x01a5e,), # Tai Tham Sign Mai Kang L..Tai Tham Consonant Sign
(0x01a60, 0x01a60,), # Tai Tham Sign Sakot ..Tai Tham Sign Sakot
(0x01a62, 0x01a62,), # Tai Tham Vowel Sign Mai ..Tai Tham Vowel Sign Mai
(0x01a65, 0x01a6c,), # Tai Tham Vowel Sign I ..Tai Tham Vowel Sign Oa B
(0x01a73, 0x01a7c,), # Tai Tham Vowel Sign Oa A..Tai Tham Sign Khuen-lue
(0x01a7f, 0x01a7f,), # Tai Tham Combining Crypt..Tai Tham Combining Crypt
(0x01ab0, 0x01abe,), # Combining Doubled Circum..Combining Parentheses Ov
(0x01b00, 0x01b03,), # Balinese Sign Ulu Ricem ..Balinese Sign Surang
(0x01b34, 0x01b34,), # Balinese Sign Rerekan ..Balinese Sign Rerekan
(0x01b36, 0x01b3a,), # Balinese Vowel Sign Ulu ..Balinese Vowel Sign Ra R
(0x01b3c, 0x01b3c,), # Balinese Vowel Sign La L..Balinese Vowel Sign La L
(0x01b42, 0x01b42,), # Balinese Vowel Sign Pepe..Balinese Vowel Sign Pepe
(0x01b6b, 0x01b73,), # Balinese Musical Symbol ..Balinese Musical Symbol
(0x01b80, 0x01b81,), # Sundanese Sign Panyecek ..Sundanese Sign Panglayar
(0x01ba2, 0x01ba5,), # Sundanese Consonant Sign..Sundanese Vowel Sign Pan
(0x01ba8, 0x01ba9,), # Sundanese Vowel Sign Pam..Sundanese Vowel Sign Pan
(0x01bab, 0x01bad,), # Sundanese Sign Virama ..Sundanese Consonant Sign
(0x01be6, 0x01be6,), # Batak Sign Tompi ..Batak Sign Tompi
(0x01be8, 0x01be9,), # Batak Vowel Sign Pakpak ..Batak Vowel Sign Ee
(0x01bed, 0x01bed,), # Batak Vowel Sign Karo O ..Batak Vowel Sign Karo O
(0x01bef, 0x01bf1,), # Batak Vowel Sign U For S..Batak Consonant Sign H
(0x01c2c, 0x01c33,), # Lepcha Vowel Sign E ..Lepcha Consonant Sign T
(0x01c36, 0x01c37,), # Lepcha Sign Ran ..Lepcha Sign Nukta
(0x01cd0, 0x01cd2,), # Vedic Tone Karshana ..Vedic Tone Prenkha
(0x01cd4, 0x01ce0,), # Vedic Sign Yajurvedic Mi..Vedic Tone Rigvedic Kash
(0x01ce2, 0x01ce8,), # Vedic Sign Visarga Svari..Vedic Sign Visarga Anuda
(0x01ced, 0x01ced,), # Vedic Sign Tiryak ..Vedic Sign Tiryak
(0x01cf4, 0x01cf4,), # Vedic Tone Candra Above ..Vedic Tone Candra Above
(0x01cf8, 0x01cf9,), # Vedic Tone Ring Above ..Vedic Tone Double Ring A
(0x01dc0, 0x01df9,), # Combining Dotted Grave A..Combining Wide Inverted
(0x01dfb, 0x01dff,), # Combining Deletion Mark ..Combining Right Arrowhea
(0x020d0, 0x020f0,), # Combining Left Harpoon A..Combining Asterisk Above
(0x02cef, 0x02cf1,), # Coptic Combining Ni Abov..Coptic Combining Spiritu
(0x02d7f, 0x02d7f,), # Tifinagh Consonant Joine..Tifinagh Consonant Joine
(0x02de0, 0x02dff,), # Combining Cyrillic Lette..Combining Cyrillic Lette
(0x0302a, 0x0302d,), # Ideographic Level Tone M..Ideographic Entering Ton
(0x03099, 0x0309a,), # Combining Katakana-hirag..Combining Katakana-hirag
(0x0a66f, 0x0a672,), # Combining Cyrillic Vzmet..Combining Cyrillic Thous
(0x0a674, 0x0a67d,), # Combining Cyrillic Lette..Combining Cyrillic Payer
(0x0a69e, 0x0a69f,), # Combining Cyrillic Lette..Combining Cyrillic Lette
(0x0a6f0, 0x0a6f1,), # Bamum Combining Mark Koq..Bamum Combining Mark Tuk
(0x0a802, 0x0a802,), # Syloti Nagri Sign Dvisva..Syloti Nagri Sign Dvisva
(0x0a806, 0x0a806,), # Syloti Nagri Sign Hasant..Syloti Nagri Sign Hasant
(0x0a80b, 0x0a80b,), # Syloti Nagri Sign Anusva..Syloti Nagri Sign Anusva
(0x0a825, 0x0a826,), # Syloti Nagri Vowel Sign ..Syloti Nagri Vowel Sign
(0x0a8c4, 0x0a8c5,), # Saurashtra Sign Virama ..Saurashtra Sign Candrabi
(0x0a8e0, 0x0a8f1,), # Combining Devanagari Dig..Combining Devanagari Sig
(0x0a8ff, 0x0a8ff,), # Devanagari Vowel Sign Ay..Devanagari Vowel Sign Ay
(0x0a926, 0x0a92d,), # Kayah Li Vowel Ue ..Kayah Li Tone Calya Plop
(0x0a947, 0x0a951,), # Rejang Vowel Sign I ..Rejang Consonant Sign R
(0x0a980, 0x0a982,), # Javanese Sign Panyangga ..Javanese Sign Layar
(0x0a9b3, 0x0a9b3,), # Javanese Sign Cecak Telu..Javanese Sign Cecak Telu
(0x0a9b6, 0x0a9b9,), # Javanese Vowel Sign Wulu..Javanese Vowel Sign Suku
(0x0a9bc, 0x0a9bc,), # Javanese Vowel Sign Pepe..Javanese Vowel Sign Pepe
(0x0a9e5, 0x0a9e5,), # Myanmar Sign Shan Saw ..Myanmar Sign Shan Saw
(0x0aa29, 0x0aa2e,), # Cham Vowel Sign Aa ..Cham Vowel Sign Oe
(0x0aa31, 0x0aa32,), # Cham Vowel Sign Au ..Cham Vowel Sign Ue
(0x0aa35, 0x0aa36,), # Cham Consonant Sign La ..Cham Consonant Sign Wa
(0x0aa43, 0x0aa43,), # Cham Consonant Sign Fina..Cham Consonant Sign Fina
(0x0aa4c, 0x0aa4c,), # Cham Consonant Sign Fina..Cham Consonant Sign Fina
(0x0aa7c, 0x0aa7c,), # Myanmar Sign Tai Laing T..Myanmar Sign Tai Laing T
(0x0aab0, 0x0aab0,), # Tai Viet Mai Kang ..Tai Viet Mai Kang
(0x0aab2, 0x0aab4,), # Tai Viet Vowel I ..Tai Viet Vowel U
(0x0aab7, 0x0aab8,), # Tai Viet Mai Khit ..Tai Viet Vowel Ia
(0x0aabe, 0x0aabf,), # Tai Viet Vowel Am ..Tai Viet Tone Mai Ek
(0x0aac1, 0x0aac1,), # Tai Viet Tone Mai Tho ..Tai Viet Tone Mai Tho
(0x0aaec, 0x0aaed,), # Meetei Mayek Vowel Sign ..Meetei Mayek Vowel Sign
(0x0aaf6, 0x0aaf6,), # Meetei Mayek Virama ..Meetei Mayek Virama
(0x0abe5, 0x0abe5,), # Meetei Mayek Vowel Sign ..Meetei Mayek Vowel Sign
(0x0abe8, 0x0abe8,), # Meetei Mayek Vowel Sign ..Meetei Mayek Vowel Sign
(0x0abed, 0x0abed,), # Meetei Mayek Apun Iyek ..Meetei Mayek Apun Iyek
(0x0fb1e, 0x0fb1e,), # Hebrew Point Judeo-spani..Hebrew Point Judeo-spani
(0x0fe00, 0x0fe0f,), # Variation Selector-1 ..Variation Selector-16
(0x0fe20, 0x0fe2f,), # Combining Ligature Left ..Combining Cyrillic Titlo
(0x101fd, 0x101fd,), # Phaistos Disc Sign Combi..Phaistos Disc Sign Combi
(0x102e0, 0x102e0,), # Coptic Epact Thousands M..Coptic Epact Thousands M
(0x10376, 0x1037a,), # Combining Old Permic Let..Combining Old Permic Let
(0x10a01, 0x10a03,), # Kharoshthi Vowel Sign I ..Kharoshthi Vowel Sign Vo
(0x10a05, 0x10a06,), # Kharoshthi Vowel Sign E ..Kharoshthi Vowel Sign O
(0x10a0c, 0x10a0f,), # Kharoshthi Vowel Length ..Kharoshthi Sign Visarga
(0x10a38, 0x10a3a,), # Kharoshthi Sign Bar Abov..Kharoshthi Sign Dot Belo
(0x10a3f, 0x10a3f,), # Kharoshthi Virama ..Kharoshthi Virama
(0x10ae5, 0x10ae6,), # Manichaean Abbreviation ..Manichaean Abbreviation
(0x10d24, 0x10d27,), # Hanifi Rohingya Sign Har..Hanifi Rohingya Sign Tas
(0x10f46, 0x10f50,), # Sogdian Combining Dot Be..Sogdian Combining Stroke
(0x11001, 0x11001,), # Brahmi Sign Anusvara ..Brahmi Sign Anusvara
(0x11038, 0x11046,), # Brahmi Vowel Sign Aa ..Brahmi Virama
(0x1107f, 0x11081,), # Brahmi Number Joiner ..Kaithi Sign Anusvara
(0x110b3, 0x110b6,), # Kaithi Vowel Sign U ..Kaithi Vowel Sign Ai
(0x110b9, 0x110ba,), # Kaithi Sign Virama ..Kaithi Sign Nukta
(0x11100, 0x11102,), # Chakma Sign Candrabindu ..Chakma Sign Visarga
(0x11127, 0x1112b,), # Chakma Vowel Sign A ..Chakma Vowel Sign Uu
(0x1112d, 0x11134,), # Chakma Vowel Sign Ai ..Chakma Maayyaa
(0x11173, 0x11173,), # Mahajani Sign Nukta ..Mahajani Sign Nukta
(0x11180, 0x11181,), # Sharada Sign Candrabindu..Sharada Sign Anusvara
(0x111b6, 0x111be,), # Sharada Vowel Sign U ..Sharada Vowel Sign O
(0x111c9, 0x111cc,), # Sharada Sandhi Mark ..Sharada Extra Short Vowe
(0x1122f, 0x11231,), # Khojki Vowel Sign U ..Khojki Vowel Sign Ai
(0x11234, 0x11234,), # Khojki Sign Anusvara ..Khojki Sign Anusvara
(0x11236, 0x11237,), # Khojki Sign Nukta ..Khojki Sign Shadda
(0x1123e, 0x1123e,), # Khojki Sign Sukun ..Khojki Sign Sukun
(0x112df, 0x112df,), # Khudawadi Sign Anusvara ..Khudawadi Sign Anusvara
(0x112e3, 0x112ea,), # Khudawadi Vowel Sign U ..Khudawadi Sign Virama
(0x11300, 0x11301,), # Grantha Sign Combining A..Grantha Sign Candrabindu
(0x1133b, 0x1133c,), # Combining Bindu Below ..Grantha Sign Nukta
(0x11340, 0x11340,), # Grantha Vowel Sign Ii ..Grantha Vowel Sign Ii
(0x11366, 0x1136c,), # Combining Grantha Digit ..Combining Grantha Digit
(0x11370, 0x11374,), # Combining Grantha Letter..Combining Grantha Letter
(0x11438, 0x1143f,), # Newa Vowel Sign U ..Newa Vowel Sign Ai
(0x11442, 0x11444,), # Newa Sign Virama ..Newa Sign Anusvara
(0x11446, 0x11446,), # Newa Sign Nukta ..Newa Sign Nukta
(0x1145e, 0x1145e,), # Newa Sandhi Mark ..Newa Sandhi Mark
(0x114b3, 0x114b8,), # Tirhuta Vowel Sign U ..Tirhuta Vowel Sign Vocal
(0x114ba, 0x114ba,), # Tirhuta Vowel Sign Short..Tirhuta Vowel Sign Short
(0x114bf, 0x114c0,), # Tirhuta Sign Candrabindu..Tirhuta Sign Anusvara
(0x114c2, 0x114c3,), # Tirhuta Sign Virama ..Tirhuta Sign Nukta
(0x115b2, 0x115b5,), # Siddham Vowel Sign U ..Siddham Vowel Sign Vocal
(0x115bc, 0x115bd,), # Siddham Sign Candrabindu..Siddham Sign Anusvara
(0x115bf, 0x115c0,), # Siddham Sign Virama ..Siddham Sign Nukta
(0x115dc, 0x115dd,), # Siddham Vowel Sign Alter..Siddham Vowel Sign Alter
(0x11633, 0x1163a,), # Modi Vowel Sign U ..Modi Vowel Sign Ai
(0x1163d, 0x1163d,), # Modi Sign Anusvara ..Modi Sign Anusvara
(0x1163f, 0x11640,), # Modi Sign Virama ..Modi Sign Ardhacandra
(0x116ab, 0x116ab,), # Takri Sign Anusvara ..Takri Sign Anusvara
(0x116ad, 0x116ad,), # Takri Vowel Sign Aa ..Takri Vowel Sign Aa
(0x116b0, 0x116b5,), # Takri Vowel Sign U ..Takri Vowel Sign Au
(0x116b7, 0x116b7,), # Takri Sign Nukta ..Takri Sign Nukta
(0x1171d, 0x1171f,), # Ahom Consonant Sign Medi..Ahom Consonant Sign Medi
(0x11722, 0x11725,), # Ahom Vowel Sign I ..Ahom Vowel Sign Uu
(0x11727, 0x1172b,), # Ahom Vowel Sign Aw ..Ahom Sign Killer
(0x1182f, 0x11837,), # Dogra Vowel Sign U ..Dogra Sign Anusvara
(0x11839, 0x1183a,), # Dogra Sign Virama ..Dogra Sign Nukta
(0x11a01, 0x11a0a,), # Zanabazar Square Vowel S..Zanabazar Square Vowel L
(0x11a33, 0x11a38,), # Zanabazar Square Final C..Zanabazar Square Sign An
(0x11a3b, 0x11a3e,), # Zanabazar Square Cluster..Zanabazar Square Cluster
(0x11a47, 0x11a47,), # Zanabazar Square Subjoin..Zanabazar Square Subjoin
(0x11a51, 0x11a56,), # Soyombo Vowel Sign I ..Soyombo Vowel Sign Oe
(0x11a59, 0x11a5b,), # Soyombo Vowel Sign Vocal..Soyombo Vowel Length Mar
(0x11a8a, 0x11a96,), # Soyombo Final Consonant ..Soyombo Sign Anusvara
(0x11a98, 0x11a99,), # Soyombo Gemination Mark ..Soyombo Subjoiner
(0x11c30, 0x11c36,), # Bhaiksuki Vowel Sign I ..Bhaiksuki Vowel Sign Voc
(0x11c38, 0x11c3d,), # Bhaiksuki Vowel Sign E ..Bhaiksuki Sign Anusvara
(0x11c3f, 0x11c3f,), # Bhaiksuki Sign Virama ..Bhaiksuki Sign Virama
(0x11c92, 0x11ca7,), # Marchen Subjoined Letter..Marchen Subjoined Letter
(0x11caa, 0x11cb0,), # Marchen Subjoined Letter..Marchen Vowel Sign Aa
(0x11cb2, 0x11cb3,), # Marchen Vowel Sign U ..Marchen Vowel Sign E
(0x11cb5, 0x11cb6,), # Marchen Sign Anusvara ..Marchen Sign Candrabindu
(0x11d31, 0x11d36,), # Masaram Gondi Vowel Sign..Masaram Gondi Vowel Sign
(0x11d3a, 0x11d3a,), # Masaram Gondi Vowel Sign..Masaram Gondi Vowel Sign
(0x11d3c, 0x11d3d,), # Masaram Gondi Vowel Sign..Masaram Gondi Vowel Sign
(0x11d3f, 0x11d45,), # Masaram Gondi Vowel Sign..Masaram Gondi Virama
(0x11d47, 0x11d47,), # Masaram Gondi Ra-kara ..Masaram Gondi Ra-kara
(0x11d90, 0x11d91,), # Gunjala Gondi Vowel Sign..Gunjala Gondi Vowel Sign
(0x11d95, 0x11d95,), # Gunjala Gondi Sign Anusv..Gunjala Gondi Sign Anusv
(0x11d97, 0x11d97,), # Gunjala Gondi Virama ..Gunjala Gondi Virama
(0x11ef3, 0x11ef4,), # Makasar Vowel Sign I ..Makasar Vowel Sign U
(0x16af0, 0x16af4,), # Bassa Vah Combining High..Bassa Vah Combining High
(0x16b30, 0x16b36,), # Pahawh Hmong Mark Cim Tu..Pahawh Hmong Mark Cim Ta
(0x16f8f, 0x16f92,), # Miao Tone Right ..Miao Tone Below
(0x1bc9d, 0x1bc9e,), # Duployan Thick Letter Se..Duployan Double Mark
(0x1d167, 0x1d169,), # Musical Symbol Combining..Musical Symbol Combining
(0x1d17b, 0x1d182,), # Musical Symbol Combining..Musical Symbol Combining
(0x1d185, 0x1d18b,), # Musical Symbol Combining..Musical Symbol Combining
(0x1d1aa, 0x1d1ad,), # Musical Symbol Combining..Musical Symbol Combining
(0x1d242, 0x1d244,), # Combining Greek Musical ..Combining Greek Musical
(0x1da00, 0x1da36,), # Signwriting Head Rim ..Signwriting Air Sucking
(0x1da3b, 0x1da6c,), # Signwriting Mouth Closed..Signwriting Excitement
(0x1da75, 0x1da75,), # Signwriting Upper Body T..Signwriting Upper Body T
(0x1da84, 0x1da84,), # Signwriting Location Hea..Signwriting Location Hea
(0x1da9b, 0x1da9f,), # Signwriting Fill Modifie..Signwriting Fill Modifie
(0x1daa1, 0x1daaf,), # Signwriting Rotation Mod..Signwriting Rotation Mod
(0x1e000, 0x1e006,), # Combining Glagolitic Let..Combining Glagolitic Let
(0x1e008, 0x1e018,), # Combining Glagolitic Let..Combining Glagolitic Let
(0x1e01b, 0x1e021,), # Combining Glagolitic Let..Combining Glagolitic Let
(0x1e023, 0x1e024,), # Combining Glagolitic Let..Combining Glagolitic Let
(0x1e026, 0x1e02a,), # Combining Glagolitic Let..Combining Glagolitic Let
(0x1e8d0, 0x1e8d6,), # Mende Kikakui Combining ..Mende Kikakui Combining
(0x1e944, 0x1e94a,), # Adlam Alif Lengthener ..Adlam Nukta
(0xe0100, 0xe01ef,), # Variation Selector-17 ..Variation Selector-256
),
'12.0.0': (
# Source: DerivedGeneralCategory-12.0.0.txt
# Date: 2019-01-22, 08:18:28 GMT
#
(0x00300, 0x0036f,), # Combining Grave Accent ..Combining Latin Small Le
(0x00483, 0x00489,), # Combining Cyrillic Titlo..Combining Cyrillic Milli
(0x00591, 0x005bd,), # Hebrew Accent Etnahta ..Hebrew Point Meteg
(0x005bf, 0x005bf,), # Hebrew Point Rafe ..Hebrew Point Rafe
(0x005c1, 0x005c2,), # Hebrew Point Shin Dot ..Hebrew Point Sin Dot
(0x005c4, 0x005c5,), # Hebrew Mark Upper Dot ..Hebrew Mark Lower Dot
(0x005c7, 0x005c7,), # Hebrew Point Qamats Qata..Hebrew Point Qamats Qata
(0x00610, 0x0061a,), # Arabic Sign Sallallahou ..Arabic Small Kasra
(0x0064b, 0x0065f,), # Arabic Fathatan ..Arabic Wavy Hamza Below
(0x00670, 0x00670,), # Arabic Letter Superscrip..Arabic Letter Superscrip
(0x006d6, 0x006dc,), # Arabic Small High Ligatu..Arabic Small High Seen
(0x006df, 0x006e4,), # Arabic Small High Rounde..Arabic Small High Madda
(0x006e7, 0x006e8,), # Arabic Small High Yeh ..Arabic Small High Noon
(0x006ea, 0x006ed,), # Arabic Empty Centre Low ..Arabic Small Low Meem
(0x00711, 0x00711,), # Syriac Letter Superscrip..Syriac Letter Superscrip
(0x00730, 0x0074a,), # Syriac Pthaha Above ..Syriac Barrekh
(0x007a6, 0x007b0,), # Thaana Abafili ..Thaana Sukun
(0x007eb, 0x007f3,), # Nko Combining Short High..Nko Combining Double Dot
(0x007fd, 0x007fd,), # Nko Dantayalan ..Nko Dantayalan
(0x00816, 0x00819,), # Samaritan Mark In ..Samaritan Mark Dagesh
(0x0081b, 0x00823,), # Samaritan Mark Epentheti..Samaritan Vowel Sign A
(0x00825, 0x00827,), # Samaritan Vowel Sign Sho..Samaritan Vowel Sign U
(0x00829, 0x0082d,), # Samaritan Vowel Sign Lon..Samaritan Mark Nequdaa
(0x00859, 0x0085b,), # Mandaic Affrication Mark..Mandaic Gemination Mark
(0x008d3, 0x008e1,), # Arabic Small Low Waw ..Arabic Small High Sign S
(0x008e3, 0x00902,), # Arabic Turned Damma Belo..Devanagari Sign Anusvara
(0x0093a, 0x0093a,), # Devanagari Vowel Sign Oe..Devanagari Vowel Sign Oe
(0x0093c, 0x0093c,), # Devanagari Sign Nukta ..Devanagari Sign Nukta
(0x00941, 0x00948,), # Devanagari Vowel Sign U ..Devanagari Vowel Sign Ai
(0x0094d, 0x0094d,), # Devanagari Sign Virama ..Devanagari Sign Virama
(0x00951, 0x00957,), # Devanagari Stress Sign U..Devanagari Vowel Sign Uu
(0x00962, 0x00963,), # Devanagari Vowel Sign Vo..Devanagari Vowel Sign Vo
(0x00981, 0x00981,), # Bengali Sign Candrabindu..Bengali Sign Candrabindu
(0x009bc, 0x009bc,), # Bengali Sign Nukta ..Bengali Sign Nukta
(0x009c1, 0x009c4,), # Bengali Vowel Sign U ..Bengali Vowel Sign Vocal
(0x009cd, 0x009cd,), # Bengali Sign Virama ..Bengali Sign Virama
(0x009e2, 0x009e3,), # Bengali Vowel Sign Vocal..Bengali Vowel Sign Vocal
(0x009fe, 0x009fe,), # Bengali Sandhi Mark ..Bengali Sandhi Mark
(0x00a01, 0x00a02,), # Gurmukhi Sign Adak Bindi..Gurmukhi Sign Bindi
(0x00a3c, 0x00a3c,), # Gurmukhi Sign Nukta ..Gurmukhi Sign Nukta
(0x00a41, 0x00a42,), # Gurmukhi Vowel Sign U ..Gurmukhi Vowel Sign Uu
(0x00a47, 0x00a48,), # Gurmukhi Vowel Sign Ee ..Gurmukhi Vowel Sign Ai
(0x00a4b, 0x00a4d,), # Gurmukhi Vowel Sign Oo ..Gurmukhi Sign Virama
(0x00a51, 0x00a51,), # Gurmukhi Sign Udaat ..Gurmukhi Sign Udaat
(0x00a70, 0x00a71,), # Gurmukhi Tippi ..Gurmukhi Addak
(0x00a75, 0x00a75,), # Gurmukhi Sign Yakash ..Gurmukhi Sign Yakash
(0x00a81, 0x00a82,), # Gujarati Sign Candrabind..Gujarati Sign Anusvara
(0x00abc, 0x00abc,), # Gujarati Sign Nukta ..Gujarati Sign Nukta
(0x00ac1, 0x00ac5,), # Gujarati Vowel Sign U ..Gujarati Vowel Sign Cand
(0x00ac7, 0x00ac8,), # Gujarati Vowel Sign E ..Gujarati Vowel Sign Ai
(0x00acd, 0x00acd,), # Gujarati Sign Virama ..Gujarati Sign Virama
(0x00ae2, 0x00ae3,), # Gujarati Vowel Sign Voca..Gujarati Vowel Sign Voca
(0x00afa, 0x00aff,), # Gujarati Sign Sukun ..Gujarati Sign Two-circle
(0x00b01, 0x00b01,), # Oriya Sign Candrabindu ..Oriya Sign Candrabindu
(0x00b3c, 0x00b3c,), # Oriya Sign Nukta ..Oriya Sign Nukta
(0x00b3f, 0x00b3f,), # Oriya Vowel Sign I ..Oriya Vowel Sign I
(0x00b41, 0x00b44,), # Oriya Vowel Sign U ..Oriya Vowel Sign Vocalic
(0x00b4d, 0x00b4d,), # Oriya Sign Virama ..Oriya Sign Virama
(0x00b56, 0x00b56,), # Oriya Ai Length Mark ..Oriya Ai Length Mark
(0x00b62, 0x00b63,), # Oriya Vowel Sign Vocalic..Oriya Vowel Sign Vocalic
(0x00b82, 0x00b82,), # Tamil Sign Anusvara ..Tamil Sign Anusvara
(0x00bc0, 0x00bc0,), # Tamil Vowel Sign Ii ..Tamil Vowel Sign Ii
(0x00bcd, 0x00bcd,), # Tamil Sign Virama ..Tamil Sign Virama
(0x00c00, 0x00c00,), # Telugu Sign Combining Ca..Telugu Sign Combining Ca
(0x00c04, 0x00c04,), # Telugu Sign Combining An..Telugu Sign Combining An
(0x00c3e, 0x00c40,), # Telugu Vowel Sign Aa ..Telugu Vowel Sign Ii
(0x00c46, 0x00c48,), # Telugu Vowel Sign E ..Telugu Vowel Sign Ai
(0x00c4a, 0x00c4d,), # Telugu Vowel Sign O ..Telugu Sign Virama
(0x00c55, 0x00c56,), # Telugu Length Mark ..Telugu Ai Length Mark
(0x00c62, 0x00c63,), # Telugu Vowel Sign Vocali..Telugu Vowel Sign Vocali
(0x00c81, 0x00c81,), # Kannada Sign Candrabindu..Kannada Sign Candrabindu
(0x00cbc, 0x00cbc,), # Kannada Sign Nukta ..Kannada Sign Nukta
(0x00cbf, 0x00cbf,), # Kannada Vowel Sign I ..Kannada Vowel Sign I
(0x00cc6, 0x00cc6,), # Kannada Vowel Sign E ..Kannada Vowel Sign E
(0x00ccc, 0x00ccd,), # Kannada Vowel Sign Au ..Kannada Sign Virama
(0x00ce2, 0x00ce3,), # Kannada Vowel Sign Vocal..Kannada Vowel Sign Vocal
(0x00d00, 0x00d01,), # Malayalam Sign Combining..Malayalam Sign Candrabin
(0x00d3b, 0x00d3c,), # Malayalam Sign Vertical ..Malayalam Sign Circular
(0x00d41, 0x00d44,), # Malayalam Vowel Sign U ..Malayalam Vowel Sign Voc
(0x00d4d, 0x00d4d,), # Malayalam Sign Virama ..Malayalam Sign Virama
(0x00d62, 0x00d63,), # Malayalam Vowel Sign Voc..Malayalam Vowel Sign Voc
(0x00dca, 0x00dca,), # Sinhala Sign Al-lakuna ..Sinhala Sign Al-lakuna
(0x00dd2, 0x00dd4,), # Sinhala Vowel Sign Ketti..Sinhala Vowel Sign Ketti
(0x00dd6, 0x00dd6,), # Sinhala Vowel Sign Diga ..Sinhala Vowel Sign Diga
(0x00e31, 0x00e31,), # Thai Character Mai Han-a..Thai Character Mai Han-a
(0x00e34, 0x00e3a,), # Thai Character Sara I ..Thai Character Phinthu
(0x00e47, 0x00e4e,), # Thai Character Maitaikhu..Thai Character Yamakkan
(0x00eb1, 0x00eb1,), # Lao Vowel Sign Mai Kan ..Lao Vowel Sign Mai Kan
(0x00eb4, 0x00ebc,), # Lao Vowel Sign I ..Lao Semivowel Sign Lo
(0x00ec8, 0x00ecd,), # Lao Tone Mai Ek ..Lao Niggahita
(0x00f18, 0x00f19,), # Tibetan Astrological Sig..Tibetan Astrological Sig
(0x00f35, 0x00f35,), # Tibetan Mark Ngas Bzung ..Tibetan Mark Ngas Bzung
(0x00f37, 0x00f37,), # Tibetan Mark Ngas Bzung ..Tibetan Mark Ngas Bzung
(0x00f39, 0x00f39,), # Tibetan Mark Tsa -phru ..Tibetan Mark Tsa -phru
(0x00f71, 0x00f7e,), # Tibetan Vowel Sign Aa ..Tibetan Sign Rjes Su Nga
(0x00f80, 0x00f84,), # Tibetan Vowel Sign Rever..Tibetan Mark Halanta
(0x00f86, 0x00f87,), # Tibetan Sign Lci Rtags ..Tibetan Sign Yang Rtags
(0x00f8d, 0x00f97,), # Tibetan Subjoined Sign L..Tibetan Subjoined Letter
(0x00f99, 0x00fbc,), # Tibetan Subjoined Letter..Tibetan Subjoined Letter
(0x00fc6, 0x00fc6,), # Tibetan Symbol Padma Gda..Tibetan Symbol Padma Gda
(0x0102d, 0x01030,), # Myanmar Vowel Sign I ..Myanmar Vowel Sign Uu
(0x01032, 0x01037,), # Myanmar Vowel Sign Ai ..Myanmar Sign Dot Below
(0x01039, 0x0103a,), # Myanmar Sign Virama ..Myanmar Sign Asat
(0x0103d, 0x0103e,), # Myanmar Consonant Sign M..Myanmar Consonant Sign M
(0x01058, 0x01059,), # Myanmar Vowel Sign Vocal..Myanmar Vowel Sign Vocal
(0x0105e, 0x01060,), # Myanmar Consonant Sign M..Myanmar Consonant Sign M
(0x01071, 0x01074,), # Myanmar Vowel Sign Geba ..Myanmar Vowel Sign Kayah
(0x01082, 0x01082,), # Myanmar Consonant Sign S..Myanmar Consonant Sign S
(0x01085, 0x01086,), # Myanmar Vowel Sign Shan ..Myanmar Vowel Sign Shan
(0x0108d, 0x0108d,), # Myanmar Sign Shan Counci..Myanmar Sign Shan Counci
(0x0109d, 0x0109d,), # Myanmar Vowel Sign Aiton..Myanmar Vowel Sign Aiton
(0x0135d, 0x0135f,), # Ethiopic Combining Gemin..Ethiopic Combining Gemin
(0x01712, 0x01714,), # Tagalog Vowel Sign I ..Tagalog Sign Virama
(0x01732, 0x01734,), # Hanunoo Vowel Sign I ..Hanunoo Sign Pamudpod
(0x01752, 0x01753,), # Buhid Vowel Sign I ..Buhid Vowel Sign U
(0x01772, 0x01773,), # Tagbanwa Vowel Sign I ..Tagbanwa Vowel Sign U
(0x017b4, 0x017b5,), # Khmer Vowel Inherent Aq ..Khmer Vowel Inherent Aa
(0x017b7, 0x017bd,), # Khmer Vowel Sign I ..Khmer Vowel Sign Ua
(0x017c6, 0x017c6,), # Khmer Sign Nikahit ..Khmer Sign Nikahit
(0x017c9, 0x017d3,), # Khmer Sign Muusikatoan ..Khmer Sign Bathamasat
(0x017dd, 0x017dd,), # Khmer Sign Atthacan ..Khmer Sign Atthacan
(0x0180b, 0x0180d,), # Mongolian Free Variation..Mongolian Free Variation
(0x01885, 0x01886,), # Mongolian Letter Ali Gal..Mongolian Letter Ali Gal
(0x018a9, 0x018a9,), # Mongolian Letter Ali Gal..Mongolian Letter Ali Gal
(0x01920, 0x01922,), # Limbu Vowel Sign A ..Limbu Vowel Sign U
(0x01927, 0x01928,), # Limbu Vowel Sign E ..Limbu Vowel Sign O
(0x01932, 0x01932,), # Limbu Small Letter Anusv..Limbu Small Letter Anusv
(0x01939, 0x0193b,), # Limbu Sign Mukphreng ..Limbu Sign Sa-i
(0x01a17, 0x01a18,), # Buginese Vowel Sign I ..Buginese Vowel Sign U
(0x01a1b, 0x01a1b,), # Buginese Vowel Sign Ae ..Buginese Vowel Sign Ae
(0x01a56, 0x01a56,), # Tai Tham Consonant Sign ..Tai Tham Consonant Sign
(0x01a58, 0x01a5e,), # Tai Tham Sign Mai Kang L..Tai Tham Consonant Sign
(0x01a60, 0x01a60,), # Tai Tham Sign Sakot ..Tai Tham Sign Sakot
(0x01a62, 0x01a62,), # Tai Tham Vowel Sign Mai ..Tai Tham Vowel Sign Mai
(0x01a65, 0x01a6c,), # Tai Tham Vowel Sign I ..Tai Tham Vowel Sign Oa B
(0x01a73, 0x01a7c,), # Tai Tham Vowel Sign Oa A..Tai Tham Sign Khuen-lue
(0x01a7f, 0x01a7f,), # Tai Tham Combining Crypt..Tai Tham Combining Crypt
(0x01ab0, 0x01abe,), # Combining Doubled Circum..Combining Parentheses Ov
(0x01b00, 0x01b03,), # Balinese Sign Ulu Ricem ..Balinese Sign Surang
(0x01b34, 0x01b34,), # Balinese Sign Rerekan ..Balinese Sign Rerekan
(0x01b36, 0x01b3a,), # Balinese Vowel Sign Ulu ..Balinese Vowel Sign Ra R
(0x01b3c, 0x01b3c,), # Balinese Vowel Sign La L..Balinese Vowel Sign La L
(0x01b42, 0x01b42,), # Balinese Vowel Sign Pepe..Balinese Vowel Sign Pepe
(0x01b6b, 0x01b73,), # Balinese Musical Symbol ..Balinese Musical Symbol
(0x01b80, 0x01b81,), # Sundanese Sign Panyecek ..Sundanese Sign Panglayar
(0x01ba2, 0x01ba5,), # Sundanese Consonant Sign..Sundanese Vowel Sign Pan
(0x01ba8, 0x01ba9,), # Sundanese Vowel Sign Pam..Sundanese Vowel Sign Pan
(0x01bab, 0x01bad,), # Sundanese Sign Virama ..Sundanese Consonant Sign
(0x01be6, 0x01be6,), # Batak Sign Tompi ..Batak Sign Tompi
(0x01be8, 0x01be9,), # Batak Vowel Sign Pakpak ..Batak Vowel Sign Ee
(0x01bed, 0x01bed,), # Batak Vowel Sign Karo O ..Batak Vowel Sign Karo O
(0x01bef, 0x01bf1,), # Batak Vowel Sign U For S..Batak Consonant Sign H
(0x01c2c, 0x01c33,), # Lepcha Vowel Sign E ..Lepcha Consonant Sign T
(0x01c36, 0x01c37,), # Lepcha Sign Ran ..Lepcha Sign Nukta
(0x01cd0, 0x01cd2,), # Vedic Tone Karshana ..Vedic Tone Prenkha
(0x01cd4, 0x01ce0,), # Vedic Sign Yajurvedic Mi..Vedic Tone Rigvedic Kash
(0x01ce2, 0x01ce8,), # Vedic Sign Visarga Svari..Vedic Sign Visarga Anuda
(0x01ced, 0x01ced,), # Vedic Sign Tiryak ..Vedic Sign Tiryak
(0x01cf4, 0x01cf4,), # Vedic Tone Candra Above ..Vedic Tone Candra Above
(0x01cf8, 0x01cf9,), # Vedic Tone Ring Above ..Vedic Tone Double Ring A
(0x01dc0, 0x01df9,), # Combining Dotted Grave A..Combining Wide Inverted
(0x01dfb, 0x01dff,), # Combining Deletion Mark ..Combining Right Arrowhea
(0x020d0, 0x020f0,), # Combining Left Harpoon A..Combining Asterisk Above
(0x02cef, 0x02cf1,), # Coptic Combining Ni Abov..Coptic Combining Spiritu
(0x02d7f, 0x02d7f,), # Tifinagh Consonant Joine..Tifinagh Consonant Joine
(0x02de0, 0x02dff,), # Combining Cyrillic Lette..Combining Cyrillic Lette
(0x0302a, 0x0302d,), # Ideographic Level Tone M..Ideographic Entering Ton
(0x03099, 0x0309a,), # Combining Katakana-hirag..Combining Katakana-hirag
(0x0a66f, 0x0a672,), # Combining Cyrillic Vzmet..Combining Cyrillic Thous
(0x0a674, 0x0a67d,), # Combining Cyrillic Lette..Combining Cyrillic Payer
(0x0a69e, 0x0a69f,), # Combining Cyrillic Lette..Combining Cyrillic Lette
(0x0a6f0, 0x0a6f1,), # Bamum Combining Mark Koq..Bamum Combining Mark Tuk
(0x0a802, 0x0a802,), # Syloti Nagri Sign Dvisva..Syloti Nagri Sign Dvisva
(0x0a806, 0x0a806,), # Syloti Nagri Sign Hasant..Syloti Nagri Sign Hasant
(0x0a80b, 0x0a80b,), # Syloti Nagri Sign Anusva..Syloti Nagri Sign Anusva
(0x0a825, 0x0a826,), # Syloti Nagri Vowel Sign ..Syloti Nagri Vowel Sign
(0x0a8c4, 0x0a8c5,), # Saurashtra Sign Virama ..Saurashtra Sign Candrabi
(0x0a8e0, 0x0a8f1,), # Combining Devanagari Dig..Combining Devanagari Sig
(0x0a8ff, 0x0a8ff,), # Devanagari Vowel Sign Ay..Devanagari Vowel Sign Ay
(0x0a926, 0x0a92d,), # Kayah Li Vowel Ue ..Kayah Li Tone Calya Plop
(0x0a947, 0x0a951,), # Rejang Vowel Sign I ..Rejang Consonant Sign R
(0x0a980, 0x0a982,), # Javanese Sign Panyangga ..Javanese Sign Layar
(0x0a9b3, 0x0a9b3,), # Javanese Sign Cecak Telu..Javanese Sign Cecak Telu
(0x0a9b6, 0x0a9b9,), # Javanese Vowel Sign Wulu..Javanese Vowel Sign Suku
(0x0a9bc, 0x0a9bd,), # Javanese Vowel Sign Pepe..Javanese Consonant Sign
(0x0a9e5, 0x0a9e5,), # Myanmar Sign Shan Saw ..Myanmar Sign Shan Saw
(0x0aa29, 0x0aa2e,), # Cham Vowel Sign Aa ..Cham Vowel Sign Oe
(0x0aa31, 0x0aa32,), # Cham Vowel Sign Au ..Cham Vowel Sign Ue
(0x0aa35, 0x0aa36,), # Cham Consonant Sign La ..Cham Consonant Sign Wa
(0x0aa43, 0x0aa43,), # Cham Consonant Sign Fina..Cham Consonant Sign Fina
(0x0aa4c, 0x0aa4c,), # Cham Consonant Sign Fina..Cham Consonant Sign Fina
(0x0aa7c, 0x0aa7c,), # Myanmar Sign Tai Laing T..Myanmar Sign Tai Laing T
(0x0aab0, 0x0aab0,), # Tai Viet Mai Kang ..Tai Viet Mai Kang
(0x0aab2, 0x0aab4,), # Tai Viet Vowel I ..Tai Viet Vowel U
(0x0aab7, 0x0aab8,), # Tai Viet Mai Khit ..Tai Viet Vowel Ia
(0x0aabe, 0x0aabf,), # Tai Viet Vowel Am ..Tai Viet Tone Mai Ek
(0x0aac1, 0x0aac1,), # Tai Viet Tone Mai Tho ..Tai Viet Tone Mai Tho
(0x0aaec, 0x0aaed,), # Meetei Mayek Vowel Sign ..Meetei Mayek Vowel Sign
(0x0aaf6, 0x0aaf6,), # Meetei Mayek Virama ..Meetei Mayek Virama
(0x0abe5, 0x0abe5,), # Meetei Mayek Vowel Sign ..Meetei Mayek Vowel Sign
(0x0abe8, 0x0abe8,), # Meetei Mayek Vowel Sign ..Meetei Mayek Vowel Sign
(0x0abed, 0x0abed,), # Meetei Mayek Apun Iyek ..Meetei Mayek Apun Iyek
(0x0fb1e, 0x0fb1e,), # Hebrew Point Judeo-spani..Hebrew Point Judeo-spani
(0x0fe00, 0x0fe0f,), # Variation Selector-1 ..Variation Selector-16
(0x0fe20, 0x0fe2f,), # Combining Ligature Left ..Combining Cyrillic Titlo
(0x101fd, 0x101fd,), # Phaistos Disc Sign Combi..Phaistos Disc Sign Combi
(0x102e0, 0x102e0,), # Coptic Epact Thousands M..Coptic Epact Thousands M
(0x10376, 0x1037a,), # Combining Old Permic Let..Combining Old Permic Let
(0x10a01, 0x10a03,), # Kharoshthi Vowel Sign I ..Kharoshthi Vowel Sign Vo
(0x10a05, 0x10a06,), # Kharoshthi Vowel Sign E ..Kharoshthi Vowel Sign O
(0x10a0c, 0x10a0f,), # Kharoshthi Vowel Length ..Kharoshthi Sign Visarga
(0x10a38, 0x10a3a,), # Kharoshthi Sign Bar Abov..Kharoshthi Sign Dot Belo
(0x10a3f, 0x10a3f,), # Kharoshthi Virama ..Kharoshthi Virama
(0x10ae5, 0x10ae6,), # Manichaean Abbreviation ..Manichaean Abbreviation
(0x10d24, 0x10d27,), # Hanifi Rohingya Sign Har..Hanifi Rohingya Sign Tas
(0x10f46, 0x10f50,), # Sogdian Combining Dot Be..Sogdian Combining Stroke
(0x11001, 0x11001,), # Brahmi Sign Anusvara ..Brahmi Sign Anusvara
(0x11038, 0x11046,), # Brahmi Vowel Sign Aa ..Brahmi Virama
(0x1107f, 0x11081,), # Brahmi Number Joiner ..Kaithi Sign Anusvara
(0x110b3, 0x110b6,), # Kaithi Vowel Sign U ..Kaithi Vowel Sign Ai
(0x110b9, 0x110ba,), # Kaithi Sign Virama ..Kaithi Sign Nukta
(0x11100, 0x11102,), # Chakma Sign Candrabindu ..Chakma Sign Visarga
(0x11127, 0x1112b,), # Chakma Vowel Sign A ..Chakma Vowel Sign Uu
(0x1112d, 0x11134,), # Chakma Vowel Sign Ai ..Chakma Maayyaa
(0x11173, 0x11173,), # Mahajani Sign Nukta ..Mahajani Sign Nukta
(0x11180, 0x11181,), # Sharada Sign Candrabindu..Sharada Sign Anusvara
(0x111b6, 0x111be,), # Sharada Vowel Sign U ..Sharada Vowel Sign O
(0x111c9, 0x111cc,), # Sharada Sandhi Mark ..Sharada Extra Short Vowe
(0x1122f, 0x11231,), # Khojki Vowel Sign U ..Khojki Vowel Sign Ai
(0x11234, 0x11234,), # Khojki Sign Anusvara ..Khojki Sign Anusvara
(0x11236, 0x11237,), # Khojki Sign Nukta ..Khojki Sign Shadda
(0x1123e, 0x1123e,), # Khojki Sign Sukun ..Khojki Sign Sukun
(0x112df, 0x112df,), # Khudawadi Sign Anusvara ..Khudawadi Sign Anusvara
(0x112e3, 0x112ea,), # Khudawadi Vowel Sign U ..Khudawadi Sign Virama
(0x11300, 0x11301,), # Grantha Sign Combining A..Grantha Sign Candrabindu
(0x1133b, 0x1133c,), # Combining Bindu Below ..Grantha Sign Nukta
(0x11340, 0x11340,), # Grantha Vowel Sign Ii ..Grantha Vowel Sign Ii
(0x11366, 0x1136c,), # Combining Grantha Digit ..Combining Grantha Digit
(0x11370, 0x11374,), # Combining Grantha Letter..Combining Grantha Letter
(0x11438, 0x1143f,), # Newa Vowel Sign U ..Newa Vowel Sign Ai
(0x11442, 0x11444,), # Newa Sign Virama ..Newa Sign Anusvara
(0x11446, 0x11446,), # Newa Sign Nukta ..Newa Sign Nukta
(0x1145e, 0x1145e,), # Newa Sandhi Mark ..Newa Sandhi Mark
(0x114b3, 0x114b8,), # Tirhuta Vowel Sign U ..Tirhuta Vowel Sign Vocal
(0x114ba, 0x114ba,), # Tirhuta Vowel Sign Short..Tirhuta Vowel Sign Short
(0x114bf, 0x114c0,), # Tirhuta Sign Candrabindu..Tirhuta Sign Anusvara
(0x114c2, 0x114c3,), # Tirhuta Sign Virama ..Tirhuta Sign Nukta
(0x115b2, 0x115b5,), # Siddham Vowel Sign U ..Siddham Vowel Sign Vocal
(0x115bc, 0x115bd,), # Siddham Sign Candrabindu..Siddham Sign Anusvara
(0x115bf, 0x115c0,), # Siddham Sign Virama ..Siddham Sign Nukta
(0x115dc, 0x115dd,), # Siddham Vowel Sign Alter..Siddham Vowel Sign Alter
(0x11633, 0x1163a,), # Modi Vowel Sign U ..Modi Vowel Sign Ai
(0x1163d, 0x1163d,), # Modi Sign Anusvara ..Modi Sign Anusvara
(0x1163f, 0x11640,), # Modi Sign Virama ..Modi Sign Ardhacandra
(0x116ab, 0x116ab,), # Takri Sign Anusvara ..Takri Sign Anusvara
(0x116ad, 0x116ad,), # Takri Vowel Sign Aa ..Takri Vowel Sign Aa
(0x116b0, 0x116b5,), # Takri Vowel Sign U ..Takri Vowel Sign Au
(0x116b7, 0x116b7,), # Takri Sign Nukta ..Takri Sign Nukta
(0x1171d, 0x1171f,), # Ahom Consonant Sign Medi..Ahom Consonant Sign Medi
(0x11722, 0x11725,), # Ahom Vowel Sign I ..Ahom Vowel Sign Uu
(0x11727, 0x1172b,), # Ahom Vowel Sign Aw ..Ahom Sign Killer
(0x1182f, 0x11837,), # Dogra Vowel Sign U ..Dogra Sign Anusvara
(0x11839, 0x1183a,), # Dogra Sign Virama ..Dogra Sign Nukta
(0x119d4, 0x119d7,), # Nandinagari Vowel Sign U..Nandinagari Vowel Sign V
(0x119da, 0x119db,), # Nandinagari Vowel Sign E..Nandinagari Vowel Sign A
(0x119e0, 0x119e0,), # Nandinagari Sign Virama ..Nandinagari Sign Virama
(0x11a01, 0x11a0a,), # Zanabazar Square Vowel S..Zanabazar Square Vowel L
(0x11a33, 0x11a38,), # Zanabazar Square Final C..Zanabazar Square Sign An
(0x11a3b, 0x11a3e,), # Zanabazar Square Cluster..Zanabazar Square Cluster
(0x11a47, 0x11a47,), # Zanabazar Square Subjoin..Zanabazar Square Subjoin
(0x11a51, 0x11a56,), # Soyombo Vowel Sign I ..Soyombo Vowel Sign Oe
(0x11a59, 0x11a5b,), # Soyombo Vowel Sign Vocal..Soyombo Vowel Length Mar
(0x11a8a, 0x11a96,), # Soyombo Final Consonant ..Soyombo Sign Anusvara
(0x11a98, 0x11a99,), # Soyombo Gemination Mark ..Soyombo Subjoiner
(0x11c30, 0x11c36,), # Bhaiksuki Vowel Sign I ..Bhaiksuki Vowel Sign Voc
(0x11c38, 0x11c3d,), # Bhaiksuki Vowel Sign E ..Bhaiksuki Sign Anusvara
(0x11c3f, 0x11c3f,), # Bhaiksuki Sign Virama ..Bhaiksuki Sign Virama
(0x11c92, 0x11ca7,), # Marchen Subjoined Letter..Marchen Subjoined Letter
(0x11caa, 0x11cb0,), # Marchen Subjoined Letter..Marchen Vowel Sign Aa
(0x11cb2, 0x11cb3,), # Marchen Vowel Sign U ..Marchen Vowel Sign E
(0x11cb5, 0x11cb6,), # Marchen Sign Anusvara ..Marchen Sign Candrabindu
(0x11d31, 0x11d36,), # Masaram Gondi Vowel Sign..Masaram Gondi Vowel Sign
(0x11d3a, 0x11d3a,), # Masaram Gondi Vowel Sign..Masaram Gondi Vowel Sign
(0x11d3c, 0x11d3d,), # Masaram Gondi Vowel Sign..Masaram Gondi Vowel Sign
(0x11d3f, 0x11d45,), # Masaram Gondi Vowel Sign..Masaram Gondi Virama
(0x11d47, 0x11d47,), # Masaram Gondi Ra-kara ..Masaram Gondi Ra-kara
(0x11d90, 0x11d91,), # Gunjala Gondi Vowel Sign..Gunjala Gondi Vowel Sign
(0x11d95, 0x11d95,), # Gunjala Gondi Sign Anusv..Gunjala Gondi Sign Anusv
(0x11d97, 0x11d97,), # Gunjala Gondi Virama ..Gunjala Gondi Virama
(0x11ef3, 0x11ef4,), # Makasar Vowel Sign I ..Makasar Vowel Sign U
(0x16af0, 0x16af4,), # Bassa Vah Combining High..Bassa Vah Combining High
(0x16b30, 0x16b36,), # Pahawh Hmong Mark Cim Tu..Pahawh Hmong Mark Cim Ta
(0x16f4f, 0x16f4f,), # Miao Sign Consonant Modi..Miao Sign Consonant Modi
(0x16f8f, 0x16f92,), # Miao Tone Right ..Miao Tone Below
(0x1bc9d, 0x1bc9e,), # Duployan Thick Letter Se..Duployan Double Mark
(0x1d167, 0x1d169,), # Musical Symbol Combining..Musical Symbol Combining
(0x1d17b, 0x1d182,), # Musical Symbol Combining..Musical Symbol Combining
(0x1d185, 0x1d18b,), # Musical Symbol Combining..Musical Symbol Combining
(0x1d1aa, 0x1d1ad,), # Musical Symbol Combining..Musical Symbol Combining
(0x1d242, 0x1d244,), # Combining Greek Musical ..Combining Greek Musical
(0x1da00, 0x1da36,), # Signwriting Head Rim ..Signwriting Air Sucking
(0x1da3b, 0x1da6c,), # Signwriting Mouth Closed..Signwriting Excitement
(0x1da75, 0x1da75,), # Signwriting Upper Body T..Signwriting Upper Body T
(0x1da84, 0x1da84,), # Signwriting Location Hea..Signwriting Location Hea
(0x1da9b, 0x1da9f,), # Signwriting Fill Modifie..Signwriting Fill Modifie
(0x1daa1, 0x1daaf,), # Signwriting Rotation Mod..Signwriting Rotation Mod
(0x1e000, 0x1e006,), # Combining Glagolitic Let..Combining Glagolitic Let
(0x1e008, 0x1e018,), # Combining Glagolitic Let..Combining Glagolitic Let
(0x1e01b, 0x1e021,), # Combining Glagolitic Let..Combining Glagolitic Let
(0x1e023, 0x1e024,), # Combining Glagolitic Let..Combining Glagolitic Let
(0x1e026, 0x1e02a,), # Combining Glagolitic Let..Combining Glagolitic Let
(0x1e130, 0x1e136,), # Nyiakeng Puachue Hmong T..Nyiakeng Puachue Hmong T
(0x1e2ec, 0x1e2ef,), # Wancho Tone Tup ..Wancho Tone Koini
(0x1e8d0, 0x1e8d6,), # Mende Kikakui Combining ..Mende Kikakui Combining
(0x1e944, 0x1e94a,), # Adlam Alif Lengthener ..Adlam Nukta
(0xe0100, 0xe01ef,), # Variation Selector-17 ..Variation Selector-256
),
'12.1.0': (
# Source: DerivedGeneralCategory-12.1.0.txt
# Date: 2019-03-10, 10:53:08 GMT
#
(0x00300, 0x0036f,), # Combining Grave Accent ..Combining Latin Small Le
(0x00483, 0x00489,), # Combining Cyrillic Titlo..Combining Cyrillic Milli
(0x00591, 0x005bd,), # Hebrew Accent Etnahta ..Hebrew Point Meteg
(0x005bf, 0x005bf,), # Hebrew Point Rafe ..Hebrew Point Rafe
(0x005c1, 0x005c2,), # Hebrew Point Shin Dot ..Hebrew Point Sin Dot
(0x005c4, 0x005c5,), # Hebrew Mark Upper Dot ..Hebrew Mark Lower Dot
(0x005c7, 0x005c7,), # Hebrew Point Qamats Qata..Hebrew Point Qamats Qata
(0x00610, 0x0061a,), # Arabic Sign Sallallahou ..Arabic Small Kasra
(0x0064b, 0x0065f,), # Arabic Fathatan ..Arabic Wavy Hamza Below
(0x00670, 0x00670,), # Arabic Letter Superscrip..Arabic Letter Superscrip
(0x006d6, 0x006dc,), # Arabic Small High Ligatu..Arabic Small High Seen
(0x006df, 0x006e4,), # Arabic Small High Rounde..Arabic Small High Madda
(0x006e7, 0x006e8,), # Arabic Small High Yeh ..Arabic Small High Noon
(0x006ea, 0x006ed,), # Arabic Empty Centre Low ..Arabic Small Low Meem
(0x00711, 0x00711,), # Syriac Letter Superscrip..Syriac Letter Superscrip
(0x00730, 0x0074a,), # Syriac Pthaha Above ..Syriac Barrekh
(0x007a6, 0x007b0,), # Thaana Abafili ..Thaana Sukun
(0x007eb, 0x007f3,), # Nko Combining Short High..Nko Combining Double Dot
(0x007fd, 0x007fd,), # Nko Dantayalan ..Nko Dantayalan
(0x00816, 0x00819,), # Samaritan Mark In ..Samaritan Mark Dagesh
(0x0081b, 0x00823,), # Samaritan Mark Epentheti..Samaritan Vowel Sign A
(0x00825, 0x00827,), # Samaritan Vowel Sign Sho..Samaritan Vowel Sign U
(0x00829, 0x0082d,), # Samaritan Vowel Sign Lon..Samaritan Mark Nequdaa
(0x00859, 0x0085b,), # Mandaic Affrication Mark..Mandaic Gemination Mark
(0x008d3, 0x008e1,), # Arabic Small Low Waw ..Arabic Small High Sign S
(0x008e3, 0x00902,), # Arabic Turned Damma Belo..Devanagari Sign Anusvara
(0x0093a, 0x0093a,), # Devanagari Vowel Sign Oe..Devanagari Vowel Sign Oe
(0x0093c, 0x0093c,), # Devanagari Sign Nukta ..Devanagari Sign Nukta
(0x00941, 0x00948,), # Devanagari Vowel Sign U ..Devanagari Vowel Sign Ai
(0x0094d, 0x0094d,), # Devanagari Sign Virama ..Devanagari Sign Virama
(0x00951, 0x00957,), # Devanagari Stress Sign U..Devanagari Vowel Sign Uu
(0x00962, 0x00963,), # Devanagari Vowel Sign Vo..Devanagari Vowel Sign Vo
(0x00981, 0x00981,), # Bengali Sign Candrabindu..Bengali Sign Candrabindu
(0x009bc, 0x009bc,), # Bengali Sign Nukta ..Bengali Sign Nukta
(0x009c1, 0x009c4,), # Bengali Vowel Sign U ..Bengali Vowel Sign Vocal
(0x009cd, 0x009cd,), # Bengali Sign Virama ..Bengali Sign Virama
(0x009e2, 0x009e3,), # Bengali Vowel Sign Vocal..Bengali Vowel Sign Vocal
(0x009fe, 0x009fe,), # Bengali Sandhi Mark ..Bengali Sandhi Mark
(0x00a01, 0x00a02,), # Gurmukhi Sign Adak Bindi..Gurmukhi Sign Bindi
(0x00a3c, 0x00a3c,), # Gurmukhi Sign Nukta ..Gurmukhi Sign Nukta
(0x00a41, 0x00a42,), # Gurmukhi Vowel Sign U ..Gurmukhi Vowel Sign Uu
(0x00a47, 0x00a48,), # Gurmukhi Vowel Sign Ee ..Gurmukhi Vowel Sign Ai
(0x00a4b, 0x00a4d,), # Gurmukhi Vowel Sign Oo ..Gurmukhi Sign Virama
(0x00a51, 0x00a51,), # Gurmukhi Sign Udaat ..Gurmukhi Sign Udaat
(0x00a70, 0x00a71,), # Gurmukhi Tippi ..Gurmukhi Addak
(0x00a75, 0x00a75,), # Gurmukhi Sign Yakash ..Gurmukhi Sign Yakash
(0x00a81, 0x00a82,), # Gujarati Sign Candrabind..Gujarati Sign Anusvara
(0x00abc, 0x00abc,), # Gujarati Sign Nukta ..Gujarati Sign Nukta
(0x00ac1, 0x00ac5,), # Gujarati Vowel Sign U ..Gujarati Vowel Sign Cand
(0x00ac7, 0x00ac8,), # Gujarati Vowel Sign E ..Gujarati Vowel Sign Ai
(0x00acd, 0x00acd,), # Gujarati Sign Virama ..Gujarati Sign Virama
(0x00ae2, 0x00ae3,), # Gujarati Vowel Sign Voca..Gujarati Vowel Sign Voca
(0x00afa, 0x00aff,), # Gujarati Sign Sukun ..Gujarati Sign Two-circle
(0x00b01, 0x00b01,), # Oriya Sign Candrabindu ..Oriya Sign Candrabindu
(0x00b3c, 0x00b3c,), # Oriya Sign Nukta ..Oriya Sign Nukta
(0x00b3f, 0x00b3f,), # Oriya Vowel Sign I ..Oriya Vowel Sign I
(0x00b41, 0x00b44,), # Oriya Vowel Sign U ..Oriya Vowel Sign Vocalic
(0x00b4d, 0x00b4d,), # Oriya Sign Virama ..Oriya Sign Virama
(0x00b56, 0x00b56,), # Oriya Ai Length Mark ..Oriya Ai Length Mark
(0x00b62, 0x00b63,), # Oriya Vowel Sign Vocalic..Oriya Vowel Sign Vocalic
(0x00b82, 0x00b82,), # Tamil Sign Anusvara ..Tamil Sign Anusvara
(0x00bc0, 0x00bc0,), # Tamil Vowel Sign Ii ..Tamil Vowel Sign Ii
(0x00bcd, 0x00bcd,), # Tamil Sign Virama ..Tamil Sign Virama
(0x00c00, 0x00c00,), # Telugu Sign Combining Ca..Telugu Sign Combining Ca
(0x00c04, 0x00c04,), # Telugu Sign Combining An..Telugu Sign Combining An
(0x00c3e, 0x00c40,), # Telugu Vowel Sign Aa ..Telugu Vowel Sign Ii
(0x00c46, 0x00c48,), # Telugu Vowel Sign E ..Telugu Vowel Sign Ai
(0x00c4a, 0x00c4d,), # Telugu Vowel Sign O ..Telugu Sign Virama
(0x00c55, 0x00c56,), # Telugu Length Mark ..Telugu Ai Length Mark
(0x00c62, 0x00c63,), # Telugu Vowel Sign Vocali..Telugu Vowel Sign Vocali
(0x00c81, 0x00c81,), # Kannada Sign Candrabindu..Kannada Sign Candrabindu
(0x00cbc, 0x00cbc,), # Kannada Sign Nukta ..Kannada Sign Nukta
(0x00cbf, 0x00cbf,), # Kannada Vowel Sign I ..Kannada Vowel Sign I
(0x00cc6, 0x00cc6,), # Kannada Vowel Sign E ..Kannada Vowel Sign E
(0x00ccc, 0x00ccd,), # Kannada Vowel Sign Au ..Kannada Sign Virama
(0x00ce2, 0x00ce3,), # Kannada Vowel Sign Vocal..Kannada Vowel Sign Vocal
(0x00d00, 0x00d01,), # Malayalam Sign Combining..Malayalam Sign Candrabin
(0x00d3b, 0x00d3c,), # Malayalam Sign Vertical ..Malayalam Sign Circular
(0x00d41, 0x00d44,), # Malayalam Vowel Sign U ..Malayalam Vowel Sign Voc
(0x00d4d, 0x00d4d,), # Malayalam Sign Virama ..Malayalam Sign Virama
(0x00d62, 0x00d63,), # Malayalam Vowel Sign Voc..Malayalam Vowel Sign Voc
(0x00dca, 0x00dca,), # Sinhala Sign Al-lakuna ..Sinhala Sign Al-lakuna
(0x00dd2, 0x00dd4,), # Sinhala Vowel Sign Ketti..Sinhala Vowel Sign Ketti
(0x00dd6, 0x00dd6,), # Sinhala Vowel Sign Diga ..Sinhala Vowel Sign Diga
(0x00e31, 0x00e31,), # Thai Character Mai Han-a..Thai Character Mai Han-a
(0x00e34, 0x00e3a,), # Thai Character Sara I ..Thai Character Phinthu
(0x00e47, 0x00e4e,), # Thai Character Maitaikhu..Thai Character Yamakkan
(0x00eb1, 0x00eb1,), # Lao Vowel Sign Mai Kan ..Lao Vowel Sign Mai Kan
(0x00eb4, 0x00ebc,), # Lao Vowel Sign I ..Lao Semivowel Sign Lo
(0x00ec8, 0x00ecd,), # Lao Tone Mai Ek ..Lao Niggahita
(0x00f18, 0x00f19,), # Tibetan Astrological Sig..Tibetan Astrological Sig
(0x00f35, 0x00f35,), # Tibetan Mark Ngas Bzung ..Tibetan Mark Ngas Bzung
(0x00f37, 0x00f37,), # Tibetan Mark Ngas Bzung ..Tibetan Mark Ngas Bzung
(0x00f39, 0x00f39,), # Tibetan Mark Tsa -phru ..Tibetan Mark Tsa -phru
(0x00f71, 0x00f7e,), # Tibetan Vowel Sign Aa ..Tibetan Sign Rjes Su Nga
(0x00f80, 0x00f84,), # Tibetan Vowel Sign Rever..Tibetan Mark Halanta
(0x00f86, 0x00f87,), # Tibetan Sign Lci Rtags ..Tibetan Sign Yang Rtags
(0x00f8d, 0x00f97,), # Tibetan Subjoined Sign L..Tibetan Subjoined Letter
(0x00f99, 0x00fbc,), # Tibetan Subjoined Letter..Tibetan Subjoined Letter
(0x00fc6, 0x00fc6,), # Tibetan Symbol Padma Gda..Tibetan Symbol Padma Gda
(0x0102d, 0x01030,), # Myanmar Vowel Sign I ..Myanmar Vowel Sign Uu
(0x01032, 0x01037,), # Myanmar Vowel Sign Ai ..Myanmar Sign Dot Below
(0x01039, 0x0103a,), # Myanmar Sign Virama ..Myanmar Sign Asat
(0x0103d, 0x0103e,), # Myanmar Consonant Sign M..Myanmar Consonant Sign M
(0x01058, 0x01059,), # Myanmar Vowel Sign Vocal..Myanmar Vowel Sign Vocal
(0x0105e, 0x01060,), # Myanmar Consonant Sign M..Myanmar Consonant Sign M
(0x01071, 0x01074,), # Myanmar Vowel Sign Geba ..Myanmar Vowel Sign Kayah
(0x01082, 0x01082,), # Myanmar Consonant Sign S..Myanmar Consonant Sign S
(0x01085, 0x01086,), # Myanmar Vowel Sign Shan ..Myanmar Vowel Sign Shan
(0x0108d, 0x0108d,), # Myanmar Sign Shan Counci..Myanmar Sign Shan Counci
(0x0109d, 0x0109d,), # Myanmar Vowel Sign Aiton..Myanmar Vowel Sign Aiton
(0x0135d, 0x0135f,), # Ethiopic Combining Gemin..Ethiopic Combining Gemin
(0x01712, 0x01714,), # Tagalog Vowel Sign I ..Tagalog Sign Virama
(0x01732, 0x01734,), # Hanunoo Vowel Sign I ..Hanunoo Sign Pamudpod
(0x01752, 0x01753,), # Buhid Vowel Sign I ..Buhid Vowel Sign U
(0x01772, 0x01773,), # Tagbanwa Vowel Sign I ..Tagbanwa Vowel Sign U
(0x017b4, 0x017b5,), # Khmer Vowel Inherent Aq ..Khmer Vowel Inherent Aa
(0x017b7, 0x017bd,), # Khmer Vowel Sign I ..Khmer Vowel Sign Ua
(0x017c6, 0x017c6,), # Khmer Sign Nikahit ..Khmer Sign Nikahit
(0x017c9, 0x017d3,), # Khmer Sign Muusikatoan ..Khmer Sign Bathamasat
(0x017dd, 0x017dd,), # Khmer Sign Atthacan ..Khmer Sign Atthacan
(0x0180b, 0x0180d,), # Mongolian Free Variation..Mongolian Free Variation
(0x01885, 0x01886,), # Mongolian Letter Ali Gal..Mongolian Letter Ali Gal
(0x018a9, 0x018a9,), # Mongolian Letter Ali Gal..Mongolian Letter Ali Gal
(0x01920, 0x01922,), # Limbu Vowel Sign A ..Limbu Vowel Sign U
(0x01927, 0x01928,), # Limbu Vowel Sign E ..Limbu Vowel Sign O
(0x01932, 0x01932,), # Limbu Small Letter Anusv..Limbu Small Letter Anusv
(0x01939, 0x0193b,), # Limbu Sign Mukphreng ..Limbu Sign Sa-i
(0x01a17, 0x01a18,), # Buginese Vowel Sign I ..Buginese Vowel Sign U
(0x01a1b, 0x01a1b,), # Buginese Vowel Sign Ae ..Buginese Vowel Sign Ae
(0x01a56, 0x01a56,), # Tai Tham Consonant Sign ..Tai Tham Consonant Sign
(0x01a58, 0x01a5e,), # Tai Tham Sign Mai Kang L..Tai Tham Consonant Sign
(0x01a60, 0x01a60,), # Tai Tham Sign Sakot ..Tai Tham Sign Sakot
(0x01a62, 0x01a62,), # Tai Tham Vowel Sign Mai ..Tai Tham Vowel Sign Mai
(0x01a65, 0x01a6c,), # Tai Tham Vowel Sign I ..Tai Tham Vowel Sign Oa B
(0x01a73, 0x01a7c,), # Tai Tham Vowel Sign Oa A..Tai Tham Sign Khuen-lue
(0x01a7f, 0x01a7f,), # Tai Tham Combining Crypt..Tai Tham Combining Crypt
(0x01ab0, 0x01abe,), # Combining Doubled Circum..Combining Parentheses Ov
(0x01b00, 0x01b03,), # Balinese Sign Ulu Ricem ..Balinese Sign Surang
(0x01b34, 0x01b34,), # Balinese Sign Rerekan ..Balinese Sign Rerekan
(0x01b36, 0x01b3a,), # Balinese Vowel Sign Ulu ..Balinese Vowel Sign Ra R
(0x01b3c, 0x01b3c,), # Balinese Vowel Sign La L..Balinese Vowel Sign La L
(0x01b42, 0x01b42,), # Balinese Vowel Sign Pepe..Balinese Vowel Sign Pepe
(0x01b6b, 0x01b73,), # Balinese Musical Symbol ..Balinese Musical Symbol
(0x01b80, 0x01b81,), # Sundanese Sign Panyecek ..Sundanese Sign Panglayar
(0x01ba2, 0x01ba5,), # Sundanese Consonant Sign..Sundanese Vowel Sign Pan
(0x01ba8, 0x01ba9,), # Sundanese Vowel Sign Pam..Sundanese Vowel Sign Pan
(0x01bab, 0x01bad,), # Sundanese Sign Virama ..Sundanese Consonant Sign
(0x01be6, 0x01be6,), # Batak Sign Tompi ..Batak Sign Tompi
(0x01be8, 0x01be9,), # Batak Vowel Sign Pakpak ..Batak Vowel Sign Ee
(0x01bed, 0x01bed,), # Batak Vowel Sign Karo O ..Batak Vowel Sign Karo O
(0x01bef, 0x01bf1,), # Batak Vowel Sign U For S..Batak Consonant Sign H
(0x01c2c, 0x01c33,), # Lepcha Vowel Sign E ..Lepcha Consonant Sign T
(0x01c36, 0x01c37,), # Lepcha Sign Ran ..Lepcha Sign Nukta
(0x01cd0, 0x01cd2,), # Vedic Tone Karshana ..Vedic Tone Prenkha
(0x01cd4, 0x01ce0,), # Vedic Sign Yajurvedic Mi..Vedic Tone Rigvedic Kash
(0x01ce2, 0x01ce8,), # Vedic Sign Visarga Svari..Vedic Sign Visarga Anuda
(0x01ced, 0x01ced,), # Vedic Sign Tiryak ..Vedic Sign Tiryak
(0x01cf4, 0x01cf4,), # Vedic Tone Candra Above ..Vedic Tone Candra Above
(0x01cf8, 0x01cf9,), # Vedic Tone Ring Above ..Vedic Tone Double Ring A
(0x01dc0, 0x01df9,), # Combining Dotted Grave A..Combining Wide Inverted
(0x01dfb, 0x01dff,), # Combining Deletion Mark ..Combining Right Arrowhea
(0x020d0, 0x020f0,), # Combining Left Harpoon A..Combining Asterisk Above
(0x02cef, 0x02cf1,), # Coptic Combining Ni Abov..Coptic Combining Spiritu
(0x02d7f, 0x02d7f,), # Tifinagh Consonant Joine..Tifinagh Consonant Joine
(0x02de0, 0x02dff,), # Combining Cyrillic Lette..Combining Cyrillic Lette
(0x0302a, 0x0302d,), # Ideographic Level Tone M..Ideographic Entering Ton
(0x03099, 0x0309a,), # Combining Katakana-hirag..Combining Katakana-hirag
(0x0a66f, 0x0a672,), # Combining Cyrillic Vzmet..Combining Cyrillic Thous
(0x0a674, 0x0a67d,), # Combining Cyrillic Lette..Combining Cyrillic Payer
(0x0a69e, 0x0a69f,), # Combining Cyrillic Lette..Combining Cyrillic Lette
(0x0a6f0, 0x0a6f1,), # Bamum Combining Mark Koq..Bamum Combining Mark Tuk
(0x0a802, 0x0a802,), # Syloti Nagri Sign Dvisva..Syloti Nagri Sign Dvisva
(0x0a806, 0x0a806,), # Syloti Nagri Sign Hasant..Syloti Nagri Sign Hasant
(0x0a80b, 0x0a80b,), # Syloti Nagri Sign Anusva..Syloti Nagri Sign Anusva
(0x0a825, 0x0a826,), # Syloti Nagri Vowel Sign ..Syloti Nagri Vowel Sign
(0x0a8c4, 0x0a8c5,), # Saurashtra Sign Virama ..Saurashtra Sign Candrabi
(0x0a8e0, 0x0a8f1,), # Combining Devanagari Dig..Combining Devanagari Sig
(0x0a8ff, 0x0a8ff,), # Devanagari Vowel Sign Ay..Devanagari Vowel Sign Ay
(0x0a926, 0x0a92d,), # Kayah Li Vowel Ue ..Kayah Li Tone Calya Plop
(0x0a947, 0x0a951,), # Rejang Vowel Sign I ..Rejang Consonant Sign R
(0x0a980, 0x0a982,), # Javanese Sign Panyangga ..Javanese Sign Layar
(0x0a9b3, 0x0a9b3,), # Javanese Sign Cecak Telu..Javanese Sign Cecak Telu
(0x0a9b6, 0x0a9b9,), # Javanese Vowel Sign Wulu..Javanese Vowel Sign Suku
(0x0a9bc, 0x0a9bd,), # Javanese Vowel Sign Pepe..Javanese Consonant Sign
(0x0a9e5, 0x0a9e5,), # Myanmar Sign Shan Saw ..Myanmar Sign Shan Saw
(0x0aa29, 0x0aa2e,), # Cham Vowel Sign Aa ..Cham Vowel Sign Oe
(0x0aa31, 0x0aa32,), # Cham Vowel Sign Au ..Cham Vowel Sign Ue
(0x0aa35, 0x0aa36,), # Cham Consonant Sign La ..Cham Consonant Sign Wa
(0x0aa43, 0x0aa43,), # Cham Consonant Sign Fina..Cham Consonant Sign Fina
(0x0aa4c, 0x0aa4c,), # Cham Consonant Sign Fina..Cham Consonant Sign Fina
(0x0aa7c, 0x0aa7c,), # Myanmar Sign Tai Laing T..Myanmar Sign Tai Laing T
(0x0aab0, 0x0aab0,), # Tai Viet Mai Kang ..Tai Viet Mai Kang
(0x0aab2, 0x0aab4,), # Tai Viet Vowel I ..Tai Viet Vowel U
(0x0aab7, 0x0aab8,), # Tai Viet Mai Khit ..Tai Viet Vowel Ia
(0x0aabe, 0x0aabf,), # Tai Viet Vowel Am ..Tai Viet Tone Mai Ek
(0x0aac1, 0x0aac1,), # Tai Viet Tone Mai Tho ..Tai Viet Tone Mai Tho
(0x0aaec, 0x0aaed,), # Meetei Mayek Vowel Sign ..Meetei Mayek Vowel Sign
(0x0aaf6, 0x0aaf6,), # Meetei Mayek Virama ..Meetei Mayek Virama
(0x0abe5, 0x0abe5,), # Meetei Mayek Vowel Sign ..Meetei Mayek Vowel Sign
(0x0abe8, 0x0abe8,), # Meetei Mayek Vowel Sign ..Meetei Mayek Vowel Sign
(0x0abed, 0x0abed,), # Meetei Mayek Apun Iyek ..Meetei Mayek Apun Iyek
(0x0fb1e, 0x0fb1e,), # Hebrew Point Judeo-spani..Hebrew Point Judeo-spani
(0x0fe00, 0x0fe0f,), # Variation Selector-1 ..Variation Selector-16
(0x0fe20, 0x0fe2f,), # Combining Ligature Left ..Combining Cyrillic Titlo
(0x101fd, 0x101fd,), # Phaistos Disc Sign Combi..Phaistos Disc Sign Combi
(0x102e0, 0x102e0,), # Coptic Epact Thousands M..Coptic Epact Thousands M
(0x10376, 0x1037a,), # Combining Old Permic Let..Combining Old Permic Let
(0x10a01, 0x10a03,), # Kharoshthi Vowel Sign I ..Kharoshthi Vowel Sign Vo
(0x10a05, 0x10a06,), # Kharoshthi Vowel Sign E ..Kharoshthi Vowel Sign O
(0x10a0c, 0x10a0f,), # Kharoshthi Vowel Length ..Kharoshthi Sign Visarga
(0x10a38, 0x10a3a,), # Kharoshthi Sign Bar Abov..Kharoshthi Sign Dot Belo
(0x10a3f, 0x10a3f,), # Kharoshthi Virama ..Kharoshthi Virama
(0x10ae5, 0x10ae6,), # Manichaean Abbreviation ..Manichaean Abbreviation
(0x10d24, 0x10d27,), # Hanifi Rohingya Sign Har..Hanifi Rohingya Sign Tas
(0x10f46, 0x10f50,), # Sogdian Combining Dot Be..Sogdian Combining Stroke
(0x11001, 0x11001,), # Brahmi Sign Anusvara ..Brahmi Sign Anusvara
(0x11038, 0x11046,), # Brahmi Vowel Sign Aa ..Brahmi Virama
(0x1107f, 0x11081,), # Brahmi Number Joiner ..Kaithi Sign Anusvara
(0x110b3, 0x110b6,), # Kaithi Vowel Sign U ..Kaithi Vowel Sign Ai
(0x110b9, 0x110ba,), # Kaithi Sign Virama ..Kaithi Sign Nukta
(0x11100, 0x11102,), # Chakma Sign Candrabindu ..Chakma Sign Visarga
(0x11127, 0x1112b,), # Chakma Vowel Sign A ..Chakma Vowel Sign Uu
(0x1112d, 0x11134,), # Chakma Vowel Sign Ai ..Chakma Maayyaa
(0x11173, 0x11173,), # Mahajani Sign Nukta ..Mahajani Sign Nukta
(0x11180, 0x11181,), # Sharada Sign Candrabindu..Sharada Sign Anusvara
(0x111b6, 0x111be,), # Sharada Vowel Sign U ..Sharada Vowel Sign O
(0x111c9, 0x111cc,), # Sharada Sandhi Mark ..Sharada Extra Short Vowe
(0x1122f, 0x11231,), # Khojki Vowel Sign U ..Khojki Vowel Sign Ai
(0x11234, 0x11234,), # Khojki Sign Anusvara ..Khojki Sign Anusvara
(0x11236, 0x11237,), # Khojki Sign Nukta ..Khojki Sign Shadda
(0x1123e, 0x1123e,), # Khojki Sign Sukun ..Khojki Sign Sukun
(0x112df, 0x112df,), # Khudawadi Sign Anusvara ..Khudawadi Sign Anusvara
(0x112e3, 0x112ea,), # Khudawadi Vowel Sign U ..Khudawadi Sign Virama
(0x11300, 0x11301,), # Grantha Sign Combining A..Grantha Sign Candrabindu
(0x1133b, 0x1133c,), # Combining Bindu Below ..Grantha Sign Nukta
(0x11340, 0x11340,), # Grantha Vowel Sign Ii ..Grantha Vowel Sign Ii
(0x11366, 0x1136c,), # Combining Grantha Digit ..Combining Grantha Digit
(0x11370, 0x11374,), # Combining Grantha Letter..Combining Grantha Letter
(0x11438, 0x1143f,), # Newa Vowel Sign U ..Newa Vowel Sign Ai
(0x11442, 0x11444,), # Newa Sign Virama ..Newa Sign Anusvara
(0x11446, 0x11446,), # Newa Sign Nukta ..Newa Sign Nukta
(0x1145e, 0x1145e,), # Newa Sandhi Mark ..Newa Sandhi Mark
(0x114b3, 0x114b8,), # Tirhuta Vowel Sign U ..Tirhuta Vowel Sign Vocal
(0x114ba, 0x114ba,), # Tirhuta Vowel Sign Short..Tirhuta Vowel Sign Short
(0x114bf, 0x114c0,), # Tirhuta Sign Candrabindu..Tirhuta Sign Anusvara
(0x114c2, 0x114c3,), # Tirhuta Sign Virama ..Tirhuta Sign Nukta
(0x115b2, 0x115b5,), # Siddham Vowel Sign U ..Siddham Vowel Sign Vocal
(0x115bc, 0x115bd,), # Siddham Sign Candrabindu..Siddham Sign Anusvara
(0x115bf, 0x115c0,), # Siddham Sign Virama ..Siddham Sign Nukta
(0x115dc, 0x115dd,), # Siddham Vowel Sign Alter..Siddham Vowel Sign Alter
(0x11633, 0x1163a,), # Modi Vowel Sign U ..Modi Vowel Sign Ai
(0x1163d, 0x1163d,), # Modi Sign Anusvara ..Modi Sign Anusvara
(0x1163f, 0x11640,), # Modi Sign Virama ..Modi Sign Ardhacandra
(0x116ab, 0x116ab,), # Takri Sign Anusvara ..Takri Sign Anusvara
(0x116ad, 0x116ad,), # Takri Vowel Sign Aa ..Takri Vowel Sign Aa
(0x116b0, 0x116b5,), # Takri Vowel Sign U ..Takri Vowel Sign Au
(0x116b7, 0x116b7,), # Takri Sign Nukta ..Takri Sign Nukta
(0x1171d, 0x1171f,), # Ahom Consonant Sign Medi..Ahom Consonant Sign Medi
(0x11722, 0x11725,), # Ahom Vowel Sign I ..Ahom Vowel Sign Uu
(0x11727, 0x1172b,), # Ahom Vowel Sign Aw ..Ahom Sign Killer
(0x1182f, 0x11837,), # Dogra Vowel Sign U ..Dogra Sign Anusvara
(0x11839, 0x1183a,), # Dogra Sign Virama ..Dogra Sign Nukta
(0x119d4, 0x119d7,), # Nandinagari Vowel Sign U..Nandinagari Vowel Sign V
(0x119da, 0x119db,), # Nandinagari Vowel Sign E..Nandinagari Vowel Sign A
(0x119e0, 0x119e0,), # Nandinagari Sign Virama ..Nandinagari Sign Virama
(0x11a01, 0x11a0a,), # Zanabazar Square Vowel S..Zanabazar Square Vowel L
(0x11a33, 0x11a38,), # Zanabazar Square Final C..Zanabazar Square Sign An
(0x11a3b, 0x11a3e,), # Zanabazar Square Cluster..Zanabazar Square Cluster
(0x11a47, 0x11a47,), # Zanabazar Square Subjoin..Zanabazar Square Subjoin
(0x11a51, 0x11a56,), # Soyombo Vowel Sign I ..Soyombo Vowel Sign Oe
(0x11a59, 0x11a5b,), # Soyombo Vowel Sign Vocal..Soyombo Vowel Length Mar
(0x11a8a, 0x11a96,), # Soyombo Final Consonant ..Soyombo Sign Anusvara
(0x11a98, 0x11a99,), # Soyombo Gemination Mark ..Soyombo Subjoiner
(0x11c30, 0x11c36,), # Bhaiksuki Vowel Sign I ..Bhaiksuki Vowel Sign Voc
(0x11c38, 0x11c3d,), # Bhaiksuki Vowel Sign E ..Bhaiksuki Sign Anusvara
(0x11c3f, 0x11c3f,), # Bhaiksuki Sign Virama ..Bhaiksuki Sign Virama
(0x11c92, 0x11ca7,), # Marchen Subjoined Letter..Marchen Subjoined Letter
(0x11caa, 0x11cb0,), # Marchen Subjoined Letter..Marchen Vowel Sign Aa
(0x11cb2, 0x11cb3,), # Marchen Vowel Sign U ..Marchen Vowel Sign E
(0x11cb5, 0x11cb6,), # Marchen Sign Anusvara ..Marchen Sign Candrabindu
(0x11d31, 0x11d36,), # Masaram Gondi Vowel Sign..Masaram Gondi Vowel Sign
(0x11d3a, 0x11d3a,), # Masaram Gondi Vowel Sign..Masaram Gondi Vowel Sign
(0x11d3c, 0x11d3d,), # Masaram Gondi Vowel Sign..Masaram Gondi Vowel Sign
(0x11d3f, 0x11d45,), # Masaram Gondi Vowel Sign..Masaram Gondi Virama
(0x11d47, 0x11d47,), # Masaram Gondi Ra-kara ..Masaram Gondi Ra-kara
(0x11d90, 0x11d91,), # Gunjala Gondi Vowel Sign..Gunjala Gondi Vowel Sign
(0x11d95, 0x11d95,), # Gunjala Gondi Sign Anusv..Gunjala Gondi Sign Anusv
(0x11d97, 0x11d97,), # Gunjala Gondi Virama ..Gunjala Gondi Virama
(0x11ef3, 0x11ef4,), # Makasar Vowel Sign I ..Makasar Vowel Sign U
(0x16af0, 0x16af4,), # Bassa Vah Combining High..Bassa Vah Combining High
(0x16b30, 0x16b36,), # Pahawh Hmong Mark Cim Tu..Pahawh Hmong Mark Cim Ta
(0x16f4f, 0x16f4f,), # Miao Sign Consonant Modi..Miao Sign Consonant Modi
(0x16f8f, 0x16f92,), # Miao Tone Right ..Miao Tone Below
(0x1bc9d, 0x1bc9e,), # Duployan Thick Letter Se..Duployan Double Mark
(0x1d167, 0x1d169,), # Musical Symbol Combining..Musical Symbol Combining
(0x1d17b, 0x1d182,), # Musical Symbol Combining..Musical Symbol Combining
(0x1d185, 0x1d18b,), # Musical Symbol Combining..Musical Symbol Combining
(0x1d1aa, 0x1d1ad,), # Musical Symbol Combining..Musical Symbol Combining
(0x1d242, 0x1d244,), # Combining Greek Musical ..Combining Greek Musical
(0x1da00, 0x1da36,), # Signwriting Head Rim ..Signwriting Air Sucking
(0x1da3b, 0x1da6c,), # Signwriting Mouth Closed..Signwriting Excitement
(0x1da75, 0x1da75,), # Signwriting Upper Body T..Signwriting Upper Body T
(0x1da84, 0x1da84,), # Signwriting Location Hea..Signwriting Location Hea
(0x1da9b, 0x1da9f,), # Signwriting Fill Modifie..Signwriting Fill Modifie
(0x1daa1, 0x1daaf,), # Signwriting Rotation Mod..Signwriting Rotation Mod
(0x1e000, 0x1e006,), # Combining Glagolitic Let..Combining Glagolitic Let
(0x1e008, 0x1e018,), # Combining Glagolitic Let..Combining Glagolitic Let
(0x1e01b, 0x1e021,), # Combining Glagolitic Let..Combining Glagolitic Let
(0x1e023, 0x1e024,), # Combining Glagolitic Let..Combining Glagolitic Let
(0x1e026, 0x1e02a,), # Combining Glagolitic Let..Combining Glagolitic Let
(0x1e130, 0x1e136,), # Nyiakeng Puachue Hmong T..Nyiakeng Puachue Hmong T
(0x1e2ec, 0x1e2ef,), # Wancho Tone Tup ..Wancho Tone Koini
(0x1e8d0, 0x1e8d6,), # Mende Kikakui Combining ..Mende Kikakui Combining
(0x1e944, 0x1e94a,), # Adlam Alif Lengthener ..Adlam Nukta
(0xe0100, 0xe01ef,), # Variation Selector-17 ..Variation Selector-256
),
'13.0.0': (
# Source: DerivedGeneralCategory-13.0.0.txt
# Date: 2019-10-21, 14:30:32 GMT
#
(0x00300, 0x0036f,), # Combining Grave Accent ..Combining Latin Small Le
(0x00483, 0x00489,), # Combining Cyrillic Titlo..Combining Cyrillic Milli
(0x00591, 0x005bd,), # Hebrew Accent Etnahta ..Hebrew Point Meteg
(0x005bf, 0x005bf,), # Hebrew Point Rafe ..Hebrew Point Rafe
(0x005c1, 0x005c2,), # Hebrew Point Shin Dot ..Hebrew Point Sin Dot
(0x005c4, 0x005c5,), # Hebrew Mark Upper Dot ..Hebrew Mark Lower Dot
(0x005c7, 0x005c7,), # Hebrew Point Qamats Qata..Hebrew Point Qamats Qata
(0x00610, 0x0061a,), # Arabic Sign Sallallahou ..Arabic Small Kasra
(0x0064b, 0x0065f,), # Arabic Fathatan ..Arabic Wavy Hamza Below
(0x00670, 0x00670,), # Arabic Letter Superscrip..Arabic Letter Superscrip
(0x006d6, 0x006dc,), # Arabic Small High Ligatu..Arabic Small High Seen
(0x006df, 0x006e4,), # Arabic Small High Rounde..Arabic Small High Madda
(0x006e7, 0x006e8,), # Arabic Small High Yeh ..Arabic Small High Noon
(0x006ea, 0x006ed,), # Arabic Empty Centre Low ..Arabic Small Low Meem
(0x00711, 0x00711,), # Syriac Letter Superscrip..Syriac Letter Superscrip
(0x00730, 0x0074a,), # Syriac Pthaha Above ..Syriac Barrekh
(0x007a6, 0x007b0,), # Thaana Abafili ..Thaana Sukun
(0x007eb, 0x007f3,), # Nko Combining Short High..Nko Combining Double Dot
(0x007fd, 0x007fd,), # Nko Dantayalan ..Nko Dantayalan
(0x00816, 0x00819,), # Samaritan Mark In ..Samaritan Mark Dagesh
(0x0081b, 0x00823,), # Samaritan Mark Epentheti..Samaritan Vowel Sign A
(0x00825, 0x00827,), # Samaritan Vowel Sign Sho..Samaritan Vowel Sign U
(0x00829, 0x0082d,), # Samaritan Vowel Sign Lon..Samaritan Mark Nequdaa
(0x00859, 0x0085b,), # Mandaic Affrication Mark..Mandaic Gemination Mark
(0x008d3, 0x008e1,), # Arabic Small Low Waw ..Arabic Small High Sign S
(0x008e3, 0x00902,), # Arabic Turned Damma Belo..Devanagari Sign Anusvara
(0x0093a, 0x0093a,), # Devanagari Vowel Sign Oe..Devanagari Vowel Sign Oe
(0x0093c, 0x0093c,), # Devanagari Sign Nukta ..Devanagari Sign Nukta
(0x00941, 0x00948,), # Devanagari Vowel Sign U ..Devanagari Vowel Sign Ai
(0x0094d, 0x0094d,), # Devanagari Sign Virama ..Devanagari Sign Virama
(0x00951, 0x00957,), # Devanagari Stress Sign U..Devanagari Vowel Sign Uu
(0x00962, 0x00963,), # Devanagari Vowel Sign Vo..Devanagari Vowel Sign Vo
(0x00981, 0x00981,), # Bengali Sign Candrabindu..Bengali Sign Candrabindu
(0x009bc, 0x009bc,), # Bengali Sign Nukta ..Bengali Sign Nukta
(0x009c1, 0x009c4,), # Bengali Vowel Sign U ..Bengali Vowel Sign Vocal
(0x009cd, 0x009cd,), # Bengali Sign Virama ..Bengali Sign Virama
(0x009e2, 0x009e3,), # Bengali Vowel Sign Vocal..Bengali Vowel Sign Vocal
(0x009fe, 0x009fe,), # Bengali Sandhi Mark ..Bengali Sandhi Mark
(0x00a01, 0x00a02,), # Gurmukhi Sign Adak Bindi..Gurmukhi Sign Bindi
(0x00a3c, 0x00a3c,), # Gurmukhi Sign Nukta ..Gurmukhi Sign Nukta
(0x00a41, 0x00a42,), # Gurmukhi Vowel Sign U ..Gurmukhi Vowel Sign Uu
(0x00a47, 0x00a48,), # Gurmukhi Vowel Sign Ee ..Gurmukhi Vowel Sign Ai
(0x00a4b, 0x00a4d,), # Gurmukhi Vowel Sign Oo ..Gurmukhi Sign Virama
(0x00a51, 0x00a51,), # Gurmukhi Sign Udaat ..Gurmukhi Sign Udaat
(0x00a70, 0x00a71,), # Gurmukhi Tippi ..Gurmukhi Addak
(0x00a75, 0x00a75,), # Gurmukhi Sign Yakash ..Gurmukhi Sign Yakash
(0x00a81, 0x00a82,), # Gujarati Sign Candrabind..Gujarati Sign Anusvara
(0x00abc, 0x00abc,), # Gujarati Sign Nukta ..Gujarati Sign Nukta
(0x00ac1, 0x00ac5,), # Gujarati Vowel Sign U ..Gujarati Vowel Sign Cand
(0x00ac7, 0x00ac8,), # Gujarati Vowel Sign E ..Gujarati Vowel Sign Ai
(0x00acd, 0x00acd,), # Gujarati Sign Virama ..Gujarati Sign Virama
(0x00ae2, 0x00ae3,), # Gujarati Vowel Sign Voca..Gujarati Vowel Sign Voca
(0x00afa, 0x00aff,), # Gujarati Sign Sukun ..Gujarati Sign Two-circle
(0x00b01, 0x00b01,), # Oriya Sign Candrabindu ..Oriya Sign Candrabindu
(0x00b3c, 0x00b3c,), # Oriya Sign Nukta ..Oriya Sign Nukta
(0x00b3f, 0x00b3f,), # Oriya Vowel Sign I ..Oriya Vowel Sign I
(0x00b41, 0x00b44,), # Oriya Vowel Sign U ..Oriya Vowel Sign Vocalic
(0x00b4d, 0x00b4d,), # Oriya Sign Virama ..Oriya Sign Virama
(0x00b55, 0x00b56,), # (nil) ..Oriya Ai Length Mark
(0x00b62, 0x00b63,), # Oriya Vowel Sign Vocalic..Oriya Vowel Sign Vocalic
(0x00b82, 0x00b82,), # Tamil Sign Anusvara ..Tamil Sign Anusvara
(0x00bc0, 0x00bc0,), # Tamil Vowel Sign Ii ..Tamil Vowel Sign Ii
(0x00bcd, 0x00bcd,), # Tamil Sign Virama ..Tamil Sign Virama
(0x00c00, 0x00c00,), # Telugu Sign Combining Ca..Telugu Sign Combining Ca
(0x00c04, 0x00c04,), # Telugu Sign Combining An..Telugu Sign Combining An
(0x00c3e, 0x00c40,), # Telugu Vowel Sign Aa ..Telugu Vowel Sign Ii
(0x00c46, 0x00c48,), # Telugu Vowel Sign E ..Telugu Vowel Sign Ai
(0x00c4a, 0x00c4d,), # Telugu Vowel Sign O ..Telugu Sign Virama
(0x00c55, 0x00c56,), # Telugu Length Mark ..Telugu Ai Length Mark
(0x00c62, 0x00c63,), # Telugu Vowel Sign Vocali..Telugu Vowel Sign Vocali
(0x00c81, 0x00c81,), # Kannada Sign Candrabindu..Kannada Sign Candrabindu
(0x00cbc, 0x00cbc,), # Kannada Sign Nukta ..Kannada Sign Nukta
(0x00cbf, 0x00cbf,), # Kannada Vowel Sign I ..Kannada Vowel Sign I
(0x00cc6, 0x00cc6,), # Kannada Vowel Sign E ..Kannada Vowel Sign E
(0x00ccc, 0x00ccd,), # Kannada Vowel Sign Au ..Kannada Sign Virama
(0x00ce2, 0x00ce3,), # Kannada Vowel Sign Vocal..Kannada Vowel Sign Vocal
(0x00d00, 0x00d01,), # Malayalam Sign Combining..Malayalam Sign Candrabin
(0x00d3b, 0x00d3c,), # Malayalam Sign Vertical ..Malayalam Sign Circular
(0x00d41, 0x00d44,), # Malayalam Vowel Sign U ..Malayalam Vowel Sign Voc
(0x00d4d, 0x00d4d,), # Malayalam Sign Virama ..Malayalam Sign Virama
(0x00d62, 0x00d63,), # Malayalam Vowel Sign Voc..Malayalam Vowel Sign Voc
(0x00d81, 0x00d81,), # (nil) ..(nil)
(0x00dca, 0x00dca,), # Sinhala Sign Al-lakuna ..Sinhala Sign Al-lakuna
(0x00dd2, 0x00dd4,), # Sinhala Vowel Sign Ketti..Sinhala Vowel Sign Ketti
(0x00dd6, 0x00dd6,), # Sinhala Vowel Sign Diga ..Sinhala Vowel Sign Diga
(0x00e31, 0x00e31,), # Thai Character Mai Han-a..Thai Character Mai Han-a
(0x00e34, 0x00e3a,), # Thai Character Sara I ..Thai Character Phinthu
(0x00e47, 0x00e4e,), # Thai Character Maitaikhu..Thai Character Yamakkan
(0x00eb1, 0x00eb1,), # Lao Vowel Sign Mai Kan ..Lao Vowel Sign Mai Kan
(0x00eb4, 0x00ebc,), # Lao Vowel Sign I ..Lao Semivowel Sign Lo
(0x00ec8, 0x00ecd,), # Lao Tone Mai Ek ..Lao Niggahita
(0x00f18, 0x00f19,), # Tibetan Astrological Sig..Tibetan Astrological Sig
(0x00f35, 0x00f35,), # Tibetan Mark Ngas Bzung ..Tibetan Mark Ngas Bzung
(0x00f37, 0x00f37,), # Tibetan Mark Ngas Bzung ..Tibetan Mark Ngas Bzung
(0x00f39, 0x00f39,), # Tibetan Mark Tsa -phru ..Tibetan Mark Tsa -phru
(0x00f71, 0x00f7e,), # Tibetan Vowel Sign Aa ..Tibetan Sign Rjes Su Nga
(0x00f80, 0x00f84,), # Tibetan Vowel Sign Rever..Tibetan Mark Halanta
(0x00f86, 0x00f87,), # Tibetan Sign Lci Rtags ..Tibetan Sign Yang Rtags
(0x00f8d, 0x00f97,), # Tibetan Subjoined Sign L..Tibetan Subjoined Letter
(0x00f99, 0x00fbc,), # Tibetan Subjoined Letter..Tibetan Subjoined Letter
(0x00fc6, 0x00fc6,), # Tibetan Symbol Padma Gda..Tibetan Symbol Padma Gda
(0x0102d, 0x01030,), # Myanmar Vowel Sign I ..Myanmar Vowel Sign Uu
(0x01032, 0x01037,), # Myanmar Vowel Sign Ai ..Myanmar Sign Dot Below
(0x01039, 0x0103a,), # Myanmar Sign Virama ..Myanmar Sign Asat
(0x0103d, 0x0103e,), # Myanmar Consonant Sign M..Myanmar Consonant Sign M
(0x01058, 0x01059,), # Myanmar Vowel Sign Vocal..Myanmar Vowel Sign Vocal
(0x0105e, 0x01060,), # Myanmar Consonant Sign M..Myanmar Consonant Sign M
(0x01071, 0x01074,), # Myanmar Vowel Sign Geba ..Myanmar Vowel Sign Kayah
(0x01082, 0x01082,), # Myanmar Consonant Sign S..Myanmar Consonant Sign S
(0x01085, 0x01086,), # Myanmar Vowel Sign Shan ..Myanmar Vowel Sign Shan
(0x0108d, 0x0108d,), # Myanmar Sign Shan Counci..Myanmar Sign Shan Counci
(0x0109d, 0x0109d,), # Myanmar Vowel Sign Aiton..Myanmar Vowel Sign Aiton
(0x0135d, 0x0135f,), # Ethiopic Combining Gemin..Ethiopic Combining Gemin
(0x01712, 0x01714,), # Tagalog Vowel Sign I ..Tagalog Sign Virama
(0x01732, 0x01734,), # Hanunoo Vowel Sign I ..Hanunoo Sign Pamudpod
(0x01752, 0x01753,), # Buhid Vowel Sign I ..Buhid Vowel Sign U
(0x01772, 0x01773,), # Tagbanwa Vowel Sign I ..Tagbanwa Vowel Sign U
(0x017b4, 0x017b5,), # Khmer Vowel Inherent Aq ..Khmer Vowel Inherent Aa
(0x017b7, 0x017bd,), # Khmer Vowel Sign I ..Khmer Vowel Sign Ua
(0x017c6, 0x017c6,), # Khmer Sign Nikahit ..Khmer Sign Nikahit
(0x017c9, 0x017d3,), # Khmer Sign Muusikatoan ..Khmer Sign Bathamasat
(0x017dd, 0x017dd,), # Khmer Sign Atthacan ..Khmer Sign Atthacan
(0x0180b, 0x0180d,), # Mongolian Free Variation..Mongolian Free Variation
(0x01885, 0x01886,), # Mongolian Letter Ali Gal..Mongolian Letter Ali Gal
(0x018a9, 0x018a9,), # Mongolian Letter Ali Gal..Mongolian Letter Ali Gal
(0x01920, 0x01922,), # Limbu Vowel Sign A ..Limbu Vowel Sign U
(0x01927, 0x01928,), # Limbu Vowel Sign E ..Limbu Vowel Sign O
(0x01932, 0x01932,), # Limbu Small Letter Anusv..Limbu Small Letter Anusv
(0x01939, 0x0193b,), # Limbu Sign Mukphreng ..Limbu Sign Sa-i
(0x01a17, 0x01a18,), # Buginese Vowel Sign I ..Buginese Vowel Sign U
(0x01a1b, 0x01a1b,), # Buginese Vowel Sign Ae ..Buginese Vowel Sign Ae
(0x01a56, 0x01a56,), # Tai Tham Consonant Sign ..Tai Tham Consonant Sign
(0x01a58, 0x01a5e,), # Tai Tham Sign Mai Kang L..Tai Tham Consonant Sign
(0x01a60, 0x01a60,), # Tai Tham Sign Sakot ..Tai Tham Sign Sakot
(0x01a62, 0x01a62,), # Tai Tham Vowel Sign Mai ..Tai Tham Vowel Sign Mai
(0x01a65, 0x01a6c,), # Tai Tham Vowel Sign I ..Tai Tham Vowel Sign Oa B
(0x01a73, 0x01a7c,), # Tai Tham Vowel Sign Oa A..Tai Tham Sign Khuen-lue
(0x01a7f, 0x01a7f,), # Tai Tham Combining Crypt..Tai Tham Combining Crypt
(0x01ab0, 0x01ac0,), # Combining Doubled Circum..(nil)
(0x01b00, 0x01b03,), # Balinese Sign Ulu Ricem ..Balinese Sign Surang
(0x01b34, 0x01b34,), # Balinese Sign Rerekan ..Balinese Sign Rerekan
(0x01b36, 0x01b3a,), # Balinese Vowel Sign Ulu ..Balinese Vowel Sign Ra R
(0x01b3c, 0x01b3c,), # Balinese Vowel Sign La L..Balinese Vowel Sign La L
(0x01b42, 0x01b42,), # Balinese Vowel Sign Pepe..Balinese Vowel Sign Pepe
(0x01b6b, 0x01b73,), # Balinese Musical Symbol ..Balinese Musical Symbol
(0x01b80, 0x01b81,), # Sundanese Sign Panyecek ..Sundanese Sign Panglayar
(0x01ba2, 0x01ba5,), # Sundanese Consonant Sign..Sundanese Vowel Sign Pan
(0x01ba8, 0x01ba9,), # Sundanese Vowel Sign Pam..Sundanese Vowel Sign Pan
(0x01bab, 0x01bad,), # Sundanese Sign Virama ..Sundanese Consonant Sign
(0x01be6, 0x01be6,), # Batak Sign Tompi ..Batak Sign Tompi
(0x01be8, 0x01be9,), # Batak Vowel Sign Pakpak ..Batak Vowel Sign Ee
(0x01bed, 0x01bed,), # Batak Vowel Sign Karo O ..Batak Vowel Sign Karo O
(0x01bef, 0x01bf1,), # Batak Vowel Sign U For S..Batak Consonant Sign H
(0x01c2c, 0x01c33,), # Lepcha Vowel Sign E ..Lepcha Consonant Sign T
(0x01c36, 0x01c37,), # Lepcha Sign Ran ..Lepcha Sign Nukta
(0x01cd0, 0x01cd2,), # Vedic Tone Karshana ..Vedic Tone Prenkha
(0x01cd4, 0x01ce0,), # Vedic Sign Yajurvedic Mi..Vedic Tone Rigvedic Kash
(0x01ce2, 0x01ce8,), # Vedic Sign Visarga Svari..Vedic Sign Visarga Anuda
(0x01ced, 0x01ced,), # Vedic Sign Tiryak ..Vedic Sign Tiryak
(0x01cf4, 0x01cf4,), # Vedic Tone Candra Above ..Vedic Tone Candra Above
(0x01cf8, 0x01cf9,), # Vedic Tone Ring Above ..Vedic Tone Double Ring A
(0x01dc0, 0x01df9,), # Combining Dotted Grave A..Combining Wide Inverted
(0x01dfb, 0x01dff,), # Combining Deletion Mark ..Combining Right Arrowhea
(0x020d0, 0x020f0,), # Combining Left Harpoon A..Combining Asterisk Above
(0x02cef, 0x02cf1,), # Coptic Combining Ni Abov..Coptic Combining Spiritu
(0x02d7f, 0x02d7f,), # Tifinagh Consonant Joine..Tifinagh Consonant Joine
(0x02de0, 0x02dff,), # Combining Cyrillic Lette..Combining Cyrillic Lette
(0x0302a, 0x0302d,), # Ideographic Level Tone M..Ideographic Entering Ton
(0x03099, 0x0309a,), # Combining Katakana-hirag..Combining Katakana-hirag
(0x0a66f, 0x0a672,), # Combining Cyrillic Vzmet..Combining Cyrillic Thous
(0x0a674, 0x0a67d,), # Combining Cyrillic Lette..Combining Cyrillic Payer
(0x0a69e, 0x0a69f,), # Combining Cyrillic Lette..Combining Cyrillic Lette
(0x0a6f0, 0x0a6f1,), # Bamum Combining Mark Koq..Bamum Combining Mark Tuk
(0x0a802, 0x0a802,), # Syloti Nagri Sign Dvisva..Syloti Nagri Sign Dvisva
(0x0a806, 0x0a806,), # Syloti Nagri Sign Hasant..Syloti Nagri Sign Hasant
(0x0a80b, 0x0a80b,), # Syloti Nagri Sign Anusva..Syloti Nagri Sign Anusva
(0x0a825, 0x0a826,), # Syloti Nagri Vowel Sign ..Syloti Nagri Vowel Sign
(0x0a82c, 0x0a82c,), # (nil) ..(nil)
(0x0a8c4, 0x0a8c5,), # Saurashtra Sign Virama ..Saurashtra Sign Candrabi
(0x0a8e0, 0x0a8f1,), # Combining Devanagari Dig..Combining Devanagari Sig
(0x0a8ff, 0x0a8ff,), # Devanagari Vowel Sign Ay..Devanagari Vowel Sign Ay
(0x0a926, 0x0a92d,), # Kayah Li Vowel Ue ..Kayah Li Tone Calya Plop
(0x0a947, 0x0a951,), # Rejang Vowel Sign I ..Rejang Consonant Sign R
(0x0a980, 0x0a982,), # Javanese Sign Panyangga ..Javanese Sign Layar
(0x0a9b3, 0x0a9b3,), # Javanese Sign Cecak Telu..Javanese Sign Cecak Telu
(0x0a9b6, 0x0a9b9,), # Javanese Vowel Sign Wulu..Javanese Vowel Sign Suku
(0x0a9bc, 0x0a9bd,), # Javanese Vowel Sign Pepe..Javanese Consonant Sign
(0x0a9e5, 0x0a9e5,), # Myanmar Sign Shan Saw ..Myanmar Sign Shan Saw
(0x0aa29, 0x0aa2e,), # Cham Vowel Sign Aa ..Cham Vowel Sign Oe
(0x0aa31, 0x0aa32,), # Cham Vowel Sign Au ..Cham Vowel Sign Ue
(0x0aa35, 0x0aa36,), # Cham Consonant Sign La ..Cham Consonant Sign Wa
(0x0aa43, 0x0aa43,), # Cham Consonant Sign Fina..Cham Consonant Sign Fina
(0x0aa4c, 0x0aa4c,), # Cham Consonant Sign Fina..Cham Consonant Sign Fina
(0x0aa7c, 0x0aa7c,), # Myanmar Sign Tai Laing T..Myanmar Sign Tai Laing T
(0x0aab0, 0x0aab0,), # Tai Viet Mai Kang ..Tai Viet Mai Kang
(0x0aab2, 0x0aab4,), # Tai Viet Vowel I ..Tai Viet Vowel U
(0x0aab7, 0x0aab8,), # Tai Viet Mai Khit ..Tai Viet Vowel Ia
(0x0aabe, 0x0aabf,), # Tai Viet Vowel Am ..Tai Viet Tone Mai Ek
(0x0aac1, 0x0aac1,), # Tai Viet Tone Mai Tho ..Tai Viet Tone Mai Tho
(0x0aaec, 0x0aaed,), # Meetei Mayek Vowel Sign ..Meetei Mayek Vowel Sign
(0x0aaf6, 0x0aaf6,), # Meetei Mayek Virama ..Meetei Mayek Virama
(0x0abe5, 0x0abe5,), # Meetei Mayek Vowel Sign ..Meetei Mayek Vowel Sign
(0x0abe8, 0x0abe8,), # Meetei Mayek Vowel Sign ..Meetei Mayek Vowel Sign
(0x0abed, 0x0abed,), # Meetei Mayek Apun Iyek ..Meetei Mayek Apun Iyek
(0x0fb1e, 0x0fb1e,), # Hebrew Point Judeo-spani..Hebrew Point Judeo-spani
(0x0fe00, 0x0fe0f,), # Variation Selector-1 ..Variation Selector-16
(0x0fe20, 0x0fe2f,), # Combining Ligature Left ..Combining Cyrillic Titlo
(0x101fd, 0x101fd,), # Phaistos Disc Sign Combi..Phaistos Disc Sign Combi
(0x102e0, 0x102e0,), # Coptic Epact Thousands M..Coptic Epact Thousands M
(0x10376, 0x1037a,), # Combining Old Permic Let..Combining Old Permic Let
(0x10a01, 0x10a03,), # Kharoshthi Vowel Sign I ..Kharoshthi Vowel Sign Vo
(0x10a05, 0x10a06,), # Kharoshthi Vowel Sign E ..Kharoshthi Vowel Sign O
(0x10a0c, 0x10a0f,), # Kharoshthi Vowel Length ..Kharoshthi Sign Visarga
(0x10a38, 0x10a3a,), # Kharoshthi Sign Bar Abov..Kharoshthi Sign Dot Belo
(0x10a3f, 0x10a3f,), # Kharoshthi Virama ..Kharoshthi Virama
(0x10ae5, 0x10ae6,), # Manichaean Abbreviation ..Manichaean Abbreviation
(0x10d24, 0x10d27,), # Hanifi Rohingya Sign Har..Hanifi Rohingya Sign Tas
(0x10eab, 0x10eac,), # (nil) ..(nil)
(0x10f46, 0x10f50,), # Sogdian Combining Dot Be..Sogdian Combining Stroke
(0x11001, 0x11001,), # Brahmi Sign Anusvara ..Brahmi Sign Anusvara
(0x11038, 0x11046,), # Brahmi Vowel Sign Aa ..Brahmi Virama
(0x1107f, 0x11081,), # Brahmi Number Joiner ..Kaithi Sign Anusvara
(0x110b3, 0x110b6,), # Kaithi Vowel Sign U ..Kaithi Vowel Sign Ai
(0x110b9, 0x110ba,), # Kaithi Sign Virama ..Kaithi Sign Nukta
(0x11100, 0x11102,), # Chakma Sign Candrabindu ..Chakma Sign Visarga
(0x11127, 0x1112b,), # Chakma Vowel Sign A ..Chakma Vowel Sign Uu
(0x1112d, 0x11134,), # Chakma Vowel Sign Ai ..Chakma Maayyaa
(0x11173, 0x11173,), # Mahajani Sign Nukta ..Mahajani Sign Nukta
(0x11180, 0x11181,), # Sharada Sign Candrabindu..Sharada Sign Anusvara
(0x111b6, 0x111be,), # Sharada Vowel Sign U ..Sharada Vowel Sign O
(0x111c9, 0x111cc,), # Sharada Sandhi Mark ..Sharada Extra Short Vowe
(0x111cf, 0x111cf,), # (nil) ..(nil)
(0x1122f, 0x11231,), # Khojki Vowel Sign U ..Khojki Vowel Sign Ai
(0x11234, 0x11234,), # Khojki Sign Anusvara ..Khojki Sign Anusvara
(0x11236, 0x11237,), # Khojki Sign Nukta ..Khojki Sign Shadda
(0x1123e, 0x1123e,), # Khojki Sign Sukun ..Khojki Sign Sukun
(0x112df, 0x112df,), # Khudawadi Sign Anusvara ..Khudawadi Sign Anusvara
(0x112e3, 0x112ea,), # Khudawadi Vowel Sign U ..Khudawadi Sign Virama
(0x11300, 0x11301,), # Grantha Sign Combining A..Grantha Sign Candrabindu
(0x1133b, 0x1133c,), # Combining Bindu Below ..Grantha Sign Nukta
(0x11340, 0x11340,), # Grantha Vowel Sign Ii ..Grantha Vowel Sign Ii
(0x11366, 0x1136c,), # Combining Grantha Digit ..Combining Grantha Digit
(0x11370, 0x11374,), # Combining Grantha Letter..Combining Grantha Letter
(0x11438, 0x1143f,), # Newa Vowel Sign U ..Newa Vowel Sign Ai
(0x11442, 0x11444,), # Newa Sign Virama ..Newa Sign Anusvara
(0x11446, 0x11446,), # Newa Sign Nukta ..Newa Sign Nukta
(0x1145e, 0x1145e,), # Newa Sandhi Mark ..Newa Sandhi Mark
(0x114b3, 0x114b8,), # Tirhuta Vowel Sign U ..Tirhuta Vowel Sign Vocal
(0x114ba, 0x114ba,), # Tirhuta Vowel Sign Short..Tirhuta Vowel Sign Short
(0x114bf, 0x114c0,), # Tirhuta Sign Candrabindu..Tirhuta Sign Anusvara
(0x114c2, 0x114c3,), # Tirhuta Sign Virama ..Tirhuta Sign Nukta
(0x115b2, 0x115b5,), # Siddham Vowel Sign U ..Siddham Vowel Sign Vocal
(0x115bc, 0x115bd,), # Siddham Sign Candrabindu..Siddham Sign Anusvara
(0x115bf, 0x115c0,), # Siddham Sign Virama ..Siddham Sign Nukta
(0x115dc, 0x115dd,), # Siddham Vowel Sign Alter..Siddham Vowel Sign Alter
(0x11633, 0x1163a,), # Modi Vowel Sign U ..Modi Vowel Sign Ai
(0x1163d, 0x1163d,), # Modi Sign Anusvara ..Modi Sign Anusvara
(0x1163f, 0x11640,), # Modi Sign Virama ..Modi Sign Ardhacandra
(0x116ab, 0x116ab,), # Takri Sign Anusvara ..Takri Sign Anusvara
(0x116ad, 0x116ad,), # Takri Vowel Sign Aa ..Takri Vowel Sign Aa
(0x116b0, 0x116b5,), # Takri Vowel Sign U ..Takri Vowel Sign Au
(0x116b7, 0x116b7,), # Takri Sign Nukta ..Takri Sign Nukta
(0x1171d, 0x1171f,), # Ahom Consonant Sign Medi..Ahom Consonant Sign Medi
(0x11722, 0x11725,), # Ahom Vowel Sign I ..Ahom Vowel Sign Uu
(0x11727, 0x1172b,), # Ahom Vowel Sign Aw ..Ahom Sign Killer
(0x1182f, 0x11837,), # Dogra Vowel Sign U ..Dogra Sign Anusvara
(0x11839, 0x1183a,), # Dogra Sign Virama ..Dogra Sign Nukta
(0x1193b, 0x1193c,), # (nil) ..(nil)
(0x1193e, 0x1193e,), # (nil) ..(nil)
(0x11943, 0x11943,), # (nil) ..(nil)
(0x119d4, 0x119d7,), # Nandinagari Vowel Sign U..Nandinagari Vowel Sign V
(0x119da, 0x119db,), # Nandinagari Vowel Sign E..Nandinagari Vowel Sign A
(0x119e0, 0x119e0,), # Nandinagari Sign Virama ..Nandinagari Sign Virama
(0x11a01, 0x11a0a,), # Zanabazar Square Vowel S..Zanabazar Square Vowel L
(0x11a33, 0x11a38,), # Zanabazar Square Final C..Zanabazar Square Sign An
(0x11a3b, 0x11a3e,), # Zanabazar Square Cluster..Zanabazar Square Cluster
(0x11a47, 0x11a47,), # Zanabazar Square Subjoin..Zanabazar Square Subjoin
(0x11a51, 0x11a56,), # Soyombo Vowel Sign I ..Soyombo Vowel Sign Oe
(0x11a59, 0x11a5b,), # Soyombo Vowel Sign Vocal..Soyombo Vowel Length Mar
(0x11a8a, 0x11a96,), # Soyombo Final Consonant ..Soyombo Sign Anusvara
(0x11a98, 0x11a99,), # Soyombo Gemination Mark ..Soyombo Subjoiner
(0x11c30, 0x11c36,), # Bhaiksuki Vowel Sign I ..Bhaiksuki Vowel Sign Voc
(0x11c38, 0x11c3d,), # Bhaiksuki Vowel Sign E ..Bhaiksuki Sign Anusvara
(0x11c3f, 0x11c3f,), # Bhaiksuki Sign Virama ..Bhaiksuki Sign Virama
(0x11c92, 0x11ca7,), # Marchen Subjoined Letter..Marchen Subjoined Letter
(0x11caa, 0x11cb0,), # Marchen Subjoined Letter..Marchen Vowel Sign Aa
(0x11cb2, 0x11cb3,), # Marchen Vowel Sign U ..Marchen Vowel Sign E
(0x11cb5, 0x11cb6,), # Marchen Sign Anusvara ..Marchen Sign Candrabindu
(0x11d31, 0x11d36,), # Masaram Gondi Vowel Sign..Masaram Gondi Vowel Sign
(0x11d3a, 0x11d3a,), # Masaram Gondi Vowel Sign..Masaram Gondi Vowel Sign
(0x11d3c, 0x11d3d,), # Masaram Gondi Vowel Sign..Masaram Gondi Vowel Sign
(0x11d3f, 0x11d45,), # Masaram Gondi Vowel Sign..Masaram Gondi Virama
(0x11d47, 0x11d47,), # Masaram Gondi Ra-kara ..Masaram Gondi Ra-kara
(0x11d90, 0x11d91,), # Gunjala Gondi Vowel Sign..Gunjala Gondi Vowel Sign
(0x11d95, 0x11d95,), # Gunjala Gondi Sign Anusv..Gunjala Gondi Sign Anusv
(0x11d97, 0x11d97,), # Gunjala Gondi Virama ..Gunjala Gondi Virama
(0x11ef3, 0x11ef4,), # Makasar Vowel Sign I ..Makasar Vowel Sign U
(0x16af0, 0x16af4,), # Bassa Vah Combining High..Bassa Vah Combining High
(0x16b30, 0x16b36,), # Pahawh Hmong Mark Cim Tu..Pahawh Hmong Mark Cim Ta
(0x16f4f, 0x16f4f,), # Miao Sign Consonant Modi..Miao Sign Consonant Modi
(0x16f8f, 0x16f92,), # Miao Tone Right ..Miao Tone Below
(0x16fe4, 0x16fe4,), # (nil) ..(nil)
(0x1bc9d, 0x1bc9e,), # Duployan Thick Letter Se..Duployan Double Mark
(0x1d167, 0x1d169,), # Musical Symbol Combining..Musical Symbol Combining
(0x1d17b, 0x1d182,), # Musical Symbol Combining..Musical Symbol Combining
(0x1d185, 0x1d18b,), # Musical Symbol Combining..Musical Symbol Combining
(0x1d1aa, 0x1d1ad,), # Musical Symbol Combining..Musical Symbol Combining
(0x1d242, 0x1d244,), # Combining Greek Musical ..Combining Greek Musical
(0x1da00, 0x1da36,), # Signwriting Head Rim ..Signwriting Air Sucking
(0x1da3b, 0x1da6c,), # Signwriting Mouth Closed..Signwriting Excitement
(0x1da75, 0x1da75,), # Signwriting Upper Body T..Signwriting Upper Body T
(0x1da84, 0x1da84,), # Signwriting Location Hea..Signwriting Location Hea
(0x1da9b, 0x1da9f,), # Signwriting Fill Modifie..Signwriting Fill Modifie
(0x1daa1, 0x1daaf,), # Signwriting Rotation Mod..Signwriting Rotation Mod
(0x1e000, 0x1e006,), # Combining Glagolitic Let..Combining Glagolitic Let
(0x1e008, 0x1e018,), # Combining Glagolitic Let..Combining Glagolitic Let
(0x1e01b, 0x1e021,), # Combining Glagolitic Let..Combining Glagolitic Let
(0x1e023, 0x1e024,), # Combining Glagolitic Let..Combining Glagolitic Let
(0x1e026, 0x1e02a,), # Combining Glagolitic Let..Combining Glagolitic Let
(0x1e130, 0x1e136,), # Nyiakeng Puachue Hmong T..Nyiakeng Puachue Hmong T
(0x1e2ec, 0x1e2ef,), # Wancho Tone Tup ..Wancho Tone Koini
(0x1e8d0, 0x1e8d6,), # Mende Kikakui Combining ..Mende Kikakui Combining
(0x1e944, 0x1e94a,), # Adlam Alif Lengthener ..Adlam Nukta
(0xe0100, 0xe01ef,), # Variation Selector-17 ..Variation Selector-256
),
}
| 79.322424
| 82
| 0.642636
| 35,464
| 310,230
| 5.62156
| 0.031356
| 0.105862
| 0.014195
| 0.007424
| 0.992968
| 0.992235
| 0.992235
| 0.991829
| 0.991648
| 0.991648
| 0
| 0.154255
| 0.256455
| 310,230
| 3,910
| 83
| 79.342711
| 0.710023
| 0.602572
| 0
| 0.989378
| 1
| 0
| 0.000712
| 0
| 0
| 0
| 0.448557
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| null | 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 9
|
db527b174c47d34a25b5a1249587ee1d764fbd0f
| 159
|
py
|
Python
|
sampleTest.py
|
yajiviki/python-espncricinfo
|
530091790caf86f9c9cdf31ecaf8bcfa4f212b68
|
[
"MIT"
] | null | null | null |
sampleTest.py
|
yajiviki/python-espncricinfo
|
530091790caf86f9c9cdf31ecaf8bcfa4f212b68
|
[
"MIT"
] | null | null | null |
sampleTest.py
|
yajiviki/python-espncricinfo
|
530091790caf86f9c9cdf31ecaf8bcfa4f212b68
|
[
"MIT"
] | null | null | null |
"""Smoke script: fetch one ESPNcricinfo match and print its description."""
# NOTE(review): Summary is imported but never used here -- confirm no caller
# relies on its import side effects before dropping it.
from espncricinfo.summary import Summary
from espncricinfo.match import Match  # was imported twice; duplicate removed

# '1226866' is an ESPNcricinfo match id string.
m = Match('1226866')
print(m.description)
| 19.875
| 40
| 0.81761
| 21
| 159
| 6.190476
| 0.428571
| 0.369231
| 0.323077
| 0.415385
| 0.492308
| 0
| 0
| 0
| 0
| 0
| 0
| 0.049645
| 0.113208
| 159
| 7
| 41
| 22.714286
| 0.87234
| 0
| 0
| 0.4
| 0
| 0
| 0.044025
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.6
| 0
| 0.6
| 0.2
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
db5d5bdd3d46b9f9d4e89bce01af64b99a8fa98e
| 286
|
py
|
Python
|
src/models/query_response.py
|
ahmed-gohary/CarRentalSystem
|
285cbbe9052903e60f8a38676af256c8a6b54fb8
|
[
"Xnet",
"X11"
] | null | null | null |
src/models/query_response.py
|
ahmed-gohary/CarRentalSystem
|
285cbbe9052903e60f8a38676af256c8a6b54fb8
|
[
"Xnet",
"X11"
] | null | null | null |
src/models/query_response.py
|
ahmed-gohary/CarRentalSystem
|
285cbbe9052903e60f8a38676af256c8a6b54fb8
|
[
"Xnet",
"X11"
] | null | null | null |
class InsertResponse:
    """Carries the outcome of an insert query.

    Attributes:
        entry_id: identifier of the affected entry (type depends on the
            storage backend -- TODO confirm; likely an int row id).
        error_msg: error description string; empty/None semantics are
            defined by the caller, not shown here.
    """

    def __init__(self, entry_id, error_msg: str):
        self.entry_id = entry_id
        self.error_msg = error_msg

    def __repr__(self) -> str:
        # Debug-friendly representation; does not affect equality or callers.
        return (
            f"{type(self).__name__}"
            f"(entry_id={self.entry_id!r}, error_msg={self.error_msg!r})"
        )
class UpdateResponse:
    """Carries the outcome of an update query.

    Attributes:
        entry_id: identifier of the affected entry (type depends on the
            storage backend -- TODO confirm; likely an int row id).
        error_msg: error description string; empty/None semantics are
            defined by the caller, not shown here.
    """

    def __init__(self, entry_id, error_msg: str):
        self.entry_id = entry_id
        self.error_msg = error_msg

    def __repr__(self) -> str:
        # Kept consistent with InsertResponse for uniform debug output.
        return (
            f"{type(self).__name__}"
            f"(entry_id={self.entry_id!r}, error_msg={self.error_msg!r})"
        )
| 19.066667
| 49
| 0.671329
| 40
| 286
| 4.3
| 0.275
| 0.244186
| 0.255814
| 0.186047
| 0.77907
| 0.77907
| 0.77907
| 0.77907
| 0.77907
| 0.77907
| 0
| 0
| 0.251748
| 286
| 14
| 50
| 20.428571
| 0.803738
| 0
| 0
| 0.75
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.25
| false
| 0
| 0
| 0
| 0.5
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 10
|
db930d4ebf786bf43c14bf2c8d1e2a9940afea65
| 4,449
|
py
|
Python
|
tests/mpd/protocol/test_audio_output.py
|
hugovk/mopidy
|
3c479d1e4506b532dcd246920d5da8b7a8a15290
|
[
"Apache-2.0"
] | null | null | null |
tests/mpd/protocol/test_audio_output.py
|
hugovk/mopidy
|
3c479d1e4506b532dcd246920d5da8b7a8a15290
|
[
"Apache-2.0"
] | null | null | null |
tests/mpd/protocol/test_audio_output.py
|
hugovk/mopidy
|
3c479d1e4506b532dcd246920d5da8b7a8a15290
|
[
"Apache-2.0"
] | null | null | null |
from tests.mpd import protocol
class AudioOutputHandlerTest(protocol.BaseTestCase):
    """Protocol tests for the MPD audio-output commands when a mixer exists.

    The single exposed output ("Mute", id 0) maps onto the core mixer's
    mute flag.
    """

    def _assert_outputs_response(self, enabled):
        """Issue ``outputs`` and assert the Mute output is reported with the
        given ``outputenabled`` value (``"0"`` or ``"1"``)."""
        self.send_request("outputs")
        self.assertInResponse("outputid: 0")
        self.assertInResponse("outputname: Mute")
        self.assertInResponse("outputenabled: " + enabled)
        self.assertInResponse("OK")

    def test_enableoutput(self):
        self.core.mixer.set_mute(False)

        self.send_request('enableoutput "0"')

        self.assertInResponse("OK")
        assert self.core.mixer.get_mute().get() is True

    def test_enableoutput_unknown_outputid(self):
        self.send_request('enableoutput "7"')

        self.assertInResponse("ACK [50@0] {enableoutput} No such audio output")

    def test_disableoutput(self):
        self.core.mixer.set_mute(True)

        self.send_request('disableoutput "0"')

        self.assertInResponse("OK")
        assert self.core.mixer.get_mute().get() is False

    def test_disableoutput_unknown_outputid(self):
        self.send_request('disableoutput "7"')

        self.assertInResponse("ACK [50@0] {disableoutput} No such audio output")

    def test_outputs_when_unmuted(self):
        self.core.mixer.set_mute(False)
        self._assert_outputs_response("0")

    def test_outputs_when_muted(self):
        self.core.mixer.set_mute(True)
        self._assert_outputs_response("1")

    def test_outputs_toggleoutput(self):
        self.core.mixer.set_mute(False)

        # Each toggle flips the reported flag: unmuted -> 1 -> 0 -> 1.
        for expected in ("1", "0", "1"):
            self.send_request('toggleoutput "0"')
            self._assert_outputs_response(expected)

    def test_outputs_toggleoutput_unknown_outputid(self):
        self.send_request('toggleoutput "7"')

        self.assertInResponse("ACK [50@0] {toggleoutput} No such audio output")
class AudioOutputHandlerNoneMixerTest(protocol.BaseTestCase):
    """Same protocol commands when no mixer is configured.

    Enabling/disabling fails with ACK 52, and the output is always reported
    as disabled regardless of what mute state was requested.
    """

    enable_mixer = False

    def _assert_outputs_response(self, enabled):
        """Issue ``outputs`` and assert the Mute output is reported with the
        given ``outputenabled`` value."""
        self.send_request("outputs")
        self.assertInResponse("outputid: 0")
        self.assertInResponse("outputname: Mute")
        self.assertInResponse("outputenabled: " + enabled)
        self.assertInResponse("OK")

    def test_enableoutput(self):
        assert self.core.mixer.get_mute().get() is None

        self.send_request('enableoutput "0"')

        self.assertInResponse(
            "ACK [52@0] {enableoutput} problems enabling output"
        )
        assert self.core.mixer.get_mute().get() is None

    def test_disableoutput(self):
        assert self.core.mixer.get_mute().get() is None

        self.send_request('disableoutput "0"')

        self.assertInResponse(
            "ACK [52@0] {disableoutput} problems disabling output"
        )
        assert self.core.mixer.get_mute().get() is None

    def test_outputs_when_unmuted(self):
        self.core.mixer.set_mute(False)
        self._assert_outputs_response("0")

    def test_outputs_when_muted(self):
        self.core.mixer.set_mute(True)
        self._assert_outputs_response("0")

    def test_outputs_toggleoutput(self):
        self.core.mixer.set_mute(False)

        # Without a mixer the flag never changes: stays "0" after each toggle.
        for _ in range(2):
            self.send_request('toggleoutput "0"')
            self._assert_outputs_response("0")
| 30.682759
| 80
| 0.662396
| 472
| 4,449
| 6.103814
| 0.108051
| 0.298507
| 0.109337
| 0.068726
| 0.859077
| 0.859077
| 0.777161
| 0.745227
| 0.745227
| 0.745227
| 0
| 0.012931
| 0.217802
| 4,449
| 144
| 81
| 30.895833
| 0.814943
| 0
| 0
| 0.808081
| 0
| 0
| 0.204091
| 0
| 0
| 0
| 0
| 0
| 0.494949
| 1
| 0.131313
| false
| 0
| 0.010101
| 0
| 0.171717
| 0
| 0
| 0
| 0
| null | 1
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 9
|
db9e77b2addaa76f4f40083d143864f2a0a8c980
| 32,217
|
py
|
Python
|
tests/unit/gapic/v1/test_subscriber_client_v1.py
|
asrikumar2002/Google-API
|
4a7211b3b10f47e3c6b046686d81261002e9ec36
|
[
"Apache-2.0"
] | null | null | null |
tests/unit/gapic/v1/test_subscriber_client_v1.py
|
asrikumar2002/Google-API
|
4a7211b3b10f47e3c6b046686d81261002e9ec36
|
[
"Apache-2.0"
] | null | null | null |
tests/unit/gapic/v1/test_subscriber_client_v1.py
|
asrikumar2002/Google-API
|
4a7211b3b10f47e3c6b046686d81261002e9ec36
|
[
"Apache-2.0"
] | null | null | null |
# -*- coding: utf-8 -*-
#
# Copyright 2020 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Unit tests."""
import mock
import pytest
from google.cloud.pubsub_v1.gapic import subscriber_client
from google.cloud.pubsub_v1.proto import pubsub_pb2
from google.iam.v1 import iam_policy_pb2
from google.iam.v1 import policy_pb2
from google.protobuf import empty_pb2
from google.protobuf import field_mask_pb2
from google.protobuf import timestamp_pb2
class MultiCallableStub(object):
    """Stub for the grpc.UnaryUnaryMultiCallable interface.

    Records each invocation as a ``(method, request)`` pair on the owning
    channel stub and replays that stub's canned responses; canned exceptions
    are raised instead of returned.
    """

    def __init__(self, method, channel_stub):
        self.method = method
        self.channel_stub = channel_stub

    def __call__(self, request, timeout=None, metadata=None, credentials=None):
        stub = self.channel_stub
        stub.requests.append((self.method, request))

        if not stub.responses:
            return None

        canned = stub.responses.pop()
        if isinstance(canned, Exception):
            raise canned
        # Falsy canned responses are swallowed, matching the original's
        # implicit-None behavior.
        return canned if canned else None
class ChannelStub(object):
    """Stub for the grpc.Channel interface.

    Collects every ``(method, request)`` pair issued through the channel and
    hands out the canned ``responses`` (popped LIFO) via MultiCallableStub.
    """

    def __init__(self, responses=None):
        # Fix: the original used a mutable default (`responses=[]`). That
        # single shared list is mutated by MultiCallableStub.__call__ via
        # `.pop()`, so state could leak between ChannelStub instances that
        # relied on the default.
        self.responses = responses if responses is not None else []
        self.requests = []

    def unary_unary(self, method, request_serializer=None, response_deserializer=None):
        return MultiCallableStub(method, self)

    def stream_stream(
        self, method, request_serializer=None, response_deserializer=None
    ):
        return MultiCallableStub(method, self)
class CustomException(Exception):
    """Sentinel error type planted in the channel stub for failure tests."""
class TestSubscriberClient(object):
def test_create_subscription(self):
# Setup Expected Response
name_2 = "name2-1052831874"
topic_2 = "topic2-1139259102"
ack_deadline_seconds = 2135351438
retain_acked_messages = False
enable_message_ordering = True
expected_response = {
"name": name_2,
"topic": topic_2,
"ack_deadline_seconds": ack_deadline_seconds,
"retain_acked_messages": retain_acked_messages,
"enable_message_ordering": enable_message_ordering,
}
expected_response = pubsub_pb2.Subscription(**expected_response)
# Mock the API response
channel = ChannelStub(responses=[expected_response])
patch = mock.patch("google.api_core.grpc_helpers.create_channel")
with patch as create_channel:
create_channel.return_value = channel
client = subscriber_client.SubscriberClient()
# Setup Request
name = client.subscription_path("[PROJECT]", "[SUBSCRIPTION]")
topic = client.topic_path("[PROJECT]", "[TOPIC]")
response = client.create_subscription(name, topic)
assert expected_response == response
assert len(channel.requests) == 1
expected_request = pubsub_pb2.Subscription(name=name, topic=topic)
actual_request = channel.requests[0][1]
assert expected_request == actual_request
def test_create_subscription_exception(self):
# Mock the API response
channel = ChannelStub(responses=[CustomException()])
patch = mock.patch("google.api_core.grpc_helpers.create_channel")
with patch as create_channel:
create_channel.return_value = channel
client = subscriber_client.SubscriberClient()
# Setup request
name = client.subscription_path("[PROJECT]", "[SUBSCRIPTION]")
topic = client.topic_path("[PROJECT]", "[TOPIC]")
with pytest.raises(CustomException):
client.create_subscription(name, topic)
def test_get_subscription(self):
# Setup Expected Response
name = "name3373707"
topic = "topic110546223"
ack_deadline_seconds = 2135351438
retain_acked_messages = False
enable_message_ordering = True
expected_response = {
"name": name,
"topic": topic,
"ack_deadline_seconds": ack_deadline_seconds,
"retain_acked_messages": retain_acked_messages,
"enable_message_ordering": enable_message_ordering,
}
expected_response = pubsub_pb2.Subscription(**expected_response)
# Mock the API response
channel = ChannelStub(responses=[expected_response])
patch = mock.patch("google.api_core.grpc_helpers.create_channel")
with patch as create_channel:
create_channel.return_value = channel
client = subscriber_client.SubscriberClient()
# Setup Request
subscription = client.subscription_path("[PROJECT]", "[SUBSCRIPTION]")
response = client.get_subscription(subscription)
assert expected_response == response
assert len(channel.requests) == 1
expected_request = pubsub_pb2.GetSubscriptionRequest(subscription=subscription)
actual_request = channel.requests[0][1]
assert expected_request == actual_request
def test_get_subscription_exception(self):
# Mock the API response
channel = ChannelStub(responses=[CustomException()])
patch = mock.patch("google.api_core.grpc_helpers.create_channel")
with patch as create_channel:
create_channel.return_value = channel
client = subscriber_client.SubscriberClient()
# Setup request
subscription = client.subscription_path("[PROJECT]", "[SUBSCRIPTION]")
with pytest.raises(CustomException):
client.get_subscription(subscription)
def test_update_subscription(self):
# Setup Expected Response
name = "name3373707"
topic = "topic110546223"
ack_deadline_seconds_2 = 921632575
retain_acked_messages = False
enable_message_ordering = True
expected_response = {
"name": name,
"topic": topic,
"ack_deadline_seconds": ack_deadline_seconds_2,
"retain_acked_messages": retain_acked_messages,
"enable_message_ordering": enable_message_ordering,
}
expected_response = pubsub_pb2.Subscription(**expected_response)
# Mock the API response
channel = ChannelStub(responses=[expected_response])
patch = mock.patch("google.api_core.grpc_helpers.create_channel")
with patch as create_channel:
create_channel.return_value = channel
client = subscriber_client.SubscriberClient()
# Setup Request
ack_deadline_seconds = 42
subscription = {"ack_deadline_seconds": ack_deadline_seconds}
paths_element = "ack_deadline_seconds"
paths = [paths_element]
update_mask = {"paths": paths}
response = client.update_subscription(subscription, update_mask)
assert expected_response == response
assert len(channel.requests) == 1
expected_request = pubsub_pb2.UpdateSubscriptionRequest(
subscription=subscription, update_mask=update_mask
)
actual_request = channel.requests[0][1]
assert expected_request == actual_request
def test_update_subscription_exception(self):
# Mock the API response
channel = ChannelStub(responses=[CustomException()])
patch = mock.patch("google.api_core.grpc_helpers.create_channel")
with patch as create_channel:
create_channel.return_value = channel
client = subscriber_client.SubscriberClient()
# Setup request
ack_deadline_seconds = 42
subscription = {"ack_deadline_seconds": ack_deadline_seconds}
paths_element = "ack_deadline_seconds"
paths = [paths_element]
update_mask = {"paths": paths}
with pytest.raises(CustomException):
client.update_subscription(subscription, update_mask)
def test_list_subscriptions(self):
# Setup Expected Response
next_page_token = ""
subscriptions_element = {}
subscriptions = [subscriptions_element]
expected_response = {
"next_page_token": next_page_token,
"subscriptions": subscriptions,
}
expected_response = pubsub_pb2.ListSubscriptionsResponse(**expected_response)
# Mock the API response
channel = ChannelStub(responses=[expected_response])
patch = mock.patch("google.api_core.grpc_helpers.create_channel")
with patch as create_channel:
create_channel.return_value = channel
client = subscriber_client.SubscriberClient()
# Setup Request
project = client.project_path("[PROJECT]")
paged_list_response = client.list_subscriptions(project)
resources = list(paged_list_response)
assert len(resources) == 1
assert expected_response.subscriptions[0] == resources[0]
assert len(channel.requests) == 1
expected_request = pubsub_pb2.ListSubscriptionsRequest(project=project)
actual_request = channel.requests[0][1]
assert expected_request == actual_request
def test_list_subscriptions_exception(self):
channel = ChannelStub(responses=[CustomException()])
patch = mock.patch("google.api_core.grpc_helpers.create_channel")
with patch as create_channel:
create_channel.return_value = channel
client = subscriber_client.SubscriberClient()
# Setup request
project = client.project_path("[PROJECT]")
paged_list_response = client.list_subscriptions(project)
with pytest.raises(CustomException):
list(paged_list_response)
def test_delete_subscription(self):
channel = ChannelStub()
patch = mock.patch("google.api_core.grpc_helpers.create_channel")
with patch as create_channel:
create_channel.return_value = channel
client = subscriber_client.SubscriberClient()
# Setup Request
subscription = client.subscription_path("[PROJECT]", "[SUBSCRIPTION]")
client.delete_subscription(subscription)
assert len(channel.requests) == 1
expected_request = pubsub_pb2.DeleteSubscriptionRequest(
subscription=subscription
)
actual_request = channel.requests[0][1]
assert expected_request == actual_request
def test_delete_subscription_exception(self):
# Mock the API response
channel = ChannelStub(responses=[CustomException()])
patch = mock.patch("google.api_core.grpc_helpers.create_channel")
with patch as create_channel:
create_channel.return_value = channel
client = subscriber_client.SubscriberClient()
# Setup request
subscription = client.subscription_path("[PROJECT]", "[SUBSCRIPTION]")
with pytest.raises(CustomException):
client.delete_subscription(subscription)
def test_modify_ack_deadline(self):
channel = ChannelStub()
patch = mock.patch("google.api_core.grpc_helpers.create_channel")
with patch as create_channel:
create_channel.return_value = channel
client = subscriber_client.SubscriberClient()
# Setup Request
subscription = client.subscription_path("[PROJECT]", "[SUBSCRIPTION]")
ack_ids = []
ack_deadline_seconds = 2135351438
client.modify_ack_deadline(subscription, ack_ids, ack_deadline_seconds)
assert len(channel.requests) == 1
expected_request = pubsub_pb2.ModifyAckDeadlineRequest(
subscription=subscription,
ack_ids=ack_ids,
ack_deadline_seconds=ack_deadline_seconds,
)
actual_request = channel.requests[0][1]
assert expected_request == actual_request
def test_modify_ack_deadline_exception(self):
# Mock the API response
channel = ChannelStub(responses=[CustomException()])
patch = mock.patch("google.api_core.grpc_helpers.create_channel")
with patch as create_channel:
create_channel.return_value = channel
client = subscriber_client.SubscriberClient()
# Setup request
subscription = client.subscription_path("[PROJECT]", "[SUBSCRIPTION]")
ack_ids = []
ack_deadline_seconds = 2135351438
with pytest.raises(CustomException):
client.modify_ack_deadline(subscription, ack_ids, ack_deadline_seconds)
def test_acknowledge(self):
channel = ChannelStub()
patch = mock.patch("google.api_core.grpc_helpers.create_channel")
with patch as create_channel:
create_channel.return_value = channel
client = subscriber_client.SubscriberClient()
# Setup Request
subscription = client.subscription_path("[PROJECT]", "[SUBSCRIPTION]")
ack_ids = []
client.acknowledge(subscription, ack_ids)
assert len(channel.requests) == 1
expected_request = pubsub_pb2.AcknowledgeRequest(
subscription=subscription, ack_ids=ack_ids
)
actual_request = channel.requests[0][1]
assert expected_request == actual_request
def test_acknowledge_exception(self):
# Mock the API response
channel = ChannelStub(responses=[CustomException()])
patch = mock.patch("google.api_core.grpc_helpers.create_channel")
with patch as create_channel:
create_channel.return_value = channel
client = subscriber_client.SubscriberClient()
# Setup request
subscription = client.subscription_path("[PROJECT]", "[SUBSCRIPTION]")
ack_ids = []
with pytest.raises(CustomException):
client.acknowledge(subscription, ack_ids)
def test_pull(self):
# Setup Expected Response
expected_response = {}
expected_response = pubsub_pb2.PullResponse(**expected_response)
# Mock the API response
channel = ChannelStub(responses=[expected_response])
patch = mock.patch("google.api_core.grpc_helpers.create_channel")
with patch as create_channel:
create_channel.return_value = channel
client = subscriber_client.SubscriberClient()
# Setup Request
subscription = client.subscription_path("[PROJECT]", "[SUBSCRIPTION]")
max_messages = 496131527
response = client.pull(subscription, max_messages)
assert expected_response == response
assert len(channel.requests) == 1
expected_request = pubsub_pb2.PullRequest(
subscription=subscription, max_messages=max_messages
)
actual_request = channel.requests[0][1]
assert expected_request == actual_request
def test_pull_exception(self):
# Mock the API response
channel = ChannelStub(responses=[CustomException()])
patch = mock.patch("google.api_core.grpc_helpers.create_channel")
with patch as create_channel:
create_channel.return_value = channel
client = subscriber_client.SubscriberClient()
# Setup request
subscription = client.subscription_path("[PROJECT]", "[SUBSCRIPTION]")
max_messages = 496131527
with pytest.raises(CustomException):
client.pull(subscription, max_messages)
def test_streaming_pull(self):
# Setup Expected Response
received_messages_element = {}
received_messages = [received_messages_element]
expected_response = {"received_messages": received_messages}
expected_response = pubsub_pb2.StreamingPullResponse(**expected_response)
# Mock the API response
channel = ChannelStub(responses=[iter([expected_response])])
patch = mock.patch("google.api_core.grpc_helpers.create_channel")
with patch as create_channel:
create_channel.return_value = channel
client = subscriber_client.SubscriberClient()
# Setup Request
subscription = client.subscription_path("[PROJECT]", "[SUBSCRIPTION]")
stream_ack_deadline_seconds = 1875467245
request = {
"subscription": subscription,
"stream_ack_deadline_seconds": stream_ack_deadline_seconds,
}
request = pubsub_pb2.StreamingPullRequest(**request)
requests = [request]
response = client.streaming_pull(requests)
resources = list(response)
assert len(resources) == 1
assert expected_response == resources[0]
assert len(channel.requests) == 1
actual_requests = channel.requests[0][1]
assert len(actual_requests) == 1
actual_request = list(actual_requests)[0]
assert request == actual_request
def test_streaming_pull_exception(self):
# Mock the API response
channel = ChannelStub(responses=[CustomException()])
patch = mock.patch("google.api_core.grpc_helpers.create_channel")
with patch as create_channel:
create_channel.return_value = channel
client = subscriber_client.SubscriberClient()
# Setup request
subscription = client.subscription_path("[PROJECT]", "[SUBSCRIPTION]")
stream_ack_deadline_seconds = 1875467245
request = {
"subscription": subscription,
"stream_ack_deadline_seconds": stream_ack_deadline_seconds,
}
request = pubsub_pb2.StreamingPullRequest(**request)
requests = [request]
with pytest.raises(CustomException):
client.streaming_pull(requests)
def test_modify_push_config(self):
channel = ChannelStub()
patch = mock.patch("google.api_core.grpc_helpers.create_channel")
with patch as create_channel:
create_channel.return_value = channel
client = subscriber_client.SubscriberClient()
# Setup Request
subscription = client.subscription_path("[PROJECT]", "[SUBSCRIPTION]")
push_config = {}
client.modify_push_config(subscription, push_config)
assert len(channel.requests) == 1
expected_request = pubsub_pb2.ModifyPushConfigRequest(
subscription=subscription, push_config=push_config
)
actual_request = channel.requests[0][1]
assert expected_request == actual_request
def test_modify_push_config_exception(self):
# Mock the API response
channel = ChannelStub(responses=[CustomException()])
patch = mock.patch("google.api_core.grpc_helpers.create_channel")
with patch as create_channel:
create_channel.return_value = channel
client = subscriber_client.SubscriberClient()
# Setup request
subscription = client.subscription_path("[PROJECT]", "[SUBSCRIPTION]")
push_config = {}
with pytest.raises(CustomException):
client.modify_push_config(subscription, push_config)
def test_list_snapshots(self):
# Setup Expected Response
next_page_token = ""
snapshots_element = {}
snapshots = [snapshots_element]
expected_response = {"next_page_token": next_page_token, "snapshots": snapshots}
expected_response = pubsub_pb2.ListSnapshotsResponse(**expected_response)
# Mock the API response
channel = ChannelStub(responses=[expected_response])
patch = mock.patch("google.api_core.grpc_helpers.create_channel")
with patch as create_channel:
create_channel.return_value = channel
client = subscriber_client.SubscriberClient()
# Setup Request
project = client.project_path("[PROJECT]")
paged_list_response = client.list_snapshots(project)
resources = list(paged_list_response)
assert len(resources) == 1
assert expected_response.snapshots[0] == resources[0]
assert len(channel.requests) == 1
expected_request = pubsub_pb2.ListSnapshotsRequest(project=project)
actual_request = channel.requests[0][1]
assert expected_request == actual_request
def test_list_snapshots_exception(self):
channel = ChannelStub(responses=[CustomException()])
patch = mock.patch("google.api_core.grpc_helpers.create_channel")
with patch as create_channel:
create_channel.return_value = channel
client = subscriber_client.SubscriberClient()
# Setup request
project = client.project_path("[PROJECT]")
paged_list_response = client.list_snapshots(project)
with pytest.raises(CustomException):
list(paged_list_response)
def test_create_snapshot(self):
# Setup Expected Response
name_2 = "name2-1052831874"
topic = "topic110546223"
expected_response = {"name": name_2, "topic": topic}
expected_response = pubsub_pb2.Snapshot(**expected_response)
# Mock the API response
channel = ChannelStub(responses=[expected_response])
patch = mock.patch("google.api_core.grpc_helpers.create_channel")
with patch as create_channel:
create_channel.return_value = channel
client = subscriber_client.SubscriberClient()
# Setup Request
name = client.snapshot_path("[PROJECT]", "[SNAPSHOT]")
subscription = client.subscription_path("[PROJECT]", "[SUBSCRIPTION]")
response = client.create_snapshot(name, subscription)
assert expected_response == response
assert len(channel.requests) == 1
expected_request = pubsub_pb2.CreateSnapshotRequest(
name=name, subscription=subscription
)
actual_request = channel.requests[0][1]
assert expected_request == actual_request
def test_create_snapshot_exception(self):
# Mock the API response
channel = ChannelStub(responses=[CustomException()])
patch = mock.patch("google.api_core.grpc_helpers.create_channel")
with patch as create_channel:
create_channel.return_value = channel
client = subscriber_client.SubscriberClient()
# Setup request
name = client.snapshot_path("[PROJECT]", "[SNAPSHOT]")
subscription = client.subscription_path("[PROJECT]", "[SUBSCRIPTION]")
with pytest.raises(CustomException):
client.create_snapshot(name, subscription)
def test_update_snapshot(self):
# Setup Expected Response
name = "name3373707"
topic = "topic110546223"
expected_response = {"name": name, "topic": topic}
expected_response = pubsub_pb2.Snapshot(**expected_response)
# Mock the API response
channel = ChannelStub(responses=[expected_response])
patch = mock.patch("google.api_core.grpc_helpers.create_channel")
with patch as create_channel:
create_channel.return_value = channel
client = subscriber_client.SubscriberClient()
# Setup Request
seconds = 123456
expire_time = {"seconds": seconds}
snapshot = {"expire_time": expire_time}
paths_element = "expire_time"
paths = [paths_element]
update_mask = {"paths": paths}
response = client.update_snapshot(snapshot, update_mask)
assert expected_response == response
assert len(channel.requests) == 1
expected_request = pubsub_pb2.UpdateSnapshotRequest(
snapshot=snapshot, update_mask=update_mask
)
actual_request = channel.requests[0][1]
assert expected_request == actual_request
def test_update_snapshot_exception(self):
# Mock the API response
channel = ChannelStub(responses=[CustomException()])
patch = mock.patch("google.api_core.grpc_helpers.create_channel")
with patch as create_channel:
create_channel.return_value = channel
client = subscriber_client.SubscriberClient()
# Setup request
seconds = 123456
expire_time = {"seconds": seconds}
snapshot = {"expire_time": expire_time}
paths_element = "expire_time"
paths = [paths_element]
update_mask = {"paths": paths}
with pytest.raises(CustomException):
client.update_snapshot(snapshot, update_mask)
def test_delete_snapshot(self):
channel = ChannelStub()
patch = mock.patch("google.api_core.grpc_helpers.create_channel")
with patch as create_channel:
create_channel.return_value = channel
client = subscriber_client.SubscriberClient()
# Setup Request
snapshot = client.snapshot_path("[PROJECT]", "[SNAPSHOT]")
client.delete_snapshot(snapshot)
assert len(channel.requests) == 1
expected_request = pubsub_pb2.DeleteSnapshotRequest(snapshot=snapshot)
actual_request = channel.requests[0][1]
assert expected_request == actual_request
def test_delete_snapshot_exception(self):
# Mock the API response
channel = ChannelStub(responses=[CustomException()])
patch = mock.patch("google.api_core.grpc_helpers.create_channel")
with patch as create_channel:
create_channel.return_value = channel
client = subscriber_client.SubscriberClient()
# Setup request
snapshot = client.snapshot_path("[PROJECT]", "[SNAPSHOT]")
with pytest.raises(CustomException):
client.delete_snapshot(snapshot)
def test_seek(self):
# Setup Expected Response
expected_response = {}
expected_response = pubsub_pb2.SeekResponse(**expected_response)
# Mock the API response
channel = ChannelStub(responses=[expected_response])
patch = mock.patch("google.api_core.grpc_helpers.create_channel")
with patch as create_channel:
create_channel.return_value = channel
client = subscriber_client.SubscriberClient()
# Setup Request
subscription = client.subscription_path("[PROJECT]", "[SUBSCRIPTION]")
response = client.seek(subscription)
assert expected_response == response
assert len(channel.requests) == 1
expected_request = pubsub_pb2.SeekRequest(subscription=subscription)
actual_request = channel.requests[0][1]
assert expected_request == actual_request
def test_seek_exception(self):
# Mock the API response
channel = ChannelStub(responses=[CustomException()])
patch = mock.patch("google.api_core.grpc_helpers.create_channel")
with patch as create_channel:
create_channel.return_value = channel
client = subscriber_client.SubscriberClient()
# Setup request
subscription = client.subscription_path("[PROJECT]", "[SUBSCRIPTION]")
with pytest.raises(CustomException):
client.seek(subscription)
def test_set_iam_policy(self):
# Setup Expected Response
version = 351608024
etag = b"21"
expected_response = {"version": version, "etag": etag}
expected_response = policy_pb2.Policy(**expected_response)
# Mock the API response
channel = ChannelStub(responses=[expected_response])
patch = mock.patch("google.api_core.grpc_helpers.create_channel")
with patch as create_channel:
create_channel.return_value = channel
client = subscriber_client.SubscriberClient()
# Setup Request
resource = client.subscription_path("[PROJECT]", "[SUBSCRIPTION]")
policy = {}
response = client.set_iam_policy(resource, policy)
assert expected_response == response
assert len(channel.requests) == 1
expected_request = iam_policy_pb2.SetIamPolicyRequest(
resource=resource, policy=policy
)
actual_request = channel.requests[0][1]
assert expected_request == actual_request
def test_set_iam_policy_exception(self):
# Mock the API response
channel = ChannelStub(responses=[CustomException()])
patch = mock.patch("google.api_core.grpc_helpers.create_channel")
with patch as create_channel:
create_channel.return_value = channel
client = subscriber_client.SubscriberClient()
# Setup request
resource = client.subscription_path("[PROJECT]", "[SUBSCRIPTION]")
policy = {}
with pytest.raises(CustomException):
client.set_iam_policy(resource, policy)
def test_get_iam_policy(self):
# Setup Expected Response
version = 351608024
etag = b"21"
expected_response = {"version": version, "etag": etag}
expected_response = policy_pb2.Policy(**expected_response)
# Mock the API response
channel = ChannelStub(responses=[expected_response])
patch = mock.patch("google.api_core.grpc_helpers.create_channel")
with patch as create_channel:
create_channel.return_value = channel
client = subscriber_client.SubscriberClient()
# Setup Request
resource = client.subscription_path("[PROJECT]", "[SUBSCRIPTION]")
response = client.get_iam_policy(resource)
assert expected_response == response
assert len(channel.requests) == 1
expected_request = iam_policy_pb2.GetIamPolicyRequest(resource=resource)
actual_request = channel.requests[0][1]
assert expected_request == actual_request
def test_get_iam_policy_exception(self):
# Mock the API response
channel = ChannelStub(responses=[CustomException()])
patch = mock.patch("google.api_core.grpc_helpers.create_channel")
with patch as create_channel:
create_channel.return_value = channel
client = subscriber_client.SubscriberClient()
# Setup request
resource = client.subscription_path("[PROJECT]", "[SUBSCRIPTION]")
with pytest.raises(CustomException):
client.get_iam_policy(resource)
def test_test_iam_permissions(self):
# Setup Expected Response
expected_response = {}
expected_response = iam_policy_pb2.TestIamPermissionsResponse(
**expected_response
)
# Mock the API response
channel = ChannelStub(responses=[expected_response])
patch = mock.patch("google.api_core.grpc_helpers.create_channel")
with patch as create_channel:
create_channel.return_value = channel
client = subscriber_client.SubscriberClient()
# Setup Request
resource = client.subscription_path("[PROJECT]", "[SUBSCRIPTION]")
permissions = []
response = client.test_iam_permissions(resource, permissions)
assert expected_response == response
assert len(channel.requests) == 1
expected_request = iam_policy_pb2.TestIamPermissionsRequest(
resource=resource, permissions=permissions
)
actual_request = channel.requests[0][1]
assert expected_request == actual_request
def test_test_iam_permissions_exception(self):
# Mock the API response
channel = ChannelStub(responses=[CustomException()])
patch = mock.patch("google.api_core.grpc_helpers.create_channel")
with patch as create_channel:
create_channel.return_value = channel
client = subscriber_client.SubscriberClient()
# Setup request
resource = client.subscription_path("[PROJECT]", "[SUBSCRIPTION]")
permissions = []
with pytest.raises(CustomException):
client.test_iam_permissions(resource, permissions)
| 38.171801
| 88
| 0.670547
| 3,241
| 32,217
| 6.409442
| 0.068497
| 0.067588
| 0.024262
| 0.03466
| 0.867183
| 0.828142
| 0.797718
| 0.786405
| 0.779185
| 0.752371
| 0
| 0.013899
| 0.24515
| 32,217
| 843
| 89
| 38.217082
| 0.840289
| 0.066021
| 0
| 0.710392
| 0
| 0
| 0.099927
| 0.057835
| 0
| 0
| 0
| 0
| 0.09029
| 1
| 0.069847
| false
| 0.001704
| 0.015332
| 0.003407
| 0.097104
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
dbc5316f2c51b37d3166af2074795df5701d97d9
| 87
|
py
|
Python
|
pypc/operations/io/__init__.py
|
xpenalosa/PyPc
|
fff3ae29b800d127d261492098aecbbf6719bd07
|
[
"MIT"
] | null | null | null |
pypc/operations/io/__init__.py
|
xpenalosa/PyPc
|
fff3ae29b800d127d261492098aecbbf6719bd07
|
[
"MIT"
] | null | null | null |
pypc/operations/io/__init__.py
|
xpenalosa/PyPc
|
fff3ae29b800d127d261492098aecbbf6719bd07
|
[
"MIT"
] | null | null | null |
from pypc.operations.io.input import InpOp
from pypc.operations.io.output import OutOp
| 29
| 43
| 0.83908
| 14
| 87
| 5.214286
| 0.642857
| 0.219178
| 0.493151
| 0.547945
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.091954
| 87
| 2
| 44
| 43.5
| 0.924051
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 8
|
dbc9e3c415eb73585bf4b4e35fb58518bc87250d
| 102,776
|
py
|
Python
|
opac/tests/test_main_views.py
|
rafaelpezzuto/opac
|
9b54202350e262a27cb9cb756a892185b288df24
|
[
"BSD-2-Clause"
] | 12
|
2016-11-23T17:49:07.000Z
|
2019-10-07T00:21:03.000Z
|
opac/tests/test_main_views.py
|
rafaelpezzuto/opac
|
9b54202350e262a27cb9cb756a892185b288df24
|
[
"BSD-2-Clause"
] | 1,589
|
2015-12-03T17:47:10.000Z
|
2022-03-31T23:43:36.000Z
|
opac/tests/test_main_views.py
|
rafaelpezzuto/opac
|
9b54202350e262a27cb9cb756a892185b288df24
|
[
"BSD-2-Clause"
] | 21
|
2015-11-13T18:59:33.000Z
|
2021-10-03T22:34:29.000Z
|
# coding: utf-8
import unittest
import pathlib
from unittest.mock import patch, Mock
from urllib.parse import urlparse, parse_qs
import flask
import warnings
from flask import url_for, g, current_app
from flask import render_template
from bs4 import BeautifulSoup
from flask_babelex import gettext as _
from .base import BaseTestCase
from . import utils
from webapp.config.lang_names import display_original_lang_name
from webapp.main.views import RetryableError, NonRetryableError
class MainTestCase(BaseTestCase):
def test_home_page(self):
"""
Teste da página inicial, deve retorna utf-8 como conjunto de caracter e
o template ``collection/index.html``.
"""
with current_app.app_context():
utils.makeOneCollection()
with self.client as c:
response = c.get(url_for('main.index'))
self.assertStatus(response, 200)
self.assertEqual('text/html; charset=utf-8', response.content_type)
self.assert_template_used("collection/index.html")
def test_should_obtain_the_latest_metric_counts_from_collection(self):
with current_app.app_context():
collection = utils.makeOneCollection({
"metrics" : {
"total_journal" : 0,
"total_issue" : 0,
"total_article" : 0,
"total_citation" : 0
},
})
with self.client as client:
response = client.get(url_for('main.index'))
collection = g.get('collection')
self.assertEqual(0, collection.metrics.total_journal)
utils.makeOneJournal({'is_public': True, 'current_status': 'current'})
utils.makeOneArticle({'is_public': True})
with self.client as client:
response = client.get(url_for('main.index'))
self.assertEqual(1, collection.metrics.total_article)
self.assertEqual(1, collection.metrics.total_journal)
def test_g_object_has_collection_object(self):
"""
COM:
- uma nova collection criada com o mesmo acronimo da setting: OPAC_CONFIG
QUANDO:
- solicitamo uma pagina
VERIFICAMOS:
- que no contexto, a variável 'g' tenha asociado uma instancia da collection
"""
with current_app.app_context():
# with
collection_db_record = utils.makeOneCollection()
# when
with self.client as c:
response = c.get(url_for('main.index'))
# then
self.assertStatus(response, 200)
self.assertTrue(hasattr(g, 'collection'))
g_collection = g.get('collection')
self.assertEqual(g_collection._id, collection_db_record._id)
def test_change_set_locale(self):
"""
Teste para alterar o idioma da interface, nesse teste a URL:
'/set_locale/<string:lang_code>' deve criar uma variável na sessão com
o valor informado.
"""
with self.client as c:
response = c.get(url_for('main.set_locale', lang_code='es'))
self.assertEqual(302, response.status_code)
self.assertEqual(flask.session['lang'], 'es')
def test_redirect_when_change_set_locale(self):
"""
Teste para verificar se o redirecionamento da ``view function``
``set_locale`` retorna para a página esperada.
"""
with self.client as c:
response = c.get(url_for('main.set_locale', lang_code='es'),
headers={'Referer': '/journals/alpha'},
follow_redirects=True)
self.assertStatus(response, 200)
self.assertTemplateUsed('collection/list_journal.html')
def test_change_set_locale_with_unknow_lang(self):
"""
Teste para alterar o idioma da interface, nesse teste a URL:
'/set_locale/<string:lang_code>' deve retornar uma página com
``status_code``400 e manter o idioma padrão ``pt_BR``.
"""
expected_message = '<p>Código de idioma inválido</p>'
with self.client as c:
response = c.get(url_for('main.set_locale', lang_code='en_US'))
self.assertEqual(400, response.status_code)
self.assertIn('Código de idioma inválido',
response.data.decode('utf-8'))
self.assertTemplateUsed('errors/400.html')
self.assertEqual(expected_message,
self.get_context_variable('message'))
@unittest.skip("Revisar/Refazer, agora a lista é carregada com ajax")
def test_collection_list_alpha(self):
"""
Teste para avaliar o retorno da ``view function`` collection_list_alpha,
ao cadastrarmos 10 periódico a interface deve retornar uma listagem
contendo elementos esperado também deve retornar o template
``collection/list_alpha.html``.
"""
utils.makeOneCollection()
journals = utils.makeAnyJournal(items=10)
response = self.client.get(url_for('main.collection_list') + '#alpha')
self.assertStatus(response, 200)
self.assertTemplateUsed('collection/list_journal.html')
for journal in journals:
self.assertIn('journals/%s' % journal.id,
response.data.decode('utf-8'))
self.assertListEqual(sorted([journal.id for journal in journals]),
sorted([journal.id for journal in self.get_context_variable('journals')]))
def test_collection_list_alpha_without_journals(self):
"""
Teste para avaliar o retorno da ``view function`` collection_list_alpha
quando não existe periódicos cadastrados deve retorna a msg
``Nenhum periódico encontrado`` no corpo da resposta.
"""
utils.makeOneCollection()
response = self.client.get(url_for('main.collection_list'))
self.assertStatus(response, 200)
self.assertTemplateUsed('collection/list_journal.html')
self.assertIn('Nenhum periódico encontrado',
response.data.decode('utf-8'))
@unittest.skip("Revisar/Refazer, agora a lista é carregada com ajax")
def test_collection_list_theme(self):
"""
Teste para avaliar o retorno da ``view function`` collection_list_theme
ao cadastrarmos 60 periódico a interface deve retornar uma listagem
contendo elementos esperado tambémdeve retornar o template
``collection/list_theme.html``.
"""
utils.makeOneCollection()
journals = utils.makeAnyJournal(items=30,
attrib={"study_areas": ["Engineering"]})
journals = utils.makeAnyJournal(items=30,
attrib={"study_areas": ["Human Sciences",
"Biological Sciences", "Engineering"]})
response = self.client.get(url_for('main.collection_list') + '#theme')
self.assertStatus(response, 200)
self.assertTemplateUsed('collection/list_journal.html')
for journal in journals:
self.assertIn('journals/%s' % journal.id,
response.data.decode('utf-8'))
def test_collection_list_theme_without_journals(self):
"""
Teste para avaliar o retorno da ``view function`` collection_list_theme
quando não existe periódicos cadastrados deve retorna a msg
``Nenhum periódico encontrado`` no corpo da resposta.
"""
utils.makeOneCollection()
response = self.client.get(url_for('main.collection_list'))
self.assertStatus(response, 200)
self.assertTemplateUsed('collection/list_journal.html')
self.assertIn('Nenhum periódico encontrado',
response.data.decode('utf-8'))
@unittest.skip("Revisar/Refazer, agora a lista é carregada com ajax")
def test_collection_list_institution(self):
"""
Teste para a ``view function`` collection_list_institution, será avaliado
somente o template utilizado pois essa função depende de definição do atributo
instituição no manager.
"""
utils.makeOneCollection()
warnings.warn("Necessário definir o atributo instituição no modelo do Manager")
response = self.client.get(url_for('main.collection_list') + '#publisher')
self.assertStatus(response, 200)
self.assertTemplateUsed('collection/list_journal.html')
def test_collection_list_institution_without_journals(self):
"""
Teste para avaliar o retorno da ``view function`` collection_list_institution
quando não existe periódicos cadastrados deve retorna a msg
``Nenhum periódico encontrado`` no corpo da resposta.
"""
utils.makeOneCollection()
response = self.client.get(url_for('main.collection_list'))
self.assertStatus(response, 200)
self.assertTemplateUsed('collection/list_journal.html')
self.assertIn('Nenhum periódico encontrado',
response.data.decode('utf-8'))
def test_collection_list_feed(self):
"""
Teste para verificar a reposta da ``view funciton``collection_list_feed
Se cadastra 10 periódicos, deve retornar na interface do rss, utilizando
o template ``collection/list_feed_content.html```.
"""
with current_app.app_context():
utils.makeOneCollection()
journals = utils.makeAnyJournal(items=10)
issues = []
for journal in journals:
issue = utils.makeOneIssue({'journal': journal.id})
utils.makeAnyArticle(
issue=issue,
attrib={'journal': journal.id, 'issue': issue.id}
)
issues.append(issue)
response = self.client.get(url_for('main.collection_list_feed'))
self.assertStatus(response, 200)
self.assertTemplateUsed('collection/list_feed_content.html')
for journal in journals:
self.assertIn('%s' % journal.url_segment,
response.data.decode('utf-8'))
for issue in issues:
self.assertIn('%s' % issue.url_segment,
response.data.decode('utf-8'))
def test_collection_list_feed_without_journals(self):
"""
Teste para avaliar o retorno da ``view function`` collection_list_feed
quando não existe periódicos cadastrados deve retorna a msg
``Nenhum periódico encontrado`` no corpo da resposta.
"""
with current_app.app_context():
utils.makeOneCollection()
response = self.client.get(url_for('main.collection_list_feed'))
self.assertStatus(response, 200)
self.assertIn('Nenhum periódico encontrado',
response.data.decode('utf-8'))
def test_collection_list_feed_without_issues(self):
"""
Teste para verificar a reposta da ``view funciton``collection_list_feed
Se cadastra 10 periódicos sem número, deve retornar na interface do
rss, utilizando o template ``collection/list_feed_content.html```.
"""
with current_app.app_context():
utils.makeOneCollection()
journals = utils.makeAnyJournal(items=10)
response = self.client.get(url_for('main.collection_list_feed'))
self.assertStatus(response, 200)
self.assertTemplateUsed('collection/list_feed_content.html')
for journal in journals:
self.assertIn('%s' % journal.url_segment,
response.data.decode('utf-8'))
def test_journal_feed(self):
"""
Teste da ``view function`` ``journal_feed``, deve retornar um rss
que usa o template ``issue/feed_content.html`` e o título do periódico no
corpo da página.
"""
with current_app.app_context():
utils.makeOneCollection()
journal = utils.makeOneJournal({'title': 'Revista X'})
issue = utils.makeOneIssue({'journal': journal})
utils.makeAnyArticle(
issue=issue,
attrib={
'journal': journal.id,
'issue': issue.id}
)
response = self.client.get(url_for('main.journal_feed',
url_seg=journal.url_segment))
self.assertTrue(200, response.status_code)
self.assertTemplateUsed('issue/feed_content.html')
def test_journal_feed_has_doi(self):
"""
Teste da ``view function`` ``journal_feed``, deve retornar um rss
que usa o template ``issue/feed_content.html`` e os respectivos artigo com DOI.
"""
with current_app.app_context():
utils.makeOneCollection()
journal = utils.makeOneJournal({'title': 'Revista X'})
issue = utils.makeOneIssue({'journal': journal})
utils.makeAnyArticle(
issue=issue,
attrib={
'journal': journal.id,
'issue': issue.id,
'doi': '10.2105/AJPH.2009.160184'
}
)
response = self.client.get(url_for('main.journal_feed',
url_seg=journal.url_segment))
self.assertTrue(200, response.status_code)
self.assertTemplateUsed('issue/feed_content.html')
self.assertIn('<id>10.2105/AJPH.2009.160184</id>', response.data.decode('utf-8'))
def test_journal_feed_with_unknow_id(self):
"""
Teste da ``view function`` ``journal_feed`` com um id desconhecido
deve retornar uma página com ``status_code`` 404 e msg
``Periódico não encontrado``.
"""
utils.makeAnyJournal(items=6)
unknow_id = '0k2qhs8slwnui8'
response = self.client.get(url_for('main.journal_feed',
url_seg=unknow_id))
self.assertStatus(response, 404)
self.assertIn('Periódico não encontrado',
response.data.decode('utf-8'))
def test_journal_feed_with_attrib_is_public_false(self):
"""
Teste da ``view function`` ``journal_feed`` acessando um periódico
com atributo is_public=False, deve retorna uma página com ``status_code``
404 e msg cadastrada no atributo ``reason``.
"""
unpublish_reason = 'plágio'
journal = utils.makeOneJournal({
'is_public': False,
'unpublish_reason': unpublish_reason})
response = self.client.get(url_for('main.journal_feed',
url_seg=journal.url_segment))
self.assertStatus(response, 404)
self.assertIn(unpublish_reason, response.data.decode('utf-8'))
# ISSUE
def test_issue_feed(self):
"""
Teste da ``view function`` ``issue_feed``, deve retornar um rss
que usa o template ``issue/feed_content.html`` e o título do periódico no
corpo da página.
"""
with current_app.app_context():
utils.makeOneCollection()
journal = utils.makeOneJournal()
issue = utils.makeOneIssue({'number': '31',
'volume': '10',
'journal': journal})
utils.makeAnyArticle(
issue=issue,
attrib={'journal': issue.journal.id, 'issue': issue.id}
)
response = self.client.get(url_for('main.issue_feed',
url_seg=journal.url_segment,
url_seg_issue=issue.url_segment))
self.assertStatus(response, 200)
self.assertTemplateUsed('issue/feed_content.html')
self.assertIn('Vol. 10 No. 31', response.data.decode('utf-8'))
def test_issue_feed_has_doi(self):
"""
Teste da ``view function`` ``issue_feed``, deve retornar um rss
que usa o template ``issue/feed_content.html`` e os respectivos artigo com DOI.
"""
with current_app.app_context():
utils.makeOneCollection()
journal = utils.makeOneJournal()
issue = utils.makeOneIssue({'number': '31',
'volume': '10',
'journal': journal})
utils.makeAnyArticle(
issue=issue,
attrib={
'journal': issue.journal.id,
'issue': issue.id,
'doi': '10.2105/AJPH.2009.160184'}
)
response = self.client.get(url_for('main.issue_feed',
url_seg=journal.url_segment,
url_seg_issue=issue.url_segment))
self.assertStatus(response, 200)
self.assertTemplateUsed('issue/feed_content.html')
self.assertIn('<id>10.2105/AJPH.2009.160184</id>', response.data.decode('utf-8'))
def test_issue_feed_unknow_issue_id(self):
"""
Teste para avaliar o retorno da ``view function`` ``issue_feed``
quando é acessado utilizando um identificador do issue desconhecido,
deve retorna status_code 404 com a msg ``Número não encontrado``.
"""
journal = utils.makeOneJournal()
utils.makeOneIssue({'journal': journal})
unknow_url_seg = '2015.v6n3'
response = self.client.get(url_for('main.issue_feed',
url_seg=journal.url_segment,
url_seg_issue=unknow_url_seg))
self.assertStatus(response, 404)
self.assertIn('Número não encontrado', response.data.decode('utf-8'))
def test_issue_feed_with_attrib_is_public_false(self):
"""
Teste da ``view function`` ``issue_feed`` acessando um número
com atributo is_public=False, deve retorna uma página com ``status_code``
404 e msg cadastrada no atributo ``reason``.
"""
unpublish_reason = 'número incorreto'
journal = utils.makeOneJournal()
issue = utils.makeOneIssue({
'is_public': False,
'unpublish_reason': unpublish_reason,
'journal': journal})
response = self.client.get(url_for('main.issue_feed',
url_seg=journal.url_segment,
url_seg_issue=issue.url_segment))
self.assertStatus(response, 404)
self.assertIn('número incorreto', response.data.decode('utf-8'))
def test_issue_feed_with_journal_attrib_is_public_false(self):
"""
Teste da ``view function`` ``issue_toc`` acessando um número
com atributo is_public=True, porém com um periódico com atributo
is_public=False deve retorna uma página com ``status_code`` 404 e msg
cadastrada no atributo ``reason`` do periódico.
"""
unpublish_reason = 'Revista removida da coleção'
journal = utils.makeOneJournal({
'is_public': False,
'unpublish_reason': unpublish_reason})
issue = utils.makeOneIssue({
'is_public': True,
'journal': journal.id})
response = self.client.get(url_for('main.issue_feed',
url_seg=journal.url_segment,
url_seg_issue=issue.url_segment))
self.assertStatus(response, 404)
self.assertIn(unpublish_reason, response.data.decode('utf-8'))
# ARTICLE
def test_article_detail_v3(self):
"""
Teste da ``view function`` ``article_detail_v3``, deve retornar uma página
que usa o template ``article/detail.html``.
"""
with current_app.app_context():
utils.makeOneCollection()
journal = utils.makeOneJournal()
issue = utils.makeOneIssue({'journal': journal})
article = utils.makeOneArticle({'title': 'Article Y',
'original_language': 'en',
'languages': ['es', 'pt'],
'translated_titles': [
{'language': 'es', 'name': u'Artículo en español'},
{'language': 'pt', 'name': u'Artigo en Português'},
],
'issue': issue,
'journal': journal,
'url_segment': '10-11'})
response = self.client.get(url_for('main.article_detail_v3',
url_seg=journal.url_segment,
article_pid_v3=article.aid))
self.assertStatus(response, 200)
def test_article_detail_v3_redirects_to_original_language(self):
"""
Teste da ``view function`` ``article_detail_v3``, deve retornar uma página
que usa o template ``article/detail.html``.
"""
with current_app.app_context():
utils.makeOneCollection()
journal = utils.makeOneJournal()
issue = utils.makeOneIssue({'journal': journal})
article = utils.makeOneArticle({'title': 'Article Y',
'original_language': 'en',
'languages': ['es', 'pt'],
'translated_titles': [
{'language': 'es', 'name': u'Artículo en español'},
{'language': 'pt', 'name': u'Artigo en Português'},
],
'issue': issue,
'journal': journal,
'url_segment': '10-11'})
response = self.client.get(url_for('main.article_detail_v3',
url_seg=journal.url_segment,
article_pid_v3=article.aid,
lang='ru'))
self.assertRedirects(
response,
url_for(
'main.article_detail_v3',
url_seg=journal.url_segment,
article_pid_v3=article.aid,
format='html',
)
)
def test_article_detail_pid_redirect(self):
"""
Teste da ``view function`` ``article_detail_pid``, verifica somente o
redirecionamento.
"""
with current_app.app_context():
utils.makeOneCollection()
journal = utils.makeOneJournal()
issue = utils.makeOneIssue({'journal': journal})
utils.makeOneArticle({'title': 'Article Y',
'issue': issue,
'journal': journal,
'pid': 'S0102-311X2018000100101',
'url_segment': '10-11'})
response = self.client.get(url_for('main.article_detail_pid',
pid='S0102-311X2018000100101'))
#TODO: Alterar o código para 301 (Movido Permanentemente)
self.assertStatus(response, 302)
def test_article_detail_pid_redirect_follow(self):
"""
Teste da ``view function`` ``article_detail_pid``,
deve retornar uma página que usa o template ``article/detail.html``.
"""
with current_app.app_context():
utils.makeOneCollection()
journal = utils.makeOneJournal()
issue = utils.makeOneIssue({'journal': journal})
article = utils.makeOneArticle({'title': 'Article Y',
'issue': issue,
'journal': journal,
'pid': 'S0102-311X2018000100101',
'url_segment': '10-11'})
response = self.client.get(url_for('main.article_detail_pid',
pid='S0102-311X2018000100101'),
follow_redirects=True)
self.assertStatus(response, 200)
self.assertTemplateUsed('article/detail.html')
self.assertEqual(self.get_context_variable('article').id, article.id)
self.assertEqual(self.get_context_variable('journal').id, article.journal.id)
self.assertEqual(self.get_context_variable('issue').id, article.issue.id)
@patch('requests.get')
def test_article_detail_v3_translate_version_(self, mocked_requests_get):
"""
Teste da ``view function`` ``article_detail_v3``, deve retornar uma página
que usa o template ``article/detail.html``.
"""
mocked_response = Mock()
mocked_response.status_code = 200
mocked_response.content = b'<html/>'
mocked_requests_get.return_value = mocked_response
with current_app.app_context():
utils.makeOneCollection()
journal = utils.makeOneJournal()
issue = utils.makeOneIssue({'journal': journal})
article = utils.makeOneArticle({'title': 'Article Y',
'issue': issue,
'journal': journal,
'url_segment': '10-11',
'htmls': [
{'lang': 'de', 'url': 'https://link/de_artigo.html'},
{'lang': 'pt', 'url': 'https://link/pt_artigo.html'},
{'lang': 'bla', 'url': 'https://link/bla_artigo.html'},
]
})
response = self.client.get(url_for('main.article_detail_v3',
url_seg=journal.url_segment,
article_pid_v3=article.aid,
lang='pt'))
self.assertStatus(response, 200)
self.assertTemplateUsed('article/detail.html')
content = response.data.decode('utf-8')
urls = {html['lang']: url_for(
'main.article_detail_v3',
url_seg=journal.url_segment,
article_pid_v3=article.aid,
lang=html['lang'])
for html in article.htmls
}
self.assertIn('{}">Deutsch<'.format(urls['de']), content)
self.assertIn('{}">bla<'.format(urls['bla']), content)
self.assertIn('{}">Português<'.format(urls['pt']), content)
self.assertEqual(
content.count('{}">Deutsch<'.format(urls['de'])), 1)
self.assertEqual(
content.count('{}">Português<'.format(urls['pt'])), 1)
self.assertEqual(
content.count('{}">bla<'.format(urls['bla'])), 1)
@patch('requests.get')
def test_article_detail_v3_has_citation_title_in_pt(self, mocked_requests_get):
"""
Teste da ``view function`` ``article_detail_v3``, deve retornar uma página
que usa o template ``article/detail.html``.
"""
mocked_response = Mock()
mocked_response.status_code = 200
mocked_response.content = b'<html/>'
mocked_requests_get.return_value = mocked_response
with current_app.app_context():
utils.makeOneCollection()
journal = utils.makeOneJournal()
issue = utils.makeOneIssue({'journal': journal})
article = utils.makeOneArticle({'title': 'Article Y',
'original_language': 'en',
'languages': ['es', 'pt'],
'translated_titles': [
{'language': 'es', 'name': u'Artículo título'},
{'language': 'pt', 'name': u'Artigo título'},
],
'issue': issue,
'journal': journal,
'url_segment': '10-11',
'htmls': [
{'lang': 'es', 'url': 'https://link/es_artigo.html'},
{'lang': 'de', 'url': 'https://link/de_artigo.html'},
{'lang': 'pt', 'url': 'https://link/pt_artigo.html'},
{'lang': 'bla', 'url': 'https://link/bla_artigo.html'},
]
})
response = self.client.get(url_for('main.article_detail_v3',
url_seg=journal.url_segment,
article_pid_v3=article.aid,
lang='pt'))
self.assertStatus(response, 200)
self.assertTemplateUsed('article/detail.html')
content = response.data.decode('utf-8')
self.assertIn(
'<meta name="citation_language" content="pt"></meta>',
content
)
self.assertIn(
u'<meta name="citation_title" content="Artigo título"></meta>',
content
)
@patch('requests.get')
def test_article_detail_v3_has_citation_title_in_es(self, mocked_requests_get):
"""
Teste da ``view function`` ``article_detail_v3``, deve retornar uma página
que usa o template ``article/detail.html``.
"""
mocked_response = Mock()
mocked_response.status_code = 200
mocked_response.content = b'<html/>'
mocked_requests_get.return_value = mocked_response
with current_app.app_context():
utils.makeOneCollection()
journal = utils.makeOneJournal()
issue = utils.makeOneIssue({'journal': journal})
article = utils.makeOneArticle({'title': 'Article Y',
'original_language': 'en',
'languages': ['es', 'pt'],
'translated_titles': [
{'language': 'es', 'name': u'Título del Artículo'},
{'language': 'pt', 'name': u'Título do Artigo'},
],
'issue': issue,
'journal': journal,
'url_segment': '10-11',
'htmls': [
{'lang': 'es', 'url': 'https://link/es_artigo.html'},
{'lang': 'pt', 'url': 'https://link/pt_artigo.html'},
{'lang': 'de', 'url': 'https://link/de_artigo.html'},
{'lang': 'bla', 'url': 'https://link/bla_artigo.html'},
]
})
response = self.client.get(url_for('main.article_detail_v3',
url_seg=journal.url_segment,
article_pid_v3=article.aid,
lang='es'))
self.assertStatus(response, 200)
self.assertTemplateUsed('article/detail.html')
content = response.data.decode('utf-8')
self.assertIn(
'<meta name="citation_language" content="es"></meta>',
content
)
self.assertIn(
u'<meta name="citation_title" content="Título del Artículo"></meta>',
content
)
def test_article_detail_v3_links_to_gscholar(self):
"""
Teste da ``view function`` ``article_detail_v3``, deve retornar uma página
que usa o template ``article/detail.html``.
"""
with current_app.app_context():
utils.makeOneCollection()
journal = utils.makeOneJournal({"title":"Título do periódico"})
issue = utils.makeOneIssue({'journal': journal})
article = utils.makeOneArticle({'title': 'Article Y',
'issue': issue,
'journal': journal,
'url_segment': '10-11'})
response = self.client.get(url_for('main.article_detail_v3',
url_seg=journal.url_segment,
article_pid_v3=article.aid,
lang='pt'))
self.assertStatus(response, 200)
page_content = response.data.decode('utf-8')
self.assertTemplateUsed('article/detail.html')
self.assertEqual(self.get_context_variable('article').id, article.id)
self.assertEqual(self.get_context_variable('journal').id, article.journal.id)
self.assertEqual(self.get_context_variable('issue').id, article.issue.id)
result = self.get_context_variable('related_links')
self.assertEqual(result[0][0], 'Google')
self.assertEqual(result[1][0], 'Google Scholar')
self.assertIn('Article Y', result[0][2])
self.assertIn('Article Y', result[1][2])
self.assertIn('Google', page_content)
self.assertIn('/scholar', page_content)
def test_article_detail_v3_links_to_gscholar_for_article_without_title(self):
"""
Teste da ``view function`` ``article_detail_v3``, deve retornar uma página
que usa o template ``article/detail.html``.
"""
with current_app.app_context():
utils.makeOneCollection()
journal = utils.makeOneJournal({"title":"Título do periódico"})
issue = utils.makeOneIssue({'journal': journal})
article = utils.makeOneArticle({'issue': issue,
'journal': journal,
'url_segment': '10-11'})
response = self.client.get(url_for('main.article_detail_v3',
url_seg=journal.url_segment,
article_pid_v3=article.aid,
lang='pt'))
self.assertStatus(response, 200)
page_content = response.data.decode('utf-8')
self.assertTemplateUsed('article/detail.html')
self.assertEqual(self.get_context_variable('article').id, article.id)
self.assertEqual(self.get_context_variable('journal').id, article.journal.id)
self.assertEqual(self.get_context_variable('issue').id, article.issue.id)
result = self.get_context_variable('related_links')
self.assertEqual(result[0][0], 'Google')
self.assertEqual(result[1][0], 'Google Scholar')
self.assertIn(journal.title, result[0][2])
self.assertIn(journal.title, result[1][2])
self.assertIn('Google', page_content)
self.assertIn('/scholar', page_content)
def test_legacy_url_aop_article_detail(self):
"""
Teste da ``view function`` ``router_legacy``, deve retornar uma página
que usa o template ``article/detail.html``.
"""
with current_app.app_context():
utils.makeOneCollection()
journal = utils.makeOneJournal()
issue = utils.makeOneIssue({'journal': journal})
aop_pid = '1111-11111111111111111'
article = utils.makeOneArticle({'title': 'Article Y',
'issue': issue,
'journal': journal,
'url_segment': '10-11',
'aop_pid': aop_pid})
url = '%s?script=sci_arttext&pid=%s' % (
url_for('main.router_legacy'), aop_pid)
response = self.client.get(url, follow_redirects=True)
self.assertStatus(response, 200)
self.assertTemplateUsed('article/detail.html')
self.assertEqual(self.get_context_variable('article').id, article.id)
self.assertEqual(self.get_context_variable('journal').id, article.journal.id)
self.assertEqual(self.get_context_variable('issue').id, article.issue.id)
def test_legacy_url_aop_article_detail_wrong_aop_pid(self):
"""
Teste da ``view function`` ``router_legacy``, deve retornar uma página
que usa o template ``article/detail.html``.
"""
with current_app.app_context():
utils.makeOneCollection()
journal = utils.makeOneJournal()
issue = utils.makeOneIssue({'journal': journal})
utils.makeOneArticle({'title': 'Article Y',
'issue': issue,
'journal': journal,
'url_segment': '10-11',
'aop_pid': '1111-11111111111111110'})
url = '%s?script=sci_arttext&pid=%s' % (
url_for('main.router_legacy'), '1111-11111111111111111')
response = self.client.get(url)
self.assertStatus(response, 404)
self.assertIn('Artigo não encontrado', response.data.decode('utf-8'))
@unittest.skip(u'precisa de integração com SSM para retornar o SSM')
def test_legacy_url_pdf_article_detail(self):
"""
Teste da view ``router_legacy``, deve retornar uma página de pdf quando
na querystring tem: ?script=sci_pdf&pid={PID VALIDO}
e que usa o template ``article/detail_pdf.html``.
"""
with current_app.app_context():
utils.makeOneCollection()
journal = utils.makeOneJournal()
issue = utils.makeOneIssue({'journal': journal})
pid = '1111-11111111111111111'
article = utils.makeOneArticle({'title': 'Article Y',
'issue': issue,
'journal': journal,
'url_segment': '10-11',
'pid': pid})
url = '%s?script=sci_pdf&pid=%s' % (
url_for('main.router_legacy'), pid)
response = self.client.get(url)
self.assertStatus(response, 200)
self.assertTemplateUsed('article/detail_pdf.html')
self.assertEqual(self.get_context_variable('article').id, article.id)
self.assertEqual(self.get_context_variable('journal').id, article.journal.id)
self.assertEqual(self.get_context_variable('issue').id, article.issue.id)
def test_legacy_url_pdf_article_detail_wrong_pid(self):
"""
Teste da view ``router_legacy``, deve retornar uma página de erro (404 not found)
na querystring tem: ?script=sci_pdf&pid={PID INVALIDO}
"""
with current_app.app_context():
utils.makeOneCollection()
journal = utils.makeOneJournal()
issue = utils.makeOneIssue({'journal': journal})
valid_pid = '1111-11111111111111111'
invalid_pid = 'ABCD-22222222222222222'
utils.makeOneArticle({
'title': 'Article Y',
'issue': issue,
'journal': journal,
'url_segment': '10-11',
'pid': valid_pid})
url = '%s?script=sci_pdf&pid=%s' % (
url_for('main.router_legacy'), invalid_pid)
response = self.client.get(url)
self.assertStatus(response, 404)
self.assertIn('Artigo não encontrado', response.data.decode('utf-8'))
def test_legacy_url_article_detail_pid_not_found(self):
"""
Teste da view ``router_legacy_article``, deve retornar uma página de erro (404 not found)
na querystring tem: ?pid={PID INVALIDO}
"""
with current_app.app_context():
utils.makeOneCollection()
journal = utils.makeOneJournal()
issue = utils.makeOneIssue({'journal': journal})
valid_pid = '1111-11111111111111111'
invalid_pid = 'ABCD-22222222222222222'
utils.makeOneArticle({
'title': 'Article Y',
'issue': issue,
'journal': journal,
'pid': valid_pid})
url = '%s?pid=%s&lng=en' % (
url_for('main.router_legacy_article', text_or_abstract="fbtext"),
invalid_pid
)
response = self.client.get(url)
self.assertStatus(response, 404)
self.assertIn('Artigo não encontrado', response.data.decode('utf-8'))
def test_legacy_url_article_detail_no_public_article(self):
"""
Teste da view ``router_legacy_article``, deve retornar uma página de erro (404 not found)
para artigo não público
"""
with current_app.app_context():
utils.makeOneCollection()
journal = utils.makeOneJournal()
issue = utils.makeOneIssue({'journal': journal})
v1_pid = '0101-0101(99)123456'
v2_pid = '1111-11111111111111111'
utils.makeOneArticle({
'title': 'Article Y',
'issue': issue,
'journal': journal,
'is_public': False,
'pid': v2_pid,
'scielo_pids': {
'v1': v1_pid,
'v2': v2_pid,
}
})
url = '%s?pid=%s&lng=en' % (
url_for('main.router_legacy_article', text_or_abstract="fbtext"),
v1_pid
)
response = self.client.get(url)
self.assertStatus(response, 404)
self.assertIn('Artigo não encontrado', response.data.decode('utf-8'))
def test_legacy_url_redirects_to_article_detail_v3(self):
"""
Teste da view ``router_legacy_article``, deve retornar redirecionar
para os detalhes do artigo (main.article_detail_v3)
"""
with current_app.app_context():
utils.makeOneCollection()
journal = utils.makeOneJournal()
issue = utils.makeOneIssue({'journal': journal})
v1_pid = '0101-0101(99)123456'
v2_pid = '1111-11111111111111111'
article = utils.makeOneArticle({
'title': 'Article Y',
'issue': issue,
'journal': journal,
'url_segment': '10-11',
'pid': v2_pid,
'scielo_pids': {
'v1': v1_pid,
'v2': v2_pid,
}
})
url = '%s?pid=%s&lng=en' % (
url_for('main.router_legacy_article', text_or_abstract="fbtext"),
v1_pid
)
response = self.client.get(url)
self.assertRedirects(
response,
url_for(
'main.article_detail_v3',
url_seg=journal.url_segment,
article_pid_v3=article.aid,
),
)
def test_article_detail_v3_without_articles(self):
"""
Teste para avaliar o retorno da ``view function`` ``article_detail_v3``
quando não existe artigos cadastrados deve retornar ``status_code`` 404
e a msg ``Artigo não encontrado``
"""
journal = utils.makeOneJournal()
issue = utils.makeOneIssue({'journal': journal})
response = self.client.get(url_for('main.article_detail_v3',
url_seg=journal.url_segment,
article_pid_v3='unknown-article',
lang='pt'))
self.assertStatus(response, 404)
self.assertIn('Artigo não encontrado', response.data.decode('utf-8'))
def test_article_detail_v3_with_journal_attrib_is_public_false(self):
"""
Teste da ``view function`` ``article_detail_v3`` acessando um artigo
com atributo is_public=True, porém com um periódico com atributo
is_public=False deve retorna uma página com ``status_code`` 404 e msg
cadastrada no atributo ``reason`` do periódico.
"""
unpublish_reason = 'Revista removida da coleção'
journal = utils.makeOneJournal({
'is_public': False,
'unpublish_reason': unpublish_reason})
issue = utils.makeOneIssue({
'is_public': True,
'journal': journal})
article = utils.makeOneArticle({
'issue': issue,
'journal': journal})
response = self.client.get(url_for('main.article_detail_v3',
url_seg=journal.url_segment,
article_pid_v3=article.aid,
lang='pt'))
self.assertStatus(response, 404)
self.assertIn(unpublish_reason, response.data.decode('utf-8'))
def test_article_detail_with_issue_attrib_is_public_false(self):
"""
Teste da ``view function`` ``article_detail_v3`` acessando um artigo
com atributo is_public=False, porém com um periódico com atributo
is_public=True deve retorna uma página com ``status_code`` 404 e msg
cadastrada no atributo ``reason`` do número.
"""
unpublish_reason = 'Facículo rejeitado'
journal = utils.makeOneJournal()
issue = utils.makeOneIssue({
'is_public': False,
'unpublish_reason': unpublish_reason,
'journal': journal.id})
article = utils.makeOneArticle({
'issue': issue.id,
'journal': journal.id})
response = self.client.get(url_for('main.article_detail_v3',
url_seg=journal.url_segment,
article_pid_v3=article.aid,
lang='pt'))
self.assertStatus(response, 404)
self.assertIn(unpublish_reason, response.data.decode('utf-8'))
def test_article_detail_with_article_attrib_is_public_false(self):
"""
Teste da ``view function`` ``article_detail_v3`` acessando um artigo
com atributo is_public=False, deve retorna uma página com
``status_code`` 404 e msg cadastrada no atributo ``reason`` do artigo.
"""
unpublish_reason = 'Artigo com problemas de licença'
journal = utils.makeOneJournal()
issue = utils.makeOneIssue({'journal': journal.id})
article = utils.makeOneArticle({
'is_public': False,
'unpublish_reason': unpublish_reason,
'issue': issue,
'journal': journal})
response = self.client.get(url_for('main.article_detail_v3',
url_seg=journal.url_segment,
article_pid_v3=article.aid,
lang='pt'))
self.assertStatus(response, 404)
    def test_pdf_url(self):
        """PDF links on the article page must be built per language.

        The article registers three PDFs (en, pt, es); the rendered page
        must contain a ``?format=pdf`` link for each of the three
        languages, accepting either query-parameter order.
        """
        with current_app.app_context():
            journal = utils.makeOneJournal({'print_issn': '0000-0000', 'acronym': 'cta'},)
            issue = utils.makeOneIssue({
                'journal': journal.id,
                'label': 'v39s2',
                'year': '2009',
                'volume': '39',
                'number': '1',
                'suppl_text': '',
            })
            # One PDF entry per language; 'pt' is the original language.
            article = utils.makeOneArticle({
                'journal': journal.id,
                'issue': issue.id,
                'elocation': 'e1',
                'original_language': 'pt',
                'languages': ["es", "en"],
                'pdfs': [
                    {
                        'lang': 'en',
                        'url': 'http://minio:9000/documentstore/1678-457X/JDH74Jr4SyDVpnkMyrqkDhF/e5e09c7d5e4e5052868372df837de4e1ee9d651aen.pdf',
                        'file_path': '/pdf/cta/v39s2/0101-2061-cta-fst30618-en.pdf',
                        'type': 'pdf'
                    },
                    {
                        'lang': 'pt',
                        'url': 'http://minio:9000/documentstore/1678-457X/JDH74Jr4SyDVpnkMyrqkDhF/e5e09c7d5e4e5052868372df837de4e1ee9d651apt.pdf',
                        'file_path': '/pdf/cta/v39s2/0101-2061-cta-fst30618-pt.pdf',
                        'type': 'pdf'
                    },
                    {
                        'lang': 'es',
                        'url': 'http://minio:9000/documentstore/1678-457X/JDH74Jr4SyDVpnkMyrqkDhF/e5e09c7d5e4e5052868372df837de4e1ee9d651aes.pdf',
                        'file_path': '/pdf/cta/v39s2/0101-2061-cta-fst30618-es.pdf',
                        'type': 'pdf'
                    }
                ]
            })
            response = self.client.get(url_for('main.article_detail_v3',
                                               url_seg=journal.url_segment,
                                               article_pid_v3=article.aid,
                                               lang='en'), follow_redirects=False)
            self.assertStatus(response, 200)
            self.assertTemplateUsed('article/detail.html')
            content = response.data.decode('utf-8')
            # TODO: is there a cleaner way to express these assertions?
            # Query-string parameter order is not guaranteed, so both
            # orderings of lang/format are accepted for each language.
            self.assertTrue(
                '/j/cta/a/%s/?lang=en&format=pdf' % article.aid in content or '/j/cta/a/%s/?format=pdf&lang=en' % article.aid in content
            )
            self.assertTrue(
                '/j/cta/a/%s/?lang=pt&format=pdf' % article.aid in content or '/j/cta/a/%s/?format=pdf&lang=pt' % article.aid in content
            )
            self.assertTrue(
                '/j/cta/a/%s/?lang=es&format=pdf' % article.aid in content or '/j/cta/a/%s/?format=pdf&lang=es' % article.aid in content
            )
    def test_pdf_url_redirects_to_original_language(self):
        """Requesting ``format=pdf`` in a language with no PDF must 301.

        The article only has en/pt/es PDFs; asking for 'ru' must answer a
        301 redirect (presumably to an available language — TODO confirm
        the redirect target).
        """
        with current_app.app_context():
            journal = utils.makeOneJournal({'print_issn': '0000-0000', 'acronym': 'cta'},)
            issue = utils.makeOneIssue({
                'journal': journal.id,
                'label': 'v39s2',
                'year': '2009',
                'volume': '39',
                'number': '1',
                'suppl_text': '',
            })
            article = utils.makeOneArticle({
                'journal': journal.id,
                'issue': issue.id,
                'elocation': 'e1',
                'pdfs': [
                    {
                        'lang': 'en',
                        'url': 'http://minio:9000/documentstore/1678-457X/JDH74Jr4SyDVpnkMyrqkDhF/e5e09c7d5e4e5052868372df837de4e1ee9d651aen.pdf',
                        'file_path': '/pdf/cta/v39s2/0101-2061-cta-fst30618-en.pdf',
                        'type': 'pdf'
                    },
                    {
                        'lang': 'pt',
                        'url': 'http://minio:9000/documentstore/1678-457X/JDH74Jr4SyDVpnkMyrqkDhF/e5e09c7d5e4e5052868372df837de4e1ee9d651apt.pdf',
                        'file_path': '/pdf/cta/v39s2/0101-2061-cta-fst30618-pt.pdf',
                        'type': 'pdf'
                    },
                    {
                        'lang': 'es',
                        'url': 'http://minio:9000/documentstore/1678-457X/JDH74Jr4SyDVpnkMyrqkDhF/e5e09c7d5e4e5052868372df837de4e1ee9d651aes.pdf',
                        'file_path': '/pdf/cta/v39s2/0101-2061-cta-fst30618-es.pdf',
                        'type': 'pdf'
                    }
                ]
            })
            # follow_redirects=False so the 301 itself is observable.
            response = self.client.get(url_for('main.article_detail_v3',
                                               url_seg=journal.url_segment,
                                               article_pid_v3=article.aid,
                                               format='pdf',
                                               lang='ru'), follow_redirects=False)
            self.assertStatus(response, 301)
    @patch("webapp.main.views.fetch_data")
    def test_xml_url_redirect_to_xml_with_original_language(self, mk_fetch_data):
        """``?format=xml&lang=<original language>`` must serve the XML.

        NOTE(review): despite the name, no redirect is asserted here —
        the request uses the article's original language ('pt') directly
        and expects 200 with the fixture bytes; confirm the intent.
        """
        test_xml_path = pathlib.Path("opac/tests/fixtures/document.xml")
        # fetch_data is mocked to return the fixture file's bytes.
        mk_fetch_data.return_value = test_xml_path.read_bytes()
        with current_app.app_context():
            journal = utils.makeOneJournal({'print_issn': '0000-0000', 'acronym': 'cta'},)
            issue = utils.makeOneIssue({
                'journal': journal.id,
                'label': 'v39s2',
                'year': '2009',
                'volume': '39',
                'number': '1',
                'suppl_text': '',
            })
            article = utils.makeOneArticle({
                'journal': journal.id,
                'issue': issue.id,
                'elocation': 'e1',
                'original_language': 'pt',
                'languages': ["es", "en", "pt"],
                'xml': "https://kernel:6543/documents/kSiec9encE0f2dp"
            })
            response = self.client.get(url_for('main.article_detail_v3',
                                               url_seg=journal.url_segment,
                                               article_pid_v3=article.aid,
                                               format='xml',
                                               lang="pt"))
            self.assertStatus(response, 200)
            # The response body is exactly the (mocked) fetched XML.
            self.assertEqual(test_xml_path.read_bytes(), response.data)
@patch("webapp.main.views.fetch_data")
def test_xml_ok(self, mk_fetch_data):
"""
Testa se retorna XML para ``format=xml``.
Formato da URL para o teste: ``/j/<acron>/a/<article_pid_v3>/?format=xml``
"""
test_xml_path = pathlib.Path("opac/tests/fixtures/document.xml")
mk_fetch_data.return_value = test_xml_path.read_bytes()
with current_app.app_context():
journal = utils.makeOneJournal({'print_issn': '0000-0000', 'acronym': 'cta'},)
issue = utils.makeOneIssue({
'journal': journal.id,
'label': 'v39s2',
'year': '2009',
'volume': '39',
'number': '1',
'suppl_text': '',
})
article = utils.makeOneArticle({
'journal': journal.id,
'issue': issue.id,
'elocation': 'e1',
'original_language': 'pt',
'languages': ["es", "en"],
'xml': "https://kernel:6543/documents/kSiec9encE0f2dp"
})
response = self.client.get(url_for('main.article_detail_v3',
url_seg=journal.url_segment,
article_pid_v3=article.aid,
format='xml'))
self.assertStatus(response, 200)
self.assertEqual(test_xml_path.read_bytes(), response.data)
@patch("webapp.main.views.render_html")
def test_when_render_html_raises_a_non_retryable_error_it_should_return_a_status_code_404(
self, mk_render_html
):
mk_render_html.side_effect = NonRetryableError
with current_app.app_context():
utils.makeOneCollection()
journal = utils.makeOneJournal()
issue = utils.makeOneIssue({"journal": journal})
article = utils.makeOneArticle(
{
"title": "A",
"original_language": "en",
"issue": issue,
"journal": journal,
"url_segment": "10",
}
)
response = self.client.get(
url_for(
"main.article_detail_v3",
url_seg=journal.url_segment,
article_pid_v3=article.aid,
)
)
self.assertStatus(response, 404)
@patch("webapp.main.views.render_html")
def test_when_render_html_raises_a_retryable_error_the_article_detail_v3_should_return_a_status_code_500(
self, mk_render_html
):
mk_render_html.side_effect = RetryableError
with current_app.app_context():
utils.makeOneCollection()
journal = utils.makeOneJournal()
issue = utils.makeOneIssue({"journal": journal})
article = utils.makeOneArticle(
{
"title": "A",
"original_language": "en",
"issue": issue,
"journal": journal,
"url_segment": "10",
}
)
response = self.client.get(
url_for(
"main.article_detail_v3",
url_seg=journal.url_segment,
article_pid_v3=article.aid,
)
)
self.assertStatus(response, 500)
    @patch("webapp.main.views.fetch_data")
    def test_when_fetch_data_raises_a_retryable_error_the_article_detail_v3_should_return_a_500_status_code(
        self, mk_fetch_data
    ):
        """A RetryableError while fetching the PDF must map to 500."""
        mk_fetch_data.side_effect = RetryableError
        with current_app.app_context():
            utils.makeOneCollection()
            journal = utils.makeOneJournal()
            issue = utils.makeOneIssue({"journal": journal})
            # Register one English PDF so the pdf route actually calls
            # fetch_data (which is mocked to fail).
            article = utils.makeOneArticle(
                {
                    "title": "A",
                    "original_language": "en",
                    "issue": issue,
                    "journal": journal,
                    "url_segment": "10",
                    "pdfs": [
                        {
                            "lang": "en",
                            "url": "http://minio:9000/documentstore/1678-457X/JDH74Jr4SyDVpnkMyrqkDhF/e5e09c7d5e4e5052868372df837de4e1ee9d651aen.pdf",
                            "file_path": "/pdf/cta/v39s2/0101-2061-cta-fst30618-en.pdf",
                            "type": "pdf"
                        },
                    ]
                }
            )
            response = self.client.get(
                url_for(
                    "main.article_detail_v3",
                    url_seg=journal.url_segment,
                    article_pid_v3=article.aid,
                    format="pdf",
                    lang="en",
                ),
                follow_redirects=False
            )
            self.assertStatus(response, 500)
# HOMEPAGE
def test_collection_sponsors_at_homepage(self):
"""
acessar na homepage deve mostrar os sponsors no rodapé
"""
# with
with current_app.app_context():
collection = utils.makeOneCollection()
sponsor1 = utils.makeOneSponsor(
{
'order': 0,
'name': 'spo1',
'url': 'http://sponsor1.com',
'logo_url': 'http://sponsor1.com/logo1.png'
}
)
sponsor2 = utils.makeOneSponsor(
{
'order': 1,
'name': 'spo2',
'url': 'http://sponsor2.com',
'logo_url': 'http://sponsor2.com/logo1.png'
}
)
sponsor3 = utils.makeOneSponsor(
{
'order': 2,
'name': 'spo3',
'url': 'http://sponsor2.com',
'logo_url': 'http://sponsor2.com/logo1.png'
}
)
collection.sponsors = [
sponsor1,
sponsor2,
sponsor3,
]
collection.save()
# when
response = self.client.get(url_for('main.index'))
# then
self.assertStatus(response, 200)
self.assertIn('<div class="partners">', response.data.decode('utf-8'))
self.assertIn('"/about/"', response.data.decode('utf-8'))
self.assertNotIn(
'/collection/about/', response.data.decode('utf-8'))
for sponsor in [sponsor1, sponsor2, sponsor3]:
self.assertIn(sponsor.name, response.data.decode('utf-8'))
self.assertIn(sponsor.url, response.data.decode('utf-8'))
self.assertIn(sponsor.logo_url, response.data.decode('utf-8'))
def test_collection_address_at_homepage_footer(self):
"""
acessar na homepage deve mostrar o endereço da coleção
"""
# with
with current_app.app_context():
collection_data = {
'address1': 'foo address',
'address2': 'foo address',
}
collection = utils.makeOneCollection(attrib=collection_data)
# when
response = self.client.get(url_for('main.index'))
# then
self.assertStatus(response, 200)
self.assertIn(collection['address1'], response.data.decode('utf-8'))
self.assertIn(collection['address2'], response.data.decode('utf-8'))
def test_collection_address_at_about_page_footer(self):
"""
acessar na pagina Sobre o SciELO deve mostrar o endereço da coleção
"""
# with
with current_app.app_context():
collection_data = {
'address1': 'foo address',
'address2': 'foo address',
}
collection = utils.makeOneCollection(attrib=collection_data)
# when
response = self.client.get(url_for('main.about_collection'))
# then
self.assertStatus(response, 200)
self.assertIn(collection['address1'], response.data.decode('utf-8'))
self.assertIn(collection['address2'], response.data.decode('utf-8'))
def test_collection_address_at_journal_list_page_footer(self):
"""
acessar na pagina Alfabética deve mostrar o endereço da coleção
"""
# with
with current_app.app_context():
collection_data = {
'address1': 'foo address',
'address2': 'foo address',
}
collection = utils.makeOneCollection(attrib=collection_data)
# when
response = self.client.get(url_for('main.collection_list'))
# then
self.assertStatus(response, 200)
self.assertIn(collection['address1'], response.data.decode('utf-8'))
self.assertIn(collection['address2'], response.data.decode('utf-8'))
    def test_home_page_last_issues(self):
        """The last-issue row template must render each journal's latest
        issue labels (year always; volume/number/supplement only when the
        issue defines them).
        """
        with current_app.app_context():
            utils.makeOneCollection()
            # Four issue shapes: volume+number, volume+supplement,
            # volume only, number only.
            issues = [
                {'volume': '2', 'number': '5B', 'year': '2011'},
                {'volume': '12', 'suppl_text': 'suppl', 'year': '2015'},
                {'volume': '23', 'year': '2016'},
                {'number': '43', 'year': '2017'},
            ]
            journals = utils.makeAnyJournal(items=len(issues))
            # Pair journals and issue specs positionally and attach each
            # issue as its journal's last_issue.
            for journal, _issue in zip(journals, issues):
                _issue.update({'journal': journal})
                journal.last_issue = utils.makeOneIssue(_issue)
            for journal, expected_issue in zip(journals, issues):
                context = {
                    'journal': journal
                }
                response_data = render_template(
                    "news/includes/issue_last_row.html",
                    **context)
                self.assertIn(
                    'Ano: </strong><b>{}'.format(
                        expected_issue.get('year')),
                    response_data)
                # Each label must appear iff the matching field was set
                # on the issue spec (fields/labels are aligned by index).
                fields = ['volume', 'number', 'suppl_text']
                labels = ['Volume', 'Número', 'Suplemento']
                for label, field in zip(labels, fields):
                    value = expected_issue.get(field)
                    if value is None:
                        assert_function = self.assertNotIn
                    else:
                        assert_function = self.assertIn
                    assert_function(
                        '{}: </strong><b>{}'.format(label, value),
                        response_data)
def test_get_robots_txt_file(self):
"""
Teste de acesso ao arquivo robots.txt.
"""
with current_app.app_context():
utils.makeOneCollection()
with self.client as c:
response = c.get('/robots.txt')
self.assertStatus(response, 200)
self.assertIn('User-agent: *', response.data.decode('utf-8'))
self.assertIn('Disallow: /', response.data.decode('utf-8'))
class PageTestCase(BaseTestCase):
    """Tests for the static pages served under ``main.about_collection``."""

    def test_pages_list(self):
        """With several pages registered, the about view must render
        ``collection/about.html`` listing only the pt_BR pages."""
        utils.makeOneCollection()
        page_specs = [
            ('Criterios SciELO', 'es_ES'),
            ('Critérios SciELO', 'pt_BR'),
            ('FAQ SciELO', 'pt_BR'),
            ('Equipe SciELO', 'pt_BR'),
        ]
        pages = [
            utils.makeOnePage({'name': name, 'language': language})
            for name, language in page_specs
        ]

        response = self.client.get(url_for('main.about_collection'))

        self.assertStatus(response, 200)
        self.assertTemplateUsed('collection/about.html')
        body = response.data.decode('utf-8')
        for page in pages:
            if page.language == 'pt_BR':
                self.assertIn('/about/%s' % (page.slug_name), body)
        # Only the pt_BR pages (everything after the first, es_ES, one)
        # must show up in the ``pages`` context variable.
        self.assertListEqual(
            sorted(page.slug_name for page in pages[1:]),
            sorted(page.slug_name
                   for page in self.get_context_variable('pages')))

    def test_page(self):
        """Requesting a page by slug must render it with
        ``collection/about.html``."""
        with current_app.app_context():
            utils.makeOneCollection()
            page = utils.makeOnePage(
                {'name': 'Critérios SciELO', 'language': 'pt_BR'})

            response = self.client.get(
                url_for('main.about_collection', slug_name=page.slug_name))

            self.assertEqual(200, response.status_code)
            self.assertTemplateUsed('collection/about.html')
            body = response.data.decode('utf-8')
            self.assertIn('Critérios SciELO', body)
            self.assertIn('"/about/"', body)
            self.assertEqual(
                self.get_context_variable('page').slug_name, page.slug_name)

    def test_page_with_unknown_name(self):
        """An unknown slug must answer 404."""
        with current_app.app_context():
            utils.makeOneCollection()
            response = self.client.get(
                url_for('main.about_collection',
                        slug_name='xxjfsfadfa0k2qhs8slwnui8'))
            self.assertStatus(response, 404)
class TestJournaDetail(BaseTestCase):
    """Tests for the journal detail view and its legacy URL redirects.

    Fixes: four assertions were ``self.assertTrue(<code>, response.status_code)``,
    which always pass because a non-zero integer is truthy — the status
    code was never actually checked.
    """
    # JOURNAL

    def test_journal_detail(self):
        """``journal_detail`` must render ``journal/detail.html`` with the
        journal title in the body."""
        with current_app.app_context():
            utils.makeOneCollection()
            journal = utils.makeOneJournal({'title': 'Revista X'})
            response = self.client.get(url_for('main.journal_detail',
                                       url_seg=journal.url_segment))
            # BUGFIX: was assertTrue(200, status_code) — a no-op check.
            self.assertStatus(response, 200)
            self.assertTemplateUsed('journal/detail.html')
            self.assertIn('Revista X',
                          response.data.decode('utf-8'))
            self.assertEqual(self.get_context_variable('journal').id, journal.id)

    def test_journal_detail_legacy_url(self):
        """The legacy ``/journal/<acron>`` URL must answer a 301 redirect."""
        with current_app.app_context():
            utils.makeOneCollection()
            # The journal only needs to exist; the binding was unused.
            utils.makeOneJournal({'title': 'Revista X'})
            response = self.client.get("/journal/acron")
            # BUGFIX: was assertTrue(301, status_code) — a no-op check.
            # TODO confirm the route 301s even for an unregistered acronym.
            self.assertEqual(301, response.status_code)

    def test_journal_detail_url_journal_acron(self):
        """The ``/<acron>`` shortcut URL must answer a 301 redirect."""
        with current_app.app_context():
            utils.makeOneCollection()
            utils.makeOneJournal({'title': 'Revista X'})
            response = self.client.get("/acron")
            # BUGFIX: was assertTrue(301, status_code) — a no-op check.
            # TODO confirm the route 301s even for an unregistered acronym.
            self.assertEqual(301, response.status_code)

    def test_journal_detail_legacy_url_follow_redirect(self):
        """Following the legacy URL redirect must land on the journal
        page rendered with ``journal/detail.html``."""
        with current_app.app_context():
            utils.makeOneCollection()
            journal = utils.makeOneJournal({'title': 'Revista X'})
            response = self.client.get(
                url_for(
                    'main.journal_detail_legacy_url', journal_seg=journal.url_segment),
                follow_redirects=True)
            # BUGFIX: was assertTrue(200, status_code) — a no-op check.
            self.assertStatus(response, 200)
            self.assertTemplateUsed('journal/detail.html')
            self.assertIn('Revista X',
                          response.data.decode('utf-8'))
            self.assertEqual(self.get_context_variable('journal').id, journal.id)

    def test_journal_detail_with_unknow_id(self):
        """An unknown journal segment must answer 404 with the
        "journal not found" message."""
        utils.makeAnyJournal(items=6)
        unknow_url_seg = '0k2qhs8slwnui8'
        response = self.client.get(url_for('main.journal_detail',
                                           url_seg=unknow_url_seg))
        self.assertStatus(response, 404)
        self.assertIn('Periódico não encontrado',
                      response.data.decode('utf-8'))

    def test_journal_detail_with_attrib_is_public_false(self):
        """A non-public journal must answer 404 showing its
        ``unpublish_reason``."""
        unpublish_reason = 'plágio'
        journal = utils.makeOneJournal({
            'is_public': False,
            'unpublish_reason': unpublish_reason})
        response = self.client.get(url_for('main.journal_detail',
                                           url_seg=journal.url_segment))
        self.assertStatus(response, 404)
        self.assertIn(unpublish_reason, response.data.decode('utf-8'))
class TestJournalGrid(BaseTestCase):
    """Tests for the journal issue-grid view.

    Fixes: a loop that repeated one identical assertion per issue is
    collapsed to a single assertion, and an unused local binding is
    removed; behavior of the assertions is unchanged.
    """

    def test_issue_grid(self):
        """``issue_grid`` must answer 200 and render ``issue/grid.html``
        for a journal with public issues."""
        with current_app.app_context():
            utils.makeOneCollection()
            journal = utils.makeOneJournal()
            # Issues only need to exist in the database here.
            utils.makeAnyIssue(attrib={'journal': journal.id})
            response = self.client.get(url_for('main.issue_grid',
                                       url_seg=journal.url_segment))
            self.assertStatus(response, 200)
            self.assertTemplateUsed('issue/grid.html')
            # FIX: the original looped over all issues asserting this same
            # constant each time; one check is equivalent.
            # NOTE(review): a per-issue link assertion may have been the
            # original intent — confirm.
            self.assertIn('/journal_acron', response.data.decode('utf-8'))

    def test_issue_grid_without_issues(self):
        """With no issues registered the grid must still answer 200 and
        show the "no issues found" message."""
        with current_app.app_context():
            utils.makeOneCollection()
            journal = utils.makeOneJournal()
            response = self.client.get(
                url_for('main.issue_grid', url_seg=journal.url_segment))
            self.assertStatus(response, 200)
            self.assertTemplateUsed('issue/grid.html')
            self.assertIn('Nenhum número encontrado para esse periódico',
                          response.data.decode('utf-8'))

    def test_issue_grid_with_unknow_journal_id(self):
        """An unknown journal segment must answer 404 with the
        "journal not found" message."""
        journal = utils.makeOneJournal()
        utils.makeAnyIssue(attrib={'journal': journal.id})
        unknow_url_seg = '9km2g78o2mnu7'
        response = self.client.get(
            url_for('main.issue_grid', url_seg=unknow_url_seg))
        self.assertStatus(response, 404)
        self.assertIn('Periódico não encontrado',
                      response.data.decode('utf-8'))

    def test_issue_grid_with_attrib_is_public_false(self):
        """A non-public journal must answer 404 showing its
        ``unpublish_reason``."""
        unpublish_reason = 'Problema de Direito Autoral'
        journal = utils.makeOneJournal({'is_public': False,
                                        'unpublish_reason': unpublish_reason})
        response = self.client.get(url_for('main.issue_grid',
                                           url_seg=journal.url_segment))
        self.assertStatus(response, 404)
        self.assertIn(unpublish_reason, response.data.decode('utf-8'))

    def test_issue_grid_legacy_redirects(self):
        """The legacy ``/grid/<acron>`` URL must answer a 301 redirect."""
        with current_app.app_context():
            utils.makeOneCollection()
            journal = utils.makeOneJournal()
            # FIX: the issues binding was unused; keep the call (side effect).
            utils.makeAnyIssue(attrib={'journal': journal.id})
            response = self.client.get('/grid/{}'.format(journal.url_segment))
            self.assertStatus(response, 301)

    def test_issue_grid_social_meta_tags(self):
        """The grid page must carry the social-network share meta tags."""
        with current_app.app_context():
            utils.makeOneCollection()
            journal = utils.makeOneJournal({'title': 'Social Meta tags'})
            response = self.client.get(
                url_for('main.issue_grid', url_seg=journal.url_segment))
            self.assertStatus(response, 200)
            self.assertTemplateUsed('issue/grid.html')
            body = response.data.decode('utf-8')
            self.assertIn('<meta property="og:url" content="http://0.0.0.0:8000/j/journal_acron/grid" />', body)
            self.assertIn('<meta property="og:type" content="website" />', body)
            self.assertIn('<meta property="og:title" content="Social Meta tags" />', body)
            self.assertIn('<meta property="og:description" content="Esse periódico tem com objetivo xpto" />', body)
            self.assertIn('<meta property="og:image" content="http://0.0.0.0:8000/None" />', body)
class TestIssueToc(BaseTestCase):
def test_issue_toc(self):
"""
Teste da ``view function`` ``issue_toc`` acessando a página do número,
deve retorna status_code 200 e o template ``issue/toc.html``.
"""
with current_app.app_context():
utils.makeOneCollection()
journal = utils.makeOneJournal()
issue = utils.makeOneIssue({'number': '31',
'volume': '10',
'journal': journal})
response = self.client.get(url_for('main.issue_toc',
url_seg=journal.url_segment,
url_seg_issue=issue.url_segment))
self.assertStatus(response, 200)
self.assertTemplateUsed('issue/toc.html')
# self.assertIn(u'Vol. 10 No. 31', response.data.decode('utf-8'))
self.assertEqual(self.get_context_variable('issue').id, issue.id)
def test_issue_toc_unknow_issue_id(self):
"""
Teste para avaliar o retorno da ``view function`` ``issue_toc``
quando é acessado utilizando um identificador do issue desconhecido,
deve retorna status_code 404 com a msg ``Número não encontrado``.
"""
journal = utils.makeOneJournal()
utils.makeOneIssue({'journal': journal})
unknow_url_seg = '2014.v3n2'
unknow_url = url_for(
'main.issue_toc',
url_seg=journal.url_segment,
url_seg_issue=unknow_url_seg)
response = self.client.get(unknow_url)
self.assertStatus(response, 404)
self.assertIn('Número não encontrado', response.data.decode('utf-8'))
def test_issue_toc_with_attrib_is_public_false(self):
"""
Teste da ``view function`` ``issue_toc`` acessando um número
com atributo is_public=False, deve retorna uma página com ``status_code``
404 e msg cadastrada no atributo ``reason``.
"""
unpublish_reason = 'Número incorreto'
journal = utils.makeOneJournal()
issue = utils.makeOneIssue({
'is_public': False,
'unpublish_reason': unpublish_reason,
'journal': journal})
response = self.client.get(url_for('main.issue_toc',
url_seg=journal.url_segment,
url_seg_issue=issue.url_segment))
self.assertStatus(response, 404)
self.assertIn(unpublish_reason, response.data.decode('utf-8'))
def test_issue_toc_with_journal_attrib_is_public_false(self):
"""
Teste da ``view function`` ``issue_toc`` acessando um número
com atributo is_public=True, porém com um periódico com atributo
is_public=False deve retorna uma página com ``status_code`` 404 e msg
cadastrada no atributo ``reason`` do periódico.
"""
unpublish_reason = 'Revista removida da coleção'
journal = utils.makeOneJournal({
'is_public': False,
'unpublish_reason': unpublish_reason})
issue = utils.makeOneIssue({
'is_public': True,
'journal': journal.id})
response = self.client.get(url_for('main.issue_toc',
url_seg=journal.url_segment,
url_seg_issue=issue.url_segment))
self.assertStatus(response, 404)
self.assertIn(unpublish_reason, response.data.decode('utf-8'))
def test_issue_toc_legacy_redirects_to_issue_toc(self):
"""
Teste da ``view function`` ``issue_toc`` acessando a página do número,
deve retorna status_code 200 e o template ``issue/toc.html``.
"""
with current_app.app_context():
utils.makeOneCollection()
journal = utils.makeOneJournal()
issue = utils.makeOneIssue({'number': '31',
'volume': '10',
'journal': journal})
response = self.client.get(url_for('main.issue_toc_legacy',
url_seg=journal.url_segment,
url_seg_issue=issue.url_segment))
self.assertStatus(response, 301)
def test_issue_toc_legacy_redirects_to_aop_toc(self):
"""
Teste da ``view function`` ``issue_toc`` acessando a página do número,
deve retorna status_code 200 e o template ``issue/toc.html``.
"""
with current_app.app_context():
utils.makeOneCollection()
journal = utils.makeOneJournal()
issue = utils.makeOneIssue({'number': 'ahead',
'type': 'ahead',
'journal': journal})
response = self.client.get(url_for('main.issue_toc_legacy',
url_seg=journal.url_segment,
url_seg_issue=issue.url_segment))
self.assertStatus(response, 301)
self.assertRedirects(
response,
url_for(
'main.aop_toc',
url_seg=journal.url_segment
),
)
def test_issue_toc_redirects_to_aop_toc(self):
"""
Teste da ``view function`` ``issue_toc`` acessando a página do número,
deve retorna status_code 200 e o template ``issue/toc.html``.
"""
with current_app.app_context():
utils.makeOneCollection()
journal = utils.makeOneJournal()
issue = utils.makeOneIssue({'number': 'ahead',
'type': 'ahead',
'journal': journal})
response = self.client.get(url_for('main.issue_toc',
url_seg=journal.url_segment,
url_seg_issue=issue.url_segment))
self.assertStatus(response, 301)
self.assertRedirects(
response,
url_for(
'main.aop_toc',
url_seg=journal.url_segment
),
)
def test_issue_toc_social_meta_tags(self):
"""
Teste para verificar a página da TOC do periódico apresenta as
tags de compartilhamento com redes sociais.
"""
with current_app.app_context():
utils.makeOneCollection()
journal = utils.makeOneJournal({'title': 'Social Meta tags'})
issue = utils.makeOneIssue({'number': '31',
'volume': '10',
'journal': journal})
response = self.client.get(url_for('main.issue_toc',
url_seg=journal.url_segment,
url_seg_issue=issue.url_segment))
self.assertStatus(response, 200)
self.assertTemplateUsed('issue/toc.html')
self.assertIn(
'<meta property="og:url" content="http://0.0.0.0:8000/j/journal_acron/i/2021.v10n31supplX/" />', response.data.decode('utf-8'))
self.assertIn('<meta property="og:type" content="website" />', response.data.decode('utf-8'))
self.assertIn('<meta property="og:title" content="Social Meta tags" />', response.data.decode('utf-8'))
self.assertIn('<meta property="og:description" content="Esse periódico tem com objetivo xpto" />', response.data.decode('utf-8'))
self.assertIn('<meta property="og:image" content="http://0.0.0.0:8000/None" />', response.data.decode('utf-8'))
class TestAOPToc(BaseTestCase):
def test_aop_toc_returns_one_aop_with_one_article(self):
"""
Teste da ``view function`` ``aop_toc`` acessando a página do número,
deve retornar status_code 200 e o template ``issue/toc.html``.
"""
with current_app.app_context():
utils.makeOneCollection()
journal = utils.makeOneJournal()
issue = utils.makeOneIssue(
{
'number': 'ahead',
'journal': journal,
'type': 'ahead',
}
)
article = utils.makeOneArticle(
{
'title': 'Article Y',
'original_language': 'en',
'languages': ['es', 'pt'],
'translated_titles': [
{'language': 'es', 'name': u'Artículo en español'},
{'language': 'pt', 'name': u'Artigo en Português'},
],
'issue': issue,
'journal': journal,
'url_segment': 'ahead'
}
)
url = url_for('main.aop_toc', url_seg=journal.url_segment)
response = self.client.get(url)
self.assertStatus(response, 200)
self.assertEqual(
len(self.get_context_variable('articles')),
1
)
self.assertTemplateUsed('issue/toc.html')
def test_aop_toc_returns_not_found_because_of_there_is_no_aop(self):
"""
Teste da ``view function`` ``aop_toc`` acessando a página do número,
deve retornar status_code 404.
"""
with current_app.app_context():
utils.makeOneCollection()
journal = utils.makeOneJournal()
url = url_for('main.aop_toc', url_seg=journal.url_segment)
response = self.client.get(url)
self.assertStatus(response, 404)
def test_aop_toc_returns_not_found_because_of_not_published_articles(self):
"""
Teste da ``view function`` ``aop_toc`` acessando a página do número,
deve retornar status_code 404.
"""
with current_app.app_context():
utils.makeOneCollection()
journal = utils.makeOneJournal()
issue = utils.makeOneIssue(
{
'number': 'ahead',
'journal': journal,
'type': 'ahead',
}
)
article = utils.makeOneArticle(
{
'issue': issue,
'journal': journal,
'is_public': False,
}
)
url = url_for('main.aop_toc', url_seg=journal.url_segment)
response = self.client.get(url)
self.assertStatus(response, 404)
def test_aop_toc_returns_not_found_because_of_not_published_aop(self):
"""
Teste da ``view function`` ``aop_toc`` acessando a página do número,
deve retornar status_code 404.
"""
with current_app.app_context():
utils.makeOneCollection()
journal = utils.makeOneJournal()
issue = utils.makeOneIssue(
{
'number': 'ahead',
'journal': journal,
'type': 'ahead',
'is_public': False,
}
)
url = url_for('main.aop_toc', url_seg=journal.url_segment)
response = self.client.get(url)
self.assertStatus(response, 404)
def test_aop_toc_returns_not_found_because_of_aop_has_no_article(self):
"""
Teste da ``view function`` ``aop_toc`` acessando a página do número,
deve retornar status_code 404.
"""
with current_app.app_context():
utils.makeOneCollection()
journal = utils.makeOneJournal()
issue = utils.makeOneIssue(
{
'number': 'ahead',
'journal': journal,
'type': 'ahead',
}
)
url = url_for('main.aop_toc', url_seg=journal.url_segment)
response = self.client.get(url)
self.assertStatus(response, 404)
def test_aop_toc_returns_not_found_because_of_journal_is_not_public(self):
"""
Teste da ``view function`` ``aop_toc`` acessando a página do número,
deve retornar status_code 404.
"""
with current_app.app_context():
utils.makeOneCollection()
journal = utils.makeOneJournal({'is_public': False})
issue = utils.makeOneIssue(
{
'number': 'ahead',
'journal': journal,
'type': 'ahead',
}
)
url = url_for('main.aop_toc', url_seg=journal.url_segment)
response = self.client.get(url)
self.assertStatus(response, 404)
class TestArticleDetailV3Meta(BaseTestCase):
def test_article_detail_v3_creates_meta_citation_pdf_url_only_for_the_selected_lang(self):
"""
Teste se ``view function`` ``article_detail_v3``,
cria a tag meta cujo name="citation_pdf_url" e conteúdo do endereço do
pdf no padrão
https://website/j/acron/a/pidv3/?format=pdf&lang=idioma_selecionado
`<meta name="citation_pdf_url"
content="https://website/j/acron/a/pidv3/?format=pdf&lang=idioma_selecionado"/>`
Verifica na view se o valor da variável FORCE_USE_HTTPS_GOOGLE_TAGS é True ou False,
no caso de True monta a URL para o PDF sempre com protocolo https, em caso de False
monta a URL com o ``scheme`` obtido pelo urlparsed.scheme.
FORCE_USE_HTTPS_GOOGLE_TAGS is False in testting.template
"""
with current_app.test_request_context() as context:
utils.makeOneCollection()
journal = utils.makeOneJournal()
issue = utils.makeOneIssue({'journal': journal})
article = utils.makeOneArticle({
'title': 'Article Y',
'original_language': 'en',
'languages': ['es', 'pt', 'en'],
'pdfs': [{
'lang': 'en',
'url': 'http://minio:9000/documentstore/1678-457X/JDH74Jr4SyDVpnkMyrqkDhF/e5e09c7d5e4e5052868372df837de4e1ee9d651aen.pdf',
'file_path': '/pdf/cta/v39s2/0101-2061-cta-fst30618-en.pdf',
'type': 'pdf'
},
{
'lang': 'pt',
'url': 'http://minio:9000/documentstore/1678-457X/JDH74Jr4SyDVpnkMyrqkDhF/e5e09c7d5e4e5052868372df837de4e1ee9d651apt.pdf',
'file_path': '/pdf/cta/v39s2/0101-2061-cta-fst30618-pt.pdf',
'type': 'pdf'
},
{
'lang': 'es',
'url': 'http://minio:9000/documentstore/1678-457X/JDH74Jr4SyDVpnkMyrqkDhF/e5e09c7d5e4e5052868372df837de4e1ee9d651aes.pdf',
'file_path': '/pdf/cta/v39s2/0101-2061-cta-fst30618-es.pdf',
'type': 'pdf'
}
],
'issue': issue,
'journal': journal,
'url_segment': '10-11'
})
response = self.client.get(
url_for(
'main.article_detail_v3',
url_seg=journal.url_segment,
article_pid_v3=article.aid,
lang="es"
)
)
self.assertStatus(response, 200)
content = response.data.decode('utf-8')
soup = BeautifulSoup(content, 'html.parser')
meta_tags = soup.find_all(attrs={"name": "citation_pdf_url"})
self.assertEqual(len(meta_tags), 1)
content_url = urlparse(meta_tags[0].get("content"))
self.assertEqual(
"{}://{}/".format(content_url.scheme,
content_url.netloc),
context.request.url_root
)
self.assertEqual(
content_url.path, "/j/journal_acron/a/{}/".format(article.aid)
)
self.assertEqual(
parse_qs(content_url.query), {'format': ['pdf'], 'lang': ['es']}
)
def test_article_detail_v3_creates_meta_citation_xml_url(self):
"""
Teste se ``view function`` ``article_detail_v3``,
cria a tag meta cujo name="citation_xml_url" e conteúdo do endereço do
pdf no padrão
https://website/j/acron/a/pidv3/?format=xml
`<meta name="citation_xml_url"
content="https://website/j/acron/a/pidv3/?format=xml"/>`
Verifica na view se o valor da variável FORCE_USE_HTTPS_GOOGLE_TAGS é True ou False,
no caso de True monta a URL para o XML sempre com protocolo https, em caso de False
monta a URL com o ``scheme`` obtido pelo urlparsed.scheme.
FORCE_USE_HTTPS_GOOGLE_TAGS is False in testting.template
"""
with current_app.test_request_context() as context:
utils.makeOneCollection()
journal = utils.makeOneJournal()
issue = utils.makeOneIssue({'journal': journal})
article = utils.makeOneArticle({
'title': 'Article Y',
'original_language': 'en',
'languages': ['es', 'pt', 'en'],
'issue': issue,
'journal': journal,
'url_segment': '10-11'
})
response = self.client.get(
url_for(
'main.article_detail_v3',
url_seg=journal.url_segment,
article_pid_v3=article.aid,
)
)
self.assertStatus(response, 200)
content = response.data.decode('utf-8')
soup = BeautifulSoup(content, 'html.parser')
meta_tags = soup.find_all(attrs={"name": "citation_xml_url"})
self.assertEqual(len(meta_tags), 1)
content_url = urlparse(meta_tags[0].get("content"))
self.assertEqual(
"{}://{}/".format(content_url.scheme, content_url.netloc),
context.request.url_root
)
self.assertEqual(
content_url.path, "/j/journal_acron/a/{}/".format(article.aid)
)
self.assertEqual(
parse_qs(content_url.query), {'format': ['xml'], 'lang': ['en']}
)
def test_article_detail_v3_social_meta_tags(self):
"""
Teste para verificar a página do artigo apresenta as tags de compartilhamento
com redes sociais.
"""
with current_app.test_request_context() as context:
utils.makeOneCollection({ 'acronym': "DUMMY_TEST2" })
journal = utils.makeOneJournal()
issue = utils.makeOneIssue({'journal': journal})
article = utils.makeOneArticle({
'title': 'Article Y',
'original_language': 'en',
'languages': ['es', 'pt', 'en'],
'issue': issue,
'journal': journal,
'url_segment': '10-11'
})
response = self.client.get(
url_for(
'main.article_detail_v3',
url_seg=journal.url_segment,
article_pid_v3=article.aid,
)
)
self.assertStatus(response, 200)
content = response.data.decode('utf-8')
self.assertIn(
'<meta property="og:url" content="http://0.0.0.0:8000/j/journal_acron/a/%s/"/>' % article.aid, response.data.decode('utf-8'))
self.assertIn('<meta property="og:type" content="article"/>', response.data.decode('utf-8'))
self.assertIn('<meta property="og:title" content="%s"/>' % article.title, response.data.decode('utf-8'))
self.assertIn('<meta property="og:description" content="%s"/>' % article.abstract, response.data.decode('utf-8'))
self.assertIn('<meta property="og:image" content="http://0.0.0.0:8000/None"/>', response.data.decode('utf-8'))
def test_article_detail_v3_citation_author_tags(self):
"""
Teste para verificar a página do artigo apresenta as tags author com
afiliação e ORCID.
"""
with current_app.test_request_context() as context:
utils.makeOneCollection({ 'acronym': "DUMMY_TEST2" })
journal = utils.makeOneJournal()
issue = utils.makeOneIssue({'journal': journal})
article = utils.makeOneArticle({
'title': 'Article Y',
'original_language': 'en',
'languages': ['es', 'pt', 'en'],
'issue': issue,
'journal': journal,
'url_segment': '10-11',
'authors_meta': [
{
"name" : "Arias, Sarah Muñoz",
"affiliation" : "Universidad Tecnológica de Pereira",
"orcid" : "0000-0002-3430-5422"
},
{
"name" : "Álvarez, Gloria Edith Guerrero",
"affiliation" : "Universidad Tecnológica de Pereira",
"orcid" : "0000-0002-0529-5835"
},
{
"name" : "Patiño, Paula Andrea González",
"affiliation" : "Universidad Tecnológica de Pereira",
"orcid" : "0000-0002-7323-9261"
}
]
})
response = self.client.get(
url_for(
'main.article_detail_v3',
url_seg=journal.url_segment,
article_pid_v3=article.aid,
)
)
self.assertStatus(response, 200)
content = response.data.decode('utf-8')
self.assertIn(
'<meta name="citation_author" content="Arias, Sarah Muñoz">', content)
self.assertIn('<meta name="citation_author_affiliation" content="Universidad Tecnológica de Pereira">', content)
self.assertIn('<meta name="citation_author_orcid" content="http://orcid.org/0000-0002-3430-5422">', content)
self.assertIn('<meta name="citation_author" content="Álvarez, Gloria Edith Guerrero">', content)
self.assertIn('<meta name="citation_author_affiliation" content="Universidad Tecnológica de Pereira">', content)
self.assertIn('<meta name="citation_author_orcid" content="http://orcid.org/0000-0002-0529-5835">', content)
self.assertIn('<meta name="citation_author" content="Patiño, Paula Andrea González">', content)
self.assertIn('<meta name="citation_author_affiliation" content="Universidad Tecnológica de Pereira">', content)
self.assertIn('<meta name="citation_author_orcid" content="http://orcid.org/0000-0002-7323-9261">', content)
| 39.959565
| 150
| 0.528256
| 9,938
| 102,776
| 5.284061
| 0.064601
| 0.02148
| 0.017329
| 0.032792
| 0.887952
| 0.868223
| 0.844153
| 0.829852
| 0.801268
| 0.78274
| 0
| 0.030757
| 0.362234
| 102,776
| 2,571
| 151
| 39.975107
| 0.770394
| 0.133484
| 0
| 0.708509
| 0
| 0.005432
| 0.171141
| 0.037808
| 0
| 0
| 0
| 0.001167
| 0.162945
| 1
| 0.054315
| false
| 0
| 0.008449
| 0
| 0.066989
| 0.002414
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
91d48fe9d67b08039a567e9b5048948d7dc6397c
| 17,873
|
py
|
Python
|
src/diamond/handler/test/testtsdb.py
|
harrisonfeng/Diamond
|
f2bece462577a7c557be8a9f90f6b9340c3db571
|
[
"MIT"
] | 1
|
2020-08-20T09:37:23.000Z
|
2020-08-20T09:37:23.000Z
|
src/diamond/handler/test/testtsdb.py
|
harrisonfeng/Diamond
|
f2bece462577a7c557be8a9f90f6b9340c3db571
|
[
"MIT"
] | null | null | null |
src/diamond/handler/test/testtsdb.py
|
harrisonfeng/Diamond
|
f2bece462577a7c557be8a9f90f6b9340c3db571
|
[
"MIT"
] | null | null | null |
#!/usr/bin/python
# coding=utf-8
##########################################################################
from test import unittest
from mock import patch, Mock
from diamond.metric import Metric
import urllib2
import configobj
import StringIO
import gzip
import contextlib
from diamond.handler.tsdb import TSDBHandler
@patch('diamond.handler.tsdb.urllib2.urlopen')
@patch('diamond.handler.tsdb.urllib2.Request')
class TestTSDBdHandler(unittest.TestCase):
def setUp(self):
self.url = 'http://127.0.0.1:4242/api/put'
def decompress(self, input):
infile = StringIO.StringIO()
infile.write(input)
with contextlib.closing(gzip.GzipFile(fileobj=infile, mode="r")) as f:
f.rewind()
out = f.read()
return out
def test_HTTPError(self, mock_urlopen, mock_request):
config = configobj.ConfigObj()
config['host'] = '127.0.0.1'
config['port'] = '4242'
metric = Metric('servers.myhostname.cpu.cpu_count',
123, raw_value=123, timestamp=1234567,
host='myhostname', metric_type='GAUGE')
handler = TSDBHandler(config)
header = {'Content-Type': 'application/json'}
exception = urllib2.HTTPError(url=self.url, code=404, msg="Error",
hdrs=header, fp=None)
handler.side_effect = exception
handler.process(metric)
def test_single_metric(self, mock_urlopen, mock_request):
config = configobj.ConfigObj()
config['host'] = '127.0.0.1'
config['port'] = '4242'
metric = Metric('servers.myhostname.cpu.cpu_count',
123, raw_value=123, timestamp=1234567,
host='myhostname', metric_type='GAUGE')
handler = TSDBHandler(config)
handler.process(metric)
body = ('[{"timestamp": 1234567, "metric": "cpu.cpu_count", "value": '
'123, "tags": {"hostname": "myhostname"}}]')
header = {'Content-Type': 'application/json'}
mock_urlopen.assert_called_with(self.url, body, header)
def test_compression(self, mock_urlopen, mock_request):
config = configobj.ConfigObj()
config['host'] = '127.0.0.1'
config['port'] = '4242'
config['compression'] = 1
metric = Metric('servers.myhostname.cpu.cpu_count',
123, raw_value=123, timestamp=1234567,
host='myhostname', metric_type='GAUGE')
handler = TSDBHandler(config)
handler.process(metric)
body = ('[{"timestamp": 1234567, "metric": "cpu.cpu_count", "value": '
'123, "tags": {"hostname": "myhostname"}}]')
passed_headers = mock_urlopen.call_args[0][2]
passed_body = mock_urlopen.call_args[0][1]
assert passed_headers['Content-Encoding'] == 'gzip'
assert passed_headers['Content-Type'] == 'application/json'
assert self.decompress(passed_body) == body
def test_user_password(self, mock_urlopen, mock_request):
config = configobj.ConfigObj()
config['host'] = '127.0.0.1'
config['port'] = '4242'
config['user'] = 'John Doe'
config['password'] = '123456789'
metric = Metric('servers.myhostname.cpu.cpu_count',
123, raw_value=123, timestamp=1234567,
host='myhostname', metric_type='GAUGE')
handler = TSDBHandler(config)
handler.process(metric)
body = ('[{"timestamp": 1234567, "metric": "cpu.cpu_count", "value": '
'123, "tags": {"hostname": "myhostname"}}]')
header = {'Content-Type': 'application/json',
'Authorization': 'Basic Sm9obiBEb2U6MTIzNDU2Nzg5'}
mock_urlopen.assert_called_with(self.url, body, header)
def test_batch(self, mock_urlopen, mock_request):
config = configobj.ConfigObj()
config['host'] = '127.0.0.1'
config['port'] = '4242'
config['batch'] = 2
metric = Metric('servers.myhostname.cpu.cpu_count',
123, raw_value=123, timestamp=1234567,
host='myhostname', metric_type='GAUGE')
metric2 = Metric('servers.myhostname.cpu.cpu_time',
123, raw_value=456, timestamp=5678910,
host='myhostname', metric_type='GAUGE')
handler = TSDBHandler(config)
handler.process(metric)
handler.process(metric2)
body = ('[{"timestamp": 1234567, "metric": "cpu.cpu_count", "value": '
'123, "tags": {"hostname": "myhostname"}}, {"timestamp": 567891'
'0, "metric": "cpu.cpu_time", "value": 123, "tags": {"hostname"'
': "myhostname"}}]')
header = {'Content-Type': 'application/json'}
mock_urlopen.assert_called_with(self.url, body, header)
def test_tags(self, mock_urlopen, mock_request):
config = configobj.ConfigObj()
config['host'] = '127.0.0.1'
config['port'] = '4242'
config['tags'] = 'tag1=tagv1 tag2=tagv2'
metric = Metric('servers.myhostname.cpu.cpu_count',
123, raw_value=123, timestamp=1234567,
host='myhostname', metric_type='GAUGE')
handler = TSDBHandler(config)
handler.process(metric)
body = ('[{"timestamp": 1234567, "metric": "cpu.cpu_count", "value": '
'123, "tags": {"hostname": "myhostname", "tag1": "tagv1", '
'"tag2": "tagv2"}}]')
header = {'Content-Type': 'application/json'}
mock_urlopen.assert_called_with(self.url, body, header)
def test_prefix(self, mock_urlopen, mock_request):
config = configobj.ConfigObj()
config['host'] = '127.0.0.1'
config['port'] = '4242'
config['prefix'] = 'diamond'
metric = Metric('servers.myhostname.cpu.cpu_count',
123, raw_value=123, timestamp=1234567,
host='myhostname', metric_type='GAUGE')
handler = TSDBHandler(config)
handler.process(metric)
body = ('[{"timestamp": 1234567, "metric": "diamond.cpu.cpu_count", '
'"value": 123, "tags": {"hostname": "myhostname"}}]')
header = {'Content-Type': 'application/json'}
mock_urlopen.assert_called_with(self.url, body, header)
def test_cpu_metrics_taghandling_default(self, mock_urlopen, mock_request):
config = configobj.ConfigObj()
config['host'] = '127.0.0.1'
config['port'] = '4242'
config['tags'] = ['myFirstTag=myValue']
metric = Metric('servers.myhostname.cpu.cpu0.user',
123, raw_value=123, timestamp=1234567,
host='myhostname', metric_type='GAUGE')
handler = TSDBHandler(config)
handler.process(metric)
body = ('[{"timestamp": 1234567, "metric": "cpu.user", "value": '
'123, "tags": {"cpuId": "cpu0", "myFirstTag": "myValue", '
'"hostname": "myhostname"}}]')
header = {'Content-Type': 'application/json'}
mock_urlopen.assert_called_with(self.url, body, header)
def test_cpu_metrics_taghandling_0(self, mock_urlopen, mock_request):
"""
deactivate
"""
config = configobj.ConfigObj()
config['host'] = '127.0.0.1'
config['port'] = '4242'
config['tags'] = ['myFirstTag=myValue']
config['cleanMetrics'] = False
metric = Metric('servers.myhostname.cpu.cpu0.user',
123, raw_value=123, timestamp=1234567,
host='myhostname', metric_type='GAUGE')
handler = TSDBHandler(config)
handler.process(metric)
body = ('[{"timestamp": 1234567, "metric": "cpu.cpu0.user", "value": '
'123, "tags": {"myFirstTag": "myValue", "hostname": '
'"myhostname"}}]')
header = {'Content-Type': 'application/json'}
mock_urlopen.assert_called_with(self.url, body, header)
def test_cpu_metrics_taghandling_default(self, mock_urlopen, mock_request):
"""
aggregate default
"""
config = configobj.ConfigObj()
config['host'] = '127.0.0.1'
config['port'] = '4242'
config['tags'] = ['myFirstTag=myValue']
metric = Metric('servers.myhostname.cpu.total.user',
123, raw_value=123, timestamp=1234567,
host='myhostname', metric_type='GAUGE')
handler = TSDBHandler(config)
handler.process(metric)
assert not mock_urlopen.called, "should not process"
def test_cpu_metrics_taghandling_1(self, mock_urlopen, mock_request):
"""
aggregate deactivate
"""
config = configobj.ConfigObj()
config['host'] = '127.0.0.1'
config['port'] = '4242'
config['tags'] = ['myFirstTag=myValue']
config['cleanMetrics'] = False
metric = Metric('servers.myhostname.cpu.total.user',
123, raw_value=123, timestamp=1234567,
host='myhostname', metric_type='GAUGE')
handler = TSDBHandler(config)
handler.process(metric)
body = ('[{"timestamp": 1234567, "metric": "cpu.total.user", "value": '
'123, "tags": {"myFirstTag": "myValue", "hostname": '
'"myhostname"}}]')
header = {'Content-Type': 'application/json'}
mock_urlopen.assert_called_with(self.url, body, header)
def test_cpu_metrics_taghandling_2(self, mock_urlopen, mock_request):
"""
aggregate deactivate
"""
config = configobj.ConfigObj()
config['host'] = '127.0.0.1'
config['port'] = '4242'
config['tags'] = ['myFirstTag=myValue']
config['cleanMetrics'] = True
config['skipAggregates'] = False
metric = Metric('servers.myhostname.cpu.total.user',
123, raw_value=123, timestamp=1234567,
host='myhostname', metric_type='GAUGE')
handler = TSDBHandler(config)
handler.process(metric)
body = ('[{"timestamp": 1234567, "metric": "cpu.user", "value": '
'123, "tags": {"cpuId": "total", "myFirstTag": "myValue", '
'"hostname": "myhostname"}}]')
header = {'Content-Type': 'application/json'}
mock_urlopen.assert_called_with(self.url, body, header)
def test_haproxy_metrics_default(self, mock_urlopen, mock_request):
"""
taghandling default
"""
config = configobj.ConfigObj()
config['host'] = '127.0.0.1'
config['port'] = '4242'
config['tags'] = ['myFirstTag=myValue']
metric = Metric('servers.myhostname.haproxy.SOME-BACKEND.SOME-SERVER.'
'bin',
123, raw_value=123, timestamp=1234567,
host='myhostname', metric_type='GAUGE')
handler = TSDBHandler(config)
handler.process(metric)
body = ('[{"timestamp": 1234567, "metric": "haproxy.bin",'
' "value": 123, "tags": {"backend": "SOME-BACKEND",'
' "myFirstTag": "myValue", "hostname": "myhostname", "server": '
'"SOME-SERVER"}}]')
header = {'Content-Type': 'application/json'}
mock_urlopen.assert_called_with(self.url, body, header)
def test_haproxy_metrics(self, mock_urlopen, mock_request):
"""
taghandling deactivate
"""
config = configobj.ConfigObj()
config['host'] = '127.0.0.1'
config['port'] = '4242'
config['tags'] = ['myFirstTag=myValue']
config['cleanMetrics'] = False
metric = Metric('servers.myhostname.haproxy.SOME-BACKEND.SOME-SERVER.'
'bin',
123, raw_value=123, timestamp=1234567,
host='myhostname', metric_type='GAUGE')
handler = TSDBHandler(config)
handler.process(metric)
body = ('[{"timestamp": 1234567, "metric": "haproxy.SOME-BACKEND.SOME-'
'SERVER.bin", "value": 123, "tags": {"myFirstTag": "myValue", '
'"hostname": "myhostname"}}]')
header = {'Content-Type': 'application/json'}
mock_urlopen.assert_called_with(self.url, body, header)
def test_diskspace_metrics_default(self, mock_urlopen, mock_request):
"""
taghandling default
"""
config = configobj.ConfigObj()
config['host'] = '127.0.0.1'
config['port'] = '4242'
config['tags'] = ['myFirstTag=myValue']
metric = Metric('servers.myhostname.diskspace.MOUNT_POINT.byte_percent'
'free',
80, raw_value=80, timestamp=1234567,
host='myhostname', metric_type='GAUGE')
handler = TSDBHandler(config)
handler.process(metric)
body = ('[{"timestamp": 1234567, "metric": "diskspace.'
'byte_percentfree", "value": 80, "tags": {"mountpoint": '
'"MOUNT_POINT", "myFirstTag": "myValue", "hostname": '
'"myhostname"}}]')
header = {'Content-Type': 'application/json'}
mock_urlopen.assert_called_with(self.url, body, header)
def test_diskspace_metrics(self, mock_urlopen, mock_request):
"""
taghandling deactivate
"""
config = configobj.ConfigObj()
config['host'] = '127.0.0.1'
config['port'] = '4242'
config['tags'] = ['myFirstTag=myValue']
config['cleanMetrics'] = False
metric = Metric('servers.myhostname.diskspace.MOUNT_POINT.byte_'
'percentfree',
80, raw_value=80, timestamp=1234567,
host='myhostname', metric_type='GAUGE')
handler = TSDBHandler(config)
handler.process(metric)
body = ('[{"timestamp": 1234567, "metric": "diskspace.MOUNT_POINT'
'.byte_percentfree", "value": 80, "tags": {"myFirstTag": '
'"myValue", "hostname": "myhostname"}}]')
header = {'Content-Type': 'application/json'}
mock_urlopen.assert_called_with(self.url, body, header)
def test_iostat_metrics_default(self, mock_urlopen, mock_request):
"""
taghandling default
"""
config = configobj.ConfigObj()
config['host'] = '127.0.0.1'
config['port'] = '4242'
config['tags'] = ['myFirstTag=myValue']
metric = Metric('servers.myhostname.iostat.DEV.io_in_progress',
80, raw_value=80, timestamp=1234567,
host='myhostname', metric_type='GAUGE')
handler = TSDBHandler(config)
handler.process(metric)
body = ('[{"timestamp": 1234567, "metric": "iostat.io_in_progress", '
'"value": 80, "tags": {"device": "DEV", "myFirstTag": '
'"myValue", "hostname": "myhostname"}}]')
header = {'Content-Type': 'application/json'}
mock_urlopen.assert_called_with(self.url, body, header)
def test_iostat_metrics(self, mock_urlopen, mock_request):
"""
taghandling deactivate
"""
config = configobj.ConfigObj()
config['host'] = '127.0.0.1'
config['port'] = '4242'
config['tags'] = ['myFirstTag=myValue']
config['cleanMetrics'] = False
metric = Metric('servers.myhostname.iostat.DEV.io_in_progress',
80, raw_value=80, timestamp=1234567,
host='myhostname', metric_type='GAUGE')
handler = TSDBHandler(config)
handler.process(metric)
body = ('[{"timestamp": 1234567, "metric": "iostat.DEV.io_in_progress"'
', "value": 80, "tags": {"myFirstTag": "myValue", "hostname": '
'"myhostname"}}]')
header = {'Content-Type': 'application/json'}
mock_urlopen.assert_called_with(self.url, body, header)
def test_network_metrics_default(self, mock_urlopen, mock_request):
"""
taghandling default
"""
config = configobj.ConfigObj()
config['host'] = '127.0.0.1'
config['port'] = '4242'
config['tags'] = ['myFirstTag=myValue']
metric = Metric('servers.myhostname.network.IF.rx_packets',
80, raw_value=80, timestamp=1234567,
host='myhostname', metric_type='GAUGE')
handler = TSDBHandler(config)
handler.process(metric)
body = ('[{"timestamp": 1234567, "metric": "network.rx_packets", '
'"value": 80, "tags": {"interface": "IF", "myFirstTag": '
'"myValue", "hostname": "myhostname"}}]')
header = {'Content-Type': 'application/json'}
mock_urlopen.assert_called_with(self.url, body, header)
def test_network_metrics(self, mock_urlopen, mock_request):
"""
taghandling deactivate
"""
config = configobj.ConfigObj()
config['host'] = '127.0.0.1'
config['port'] = '4242'
config['tags'] = ['myFirstTag=myValue']
config['cleanMetrics'] = False
metric = Metric('servers.myhostname.network.IF.rx_packets',
80, raw_value=80, timestamp=1234567,
host='myhostname', metric_type='GAUGE')
handler = TSDBHandler(config)
handler.process(metric)
body = ('[{"timestamp": 1234567, "metric": "network.IF.rx_packets", '
'"value": 80, "tags": {"myFirstTag": "myValue", "hostname": '
'"myhostname"}}]')
header = {'Content-Type': 'application/json'}
mock_urlopen.assert_called_with(self.url, body, header)
| 41.955399
| 80
| 0.564483
| 1,748
| 17,873
| 5.641304
| 0.089245
| 0.04462
| 0.010648
| 0.012778
| 0.874962
| 0.843626
| 0.836731
| 0.836731
| 0.832066
| 0.832066
| 0
| 0.05456
| 0.282157
| 17,873
| 425
| 81
| 42.054118
| 0.71403
| 0.015218
| 0
| 0.715569
| 0
| 0.002994
| 0.302684
| 0.061105
| 0
| 0
| 0
| 0
| 0.062874
| 1
| 0.065868
| false
| 0.020958
| 0.026946
| 0
| 0.098802
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
5309e001f2f910f46d5bde9d7a7ce221764bef37
| 110
|
py
|
Python
|
qd3dt/models/backbones/__init__.py
|
mengmengliu1998/qd-3dt
|
9fcd1c0b165793e259deb46a64fcbbdc33735f2f
|
[
"BSD-3-Clause"
] | 384
|
2021-03-19T11:12:31.000Z
|
2022-03-31T01:59:16.000Z
|
qd3dt/models/backbones/__init__.py
|
mengmengliu1998/qd-3dt
|
9fcd1c0b165793e259deb46a64fcbbdc33735f2f
|
[
"BSD-3-Clause"
] | 22
|
2021-04-06T14:28:12.000Z
|
2022-03-22T18:02:00.000Z
|
qd3dt/models/backbones/__init__.py
|
mengmengliu1998/qd-3dt
|
9fcd1c0b165793e259deb46a64fcbbdc33735f2f
|
[
"BSD-3-Clause"
] | 73
|
2021-03-20T06:23:26.000Z
|
2022-03-16T08:00:21.000Z
|
from .resnet import ResNet, make_res_layer
from .dla import DLA
__all__ = ['ResNet', 'make_res_layer', 'DLA']
| 27.5
| 45
| 0.745455
| 17
| 110
| 4.352941
| 0.470588
| 0.27027
| 0.351351
| 0.486486
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.127273
| 110
| 4
| 45
| 27.5
| 0.770833
| 0
| 0
| 0
| 0
| 0
| 0.207207
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.666667
| 0
| 0.666667
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
530f2941554f8fe127b1bdc2fd83c7480f3364d2
| 18,571
|
py
|
Python
|
nexus_api_python_client/api/email_api.py
|
simonebruzzechesse/nexus-api-python-client
|
eaa1098dbd8778f6f3bda948268953b742f2ab64
|
[
"MIT"
] | 1
|
2021-11-14T12:43:38.000Z
|
2021-11-14T12:43:38.000Z
|
nexus_api_python_client/api/email_api.py
|
simonebruzzechesse/nexus-api-python-client
|
eaa1098dbd8778f6f3bda948268953b742f2ab64
|
[
"MIT"
] | null | null | null |
nexus_api_python_client/api/email_api.py
|
simonebruzzechesse/nexus-api-python-client
|
eaa1098dbd8778f6f3bda948268953b742f2ab64
|
[
"MIT"
] | null | null | null |
# coding: utf-8
"""
Nexus Repository Manager REST API
No description provided (generated by Openapi Generator https://github.com/openapitools/openapi-generator) # noqa: E501
The version of the OpenAPI document: 3.20.1-01
Generated by: https://openapi-generator.tech
"""
from __future__ import absolute_import
import re # noqa: F401
# python 2 and python 3 compatibility library
import six
from nexus_api_python_client.api_client import ApiClient
from nexus_api_python_client.exceptions import (
ApiTypeError,
ApiValueError
)
class EmailApi(object):
    """NOTE: This class is auto generated by OpenAPI Generator
    Ref: https://openapi-generator.tech
    Do not edit the class manually.

    All endpoints target the ``/beta/email`` resource of the Nexus
    Repository Manager REST API.  The per-endpoint boilerplate of the
    generated code (kwargs validation, required-body check, call_api
    plumbing) is factored into the private helpers ``_collect_params``
    and ``_call_endpoint``; public methods, signatures, and behavior
    are unchanged.
    """

    # Keyword arguments accepted by every endpoint method in this class.
    _COMMON_PARAMS = ('async_req', '_return_http_data_only',
                      '_preload_content', '_request_timeout')

    def __init__(self, api_client=None):
        # Fall back to a default client so `EmailApi()` works standalone.
        if api_client is None:
            api_client = ApiClient()
        self.api_client = api_client

    def _collect_params(self, method_name, local_var_params, required=()):
        """Validate and merge ``**kwargs`` for one endpoint call.

        :param method_name: public method name, used in error messages
        :param local_var_params: dict holding a ``'kwargs'`` entry (plus
                                 ``'body'`` for endpoints that take one)
        :param required: names of required positional params (``'body'``)
        :return: the merged parameter dict
        :raises ApiTypeError: on an unexpected keyword argument
        :raises ApiValueError: when a required body is missing/None and
                               client-side validation is enabled
        """
        all_params = list(required) + list(self._COMMON_PARAMS)
        for key, val in local_var_params['kwargs'].items():
            if key not in all_params:
                raise ApiTypeError(
                    "Got an unexpected keyword argument '%s'"
                    " to method %s" % (key, method_name)
                )
            local_var_params[key] = val
        del local_var_params['kwargs']
        # verify the required parameter 'body' is set
        if 'body' in required and self.api_client.client_side_validation and (
                'body' not in local_var_params or
                local_var_params['body'] is None):  # noqa: E501
            raise ApiValueError(
                "Missing the required parameter `body` when calling "
                "`%s`" % method_name)  # noqa: E501
        return local_var_params

    def _call_endpoint(self, resource_path, http_method, local_var_params,
                       response_type=None, accepts=None, send_body=False):
        """Dispatch a validated request through the shared ApiClient.

        :param resource_path: endpoint path, e.g. ``'/beta/email'``
        :param http_method: ``'GET'``/``'PUT'``/``'DELETE'``/``'POST'``
        :param local_var_params: dict produced by ``_collect_params``
        :param response_type: deserialization target, or None
        :param accepts: list of Accept media types, or None for no header
        :param send_body: forward ``local_var_params['body']`` as the body
        """
        header_params = {}
        if accepts:
            # HTTP header `Accept`
            header_params['Accept'] = self.api_client.select_header_accept(
                accepts)  # noqa: E501
        body_params = local_var_params.get('body') if send_body else None
        return self.api_client.call_api(
            resource_path, http_method,
            {},  # path_params: none of these endpoints are templated
            [],  # query_params
            header_params,
            body=body_params,
            post_params=[],
            files={},
            response_type=response_type,  # noqa: E501
            auth_settings=[],  # Authentication setting (none generated)
            async_req=local_var_params.get('async_req'),
            _return_http_data_only=local_var_params.get('_return_http_data_only'),  # noqa: E501
            _preload_content=local_var_params.get('_preload_content', True),
            _request_timeout=local_var_params.get('_request_timeout'),
            collection_formats={})

    def delete_email_configuration(self, **kwargs):  # noqa: E501
        """Disable and clear the email configuration  # noqa: E501

        This method makes a synchronous HTTP request by default. To make an
        asynchronous HTTP request, please pass async_req=True

        >>> thread = api.delete_email_configuration(async_req=True)
        >>> result = thread.get()

        :param async_req bool: execute request asynchronously
        :param _preload_content: if False, the urllib3.HTTPResponse object will
                                 be returned without reading/decoding response
                                 data. Default is True.
        :param _request_timeout: timeout setting for this request. If one
                                 number provided, it will be total request
                                 timeout. It can also be a pair (tuple) of
                                 (connection, read) timeouts.
        :return: None
                 If the method is called asynchronously,
                 returns the request thread.
        """
        kwargs['_return_http_data_only'] = True
        return self.delete_email_configuration_with_http_info(**kwargs)  # noqa: E501

    def delete_email_configuration_with_http_info(self, **kwargs):  # noqa: E501
        """Disable and clear the email configuration  # noqa: E501

        This method makes a synchronous HTTP request by default. To make an
        asynchronous HTTP request, please pass async_req=True

        >>> thread = api.delete_email_configuration_with_http_info(async_req=True)
        >>> result = thread.get()

        :param async_req bool: execute request asynchronously
        :param _return_http_data_only: response data without head status code
                                       and headers
        :param _preload_content: if False, the urllib3.HTTPResponse object will
                                 be returned without reading/decoding response
                                 data. Default is True.
        :param _request_timeout: timeout setting for this request. If one
                                 number provided, it will be total request
                                 timeout. It can also be a pair (tuple) of
                                 (connection, read) timeouts.
        :return: None
                 If the method is called asynchronously,
                 returns the request thread.
        """
        local_var_params = self._collect_params(
            'delete_email_configuration', {'kwargs': kwargs})
        return self._call_endpoint('/beta/email', 'DELETE', local_var_params)

    def get_email_configuration(self, **kwargs):  # noqa: E501
        """Retrieve the current email configuration  # noqa: E501

        This method makes a synchronous HTTP request by default. To make an
        asynchronous HTTP request, please pass async_req=True

        >>> thread = api.get_email_configuration(async_req=True)
        >>> result = thread.get()

        :param async_req bool: execute request asynchronously
        :param _preload_content: if False, the urllib3.HTTPResponse object will
                                 be returned without reading/decoding response
                                 data. Default is True.
        :param _request_timeout: timeout setting for this request. If one
                                 number provided, it will be total request
                                 timeout. It can also be a pair (tuple) of
                                 (connection, read) timeouts.
        :return: ApiEmailConfiguration
                 If the method is called asynchronously,
                 returns the request thread.
        """
        kwargs['_return_http_data_only'] = True
        return self.get_email_configuration_with_http_info(**kwargs)  # noqa: E501

    def get_email_configuration_with_http_info(self, **kwargs):  # noqa: E501
        """Retrieve the current email configuration  # noqa: E501

        This method makes a synchronous HTTP request by default. To make an
        asynchronous HTTP request, please pass async_req=True

        >>> thread = api.get_email_configuration_with_http_info(async_req=True)
        >>> result = thread.get()

        :param async_req bool: execute request asynchronously
        :param _return_http_data_only: response data without head status code
                                       and headers
        :param _preload_content: if False, the urllib3.HTTPResponse object will
                                 be returned without reading/decoding response
                                 data. Default is True.
        :param _request_timeout: timeout setting for this request. If one
                                 number provided, it will be total request
                                 timeout. It can also be a pair (tuple) of
                                 (connection, read) timeouts.
        :return: tuple(ApiEmailConfiguration, status_code(int), headers(HTTPHeaderDict))
                 If the method is called asynchronously,
                 returns the request thread.
        """
        local_var_params = self._collect_params(
            'get_email_configuration', {'kwargs': kwargs})
        return self._call_endpoint(
            '/beta/email', 'GET', local_var_params,
            response_type='ApiEmailConfiguration',
            accepts=['application/json'])

    def set_email_configuration(self, body, **kwargs):  # noqa: E501
        """Set the current email configuration  # noqa: E501

        This method makes a synchronous HTTP request by default. To make an
        asynchronous HTTP request, please pass async_req=True

        >>> thread = api.set_email_configuration(body, async_req=True)
        >>> result = thread.get()

        :param async_req bool: execute request asynchronously
        :param ApiEmailConfiguration body: (required)
        :param _preload_content: if False, the urllib3.HTTPResponse object will
                                 be returned without reading/decoding response
                                 data. Default is True.
        :param _request_timeout: timeout setting for this request. If one
                                 number provided, it will be total request
                                 timeout. It can also be a pair (tuple) of
                                 (connection, read) timeouts.
        :return: None
                 If the method is called asynchronously,
                 returns the request thread.
        """
        kwargs['_return_http_data_only'] = True
        return self.set_email_configuration_with_http_info(body, **kwargs)  # noqa: E501

    def set_email_configuration_with_http_info(self, body, **kwargs):  # noqa: E501
        """Set the current email configuration  # noqa: E501

        This method makes a synchronous HTTP request by default. To make an
        asynchronous HTTP request, please pass async_req=True

        >>> thread = api.set_email_configuration_with_http_info(body, async_req=True)
        >>> result = thread.get()

        :param async_req bool: execute request asynchronously
        :param ApiEmailConfiguration body: (required)
        :param _return_http_data_only: response data without head status code
                                       and headers
        :param _preload_content: if False, the urllib3.HTTPResponse object will
                                 be returned without reading/decoding response
                                 data. Default is True.
        :param _request_timeout: timeout setting for this request. If one
                                 number provided, it will be total request
                                 timeout. It can also be a pair (tuple) of
                                 (connection, read) timeouts.
        :return: None
                 If the method is called asynchronously,
                 returns the request thread.
        """
        local_var_params = self._collect_params(
            'set_email_configuration',
            {'body': body, 'kwargs': kwargs},
            required=('body',))
        return self._call_endpoint(
            '/beta/email', 'PUT', local_var_params, send_body=True)

    def test_email_configuration(self, body, **kwargs):  # noqa: E501
        """Send a test email to the email address provided in the request body  # noqa: E501

        This method makes a synchronous HTTP request by default. To make an
        asynchronous HTTP request, please pass async_req=True

        >>> thread = api.test_email_configuration(body, async_req=True)
        >>> result = thread.get()

        :param async_req bool: execute request asynchronously
        :param str body: An email address to send a test email to (required)
        :param _preload_content: if False, the urllib3.HTTPResponse object will
                                 be returned without reading/decoding response
                                 data. Default is True.
        :param _request_timeout: timeout setting for this request. If one
                                 number provided, it will be total request
                                 timeout. It can also be a pair (tuple) of
                                 (connection, read) timeouts.
        :return: None
                 If the method is called asynchronously,
                 returns the request thread.
        """
        kwargs['_return_http_data_only'] = True
        return self.test_email_configuration_with_http_info(body, **kwargs)  # noqa: E501

    def test_email_configuration_with_http_info(self, body, **kwargs):  # noqa: E501
        """Send a test email to the email address provided in the request body  # noqa: E501

        This method makes a synchronous HTTP request by default. To make an
        asynchronous HTTP request, please pass async_req=True

        >>> thread = api.test_email_configuration_with_http_info(body, async_req=True)
        >>> result = thread.get()

        :param async_req bool: execute request asynchronously
        :param str body: An email address to send a test email to (required)
        :param _return_http_data_only: response data without head status code
                                       and headers
        :param _preload_content: if False, the urllib3.HTTPResponse object will
                                 be returned without reading/decoding response
                                 data. Default is True.
        :param _request_timeout: timeout setting for this request. If one
                                 number provided, it will be total request
                                 timeout. It can also be a pair (tuple) of
                                 (connection, read) timeouts.
        :return: None
                 If the method is called asynchronously,
                 returns the request thread.
        """
        local_var_params = self._collect_params(
            'test_email_configuration',
            {'body': body, 'kwargs': kwargs},
            required=('body',))
        return self._call_endpoint(
            '/beta/email/verify', 'POST', local_var_params, send_body=True)
| 42.790323
| 126
| 0.599752
| 2,056
| 18,571
| 5.16537
| 0.090467
| 0.036158
| 0.052731
| 0.033898
| 0.922881
| 0.918362
| 0.912618
| 0.908569
| 0.908569
| 0.894068
| 0
| 0.012246
| 0.331646
| 18,571
| 433
| 127
| 42.889146
| 0.843377
| 0.474988
| 0
| 0.777778
| 1
| 0
| 0.145833
| 0.050847
| 0
| 0
| 0
| 0
| 0
| 1
| 0.047619
| false
| 0
| 0.026455
| 0
| 0.121693
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
534b537bd5b376fed8afb038df18d92ddb67d149
| 1,678
|
py
|
Python
|
core/migrations/0013_auto_20210313_0323.py
|
lcbiplove/nepfdb
|
56e48bb0dcae34d409b7d75d210d2938e763a953
|
[
"MIT"
] | null | null | null |
core/migrations/0013_auto_20210313_0323.py
|
lcbiplove/nepfdb
|
56e48bb0dcae34d409b7d75d210d2938e763a953
|
[
"MIT"
] | null | null | null |
core/migrations/0013_auto_20210313_0323.py
|
lcbiplove/nepfdb
|
56e48bb0dcae34d409b7d75d210d2938e763a953
|
[
"MIT"
] | null | null | null |
# Generated by Django 3.1.7 on 2021-03-13 03:23
from django.db import migrations, models
class Migration(migrations.Migration):
    """Auto-generated: re-declare every model's `id` as a standard AutoField."""

    dependencies = [
        ('core', '0012_auto_20210312_1354'),
    ]

    # Every model below receives the identical primary-key alteration, so the
    # operations list is generated from the model names rather than spelled
    # out seven times.
    _MODEL_NAMES = (
        'award',
        'movie',
        'person',
        'photo',
        'production',
        'review',
        'user',
    )

    operations = [
        migrations.AlterField(
            model_name=model_name,
            name='id',
            field=models.AutoField(
                auto_created=True,
                primary_key=True,
                serialize=False,
                verbose_name='ID',
            ),
        )
        for model_name in _MODEL_NAMES
    ]
| 34.244898
| 108
| 0.593564
| 176
| 1,678
| 5.482955
| 0.255682
| 0.087047
| 0.181347
| 0.210363
| 0.77513
| 0.77513
| 0.77513
| 0.77513
| 0.77513
| 0.77513
| 0
| 0.025748
| 0.282479
| 1,678
| 48
| 109
| 34.958333
| 0.775748
| 0.026818
| 0
| 0.666667
| 1
| 0
| 0.05886
| 0.014102
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.02381
| 0
| 0.095238
| 0
| 0
| 0
| 0
| null | 0
| 1
| 1
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
7271b13791319bb90bb40d701668b95b6f937e34
| 6,417
|
py
|
Python
|
deepmath/deephol/utilities/proof_analysis_test.py
|
LaudateCorpus1/deepmath
|
b5b721f54de1d5d6a02d78f5da5995237f9995f9
|
[
"Apache-2.0"
] | 830
|
2016-11-07T21:46:27.000Z
|
2022-03-23T08:01:03.000Z
|
deepmath/deephol/utilities/proof_analysis_test.py
|
LaudateCorpus1/deepmath
|
b5b721f54de1d5d6a02d78f5da5995237f9995f9
|
[
"Apache-2.0"
] | 26
|
2016-11-07T22:06:31.000Z
|
2022-02-16T00:18:29.000Z
|
deepmath/deephol/utilities/proof_analysis_test.py
|
LaudateCorpus1/deepmath
|
b5b721f54de1d5d6a02d78f5da5995237f9995f9
|
[
"Apache-2.0"
] | 168
|
2016-11-07T21:48:55.000Z
|
2022-03-19T02:47:14.000Z
|
"""Tests for deepmath.deephol.proof_analysis."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import tensorflow as tf
from deepmath.deephol.utilities import proof_analysis
from deepmath.deephol.utilities import proof_test_util
class ProofAnalysisTest(tf.test.TestCase):
  """Tests for proof_analysis.find_reasons and proof_analysis.extract_proof.

  Proof logs are assembled with proof_test_util helpers.  From the call
  patterns below, add_node appears to take (log, per-tactic child index
  lists, closed-flag, optional root/proved-flag), and find_reasons returns
  either None (no valid proof) or a (reasons, node_indices) pair where each
  reason looks like (node_index, tactic_index, child_indices).
  NOTE(review): these semantics are inferred from usage — confirm against
  proof_test_util and proof_analysis.
  """

  def test_empty_log_reasons(self):
    # A log with no proofs and no nodes yields empty reasons and nodes.
    proof_log = proof_test_util.new_log(num_proofs=0)
    self.assertEqual(proof_analysis.find_reasons(proof_log), ([], []))

  def test_root_no_proof_reasons(self):
    # Root node present but the log records zero proofs: nothing extracted.
    proof_log = proof_test_util.new_log(num_proofs=0)
    proof_test_util.add_node(proof_log, [], False, True)
    self.assertEqual(proof_analysis.find_reasons(proof_log), ([], []))

  def test_root_not_marked(self):
    # Root exists but is not marked; still no reasons.
    proof_log = proof_test_util.new_log(num_proofs=0)
    proof_test_util.add_node(proof_log, [], False, False)
    self.assertEqual(proof_analysis.find_reasons(proof_log), ([], []))

  def test_root_no_reasons(self):
    # Closed+marked root with no tactic applications is inconsistent with
    # num_proofs=0 — find_reasons signals failure with None.
    proof_log = proof_test_util.new_log(num_proofs=0)
    proof_test_util.add_node(proof_log, [], True, True)
    self.assertIsNone(proof_analysis.find_reasons(proof_log))

  def test_multi_root_has_simple_proof(self):
    # Two roots sharing leaf node 2; all three nodes participate.
    proof_log = proof_test_util.new_log(num_proofs=1)
    proof_test_util.add_node(proof_log, [[2]], True, True)
    proof_test_util.add_node(proof_log, [[2]], True, True)
    proof_test_util.add_node(proof_log, [[]], True)
    reasons, nodes = proof_analysis.find_reasons(proof_log)
    self.assertEqual(nodes, [0, 1, 2])
    self.assertEqual(reasons, [(0, 0, [2]), (1, 0, [2]), (2, 0, [])])

  def test_multi_root_reorder_has_simple_proof(self):
    # Leaf added first (index 0); traversal still lists roots before the leaf.
    proof_log = proof_test_util.new_log(num_proofs=1)
    proof_test_util.add_node(proof_log, [[]], True)
    proof_test_util.add_node(proof_log, [[0]], True, True)
    proof_test_util.add_node(proof_log, [[0]], True, True)
    reasons, nodes = proof_analysis.find_reasons(proof_log)
    self.assertEqual(nodes, [1, 2, 0])
    self.assertEqual(reasons, [(1, 0, [0]), (2, 0, [0]), (0, 0, [])])

  def test_multi_root_reorder2_has_simple_proof(self):
    # Second root depends on the first root (node 1) instead of the leaf.
    proof_log = proof_test_util.new_log(num_proofs=1)
    proof_test_util.add_node(proof_log, [[]], True)
    proof_test_util.add_node(proof_log, [[0]], True, True)
    proof_test_util.add_node(proof_log, [[1]], True, True)
    reasons, nodes = proof_analysis.find_reasons(proof_log)
    self.assertEqual(nodes, [1, 2, 0])
    self.assertEqual(reasons, [(1, 0, [0]), (2, 0, [1]), (0, 0, [])])

  def test_root_invalid_simple_proof(self):
    # Root points at an unclosed child: the proof is invalid -> None.
    proof_log = proof_test_util.new_log(num_proofs=0)
    proof_test_util.add_node(proof_log, [[1]], True, True)
    proof_test_util.add_node(proof_log, [], False)
    self.assertIsNone(proof_analysis.find_reasons(proof_log))

  def test_root_is_leaf(self):
    # A single closed root with a no-subgoal tactic is a complete proof.
    proof_log = proof_test_util.new_log(num_proofs=1)
    proof_test_util.add_node(proof_log, [[]], True, True)
    reasons, nodes = proof_analysis.find_reasons(proof_log)
    self.assertEqual(nodes, [0])
    self.assertEqual(reasons, [(0, 0, [])])

  def test_root_has_simple_proof(self):
    # Root (0) closed via leaf child (1).
    proof_log = proof_test_util.new_log(num_proofs=1)
    proof_test_util.add_node(proof_log, [[1]], True, True)
    proof_test_util.add_node(proof_log, [[]], True)
    reasons, nodes = proof_analysis.find_reasons(proof_log)
    self.assertEqual(nodes, [0, 1])
    self.assertEqual(reasons, [(0, 0, [1]), (1, 0, [])])

  def test_root_has_simple_proof_order2(self):
    # Same proof as above, with the leaf inserted before the root.
    proof_log = proof_test_util.new_log(num_proofs=1)
    proof_test_util.add_node(proof_log, [[]], True)
    proof_test_util.add_node(proof_log, [[0]], True, True)
    reasons, nodes = proof_analysis.find_reasons(proof_log)
    self.assertEqual(nodes, [1, 0])
    self.assertEqual(reasons, [(1, 0, [0]), (0, 0, [])])

  def test_root_has_chain_ignores_unclosed(self):
    # Node 2 has two tactic applications; the one through node 5 fails
    # (5 has no closing tactic), so tactic index 1 (via node 4) is chosen
    # and nodes 3 and 5 are excluded from the proof.
    proof_log = proof_test_util.new_log(num_proofs=1)
    proof_test_util.add_node(proof_log, [[1]], True, True)
    proof_test_util.add_node(proof_log, [[2]], True)
    proof_test_util.add_node(proof_log, [[5], [4]], True)
    proof_test_util.add_node(proof_log, [[]], True)
    proof_test_util.add_node(proof_log, [[]], True)
    proof_test_util.add_node(proof_log, [], True)
    reasons, nodes = proof_analysis.find_reasons(proof_log)
    self.assertEqual(nodes, [0, 1, 2, 4])
    self.assertEqual(reasons, [(0, 0, [1]), (1, 0, [2]), (2, 1, [4]),
                               (4, 0, [])])

  def test_root_has_chain_ignores_loop(self):
    # Node 2's first tactic points back at node 1 (a cycle); the analysis
    # must skip it and take the acyclic alternative through node 4.
    proof_log = proof_test_util.new_log(num_proofs=1)
    proof_test_util.add_node(proof_log, [[1]], True, True)
    proof_test_util.add_node(proof_log, [[2]], True)
    proof_test_util.add_node(proof_log, [[1], [4]], True)
    proof_test_util.add_node(proof_log, [[]], True)
    proof_test_util.add_node(proof_log, [[]], True)
    proof_test_util.add_node(proof_log, [], True)
    reasons, nodes = proof_analysis.find_reasons(proof_log)
    self.assertEqual(nodes, [0, 1, 2, 4])
    self.assertEqual(reasons, [(0, 0, [1]), (1, 0, [2]), (2, 1, [4]),
                               (4, 0, [])])

  def test_root_has_chain_ignores_loop_order2(self):
    # Same loop-avoidance scenario with every index shifted by one because
    # a dummy node occupies slot 0.
    proof_log = proof_test_util.new_log(num_proofs=1)
    proof_test_util.add_node(proof_log, [], True)
    proof_test_util.add_node(proof_log, [[2]], True, True)
    proof_test_util.add_node(proof_log, [[3]], True)
    proof_test_util.add_node(proof_log, [[2], [5]], True)
    proof_test_util.add_node(proof_log, [[]], True)
    proof_test_util.add_node(proof_log, [[]], True)
    reasons, nodes = proof_analysis.find_reasons(proof_log)
    self.assertEqual(nodes, [1, 2, 3, 5])
    self.assertEqual(reasons, [(1, 0, [2]), (2, 0, [3]), (3, 1, [5]),
                               (5, 0, [])])

  def test_extract_proof(self):
    # extract_proof keeps only the four nodes on the winning path and
    # preserves their goal conclusions (original indices 1, 2, 3, 5).
    proof_log = proof_test_util.new_log(num_proofs=1)
    proof_test_util.add_node(proof_log, [], True)
    proof_test_util.add_node(proof_log, [[2]], True, True)
    proof_test_util.add_node(proof_log, [[3]], True)
    proof_test_util.add_node(proof_log, [[2], [5]], True)
    proof_test_util.add_node(proof_log, [[]], True)
    proof_test_util.add_node(proof_log, [[]], True)
    output_log = proof_analysis.extract_proof(proof_log)
    self.assertEqual(len(output_log.nodes), 4)
    for i, j in enumerate([1, 2, 3, 5]):
      self.assertEqual(output_log.nodes[i].goal.conclusion,
                       proof_log.nodes[j].goal.conclusion)
# Allow running this test file directly; delegates to the TensorFlow runner.
if __name__ == '__main__':
  tf.test.main()
| 44.255172
| 70
| 0.693315
| 994
| 6,417
| 4.087525
| 0.073441
| 0.145705
| 0.188777
| 0.169333
| 0.886537
| 0.861679
| 0.830667
| 0.8095
| 0.801624
| 0.798425
| 0
| 0.02635
| 0.154278
| 6,417
| 144
| 71
| 44.5625
| 0.722314
| 0.006545
| 0
| 0.598361
| 0
| 0
| 0.001256
| 0
| 0
| 0
| 0
| 0
| 0.204918
| 1
| 0.122951
| false
| 0
| 0.04918
| 0
| 0.180328
| 0.008197
| 0
| 0
| 0
| null | 0
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
72b27a086871314b0a500c6c08b536d79648c912
| 4,336
|
py
|
Python
|
Accounts/migrations/0002_auto_20210712_2009.py
|
Larry-Manuel/SMA-TEAM
|
14c50bfce4327bb70812341b094c0001b90843f6
|
[
"MIT"
] | null | null | null |
Accounts/migrations/0002_auto_20210712_2009.py
|
Larry-Manuel/SMA-TEAM
|
14c50bfce4327bb70812341b094c0001b90843f6
|
[
"MIT"
] | null | null | null |
Accounts/migrations/0002_auto_20210712_2009.py
|
Larry-Manuel/SMA-TEAM
|
14c50bfce4327bb70812341b094c0001b90843f6
|
[
"MIT"
] | null | null | null |
# Generated by Django 3.2.5 on 2021-07-12 20:09
from django.conf import settings
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
    """Auto-generated: adjust nullability/defaults across Accounts models."""

    dependencies = [
        ('Accounts', '0001_initial'),
    ]

    # (model_name, field_name, replacement field) triples; each one becomes
    # an AlterField operation below.  Kept as data to make the per-field
    # differences easy to scan.
    _FIELD_CHANGES = [
        ('passwordreset', 'expires_at',
         models.DateTimeField(blank=True, default=None, null=True)),
        ('passwordreset', 'token',
         models.CharField(blank=True, default=None, max_length=50,
                          null=True, unique=True)),
        ('passwordreset', 'token_used',
         models.BooleanField(blank=True, default=False, null=True)),
        ('passwordreset', 'user_id',
         models.ForeignKey(blank=True, default=None, null=True,
                           on_delete=django.db.models.deletion.CASCADE,
                           to=settings.AUTH_USER_MODEL)),
        ('user', 'account_status',
         models.CharField(blank=True,
                          choices=[('Locked', 'Locked'),
                                   ('Unlocked', 'Unlocked')],
                          default='Unlocked', max_length=10, null=True)),
        ('user', 'bio',
         models.TextField(blank=True, default=None, null=True)),
        ('user', 'email',
         models.EmailField(blank=True, default=None, max_length=255,
                           null=True, unique=True)),
        ('user', 'first_name',
         models.CharField(blank=True, default=None, max_length=255,
                          null=True)),
        ('user', 'followers_count',
         models.IntegerField(blank=True, default=0, null=True)),
        ('user', 'follows_count',
         models.IntegerField(blank=True, default=0, null=True)),
        ('user', 'last_name',
         models.CharField(blank=True, default=None, max_length=255,
                          null=True)),
        ('user', 'occupation',
         models.CharField(blank=True, default=None, max_length=255,
                          null=True)),
        ('user', 'password',
         models.CharField(max_length=128, verbose_name='password')),
        ('user', 'username',
         models.CharField(blank=True, default=None, max_length=255,
                          null=True, unique=True)),
        ('user', 'verified',
         models.BooleanField(blank=True, default=False, null=True)),
        ('usersocial', 'facebook_link',
         models.CharField(blank=True, default=None, max_length=255,
                          null=True)),
        ('usersocial', 'instagram_link',
         models.CharField(blank=True, default=None, max_length=255,
                          null=True)),
        ('usersocial', 'linkedin_link',
         models.CharField(blank=True, default=None, max_length=255,
                          null=True)),
        ('usersocial', 'twitter_link',
         models.CharField(blank=True, default=None, max_length=255,
                          null=True)),
        ('usersocial', 'user_id',
         models.OneToOneField(blank=True, default=None, null=True,
                              on_delete=django.db.models.deletion.CASCADE,
                              to=settings.AUTH_USER_MODEL)),
    ]

    operations = [
        migrations.AlterField(model_name=model_name, name=field_name,
                              field=field)
        for (model_name, field_name, field) in _FIELD_CHANGES
    ]
| 37.37931
| 151
| 0.585332
| 438
| 4,336
| 5.673516
| 0.19863
| 0.160966
| 0.201207
| 0.2334
| 0.788732
| 0.76338
| 0.715091
| 0.715091
| 0.644668
| 0.62495
| 0
| 0.017886
| 0.290821
| 4,336
| 115
| 152
| 37.704348
| 0.790244
| 0.010378
| 0
| 0.669725
| 1
| 0
| 0.094194
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0.055046
| 0.027523
| 0
| 0.055046
| 0
| 0
| 0
| 0
| null | 0
| 1
| 1
| 0
| 1
| 1
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
|
0
| 8
|
72d7d20650df98172186f8f2f011c97d8e35bcca
| 18,328
|
py
|
Python
|
tests/test_get_agents_decision_trees_bulk.py
|
craft-ai/craft-ai-client-python
|
3d8b3d9a49c0c70964deaeb9645130dd54f9a0b3
|
[
"BSD-3-Clause"
] | 14
|
2016-08-26T07:06:57.000Z
|
2020-09-22T07:41:21.000Z
|
tests/test_get_agents_decision_trees_bulk.py
|
craft-ai/craft-ai-client-python
|
3d8b3d9a49c0c70964deaeb9645130dd54f9a0b3
|
[
"BSD-3-Clause"
] | 94
|
2016-08-02T14:07:59.000Z
|
2021-10-06T11:50:52.000Z
|
tests/test_get_agents_decision_trees_bulk.py
|
craft-ai/craft-ai-client-python
|
3d8b3d9a49c0c70964deaeb9645130dd54f9a0b3
|
[
"BSD-3-Clause"
] | 8
|
2017-02-07T12:05:57.000Z
|
2021-10-14T09:45:30.000Z
|
import unittest
import semver
from craft_ai import Client, errors as craft_err
from craft_ai.constants import DEFAULT_DECISION_TREE_VERSION
from . import settings
from .utils import generate_entity_id
from .data import valid_data, invalid_data
# Number of decision trees requested per bulk call in the tests below.
NB_DECISION_TREES_TO_GET = 3
# Base agent-id strings; test classes append a suffix (e.g. "Success") via
# generate_entity_id to build unique agent identifiers.
AGENT_ID_1_BASE = "get_dt_bulk_1"
AGENT_ID_2_BASE = "get_dt_bulk_2"
class TestGetDecisionTreesBulkSuccess(unittest.TestCase):
"""Checks that the client succeeds when getting
an/multiple decision tree(s) with OK input"""
@classmethod
def setUpClass(cls):
cls.client = Client(settings.CRAFT_CFG)
cls.agent_id1 = generate_entity_id(AGENT_ID_1_BASE + "Success")
cls.agent_id2 = generate_entity_id(AGENT_ID_2_BASE + "Success")
@classmethod
def tearDown(cls):
cls.client.delete_agent(cls.agent_id1)
cls.client.delete_agent(cls.agent_id2)
def setUp(self):
self.client.delete_agent(self.agent_id1)
self.client.create_agent(valid_data.VALID_CONFIGURATION, self.agent_id1)
self.client.add_agent_operations(
self.agent_id1, valid_data.VALID_OPERATIONS_SET
)
self.client.delete_agent(self.agent_id2)
self.client.create_agent(valid_data.VALID_CONFIGURATION, self.agent_id2)
self.client.add_agent_operations(
self.agent_id2, valid_data.VALID_OPERATIONS_SET
)
def clean_up_agent(self, aid):
# Makes sure that no agent with the standard ID remains
self.client.delete_agent(aid)
def clean_up_agents(self, aids):
# Makes sure that no agent with the standard ID remains
for aid in aids:
self.clean_up_agent(aid)
def test_get_one_decision_tree_with_correct_input(self):
"""get_agents_decision_trees_bulk should succeed when given a correct input.
It should give a proper JSON response with a list containing a dict
with `id` field being string and 'tree' field being a dict. As we don't
specify the version the field 'tree''_version' should be the one by default.
"""
payload = [{"id": self.agent_id1, "timestamp": valid_data.VALID_LAST_TIMESTAMP}]
decision_trees = self.client.get_agents_decision_trees_bulk(payload)
self.assertIsInstance(decision_trees, list)
self.assertIsInstance(decision_trees[0], dict)
self.assertIsInstance(decision_trees[0].get("tree"), dict)
self.assertNotEqual(decision_trees[0].get("tree").get("_version"), None)
tree_version = semver.VersionInfo.parse(
decision_trees[0].get("tree").get("_version")
).to_dict()
self.assertEqual(tree_version["major"], int(DEFAULT_DECISION_TREE_VERSION))
self.assertNotEqual(decision_trees[0].get("tree").get("configuration"), None)
self.assertNotEqual(decision_trees[0].get("tree").get("trees"), None)
self.addCleanup(self.clean_up_agents, [self.agent_id1, self.agent_id2])
def test_get_all_decision_trees_with_correct_input(self):
    """get_agents_decision_trees_bulk succeeds on multiple valid requests.

    Each entry of the response must be a dict with the matching 'id' and
    a 'tree' dict; since no version is requested, each tree's '_version'
    major must match the default decision tree version.
    """
    request = [
        {"id": self.agent_id1, "timestamp": valid_data.VALID_LAST_TIMESTAMP},
        {"id": self.agent_id2, "timestamp": valid_data.VALID_LAST_TIMESTAMP},
    ]
    decision_trees = self.client.get_agents_decision_trees_bulk(request)
    self.assertIsInstance(decision_trees, list)
    # Same checks for both entries, in the original order.
    for position, expected_id in ((0, self.agent_id1), (1, self.agent_id2)):
        entry = decision_trees[position]
        self.assertIsInstance(entry, dict)
        self.assertEqual(entry.get("id"), expected_id)
        tree = entry.get("tree")
        self.assertIsInstance(tree, dict)
        self.assertNotEqual(tree.get("_version"), None)
        parsed_version = semver.VersionInfo.parse(
            tree.get("_version")
        ).to_dict()
        self.assertEqual(
            parsed_version["major"], int(DEFAULT_DECISION_TREE_VERSION)
        )
        self.assertNotEqual(tree.get("configuration"), None)
        self.assertNotEqual(tree.get("trees"), None)
    self.addCleanup(self.clean_up_agents, [self.agent_id1, self.agent_id2])
def test_get_decision_trees_bulk_specific_version(self):
    """get_agents_decision_trees_bulk succeeds when asking for version 1.

    Each returned tree's '_version' major must equal the requested
    version.
    """
    request = [
        {"id": self.agent_id1, "timestamp": valid_data.VALID_LAST_TIMESTAMP},
        {"id": self.agent_id2, "timestamp": valid_data.VALID_LAST_TIMESTAMP},
    ]
    requested_version = 1
    decision_trees = self.client.get_agents_decision_trees_bulk(
        request, requested_version
    )
    # Explicit indexing keeps the original failure mode on short responses.
    for entry in (decision_trees[0], decision_trees[1]):
        tree = entry.get("tree")
        self.assertNotEqual(tree.get("_version"), None)
        parsed_version = semver.VersionInfo.parse(
            tree.get("_version")
        ).to_dict()
        self.assertEqual(parsed_version["major"], requested_version)
    self.addCleanup(self.clean_up_agents, [self.agent_id1, self.agent_id2])
def test_get_decision_trees_bulk_specific_version2(self):
    """get_agents_decision_trees_bulk succeeds when asking for version 2.

    Each returned tree's '_version' major must equal the requested
    version.
    """
    request = [
        {"id": self.agent_id1, "timestamp": valid_data.VALID_LAST_TIMESTAMP},
        {"id": self.agent_id2, "timestamp": valid_data.VALID_LAST_TIMESTAMP},
    ]
    requested_version = 2
    decision_trees = self.client.get_agents_decision_trees_bulk(
        request, requested_version
    )
    # Explicit indexing keeps the original failure mode on short responses.
    for entry in (decision_trees[0], decision_trees[1]):
        tree = entry.get("tree")
        self.assertNotEqual(tree.get("_version"), None)
        parsed_version = semver.VersionInfo.parse(
            tree.get("_version")
        ).to_dict()
        self.assertEqual(parsed_version["major"], requested_version)
    self.addCleanup(self.clean_up_agents, [self.agent_id1, self.agent_id2])
def test_get_decision_trees_bulk_without_timestamp(self):
    """get_agents_decision_trees_bulk succeeds when no timestamp is given.

    A request without timestamps must yield the same tree as an explicit
    request at the timestamp of the last operation.
    """
    bare_request = [{"id": self.agent_id1}, {"id": self.agent_id2}]
    decision_trees = self.client.get_agents_decision_trees_bulk(bare_request)
    timestamped_request = [
        {"id": self.agent_id1, "timestamp": valid_data.VALID_LAST_TIMESTAMP},
        {"id": self.agent_id2, "timestamp": valid_data.VALID_LAST_TIMESTAMP},
    ]
    expected_trees = self.client.get_agents_decision_trees_bulk(
        timestamped_request
    )
    self.assertEqual(
        decision_trees[0].get("tree"), expected_trees[0].get("tree")
    )
    self.addCleanup(self.clean_up_agents, [self.agent_id1, self.agent_id2])
class TestGetGroupDecisionTreesBulkSuccess(unittest.TestCase):
    """Checks that the client succeeds when getting
    an/multiple decision tree(s) with OK input."""

    @classmethod
    def setUpClass(cls):
        # One client is enough (the original created it twice).
        cls.client = Client(settings.CRAFT_CFG)
        cls.agents = []

    @classmethod
    def tearDownClass(cls):
        # Best-effort cleanup: skip agents already removed via addCleanup.
        for agent_id in cls.agents:
            try:
                cls.client.delete_agent(agent_id)
            except craft_err.CraftAiError:
                continue

    def setUp(self):
        """Create a fresh batch of agents with the standard operation set."""
        # NOTE: self.agents resolves to the class-level list, so IDs from
        # previous tests accumulate; tearDownClass tolerates already-deleted
        # ones above.
        for _ in range(NB_DECISION_TREES_TO_GET):
            self.agents.append(generate_entity_id(AGENT_ID_1_BASE + "GroupSucc"))
        # Makes sure that no agent with the same ID already exists
        for agent_id in self.agents:
            self.client.delete_agent(agent_id)
            self.client.create_agent(valid_data.VALID_CONFIGURATION, agent_id)
            self.client.add_agent_operations(agent_id, valid_data.VALID_OPERATIONS_SET)

    def clean_up_agent(self, aid):
        # Makes sure that no agent with the standard ID remains
        self.client.delete_agent(aid)

    def clean_up_agents(self, aids):
        # Makes sure that no agent with the standard ID remains
        for aid in aids:
            self.clean_up_agent(aid)

    def test_get_group_decision_trees(self):
        """get_agents_decision_trees_bulk should succeed when given a lot of
        decision trees to retrieve.

        It should give a proper JSON response with a list containing dicts
        with `id` field being string and 'tree' field being a dict.
        """
        payload = [
            {"id": agent_id, "timestamp": valid_data.VALID_LAST_TIMESTAMP}
            for agent_id in self.agents
        ]
        decision_trees = self.client.get_agents_decision_trees_bulk(payload)
        for decision_tree in decision_trees:
            self.assertIsInstance(decision_tree, dict)
            self.assertIsInstance(decision_tree.get("tree"), dict)
            self.assertFalse("error" in decision_tree)
        self.addCleanup(self.clean_up_agents, self.agents)
class TestGetDecisionTreesBulkFailure(unittest.TestCase):
    """Checks that the client fails when getting
    an/multiple decision tree(s) with bad input."""

    @classmethod
    def setUpClass(cls):
        cls.client = Client(settings.CRAFT_CFG)

    def setUp(self):
        self.agent_name = generate_entity_id(AGENT_ID_1_BASE + "Failure")

    def clean_up_agent(self, aid):
        # Makes sure that no agent with the standard ID remains
        self.client.delete_agent(aid)

    def clean_up_agents(self, aids):
        # Makes sure that no agent with the standard ID remains
        for aid in aids:
            self.clean_up_agent(aid)

    def test_get_all_decision_trees_with_invalid_id(self):
        """get_agents_decision_trees_bulk should fail when given
        non-string/empty string ID or unknown ID.

        It should raise an error upon request for retrieval of multiple
        agents' decision tree with an ID that is not of type string, since
        agent IDs should always be strings.
        """
        # Add an unknown id and a dictionary without an id field
        payload = [
            {
                "id": invalid_data.UNKNOWN_ID,
                "timestamp": valid_data.VALID_LAST_TIMESTAMP,
            },
            {"timestamp": valid_data.VALID_TIMESTAMP},
        ]
        # Add all the invalid ids to check
        for empty_id in invalid_data.UNDEFINED_KEY:
            payload.append(
                {
                    "id": invalid_data.UNDEFINED_KEY[empty_id],
                    "timestamp": valid_data.VALID_LAST_TIMESTAMP,
                }
            )
        self.assertRaises(
            craft_err.CraftAiBadRequestError,
            self.client.get_agents_decision_trees_bulk,
            payload,
        )

    def test_get_all_decision_trees_invalid_timestamp(self):
        """get_agents_decision_trees_bulk should fail when given invalid
        timestamps.

        It should raise an error upon request for retrieval of multiple
        agents' decision tree with an invalid timestamp, since timestamp
        should always be a positive integer.
        """
        payload = []
        agents_lst = []
        # Add all the invalid timestamps to check.  Iterate the mapping's
        # keys directly (the enumerate() index in the original was unused).
        for timestamp_key in invalid_data.INVALID_TIMESTAMPS:
            new_agent_id = generate_entity_id(
                "test_get_all_decision_trees_invalid_timestamp"
            )
            self.client.delete_agent(new_agent_id)
            self.client.create_agent(valid_data.VALID_CONFIGURATION, new_agent_id)
            self.client.add_agent_operations(
                new_agent_id, valid_data.VALID_OPERATIONS_SET
            )
            payload.append(
                {
                    "id": new_agent_id,
                    "timestamp": invalid_data.INVALID_TIMESTAMPS[timestamp_key],
                }
            )
            agents_lst.append(new_agent_id)
        self.assertRaises(
            craft_err.CraftAiBadRequestError,
            self.client.get_agents_decision_trees_bulk,
            payload,
        )
        self.addCleanup(self.clean_up_agents, agents_lst)
class TestGetDecisionTreesBulkSomeFailure(unittest.TestCase):
    """Checks that the client succeeds when getting an/multiple agent(s)
    with bad input and an/multiple agent(s) with valid input."""

    @classmethod
    def setUpClass(cls):
        cls.client = Client(settings.CRAFT_CFG)
        cls.agent_id = generate_entity_id(AGENT_ID_1_BASE + "SomeFailure")

    @classmethod
    def tearDownClass(cls):
        # Best-effort cleanup: the agent may already have been deleted.
        try:
            cls.client.delete_agent(cls.agent_id)
        except craft_err.CraftAiError:
            return

    def setUp(self):
        # Makes sure that no agent with the same ID already exists
        self.client.delete_agent(self.agent_id)
        self.client.create_agent(valid_data.VALID_CONFIGURATION, self.agent_id)
        self.client.add_agent_operations(self.agent_id, valid_data.VALID_OPERATIONS_SET)

    def clean_up_agent(self, aid):
        # Makes sure that no agent with the standard ID remains
        self.client.delete_agent(aid)

    def clean_up_agents(self, aids):
        # Makes sure that no agent with the standard ID remains
        for aid in aids:
            self.clean_up_agent(aid)

    def test_get_some_decision_trees_with_invalid_id(self):
        """get_agents_decision_trees_bulk with a mix of valid and
        non-string/empty-string/unknown IDs.

        NOTE(review): the original docstring promised per-item `error`
        fields, but the code asserts the whole request is rejected with
        CraftAiBadRequestError; the assertion below keeps that behavior.
        """
        # Add valid id and timestamp
        payload = [{"id": self.agent_id, "timestamp": valid_data.VALID_LAST_TIMESTAMP}]
        # Add an unknown id and a dictionary without an id field.
        # Fix: the original called payload.append() on a list, nesting the
        # two dicts as one element; extend() adds them individually as the
        # sibling test (test_get_all_decision_trees_with_invalid_id) does.
        payload.extend(
            [
                {
                    "id": invalid_data.UNKNOWN_ID,
                    "timestamp": valid_data.VALID_LAST_TIMESTAMP,
                },
                {"timestamp": valid_data.VALID_TIMESTAMP},
            ]
        )
        # Add all the invalid ids to check
        for empty_id in invalid_data.UNDEFINED_KEY:
            payload.append(
                {
                    "id": invalid_data.UNDEFINED_KEY[empty_id],
                    "timestamp": valid_data.VALID_LAST_TIMESTAMP,
                }
            )
        self.assertRaises(
            craft_err.CraftAiBadRequestError,
            self.client.get_agents_decision_trees_bulk,
            payload,
        )
        self.addCleanup(self.clean_up_agents, [self.agent_id])

    def test_get_all_decision_trees_invalid_timestamp(self):
        """get_agents_decision_trees_bulk with a mix of valid and invalid
        timestamps; the invalid entries make the request fail as a whole."""
        # Add valid id and timestamp
        payload = [{"id": self.agent_id, "timestamp": valid_data.VALID_LAST_TIMESTAMP}]
        agents_lst = [self.agent_id]
        # Add all the invalid timestamps to check (unused enumerate dropped).
        for timestamp_key in invalid_data.INVALID_TIMESTAMPS:
            new_agent_id = generate_entity_id(
                "test_get_all_decision_trees_invalid_timestamp"
            )
            self.client.delete_agent(new_agent_id)
            self.client.create_agent(valid_data.VALID_CONFIGURATION, new_agent_id)
            self.client.add_agent_operations(
                new_agent_id, valid_data.VALID_OPERATIONS_SET
            )
            payload.append(
                {
                    "id": new_agent_id,
                    "timestamp": invalid_data.INVALID_TIMESTAMPS[timestamp_key],
                }
            )
            agents_lst.append(new_agent_id)
        self.assertRaises(
            craft_err.CraftAiBadRequestError,
            self.client.get_agents_decision_trees_bulk,
            payload,
        )
        self.addCleanup(self.clean_up_agents, agents_lst)
| 40.370044
| 100
| 0.659046
| 2,308
| 18,328
| 4.979203
| 0.080589
| 0.082579
| 0.036547
| 0.040202
| 0.871911
| 0.844588
| 0.809433
| 0.778803
| 0.770275
| 0.765141
| 0
| 0.005707
| 0.254256
| 18,328
| 453
| 101
| 40.459161
| 0.835089
| 0.226921
| 0
| 0.62069
| 1
| 0
| 0.050486
| 0.006576
| 0
| 0
| 0
| 0
| 0.131034
| 1
| 0.1
| false
| 0
| 0.024138
| 0
| 0.141379
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
72e279eca010fb91248ac9c4c52e786ed68f89c5
| 4,036
|
py
|
Python
|
role_call_lib/delbertina.py
|
MagnaRisa/BeginnerIsland
|
8cac024965c6b23690a5a0cd344d44f3876762a8
|
[
"MIT"
] | null | null | null |
role_call_lib/delbertina.py
|
MagnaRisa/BeginnerIsland
|
8cac024965c6b23690a5a0cd344d44f3876762a8
|
[
"MIT"
] | null | null | null |
role_call_lib/delbertina.py
|
MagnaRisa/BeginnerIsland
|
8cac024965c6b23690a5a0cd344d44f3876762a8
|
[
"MIT"
] | 2
|
2021-01-24T03:55:31.000Z
|
2021-01-24T12:18:43.000Z
|
def write_name_delbertina(input_turtle):
# d
input_turtle.right(90)
input_turtle.forward(50)
input_turtle.left(90)
input_turtle.pendown()
input_turtle.forward(30)
input_turtle.left(90)
input_turtle.forward(50)
input_turtle.left(180)
input_turtle.forward(30)
input_turtle.right(90)
input_turtle.forward(30)
input_turtle.left(90)
input_turtle.forward(20)
input_turtle.left(90)
input_turtle.forward(30)
input_turtle.penup()
input_turtle.forward(10)
# e
input_turtle.left(90)
input_turtle.pendown()
input_turtle.forward(20)
input_turtle.right(90)
input_turtle.forward(30)
input_turtle.right(90)
input_turtle.forward(10)
input_turtle.right(90)
input_turtle.forward(30)
input_turtle.left(90)
input_turtle.forward(10)
input_turtle.left(90)
input_turtle.forward(30)
input_turtle.penup()
input_turtle.forward(10)
# l
input_turtle.pendown()
input_turtle.left(90)
input_turtle.forward(40)
input_turtle.left(180)
input_turtle.forward(40)
input_turtle.left(90)
input_turtle.forward(20)
input_turtle.penup()
input_turtle.forward(10)
# b
input_turtle.pendown()
input_turtle.left(90)
input_turtle.forward(50)
input_turtle.left(180)
input_turtle.forward(30)
input_turtle.left(90)
input_turtle.forward(30)
input_turtle.right(90)
input_turtle.forward(20)
input_turtle.right(90)
input_turtle.forward(30)
input_turtle.right(180)
input_turtle.forward(30)
input_turtle.penup()
input_turtle.forward(10)
# e
input_turtle.left(90)
input_turtle.pendown()
input_turtle.forward(20)
input_turtle.right(90)
input_turtle.forward(30)
input_turtle.right(90)
input_turtle.forward(10)
input_turtle.right(90)
input_turtle.forward(30)
input_turtle.left(90)
input_turtle.forward(10)
input_turtle.left(90)
input_turtle.forward(30)
input_turtle.penup()
input_turtle.forward(10)
# r
input_turtle.pendown()
input_turtle.left(90)
input_turtle.forward(30)
input_turtle.left(180)
input_turtle.forward(10)
input_turtle.left(90)
input_turtle.forward(20)
input_turtle.penup()
input_turtle.right(90)
input_turtle.forward(20)
input_turtle.left(90)
input_turtle.forward(10)
# t
input_turtle.pendown()
input_turtle.left(90)
input_turtle.forward(30)
input_turtle.left(180)
input_turtle.forward(10)
input_turtle.left(90)
input_turtle.forward(10)
input_turtle.left(180)
input_turtle.forward(10)
input_turtle.left(90)
input_turtle.forward(20)
input_turtle.left(90)
input_turtle.forward(20)
input_turtle.penup()
input_turtle.forward(10)
# i
input_turtle.pendown()
input_turtle.forward(10)
input_turtle.left(90)
input_turtle.forward(20)
input_turtle.penup()
input_turtle.forward(10)
input_turtle.pendown()
input_turtle.forward(10)
input_turtle.left(180)
input_turtle.penup()
input_turtle.forward(20)
input_turtle.pendown()
input_turtle.forward(20)
input_turtle.left(90)
input_turtle.forward(10)
input_turtle.penup()
input_turtle.forward(10)
# n
input_turtle.pendown()
input_turtle.left(90)
input_turtle.forward(20)
input_turtle.right(90)
input_turtle.forward(30)
input_turtle.right(90)
input_turtle.forward(20)
input_turtle.left(90)
input_turtle.penup()
input_turtle.forward(10)
# a
input_turtle.pendown()
input_turtle.left(90)
input_turtle.forward(20)
input_turtle.right(90)
input_turtle.forward(30)
input_turtle.left(90)
input_turtle.forward(10)
input_turtle.left(90)
input_turtle.forward(30)
input_turtle.left(180)
input_turtle.forward(30)
input_turtle.right(90)
input_turtle.forward(30)
input_turtle.right(90)
input_turtle.forward(30)
input_turtle.left(180)
input_turtle.forward(30)
| 26.207792
| 40
| 0.69995
| 561
| 4,036
| 4.777184
| 0.048128
| 0.58694
| 0.436567
| 0.298507
| 0.984701
| 0.984701
| 0.975373
| 0.934701
| 0.926493
| 0.919776
| 0
| 0.07598
| 0.191278
| 4,036
| 153
| 41
| 26.379085
| 0.745098
| 0.004708
| 0
| 0.986014
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.006993
| false
| 0
| 0
| 0
| 0.006993
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 10
|
f4239d30de046f46dd2d9ceed067a7768d897f03
| 202,537
|
py
|
Python
|
nidaqmx/_task_modules/ai_channel_collection.py
|
zhindes/nidaqmx-python
|
b756fbd7f0c0f7deadb468d77ceacb03ed467885
|
[
"MIT"
] | null | null | null |
nidaqmx/_task_modules/ai_channel_collection.py
|
zhindes/nidaqmx-python
|
b756fbd7f0c0f7deadb468d77ceacb03ed467885
|
[
"MIT"
] | null | null | null |
nidaqmx/_task_modules/ai_channel_collection.py
|
zhindes/nidaqmx-python
|
b756fbd7f0c0f7deadb468d77ceacb03ed467885
|
[
"MIT"
] | null | null | null |
# Do not edit this file; it was automatically generated.
import ctypes
import numpy
from nidaqmx._lib import (
lib_importer, wrapped_ndpointer, ctypes_byte_str, c_bool32)
from nidaqmx.errors import check_for_error
from nidaqmx._task_modules.channels.ai_channel import AIChannel
from nidaqmx._task_modules.channel_collection import ChannelCollection
from nidaqmx.utils import unflatten_channel_string
from nidaqmx.constants import (
ACExcitWireMode, AccelChargeSensitivityUnits, AccelSensitivityUnits,
AccelUnits, AngleUnits, BridgeConfiguration, BridgeElectricalUnits,
BridgePhysicalUnits, BridgeUnits, CJCSource, ChargeUnits,
CurrentShuntResistorLocation, CurrentUnits,
EddyCurrentProxProbeSensitivityUnits, ExcitationSource,
ForceIEPESensorSensitivityUnits, ForceUnits, FrequencyUnits,
LVDTSensitivityUnits, LengthUnits, PressureUnits, RTDType,
RVDTSensitivityUnits, ResistanceConfiguration, ResistanceUnits,
SoundPressureUnits, StrainGageBridgeType, StrainGageRosetteType,
StrainUnits, TEDSUnits, TemperatureUnits, TerminalConfiguration,
ThermocoupleType, TorqueUnits, VelocityIEPESensorSensitivityUnits,
VelocityUnits, VoltageUnits)
class AIChannelCollection(ChannelCollection):
"""
Contains the collection of analog input channels for a DAQmx Task.
"""
def __init__(self, task_handle):
    # Delegate to the base ChannelCollection initializer with the DAQmx
    # task handle this collection operates on.
    super(AIChannelCollection, self).__init__(task_handle)
def _create_chan(self, physical_channel, name_to_assign_to_channel=''):
    """
    Creates and returns an AIChannel object.

    Args:
        physical_channel (str): Specifies the names of the physical
            channels to use to create virtual channels.
        name_to_assign_to_channel (Optional[str]): Specifies a name to
            assign to the virtual channel this method creates.
    Returns:
        nidaqmx._task_modules.channels.ai_channel.AIChannel:
            Specifies the newly created AIChannel object.
    """
    # Default: the virtual channel keeps the physical channel name.
    name = physical_channel
    if name_to_assign_to_channel:
        name = name_to_assign_to_channel
        num_channels = len(unflatten_channel_string(physical_channel))
        if num_channels > 1:
            # Multiple channels: express the name as a "base0:N-1" range.
            name = '{0}0:{1}'.format(
                name_to_assign_to_channel, num_channels - 1)
    return AIChannel(self._handle, name)
def add_ai_accel_4_wire_dc_voltage_chan(
        self, physical_channel, name_to_assign_to_channel="",
        terminal_config=TerminalConfiguration.DEFAULT, min_val=-5.0,
        max_val=5.0, units=AccelUnits.G, sensitivity=1000.0,
        sensitivity_units=AccelSensitivityUnits.MILLIVOLTS_PER_G,
        voltage_excit_source=ExcitationSource.INTERNAL,
        voltage_excit_val=0.0, use_excit_for_scaling=False,
        custom_scale_name=""):
    """
    Creates channel(s) to measure acceleration. Use this instance
    for custom sensors that require excitation. You can use the
    excitation to scale the measurement.

    Args:
        physical_channel (str): Specifies the names of the physical
            channels to use to create virtual channels. The DAQmx
            physical channel constant lists all physical channels on
            devices and modules installed in the system.
        name_to_assign_to_channel (Optional[str]): Specifies a name
            to assign to the virtual channel this function creates.
            If you do not specify a value for this input, NI-DAQmx
            uses the physical channel name as the virtual channel
            name.
        terminal_config (Optional[nidaqmx.constants.TerminalConfiguration]):
            Specifies the input terminal configuration for the
            channel.
        min_val (Optional[float]): Specifies in **units** the
            minimum value you expect to measure.
        max_val (Optional[float]): Specifies in **units** the
            maximum value you expect to measure.
        units (Optional[nidaqmx.constants.AccelUnits]): Specifies
            the units to use to return acceleration measurements
            from the channel.
        sensitivity (Optional[float]): Is the sensitivity of the
            sensor. This value is in the units you specify with the
            **sensitivity_units** input. Refer to the sensor
            documentation to determine this value.
        sensitivity_units (Optional[nidaqmx.constants.AccelSensitivityUnits]):
            Specifies the units of the **sensitivity** input.
        voltage_excit_source (Optional[nidaqmx.constants.ExcitationSource]):
            Specifies the source of excitation.
        voltage_excit_val (Optional[float]): Specifies in volts the
            amount of excitation supplied to the sensor. Refer to
            the sensor documentation to determine appropriate
            excitation values.
        use_excit_for_scaling (Optional[bool]): Specifies if NI-
            DAQmx divides the measurement by the excitation. You
            should typically set **use_excit_for_scaling** to True
            for ratiometric transducers. If you set
            **use_excit_for_scaling** to True, set **max_val** and
            **min_val** to reflect the scaling.
        custom_scale_name (Optional[str]): Specifies the name of a
            custom scale for the channel. If you want the channel to
            use a custom scale, specify the name of the custom scale
            to this input and set **units** to
            **FROM_CUSTOM_SCALE**.
    Returns:
        nidaqmx._task_modules.channels.ai_channel.AIChannel:
            Indicates the newly created channel object.
    """
    cfunc = lib_importer.windll.DAQmxCreateAIAccel4WireDCVoltageChan
    if cfunc.argtypes is None:
        # Lazily initialize the C function's argument signature exactly
        # once: check, acquire the per-function lock, then re-check so a
        # concurrent caller cannot initialize it twice.
        with cfunc.arglock:
            if cfunc.argtypes is None:
                cfunc.argtypes = [
                    lib_importer.task_handle, ctypes_byte_str,
                    ctypes_byte_str, ctypes.c_int, ctypes.c_double,
                    ctypes.c_double, ctypes.c_int, ctypes.c_double,
                    ctypes.c_int, ctypes.c_int, ctypes.c_double, c_bool32,
                    ctypes_byte_str]
    error_code = cfunc(
        self._handle, physical_channel, name_to_assign_to_channel,
        terminal_config.value, min_val, max_val, units.value, sensitivity,
        sensitivity_units.value, voltage_excit_source.value,
        voltage_excit_val, use_excit_for_scaling, custom_scale_name)
    # Raises a DAQmx exception if the driver returned a failure code.
    check_for_error(error_code)
    return self._create_chan(physical_channel, name_to_assign_to_channel)
def add_ai_accel_chan(
        self, physical_channel, name_to_assign_to_channel="",
        terminal_config=TerminalConfiguration.DEFAULT, min_val=-5.0,
        max_val=5.0, units=AccelUnits.G, sensitivity=1000.0,
        sensitivity_units=AccelSensitivityUnits.MILLIVOLTS_PER_G,
        current_excit_source=ExcitationSource.INTERNAL,
        current_excit_val=0.004, custom_scale_name=""):
    """
    Creates channel(s) that use an accelerometer to measure
    acceleration.

    Args:
        physical_channel (str): Specifies the names of the physical
            channels to use to create virtual channels. The DAQmx
            physical channel constant lists all physical channels on
            devices and modules installed in the system.
        name_to_assign_to_channel (Optional[str]): Specifies a name
            to assign to the virtual channel this function creates.
            If you do not specify a value for this input, NI-DAQmx
            uses the physical channel name as the virtual channel
            name.
        terminal_config (Optional[nidaqmx.constants.TerminalConfiguration]):
            Specifies the input terminal configuration for the
            channel.
        min_val (Optional[float]): Specifies in **units** the
            minimum value you expect to measure.
        max_val (Optional[float]): Specifies in **units** the
            maximum value you expect to measure.
        units (Optional[nidaqmx.constants.AccelUnits]): Specifies
            the units to use to return acceleration measurements
            from the channel.
        sensitivity (Optional[float]): Is the sensitivity of the
            sensor. This value is in the units you specify with the
            **sensitivity_units** input. Refer to the sensor
            documentation to determine this value.
        sensitivity_units (Optional[nidaqmx.constants.AccelSensitivityUnits]):
            Specifies the units of the **sensitivity** input.
        current_excit_source (Optional[nidaqmx.constants.ExcitationSource]):
            Specifies the source of excitation.
        current_excit_val (Optional[float]): Specifies in amperes
            the amount of excitation to supply to the sensor. Refer
            to the sensor documentation to determine this value.
        custom_scale_name (Optional[str]): Specifies the name of a
            custom scale for the channel. If you want the channel to
            use a custom scale, specify the name of the custom scale
            to this input and set **units** to
            **FROM_CUSTOM_SCALE**.
    Returns:
        nidaqmx._task_modules.channels.ai_channel.AIChannel:
            Indicates the newly created channel object.
    """
    cfunc = lib_importer.windll.DAQmxCreateAIAccelChan
    if cfunc.argtypes is None:
        # Lazily initialize the C function's argument signature exactly
        # once: check, acquire the per-function lock, then re-check so a
        # concurrent caller cannot initialize it twice.
        with cfunc.arglock:
            if cfunc.argtypes is None:
                cfunc.argtypes = [
                    lib_importer.task_handle, ctypes_byte_str,
                    ctypes_byte_str, ctypes.c_int, ctypes.c_double,
                    ctypes.c_double, ctypes.c_int, ctypes.c_double,
                    ctypes.c_int, ctypes.c_int, ctypes.c_double,
                    ctypes_byte_str]
    error_code = cfunc(
        self._handle, physical_channel, name_to_assign_to_channel,
        terminal_config.value, min_val, max_val, units.value, sensitivity,
        sensitivity_units.value, current_excit_source.value,
        current_excit_val, custom_scale_name)
    # Raises a DAQmx exception if the driver returned a failure code.
    check_for_error(error_code)
    return self._create_chan(physical_channel, name_to_assign_to_channel)
def add_ai_accel_charge_chan(
        self, physical_channel, name_to_assign_to_channel="",
        terminal_config=TerminalConfiguration.DEFAULT, min_val=-5.0,
        max_val=5.0, units=AccelUnits.G, sensitivity=100.0,
        sensitivity_units=AccelChargeSensitivityUnits.PICO_COULOMBS_PER_G,
        custom_scale_name=""):
    """
    Creates channel(s) that use a charge-based sensor to measure
    acceleration.

    Args:
        physical_channel (str): Specifies the names of the physical
            channels to use to create virtual channels. The DAQmx
            physical channel constant lists all physical channels on
            devices and modules installed in the system.
        name_to_assign_to_channel (Optional[str]): Specifies a name
            to assign to the virtual channel this function creates.
            If you do not specify a value for this input, NI-DAQmx
            uses the physical channel name as the virtual channel
            name.
        terminal_config (Optional[nidaqmx.constants.TerminalConfiguration]):
            Specifies the input terminal configuration for the
            channel.
        min_val (Optional[float]): Specifies in **units** the
            minimum value you expect to measure.
        max_val (Optional[float]): Specifies in **units** the
            maximum value you expect to measure.
        units (Optional[nidaqmx.constants.AccelUnits]): Specifies
            the units to use to return acceleration measurements
            from the channel.
        sensitivity (Optional[float]): Is the sensitivity of the
            sensor. This value is in the units you specify with the
            **sensitivity_units** input. Refer to the sensor
            documentation to determine this value.
        sensitivity_units (Optional[nidaqmx.constants.AccelChargeSensitivityUnits]):
            Specifies the units of the **sensitivity** input.
        custom_scale_name (Optional[str]): Specifies the name of a
            custom scale for the channel. If you want the channel to
            use a custom scale, specify the name of the custom scale
            to this input and set **units** to
            **FROM_CUSTOM_SCALE**.
    Returns:
        nidaqmx._task_modules.channels.ai_channel.AIChannel:
            Indicates the newly created channel object.
    """
    cfunc = lib_importer.windll.DAQmxCreateAIAccelChargeChan
    if cfunc.argtypes is None:
        # Lazily initialize the C function's argument signature exactly
        # once: check, acquire the per-function lock, then re-check so a
        # concurrent caller cannot initialize it twice.
        with cfunc.arglock:
            if cfunc.argtypes is None:
                cfunc.argtypes = [
                    lib_importer.task_handle, ctypes_byte_str,
                    ctypes_byte_str, ctypes.c_int, ctypes.c_double,
                    ctypes.c_double, ctypes.c_int, ctypes.c_double,
                    ctypes.c_int, ctypes_byte_str]
    error_code = cfunc(
        self._handle, physical_channel, name_to_assign_to_channel,
        terminal_config.value, min_val, max_val, units.value, sensitivity,
        sensitivity_units.value, custom_scale_name)
    # Raises a DAQmx exception if the driver returned a failure code.
    check_for_error(error_code)
    return self._create_chan(physical_channel, name_to_assign_to_channel)
def add_ai_bridge_chan(
        self, physical_channel, name_to_assign_to_channel="",
        min_val=-0.002, max_val=0.002, units=BridgeUnits.VOLTS_PER_VOLT,
        bridge_config=BridgeConfiguration.FULL_BRIDGE,
        voltage_excit_source=ExcitationSource.INTERNAL,
        voltage_excit_val=2.5, nominal_bridge_resistance=350.0,
        custom_scale_name=""):
    """
    Creates channel(s) that measure voltage ratios from a Wheatstone
    bridge. Use this instance with bridge-based sensors that measure
    phenomena other than strain, force, pressure, or torque, or that
    scale data to physical units NI-DAQmx does not support.

    Args:
        physical_channel (str): Specifies the names of the physical
            channels to use to create virtual channels. The DAQmx
            physical channel constant lists all physical channels on
            devices and modules installed in the system.
        name_to_assign_to_channel (Optional[str]): Specifies a name
            to assign to the virtual channel this function creates.
            If you do not specify a value for this input, NI-DAQmx
            uses the physical channel name as the virtual channel
            name.
        min_val (Optional[float]): Specifies in **units** the
            minimum value you expect to measure.
        max_val (Optional[float]): Specifies in **units** the
            maximum value you expect to measure.
        units (Optional[nidaqmx.constants.BridgeUnits]): Specifies
            in which unit to return voltage ratios from the channel.
        bridge_config (Optional[nidaqmx.constants.BridgeConfiguration]):
            Specifies information about the bridge configuration and
            measurement.
        voltage_excit_source (Optional[nidaqmx.constants.ExcitationSource]):
            Specifies the source of excitation.
        voltage_excit_val (Optional[float]): Specifies in volts the
            amount of excitation supplied to the sensor.
        nominal_bridge_resistance (Optional[float]): Specifies
            information about the bridge configuration and
            measurement.
        custom_scale_name (Optional[str]): Specifies the name of a
            custom scale for the channel. If you want the channel to
            use a custom scale, specify the name of the custom scale
            to this input and set **units** to
            **FROM_CUSTOM_SCALE**.
    Returns:
        nidaqmx._task_modules.channels.ai_channel.AIChannel:
            Indicates the newly created channel object.
    """
    cfunc = lib_importer.windll.DAQmxCreateAIBridgeChan
    if cfunc.argtypes is None:
        # Lazily initialize the C function's argument signature exactly
        # once: check, acquire the per-function lock, then re-check so a
        # concurrent caller cannot initialize it twice.
        with cfunc.arglock:
            if cfunc.argtypes is None:
                cfunc.argtypes = [
                    lib_importer.task_handle, ctypes_byte_str,
                    ctypes_byte_str, ctypes.c_double, ctypes.c_double,
                    ctypes.c_int, ctypes.c_int, ctypes.c_int,
                    ctypes.c_double, ctypes.c_double, ctypes_byte_str]
    error_code = cfunc(
        self._handle, physical_channel, name_to_assign_to_channel,
        min_val, max_val, units.value, bridge_config.value,
        voltage_excit_source.value, voltage_excit_val,
        nominal_bridge_resistance, custom_scale_name)
    # Raises a DAQmx exception if the driver returned a failure code.
    check_for_error(error_code)
    return self._create_chan(physical_channel, name_to_assign_to_channel)
def add_ai_charge_chan(
self, physical_channel, name_to_assign_to_channel="",
terminal_config=TerminalConfiguration.DEFAULT,
min_val=-0.000000001, max_val=0.000000001,
units=ChargeUnits.COULOMBS, custom_scale_name=""):
"""
Creates channel(s) that use a sensor with charge output.
Args:
physical_channel (str): Specifies the names of the physical
channels to use to create virtual channels. The DAQmx
physical channel constant lists all physical channels on
devices and modules installed in the system.
name_to_assign_to_channel (Optional[str]): Specifies a name
to assign to the virtual channel this function creates.
If you do not specify a value for this input, NI-DAQmx
uses the physical channel name as the virtual channel
name.
terminal_config (Optional[nidaqmx.constants.TerminalConfiguration]):
Specifies the input terminal configuration for the
channel.
min_val (Optional[float]): Specifies in **units** the
minimum value you expect to measure.
max_val (Optional[float]): Specifies in **units** the
maximum value you expect to measure.
units (Optional[nidaqmx.constants.ChargeUnits]): Specifies
the units to use to return charge measurements from the
channel.
custom_scale_name (Optional[str]): Specifies the name of a
custom scale for the channel. If you want the channel to
use a custom scale, specify the name of the custom scale
to this input and set **units** to
**FROM_CUSTOM_SCALE**.
Returns:
nidaqmx._task_modules.channels.ai_channel.AIChannel:
Indicates the newly created channel object.
"""
cfunc = lib_importer.windll.DAQmxCreateAIChargeChan
if cfunc.argtypes is None:
with cfunc.arglock:
if cfunc.argtypes is None:
cfunc.argtypes = [
lib_importer.task_handle, ctypes_byte_str,
ctypes_byte_str, ctypes.c_int, ctypes.c_double,
ctypes.c_double, ctypes.c_int, ctypes_byte_str]
error_code = cfunc(
self._handle, physical_channel, name_to_assign_to_channel,
terminal_config.value, min_val, max_val, units.value,
custom_scale_name)
check_for_error(error_code)
return self._create_chan(physical_channel, name_to_assign_to_channel)
def add_ai_current_chan(
self, physical_channel, name_to_assign_to_channel="",
terminal_config=TerminalConfiguration.DEFAULT, min_val=-0.01,
max_val=0.01, units=CurrentUnits.AMPS,
shunt_resistor_loc=CurrentShuntResistorLocation.LET_DRIVER_CHOOSE,
ext_shunt_resistor_val=249.0, custom_scale_name=""):
"""
Creates channel(s) to measure current.
Args:
physical_channel (str): Specifies the names of the physical
channels to use to create virtual channels. The DAQmx
physical channel constant lists all physical channels on
devices and modules installed in the system.
name_to_assign_to_channel (Optional[str]): Specifies a name
to assign to the virtual channel this function creates.
If you do not specify a value for this input, NI-DAQmx
uses the physical channel name as the virtual channel
name.
terminal_config (Optional[nidaqmx.constants.TerminalConfiguration]):
Specifies the input terminal configuration for the
channel.
min_val (Optional[float]): Specifies in **units** the
minimum value you expect to measure.
max_val (Optional[float]): Specifies in **units** the
maximum value you expect to measure.
units (Optional[nidaqmx.constants.CurrentUnits]): Specifies
the units to use to return current measurements.
shunt_resistor_loc (Optional[nidaqmx.constants.CurrentShuntResistorLocation]):
Specifies the location of the shunt resistor. For
devices with built-in shunt resistors, specify the
location as **INTERNAL**. For devices that do not have
built-in shunt resistors, you must attach an external
one, set this input to **EXTERNAL** and use the
**ext_shunt_resistor_val** input to specify the value of
the resistor.
ext_shunt_resistor_val (Optional[float]): Specifies in ohms
the resistance of an external shunt resistor.
custom_scale_name (Optional[str]): Specifies the name of a
custom scale for the channel. If you want the channel to
use a custom scale, specify the name of the custom scale
to this input and set **units** to
**FROM_CUSTOM_SCALE**.
Returns:
nidaqmx._task_modules.channels.ai_channel.AIChannel:
Indicates the newly created channel object.
"""
cfunc = lib_importer.windll.DAQmxCreateAICurrentChan
if cfunc.argtypes is None:
with cfunc.arglock:
if cfunc.argtypes is None:
cfunc.argtypes = [
lib_importer.task_handle, ctypes_byte_str,
ctypes_byte_str, ctypes.c_int, ctypes.c_double,
ctypes.c_double, ctypes.c_int, ctypes.c_int,
ctypes.c_double, ctypes_byte_str]
error_code = cfunc(
self._handle, physical_channel, name_to_assign_to_channel,
terminal_config.value, min_val, max_val, units.value,
shunt_resistor_loc.value, ext_shunt_resistor_val,
custom_scale_name)
check_for_error(error_code)
return self._create_chan(physical_channel, name_to_assign_to_channel)
def add_ai_current_rms_chan(
self, physical_channel, name_to_assign_to_channel="",
terminal_config=TerminalConfiguration.DEFAULT, min_val=-0.01,
max_val=0.01, units=CurrentUnits.AMPS,
shunt_resistor_loc=CurrentShuntResistorLocation.LET_DRIVER_CHOOSE,
ext_shunt_resistor_val=249.0, custom_scale_name=""):
"""
Creates a channel to measure current RMS, the average (mean)
power of the acquired current.
Args:
physical_channel (str): Specifies the names of the physical
channels to use to create virtual channels. The DAQmx
physical channel constant lists all physical channels on
devices and modules installed in the system.
name_to_assign_to_channel (Optional[str]): Specifies a name
to assign to the virtual channel this function creates.
If you do not specify a value for this input, NI-DAQmx
uses the physical channel name as the virtual channel
name.
terminal_config (Optional[nidaqmx.constants.TerminalConfiguration]):
Specifies the input terminal configuration for the
channel.
min_val (Optional[float]): Specifies in **units** the
minimum value you expect to measure.
max_val (Optional[float]): Specifies in **units** the
maximum value you expect to measure.
units (Optional[nidaqmx.constants.CurrentUnits]): Specifies
the units to use to return current measurements.
shunt_resistor_loc (Optional[nidaqmx.constants.CurrentShuntResistorLocation]):
Specifies the location of the shunt resistor. For
devices with built-in shunt resistors, specify the
location as **INTERNAL**. For devices that do not have
built-in shunt resistors, you must attach an external
one, set this input to **EXTERNAL** and use the
**ext_shunt_resistor_val** input to specify the value of
the resistor.
ext_shunt_resistor_val (Optional[float]): Specifies in ohms
the resistance of an external shunt resistor.
custom_scale_name (Optional[str]): Specifies the name of a
custom scale for the channel. If you want the channel to
use a custom scale, specify the name of the custom scale
to this input and set **units** to
**FROM_CUSTOM_SCALE**.
Returns:
nidaqmx._task_modules.channels.ai_channel.AIChannel:
Indicates the newly created channel object.
"""
cfunc = lib_importer.windll.DAQmxCreateAICurrentRMSChan
if cfunc.argtypes is None:
with cfunc.arglock:
if cfunc.argtypes is None:
cfunc.argtypes = [
lib_importer.task_handle, ctypes_byte_str,
ctypes_byte_str, ctypes.c_int, ctypes.c_double,
ctypes.c_double, ctypes.c_int, ctypes.c_int,
ctypes.c_double, ctypes_byte_str]
error_code = cfunc(
self._handle, physical_channel, name_to_assign_to_channel,
terminal_config.value, min_val, max_val, units.value,
shunt_resistor_loc.value, ext_shunt_resistor_val,
custom_scale_name)
check_for_error(error_code)
return self._create_chan(physical_channel, name_to_assign_to_channel)
def add_ai_force_bridge_polynomial_chan(
self, physical_channel, name_to_assign_to_channel="",
min_val=-100.0, max_val=100.0, units=ForceUnits.POUNDS,
bridge_config=BridgeConfiguration.FULL_BRIDGE,
voltage_excit_source=ExcitationSource.INTERNAL,
voltage_excit_val=2.5, nominal_bridge_resistance=350.0,
forward_coeffs=None, reverse_coeffs=None,
electrical_units=BridgeElectricalUnits.MILLIVOLTS_PER_VOLT,
physical_units=BridgePhysicalUnits.POUNDS, custom_scale_name=""):
"""
Creates channel(s) that use a Wheatstone bridge to measure force
or load. Use this instance with sensors whose specifications
provide a polynomial to convert electrical values to physical
values. When you use this scaling type, NI-DAQmx requires
coefficients for a polynomial that converts electrical values to
physical values (forward), as well as coefficients for a
polynomial that converts physical values to electrical values
(reverse). If you only know one set of coefficients, use the
DAQmx Compute Reverse Polynomial Coefficients function to
generate the other set.
Args:
physical_channel (str): Specifies the names of the physical
channels to use to create virtual channels. The DAQmx
physical channel constant lists all physical channels on
devices and modules installed in the system.
name_to_assign_to_channel (Optional[str]): Specifies a name
to assign to the virtual channel this function creates.
If you do not specify a value for this input, NI-DAQmx
uses the physical channel name as the virtual channel
name.
min_val (Optional[float]): Specifies in **units** the
minimum value you expect to measure.
max_val (Optional[float]): Specifies in **units** the
maximum value you expect to measure.
units (Optional[nidaqmx.constants.ForceUnits]): Specifies in
which unit to return force measurements from the
channel.
bridge_config (Optional[nidaqmx.constants.BridgeConfiguration]):
Specifies information about the bridge configuration and
measurement.
voltage_excit_source (Optional[nidaqmx.constants.ExcitationSource]):
Specifies information about the bridge configuration and
measurement.
voltage_excit_val (Optional[float]): Specifies information
about the bridge configuration and measurement.
nominal_bridge_resistance (Optional[float]): Specifies
information about the bridge configuration and
measurement.
forward_coeffs (Optional[List[float]]): Specifies how to
scale electrical values from the sensor to physical
units.
reverse_coeffs (Optional[List[float]]): Specifies how to
scale electrical values from the sensor to physical
units.
electrical_units (Optional[nidaqmx.constants.BridgeElectricalUnits]):
Specifies how to scale electrical values from the sensor
to physical units.
physical_units (Optional[nidaqmx.constants.BridgePhysicalUnits]):
Specifies how to scale electrical values from the sensor
to physical units.
custom_scale_name (Optional[str]): Specifies the name of a
custom scale for the channel. If you want the channel to
use a custom scale, specify the name of the custom scale
to this input and set **units** to
**FROM_CUSTOM_SCALE**.
Returns:
nidaqmx._task_modules.channels.ai_channel.AIChannel:
Indicates the newly created channel object.
"""
if forward_coeffs is None:
forward_coeffs = []
if reverse_coeffs is None:
reverse_coeffs = []
forward_coeffs = numpy.float64(forward_coeffs)
reverse_coeffs = numpy.float64(reverse_coeffs)
cfunc = lib_importer.windll.DAQmxCreateAIForceBridgePolynomialChan
if cfunc.argtypes is None:
with cfunc.arglock:
if cfunc.argtypes is None:
cfunc.argtypes = [
lib_importer.task_handle, ctypes_byte_str,
ctypes_byte_str, ctypes.c_double, ctypes.c_double,
ctypes.c_int, ctypes.c_int, ctypes.c_int,
ctypes.c_double, ctypes.c_double,
wrapped_ndpointer(dtype=numpy.float64,
flags=('C','W')), ctypes.c_uint,
wrapped_ndpointer(dtype=numpy.float64,
flags=('C','W')), ctypes.c_uint, ctypes.c_int,
ctypes.c_int, ctypes_byte_str]
error_code = cfunc(
self._handle, physical_channel, name_to_assign_to_channel,
min_val, max_val, units.value, bridge_config.value,
voltage_excit_source.value, voltage_excit_val,
nominal_bridge_resistance, forward_coeffs, len(forward_coeffs),
reverse_coeffs, len(reverse_coeffs), electrical_units.value,
physical_units.value, custom_scale_name)
check_for_error(error_code)
return self._create_chan(physical_channel, name_to_assign_to_channel)
def add_ai_force_bridge_table_chan(
self, physical_channel, name_to_assign_to_channel="",
min_val=-100.0, max_val=100.0, units=ForceUnits.POUNDS,
bridge_config=BridgeConfiguration.FULL_BRIDGE,
voltage_excit_source=ExcitationSource.INTERNAL,
voltage_excit_val=2.5, nominal_bridge_resistance=350.0,
electrical_vals=None,
electrical_units=BridgeElectricalUnits.MILLIVOLTS_PER_VOLT,
physical_vals=None, physical_units=BridgePhysicalUnits.POUNDS,
custom_scale_name=""):
"""
Creates channel(s) that use a Wheatstone bridge to measure force
or load. Use this instance with sensors whose specifications
provide a table of electrical values and the corresponding
physical values. When you use this scaling type, NI-DAQmx
performs linear scaling between each pair of electrical and
physical values. The input limits specified with **min_val** and
**max_val** must fall within the smallest and largest physical
values. For any data outside those endpoints, NI-DAQmx coerces
that data to the endpoints.
Args:
physical_channel (str): Specifies the names of the physical
channels to use to create virtual channels. The DAQmx
physical channel constant lists all physical channels on
devices and modules installed in the system.
name_to_assign_to_channel (Optional[str]): Specifies a name
to assign to the virtual channel this function creates.
If you do not specify a value for this input, NI-DAQmx
uses the physical channel name as the virtual channel
name.
min_val (Optional[float]): Specifies in **units** the
minimum value you expect to measure.
max_val (Optional[float]): Specifies in **units** the
maximum value you expect to measure.
units (Optional[nidaqmx.constants.ForceUnits]): Specifies in
which unit to return force measurements from the
channel.
bridge_config (Optional[nidaqmx.constants.BridgeConfiguration]):
Specifies information about the bridge configuration and
measurement.
voltage_excit_source (Optional[nidaqmx.constants.ExcitationSource]):
Specifies information about the bridge configuration and
measurement.
voltage_excit_val (Optional[float]): Specifies information
about the bridge configuration and measurement.
nominal_bridge_resistance (Optional[float]): Specifies
information about the bridge configuration and
measurement.
electrical_vals (Optional[List[float]]): Specifies how to
scale electrical values from the sensor to physical
units.
electrical_units (Optional[nidaqmx.constants.BridgeElectricalUnits]):
Specifies how to scale electrical values from the sensor
to physical units.
physical_vals (Optional[List[float]]): Specifies how to
scale electrical values from the sensor to physical
units.
physical_units (Optional[nidaqmx.constants.BridgePhysicalUnits]):
Specifies how to scale electrical values from the sensor
to physical units.
custom_scale_name (Optional[str]): Specifies the name of a
custom scale for the channel. If you want the channel to
use a custom scale, specify the name of the custom scale
to this input and set **units** to
**FROM_CUSTOM_SCALE**.
Returns:
nidaqmx._task_modules.channels.ai_channel.AIChannel:
Indicates the newly created channel object.
"""
if electrical_vals is None:
electrical_vals = []
if physical_vals is None:
physical_vals = []
electrical_vals = numpy.float64(electrical_vals)
physical_vals = numpy.float64(physical_vals)
cfunc = lib_importer.windll.DAQmxCreateAIForceBridgeTableChan
if cfunc.argtypes is None:
with cfunc.arglock:
if cfunc.argtypes is None:
cfunc.argtypes = [
lib_importer.task_handle, ctypes_byte_str,
ctypes_byte_str, ctypes.c_double, ctypes.c_double,
ctypes.c_int, ctypes.c_int, ctypes.c_int,
ctypes.c_double, ctypes.c_double,
wrapped_ndpointer(dtype=numpy.float64,
flags=('C','W')), ctypes.c_uint, ctypes.c_int,
wrapped_ndpointer(dtype=numpy.float64,
flags=('C','W')), ctypes.c_uint, ctypes.c_int,
ctypes_byte_str]
error_code = cfunc(
self._handle, physical_channel, name_to_assign_to_channel,
min_val, max_val, units.value, bridge_config.value,
voltage_excit_source.value, voltage_excit_val,
nominal_bridge_resistance, electrical_vals, len(electrical_vals),
electrical_units.value, physical_vals, len(physical_vals),
physical_units.value, custom_scale_name)
check_for_error(error_code)
return self._create_chan(physical_channel, name_to_assign_to_channel)
def add_ai_force_bridge_two_point_lin_chan(
self, physical_channel, name_to_assign_to_channel="",
min_val=-100.0, max_val=100.0, units=ForceUnits.POUNDS,
bridge_config=BridgeConfiguration.FULL_BRIDGE,
voltage_excit_source=ExcitationSource.INTERNAL,
voltage_excit_val=2.5, nominal_bridge_resistance=350.0,
first_electrical_val=0.0, second_electrical_val=2.0,
electrical_units=BridgeElectricalUnits.MILLIVOLTS_PER_VOLT,
first_physical_val=0.0, second_physical_val=100.0,
physical_units=BridgePhysicalUnits.POUNDS, custom_scale_name=""):
"""
Creates channel(s) that use a Wheatstone bridge to measure force
or load. Use this instance with sensors whose specifications do
not provide a polynomial for scaling or a table of electrical
and physical values. When you use this scaling type, NI-DAQmx
uses two points of electrical and physical values to calculate
the slope and y-intercept of a linear equation and uses that
equation to scale electrical values to physical values.
Args:
physical_channel (str): Specifies the names of the physical
channels to use to create virtual channels. The DAQmx
physical channel constant lists all physical channels on
devices and modules installed in the system.
name_to_assign_to_channel (Optional[str]): Specifies a name
to assign to the virtual channel this function creates.
If you do not specify a value for this input, NI-DAQmx
uses the physical channel name as the virtual channel
name.
min_val (Optional[float]): Specifies in **units** the
minimum value you expect to measure.
max_val (Optional[float]): Specifies in **units** the
maximum value you expect to measure.
units (Optional[nidaqmx.constants.ForceUnits]): Specifies in
which unit to return force measurements from the
channel.
bridge_config (Optional[nidaqmx.constants.BridgeConfiguration]):
Specifies information about the bridge configuration and
measurement.
voltage_excit_source (Optional[nidaqmx.constants.ExcitationSource]):
Specifies information about the bridge configuration and
measurement.
voltage_excit_val (Optional[float]): Specifies information
about the bridge configuration and measurement.
nominal_bridge_resistance (Optional[float]): Specifies
information about the bridge configuration and
measurement.
first_electrical_val (Optional[float]): Specifies how to
scale electrical values from the sensor to physical
units.
second_electrical_val (Optional[float]): Specifies how to
scale electrical values from the sensor to physical
units.
electrical_units (Optional[nidaqmx.constants.BridgeElectricalUnits]):
Specifies how to scale electrical values from the sensor
to physical units.
first_physical_val (Optional[float]): Specifies how to scale
electrical values from the sensor to physical units.
second_physical_val (Optional[float]): Specifies how to
scale electrical values from the sensor to physical
units.
physical_units (Optional[nidaqmx.constants.BridgePhysicalUnits]):
Specifies how to scale electrical values from the sensor
to physical units.
custom_scale_name (Optional[str]): Specifies the name of a
custom scale for the channel. If you want the channel to
use a custom scale, specify the name of the custom scale
to this input and set **units** to
**FROM_CUSTOM_SCALE**.
Returns:
nidaqmx._task_modules.channels.ai_channel.AIChannel:
Indicates the newly created channel object.
"""
cfunc = lib_importer.windll.DAQmxCreateAIForceBridgeTwoPointLinChan
if cfunc.argtypes is None:
with cfunc.arglock:
if cfunc.argtypes is None:
cfunc.argtypes = [
lib_importer.task_handle, ctypes_byte_str,
ctypes_byte_str, ctypes.c_double, ctypes.c_double,
ctypes.c_int, ctypes.c_int, ctypes.c_int,
ctypes.c_double, ctypes.c_double, ctypes.c_double,
ctypes.c_double, ctypes.c_int, ctypes.c_double,
ctypes.c_double, ctypes.c_int, ctypes_byte_str]
error_code = cfunc(
self._handle, physical_channel, name_to_assign_to_channel,
min_val, max_val, units.value, bridge_config.value,
voltage_excit_source.value, voltage_excit_val,
nominal_bridge_resistance, first_electrical_val,
second_electrical_val, electrical_units.value, first_physical_val,
second_physical_val, physical_units.value, custom_scale_name)
check_for_error(error_code)
return self._create_chan(physical_channel, name_to_assign_to_channel)
def add_ai_force_iepe_chan(
self, physical_channel, name_to_assign_to_channel="",
terminal_config=TerminalConfiguration.DEFAULT, min_val=-2000.0,
max_val=2000.0, units=ForceUnits.NEWTONS, sensitivity=2.25,
sensitivity_units=ForceIEPESensorSensitivityUnits.MILLIVOLTS_PER_NEWTON,
current_excit_source=ExcitationSource.INTERNAL,
current_excit_val=0.004, custom_scale_name=""):
"""
Creates channel(s) that use an IEPE force sensor to measure
force or load.
Args:
physical_channel (str): Specifies the names of the physical
channels to use to create virtual channels. The DAQmx
physical channel constant lists all physical channels on
devices and modules installed in the system.
name_to_assign_to_channel (Optional[str]): Specifies a name
to assign to the virtual channel this function creates.
If you do not specify a value for this input, NI-DAQmx
uses the physical channel name as the virtual channel
name.
terminal_config (Optional[nidaqmx.constants.TerminalConfiguration]):
Specifies the input terminal configuration for the
channel.
min_val (Optional[float]): Specifies in **units** the
minimum value you expect to measure.
max_val (Optional[float]): Specifies in **units** the
maximum value you expect to measure.
units (Optional[nidaqmx.constants.ForceUnits]): Specifies in
which unit to return force measurements from the
channel.
sensitivity (Optional[float]): Is the sensitivity of the
sensor. This value is in the units you specify with the
**sensitivity_units** input. Refer to the sensor
documentation to determine this value.
sensitivity_units (Optional[nidaqmx.constants.ForceIEPESensorSensitivityUnits]):
Specifies the units of the **sensitivity** input.
current_excit_source (Optional[nidaqmx.constants.ExcitationSource]):
Specifies the source of excitation.
current_excit_val (Optional[float]): Specifies in amperes
the amount of excitation to supply to the sensor. Refer
to the sensor documentation to determine this value.
custom_scale_name (Optional[str]): Specifies the name of a
custom scale for the channel. If you want the channel to
use a custom scale, specify the name of the custom scale
to this input and set **units** to
**FROM_CUSTOM_SCALE**.
Returns:
nidaqmx._task_modules.channels.ai_channel.AIChannel:
Indicates the newly created channel object.
"""
cfunc = lib_importer.windll.DAQmxCreateAIForceIEPEChan
if cfunc.argtypes is None:
with cfunc.arglock:
if cfunc.argtypes is None:
cfunc.argtypes = [
lib_importer.task_handle, ctypes_byte_str,
ctypes_byte_str, ctypes.c_int, ctypes.c_double,
ctypes.c_double, ctypes.c_int, ctypes.c_double,
ctypes.c_int, ctypes.c_int, ctypes.c_double,
ctypes_byte_str]
error_code = cfunc(
self._handle, physical_channel, name_to_assign_to_channel,
terminal_config.value, min_val, max_val, units.value, sensitivity,
sensitivity_units.value, current_excit_source.value,
current_excit_val, custom_scale_name)
check_for_error(error_code)
return self._create_chan(physical_channel, name_to_assign_to_channel)
def add_ai_freq_voltage_chan(
self, physical_channel, name_to_assign_to_channel="", min_val=1,
max_val=100, units=FrequencyUnits.HZ, threshold_level=0.0,
hysteresis=0.0, custom_scale_name=""):
"""
Creates channel(s) that use a frequency-to-voltage converter to
measure frequency.
Args:
physical_channel (str): Specifies the names of the physical
channels to use to create virtual channels. The DAQmx
physical channel constant lists all physical channels on
devices and modules installed in the system.
name_to_assign_to_channel (Optional[str]): Specifies a name
to assign to the virtual channel this function creates.
If you do not specify a value for this input, NI-DAQmx
uses the physical channel name as the virtual channel
name.
min_val (Optional[float]): Specifies in **units** the
minimum value you expect to measure.
max_val (Optional[float]): Specifies in **units** the
maximum value you expect to measure.
units (Optional[nidaqmx.constants.FrequencyUnits]):
Specifies the units to use to return frequency
measurements.
threshold_level (Optional[float]): Specifies in volts the
level at which to recognize waveform repetitions. You
should select a voltage level that occurs only once
within the entire period of a waveform. You also can
select a voltage that occurs only once while the voltage
rises or falls.
hysteresis (Optional[float]): Specifies in volts a window
below **level**. The input voltage must pass below
**threshold_level** minus **hysteresis** before NI-DAQmx
recognizes a waveform repetition. Hysteresis can improve
measurement accuracy when the signal contains noise or
jitter.
custom_scale_name (Optional[str]): Specifies the name of a
custom scale for the channel. If you want the channel to
use a custom scale, specify the name of the custom scale
to this input and set **units** to
**FROM_CUSTOM_SCALE**.
Returns:
nidaqmx._task_modules.channels.ai_channel.AIChannel:
Indicates the newly created channel object.
"""
cfunc = lib_importer.windll.DAQmxCreateAIFreqVoltageChan
if cfunc.argtypes is None:
with cfunc.arglock:
if cfunc.argtypes is None:
cfunc.argtypes = [
lib_importer.task_handle, ctypes_byte_str,
ctypes_byte_str, ctypes.c_double, ctypes.c_double,
ctypes.c_int, ctypes.c_double, ctypes.c_double,
ctypes_byte_str]
error_code = cfunc(
self._handle, physical_channel, name_to_assign_to_channel,
min_val, max_val, units.value, threshold_level, hysteresis,
custom_scale_name)
check_for_error(error_code)
return self._create_chan(physical_channel, name_to_assign_to_channel)
def add_ai_microphone_chan(
self, physical_channel, name_to_assign_to_channel="",
terminal_config=TerminalConfiguration.DEFAULT,
units=SoundPressureUnits.PA, mic_sensitivity=10.0,
max_snd_press_level=100.0,
current_excit_source=ExcitationSource.INTERNAL,
current_excit_val=0.004, custom_scale_name=""):
"""
Creates channel(s) that use a microphone to measure sound
pressure.
Args:
physical_channel (str): Specifies the names of the physical
channels to use to create virtual channels. The DAQmx
physical channel constant lists all physical channels on
devices and modules installed in the system.
name_to_assign_to_channel (Optional[str]): Specifies a name
to assign to the virtual channel this function creates.
If you do not specify a value for this input, NI-DAQmx
uses the physical channel name as the virtual channel
name.
terminal_config (Optional[nidaqmx.constants.TerminalConfiguration]):
Specifies the input terminal configuration for the
channel.
units (Optional[nidaqmx.constants.SoundPressureUnits]):
Specifies the units to use to return sound pressure
measurements.
mic_sensitivity (Optional[float]): Is the sensitivity of the
microphone. Specify this value in mV/Pa.
max_snd_press_level (Optional[float]): Is the maximum
instantaneous sound pressure level you expect to
measure. This value is in decibels, referenced to 20
micropascals.
current_excit_source (Optional[nidaqmx.constants.ExcitationSource]):
Specifies the source of excitation.
current_excit_val (Optional[float]): Specifies in amperes
the amount of excitation to supply to the sensor. Refer
to the sensor documentation to determine this value.
custom_scale_name (Optional[str]): Specifies the name of a
custom scale for the channel. If you want the channel to
use a custom scale, specify the name of the custom scale
to this input and set **units** to
**FROM_CUSTOM_SCALE**.
Returns:
nidaqmx._task_modules.channels.ai_channel.AIChannel:
Indicates the newly created channel object.
"""
cfunc = lib_importer.windll.DAQmxCreateAIMicrophoneChan
if cfunc.argtypes is None:
with cfunc.arglock:
if cfunc.argtypes is None:
cfunc.argtypes = [
lib_importer.task_handle, ctypes_byte_str,
ctypes_byte_str, ctypes.c_int, ctypes.c_int,
ctypes.c_double, ctypes.c_double, ctypes.c_int,
ctypes.c_double, ctypes_byte_str]
error_code = cfunc(
self._handle, physical_channel, name_to_assign_to_channel,
terminal_config.value, units.value, mic_sensitivity,
max_snd_press_level, current_excit_source.value,
current_excit_val, custom_scale_name)
check_for_error(error_code)
return self._create_chan(physical_channel, name_to_assign_to_channel)
def add_ai_pos_eddy_curr_prox_probe_chan(
self, physical_channel, name_to_assign_to_channel="", min_val=0.0,
max_val=0.00254, units=LengthUnits.METERS, sensitivity=200.0,
sensitivity_units=EddyCurrentProxProbeSensitivityUnits.MILLIVOLTS_PER_MIL,
custom_scale_name=""):
"""
Creates channel(s) that use an eddy current proximity probe to
measure position.
Args:
physical_channel (str): Specifies the names of the physical
channels to use to create virtual channels. The DAQmx
physical channel constant lists all physical channels on
devices and modules installed in the system.
name_to_assign_to_channel (Optional[str]): Specifies a name
to assign to the virtual channel this function creates.
If you do not specify a value for this input, NI-DAQmx
uses the physical channel name as the virtual channel
name.
min_val (Optional[float]): Specifies in **units** the
minimum value you expect to measure.
max_val (Optional[float]): Specifies in **units** the
maximum value you expect to measure.
units (Optional[nidaqmx.constants.LengthUnits]): Specifies
the units to use to return position measurements from
the channel.
sensitivity (Optional[float]): Is the sensitivity of the
sensor. This value is in the units you specify with the
**sensitivity_units** input. Refer to the sensor
documentation to determine this value.
sensitivity_units (Optional[nidaqmx.constants.EddyCurrentProxProbeSensitivityUnits]):
Specifies the units of the **sensitivity** input.
custom_scale_name (Optional[str]): Specifies the name of a
custom scale for the channel. If you want the channel to
use a custom scale, specify the name of the custom scale
to this input and set **units** to
**FROM_CUSTOM_SCALE**.
Returns:
nidaqmx._task_modules.channels.ai_channel.AIChannel:
Indicates the newly created channel object.
"""
cfunc = lib_importer.windll.DAQmxCreateAIPosEddyCurrProxProbeChan
if cfunc.argtypes is None:
with cfunc.arglock:
if cfunc.argtypes is None:
cfunc.argtypes = [
lib_importer.task_handle, ctypes_byte_str,
ctypes_byte_str, ctypes.c_double, ctypes.c_double,
ctypes.c_int, ctypes.c_double, ctypes.c_int,
ctypes_byte_str]
error_code = cfunc(
self._handle, physical_channel, name_to_assign_to_channel,
min_val, max_val, units.value, sensitivity,
sensitivity_units.value, custom_scale_name)
check_for_error(error_code)
return self._create_chan(physical_channel, name_to_assign_to_channel)
def add_ai_pos_lvdt_chan(
self, physical_channel, name_to_assign_to_channel="",
min_val=-0.1, max_val=0.1, units=LengthUnits.METERS,
sensitivity=50.0,
sensitivity_units=LVDTSensitivityUnits.MILLIVOLTS_PER_VOLT_PER_MILLIMETER,
voltage_excit_source=ExcitationSource.INTERNAL,
voltage_excit_val=1.0, voltage_excit_freq=2500.0,
ac_excit_wire_mode=ACExcitWireMode.FOUR_WIRE,
custom_scale_name=""):
"""
Creates channel(s) that use an LVDT to measure linear position.
Args:
physical_channel (str): Specifies the names of the physical
channels to use to create virtual channels. The DAQmx
physical channel constant lists all physical channels on
devices and modules installed in the system.
name_to_assign_to_channel (Optional[str]): Specifies a name
to assign to the virtual channel this function creates.
If you do not specify a value for this input, NI-DAQmx
uses the physical channel name as the virtual channel
name.
min_val (Optional[float]): Specifies in **units** the
minimum value you expect to measure.
max_val (Optional[float]): Specifies in **units** the
maximum value you expect to measure.
units (Optional[nidaqmx.constants.LengthUnits]): Specifies
the units to use to return linear position measurements
from the channel.
sensitivity (Optional[float]): Is the sensitivity of the
sensor. This value is in the units you specify with the
**sensitivity_units** input. Refer to the sensor
documentation to determine this value.
sensitivity_units (Optional[nidaqmx.constants.LVDTSensitivityUnits]):
Specifies the units of the **sensitivity** input.
voltage_excit_source (Optional[nidaqmx.constants.ExcitationSource]):
Specifies the source of excitation.
voltage_excit_val (Optional[float]): Specifies in volts the
amount of excitation supplied to the sensor. Refer to
the sensor documentation to determine appropriate
excitation values.
voltage_excit_freq (Optional[float]): Specifies in hertz the
excitation frequency that the sensor requires. Refer to
the sensor documentation to determine this value.
ac_excit_wire_mode (Optional[nidaqmx.constants.ACExcitWireMode]):
Is the number of leads on the sensor. Some sensors
require you to tie leads together to create a four- or
five- wire sensor. Refer to the sensor documentation for
more information.
custom_scale_name (Optional[str]): Specifies the name of a
custom scale for the channel. If you want the channel to
use a custom scale, specify the name of the custom scale
to this input and set **units** to
**FROM_CUSTOM_SCALE**.
Returns:
nidaqmx._task_modules.channels.ai_channel.AIChannel:
Indicates the newly created channel object.
"""
cfunc = lib_importer.windll.DAQmxCreateAIPosLVDTChan
if cfunc.argtypes is None:
with cfunc.arglock:
if cfunc.argtypes is None:
cfunc.argtypes = [
lib_importer.task_handle, ctypes_byte_str,
ctypes_byte_str, ctypes.c_double, ctypes.c_double,
ctypes.c_int, ctypes.c_double, ctypes.c_int,
ctypes.c_int, ctypes.c_double, ctypes.c_double,
ctypes.c_int, ctypes_byte_str]
error_code = cfunc(
self._handle, physical_channel, name_to_assign_to_channel,
min_val, max_val, units.value, sensitivity,
sensitivity_units.value, voltage_excit_source.value,
voltage_excit_val, voltage_excit_freq, ac_excit_wire_mode.value,
custom_scale_name)
check_for_error(error_code)
return self._create_chan(physical_channel, name_to_assign_to_channel)
def add_ai_pos_rvdt_chan(
self, physical_channel, name_to_assign_to_channel="",
min_val=-70.0, max_val=70.0, units=AngleUnits.DEGREES,
sensitivity=50.0,
sensitivity_units=RVDTSensitivityUnits.MILLIVOLTS_PER_VOLT_PER_DEGREE,
voltage_excit_source=ExcitationSource.INTERNAL,
voltage_excit_val=1.0, voltage_excit_freq=2500.0,
ac_excit_wire_mode=ACExcitWireMode.FOUR_WIRE,
custom_scale_name=""):
"""
Creates channel(s) that use an RVDT to measure angular position.
Args:
physical_channel (str): Specifies the names of the physical
channels to use to create virtual channels. The DAQmx
physical channel constant lists all physical channels on
devices and modules installed in the system.
name_to_assign_to_channel (Optional[str]): Specifies a name
to assign to the virtual channel this function creates.
If you do not specify a value for this input, NI-DAQmx
uses the physical channel name as the virtual channel
name.
min_val (Optional[float]): Specifies in **units** the
minimum value you expect to measure.
max_val (Optional[float]): Specifies in **units** the
maximum value you expect to measure.
units (Optional[nidaqmx.constants.AngleUnits]): Specifies
the units to use to return angular position measurements
from the channel.
sensitivity (Optional[float]): Is the sensitivity of the
sensor. This value is in the units you specify with the
**sensitivity_units** input. Refer to the sensor
documentation to determine this value.
sensitivity_units (Optional[nidaqmx.constants.RVDTSensitivityUnits]):
Specifies the units of the **sensitivity** input.
voltage_excit_source (Optional[nidaqmx.constants.ExcitationSource]):
Specifies the source of excitation.
voltage_excit_val (Optional[float]): Specifies in volts the
amount of excitation supplied to the sensor. Refer to
the sensor documentation to determine appropriate
excitation values.
voltage_excit_freq (Optional[float]): Specifies in hertz the
excitation frequency that the sensor requires. Refer to
the sensor documentation to determine this value.
ac_excit_wire_mode (Optional[nidaqmx.constants.ACExcitWireMode]):
Is the number of leads on the sensor. Some sensors
require you to tie leads together to create a four- or
five- wire sensor. Refer to the sensor documentation for
more information.
custom_scale_name (Optional[str]): Specifies the name of a
custom scale for the channel. If you want the channel to
use a custom scale, specify the name of the custom scale
to this input and set **units** to
**FROM_CUSTOM_SCALE**.
Returns:
nidaqmx._task_modules.channels.ai_channel.AIChannel:
Indicates the newly created channel object.
"""
cfunc = lib_importer.windll.DAQmxCreateAIPosRVDTChan
if cfunc.argtypes is None:
with cfunc.arglock:
if cfunc.argtypes is None:
cfunc.argtypes = [
lib_importer.task_handle, ctypes_byte_str,
ctypes_byte_str, ctypes.c_double, ctypes.c_double,
ctypes.c_int, ctypes.c_double, ctypes.c_int,
ctypes.c_int, ctypes.c_double, ctypes.c_double,
ctypes.c_int, ctypes_byte_str]
error_code = cfunc(
self._handle, physical_channel, name_to_assign_to_channel,
min_val, max_val, units.value, sensitivity,
sensitivity_units.value, voltage_excit_source.value,
voltage_excit_val, voltage_excit_freq, ac_excit_wire_mode.value,
custom_scale_name)
check_for_error(error_code)
return self._create_chan(physical_channel, name_to_assign_to_channel)
def add_ai_pressure_bridge_polynomial_chan(
self, physical_channel, name_to_assign_to_channel="",
min_val=-100.0, max_val=100.0,
units=PressureUnits.POUNDS_PER_SQ_INCH,
bridge_config=BridgeConfiguration.FULL_BRIDGE,
voltage_excit_source=ExcitationSource.INTERNAL,
voltage_excit_val=2.5, nominal_bridge_resistance=350.0,
forward_coeffs=None, reverse_coeffs=None,
electrical_units=BridgeElectricalUnits.MILLIVOLTS_PER_VOLT,
physical_units=BridgePhysicalUnits.POUNDS_PER_SQ_INCH,
custom_scale_name=""):
"""
Creates channel(s) that use a Wheatstone bridge to measure
pressure. Use this instance with sensors whose specifications
provide a polynomial to convert electrical values to physical
values. When you use this scaling type, NI-DAQmx requires
coefficients for a polynomial that converts electrical values to
physical values (forward), as well as coefficients for a
polynomial that converts physical values to electrical values
(reverse). If you only know one set of coefficients, use the
DAQmx Compute Reverse Polynomial Coefficients function to
generate the other set.
Args:
physical_channel (str): Specifies the names of the physical
channels to use to create virtual channels. The DAQmx
physical channel constant lists all physical channels on
devices and modules installed in the system.
name_to_assign_to_channel (Optional[str]): Specifies a name
to assign to the virtual channel this function creates.
If you do not specify a value for this input, NI-DAQmx
uses the physical channel name as the virtual channel
name.
min_val (Optional[float]): Specifies in **units** the
minimum value you expect to measure.
max_val (Optional[float]): Specifies in **units** the
maximum value you expect to measure.
units (Optional[nidaqmx.constants.PressureUnits]): Specifies
in which unit to return pressure measurements from the
channel.
bridge_config (Optional[nidaqmx.constants.BridgeConfiguration]):
Specifies information about the bridge configuration and
measurement.
voltage_excit_source (Optional[nidaqmx.constants.ExcitationSource]):
Specifies information about the bridge configuration and
measurement.
voltage_excit_val (Optional[float]): Specifies information
about the bridge configuration and measurement.
nominal_bridge_resistance (Optional[float]): Specifies
information about the bridge configuration and
measurement.
forward_coeffs (Optional[List[float]]): Specifies how to
scale electrical values from the sensor to physical
units.
reverse_coeffs (Optional[List[float]]): Specifies how to
scale electrical values from the sensor to physical
units.
electrical_units (Optional[nidaqmx.constants.BridgeElectricalUnits]):
Specifies how to scale electrical values from the sensor
to physical units.
physical_units (Optional[nidaqmx.constants.BridgePhysicalUnits]):
Specifies how to scale electrical values from the sensor
to physical units.
custom_scale_name (Optional[str]): Specifies the name of a
custom scale for the channel. If you want the channel to
use a custom scale, specify the name of the custom scale
to this input and set **units** to
**FROM_CUSTOM_SCALE**.
Returns:
nidaqmx._task_modules.channels.ai_channel.AIChannel:
Indicates the newly created channel object.
"""
if forward_coeffs is None:
forward_coeffs = []
if reverse_coeffs is None:
reverse_coeffs = []
forward_coeffs = numpy.float64(forward_coeffs)
reverse_coeffs = numpy.float64(reverse_coeffs)
cfunc = lib_importer.windll.DAQmxCreateAIPressureBridgePolynomialChan
if cfunc.argtypes is None:
with cfunc.arglock:
if cfunc.argtypes is None:
cfunc.argtypes = [
lib_importer.task_handle, ctypes_byte_str,
ctypes_byte_str, ctypes.c_double, ctypes.c_double,
ctypes.c_int, ctypes.c_int, ctypes.c_int,
ctypes.c_double, ctypes.c_double,
wrapped_ndpointer(dtype=numpy.float64,
flags=('C','W')), ctypes.c_uint,
wrapped_ndpointer(dtype=numpy.float64,
flags=('C','W')), ctypes.c_uint, ctypes.c_int,
ctypes.c_int, ctypes_byte_str]
error_code = cfunc(
self._handle, physical_channel, name_to_assign_to_channel,
min_val, max_val, units.value, bridge_config.value,
voltage_excit_source.value, voltage_excit_val,
nominal_bridge_resistance, forward_coeffs, len(forward_coeffs),
reverse_coeffs, len(reverse_coeffs), electrical_units.value,
physical_units.value, custom_scale_name)
check_for_error(error_code)
return self._create_chan(physical_channel, name_to_assign_to_channel)
def add_ai_pressure_bridge_table_chan(
self, physical_channel, name_to_assign_to_channel="",
min_val=-100.0, max_val=100.0,
units=PressureUnits.POUNDS_PER_SQ_INCH,
bridge_config=BridgeConfiguration.FULL_BRIDGE,
voltage_excit_source=ExcitationSource.INTERNAL,
voltage_excit_val=2.5, nominal_bridge_resistance=350.0,
electrical_vals=None,
electrical_units=BridgeElectricalUnits.MILLIVOLTS_PER_VOLT,
physical_vals=None,
physical_units=BridgePhysicalUnits.POUNDS_PER_SQ_INCH,
custom_scale_name=""):
"""
Creates channel(s) that use a Wheatstone bridge to measure
pressure. Use this instance with sensors whose specifications
provide a table of electrical values and the corresponding
physical values. When you use this scaling type, NI-DAQmx
performs linear scaling between each pair of electrical and
physical values. The input limits specified with **min_val** and
**max_val** must fall within the smallest and largest physical
values. For any data outside those endpoints, NI-DAQmx coerces
that data to the endpoints.
Args:
physical_channel (str): Specifies the names of the physical
channels to use to create virtual channels. The DAQmx
physical channel constant lists all physical channels on
devices and modules installed in the system.
name_to_assign_to_channel (Optional[str]): Specifies a name
to assign to the virtual channel this function creates.
If you do not specify a value for this input, NI-DAQmx
uses the physical channel name as the virtual channel
name.
min_val (Optional[float]): Specifies in **units** the
minimum value you expect to measure.
max_val (Optional[float]): Specifies in **units** the
maximum value you expect to measure.
units (Optional[nidaqmx.constants.PressureUnits]): Specifies
in which unit to return pressure measurements from the
channel.
bridge_config (Optional[nidaqmx.constants.BridgeConfiguration]):
Specifies information about the bridge configuration and
measurement.
voltage_excit_source (Optional[nidaqmx.constants.ExcitationSource]):
Specifies information about the bridge configuration and
measurement.
voltage_excit_val (Optional[float]): Specifies information
about the bridge configuration and measurement.
nominal_bridge_resistance (Optional[float]): Specifies
information about the bridge configuration and
measurement.
electrical_vals (Optional[List[float]]): Specifies how to
scale electrical values from the sensor to physical
units.
electrical_units (Optional[nidaqmx.constants.BridgeElectricalUnits]):
Specifies how to scale electrical values from the sensor
to physical units.
physical_vals (Optional[List[float]]): Specifies how to
scale electrical values from the sensor to physical
units.
physical_units (Optional[nidaqmx.constants.BridgePhysicalUnits]):
Specifies how to scale electrical values from the sensor
to physical units.
custom_scale_name (Optional[str]): Specifies the name of a
custom scale for the channel. If you want the channel to
use a custom scale, specify the name of the custom scale
to this input and set **units** to
**FROM_CUSTOM_SCALE**.
Returns:
nidaqmx._task_modules.channels.ai_channel.AIChannel:
Indicates the newly created channel object.
"""
if electrical_vals is None:
electrical_vals = []
if physical_vals is None:
physical_vals = []
electrical_vals = numpy.float64(electrical_vals)
physical_vals = numpy.float64(physical_vals)
cfunc = lib_importer.windll.DAQmxCreateAIPressureBridgeTableChan
if cfunc.argtypes is None:
with cfunc.arglock:
if cfunc.argtypes is None:
cfunc.argtypes = [
lib_importer.task_handle, ctypes_byte_str,
ctypes_byte_str, ctypes.c_double, ctypes.c_double,
ctypes.c_int, ctypes.c_int, ctypes.c_int,
ctypes.c_double, ctypes.c_double,
wrapped_ndpointer(dtype=numpy.float64,
flags=('C','W')), ctypes.c_uint, ctypes.c_int,
wrapped_ndpointer(dtype=numpy.float64,
flags=('C','W')), ctypes.c_uint, ctypes.c_int,
ctypes_byte_str]
error_code = cfunc(
self._handle, physical_channel, name_to_assign_to_channel,
min_val, max_val, units.value, bridge_config.value,
voltage_excit_source.value, voltage_excit_val,
nominal_bridge_resistance, electrical_vals, len(electrical_vals),
electrical_units.value, physical_vals, len(physical_vals),
physical_units.value, custom_scale_name)
check_for_error(error_code)
return self._create_chan(physical_channel, name_to_assign_to_channel)
def add_ai_pressure_bridge_two_point_lin_chan(
self, physical_channel, name_to_assign_to_channel="",
min_val=-100.0, max_val=100.0,
units=PressureUnits.POUNDS_PER_SQ_INCH,
bridge_config=BridgeConfiguration.FULL_BRIDGE,
voltage_excit_source=ExcitationSource.INTERNAL,
voltage_excit_val=2.5, nominal_bridge_resistance=350.0,
first_electrical_val=0.0, second_electrical_val=2.0,
electrical_units=BridgeElectricalUnits.MILLIVOLTS_PER_VOLT,
first_physical_val=0.0, second_physical_val=100.0,
physical_units=BridgePhysicalUnits.POUNDS_PER_SQ_INCH,
custom_scale_name=""):
"""
Creates channel(s) that use a Wheatstone bridge to measure
pressure. Use this instance with sensors whose specifications do
not provide a polynomial for scaling or a table of electrical
and physical values. When you use this scaling type, NI-DAQmx
uses two points of electrical and physical values to calculate
the slope and y-intercept of a linear equation and uses that
equation to scale electrical values to physical values.
Args:
physical_channel (str): Specifies the names of the physical
channels to use to create virtual channels. The DAQmx
physical channel constant lists all physical channels on
devices and modules installed in the system.
name_to_assign_to_channel (Optional[str]): Specifies a name
to assign to the virtual channel this function creates.
If you do not specify a value for this input, NI-DAQmx
uses the physical channel name as the virtual channel
name.
min_val (Optional[float]): Specifies in **units** the
minimum value you expect to measure.
max_val (Optional[float]): Specifies in **units** the
maximum value you expect to measure.
units (Optional[nidaqmx.constants.PressureUnits]): Specifies
in which unit to return pressure measurements from the
channel.
bridge_config (Optional[nidaqmx.constants.BridgeConfiguration]):
Specifies information about the bridge configuration and
measurement.
voltage_excit_source (Optional[nidaqmx.constants.ExcitationSource]):
Specifies information about the bridge configuration and
measurement.
voltage_excit_val (Optional[float]): Specifies information
about the bridge configuration and measurement.
nominal_bridge_resistance (Optional[float]): Specifies
information about the bridge configuration and
measurement.
first_electrical_val (Optional[float]): Specifies how to
scale electrical values from the sensor to physical
units.
second_electrical_val (Optional[float]): Specifies how to
scale electrical values from the sensor to physical
units.
electrical_units (Optional[nidaqmx.constants.BridgeElectricalUnits]):
Specifies how to scale electrical values from the sensor
to physical units.
first_physical_val (Optional[float]): Specifies how to scale
electrical values from the sensor to physical units.
second_physical_val (Optional[float]): Specifies how to
scale electrical values from the sensor to physical
units.
physical_units (Optional[nidaqmx.constants.BridgePhysicalUnits]):
Specifies how to scale electrical values from the sensor
to physical units.
custom_scale_name (Optional[str]): Specifies the name of a
custom scale for the channel. If you want the channel to
use a custom scale, specify the name of the custom scale
to this input and set **units** to
**FROM_CUSTOM_SCALE**.
Returns:
nidaqmx._task_modules.channels.ai_channel.AIChannel:
Indicates the newly created channel object.
"""
cfunc = lib_importer.windll.DAQmxCreateAIPressureBridgeTwoPointLinChan
if cfunc.argtypes is None:
with cfunc.arglock:
if cfunc.argtypes is None:
cfunc.argtypes = [
lib_importer.task_handle, ctypes_byte_str,
ctypes_byte_str, ctypes.c_double, ctypes.c_double,
ctypes.c_int, ctypes.c_int, ctypes.c_int,
ctypes.c_double, ctypes.c_double, ctypes.c_double,
ctypes.c_double, ctypes.c_int, ctypes.c_double,
ctypes.c_double, ctypes.c_int, ctypes_byte_str]
error_code = cfunc(
self._handle, physical_channel, name_to_assign_to_channel,
min_val, max_val, units.value, bridge_config.value,
voltage_excit_source.value, voltage_excit_val,
nominal_bridge_resistance, first_electrical_val,
second_electrical_val, electrical_units.value, first_physical_val,
second_physical_val, physical_units.value, custom_scale_name)
check_for_error(error_code)
return self._create_chan(physical_channel, name_to_assign_to_channel)
def add_ai_resistance_chan(
self, physical_channel, name_to_assign_to_channel="",
min_val=100.0, max_val=1000.0, units=ResistanceUnits.OHMS,
resistance_config=ResistanceConfiguration.TWO_WIRE,
current_excit_source=ExcitationSource.EXTERNAL,
current_excit_val=0.001, custom_scale_name=""):
"""
Creates channel(s) to measure resistance.
Args:
physical_channel (str): Specifies the names of the physical
channels to use to create virtual channels. The DAQmx
physical channel constant lists all physical channels on
devices and modules installed in the system.
name_to_assign_to_channel (Optional[str]): Specifies a name
to assign to the virtual channel this function creates.
If you do not specify a value for this input, NI-DAQmx
uses the physical channel name as the virtual channel
name.
min_val (Optional[float]): Specifies in **units** the
minimum value you expect to measure.
max_val (Optional[float]): Specifies in **units** the
maximum value you expect to measure.
units (Optional[nidaqmx.constants.ResistanceUnits]):
Specifies the units to use to return resistance
measurements.
resistance_config (Optional[nidaqmx.constants.ResistanceConfiguration]):
Specifies the number of wires to use for resistive
measurements.
current_excit_source (Optional[nidaqmx.constants.ExcitationSource]):
Specifies the source of excitation.
current_excit_val (Optional[float]): Specifies in amperes
the amount of excitation to supply to the sensor. Refer
to the sensor documentation to determine this value.
custom_scale_name (Optional[str]): Specifies the name of a
custom scale for the channel. If you want the channel to
use a custom scale, specify the name of the custom scale
to this input and set **units** to
**FROM_CUSTOM_SCALE**.
Returns:
nidaqmx._task_modules.channels.ai_channel.AIChannel:
Indicates the newly created channel object.
"""
cfunc = lib_importer.windll.DAQmxCreateAIResistanceChan
if cfunc.argtypes is None:
with cfunc.arglock:
if cfunc.argtypes is None:
cfunc.argtypes = [
lib_importer.task_handle, ctypes_byte_str,
ctypes_byte_str, ctypes.c_double, ctypes.c_double,
ctypes.c_int, ctypes.c_int, ctypes.c_int,
ctypes.c_double, ctypes_byte_str]
error_code = cfunc(
self._handle, physical_channel, name_to_assign_to_channel,
min_val, max_val, units.value, resistance_config.value,
current_excit_source.value, current_excit_val, custom_scale_name)
check_for_error(error_code)
return self._create_chan(physical_channel, name_to_assign_to_channel)
def add_ai_rosette_strain_gage_chan(
self, physical_channel, rosette_type, gage_orientation,
rosette_meas_types, name_to_assign_to_channel="", min_val=-0.001,
max_val=0.001,
strain_config=StrainGageBridgeType.QUARTER_BRIDGE_I,
voltage_excit_source=ExcitationSource.INTERNAL,
voltage_excit_val=2.5, gage_factor=2.0,
nominal_gage_resistance=350.0, poisson_ratio=0.3,
lead_wire_resistance=0.0):
"""
Creates channels to measure two-dimensional strain using a
rosette strain gage.
Args:
physical_channel (str): Specifies the names of the physical
channels to use to create the strain gage virtual
channels necessary to calculate the **rosette
measurements** channels.
rosette_type (nidaqmx.constants.StrainGageRosetteType):
Specifies information about the rosette configuration
and measurements.
gage_orientation (float): Specifies information about the
rosette configuration and measurements.
rosette_meas_types (List[int]): Specifies information about
the rosette configuration and measurements.
name_to_assign_to_channel (Optional[str]): Specifies a name
to assign to the virtual channel this function creates.
If you do not specify a value for this input, NI-DAQmx
creates a default channel name.
min_val (Optional[float]): Specifies the minimum strain you
expect to measure. This value applies to each strain
gage in the rosette.
max_val (Optional[float]): Specifies the maximum strain you
expect to measure. This value applies to each strain
gage in the rosette.
strain_config (Optional[nidaqmx.constants.StrainGageBridgeType]):
Specifies information about the bridge configuration and
measurement.
voltage_excit_source (Optional[nidaqmx.constants.ExcitationSource]):
Specifies information about the bridge configuration and
measurement.
voltage_excit_val (Optional[float]): Specifies information
about the bridge configuration and measurement.
gage_factor (Optional[float]): Contains information about
the strain gage and measurement.
nominal_gage_resistance (Optional[float]): Contains
information about the strain gage and measurement.
poisson_ratio (Optional[float]): Contains information about
the strain gage and measurement.
lead_wire_resistance (Optional[float]): Specifies
information about the bridge configuration and
measurement.
Returns:
nidaqmx._task_modules.channels.ai_channel.AIChannel:
Indicates the newly created channel object.
"""
if rosette_meas_types is None:
rosette_meas_types = []
rosette_meas_types = numpy.int32(rosette_meas_types)
cfunc = lib_importer.windll.DAQmxCreateAIRosetteStrainGageChan
if cfunc.argtypes is None:
with cfunc.arglock:
if cfunc.argtypes is None:
cfunc.argtypes = [
lib_importer.task_handle, ctypes_byte_str,
ctypes_byte_str, ctypes.c_double, ctypes.c_double,
ctypes.c_int, ctypes.c_double,
wrapped_ndpointer(dtype=numpy.int32, flags=('C','W')),
ctypes.c_uint, ctypes.c_int, ctypes.c_int,
ctypes.c_double, ctypes.c_double, ctypes.c_double,
ctypes.c_double, ctypes.c_double]
error_code = cfunc(
self._handle, physical_channel, name_to_assign_to_channel,
min_val, max_val, rosette_type.value, gage_orientation,
rosette_meas_types, len(rosette_meas_types), strain_config.value,
voltage_excit_source.value, voltage_excit_val, gage_factor,
nominal_gage_resistance, poisson_ratio, lead_wire_resistance)
check_for_error(error_code)
return self._create_chan(physical_channel, name_to_assign_to_channel)
def add_ai_rtd_chan(
self, physical_channel, name_to_assign_to_channel="", min_val=0.0,
max_val=100.0, units=TemperatureUnits.DEG_C,
rtd_type=RTDType.PT_3750,
resistance_config=ResistanceConfiguration.TWO_WIRE,
current_excit_source=ExcitationSource.EXTERNAL,
current_excit_val=0.0025, r_0=100.0):
"""
Creates channel(s) that use an RTD to measure temperature.
Args:
physical_channel (str): Specifies the names of the physical
channels to use to create virtual channels. The DAQmx
physical channel constant lists all physical channels on
devices and modules installed in the system.
name_to_assign_to_channel (Optional[str]): Specifies a name
to assign to the virtual channel this function creates.
If you do not specify a value for this input, NI-DAQmx
uses the physical channel name as the virtual channel
name.
min_val (Optional[float]): Specifies in **units** the
minimum value you expect to measure.
max_val (Optional[float]): Specifies in **units** the
maximum value you expect to measure.
units (Optional[nidaqmx.constants.TemperatureUnits]):
Specifies the units to use to return temperature
measurements.
rtd_type (Optional[nidaqmx.constants.RTDType]): Specifies
the type of RTD connected to the channel.
resistance_config (Optional[nidaqmx.constants.ResistanceConfiguration]):
Specifies the number of wires to use for resistive
measurements.
current_excit_source (Optional[nidaqmx.constants.ExcitationSource]):
Specifies the source of excitation.
current_excit_val (Optional[float]): Specifies in amperes
the amount of excitation to supply to the sensor. Refer
to the sensor documentation to determine this value.
r_0 (Optional[float]): Is the sensor resistance in ohms at 0
degrees Celsius. The Callendar-Van Dusen equation
requires this value. Refer to the sensor documentation
to determine this value.
Returns:
nidaqmx._task_modules.channels.ai_channel.AIChannel:
Indicates the newly created channel object.
"""
cfunc = lib_importer.windll.DAQmxCreateAIRTDChan
if cfunc.argtypes is None:
with cfunc.arglock:
if cfunc.argtypes is None:
cfunc.argtypes = [
lib_importer.task_handle, ctypes_byte_str,
ctypes_byte_str, ctypes.c_double, ctypes.c_double,
ctypes.c_int, ctypes.c_int, ctypes.c_int,
ctypes.c_int, ctypes.c_double, ctypes.c_double]
error_code = cfunc(
self._handle, physical_channel, name_to_assign_to_channel,
min_val, max_val, units.value, rtd_type.value,
resistance_config.value, current_excit_source.value,
current_excit_val, r_0)
check_for_error(error_code)
return self._create_chan(physical_channel, name_to_assign_to_channel)
def add_ai_strain_gage_chan(
self, physical_channel, name_to_assign_to_channel="",
min_val=-0.001, max_val=0.001, units=StrainUnits.STRAIN,
strain_config=StrainGageBridgeType.FULL_BRIDGE_I,
voltage_excit_source=ExcitationSource.INTERNAL,
voltage_excit_val=2.5, gage_factor=2.0,
initial_bridge_voltage=0.0, nominal_gage_resistance=350.0,
poisson_ratio=0.30, lead_wire_resistance=0.0,
custom_scale_name=""):
"""
Creates channel(s) to measure strain.
Args:
physical_channel (str): Specifies the names of the physical
channels to use to create virtual channels. The DAQmx
physical channel constant lists all physical channels on
devices and modules installed in the system.
name_to_assign_to_channel (Optional[str]): Specifies a name
to assign to the virtual channel this function creates.
If you do not specify a value for this input, NI-DAQmx
uses the physical channel name as the virtual channel
name.
min_val (Optional[float]): Specifies in **units** the
minimum value you expect to measure.
max_val (Optional[float]): Specifies in **units** the
maximum value you expect to measure.
units (Optional[nidaqmx.constants.StrainUnits]): Specifies
the units to use to return strain measurements.
strain_config (Optional[nidaqmx.constants.StrainGageBridgeType]):
Specifies information about the bridge configuration and
measurement.
voltage_excit_source (Optional[nidaqmx.constants.ExcitationSource]):
Specifies information about the bridge configuration and
measurement.
voltage_excit_val (Optional[float]): Specifies information
about the bridge configuration and measurement.
gage_factor (Optional[float]): Contains information about
the strain gage and measurement.
initial_bridge_voltage (Optional[float]): Specifies
information about the bridge configuration and
measurement.
nominal_gage_resistance (Optional[float]): Contains
information about the strain gage and measurement.
poisson_ratio (Optional[float]): Contains information about
the strain gage and measurement.
lead_wire_resistance (Optional[float]): Specifies
information about the bridge configuration and
measurement.
custom_scale_name (Optional[str]): Specifies the name of a
custom scale for the channel. If you want the channel to
use a custom scale, specify the name of the custom scale
to this input and set **units** to
**FROM_CUSTOM_SCALE**.
Returns:
nidaqmx._task_modules.channels.ai_channel.AIChannel:
Indicates the newly created channel object.
"""
cfunc = lib_importer.windll.DAQmxCreateAIStrainGageChan
if cfunc.argtypes is None:
with cfunc.arglock:
if cfunc.argtypes is None:
cfunc.argtypes = [
lib_importer.task_handle, ctypes_byte_str,
ctypes_byte_str, ctypes.c_double, ctypes.c_double,
ctypes.c_int, ctypes.c_int, ctypes.c_int,
ctypes.c_double, ctypes.c_double, ctypes.c_double,
ctypes.c_double, ctypes.c_double, ctypes.c_double,
ctypes_byte_str]
error_code = cfunc(
self._handle, physical_channel, name_to_assign_to_channel,
min_val, max_val, units.value, strain_config.value,
voltage_excit_source.value, voltage_excit_val, gage_factor,
initial_bridge_voltage, nominal_gage_resistance, poisson_ratio,
lead_wire_resistance, custom_scale_name)
check_for_error(error_code)
return self._create_chan(physical_channel, name_to_assign_to_channel)
def add_ai_temp_built_in_sensor_chan(
self, physical_channel, name_to_assign_to_channel="",
units=TemperatureUnits.DEG_C):
"""
Creates channel(s) that use the built-in sensor of a terminal
block or device to measure temperature. On SCXI modules, for
example, the built-in sensor could be the CJC sensor.
Args:
physical_channel (str): Specifies the names of the physical
channels to use to create virtual channels. The DAQmx
physical channel constant lists all physical channels on
devices and modules installed in the system.
name_to_assign_to_channel (Optional[str]): Specifies a name
to assign to the virtual channel this function creates.
If you do not specify a value for this input, NI-DAQmx
uses the physical channel name as the virtual channel
name.
units (Optional[nidaqmx.constants.TemperatureUnits]):
Specifies the units to use to return temperature
measurements.
Returns:
nidaqmx._task_modules.channels.ai_channel.AIChannel:
Indicates the newly created channel object.
"""
cfunc = lib_importer.windll.DAQmxCreateAITempBuiltInSensorChan
if cfunc.argtypes is None:
with cfunc.arglock:
if cfunc.argtypes is None:
cfunc.argtypes = [
lib_importer.task_handle, ctypes_byte_str,
ctypes_byte_str, ctypes.c_int]
error_code = cfunc(
self._handle, physical_channel, name_to_assign_to_channel,
units.value)
check_for_error(error_code)
return self._create_chan(physical_channel, name_to_assign_to_channel)
def add_ai_thrmcpl_chan(
self, physical_channel, name_to_assign_to_channel="", min_val=0.0,
max_val=100.0, units=TemperatureUnits.DEG_C,
thermocouple_type=ThermocoupleType.J,
cjc_source=CJCSource.CONSTANT_USER_VALUE, cjc_val=25.0,
cjc_channel=""):
"""
Creates channel(s) that use a thermocouple to measure
temperature.
Args:
physical_channel (str): Specifies the names of the physical
channels to use to create virtual channels. The DAQmx
physical channel constant lists all physical channels on
devices and modules installed in the system.
name_to_assign_to_channel (Optional[str]): Specifies a name
to assign to the virtual channel this function creates.
If you do not specify a value for this input, NI-DAQmx
uses the physical channel name as the virtual channel
name.
min_val (Optional[float]): Specifies in **units** the
minimum value you expect to measure.
max_val (Optional[float]): Specifies in **units** the
maximum value you expect to measure.
units (Optional[nidaqmx.constants.TemperatureUnits]):
Specifies the units to use to return temperature
measurements.
thermocouple_type (Optional[nidaqmx.constants.ThermocoupleType]):
Specifies the type of thermocouple connected to the
channel. Thermocouple types differ in composition and
measurement range.
cjc_source (Optional[nidaqmx.constants.CJCSource]):
Specifies the source of cold-junction compensation.
cjc_val (Optional[float]): Specifies in **units** the
temperature of the cold junction if you set
**cjc_source** to **CONSTANT_VALUE**.
cjc_channel (Optional[str]): Specifies the channel that
acquires the temperature of the thermocouple cold-
junction if you set **cjc_source** to **CHANNEL**.
Returns:
nidaqmx._task_modules.channels.ai_channel.AIChannel:
Indicates the newly created channel object.
"""
cfunc = lib_importer.windll.DAQmxCreateAIThrmcplChan
if cfunc.argtypes is None:
with cfunc.arglock:
if cfunc.argtypes is None:
cfunc.argtypes = [
lib_importer.task_handle, ctypes_byte_str,
ctypes_byte_str, ctypes.c_double, ctypes.c_double,
ctypes.c_int, ctypes.c_int, ctypes.c_int,
ctypes.c_double, ctypes_byte_str]
error_code = cfunc(
self._handle, physical_channel, name_to_assign_to_channel,
min_val, max_val, units.value, thermocouple_type.value,
cjc_source.value, cjc_val, cjc_channel)
check_for_error(error_code)
return self._create_chan(physical_channel, name_to_assign_to_channel)
def add_ai_thrmstr_chan_iex(
self, physical_channel, name_to_assign_to_channel="", min_val=0.0,
max_val=100.0, units=TemperatureUnits.DEG_C,
resistance_config=ResistanceConfiguration.FOUR_WIRE,
current_excit_source=ExcitationSource.EXTERNAL,
current_excit_val=0.00015, a=0.001295361, b=0.0002343159,
c=0.0000001018703):
"""
Creates channel(s) that use a thermistor to measure temperature.
Use this instance when the thermistor requires current
excitation.
Args:
physical_channel (str): Specifies the names of the physical
channels to use to create virtual channels. The DAQmx
physical channel constant lists all physical channels on
devices and modules installed in the system.
name_to_assign_to_channel (Optional[str]): Specifies a name
to assign to the virtual channel this function creates.
If you do not specify a value for this input, NI-DAQmx
uses the physical channel name as the virtual channel
name.
min_val (Optional[float]): Specifies in **units** the
minimum value you expect to measure.
max_val (Optional[float]): Specifies in **units** the
maximum value you expect to measure.
units (Optional[nidaqmx.constants.TemperatureUnits]):
Specifies the units to use to return temperature
measurements.
resistance_config (Optional[nidaqmx.constants.ResistanceConfiguration]):
Specifies the number of wires to use for resistive
measurements.
current_excit_source (Optional[nidaqmx.constants.ExcitationSource]):
Specifies the source of excitation.
current_excit_val (Optional[float]): Specifies in amperes
the amount of excitation to supply to the sensor. Refer
to the sensor documentation to determine this value.
a (Optional[float]): Contains the constants for the
Steinhart-Hart thermistor equation. Refer to the sensor
documentation to determine values for these constants.
b (Optional[float]): Contains the constants for the
Steinhart-Hart thermistor equation. Refer to the sensor
documentation to determine values for these constants.
c (Optional[float]): Contains the constants for the
Steinhart-Hart thermistor equation. Refer to the sensor
documentation to determine values for these constants.
Returns:
nidaqmx._task_modules.channels.ai_channel.AIChannel:
Indicates the newly created channel object.
"""
cfunc = lib_importer.windll.DAQmxCreateAIThrmstrChanIex
if cfunc.argtypes is None:
with cfunc.arglock:
if cfunc.argtypes is None:
cfunc.argtypes = [
lib_importer.task_handle, ctypes_byte_str,
ctypes_byte_str, ctypes.c_double, ctypes.c_double,
ctypes.c_int, ctypes.c_int, ctypes.c_int,
ctypes.c_double, ctypes.c_double, ctypes.c_double,
ctypes.c_double]
error_code = cfunc(
self._handle, physical_channel, name_to_assign_to_channel,
min_val, max_val, units.value, resistance_config.value,
current_excit_source.value, current_excit_val, a, b, c)
check_for_error(error_code)
return self._create_chan(physical_channel, name_to_assign_to_channel)
def add_ai_thrmstr_chan_vex(
self, physical_channel, name_to_assign_to_channel="", min_val=0.0,
max_val=100.0, units=TemperatureUnits.DEG_C,
resistance_config=ResistanceConfiguration.FOUR_WIRE,
voltage_excit_source=ExcitationSource.EXTERNAL,
voltage_excit_val=2.5, a=0.001295361, b=0.0002343159,
c=0.0000001018703, r_1=5000.0):
"""
Creates channel(s) that use a thermistor to measure temperature.
Use this instance when the thermistor requires voltage
excitation.
Args:
physical_channel (str): Specifies the names of the physical
channels to use to create virtual channels. The DAQmx
physical channel constant lists all physical channels on
devices and modules installed in the system.
name_to_assign_to_channel (Optional[str]): Specifies a name
to assign to the virtual channel this function creates.
If you do not specify a value for this input, NI-DAQmx
uses the physical channel name as the virtual channel
name.
min_val (Optional[float]): Specifies in **units** the
minimum value you expect to measure.
max_val (Optional[float]): Specifies in **units** the
maximum value you expect to measure.
units (Optional[nidaqmx.constants.TemperatureUnits]):
Specifies the units to use to return temperature
measurements.
resistance_config (Optional[nidaqmx.constants.ResistanceConfiguration]):
Specifies the number of wires to use for resistive
measurements.
voltage_excit_source (Optional[nidaqmx.constants.ExcitationSource]):
Specifies the source of excitation.
voltage_excit_val (Optional[float]): Specifies in volts the
amount of excitation supplied to the sensor. Refer to
the sensor documentation to determine appropriate
excitation values.
a (Optional[float]): Contains the constants for the
Steinhart-Hart thermistor equation. Refer to the sensor
documentation to determine values for these constants.
b (Optional[float]): Contains the constants for the
Steinhart-Hart thermistor equation. Refer to the sensor
documentation to determine values for these constants.
c (Optional[float]): Contains the constants for the
Steinhart-Hart thermistor equation. Refer to the sensor
documentation to determine values for these constants.
r_1 (Optional[float]): Specifies in ohms the value of the
reference resistor.
Returns:
nidaqmx._task_modules.channels.ai_channel.AIChannel:
Indicates the newly created channel object.
"""
cfunc = lib_importer.windll.DAQmxCreateAIThrmstrChanVex
if cfunc.argtypes is None:
with cfunc.arglock:
if cfunc.argtypes is None:
cfunc.argtypes = [
lib_importer.task_handle, ctypes_byte_str,
ctypes_byte_str, ctypes.c_double, ctypes.c_double,
ctypes.c_int, ctypes.c_int, ctypes.c_int,
ctypes.c_double, ctypes.c_double, ctypes.c_double,
ctypes.c_double, ctypes.c_double]
error_code = cfunc(
self._handle, physical_channel, name_to_assign_to_channel,
min_val, max_val, units.value, resistance_config.value,
voltage_excit_source.value, voltage_excit_val, a, b, c, r_1)
check_for_error(error_code)
return self._create_chan(physical_channel, name_to_assign_to_channel)
def add_ai_torque_bridge_polynomial_chan(
self, physical_channel, name_to_assign_to_channel="",
min_val=-100.0, max_val=100.0, units=TorqueUnits.INCH_POUNDS,
bridge_config=BridgeConfiguration.FULL_BRIDGE,
voltage_excit_source=ExcitationSource.INTERNAL,
voltage_excit_val=2.5, nominal_bridge_resistance=350.0,
forward_coeffs=None, reverse_coeffs=None,
electrical_units=BridgeElectricalUnits.MILLIVOLTS_PER_VOLT,
physical_units=BridgePhysicalUnits.INCH_POUNDS,
custom_scale_name=""):
"""
Creates channel(s) that use a Wheatstone bridge to measure
torque. Use this instance with sensors whose specifications
provide a polynomial to convert electrical values to physical
values. When you use this scaling type, NI-DAQmx requires
coefficients for a polynomial that converts electrical values to
physical values (forward), as well as coefficients for a
polynomial that converts physical values to electrical values
(reverse). If you only know one set of coefficients, use the
DAQmx Compute Reverse Polynomial Coefficients function to
generate the other set.
Args:
physical_channel (str): Specifies the names of the physical
channels to use to create virtual channels. The DAQmx
physical channel constant lists all physical channels on
devices and modules installed in the system.
name_to_assign_to_channel (Optional[str]): Specifies a name
to assign to the virtual channel this function creates.
If you do not specify a value for this input, NI-DAQmx
uses the physical channel name as the virtual channel
name.
min_val (Optional[float]): Specifies in **units** the
minimum value you expect to measure.
max_val (Optional[float]): Specifies in **units** the
maximum value you expect to measure.
units (Optional[nidaqmx.constants.TorqueUnits]): Specifies
in which unit to return torque measurements from the
channel.
bridge_config (Optional[nidaqmx.constants.BridgeConfiguration]):
Specifies information about the bridge configuration and
measurement.
voltage_excit_source (Optional[nidaqmx.constants.ExcitationSource]):
Specifies information about the bridge configuration and
measurement.
voltage_excit_val (Optional[float]): Specifies information
about the bridge configuration and measurement.
nominal_bridge_resistance (Optional[float]): Specifies
information about the bridge configuration and
measurement.
forward_coeffs (Optional[List[float]]): Specifies how to
scale electrical values from the sensor to physical
units.
reverse_coeffs (Optional[List[float]]): Specifies how to
scale electrical values from the sensor to physical
units.
electrical_units (Optional[nidaqmx.constants.BridgeElectricalUnits]):
Specifies how to scale electrical values from the sensor
to physical units.
physical_units (Optional[nidaqmx.constants.BridgePhysicalUnits]):
Specifies how to scale electrical values from the sensor
to physical units.
custom_scale_name (Optional[str]): Specifies the name of a
custom scale for the channel. If you want the channel to
use a custom scale, specify the name of the custom scale
to this input and set **units** to
**FROM_CUSTOM_SCALE**.
Returns:
nidaqmx._task_modules.channels.ai_channel.AIChannel:
Indicates the newly created channel object.
"""
if forward_coeffs is None:
forward_coeffs = []
if reverse_coeffs is None:
reverse_coeffs = []
forward_coeffs = numpy.float64(forward_coeffs)
reverse_coeffs = numpy.float64(reverse_coeffs)
cfunc = lib_importer.windll.DAQmxCreateAITorqueBridgePolynomialChan
if cfunc.argtypes is None:
with cfunc.arglock:
if cfunc.argtypes is None:
cfunc.argtypes = [
lib_importer.task_handle, ctypes_byte_str,
ctypes_byte_str, ctypes.c_double, ctypes.c_double,
ctypes.c_int, ctypes.c_int, ctypes.c_int,
ctypes.c_double, ctypes.c_double,
wrapped_ndpointer(dtype=numpy.float64,
flags=('C','W')), ctypes.c_uint,
wrapped_ndpointer(dtype=numpy.float64,
flags=('C','W')), ctypes.c_uint, ctypes.c_int,
ctypes.c_int, ctypes_byte_str]
error_code = cfunc(
self._handle, physical_channel, name_to_assign_to_channel,
min_val, max_val, units.value, bridge_config.value,
voltage_excit_source.value, voltage_excit_val,
nominal_bridge_resistance, forward_coeffs, len(forward_coeffs),
reverse_coeffs, len(reverse_coeffs), electrical_units.value,
physical_units.value, custom_scale_name)
check_for_error(error_code)
return self._create_chan(physical_channel, name_to_assign_to_channel)
def add_ai_torque_bridge_table_chan(
self, physical_channel, name_to_assign_to_channel="",
min_val=-100.0, max_val=100.0, units=TorqueUnits.INCH_POUNDS,
bridge_config=BridgeConfiguration.FULL_BRIDGE,
voltage_excit_source=ExcitationSource.INTERNAL,
voltage_excit_val=2.5, nominal_bridge_resistance=350.0,
electrical_vals=None,
electrical_units=BridgeElectricalUnits.MILLIVOLTS_PER_VOLT,
physical_vals=None,
physical_units=BridgePhysicalUnits.INCH_POUNDS,
custom_scale_name=""):
"""
Creates channel(s) that use a Wheatstone bridge to measure
torque. Use this instance with sensors whose specifications
provide a table of electrical values and the corresponding
physical values. When you use this scaling type, NI-DAQmx
performs linear scaling between each pair of electrical and
physical values. The input limits specified with **min_val** and
**max_val** must fall within the smallest and largest physical
values. For any data outside those endpoints, NI-DAQmx coerces
that data to the endpoints.
Args:
physical_channel (str): Specifies the names of the physical
channels to use to create virtual channels. The DAQmx
physical channel constant lists all physical channels on
devices and modules installed in the system.
name_to_assign_to_channel (Optional[str]): Specifies a name
to assign to the virtual channel this function creates.
If you do not specify a value for this input, NI-DAQmx
uses the physical channel name as the virtual channel
name.
min_val (Optional[float]): Specifies in **units** the
minimum value you expect to measure.
max_val (Optional[float]): Specifies in **units** the
maximum value you expect to measure.
units (Optional[nidaqmx.constants.TorqueUnits]): Specifies
in which unit to return torque measurements from the
channel.
bridge_config (Optional[nidaqmx.constants.BridgeConfiguration]):
Specifies information about the bridge configuration and
measurement.
voltage_excit_source (Optional[nidaqmx.constants.ExcitationSource]):
Specifies information about the bridge configuration and
measurement.
voltage_excit_val (Optional[float]): Specifies information
about the bridge configuration and measurement.
nominal_bridge_resistance (Optional[float]): Specifies
information about the bridge configuration and
measurement.
electrical_vals (Optional[List[float]]): Specifies how to
scale electrical values from the sensor to physical
units.
electrical_units (Optional[nidaqmx.constants.BridgeElectricalUnits]):
Specifies how to scale electrical values from the sensor
to physical units.
physical_vals (Optional[List[float]]): Specifies how to
scale electrical values from the sensor to physical
units.
physical_units (Optional[nidaqmx.constants.BridgePhysicalUnits]):
Specifies how to scale electrical values from the sensor
to physical units.
custom_scale_name (Optional[str]): Specifies the name of a
custom scale for the channel. If you want the channel to
use a custom scale, specify the name of the custom scale
to this input and set **units** to
**FROM_CUSTOM_SCALE**.
Returns:
nidaqmx._task_modules.channels.ai_channel.AIChannel:
Indicates the newly created channel object.
"""
if electrical_vals is None:
electrical_vals = []
if physical_vals is None:
physical_vals = []
electrical_vals = numpy.float64(electrical_vals)
physical_vals = numpy.float64(physical_vals)
cfunc = lib_importer.windll.DAQmxCreateAITorqueBridgeTableChan
if cfunc.argtypes is None:
with cfunc.arglock:
if cfunc.argtypes is None:
cfunc.argtypes = [
lib_importer.task_handle, ctypes_byte_str,
ctypes_byte_str, ctypes.c_double, ctypes.c_double,
ctypes.c_int, ctypes.c_int, ctypes.c_int,
ctypes.c_double, ctypes.c_double,
wrapped_ndpointer(dtype=numpy.float64,
flags=('C','W')), ctypes.c_uint, ctypes.c_int,
wrapped_ndpointer(dtype=numpy.float64,
flags=('C','W')), ctypes.c_uint, ctypes.c_int,
ctypes_byte_str]
error_code = cfunc(
self._handle, physical_channel, name_to_assign_to_channel,
min_val, max_val, units.value, bridge_config.value,
voltage_excit_source.value, voltage_excit_val,
nominal_bridge_resistance, electrical_vals, len(electrical_vals),
electrical_units.value, physical_vals, len(physical_vals),
physical_units.value, custom_scale_name)
check_for_error(error_code)
return self._create_chan(physical_channel, name_to_assign_to_channel)
def add_ai_torque_bridge_two_point_lin_chan(
self, physical_channel, name_to_assign_to_channel="",
min_val=-100.0, max_val=100.0, units=TorqueUnits.INCH_POUNDS,
bridge_config=BridgeConfiguration.FULL_BRIDGE,
voltage_excit_source=ExcitationSource.INTERNAL,
voltage_excit_val=2.5, nominal_bridge_resistance=350.0,
first_electrical_val=0.0, second_electrical_val=2.0,
electrical_units=BridgeElectricalUnits.MILLIVOLTS_PER_VOLT,
first_physical_val=0.0, second_physical_val=100.0,
physical_units=BridgePhysicalUnits.INCH_POUNDS,
custom_scale_name=""):
"""
Creates channel(s) that use a Wheatstone bridge to measure
torque. Use this instance with sensors whose specifications do
not provide a polynomial for scaling or a table of electrical
and physical values. When you use this scaling type, NI-DAQmx
uses two points of electrical and physical values to calculate
the slope and y-intercept of a linear equation and uses that
equation to scale electrical values to physical values.
Args:
physical_channel (str): Specifies the names of the physical
channels to use to create virtual channels. The DAQmx
physical channel constant lists all physical channels on
devices and modules installed in the system.
name_to_assign_to_channel (Optional[str]): Specifies a name
to assign to the virtual channel this function creates.
If you do not specify a value for this input, NI-DAQmx
uses the physical channel name as the virtual channel
name.
min_val (Optional[float]): Specifies in **units** the
minimum value you expect to measure.
max_val (Optional[float]): Specifies in **units** the
maximum value you expect to measure.
units (Optional[nidaqmx.constants.TorqueUnits]): Specifies
in which unit to return torque measurements from the
channel.
bridge_config (Optional[nidaqmx.constants.BridgeConfiguration]):
Specifies information about the bridge configuration and
measurement.
voltage_excit_source (Optional[nidaqmx.constants.ExcitationSource]):
Specifies information about the bridge configuration and
measurement.
voltage_excit_val (Optional[float]): Specifies information
about the bridge configuration and measurement.
nominal_bridge_resistance (Optional[float]): Specifies
information about the bridge configuration and
measurement.
first_electrical_val (Optional[float]): Specifies how to
scale electrical values from the sensor to physical
units.
second_electrical_val (Optional[float]): Specifies how to
scale electrical values from the sensor to physical
units.
electrical_units (Optional[nidaqmx.constants.BridgeElectricalUnits]):
Specifies how to scale electrical values from the sensor
to physical units.
first_physical_val (Optional[float]): Specifies how to scale
electrical values from the sensor to physical units.
second_physical_val (Optional[float]): Specifies how to
scale electrical values from the sensor to physical
units.
physical_units (Optional[nidaqmx.constants.BridgePhysicalUnits]):
Specifies how to scale electrical values from the sensor
to physical units.
custom_scale_name (Optional[str]): Specifies the name of a
custom scale for the channel. If you want the channel to
use a custom scale, specify the name of the custom scale
to this input and set **units** to
**FROM_CUSTOM_SCALE**.
Returns:
nidaqmx._task_modules.channels.ai_channel.AIChannel:
Indicates the newly created channel object.
"""
cfunc = lib_importer.windll.DAQmxCreateAITorqueBridgeTwoPointLinChan
if cfunc.argtypes is None:
with cfunc.arglock:
if cfunc.argtypes is None:
cfunc.argtypes = [
lib_importer.task_handle, ctypes_byte_str,
ctypes_byte_str, ctypes.c_double, ctypes.c_double,
ctypes.c_int, ctypes.c_int, ctypes.c_int,
ctypes.c_double, ctypes.c_double, ctypes.c_double,
ctypes.c_double, ctypes.c_int, ctypes.c_double,
ctypes.c_double, ctypes.c_int, ctypes_byte_str]
error_code = cfunc(
self._handle, physical_channel, name_to_assign_to_channel,
min_val, max_val, units.value, bridge_config.value,
voltage_excit_source.value, voltage_excit_val,
nominal_bridge_resistance, first_electrical_val,
second_electrical_val, electrical_units.value, first_physical_val,
second_physical_val, physical_units.value, custom_scale_name)
check_for_error(error_code)
return self._create_chan(physical_channel, name_to_assign_to_channel)
def add_ai_velocity_iepe_chan(
self, physical_channel, name_to_assign_to_channel="",
terminal_config=TerminalConfiguration.DEFAULT, min_val=-50.0,
max_val=50.0, units=VelocityUnits.INCHES_PER_SECOND,
sensitivity=100.0,
sensitivity_units=VelocityIEPESensorSensitivityUnits.MILLIVOLTS_PER_INCH_PER_SECOND,
current_excit_source=ExcitationSource.INTERNAL,
current_excit_val=0.002, custom_scale_name=""):
"""
Creates channel(s) that use an IEPE velocity sensor to measure
velocity.
Args:
physical_channel (str): Specifies the names of the physical
channels to use to create virtual channels. The DAQmx
physical channel constant lists all physical channels on
devices and modules installed in the system.
name_to_assign_to_channel (Optional[str]): Specifies a name
to assign to the virtual channel this function creates.
If you do not specify a value for this input, NI-DAQmx
uses the physical channel name as the virtual channel
name.
terminal_config (Optional[nidaqmx.constants.TerminalConfiguration]):
Specifies the input terminal configuration for the
channel.
min_val (Optional[float]): Specifies in **units** the
minimum value you expect to measure.
max_val (Optional[float]): Specifies in **units** the
maximum value you expect to measure.
units (Optional[nidaqmx.constants.VelocityUnits]): Specifies
in which unit to return velocity measurements from the
channel.
sensitivity (Optional[float]): Is the sensitivity of the
sensor. This value is in the units you specify with the
**sensitivity_units** input. Refer to the sensor
documentation to determine this value.
sensitivity_units (Optional[nidaqmx.constants.VelocityIEPESensorSensitivityUnits]):
Specifies the units of the **sensitivity** input.
current_excit_source (Optional[nidaqmx.constants.ExcitationSource]):
Specifies the source of excitation.
current_excit_val (Optional[float]): Specifies in amperes
the amount of excitation to supply to the sensor. Refer
to the sensor documentation to determine this value.
custom_scale_name (Optional[str]): Specifies the name of a
custom scale for the channel. If you want the channel to
use a custom scale, specify the name of the custom scale
to this input and set **units** to
**FROM_CUSTOM_SCALE**.
Returns:
nidaqmx._task_modules.channels.ai_channel.AIChannel:
Indicates the newly created channel object.
"""
cfunc = lib_importer.windll.DAQmxCreateAIVelocityIEPEChan
if cfunc.argtypes is None:
with cfunc.arglock:
if cfunc.argtypes is None:
cfunc.argtypes = [
lib_importer.task_handle, ctypes_byte_str,
ctypes_byte_str, ctypes.c_int, ctypes.c_double,
ctypes.c_double, ctypes.c_int, ctypes.c_double,
ctypes.c_int, ctypes.c_int, ctypes.c_double,
ctypes_byte_str]
error_code = cfunc(
self._handle, physical_channel, name_to_assign_to_channel,
terminal_config.value, min_val, max_val, units.value, sensitivity,
sensitivity_units.value, current_excit_source.value,
current_excit_val, custom_scale_name)
check_for_error(error_code)
return self._create_chan(physical_channel, name_to_assign_to_channel)
def add_ai_voltage_chan(
self, physical_channel, name_to_assign_to_channel="",
terminal_config=TerminalConfiguration.DEFAULT, min_val=-5.0,
max_val=5.0, units=VoltageUnits.VOLTS, custom_scale_name=""):
"""
Creates channel(s) to measure voltage. If the measurement
requires the use of internal excitation or you need excitation
to scale the voltage, use the AI Custom Voltage with Excitation
instance of this function.
Args:
physical_channel (str): Specifies the names of the physical
channels to use to create virtual channels. The DAQmx
physical channel constant lists all physical channels on
devices and modules installed in the system.
name_to_assign_to_channel (Optional[str]): Specifies a name
to assign to the virtual channel this function creates.
If you do not specify a value for this input, NI-DAQmx
uses the physical channel name as the virtual channel
name.
terminal_config (Optional[nidaqmx.constants.TerminalConfiguration]):
Specifies the input terminal configuration for the
channel.
min_val (Optional[float]): Specifies in **units** the
minimum value you expect to measure.
max_val (Optional[float]): Specifies in **units** the
maximum value you expect to measure.
units (Optional[nidaqmx.constants.VoltageUnits]): Specifies
the units to use to return voltage measurements.
custom_scale_name (Optional[str]): Specifies the name of a
custom scale for the channel. If you want the channel to
use a custom scale, specify the name of the custom scale
to this input and set **units** to
**FROM_CUSTOM_SCALE**.
Returns:
nidaqmx._task_modules.channels.ai_channel.AIChannel:
Indicates the newly created channel object.
"""
cfunc = lib_importer.windll.DAQmxCreateAIVoltageChan
if cfunc.argtypes is None:
with cfunc.arglock:
if cfunc.argtypes is None:
cfunc.argtypes = [
lib_importer.task_handle, ctypes_byte_str,
ctypes_byte_str, ctypes.c_int, ctypes.c_double,
ctypes.c_double, ctypes.c_int, ctypes_byte_str]
error_code = cfunc(
self._handle, physical_channel, name_to_assign_to_channel,
terminal_config.value, min_val, max_val, units.value,
custom_scale_name)
check_for_error(error_code)
return self._create_chan(physical_channel, name_to_assign_to_channel)
def add_ai_voltage_chan_with_excit(
self, physical_channel, name_to_assign_to_channel="",
terminal_config=TerminalConfiguration.DEFAULT, min_val=-10.0,
max_val=10.0, units=VoltageUnits.VOLTS,
bridge_config=BridgeConfiguration.NO_BRIDGE,
voltage_excit_source=ExcitationSource.INTERNAL,
voltage_excit_val=0.0, use_excit_for_scaling=False,
custom_scale_name=""):
"""
Creates channel(s) to measure voltage. Use this instance for
custom sensors that require excitation. You can use the
excitation to scale the measurement.
Args:
physical_channel (str): Specifies the names of the physical
channels to use to create virtual channels. The DAQmx
physical channel constant lists all physical channels on
devices and modules installed in the system.
name_to_assign_to_channel (Optional[str]): Specifies a name
to assign to the virtual channel this function creates.
If you do not specify a value for this input, NI-DAQmx
uses the physical channel name as the virtual channel
name.
terminal_config (Optional[nidaqmx.constants.TerminalConfiguration]):
Specifies the input terminal configuration for the
channel.
min_val (Optional[float]): Specifies in **units** the
minimum value you expect to measure.
max_val (Optional[float]): Specifies in **units** the
maximum value you expect to measure.
units (Optional[nidaqmx.constants.VoltageUnits]): Specifies
the units to use to return voltage measurements.
bridge_config (Optional[nidaqmx.constants.BridgeConfiguration]):
Specifies what type of Wheatstone bridge the sensor is.
voltage_excit_source (Optional[nidaqmx.constants.ExcitationSource]):
Specifies the source of excitation.
voltage_excit_val (Optional[float]): Specifies in volts the
amount of excitation supplied to the sensor. Refer to
the sensor documentation to determine appropriate
excitation values.
use_excit_for_scaling (Optional[bool]): Specifies if NI-
DAQmx divides the measurement by the excitation. You
should typically set **use_excit_for_scaling** to True
for ratiometric transducers. If you set
**use_excit_for_scaling** to True, set **max_val** and
**min_val** to reflect the scaling.
custom_scale_name (Optional[str]): Specifies the name of a
custom scale for the channel. If you want the channel to
use a custom scale, specify the name of the custom scale
to this input and set **units** to
**FROM_CUSTOM_SCALE**.
Returns:
nidaqmx._task_modules.channels.ai_channel.AIChannel:
Indicates the newly created channel object.
"""
cfunc = lib_importer.windll.DAQmxCreateAIVoltageChanWithExcit
if cfunc.argtypes is None:
with cfunc.arglock:
if cfunc.argtypes is None:
cfunc.argtypes = [
lib_importer.task_handle, ctypes_byte_str,
ctypes_byte_str, ctypes.c_int, ctypes.c_double,
ctypes.c_double, ctypes.c_int, ctypes.c_int,
ctypes.c_int, ctypes.c_double, c_bool32,
ctypes_byte_str]
error_code = cfunc(
self._handle, physical_channel, name_to_assign_to_channel,
terminal_config.value, min_val, max_val, units.value,
bridge_config.value, voltage_excit_source.value,
voltage_excit_val, use_excit_for_scaling, custom_scale_name)
check_for_error(error_code)
return self._create_chan(physical_channel, name_to_assign_to_channel)
def add_ai_voltage_rms_chan(
self, physical_channel, name_to_assign_to_channel="",
terminal_config=TerminalConfiguration.DEFAULT, min_val=-5.0,
max_val=5.0, units=VoltageUnits.VOLTS, custom_scale_name=""):
"""
Creates channel(s) to measure voltage RMS, the average (mean)
power of the acquired voltage.
Args:
physical_channel (str): Specifies the names of the physical
channels to use to create virtual channels. The DAQmx
physical channel constant lists all physical channels on
devices and modules installed in the system.
name_to_assign_to_channel (Optional[str]): Specifies a name
to assign to the virtual channel this function creates.
If you do not specify a value for this input, NI-DAQmx
uses the physical channel name as the virtual channel
name.
terminal_config (Optional[nidaqmx.constants.TerminalConfiguration]):
Specifies the input terminal configuration for the
channel.
min_val (Optional[float]): Specifies in **units** the
minimum value you expect to measure.
max_val (Optional[float]): Specifies in **units** the
maximum value you expect to measure.
units (Optional[nidaqmx.constants.VoltageUnits]): Specifies
the units to use to return voltage measurements.
custom_scale_name (Optional[str]): Specifies the name of a
custom scale for the channel. If you want the channel to
use a custom scale, specify the name of the custom scale
to this input and set **units** to
**FROM_CUSTOM_SCALE**.
Returns:
nidaqmx._task_modules.channels.ai_channel.AIChannel:
Indicates the newly created channel object.
"""
cfunc = lib_importer.windll.DAQmxCreateAIVoltageRMSChan
if cfunc.argtypes is None:
with cfunc.arglock:
if cfunc.argtypes is None:
cfunc.argtypes = [
lib_importer.task_handle, ctypes_byte_str,
ctypes_byte_str, ctypes.c_int, ctypes.c_double,
ctypes.c_double, ctypes.c_int, ctypes_byte_str]
error_code = cfunc(
self._handle, physical_channel, name_to_assign_to_channel,
terminal_config.value, min_val, max_val, units.value,
custom_scale_name)
check_for_error(error_code)
return self._create_chan(physical_channel, name_to_assign_to_channel)
def add_teds_ai_accel_chan(
        self, physical_channel, name_to_assign_to_channel="",
        terminal_config=TerminalConfiguration.DEFAULT, min_val=-5.0,
        max_val=5.0, units=AccelUnits.G,
        current_excit_source=ExcitationSource.INTERNAL,
        current_excit_val=0.004, custom_scale_name=""):
    """
    Creates channel(s) that use an accelerometer to measure
    acceleration. The physical channel(s) must already be
    configured with TEDS information for this call to succeed.

    Args:
        physical_channel (str): Names of the physical channels from
            which to create virtual channels. The DAQmx physical
            channel constant lists every physical channel on
            installed devices and modules.
        name_to_assign_to_channel (Optional[str]): Name for the new
            virtual channel. When omitted, NI-DAQmx reuses the
            physical channel name.
        terminal_config (Optional[nidaqmx.constants.TerminalConfiguration]):
            Input terminal configuration for the channel.
        min_val (Optional[float]): Minimum value, in **units**, you
            expect to measure.
        max_val (Optional[float]): Maximum value, in **units**, you
            expect to measure.
        units (Optional[nidaqmx.constants.AccelUnits]): Units in
            which acceleration measurements are returned.
        current_excit_source (Optional[nidaqmx.constants.ExcitationSource]):
            Source of excitation.
        current_excit_val (Optional[float]): Excitation, in amperes,
            supplied to the sensor; consult the sensor documentation
            for the proper value.
        custom_scale_name (Optional[str]): Name of a custom scale to
            apply to the channel; requires **units** to be
            **FROM_CUSTOM_SCALE**.
    Returns:
        nidaqmx._task_modules.channels.ai_channel.AIChannel:
            The newly created channel object.
    """
    func = lib_importer.windll.DAQmxCreateTEDSAIAccelChan
    if func.argtypes is None:
        # Register the C signature once, double-checked under the
        # per-function lock so concurrent callers stay safe.
        with func.arglock:
            if func.argtypes is None:
                func.argtypes = [
                    lib_importer.task_handle, ctypes_byte_str,
                    ctypes_byte_str, ctypes.c_int, ctypes.c_double,
                    ctypes.c_double, ctypes.c_int, ctypes.c_int,
                    ctypes.c_double, ctypes_byte_str]

    status = func(
        self._handle, physical_channel, name_to_assign_to_channel,
        terminal_config.value, min_val, max_val, units.value,
        current_excit_source.value, current_excit_val, custom_scale_name)
    check_for_error(status)

    return self._create_chan(physical_channel, name_to_assign_to_channel)
def add_teds_ai_bridge_chan(
        self, physical_channel, name_to_assign_to_channel="",
        min_val=-0.002, max_val=0.002, units=TEDSUnits.FROM_TEDS,
        voltage_excit_source=ExcitationSource.INTERNAL,
        voltage_excit_val=2.5, custom_scale_name=""):
    """
    Creates channel(s) that measure a Wheatstone bridge. The
    physical channel(s) must already be configured with TEDS
    information. Use this instance with bridge-based sensors that
    measure phenomena other than strain, force, pressure, or
    torque, or that scale data to physical units NI-DAQmx does not
    support.

    Args:
        physical_channel (str): Names of the physical channels from
            which to create virtual channels. The DAQmx physical
            channel constant lists every physical channel on
            installed devices and modules.
        name_to_assign_to_channel (Optional[str]): Name for the new
            virtual channel. When omitted, NI-DAQmx reuses the
            physical channel name.
        min_val (Optional[float]): Minimum value, in **units**, you
            expect to measure.
        max_val (Optional[float]): Maximum value, in **units**, you
            expect to measure.
        units (Optional[nidaqmx.constants.TEDSUnits]): Unit in which
            measurements are returned from the channel.
        voltage_excit_source (Optional[nidaqmx.constants.ExcitationSource]):
            Source of excitation.
        voltage_excit_val (Optional[float]): Excitation, in volts,
            supplied to the sensor; consult the sensor documentation
            for appropriate excitation values.
        custom_scale_name (Optional[str]): Name of a custom scale to
            apply to the channel; requires **units** to be
            **FROM_CUSTOM_SCALE**.
    Returns:
        nidaqmx._task_modules.channels.ai_channel.AIChannel:
            The newly created channel object.
    """
    func = lib_importer.windll.DAQmxCreateTEDSAIBridgeChan
    if func.argtypes is None:
        # Register the C signature once, double-checked under the
        # per-function lock so concurrent callers stay safe.
        with func.arglock:
            if func.argtypes is None:
                func.argtypes = [
                    lib_importer.task_handle, ctypes_byte_str,
                    ctypes_byte_str, ctypes.c_double, ctypes.c_double,
                    ctypes.c_int, ctypes.c_int, ctypes.c_double,
                    ctypes_byte_str]

    status = func(
        self._handle, physical_channel, name_to_assign_to_channel,
        min_val, max_val, units.value, voltage_excit_source.value,
        voltage_excit_val, custom_scale_name)
    check_for_error(status)

    return self._create_chan(physical_channel, name_to_assign_to_channel)
def add_teds_ai_current_chan(
        self, physical_channel, name_to_assign_to_channel="",
        terminal_config=TerminalConfiguration.DEFAULT, min_val=-0.01,
        max_val=0.01, units=TEDSUnits.FROM_TEDS,
        shunt_resistor_loc=CurrentShuntResistorLocation.LET_DRIVER_CHOOSE,
        ext_shunt_resistor_val=249.0, custom_scale_name=""):
    """
    Creates channel(s) to measure current. The physical channel(s)
    must already be configured with TEDS information.

    Args:
        physical_channel (str): Names of the physical channels from
            which to create virtual channels. The DAQmx physical
            channel constant lists every physical channel on
            installed devices and modules.
        name_to_assign_to_channel (Optional[str]): Name for the new
            virtual channel. When omitted, NI-DAQmx reuses the
            physical channel name.
        terminal_config (Optional[nidaqmx.constants.TerminalConfiguration]):
            Input terminal configuration for the channel.
        min_val (Optional[float]): Minimum value, in **units**, you
            expect to measure.
        max_val (Optional[float]): Maximum value, in **units**, you
            expect to measure.
        units (Optional[nidaqmx.constants.TEDSUnits]): Units in
            which measurements are returned.
        shunt_resistor_loc (Optional[nidaqmx.constants.CurrentShuntResistorLocation]):
            Location of the shunt resistor. Specify **INTERNAL** for
            devices with built-in shunt resistors; for devices
            without one, attach an external resistor, set this input
            to **EXTERNAL**, and provide its value through
            **ext_shunt_resistor_val**.
        ext_shunt_resistor_val (Optional[float]): Resistance, in
            ohms, of an external shunt resistor.
        custom_scale_name (Optional[str]): Name of a custom scale to
            apply to the channel; requires **units** to be
            **FROM_CUSTOM_SCALE**.
    Returns:
        nidaqmx._task_modules.channels.ai_channel.AIChannel:
            The newly created channel object.
    """
    func = lib_importer.windll.DAQmxCreateTEDSAICurrentChan
    if func.argtypes is None:
        # Register the C signature once, double-checked under the
        # per-function lock so concurrent callers stay safe.
        with func.arglock:
            if func.argtypes is None:
                func.argtypes = [
                    lib_importer.task_handle, ctypes_byte_str,
                    ctypes_byte_str, ctypes.c_int, ctypes.c_double,
                    ctypes.c_double, ctypes.c_int, ctypes.c_int,
                    ctypes.c_double, ctypes_byte_str]

    status = func(
        self._handle, physical_channel, name_to_assign_to_channel,
        terminal_config.value, min_val, max_val, units.value,
        shunt_resistor_loc.value, ext_shunt_resistor_val,
        custom_scale_name)
    check_for_error(status)

    return self._create_chan(physical_channel, name_to_assign_to_channel)
def add_teds_ai_force_bridge_chan(
        self, physical_channel, name_to_assign_to_channel="",
        min_val=-100.0, max_val=100.0, units=ForceUnits.POUNDS,
        voltage_excit_source=ExcitationSource.INTERNAL,
        voltage_excit_val=2.5, custom_scale_name=""):
    """
    Creates channel(s) that use a Wheatstone bridge to measure
    force or load. The physical channel(s) must already be
    configured with TEDS information; NI-DAQmx scales electrical
    values to physical values according to that TEDS information.

    Args:
        physical_channel (str): Names of the physical channels from
            which to create virtual channels. The DAQmx physical
            channel constant lists every physical channel on
            installed devices and modules.
        name_to_assign_to_channel (Optional[str]): Name for the new
            virtual channel. When omitted, NI-DAQmx reuses the
            physical channel name.
        min_val (Optional[float]): Minimum value, in **units**, you
            expect to measure.
        max_val (Optional[float]): Maximum value, in **units**, you
            expect to measure.
        units (Optional[nidaqmx.constants.ForceUnits]): Unit in
            which force measurements are returned from the channel.
        voltage_excit_source (Optional[nidaqmx.constants.ExcitationSource]):
            Source of excitation.
        voltage_excit_val (Optional[float]): Excitation, in volts,
            supplied to the sensor; consult the sensor documentation
            for appropriate excitation values.
        custom_scale_name (Optional[str]): Name of a custom scale to
            apply to the channel; requires **units** to be
            **FROM_CUSTOM_SCALE**.
    Returns:
        nidaqmx._task_modules.channels.ai_channel.AIChannel:
            The newly created channel object.
    """
    func = lib_importer.windll.DAQmxCreateTEDSAIForceBridgeChan
    if func.argtypes is None:
        # Register the C signature once, double-checked under the
        # per-function lock so concurrent callers stay safe.
        with func.arglock:
            if func.argtypes is None:
                func.argtypes = [
                    lib_importer.task_handle, ctypes_byte_str,
                    ctypes_byte_str, ctypes.c_double, ctypes.c_double,
                    ctypes.c_int, ctypes.c_int, ctypes.c_double,
                    ctypes_byte_str]

    status = func(
        self._handle, physical_channel, name_to_assign_to_channel,
        min_val, max_val, units.value, voltage_excit_source.value,
        voltage_excit_val, custom_scale_name)
    check_for_error(status)

    return self._create_chan(physical_channel, name_to_assign_to_channel)
def add_teds_ai_force_iepe_chan(
        self, physical_channel, name_to_assign_to_channel="",
        terminal_config=TerminalConfiguration.DEFAULT, min_val=-2000.0,
        max_val=2000.0, units=ForceUnits.NEWTONS,
        current_excit_source=ExcitationSource.INTERNAL,
        current_excit_val=0.001, custom_scale_name=""):
    """
    Creates channel(s) that use an IEPE force sensor to measure
    force or load. The physical channel(s) must already be
    configured with TEDS information.

    Args:
        physical_channel (str): Names of the physical channels from
            which to create virtual channels. The DAQmx physical
            channel constant lists every physical channel on
            installed devices and modules.
        name_to_assign_to_channel (Optional[str]): Name for the new
            virtual channel. When omitted, NI-DAQmx reuses the
            physical channel name.
        terminal_config (Optional[nidaqmx.constants.TerminalConfiguration]):
            Input terminal configuration for the channel.
        min_val (Optional[float]): Minimum value, in **units**, you
            expect to measure.
        max_val (Optional[float]): Maximum value, in **units**, you
            expect to measure.
        units (Optional[nidaqmx.constants.ForceUnits]): Unit in
            which force measurements are returned from the channel.
        current_excit_source (Optional[nidaqmx.constants.ExcitationSource]):
            Source of excitation.
        current_excit_val (Optional[float]): Excitation, in amperes,
            supplied to the sensor; consult the sensor documentation
            for the proper value.
        custom_scale_name (Optional[str]): Name of a custom scale to
            apply to the channel; requires **units** to be
            **FROM_CUSTOM_SCALE**.
    Returns:
        nidaqmx._task_modules.channels.ai_channel.AIChannel:
            The newly created channel object.
    """
    func = lib_importer.windll.DAQmxCreateTEDSAIForceIEPEChan
    if func.argtypes is None:
        # Register the C signature once, double-checked under the
        # per-function lock so concurrent callers stay safe.
        with func.arglock:
            if func.argtypes is None:
                func.argtypes = [
                    lib_importer.task_handle, ctypes_byte_str,
                    ctypes_byte_str, ctypes.c_int, ctypes.c_double,
                    ctypes.c_double, ctypes.c_int, ctypes.c_int,
                    ctypes.c_double, ctypes_byte_str]

    status = func(
        self._handle, physical_channel, name_to_assign_to_channel,
        terminal_config.value, min_val, max_val, units.value,
        current_excit_source.value, current_excit_val, custom_scale_name)
    check_for_error(status)

    return self._create_chan(physical_channel, name_to_assign_to_channel)
def add_teds_ai_microphone_chan(
        self, physical_channel, name_to_assign_to_channel="",
        terminal_config=TerminalConfiguration.DEFAULT,
        units=SoundPressureUnits.PA, max_snd_press_level=100.0,
        current_excit_source=ExcitationSource.INTERNAL,
        current_excit_val=0.004, custom_scale_name=""):
    """
    Creates channel(s) that use a microphone to measure sound
    pressure. The physical channel(s) must already be configured
    with TEDS information.

    Args:
        physical_channel (str): Names of the physical channels from
            which to create virtual channels. The channels must have
            been configured with TEDS information. The DAQmx
            physical channel constant lists every physical channel
            on installed devices and modules.
        name_to_assign_to_channel (Optional[str]): Name for the new
            virtual channel. When omitted, NI-DAQmx reuses the
            physical channel name.
        terminal_config (Optional[nidaqmx.constants.TerminalConfiguration]):
            Input terminal configuration for the channel.
        units (Optional[nidaqmx.constants.SoundPressureUnits]):
            Units in which sound pressure measurements are returned.
        max_snd_press_level (Optional[float]): Maximum instantaneous
            sound pressure level you expect to measure, in decibels
            referenced to 20 micropascals.
        current_excit_source (Optional[nidaqmx.constants.ExcitationSource]):
            Source of excitation.
        current_excit_val (Optional[float]): Excitation, in amperes,
            supplied to the sensor; consult the sensor documentation
            for the proper value.
        custom_scale_name (Optional[str]): Name of a custom scale to
            apply to the channel; requires **units** to be
            **FROM_CUSTOM_SCALE**.
    Returns:
        nidaqmx._task_modules.channels.ai_channel.AIChannel:
            The newly created channel object.
    """
    func = lib_importer.windll.DAQmxCreateTEDSAIMicrophoneChan
    if func.argtypes is None:
        # Register the C signature once, double-checked under the
        # per-function lock so concurrent callers stay safe.
        with func.arglock:
            if func.argtypes is None:
                func.argtypes = [
                    lib_importer.task_handle, ctypes_byte_str,
                    ctypes_byte_str, ctypes.c_int, ctypes.c_int,
                    ctypes.c_double, ctypes.c_int, ctypes.c_double,
                    ctypes_byte_str]

    status = func(
        self._handle, physical_channel, name_to_assign_to_channel,
        terminal_config.value, units.value, max_snd_press_level,
        current_excit_source.value, current_excit_val, custom_scale_name)
    check_for_error(status)

    return self._create_chan(physical_channel, name_to_assign_to_channel)
def add_teds_ai_pos_lvdt_chan(
        self, physical_channel, name_to_assign_to_channel="",
        min_val=-0.1, max_val=0.1, units=LengthUnits.METERS,
        voltage_excit_source=ExcitationSource.INTERNAL,
        voltage_excit_val=1.0, voltage_excit_freq=2500.0,
        ac_excit_wire_mode=ACExcitWireMode.FOUR_WIRE,
        custom_scale_name=""):
    """
    Creates channel(s) that use an LVDT to measure linear position.
    The physical channel(s) must already be configured with TEDS
    information.

    Args:
        physical_channel (str): Names of the physical channels from
            which to create virtual channels. The DAQmx physical
            channel constant lists every physical channel on
            installed devices and modules.
        name_to_assign_to_channel (Optional[str]): Name for the new
            virtual channel. When omitted, NI-DAQmx reuses the
            physical channel name.
        min_val (Optional[float]): Minimum value, in **units**, you
            expect to measure.
        max_val (Optional[float]): Maximum value, in **units**, you
            expect to measure.
        units (Optional[nidaqmx.constants.LengthUnits]): Units in
            which linear position measurements are returned.
        voltage_excit_source (Optional[nidaqmx.constants.ExcitationSource]):
            Source of excitation.
        voltage_excit_val (Optional[float]): Excitation, in volts,
            supplied to the sensor; consult the sensor documentation
            for appropriate excitation values.
        voltage_excit_freq (Optional[float]): Excitation frequency,
            in hertz, that the sensor requires; consult the sensor
            documentation for this value.
        ac_excit_wire_mode (Optional[nidaqmx.constants.ACExcitWireMode]):
            Number of leads on the sensor. Some sensors require
            leads to be tied together to create a four- or five-
            wire sensor; refer to the sensor documentation.
        custom_scale_name (Optional[str]): Name of a custom scale to
            apply to the channel; requires **units** to be
            **FROM_CUSTOM_SCALE**.
    Returns:
        nidaqmx._task_modules.channels.ai_channel.AIChannel:
            The newly created channel object.
    """
    func = lib_importer.windll.DAQmxCreateTEDSAIPosLVDTChan
    if func.argtypes is None:
        # Register the C signature once, double-checked under the
        # per-function lock so concurrent callers stay safe.
        with func.arglock:
            if func.argtypes is None:
                func.argtypes = [
                    lib_importer.task_handle, ctypes_byte_str,
                    ctypes_byte_str, ctypes.c_double, ctypes.c_double,
                    ctypes.c_int, ctypes.c_int, ctypes.c_double,
                    ctypes.c_double, ctypes.c_int, ctypes_byte_str]

    status = func(
        self._handle, physical_channel, name_to_assign_to_channel,
        min_val, max_val, units.value, voltage_excit_source.value,
        voltage_excit_val, voltage_excit_freq, ac_excit_wire_mode.value,
        custom_scale_name)
    check_for_error(status)

    return self._create_chan(physical_channel, name_to_assign_to_channel)
def add_teds_ai_pos_rvdt_chan(
        self, physical_channel, name_to_assign_to_channel="",
        min_val=-70.0, max_val=70.0, units=AngleUnits.DEGREES,
        voltage_excit_source=ExcitationSource.INTERNAL,
        voltage_excit_val=1.0, voltage_excit_freq=2500.0,
        ac_excit_wire_mode=ACExcitWireMode.FOUR_WIRE,
        custom_scale_name=""):
    """
    Creates channel(s) that use an RVDT to measure angular
    position. The physical channel(s) must already be configured
    with TEDS information.

    Args:
        physical_channel (str): Names of the physical channels from
            which to create virtual channels. The DAQmx physical
            channel constant lists every physical channel on
            installed devices and modules.
        name_to_assign_to_channel (Optional[str]): Name for the new
            virtual channel. When omitted, NI-DAQmx reuses the
            physical channel name.
        min_val (Optional[float]): Minimum value, in **units**, you
            expect to measure.
        max_val (Optional[float]): Maximum value, in **units**, you
            expect to measure.
        units (Optional[nidaqmx.constants.AngleUnits]): Units in
            which angular position measurements are returned.
        voltage_excit_source (Optional[nidaqmx.constants.ExcitationSource]):
            Source of excitation.
        voltage_excit_val (Optional[float]): Excitation, in volts,
            supplied to the sensor; consult the sensor documentation
            for appropriate excitation values.
        voltage_excit_freq (Optional[float]): Excitation frequency,
            in hertz, that the sensor requires; consult the sensor
            documentation for this value.
        ac_excit_wire_mode (Optional[nidaqmx.constants.ACExcitWireMode]):
            Number of leads on the sensor. Some sensors require
            leads to be tied together to create a four- or five-
            wire sensor; refer to the sensor documentation.
        custom_scale_name (Optional[str]): Name of a custom scale to
            apply to the channel; requires **units** to be
            **FROM_CUSTOM_SCALE**.
    Returns:
        nidaqmx._task_modules.channels.ai_channel.AIChannel:
            The newly created channel object.
    """
    func = lib_importer.windll.DAQmxCreateTEDSAIPosRVDTChan
    if func.argtypes is None:
        # Register the C signature once, double-checked under the
        # per-function lock so concurrent callers stay safe.
        with func.arglock:
            if func.argtypes is None:
                func.argtypes = [
                    lib_importer.task_handle, ctypes_byte_str,
                    ctypes_byte_str, ctypes.c_double, ctypes.c_double,
                    ctypes.c_int, ctypes.c_int, ctypes.c_double,
                    ctypes.c_double, ctypes.c_int, ctypes_byte_str]

    status = func(
        self._handle, physical_channel, name_to_assign_to_channel,
        min_val, max_val, units.value, voltage_excit_source.value,
        voltage_excit_val, voltage_excit_freq, ac_excit_wire_mode.value,
        custom_scale_name)
    check_for_error(status)

    return self._create_chan(physical_channel, name_to_assign_to_channel)
def add_teds_ai_pressure_bridge_chan(
        self, physical_channel, name_to_assign_to_channel="",
        min_val=-100.0, max_val=100.0,
        units=PressureUnits.POUNDS_PER_SQ_INCH,
        voltage_excit_source=ExcitationSource.INTERNAL,
        voltage_excit_val=2.5, custom_scale_name=""):
    """
    Creates channel(s) that use a Wheatstone bridge to measure
    pressure. The physical channel(s) must already be configured
    with TEDS information; NI-DAQmx scales electrical values to
    physical values according to that TEDS information.

    Args:
        physical_channel (str): Names of the physical channels from
            which to create virtual channels. The DAQmx physical
            channel constant lists every physical channel on
            installed devices and modules.
        name_to_assign_to_channel (Optional[str]): Name for the new
            virtual channel. When omitted, NI-DAQmx reuses the
            physical channel name.
        min_val (Optional[float]): Minimum value, in **units**, you
            expect to measure.
        max_val (Optional[float]): Maximum value, in **units**, you
            expect to measure.
        units (Optional[nidaqmx.constants.PressureUnits]): Unit in
            which pressure measurements are returned from the
            channel.
        voltage_excit_source (Optional[nidaqmx.constants.ExcitationSource]):
            Source of excitation.
        voltage_excit_val (Optional[float]): Excitation, in volts,
            supplied to the sensor; consult the sensor documentation
            for appropriate excitation values.
        custom_scale_name (Optional[str]): Name of a custom scale to
            apply to the channel; requires **units** to be
            **FROM_CUSTOM_SCALE**.
    Returns:
        nidaqmx._task_modules.channels.ai_channel.AIChannel:
            The newly created channel object.
    """
    func = lib_importer.windll.DAQmxCreateTEDSAIPressureBridgeChan
    if func.argtypes is None:
        # Register the C signature once, double-checked under the
        # per-function lock so concurrent callers stay safe.
        with func.arglock:
            if func.argtypes is None:
                func.argtypes = [
                    lib_importer.task_handle, ctypes_byte_str,
                    ctypes_byte_str, ctypes.c_double, ctypes.c_double,
                    ctypes.c_int, ctypes.c_int, ctypes.c_double,
                    ctypes_byte_str]

    status = func(
        self._handle, physical_channel, name_to_assign_to_channel,
        min_val, max_val, units.value, voltage_excit_source.value,
        voltage_excit_val, custom_scale_name)
    check_for_error(status)

    return self._create_chan(physical_channel, name_to_assign_to_channel)
def add_teds_ai_resistance_chan(
        self, physical_channel, name_to_assign_to_channel="",
        min_val=100.0, max_val=1000.0, units=TEDSUnits.FROM_TEDS,
        resistance_config=ResistanceConfiguration.TWO_WIRE,
        current_excit_source=ExcitationSource.EXTERNAL,
        current_excit_val=0.001, custom_scale_name=""):
    """
    Creates channel(s) to measure resistance. The physical
    channel(s) must already be configured with TEDS information.

    Args:
        physical_channel (str): Names of the physical channels from
            which to create virtual channels. The DAQmx physical
            channel constant lists every physical channel on
            installed devices and modules.
        name_to_assign_to_channel (Optional[str]): Name for the new
            virtual channel. When omitted, NI-DAQmx reuses the
            physical channel name.
        min_val (Optional[float]): Minimum value, in **units**, you
            expect to measure.
        max_val (Optional[float]): Maximum value, in **units**, you
            expect to measure.
        units (Optional[nidaqmx.constants.TEDSUnits]): Units in
            which measurements are returned.
        resistance_config (Optional[nidaqmx.constants.ResistanceConfiguration]):
            Number of wires to use for resistive measurements.
        current_excit_source (Optional[nidaqmx.constants.ExcitationSource]):
            Source of excitation.
        current_excit_val (Optional[float]): Excitation, in amperes,
            supplied to the sensor; consult the sensor documentation
            for the proper value.
        custom_scale_name (Optional[str]): Name of a custom scale to
            apply to the channel; requires **units** to be
            **FROM_CUSTOM_SCALE**.
    Returns:
        nidaqmx._task_modules.channels.ai_channel.AIChannel:
            The newly created channel object.
    """
    func = lib_importer.windll.DAQmxCreateTEDSAIResistanceChan
    if func.argtypes is None:
        # Register the C signature once, double-checked under the
        # per-function lock so concurrent callers stay safe.
        with func.arglock:
            if func.argtypes is None:
                func.argtypes = [
                    lib_importer.task_handle, ctypes_byte_str,
                    ctypes_byte_str, ctypes.c_double, ctypes.c_double,
                    ctypes.c_int, ctypes.c_int, ctypes.c_int,
                    ctypes.c_double, ctypes_byte_str]

    status = func(
        self._handle, physical_channel, name_to_assign_to_channel,
        min_val, max_val, units.value, resistance_config.value,
        current_excit_source.value, current_excit_val, custom_scale_name)
    check_for_error(status)

    return self._create_chan(physical_channel, name_to_assign_to_channel)
def add_teds_ai_rtd_chan(
        self, physical_channel, name_to_assign_to_channel="", min_val=0.0,
        max_val=100.0, units=TemperatureUnits.DEG_C,
        resistance_config=ResistanceConfiguration.TWO_WIRE,
        current_excit_source=ExcitationSource.EXTERNAL,
        current_excit_val=0.0025):
    """
    Creates channel(s) that use an RTD to measure temperature. The
    physical channel(s) must already be configured with TEDS
    information.

    Args:
        physical_channel (str): Names of the physical channels from
            which to create virtual channels. The DAQmx physical
            channel constant lists every physical channel on
            installed devices and modules.
        name_to_assign_to_channel (Optional[str]): Name for the new
            virtual channel. When omitted, NI-DAQmx reuses the
            physical channel name.
        min_val (Optional[float]): Minimum value, in **units**, you
            expect to measure.
        max_val (Optional[float]): Maximum value, in **units**, you
            expect to measure.
        units (Optional[nidaqmx.constants.TemperatureUnits]): Units
            in which temperature measurements are returned.
        resistance_config (Optional[nidaqmx.constants.ResistanceConfiguration]):
            Number of wires to use for resistive measurements.
        current_excit_source (Optional[nidaqmx.constants.ExcitationSource]):
            Source of excitation.
        current_excit_val (Optional[float]): Excitation, in amperes,
            supplied to the sensor; consult the sensor documentation
            for the proper value.
    Returns:
        nidaqmx._task_modules.channels.ai_channel.AIChannel:
            The newly created channel object.
    """
    func = lib_importer.windll.DAQmxCreateTEDSAIRTDChan
    if func.argtypes is None:
        # Register the C signature once, double-checked under the
        # per-function lock so concurrent callers stay safe.
        with func.arglock:
            if func.argtypes is None:
                func.argtypes = [
                    lib_importer.task_handle, ctypes_byte_str,
                    ctypes_byte_str, ctypes.c_double, ctypes.c_double,
                    ctypes.c_int, ctypes.c_int, ctypes.c_int,
                    ctypes.c_double]

    status = func(
        self._handle, physical_channel, name_to_assign_to_channel,
        min_val, max_val, units.value, resistance_config.value,
        current_excit_source.value, current_excit_val)
    check_for_error(status)

    return self._create_chan(physical_channel, name_to_assign_to_channel)
def add_teds_ai_strain_gage_chan(
        self, physical_channel, name_to_assign_to_channel="",
        min_val=-0.001, max_val=0.001, units=StrainUnits.STRAIN,
        voltage_excit_source=ExcitationSource.INTERNAL,
        voltage_excit_val=2.5, initial_bridge_voltage=0.0,
        lead_wire_resistance=0.0, custom_scale_name=""):
    """
    Creates channel(s) to measure strain. The physical channel(s)
    must already be configured with TEDS information.

    Args:
        physical_channel (str): Names of the physical channels from
            which to create virtual channels. The DAQmx physical
            channel constant lists every physical channel on
            installed devices and modules.
        name_to_assign_to_channel (Optional[str]): Name for the new
            virtual channel. When omitted, NI-DAQmx reuses the
            physical channel name.
        min_val (Optional[float]): Minimum value, in **units**, you
            expect to measure.
        max_val (Optional[float]): Maximum value, in **units**, you
            expect to measure.
        units (Optional[nidaqmx.constants.StrainUnits]): Units in
            which strain measurements are returned.
        voltage_excit_source (Optional[nidaqmx.constants.ExcitationSource]):
            Bridge configuration and measurement information.
        voltage_excit_val (Optional[float]): Bridge configuration
            and measurement information.
        initial_bridge_voltage (Optional[float]): Bridge
            configuration and measurement information.
        lead_wire_resistance (Optional[float]): Bridge configuration
            and measurement information.
        custom_scale_name (Optional[str]): Name of a custom scale to
            apply to the channel; requires **units** to be
            **FROM_CUSTOM_SCALE**.
    Returns:
        nidaqmx._task_modules.channels.ai_channel.AIChannel:
            The newly created channel object.
    """
    func = lib_importer.windll.DAQmxCreateTEDSAIStrainGageChan
    if func.argtypes is None:
        # Register the C signature once, double-checked under the
        # per-function lock so concurrent callers stay safe.
        with func.arglock:
            if func.argtypes is None:
                func.argtypes = [
                    lib_importer.task_handle, ctypes_byte_str,
                    ctypes_byte_str, ctypes.c_double, ctypes.c_double,
                    ctypes.c_int, ctypes.c_int, ctypes.c_double,
                    ctypes.c_double, ctypes.c_double, ctypes_byte_str]

    status = func(
        self._handle, physical_channel, name_to_assign_to_channel,
        min_val, max_val, units.value, voltage_excit_source.value,
        voltage_excit_val, initial_bridge_voltage, lead_wire_resistance,
        custom_scale_name)
    check_for_error(status)

    return self._create_chan(physical_channel, name_to_assign_to_channel)
def add_teds_ai_thrmcpl_chan(
        self, physical_channel, name_to_assign_to_channel="", min_val=0.0,
        max_val=100.0, units=TemperatureUnits.DEG_C,
        cjc_source=CJCSource.CONSTANT_USER_VALUE, cjc_val=25.0,
        cjc_channel=""):
    """
    Creates channel(s) that use a thermocouple to measure
    temperature. The physical channel(s) must already be configured
    with TEDS information.

    Args:
        physical_channel (str): Names of the physical channels from
            which to create virtual channels. The DAQmx physical
            channel constant lists every physical channel on
            installed devices and modules.
        name_to_assign_to_channel (Optional[str]): Name for the new
            virtual channel. When omitted, NI-DAQmx reuses the
            physical channel name.
        min_val (Optional[float]): Minimum value, in **units**, you
            expect to measure.
        max_val (Optional[float]): Maximum value, in **units**, you
            expect to measure.
        units (Optional[nidaqmx.constants.TemperatureUnits]): Units
            in which temperature measurements are returned.
        cjc_source (Optional[nidaqmx.constants.CJCSource]): Source
            of cold-junction compensation.
        cjc_val (Optional[float]): Cold-junction temperature, in
            **units**, used when **cjc_source** is
            **CONSTANT_VALUE**.
        cjc_channel (Optional[str]): Channel that acquires the
            thermocouple cold-junction temperature when
            **cjc_source** is **CHANNEL**.
    Returns:
        nidaqmx._task_modules.channels.ai_channel.AIChannel:
            The newly created channel object.
    """
    func = lib_importer.windll.DAQmxCreateTEDSAIThrmcplChan
    if func.argtypes is None:
        # Register the C signature once, double-checked under the
        # per-function lock so concurrent callers stay safe.
        with func.arglock:
            if func.argtypes is None:
                func.argtypes = [
                    lib_importer.task_handle, ctypes_byte_str,
                    ctypes_byte_str, ctypes.c_double, ctypes.c_double,
                    ctypes.c_int, ctypes.c_int, ctypes.c_double,
                    ctypes_byte_str]

    status = func(
        self._handle, physical_channel, name_to_assign_to_channel,
        min_val, max_val, units.value, cjc_source.value, cjc_val,
        cjc_channel)
    check_for_error(status)

    return self._create_chan(physical_channel, name_to_assign_to_channel)
def add_teds_ai_thrmstr_chan_iex(
self, physical_channel, name_to_assign_to_channel="", min_val=0.0,
max_val=100.0, units=TemperatureUnits.DEG_C,
resistance_config=ResistanceConfiguration.FOUR_WIRE,
current_excit_source=ExcitationSource.EXTERNAL,
current_excit_val=0.00015):
"""
Creates channel(s) that use a thermistor to measure temperature.
Use this instance when the thermistor requires current
excitation. You must configure the physical channel(s) with TEDS
information to use this function.
Args:
physical_channel (str): Specifies the names of the physical
channels to use to create virtual channels. The DAQmx
physical channel constant lists all physical channels on
devices and modules installed in the system.
name_to_assign_to_channel (Optional[str]): Specifies a name
to assign to the virtual channel this function creates.
If you do not specify a value for this input, NI-DAQmx
uses the physical channel name as the virtual channel
name.
min_val (Optional[float]): Specifies in **units** the
minimum value you expect to measure.
max_val (Optional[float]): Specifies in **units** the
maximum value you expect to measure.
units (Optional[nidaqmx.constants.TemperatureUnits]):
Specifies the units to use to return temperature
measurements.
resistance_config (Optional[nidaqmx.constants.ResistanceConfiguration]):
Specifies the number of wires to use for resistive
measurements.
current_excit_source (Optional[nidaqmx.constants.ExcitationSource]):
Specifies the source of excitation.
current_excit_val (Optional[float]): Specifies in amperes
the amount of excitation to supply to the sensor. Refer
to the sensor documentation to determine this value.
Returns:
nidaqmx._task_modules.channels.ai_channel.AIChannel:
Indicates the newly created channel object.
"""
cfunc = lib_importer.windll.DAQmxCreateTEDSAIThrmstrChanIex
if cfunc.argtypes is None:
with cfunc.arglock:
if cfunc.argtypes is None:
cfunc.argtypes = [
lib_importer.task_handle, ctypes_byte_str,
ctypes_byte_str, ctypes.c_double, ctypes.c_double,
ctypes.c_int, ctypes.c_int, ctypes.c_int,
ctypes.c_double]
error_code = cfunc(
self._handle, physical_channel, name_to_assign_to_channel,
min_val, max_val, units.value, resistance_config.value,
current_excit_source.value, current_excit_val)
check_for_error(error_code)
return self._create_chan(physical_channel, name_to_assign_to_channel)
def add_teds_ai_thrmstr_chan_vex(
self, physical_channel, name_to_assign_to_channel="", min_val=0.0,
max_val=100.0, units=TemperatureUnits.DEG_C,
resistance_config=ResistanceConfiguration.FOUR_WIRE,
voltage_excit_source=ExcitationSource.EXTERNAL,
voltage_excit_val=2.5, r_1=5000.0):
"""
Creates channel(s) that use a thermistor to measure temperature.
Use this instance when the thermistor requires voltage
excitation. You must configure the physical channel(s) with TEDS
information to use this function.
Args:
physical_channel (str): Specifies the names of the physical
channels to use to create virtual channels. The DAQmx
physical channel constant lists all physical channels on
devices and modules installed in the system.
name_to_assign_to_channel (Optional[str]): Specifies a name
to assign to the virtual channel this function creates.
If you do not specify a value for this input, NI-DAQmx
uses the physical channel name as the virtual channel
name.
min_val (Optional[float]): Specifies in **units** the
minimum value you expect to measure.
max_val (Optional[float]): Specifies in **units** the
maximum value you expect to measure.
units (Optional[nidaqmx.constants.TemperatureUnits]):
Specifies the units to use to return temperature
measurements.
resistance_config (Optional[nidaqmx.constants.ResistanceConfiguration]):
Specifies the number of wires to use for resistive
measurements.
voltage_excit_source (Optional[nidaqmx.constants.ExcitationSource]):
Specifies the source of excitation.
voltage_excit_val (Optional[float]): Specifies in volts the
amount of excitation supplied to the sensor. Refer to
the sensor documentation to determine appropriate
excitation values.
r_1 (Optional[float]): Specifies in ohms the value of the
reference resistor.
Returns:
nidaqmx._task_modules.channels.ai_channel.AIChannel:
Indicates the newly created channel object.
"""
cfunc = lib_importer.windll.DAQmxCreateTEDSAIThrmstrChanVex
if cfunc.argtypes is None:
with cfunc.arglock:
if cfunc.argtypes is None:
cfunc.argtypes = [
lib_importer.task_handle, ctypes_byte_str,
ctypes_byte_str, ctypes.c_double, ctypes.c_double,
ctypes.c_int, ctypes.c_int, ctypes.c_int,
ctypes.c_double, ctypes.c_double]
error_code = cfunc(
self._handle, physical_channel, name_to_assign_to_channel,
min_val, max_val, units.value, resistance_config.value,
voltage_excit_source.value, voltage_excit_val, r_1)
check_for_error(error_code)
return self._create_chan(physical_channel, name_to_assign_to_channel)
def add_teds_ai_torque_bridge_chan(
self, physical_channel, name_to_assign_to_channel="",
min_val=-100.0, max_val=100.0, units=TorqueUnits.INCH_POUNDS,
voltage_excit_source=ExcitationSource.INTERNAL,
voltage_excit_val=2.5, custom_scale_name=""):
"""
Creates channel(s) that use a Wheatstone bridge to measure
torque. You must configure the physical channel(s) with TEDS
information to use this function. NI-DAQmx scales electrical
values to physical values according to that TEDS information.
Args:
physical_channel (str): Specifies the names of the physical
channels to use to create virtual channels. The DAQmx
physical channel constant lists all physical channels on
devices and modules installed in the system.
name_to_assign_to_channel (Optional[str]): Specifies a name
to assign to the virtual channel this function creates.
If you do not specify a value for this input, NI-DAQmx
uses the physical channel name as the virtual channel
name.
min_val (Optional[float]): Specifies in **units** the
minimum value you expect to measure.
max_val (Optional[float]): Specifies in **units** the
maximum value you expect to measure.
units (Optional[nidaqmx.constants.TorqueUnits]): Specifies
in which unit to return torque measurements from the
channel.
voltage_excit_source (Optional[nidaqmx.constants.ExcitationSource]):
Specifies the source of excitation.
voltage_excit_val (Optional[float]): Specifies in volts the
amount of excitation supplied to the sensor. Refer to
the sensor documentation to determine appropriate
excitation values.
custom_scale_name (Optional[str]): Specifies the name of a
custom scale for the channel. If you want the channel to
use a custom scale, specify the name of the custom scale
to this input and set **units** to
**FROM_CUSTOM_SCALE**.
Returns:
nidaqmx._task_modules.channels.ai_channel.AIChannel:
Indicates the newly created channel object.
"""
cfunc = lib_importer.windll.DAQmxCreateTEDSAITorqueBridgeChan
if cfunc.argtypes is None:
with cfunc.arglock:
if cfunc.argtypes is None:
cfunc.argtypes = [
lib_importer.task_handle, ctypes_byte_str,
ctypes_byte_str, ctypes.c_double, ctypes.c_double,
ctypes.c_int, ctypes.c_int, ctypes.c_double,
ctypes_byte_str]
error_code = cfunc(
self._handle, physical_channel, name_to_assign_to_channel,
min_val, max_val, units.value, voltage_excit_source.value,
voltage_excit_val, custom_scale_name)
check_for_error(error_code)
return self._create_chan(physical_channel, name_to_assign_to_channel)
def add_teds_ai_voltage_chan(
self, physical_channel, name_to_assign_to_channel="",
terminal_config=TerminalConfiguration.DEFAULT, min_val=-5.0,
max_val=5.0, units=TEDSUnits.FROM_TEDS, custom_scale_name=""):
"""
Creates channel(s) to measure voltage. You must configure the
physical channel(s) with TEDS information to use this function.
If the measurement requires the use of internal excitation or
you need excitation to scale the voltage, use the TEDS AI Custom
Voltage with Excitation instance of this function.
Args:
physical_channel (str): Specifies the names of the physical
channels to use to create virtual channels. The DAQmx
physical channel constant lists all physical channels on
devices and modules installed in the system.
name_to_assign_to_channel (Optional[str]): Specifies a name
to assign to the virtual channel this function creates.
If you do not specify a value for this input, NI-DAQmx
uses the physical channel name as the virtual channel
name.
terminal_config (Optional[nidaqmx.constants.TerminalConfiguration]):
Specifies the input terminal configuration for the
channel.
min_val (Optional[float]): Specifies in **units** the
minimum value you expect to measure.
max_val (Optional[float]): Specifies in **units** the
maximum value you expect to measure.
units (Optional[nidaqmx.constants.TEDSUnits]): Specifies the
units to use to return measurements.
custom_scale_name (Optional[str]): Specifies the name of a
custom scale for the channel. If you want the channel to
use a custom scale, specify the name of the custom scale
to this input and set **units** to
**FROM_CUSTOM_SCALE**.
Returns:
nidaqmx._task_modules.channels.ai_channel.AIChannel:
Indicates the newly created channel object.
"""
cfunc = lib_importer.windll.DAQmxCreateTEDSAIVoltageChan
if cfunc.argtypes is None:
with cfunc.arglock:
if cfunc.argtypes is None:
cfunc.argtypes = [
lib_importer.task_handle, ctypes_byte_str,
ctypes_byte_str, ctypes.c_int, ctypes.c_double,
ctypes.c_double, ctypes.c_int, ctypes_byte_str]
error_code = cfunc(
self._handle, physical_channel, name_to_assign_to_channel,
terminal_config.value, min_val, max_val, units.value,
custom_scale_name)
check_for_error(error_code)
return self._create_chan(physical_channel, name_to_assign_to_channel)
def add_teds_ai_voltage_chan_with_excit(
self, physical_channel, name_to_assign_to_channel="",
terminal_config=TerminalConfiguration.DEFAULT, min_val=-10.0,
max_val=10.0, units=TEDSUnits.FROM_TEDS,
voltage_excit_source=ExcitationSource.INTERNAL,
voltage_excit_val=0.0, custom_scale_name=""):
"""
Creates channel(s) to measure voltage. Use this instance for
custom sensors that require excitation. You can use the
excitation to scale the measurement. You must configure the
physical channel(s) with TEDS information to use this function.
Args:
physical_channel (str): Specifies the names of the physical
channels to use to create virtual channels. The DAQmx
physical channel constant lists all physical channels on
devices and modules installed in the system.
name_to_assign_to_channel (Optional[str]): Specifies a name
to assign to the virtual channel this function creates.
If you do not specify a value for this input, NI-DAQmx
uses the physical channel name as the virtual channel
name.
terminal_config (Optional[nidaqmx.constants.TerminalConfiguration]):
Specifies the input terminal configuration for the
channel.
min_val (Optional[float]): Specifies in **units** the
minimum value you expect to measure.
max_val (Optional[float]): Specifies in **units** the
maximum value you expect to measure.
units (Optional[nidaqmx.constants.TEDSUnits]): Specifies the
units to use to return measurements.
voltage_excit_source (Optional[nidaqmx.constants.ExcitationSource]):
Specifies the source of excitation.
voltage_excit_val (Optional[float]): Specifies in volts the
amount of excitation supplied to the sensor. Refer to
the sensor documentation to determine appropriate
excitation values.
custom_scale_name (Optional[str]): Specifies the name of a
custom scale for the channel. If you want the channel to
use a custom scale, specify the name of the custom scale
to this input and set **units** to
**FROM_CUSTOM_SCALE**.
Returns:
nidaqmx._task_modules.channels.ai_channel.AIChannel:
Indicates the newly created channel object.
"""
cfunc = lib_importer.windll.DAQmxCreateTEDSAIVoltageChanWithExcit
if cfunc.argtypes is None:
with cfunc.arglock:
if cfunc.argtypes is None:
cfunc.argtypes = [
lib_importer.task_handle, ctypes_byte_str,
ctypes_byte_str, ctypes.c_int, ctypes.c_double,
ctypes.c_double, ctypes.c_int, ctypes.c_int,
ctypes.c_double, ctypes_byte_str]
error_code = cfunc(
self._handle, physical_channel, name_to_assign_to_channel,
terminal_config.value, min_val, max_val, units.value,
voltage_excit_source.value, voltage_excit_val, custom_scale_name)
check_for_error(error_code)
return self._create_chan(physical_channel, name_to_assign_to_channel)
| 52.757749
| 98
| 0.627698
| 23,195
| 202,537
| 5.280405
| 0.023108
| 0.021832
| 0.026062
| 0.030405
| 0.951877
| 0.948701
| 0.947264
| 0.944954
| 0.94088
| 0.936422
| 0
| 0.005766
| 0.318352
| 202,537
| 3,838
| 99
| 52.771496
| 0.881384
| 0.546947
| 0
| 0.825191
| 1
| 0
| 0.000463
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.041221
| false
| 0
| 0.08626
| 0
| 0.168702
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
f47dc318630c0632cba152a7c84200513f1125b8
| 21,441
|
py
|
Python
|
test/test_parse_custom_time.py
|
chosak/apachelogs
|
7ee86af1c6c038bda68903b0da6537604900e331
|
[
"MIT"
] | null | null | null |
test/test_parse_custom_time.py
|
chosak/apachelogs
|
7ee86af1c6c038bda68903b0da6537604900e331
|
[
"MIT"
] | null | null | null |
test/test_parse_custom_time.py
|
chosak/apachelogs
|
7ee86af1c6c038bda68903b0da6537604900e331
|
[
"MIT"
] | null | null | null |
from datetime import date, datetime, time, timedelta, timezone
import locale
import pytest
from apachelogs import LogParser
w5 = timezone(timedelta(hours=-5))  # UTC-05:00, used for the EST test cases below
w4 = timezone(timedelta(hours=-4))  # UTC-04:00, used for the EDT test cases below
@pytest.mark.parametrize('fmt,entry,fields', [
(
'%{%a %b %d}t %r',
'Sat Nov 25 GET / HTTP/1.1',
{
"request_line": "GET / HTTP/1.1",
"request_time": None,
"request_time_fields": {
"abbrev_wday": "Sat",
"abbrev_mon": "Nov",
"mday": 25,
},
"directives": {
"%{%a}t": "Sat",
"%{%b}t": "Nov",
"%{%d}t": 25,
"%r": "GET / HTTP/1.1",
},
},
),
(
'%{%A %B %d}t %r',
'Saturday November 25 GET / HTTP/1.1',
{
"request_line": "GET / HTTP/1.1",
"request_time": None,
"request_time_fields": {
"full_wday": "Saturday",
"full_mon": "November",
"mday": 25,
},
"directives": {
"%{%A}t": "Saturday",
"%{%B}t": "November",
"%{%d}t": 25,
"%r": "GET / HTTP/1.1",
},
},
),
(
'%{%w (%u) %m/%d}t %r',
'6 (6) 11/25 GET / HTTP/1.1',
{
"request_line": "GET / HTTP/1.1",
"request_time": None,
"request_time_fields": {
"wday": 6,
"iso_wday": 6,
"mon": 11,
"mday": 25,
},
"directives": {
"%{%w}t": 6,
"%{%u}t": 6,
"%{%m}t": 11,
"%{%d}t": 25,
"%r": "GET / HTTP/1.1",
},
},
),
(
'%{%s}t %r',
'1511642826 GET / HTTP/1.1',
{
"request_line": "GET / HTTP/1.1",
"request_time": datetime(2017, 11, 25, 20, 47, 6, tzinfo=timezone.utc),
"request_time_fields": {"epoch": 1511642826},
"directives": {
"%{%s}t": 1511642826,
"%r": "GET / HTTP/1.1",
},
},
),
(
'%{%s@%z}t %r',
'1511642826@-0500 GET / HTTP/1.1',
{
"request_line": "GET / HTTP/1.1",
"request_time": datetime(2017, 11, 25, 15, 47, 6, tzinfo=w5),
"request_time_fields": {"epoch": 1511642826, "timezone": w5},
"directives": {
"%{%s}t": 1511642826,
"%{%z}t": w5,
"%r": "GET / HTTP/1.1",
},
},
),
(
'%{%Y-%m-%d %H:%M:%S}t %r',
'2017-11-25 20:47:06 GET / HTTP/1.1',
{
"request_line": "GET / HTTP/1.1",
"request_time": datetime(2017, 11, 25, 20, 47, 6),
"request_time_fields": {
"year": 2017,
"mon": 11,
"mday": 25,
"hour": 20,
"min": 47,
"sec": 6,
},
"directives": {
"%{%Y}t": 2017,
"%{%m}t": 11,
"%{%d}t": 25,
"%{%H}t": 20,
"%{%M}t": 47,
"%{%S}t": 6,
"%r": "GET / HTTP/1.1",
},
},
),
(
'%{%Y-%m-%d %H:%M:%S %z}t %r',
'2017-11-25 20:47:06 -0500 GET / HTTP/1.1',
{
"request_line": "GET / HTTP/1.1",
"request_time": datetime(2017, 11, 25, 20, 47, 6, tzinfo=w5),
"request_time_fields": {
"year": 2017,
"mon": 11,
"mday": 25,
"hour": 20,
"min": 47,
"sec": 6,
"timezone": w5,
},
"directives": {
"%{%Y}t": 2017,
"%{%m}t": 11,
"%{%d}t": 25,
"%{%H}t": 20,
"%{%M}t": 47,
"%{%S}t": 6,
"%{%z}t": w5,
"%r": "GET / HTTP/1.1",
},
},
),
(
'%{%s}t@%{%z}t %r',
'1511642826@-0500 GET / HTTP/1.1',
{
"request_line": "GET / HTTP/1.1",
"request_time": datetime(2017, 11, 25, 15, 47, 6, tzinfo=w5),
"request_time_fields": {"epoch": 1511642826, "timezone": w5},
"directives": {
"%{%s}t": 1511642826,
"%{%z}t": w5,
"%r": "GET / HTTP/1.1",
},
},
),
(
'%{%Y-%m-%d}t %{%H:%M:%S}t %r',
'2017-11-25 20:47:06 GET / HTTP/1.1',
{
"request_line": "GET / HTTP/1.1",
"request_time": datetime(2017, 11, 25, 20, 47, 6),
"request_time_fields": {
"year": 2017,
"mon": 11,
"mday": 25,
"hour": 20,
"min": 47,
"sec": 6,
},
"directives": {
"%{%Y}t": 2017,
"%{%m}t": 11,
"%{%d}t": 25,
"%{%H}t": 20,
"%{%M}t": 47,
"%{%S}t": 6,
"%r": "GET / HTTP/1.1",
},
},
),
(
'%{%Y-%m-%d}t %{%H:%M:%S}t %{%z}t %r',
'2017-11-25 20:47:06 -0500 GET / HTTP/1.1',
{
"request_line": "GET / HTTP/1.1",
"request_time": datetime(2017, 11, 25, 20, 47, 6, tzinfo=w5),
"request_time_fields": {
"year": 2017,
"mon": 11,
"mday": 25,
"hour": 20,
"min": 47,
"sec": 6,
"timezone": w5,
},
"directives": {
"%{%Y}t": 2017,
"%{%m}t": 11,
"%{%d}t": 25,
"%{%H}t": 20,
"%{%M}t": 47,
"%{%S}t": 6,
"%{%z}t": w5,
"%r": "GET / HTTP/1.1",
},
},
),
(
"%{%D %T}t",
"05/06/19 13:42:26",
{
"request_time": datetime(2019, 5, 6, 13, 42, 26),
"request_time_fields": {
"date": date(2019, 5, 6),
"time": time(13, 42, 26),
},
"directives": {
"%{%D}t": date(2019, 5, 6),
"%{%T}t": time(13, 42, 26),
},
},
),
(
"%{%D%%%T}t",
"05/06/19%13:42:26",
{
"request_time": datetime(2019, 5, 6, 13, 42, 26),
"request_time_fields": {
"date": date(2019, 5, 6),
"time": time(13, 42, 26),
},
"directives": {
"%{%D}t": date(2019, 5, 6),
"%{%T}t": time(13, 42, 26),
},
},
),
(
"%{%D%t%T}t",
"05/06/19\t13:42:26",
{
"request_time": datetime(2019, 5, 6, 13, 42, 26),
"request_time_fields": {
"date": date(2019, 5, 6),
"time": time(13, 42, 26),
},
"directives": {
"%{%D}t": date(2019, 5, 6),
"%{%T}t": time(13, 42, 26),
},
},
),
(
"%{%F %R:%S}t",
"2019-05-06 13:42:26",
{
"request_time": datetime(2019, 5, 6, 13, 42, 26),
"request_time_fields": {
"date": date(2019, 5, 6),
"hour_min": time(13, 42),
"sec": 26,
},
"directives": {
"%{%F}t": date(2019, 5, 6),
"%{%R}t": time(13, 42),
"%{%S}t": 26,
},
},
),
(
"%{begin:%F %R:%S}t",
"2019-05-06 13:42:26",
{
"begin_request_time": datetime(2019, 5, 6, 13, 42, 26),
"begin_request_time_fields": {
"date": date(2019, 5, 6),
"hour_min": time(13, 42),
"sec": 26,
},
"directives": {
"%{begin:%F}t": date(2019, 5, 6),
"%{begin:%R}t": time(13, 42),
"%{begin:%S}t": 26,
},
},
),
(
"%{end:%F %R:%S}t",
"2019-05-06 13:42:26",
{
"end_request_time": datetime(2019, 5, 6, 13, 42, 26),
"end_request_time_fields": {
"date": date(2019, 5, 6),
"hour_min": time(13, 42),
"sec": 26,
},
"directives": {
"%{end:%F}t": date(2019, 5, 6),
"%{end:%R}t": time(13, 42),
"%{end:%S}t": 26,
},
},
),
(
"%<{end:%F %R:%S}t",
"2019-05-06 13:42:26",
{
"original_end_request_time": datetime(2019, 5, 6, 13, 42, 26),
"original_end_request_time_fields": {
"date": date(2019, 5, 6),
"hour_min": time(13, 42),
"sec": 26,
},
"directives": {
"%<{end:%F}t": date(2019, 5, 6),
"%<{end:%R}t": time(13, 42),
"%<{end:%S}t": 26,
},
},
),
(
"%{}t",
'[05/Nov/2017:02:01:01 -0500]',
{
"request_time": datetime(2017,11, 5, 2, 1, 1, tzinfo=w5),
"request_time_fields": {
"timestamp": datetime(2017,11, 5, 2, 1, 1, tzinfo=w5),
},
"directives": {
"%{}t": datetime(2017,11, 5, 2, 1, 1, tzinfo=w5),
},
}
),
(
"%{begin}t",
'[05/Nov/2017:02:01:01 -0500]',
{
"begin_request_time": datetime(2017,11, 5, 2, 1, 1, tzinfo=w5),
"begin_request_time_fields": {
"timestamp": datetime(2017,11, 5, 2, 1, 1, tzinfo=w5),
},
"directives": {
"%{begin}t": datetime(2017,11, 5, 2, 1, 1, tzinfo=w5),
},
}
),
(
"%{end}t",
'[05/Nov/2017:02:01:01 -0500]',
{
"end_request_time": datetime(2017,11, 5, 2, 1, 1, tzinfo=w5),
"end_request_time_fields": {
"timestamp": datetime(2017,11, 5, 2, 1, 1, tzinfo=w5),
},
"directives": {
"%{end}t": datetime(2017,11, 5, 2, 1, 1, tzinfo=w5),
},
}
),
(
"%{begin:}t",
'[05/Nov/2017:02:01:01 -0500]',
{
"begin_request_time": datetime(2017,11, 5, 2, 1, 1, tzinfo=w5),
"begin_request_time_fields": {
"timestamp": datetime(2017,11, 5, 2, 1, 1, tzinfo=w5),
},
"directives": {
"%{begin:}t": datetime(2017,11, 5, 2, 1, 1, tzinfo=w5),
},
}
),
(
"%{end:}t",
'[05/Nov/2017:02:01:01 -0500]',
{
"end_request_time": datetime(2017,11, 5, 2, 1, 1, tzinfo=w5),
"end_request_time_fields": {
"timestamp": datetime(2017,11, 5, 2, 1, 1, tzinfo=w5),
},
"directives": {
"%{end:}t": datetime(2017,11, 5, 2, 1, 1, tzinfo=w5),
},
}
),
(
'%{%Y%n%m%t%d}t',
'2019 05 19',
{
"request_time": None,
"request_time_fields": {
"year": 2019,
"mon": 5,
"mday": 19,
},
"directives": {
"%{%Y}t": 2019,
"%{%m}t": 5,
"%{%d}t": 19,
},
},
),
(
'%{%Y%n%m%t%d}t',
'2019 \t 05 \n 19',
{
"request_time": None,
"request_time_fields": {
"year": 2019,
"mon": 5,
"mday": 19,
},
"directives": {
"%{%Y}t": 2019,
"%{%m}t": 5,
"%{%d}t": 19,
},
},
),
(
'%{%Y%n%m%t%d}t',
'20190519',
{
"request_time": None,
"request_time_fields": {
"year": 2019,
"mon": 5,
"mday": 19,
},
"directives": {
"%{%Y}t": 2019,
"%{%m}t": 5,
"%{%d}t": 19,
},
},
),
(
'%200{%I:%M:%S %p}t',
'12:34:56 ',
{
"request_time": None,
"request_time_fields": {
"hour12": 12,
"min": 34,
"sec": 56,
"am_pm": "",
},
"directives": {
"%200{%I}t": 12,
"%200{%M}t": 34,
"%200{%S}t": 56,
"%200{%p}t": "",
},
},
),
(
'%200{%I:%M:%S %p}t',
'-',
{
"request_time": None,
"request_time_fields": {
"hour12": None,
"min": None,
"sec": None,
"am_pm": None,
},
"directives": {
"%200{%I}t": None,
"%200{%M}t": None,
"%200{%S}t": None,
"%200{%p}t": None,
},
},
),
(
'%{%s %Z}t',
'1511642826 GMT',
{
"request_time": datetime(2017, 11, 25, 20, 47, 6, tzinfo=timezone.utc),
"request_time_fields": {
"epoch": 1511642826,
"tzname": "GMT",
},
"directives": {
"%{%s}t": 1511642826,
"%{%Z}t": "GMT",
},
},
),
(
'%{%s %Z}t',
'1511642826 UTC',
{
"request_time": datetime(2017, 11, 25, 20, 47, 6, tzinfo=timezone.utc),
"request_time_fields": {
"epoch": 1511642826,
"tzname": "UTC",
},
"directives": {
"%{%s}t": 1511642826,
"%{%Z}t": "UTC",
},
},
),
(
'%{%s %Z}t',
'1511642826 EST',
{
"request_time": datetime(2017, 11, 25, 15, 47, 6, tzinfo=w5),
"request_time_fields": {
"epoch": 1511642826,
"tzname": "EST",
},
"directives": {
"%{%s}t": 1511642826,
"%{%Z}t": "EST",
},
},
),
(
'%{%s %Z}t',
'1558378254 EDT',
{
"request_time": datetime(2019, 5, 20, 14, 50, 54, tzinfo=w4),
"request_time_fields": {
"epoch": 1558378254,
"tzname": "EDT",
},
"directives": {
"%{%s}t": 1558378254,
"%{%Z}t": "EDT",
},
},
),
(
'%{%s %Z}t',
'1558378254 XXX',
{
"request_time": datetime(2019, 5, 20, 18, 50, 54, tzinfo=timezone.utc),
"request_time_fields": {
"epoch": 1558378254,
"tzname": "XXX",
},
"directives": {
"%{%s}t": 1558378254,
"%{%Z}t": "XXX",
},
},
),
(
'%{%FT%T %Z}t',
'2019-02-20T14:54:43 GMT',
{
"request_time": datetime(2019, 2, 20, 14, 54, 43, tzinfo=timezone.utc),
"request_time_fields": {
"date": date(2019, 2, 20),
"time": time(14, 54, 43),
"tzname": "GMT",
},
"directives": {
"%{%F}t": date(2019, 2, 20),
"%{%T}t": time(14, 54, 43),
"%{%Z}t": "GMT",
},
},
),
(
'%{%FT%T %Z}t',
'2019-02-20T14:54:43 UTC',
{
"request_time": datetime(2019, 2, 20, 14, 54, 43, tzinfo=timezone.utc),
"request_time_fields": {
"date": date(2019, 2, 20),
"time": time(14, 54, 43),
"tzname": "UTC",
},
"directives": {
"%{%F}t": date(2019, 2, 20),
"%{%T}t": time(14, 54, 43),
"%{%Z}t": "UTC",
},
},
),
(
'%{%FT%T %Z}t',
'2019-02-20T14:54:43 EST',
{
"request_time": datetime(2019, 2, 20, 14, 54, 43, tzinfo=w5),
"request_time_fields": {
"date": date(2019, 2, 20),
"time": time(14, 54, 43),
"tzname": "EST",
},
"directives": {
"%{%F}t": date(2019, 2, 20),
"%{%T}t": time(14, 54, 43),
"%{%Z}t": "EST",
},
},
),
(
'%{%FT%T %Z}t',
'2019-05-20T14:54:43 EDT',
{
"request_time": datetime(2019, 5, 20, 14, 54, 43, tzinfo=w4),
"request_time_fields": {
"date": date(2019, 5, 20),
"time": time(14, 54, 43),
"tzname": "EDT",
},
"directives": {
"%{%F}t": date(2019, 5, 20),
"%{%T}t": time(14, 54, 43),
"%{%Z}t": "EDT",
},
},
),
(
'%{%FT%T %Z}t',
'2019-05-20T14:54:43 XXX',
{
"request_time": datetime(2019, 5, 20, 14, 54, 43),
"request_time_fields": {
"date": date(2019, 5, 20),
"time": time(14, 54, 43),
"tzname": "XXX",
},
"directives": {
"%{%F}t": date(2019, 5, 20),
"%{%T}t": time(14, 54, 43),
"%{%Z}t": "XXX",
},
},
),
])
def test_parse_custom_time(fmt, entry, fields):
    """Parse *entry* with the custom %{...}t format and verify each field."""
    parsed = LogParser(fmt, encoding='utf-8').parse(entry)
    for attr, expected in fields.items():
        assert getattr(parsed, attr) == expected
@pytest.mark.parametrize('fmt,entry,fields', [
(
'%{%d %b %Y %H:%M:%S %z}t',
'19 Mär 2019 01:39:12 +0000',
{
"request_time": datetime(2019, 3, 19, 1, 39, 12, tzinfo=timezone.utc),
"request_time_fields": {
"mday": 19,
"abbrev_mon": "Mär",
"year": 2019,
"hour": 1,
"min": 39,
"sec": 12,
"timezone": timezone.utc,
},
"directives": {
"%{%d}t": 19,
"%{%b}t": "Mär",
"%{%Y}t": 2019,
"%{%H}t": 1,
"%{%M}t": 39,
"%{%S}t": 12,
"%{%z}t": timezone.utc,
},
},
),
(
'%{%d %B %Y %H:%M:%S %z}t',
'19 März 2019 01:39:12 +0000',
{
"request_time": datetime(2019, 3, 19, 1, 39, 12, tzinfo=timezone.utc),
"request_time_fields": {
"mday": 19,
"full_mon": "März",
"year": 2019,
"hour": 1,
"min": 39,
"sec": 12,
"timezone": timezone.utc,
},
"directives": {
"%{%d}t": 19,
"%{%B}t": "März",
"%{%Y}t": 2019,
"%{%H}t": 1,
"%{%M}t": 39,
"%{%S}t": 12,
"%{%z}t": timezone.utc,
},
},
),
(
'%{%G--%V %a %H:%M:%S}t',
'2019--20 So 12:34:56',
{
"request_time": datetime(2019, 5, 19, 12, 34, 56),
"request_time_fields": {
"iso_year": 2019,
"iso_weeknum": 20,
"abbrev_wday": "So",
"hour": 12,
"min": 34,
"sec": 56,
},
"directives": {
"%{%G}t": 2019,
"%{%V}t": 20,
"%{%a}t": "So",
"%{%H}t": 12,
"%{%M}t": 34,
"%{%S}t": 56,
},
},
),
(
'%{%G--%V %A %H:%M:%S}t',
'2019--20 Sonntag 12:34:56',
{
"request_time": datetime(2019, 5, 19, 12, 34, 56),
"request_time_fields": {
"iso_year": 2019,
"iso_weeknum": 20,
"full_wday": "Sonntag",
"hour": 12,
"min": 34,
"sec": 56,
},
"directives": {
"%{%G}t": 2019,
"%{%V}t": 20,
"%{%A}t": "Sonntag",
"%{%H}t": 12,
"%{%M}t": 34,
"%{%S}t": 56,
},
},
),
])
def test_parse_custom_german_time(fmt, entry, fields):
    """Parse locale-dependent time directives under the German locale."""
    saved_locale = locale.setlocale(locale.LC_ALL)
    try:
        locale.setlocale(locale.LC_ALL, 'de_DE.utf8')
    except locale.Error:
        pytest.skip('Locale not supported')
    else:
        parsed = LogParser(fmt).parse(entry)
        for attr, expected in fields.items():
            assert getattr(parsed, attr) == expected
    finally:
        # Always restore the process-wide locale for later tests.
        locale.setlocale(locale.LC_ALL, saved_locale)
| 26.93593
| 83
| 0.324005
| 2,131
| 21,441
| 3.175035
| 0.068043
| 0.133314
| 0.103015
| 0.039905
| 0.85841
| 0.830772
| 0.784215
| 0.757464
| 0.743275
| 0.704109
| 0
| 0.166817
| 0.481647
| 21,441
| 795
| 84
| 26.969811
| 0.441965
| 0
| 0
| 0.556441
| 0
| 0.001328
| 0.253999
| 0.014272
| 0
| 0
| 0
| 0
| 0.002656
| 1
| 0.002656
| false
| 0
| 0.005312
| 0
| 0.007968
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
be49ffe4e083418100aca43067bf9b214f603e97
| 164
|
py
|
Python
|
src/repro/optimizer/sgd.py
|
bouthilx/repro
|
611734e4eddd6a76dd4c1e7114a28a634a2a75c1
|
[
"BSD-3-Clause"
] | null | null | null |
src/repro/optimizer/sgd.py
|
bouthilx/repro
|
611734e4eddd6a76dd4c1e7114a28a634a2a75c1
|
[
"BSD-3-Clause"
] | null | null | null |
src/repro/optimizer/sgd.py
|
bouthilx/repro
|
611734e4eddd6a76dd4c1e7114a28a634a2a75c1
|
[
"BSD-3-Clause"
] | null | null | null |
from torch.optim import SGD
def build(model, lr, momentum, weight_decay):
    """Build an SGD optimizer over *model*'s parameters.

    Args:
        model: Module exposing ``parameters()`` (e.g. a ``torch.nn.Module``).
        lr (float): Learning rate.
        momentum (float): Momentum factor.
        weight_decay (float): Weight decay (L2 penalty) coefficient.

    Returns:
        torch.optim.SGD: The configured optimizer.
    """
    return SGD(model.parameters(), lr=lr, momentum=momentum,
               weight_decay=weight_decay)
| 27.333333
| 87
| 0.768293
| 24
| 164
| 5.125
| 0.583333
| 0.268293
| 0.308943
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.121951
| 164
| 5
| 88
| 32.8
| 0.854167
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.333333
| false
| 0
| 0.333333
| 0.333333
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 1
| 1
| 0
|
0
| 8
|
be60457e2fba4d5f4068ff9d049d9bfa989b8ab4
| 5,824
|
py
|
Python
|
testproject/testapp/tests/test_user_delete.py
|
ulgens/djoser
|
c62371e3f9a8bbad2eaf55ffd0efad6eb6c02f26
|
[
"MIT"
] | null | null | null |
testproject/testapp/tests/test_user_delete.py
|
ulgens/djoser
|
c62371e3f9a8bbad2eaf55ffd0efad6eb6c02f26
|
[
"MIT"
] | null | null | null |
testproject/testapp/tests/test_user_delete.py
|
ulgens/djoser
|
c62371e3f9a8bbad2eaf55ffd0efad6eb6c02f26
|
[
"MIT"
] | null | null | null |
import pytest
from django.conf import settings
from django.contrib.auth import get_user_model
from django.test import override_settings
from djet import assertions
from rest_framework import status
from rest_framework.reverse import reverse
from rest_framework.test import APITestCase
import djoser.views
from djoser.conf import settings as djoser_settings
from .common import PermCheckClass, RunCheck, SerializerCheckClass, create_user
# Resolve the project's active user model once at import time.
User = get_user_model()
class UserMeDeleteViewTest(
    APITestCase,
    assertions.StatusCodeAssertionsMixin,
    assertions.EmailAssertionsMixin,
    assertions.InstanceAssertionsMixin,
):
    # End-to-end tests for DELETE on the "user-me" endpoint.
    viewset = djoser.views.UserViewSet
    def test_delete_user_if_logged_in(self):
        # Deleting with the correct current password removes the account.
        user = create_user()
        self.assert_instance_exists(User, username="john")
        data = {"current_password": "secret"}
        self.client.force_authenticate(user=user)
        response = self.client.delete(reverse("user-me"), data=data)
        self.assert_status_equal(response, status.HTTP_204_NO_CONTENT)
        self.assert_instance_does_not_exist(User, username="john")
    def test_not_delete_if_fails_password_validation(self):
        # A wrong current password must be rejected with a 400 and leave
        # the account untouched.
        user = create_user()
        self.assert_instance_exists(User, username="john")
        data = {"current_password": "incorrect"}
        self.client.force_authenticate(user=user)
        response = self.client.delete(reverse("user-me"), data=data)
        self.assert_status_equal(response, status.HTTP_400_BAD_REQUEST)
        self.assertEqual(response.data, {"current_password": ["Invalid password."]})
    def test_permission_class(self):
        # PermCheckClass raises RunCheck when consulted, which proves the
        # configured "user_delete" permission class is actually invoked.
        old_value = djoser_settings.PERMISSIONS["user_delete"]
        with override_settings(
            DJOSER=dict(
                settings.DJOSER, **{"PERMISSIONS": {"user_delete": [PermCheckClass]}}
            )
        ), pytest.raises(RunCheck):
            user = create_user()
            self.assert_instance_exists(User, username="john")
            data = {"current_password": "incorrect"}
            self.client.force_authenticate(user=user)
            self.client.delete(reverse("user-me"), data=data)
        # Re-apply the original permission setting for subsequent tests.
        override_settings(
            DJOSER=dict(settings.DJOSER, **{"PERMISSIONS": {"user_delete": old_value}})
        ).enable()
    def test_serializer_class(self):
        # SerializerCheckClass raises RunCheck on use, which proves the
        # configured "user_delete" serializer is actually consulted.
        old_value = djoser_settings.SERIALIZERS["user_delete"]
        with override_settings(
            DJOSER=dict(
                settings.DJOSER,
                **{"SERIALIZERS": {"user_delete": SerializerCheckClass}},
            )
        ), pytest.raises(RunCheck):
            user = create_user()
            self.assert_instance_exists(User, username="john")
            data = {"current_password": "incorrect"}
            self.client.force_authenticate(user=user)
            self.client.delete(reverse("user-me"), data=data)
        # Re-apply the original serializer setting for subsequent tests.
        override_settings(
            DJOSER=dict(settings.DJOSER, **{"SERIALIZERS": {"user_delete": old_value}})
        ).enable()
class UserViewSetDeletionTest(
    APITestCase,
    assertions.StatusCodeAssertionsMixin,
    assertions.EmailAssertionsMixin,
    assertions.InstanceAssertionsMixin,
):
    # End-to-end tests for DELETE on the "user-detail" endpoint (deletion
    # by primary key rather than via "user-me").
    def test_delete_user_if_logged_in(self):
        # Deleting with the correct current password removes the account.
        user = create_user()
        self.assert_instance_exists(User, username="john")
        data = {"current_password": "secret"}
        self.client.force_authenticate(user=user)
        response = self.client.delete(
            reverse("user-detail", kwargs={User._meta.pk.name: user.pk}),
            data=data,
        )
        self.assert_status_equal(response, status.HTTP_204_NO_CONTENT)
        self.assert_instance_does_not_exist(User, username="john")
    def test_not_delete_if_fails_password_validation(self):
        # A wrong current password must be rejected with a 400 and leave
        # the account untouched.
        user = create_user()
        self.assert_instance_exists(User, username="john")
        data = {"current_password": "incorrect"}
        self.client.force_authenticate(user=user)
        response = self.client.delete(
            reverse("user-detail", kwargs={User._meta.pk.name: user.pk}),
            data=data,
        )
        self.assert_status_equal(response, status.HTTP_400_BAD_REQUEST)
        self.assertEqual(response.data, {"current_password": ["Invalid password."]})
    def test_permission_class(self):
        # PermCheckClass raises RunCheck when consulted, which proves the
        # configured "user_delete" permission class is actually invoked.
        old_value = djoser_settings.PERMISSIONS["user_delete"]
        with override_settings(
            DJOSER=dict(
                settings.DJOSER, **{"PERMISSIONS": {"user_delete": [PermCheckClass]}}
            )
        ), pytest.raises(RunCheck):
            user = create_user()
            self.assert_instance_exists(User, username="john")
            data = {"current_password": "incorrect"}
            self.client.force_authenticate(user=user)
            self.client.delete(
                reverse("user-detail", kwargs={User._meta.pk.name: user.pk}),
                data=data,
            )
        # Re-apply the original permission setting for subsequent tests.
        override_settings(
            DJOSER=dict(settings.DJOSER, **{"PERMISSIONS": {"user_delete": old_value}})
        ).enable()
    def test_serializer_class(self):
        # SerializerCheckClass raises RunCheck on use, which proves the
        # configured "user_delete" serializer is actually consulted.
        old_value = djoser_settings.SERIALIZERS["user_delete"]
        with override_settings(
            DJOSER=dict(
                settings.DJOSER,
                **{"SERIALIZERS": {"user_delete": SerializerCheckClass}},
            )
        ), pytest.raises(RunCheck):
            user = create_user()
            self.assert_instance_exists(User, username="john")
            data = {"current_password": "incorrect"}
            self.client.force_authenticate(user=user)
            self.client.delete(
                reverse("user-detail", kwargs={User._meta.pk.name: user.pk}),
                data=data,
            )
        # Re-apply the original serializer setting for subsequent tests.
        override_settings(
            DJOSER=dict(settings.DJOSER, **{"SERIALIZERS": {"user_delete": old_value}})
        ).enable()
| 36.628931
| 87
| 0.645604
| 603
| 5,824
| 5.993367
| 0.144279
| 0.044272
| 0.049806
| 0.039845
| 0.86995
| 0.86995
| 0.86995
| 0.809629
| 0.809629
| 0.809629
| 0
| 0.002717
| 0.241758
| 5,824
| 158
| 88
| 36.860759
| 0.81567
| 0
| 0
| 0.824427
| 0
| 0
| 0.101648
| 0
| 0
| 0
| 0
| 0
| 0.175573
| 1
| 0.061069
| false
| 0.091603
| 0.083969
| 0
| 0.167939
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
|
0
| 8
|
fe24d801edfbf17e5cf00eb51347ae775e3c44bd
| 68,387
|
py
|
Python
|
dev/services/wms/thredds/wms_cfg.py
|
cbur24/dea-config
|
735510227e7451366d89c0dc90dc834eb3e65b95
|
[
"Apache-2.0"
] | null | null | null |
dev/services/wms/thredds/wms_cfg.py
|
cbur24/dea-config
|
735510227e7451366d89c0dc90dc834eb3e65b95
|
[
"Apache-2.0"
] | null | null | null |
dev/services/wms/thredds/wms_cfg.py
|
cbur24/dea-config
|
735510227e7451366d89c0dc90dc834eb3e65b95
|
[
"Apache-2.0"
] | null | null | null |
# Static config for the wms metadata.
# HTTP headers attached to every response served by this instance.
response_cfg = {
    # Allow cross-origin requests from any domain (CORS).
    "Access-Control-Allow-Origin": "*",
}
# Service-wide configuration for this OGC web-services instance
# (WMS / WMTS / WCS endpoints share this single dict).
service_cfg = {
    # Which web service(s) should be supported by this instance
    "wcs": True,
    "wms": True,
    "wmts": True,
    # Required config for WMS and/or WCS
    # Service title - appears e.g. in Terria catalog
    "title": "Digital Earth Australia - OGC Web Services",
    # Service URL. Should be a fully qualified URL.
    "url": ["https://dave.services.devkube.dea.ga.gov.au"],
    # Human-readable URL advertised in service metadata.
    "human_url": "dea.ga.gov.au/",
    # Supported co-ordinate reference systems
    "published_CRSs": {
        "EPSG:3857": {  # Web Mercator
            "geographic": False,
            "horizontal_coord": "x",
            "vertical_coord": "y",
        },
        "EPSG:4326": {  # WGS-84
            "geographic": True,
            "vertical_coord_first": True
        },
        "EPSG:3577": {  # GDA-94, internal representation
            "geographic": False,
            "horizontal_coord": "x",
            "vertical_coord": "y",
        },
    },
    # Required config for WCS
    # Must be a geographic CRS in the published_CRSs list. EPSG:4326 is recommended, but any geographic CRS should work.
    "default_geographic_CRS": "EPSG:4326",
    # Supported WCS formats
    "wcs_formats": {
        # Key is the format name, as used in DescribeCoverage XML
        "GeoTIFF": {
            # Renderer is the FQN of a Python function that takes:
            # * A WCS Request object
            # * Some ODC data to be rendered.
            "renderer": "datacube_wms.wcs_utils.get_tiff",
            # The MIME type of the image, as used in the Http Response.
            "mime": "image/geotiff",
            # The file extension to add to the filename.
            "extension": "tif",
            # Whether or not the file format supports multiple time slices.
            "multi-time": False
        },
        "netCDF": {
            "renderer": "datacube_wms.wcs_utils.get_netcdf",
            "mime": "application/x-netcdf",
            "extension": "nc",
            "multi-time": True,
        }
    },
    # The native wcs format must be declared in wcs_formats above.
    "native_wcs_format": "GeoTIFF",
    # Optional config for instances supporting WMS
    # Maximum GetMap image dimensions in pixels — presumably requests beyond
    # these are rejected by the server; confirm against the WMS handler.
    "max_width": 512,
    "max_height": 512,
    # Optional config for all services (WMS and/or WCS) - may be set to blank/empty, no defaults
    "abstract": """Digital Earth Australia OGC Web Services""",
    # Keywords included in the GetCapabilities response.
    "keywords": [
        "geomedian",
        "WOfS",
        "mangrove",
        "bare-earth",
        "NIDEM",
        "HLTC",
        "landsat",
        "australia",
        "time-series",
        "fractional-cover"
    ],
    # Contact details published in the service metadata.
    "contact_info": {
        "person": "Digital Earth Australia",
        "organisation": "Geoscience Australia",
        "position": "",
        "address": {
            "type": "postal",
            "address": "GPO Box 378",
            "city": "Canberra",
            "state": "ACT",
            "postcode": "2609",
            "country": "Australia",
        },
        "telephone": "+61 2 6249 9111",
        "fax": "",
        "email": "earth.observation@ga.gov.au",
    },
    "fees": "",
    # Implicit string concatenation: the three literals below form one
    # access-constraints statement.
    "access_constraints": "© Commonwealth of Australia (Geoscience Australia) 2018. "
    "This product is released under the Creative Commons Attribution 4.0 International Licence. "
    "http://creativecommons.org/licenses/by/4.0/legalcode",
    # Pre-authenticate S3 access rather than per-request — TODO confirm
    # exact semantics against the data-access layer.
    "preauthenticate_s3": True,
    "geotiff_georeference_source": "INTERNAL"
}
layer_cfg = [
{
# Name and title of the platform layer.
# Platform layers are not mappable. The name is for internal server use only.
"name": "Sentinel-2 Definitive",
"title": "Sentinel Definitive",
"abstract": "This is a definitive archive of daily Sentinel-2 Near Real Time data. "
"that is processed on receipt using the best-available ancillary information at the time to "
"provide atmospheric corrections. For more information see "
"http://pid.geoscience.gov.au/dataset/ga/122229",
# Products available for this platform.
# For each product, the "name" is the Datacube name, and the label is used
# to describe the label to end-users.
"products": [
{
# Included as a keyword for the layer
"label": "Sentinel 2 (A and B combined)",
# Included as a keyword for the layer
"type": "",
# Included as a keyword for the layer
"variant": "Surface Reflectance",
"abstract": """
This is a 90-day rolling archive of daily Sentinel-2 Near Real Time data. The Near Real-Time capability provides analysis-ready data that is processed on receipt using the best-available ancillary information at the time to provide atmospheric corrections.
For more information see http://pid.geoscience.gov.au/dataset/ga/122229
The Normalised Difference Chlorophyll Index (NDCI) is based on the method of Mishra & Mishra 2012, and adapted to bands on the Sentinel-2A & B sensors.
The index indicates levels of chlorophyll-a (chl-a) concentrations in complex turbid productive waters such as those encountered in many inland water bodies. The index has not been validated in Australian waters, and there are a range of environmental conditions that may have an effect on the accuracy of the derived index values in this test implementation, including:
- Influence on the remote sensing signal from nearby land and/or atmospheric effects
- Optically shallow water
- Cloud cover
Mishra, S., Mishra, D.R., 2012. Normalized difference chlorophyll index: A novel model for remote estimation of chlorophyll-a concentration in turbid productive waters. Remote Sensing of Environment, Remote Sensing of Urban Environments 117, 394–406. https://doi.org/10.1016/j.rse.2011.10.016
For service status information, see https://status.dea.ga.gov.au""",
# The WMS name for the layer
"name": "s2_ard_granule_nbar_t",
# The Datacube name for the associated data product
"multi_product": True,
"product_name": ["s2a_ard_granule", "s2b_ard_granule"],
# The Datacube name for the associated pixel-quality product (optional)
# The name of the associated Datacube pixel-quality product
# "pq_dataset": "s2b_nrt_granule",
# The name of the measurement band for the pixel-quality product
# (Only required if pq_dataset is set)
# "pq_band": "pixel_quality",
# Min zoom factor - sets the zoom level where the cutover from indicative polygons
# to actual imagery occurs.
"min_zoom_factor": 15.0,
# The fill-colour of the indicative polygons when zoomed out.
# Triplets (rgb) or quadruplets (rgba) of integers 0-255.
"zoomed_out_fill_colour": [150, 180, 200, 160],
# Time Zone. In hours added to UTC (maybe negative)
# Used for rounding off scene times to a date.
# 9 is good value for imagery of Australia.
"time_zone": 9,
# Extent mask function
# Determines what portions of dataset is potentially meaningful data.
"extent_mask_func": lambda data, band: (data[band] != data[band].attrs['nodata']),
# Flags listed here are ignored in GetFeatureInfo requests.
# (defaults to empty list)
"ignore_info_flags": [],
# Define layer wide legend graphic if no style is passed
# to GetLegendGraphic
"legend": {
# "url": ""
"styles": ["ndvi", "ndwi", "ndci"]
},
"wcs_default_bands": ["nbart_red", "nbart_green", "nbart_blue"],
# Styles.
#
# See band_mapper.py
#
# The various available spectral bands, and ways to combine them
# into a single rgb image.
# The examples here are ad hoc
#
"styles": [
# Examples of styles which are linear combinations of the available spectral bands.
#
{
"name": "simple_rgb",
"title": "Simple RGB",
"abstract": "Simple true-colour image, using the red, green and blue bands",
"components": {
"red": {
"nbart_red": 1.0
},
"green": {
"nbart_green": 1.0
},
"blue": {
"nbart_blue": 1.0
}
},
"scale_range": [0.0, 3000.0]
},
{
"name": "infrared_green",
"title": "False colour - Green, SWIR, NIR",
"abstract": "False Colour image with SWIR1->Red, NIR->Green, and Green->Blue",
"components": {
"red": {
"nbart_swir_2": 1.0
},
"green": {
"nbart_nir_1": 1.0
},
"blue": {
"nbart_green": 1.0
}
},
"scale_range": [0.0, 3000.0]
},
{
"name": "ndvi",
"title": "NDVI - Red, NIR",
"abstract": "Normalised Difference Vegetation Index - a derived index that correlates well with the existence of vegetation",
"index_function": lambda data: (data["nbart_nir_1"] - data["nbart_red"]) / (data["nbart_nir_1"] + data["nbart_red"]),
"needed_bands": ["nbart_red", "nbart_nir_1"],
"color_ramp": [
{
"value": -0.0,
"color": "#8F3F20",
"alpha": 0.0
},
{
"value": 0.0,
"color": "#8F3F20",
"alpha": 1.0
},
{
"value": 0.1,
"color": "#A35F18"
},
{
"value": 0.2,
"color": "#B88512"
},
{
"value": 0.3,
"color": "#CEAC0E"
},
{
"value": 0.4,
"color": "#E5D609"
},
{
"value": 0.5,
"color": "#FFFF0C"
},
{
"value": 0.6,
"color": "#C3DE09"
},
{
"value": 0.7,
"color": "#88B808"
},
{
"value": 0.8,
"color": "#529400"
},
{
"value": 0.9,
"color": "#237100"
},
{
"value": 1.0,
"color": "#114D04"
}
]
},
{
"name": "nbr",
"title": "NBR",
"abstract": "The Normalized burn ratio (NBR) is used to identify burned areas. The formula is similar to a normalized difference vegetation index (NDVI), except that it uses near-infrared (NIR) and shortwave-infrared (SWIR) portions of the electromagnetic spectrum (Lopez, 1991; Key and Benson, 1995)",
"index_function": lambda data: (data["nbart_nir_1"] - data["nbart_swir_3"]) / (data["nbart_nir_1"] + data["nbart_swir_3"]),
"needed_bands": ["nbart_swir_3", "nbart_nir_1"],
"color_ramp": [
{
"value": -1.0,
"color": "#d81e11",
"legend": {}
},
{
"value": -0.2,
"color": "#d81e11",
},
{
"value": -0.19999999,
"color": "#d81e11",
"alpha": 0.0,
"legend": {
"label": ">-0.2"
}
},
{
"value": 1.0,
"color": "#d81e11",
"alpha": 0.0,
},
]
},
{
"name": "ndwi",
"title": "NDWI - Green, NIR",
"abstract": "Normalised Difference Water Index - a derived index that correlates well with the existence of water",
"index_function": lambda data: (data["nbart_green"] - data["nbart_nir_1"]) / (
data["nbart_nir_1"] + data["nbart_green"]),
"needed_bands": ["nbart_green", "nbart_nir_1"],
"color_ramp": [
{
"value": -0.0,
"color": "#8F3F20",
"alpha": 0.0
},
{
"value": 0.0,
"color": "#8F3F20",
"alpha": 1.0
},
{
"value": 1.0,
"color": "#0303FF",
},
]
},
{
"name": "ndci",
"title": "NDCI - Red Edge, Red",
"abstract": "Normalised Difference Chlorophyll Index - a derived index that correlates well with the existence of chlorophyll",
"index_function": lambda data: (data["nbart_red_edge_1"] - data["nbart_red"]) / (data["nbart_red_edge_1"] + data["nbart_red"]).where(((data["nbart_green"] - data["nbart_swir_3"]) / (data["nbart_green"] + data["nbart_swir_3"])) > 0.1),
"needed_bands": ["nbart_red_edge_1", "nbart_red", "nbart_green", "nbart_swir_3"],
"color_ramp": [
{
"value": -0.1,
"color": "#1696FF",
"legend": {
"prefix": "<"
}
},
{
"value": -0.1,
"color": "#1696FF"
},
{
"value": 0.0,
"color": "#00FFDF",
"legend": {}
},
{
"value": 0.1,
"color": "#FFF50E",
},
{
"value": 0.2,
"color": "#FFB50A",
"legend": {}
},
{
"value": 0.4,
"color": "#FF530D",
},
{
"value": 0.5,
"color": "#FF0000",
"legend": {
"prefix": ">"
}
}
]
},
{
"name": "aerosol",
"title": "Narrow Blue - 440",
"abstract": "Coastal Aerosol or Narrow Blue band, approximately 435nm to 450nm",
"components": {
"red": {
"nbart_coastal_aerosol": 1.0
},
"green": {
"nbart_coastal_aerosol": 1.0
},
"blue": {
"nbart_coastal_aerosol": 1.0
}
},
"scale_range": [0.0, 3000.0]
},
{
"name": "blue",
"title": "Blue - 490",
"abstract": "Blue band, approximately 453nm to 511nm",
"components": {
"red": {
"nbart_blue": 1.0
},
"green": {
"nbart_blue": 1.0
},
"blue": {
"nbart_blue": 1.0
}
},
"scale_range": [0.0, 3000.0]
},
{
"name": "green",
"title": "Green - 560",
"abstract": "Green band, approximately 534nm to 588nm",
"components": {
"red": {
"nbart_green": 1.0
},
"green": {
"nbart_green": 1.0
},
"blue": {
"nbart_green": 1.0
}
},
"scale_range": [0.0, 3000.0]
},
{
"name": "red",
"title": "Red - 670",
"abstract": "Red band, roughly 637nm to 672nm",
"components": {
"red": {
"nbart_red": 1.0
},
"green": {
"nbart_red": 1.0
},
"blue": {
"nbart_red": 1.0
}
},
"scale_range": [0.0, 3000.0]
},
{
"name": "red_edge_1",
"title": "Vegetation Red Edge - 710",
"abstract": "Near infra-red band, centred on 710nm",
"components": {
"red": {
"nbart_red_edge_1": 1.0
},
"green": {
"nbart_red_edge_1": 1.0
},
"blue": {
"nbart_red_edge_1": 1.0
}
},
"scale_range": [0.0, 3000.0]
},
{
"name": "red_edge_2",
"title": "Vegetation Red Edge - 740",
"abstract": "Near infra-red band, centred on 740nm",
"components": {
"red": {
"nbart_red_edge_2": 1.0
},
"green": {
"nbart_red_edge_2": 1.0
},
"blue": {
"nbart_red_edge_2": 1.0
}
},
"scale_range": [0.0, 3000.0]
},
{
"name": "red_edge_3",
"title": "Vegetation Red Edge - 780",
"abstract": "Near infra-red band, centred on 780nm",
"components": {
"red": {
"nbart_red_edge_3": 1.0
},
"green": {
"nbart_red_edge_3": 1.0
},
"blue": {
"nbart_red_edge_3": 1.0
}
},
"scale_range": [0.0, 3000.0]
},
{
"name": "nir",
"title": "Near Infrared (NIR) - 840",
"abstract": "Near infra-red band, roughly 853nm to 876nm",
"components": {
"red": {
"nbart_nir_1": 1.0
},
"green": {
"nbart_nir_1": 1.0
},
"blue": {
"nbart_nir_1": 1.0
}
},
"scale_range": [0.0, 3000.0]
},
{
"name": "narrow_nir",
"title": "Narrow Near Infrared - 870",
"abstract": "Near infra-red band, centred on 865nm",
"components": {
"red": {
"nbart_nir_2": 1.0
},
"green": {
"nbart_nir_2": 1.0
},
"blue": {
"nbart_nir_2": 1.0
}
},
"scale_range": [0.0, 3000.0]
},
{
"name": "swir1",
"title": "Shortwave Infrared (SWIR) - 1610",
"abstract": "Short wave infra-red band 1, roughly 1575nm to 1647nm",
"components": {
"red": {
"nbart_swir_2": 1.0
},
"green": {
"nbart_swir_2": 1.0
},
"blue": {
"nbart_swir_2": 1.0
}
},
"scale_range": [0.0, 3000.0]
},
{
"name": "swir2",
"title": "Shortwave Infrared (SWIR) - 2190",
"abstract": "Short wave infra-red band 2, roughly 2117nm to 2285nm",
"components": {
"red": {
"nbart_swir_3": 1.0
},
"green": {
"nbart_swir_3": 1.0
},
"blue": {
"nbart_swir_3": 1.0
}
},
"scale_range": [0.0, 3000.0]
}
],
# Default style (if request does not specify style)
# MUST be defined in the styles list above.
# (Looks like Terria assumes this is the first style in the list, but this is
# not required by the standard.)
"default_style": "simple_rgb",
},
{
# Included as a keyword for the layer
"label": "Sentinel 2B",
# Included as a keyword for the layer
"type": "",
# Included as a keyword for the layer
"variant": "Surface Reflectance",
"abstract": """
This is a 90-day rolling archive of daily Sentinel-2 Near Real Time data. The Near Real-Time capability provides analysis-ready data that is processed on receipt using the best-available ancillary information at the time to provide atmospheric corrections.
For more information see http://pid.geoscience.gov.au/dataset/ga/122229
The Normalised Difference Chlorophyll Index (NDCI) is based on the method of Mishra & Mishra 2012, and adapted to bands on the Sentinel-2A & B sensors.
The index indicates levels of chlorophyll-a (chl-a) concentrations in complex turbid productive waters such as those encountered in many inland water bodies. The index has not been validated in Australian waters, and there are a range of environmental conditions that may have an effect on the accuracy of the derived index values in this test implementation, including:
- Influence on the remote sensing signal from nearby land and/or atmospheric effects
- Optically shallow water
- Cloud cover
Mishra, S., Mishra, D.R., 2012. Normalized difference chlorophyll index: A novel model for remote estimation of chlorophyll-a concentration in turbid productive waters. Remote Sensing of Environment, Remote Sensing of Urban Environments 117, 394–406. https://doi.org/10.1016/j.rse.2011.10.016
For service status information, see https://status.dea.ga.gov.au""",
# The WMS name for the layer
"name": "s2b_ard_granule_nbar_t",
# The Datacube name for the associated data product
"product_name": "s2b_ard_granule",
# The Datacube name for the associated pixel-quality product (optional)
# The name of the associated Datacube pixel-quality product
# "pq_dataset": "s2b_nrt_granule",
# The name of the measurement band for the pixel-quality product
# (Only required if pq_dataset is set)
# "pq_band": "pixel_quality",
# Min zoom factor - sets the zoom level where the cutover from indicative polygons
# to actual imagery occurs.
"min_zoom_factor": 15.0,
# The fill-colour of the indicative polygons when zoomed out.
# Triplets (rgb) or quadruplets (rgba) of integers 0-255.
"zoomed_out_fill_colour": [150, 180, 200, 160],
# Time Zone. In hours added to UTC (maybe negative)
# Used for rounding off scene times to a date.
# 9 is good value for imagery of Australia.
"time_zone": 9,
# Extent mask function
# Determines what portions of dataset is potentially meaningful data.
"extent_mask_func": lambda data, band: (data[band] != data[band].attrs['nodata']),
# Flags listed here are ignored in GetFeatureInfo requests.
# (defaults to empty list)
"ignore_info_flags": [],
# Define layer wide legend graphic if no style is passed
# to GetLegendGraphic
"legend": {
# "url": ""
"styles": ["ndvi", "ndwi", "ndci"]
},
"wcs_default_bands": ["nbart_red", "nbart_green", "nbart_blue"],
# Styles.
#
# See band_mapper.py
#
# The various available spectral bands, and ways to combine them
# into a single rgb image.
# The examples here are ad hoc
#
"styles": [
# Examples of styles which are linear combinations of the available spectral bands.
#
{
"name": "simple_rgb",
"title": "Simple RGB",
"abstract": "Simple true-colour image, using the red, green and blue bands",
"components": {
"red": {
"nbart_red": 1.0
},
"green": {
"nbart_green": 1.0
},
"blue": {
"nbart_blue": 1.0
}
},
"scale_range": [0.0, 3000.0]
},
{
"name": "infrared_green",
"title": "False colour - Green, SWIR, NIR",
"abstract": "False Colour image with SWIR1->Red, NIR->Green, and Green->Blue",
"components": {
"red": {
"nbart_swir_2": 1.0
},
"green": {
"nbart_nir_1": 1.0
},
"blue": {
"nbart_green": 1.0
}
},
"scale_range": [0.0, 3000.0]
},
{
"name": "ndvi",
"title": "NDVI - Red, NIR",
"abstract": "Normalised Difference Vegetation Index - a derived index that correlates well with the existence of vegetation",
"index_function": lambda data: (data["nbart_nir_1"] - data["nbart_red"]) / (data["nbart_nir_1"] + data["nbart_red"]),
"needed_bands": ["nbart_red", "nbart_nir_1"],
"color_ramp": [
{
"value": -0.0,
"color": "#8F3F20",
"alpha": 0.0
},
{
"value": 0.0,
"color": "#8F3F20",
"alpha": 1.0
},
{
"value": 0.1,
"color": "#A35F18"
},
{
"value": 0.2,
"color": "#B88512"
},
{
"value": 0.3,
"color": "#CEAC0E"
},
{
"value": 0.4,
"color": "#E5D609"
},
{
"value": 0.5,
"color": "#FFFF0C"
},
{
"value": 0.6,
"color": "#C3DE09"
},
{
"value": 0.7,
"color": "#88B808"
},
{
"value": 0.8,
"color": "#529400"
},
{
"value": 0.9,
"color": "#237100"
},
{
"value": 1.0,
"color": "#114D04"
}
]
},
{
"name": "nbr",
"title": "NBR",
"abstract": "The Normalized burn ratio (NBR) is used to identify burned areas. The formula is similar to a normalized difference vegetation index (NDVI), except that it uses near-infrared (NIR) and shortwave-infrared (SWIR) portions of the electromagnetic spectrum (Lopez, 1991; Key and Benson, 1995)",
"index_function": lambda data: (data["nbart_nir_1"] - data["nbart_swir_3"]) / (data["nbart_nir_1"] + data["nbart_swir_3"]),
"needed_bands": ["nbart_swir_3", "nbart_nir_1"],
"color_ramp": [
{
"value": -1.0,
"color": "#d81e11",
"legend": {}
},
{
"value": -0.2,
"color": "#d81e11",
},
{
"value": -0.19999999,
"color": "#d81e11",
"alpha": 0.0,
"legend": {
"label": ">-0.2"
}
},
{
"value": 1.0,
"color": "#d81e11",
"alpha": 0.0,
},
]
},
{
"name": "ndwi",
"title": "NDWI - Green, NIR",
"abstract": "Normalised Difference Water Index - a derived index that correlates well with the existence of water",
"index_function": lambda data: (data["nbart_green"] - data["nbart_nir_1"]) / (
data["nbart_nir_1"] + data["nbart_green"]),
"needed_bands": ["nbart_green", "nbart_nir_1"],
"color_ramp": [
{
"value": -0.0,
"color": "#8F3F20",
"alpha": 0.0
},
{
"value": 0.0,
"color": "#8F3F20",
"alpha": 1.0
},
{
"value": 1.0,
"color": "#0303FF",
},
]
},
{
"name": "ndci",
"title": "NDCI - Red Edge, Red",
"abstract": "Normalised Difference Chlorophyll Index - a derived index that correlates well with the existence of chlorophyll",
"index_function": lambda data: (data["nbart_red_edge_1"] - data["nbart_red"]) / (data["nbart_red_edge_1"] + data["nbart_red"]).where(((data["nbart_green"] - data["nbart_swir_3"]) / (data["nbart_green"] + data["nbart_swir_3"])) > 0.1),
"needed_bands": ["nbart_red_edge_1", "nbart_red", "nbart_green", "nbart_swir_3"],
"color_ramp": [
{
"value": -0.1,
"color": "#1696FF",
"legend": {
"prefix": "<"
}
},
{
"value": -0.1,
"color": "#1696FF"
},
{
"value": 0.0,
"color": "#00FFDF",
"legend": {}
},
{
"value": 0.1,
"color": "#FFF50E",
},
{
"value": 0.2,
"color": "#FFB50A",
"legend": {}
},
{
"value": 0.4,
"color": "#FF530D",
},
{
"value": 0.5,
"color": "#FF0000",
"legend": {
"prefix": ">"
}
}
]
},
{
"name": "aerosol",
"title": "Narrow Blue - 440",
"abstract": "Coastal Aerosol or Narrow Blue band, approximately 435nm to 450nm",
"components": {
"red": {
"nbart_coastal_aerosol": 1.0
},
"green": {
"nbart_coastal_aerosol": 1.0
},
"blue": {
"nbart_coastal_aerosol": 1.0
}
},
"scale_range": [0.0, 3000.0]
},
{
"name": "blue",
"title": "Blue - 490",
"abstract": "Blue band, approximately 453nm to 511nm",
"components": {
"red": {
"nbart_blue": 1.0
},
"green": {
"nbart_blue": 1.0
},
"blue": {
"nbart_blue": 1.0
}
},
"scale_range": [0.0, 3000.0]
},
{
"name": "green",
"title": "Green - 560",
"abstract": "Green band, approximately 534nm to 588nm",
"components": {
"red": {
"nbart_green": 1.0
},
"green": {
"nbart_green": 1.0
},
"blue": {
"nbart_green": 1.0
}
},
"scale_range": [0.0, 3000.0]
},
{
"name": "red",
"title": "Red - 670",
"abstract": "Red band, roughly 637nm to 672nm",
"components": {
"red": {
"nbart_red": 1.0
},
"green": {
"nbart_red": 1.0
},
"blue": {
"nbart_red": 1.0
}
},
"scale_range": [0.0, 3000.0]
},
{
"name": "red_edge_1",
"title": "Vegetation Red Edge - 710",
"abstract": "Near infra-red band, centred on 710nm",
"components": {
"red": {
"nbart_red_edge_1": 1.0
},
"green": {
"nbart_red_edge_1": 1.0
},
"blue": {
"nbart_red_edge_1": 1.0
}
},
"scale_range": [0.0, 3000.0]
},
{
"name": "red_edge_2",
"title": "Vegetation Red Edge - 740",
"abstract": "Near infra-red band, centred on 740nm",
"components": {
"red": {
"nbart_red_edge_2": 1.0
},
"green": {
"nbart_red_edge_2": 1.0
},
"blue": {
"nbart_red_edge_2": 1.0
}
},
"scale_range": [0.0, 3000.0]
},
{
"name": "red_edge_3",
"title": "Vegetation Red Edge - 780",
"abstract": "Near infra-red band, centred on 780nm",
"components": {
"red": {
"nbart_red_edge_3": 1.0
},
"green": {
"nbart_red_edge_3": 1.0
},
"blue": {
"nbart_red_edge_3": 1.0
}
},
"scale_range": [0.0, 3000.0]
},
{
"name": "nir",
"title": "Near Infrared (NIR) - 840",
"abstract": "Near infra-red band, roughly 853nm to 876nm",
"components": {
"red": {
"nbart_nir_1": 1.0
},
"green": {
"nbart_nir_1": 1.0
},
"blue": {
"nbart_nir_1": 1.0
}
},
"scale_range": [0.0, 3000.0]
},
{
"name": "narrow_nir",
"title": "Narrow Near Infrared - 870",
"abstract": "Near infra-red band, centred on 865nm",
"components": {
"red": {
"nbart_nir_2": 1.0
},
"green": {
"nbart_nir_2": 1.0
},
"blue": {
"nbart_nir_2": 1.0
}
},
"scale_range": [0.0, 3000.0]
},
{
"name": "swir1",
"title": "Shortwave Infrared (SWIR) - 1610",
"abstract": "Short wave infra-red band 1, roughly 1575nm to 1647nm",
"components": {
"red": {
"nbart_swir_2": 1.0
},
"green": {
"nbart_swir_2": 1.0
},
"blue": {
"nbart_swir_2": 1.0
}
},
"scale_range": [0.0, 3000.0]
},
{
"name": "swir2",
"title": "Shortwave Infrared (SWIR) - 2190",
"abstract": "Short wave infra-red band 2, roughly 2117nm to 2285nm",
"components": {
"red": {
"nbart_swir_3": 1.0
},
"green": {
"nbart_swir_3": 1.0
},
"blue": {
"nbart_swir_3": 1.0
}
},
"scale_range": [0.0, 3000.0]
}
],
# Default style (if request does not specify style)
# MUST be defined in the styles list above.
# (Looks like Terria assumes this is the first style in the list, but this is
# not required by the standard.)
"default_style": "simple_rgb",
},
{
# Included as a keyword for the layer
"label": "Sentinel 2A",
# Included as a keyword for the layer
"type": "",
# Included as a keyword for the layer
"variant": "Surface Reflectance",
"abstract": """
This is a 90-day rolling archive of daily Sentinel-2 Near Real Time data. The Near Real-Time capability provides analysis-ready data that is processed on receipt using the best-available ancillary information at the time to provide atmospheric corrections.
For more information see http://pid.geoscience.gov.au/dataset/ga/122229
The Normalised Difference Chlorophyll Index (NDCI) is based on the method of Mishra & Mishra 2012, and adapted to bands on the Sentinel-2A & B sensors.
The index indicates levels of chlorophyll-a (chl-a) concentrations in complex turbid productive waters such as those encountered in many inland water bodies. The index has not been validated in Australian waters, and there are a range of environmental conditions that may have an effect on the accuracy of the derived index values in this test implementation, including:
- Influence on the remote sensing signal from nearby land and/or atmospheric effects
- Optically shallow water
- Cloud cover
Mishra, S., Mishra, D.R., 2012. Normalized difference chlorophyll index: A novel model for remote estimation of chlorophyll-a concentration in turbid productive waters. Remote Sensing of Environment, Remote Sensing of Urban Environments 117, 394–406. https://doi.org/10.1016/j.rse.2011.10.016
For service status information, see https://status.dea.ga.gov.au""",
# The WMS name for the layer
"name": "s2a_ard_granule_nbar_t",
# The Datacube name for the associated data product
"product_name": "s2a_ard_granule",
# The Datacube name for the associated pixel-quality product (optional)
# The name of the associated Datacube pixel-quality product
# "pq_dataset": "s2b_nrt_granule",
# The name of the measurement band for the pixel-quality product
# (Only required if pq_dataset is set)
# "pq_band": "pixel_quality",
# Min zoom factor - sets the zoom level where the cutover from indicative polygons
# to actual imagery occurs.
"min_zoom_factor": 15.0,
# The fill-colour of the indicative polygons when zoomed out.
# Triplets (rgb) or quadruplets (rgba) of integers 0-255.
"zoomed_out_fill_colour": [150, 180, 200, 160],
# Time Zone. In hours added to UTC (maybe negative)
# Used for rounding off scene times to a date.
# 9 is good value for imagery of Australia.
"time_zone": 9,
# Extent mask function
# Determines what portions of dataset is potentially meaningful data.
"extent_mask_func": lambda data, band: (data[band] != data[band].attrs['nodata']),
# Flags listed here are ignored in GetFeatureInfo requests.
# (defaults to empty list)
"ignore_info_flags": [],
# Define layer wide legend graphic if no style is passed
# to GetLegendGraphic
"legend": {
# "url": ""
"styles": ["ndvi", "ndwi", "ndci"]
},
"wcs_default_bands": ["nbart_red", "nbart_green", "nbart_blue"],
# Styles.
#
# See band_mapper.py
#
# The various available spectral bands, and ways to combine them
# into a single rgb image.
# The examples here are ad hoc
#
"styles": [
# Examples of styles which are linear combinations of the available spectral bands.
#
{
"name": "simple_rgb",
"title": "Simple RGB",
"abstract": "Simple true-colour image, using the red, green and blue bands",
"components": {
"red": {
"nbart_red": 1.0
},
"green": {
"nbart_green": 1.0
},
"blue": {
"nbart_blue": 1.0
}
},
"scale_range": [0.0, 3000.0]
},
{
"name": "infrared_green",
"title": "False colour - Green, SWIR, NIR",
"abstract": "False Colour image with SWIR1->Red, NIR->Green, and Green->Blue",
"components": {
"red": {
"nbart_swir_2": 1.0
},
"green": {
"nbart_nir_1": 1.0
},
"blue": {
"nbart_green": 1.0
}
},
"scale_range": [0.0, 3000.0]
},
{
"name": "ndvi",
"title": "NDVI - Red, NIR",
"abstract": "Normalised Difference Vegetation Index - a derived index that correlates well with the existence of vegetation",
"index_function": lambda data: (data["nbart_nir_1"] - data["nbart_red"]) / (
data["nbart_nir_1"] + data["nbart_red"]),
"needed_bands": ["nbart_red", "nbart_nir_1"],
"color_ramp": [
{
"value": -0.0,
"color": "#8F3F20",
"alpha": 0.0
},
{
"value": 0.0,
"color": "#8F3F20",
"alpha": 1.0
},
{
"value": 0.1,
"color": "#A35F18"
},
{
"value": 0.2,
"color": "#B88512"
},
{
"value": 0.3,
"color": "#CEAC0E"
},
{
"value": 0.4,
"color": "#E5D609"
},
{
"value": 0.5,
"color": "#FFFF0C"
},
{
"value": 0.6,
"color": "#C3DE09"
},
{
"value": 0.7,
"color": "#88B808"
},
{
"value": 0.8,
"color": "#529400"
},
{
"value": 0.9,
"color": "#237100"
},
{
"value": 1.0,
"color": "#114D04"
}
]
},
{
"name": "nbr",
"title": "NBR",
"abstract": "The Normalized burn ratio (NBR) is used to identify burned areas. The formula is similar to a normalized difference vegetation index (NDVI), except that it uses near-infrared (NIR) and shortwave-infrared (SWIR) portions of the electromagnetic spectrum (Lopez, 1991; Key and Benson, 1995)",
"index_function": lambda data: (data["nbart_nir_1"] - data["nbart_swir_3"]) / (data["nbart_nir_1"] + data["nbart_swir_3"]),
"needed_bands": ["nbart_swir_3", "nbart_nir_1"],
"color_ramp": [
{
"value": -1.0,
"color": "#d81e11",
"legend": {}
},
{
"value": -0.2,
"color": "#d81e11",
},
{
"value": -0.19999999,
"color": "#d81e11",
"alpha": 0.0,
"legend": {
"label": ">-0.2"
}
},
{
"value": 1.0,
"color": "#d81e11",
"alpha": 0.0,
},
]
},
{
"name": "ndwi",
"title": "NDWI - Green, NIR",
"abstract": "Normalised Difference Water Index - a derived index that correlates well with the existence of water",
"index_function": lambda data: (data["nbart_green"] - data["nbart_nir_1"]) / (
data["nbart_nir_1"] + data["nbart_green"]),
"needed_bands": ["nbart_green", "nbart_nir_1"],
"color_ramp": [
{
"value": -0.0,
"color": "#8F3F20",
"alpha": 0.0
},
{
"value": 0.0,
"color": "#8F3F20",
"alpha": 1.0
},
{
"value": 1.0,
"color": "#0303FF",
},
]
},
{
"name": "ndci",
"title": "NDCI - Red Edge, Red",
"abstract": "Normalised Difference Chlorophyll Index - a derived index that correlates well with the existence of chlorophyll",
"index_function": lambda data: (data["nbart_red_edge_1"] - data["nbart_red"]) / (data["nbart_red_edge_1"] + data["nbart_red"]).where(((data["nbart_green"] - data["nbart_swir_3"]) / (data["nbart_green"] + data["nbart_swir_3"])) > 0.1),
"needed_bands": ["nbart_red_edge_1", "nbart_red", "nbart_green", "nbart_swir_3"],
"color_ramp": [
{
"value": -0.1,
"color": "#1696FF",
"legend": {
"prefix": "<"
}
},
{
"value": -0.1,
"color": "#1696FF"
},
{
"value": 0.0,
"color": "#00FFDF",
"legend": {}
},
{
"value": 0.1,
"color": "#FFF50E",
},
{
"value": 0.2,
"color": "#FFB50A",
"legend": {}
},
{
"value": 0.4,
"color": "#FF530D",
},
{
"value": 0.5,
"color": "#FF0000",
"legend": {
"prefix": ">"
}
}
]
},
{
"name": "aerosol",
"title": "Narrow Blue - 440",
"abstract": "Coastal Aerosol or Narrow Blue band, approximately 435nm to 450nm",
"components": {
"red": {
"nbart_coastal_aerosol": 1.0
},
"green": {
"nbart_coastal_aerosol": 1.0
},
"blue": {
"nbart_coastal_aerosol": 1.0
}
},
"scale_range": [0.0, 3000.0]
},
{
"name": "blue",
"title": "Blue - 490",
"abstract": "Blue band, approximately 453nm to 511nm",
"components": {
"red": {
"nbart_blue": 1.0
},
"green": {
"nbart_blue": 1.0
},
"blue": {
"nbart_blue": 1.0
}
},
"scale_range": [0.0, 3000.0]
},
{
"name": "green",
"title": "Green - 560",
"abstract": "Green band, approximately 534nm to 588nm",
"components": {
"red": {
"nbart_green": 1.0
},
"green": {
"nbart_green": 1.0
},
"blue": {
"nbart_green": 1.0
}
},
"scale_range": [0.0, 3000.0]
},
{
"name": "red",
"title": "Red - 670",
"abstract": "Red band, roughly 637nm to 672nm",
"components": {
"red": {
"nbart_red": 1.0
},
"green": {
"nbart_red": 1.0
},
"blue": {
"nbart_red": 1.0
}
},
"scale_range": [0.0, 3000.0]
},
{
"name": "red_edge_1",
"title": "Vegetation Red Edge - 710",
"abstract": "Near infra-red band, centred on 710nm",
"components": {
"red": {
"nbart_red_edge_1": 1.0
},
"green": {
"nbart_red_edge_1": 1.0
},
"blue": {
"nbart_red_edge_1": 1.0
}
},
"scale_range": [0.0, 3000.0]
},
{
"name": "red_edge_2",
"title": "Vegetation Red Edge - 740",
"abstract": "Near infra-red band, centred on 740nm",
"components": {
"red": {
"nbart_red_edge_2": 1.0
},
"green": {
"nbart_red_edge_2": 1.0
},
"blue": {
"nbart_red_edge_2": 1.0
}
},
"scale_range": [0.0, 3000.0]
},
{
"name": "red_edge_3",
"title": "Vegetation Red Edge - 780",
"abstract": "Near infra-red band, centred on 780nm",
"components": {
"red": {
"nbart_red_edge_3": 1.0
},
"green": {
"nbart_red_edge_3": 1.0
},
"blue": {
"nbart_red_edge_3": 1.0
}
},
"scale_range": [0.0, 3000.0]
},
{
"name": "nir",
"title": "Near Infrared (NIR) - 840",
"abstract": "Near infra-red band, roughly 853nm to 876nm",
"components": {
"red": {
"nbart_nir_1": 1.0
},
"green": {
"nbart_nir_1": 1.0
},
"blue": {
"nbart_nir_1": 1.0
}
},
"scale_range": [0.0, 3000.0]
},
{
"name": "narrow_nir",
"title": "Narrow Near Infrared - 870",
"abstract": "Near infra-red band, centred on 865nm",
"components": {
"red": {
"nbart_nir_2": 1.0
},
"green": {
"nbart_nir_2": 1.0
},
"blue": {
"nbart_nir_2": 1.0
}
},
"scale_range": [0.0, 3000.0]
},
{
"name": "swir1",
"title": "Shortwave Infrared (SWIR) - 1610",
"abstract": "Short wave infra-red band 1, roughly 1575nm to 1647nm",
"components": {
"red": {
"nbart_swir_2": 1.0
},
"green": {
"nbart_swir_2": 1.0
},
"blue": {
"nbart_swir_2": 1.0
}
},
"scale_range": [0.0, 3000.0]
},
{
"name": "swir2",
"title": "Shortwave Infrared (SWIR) - 2190",
"abstract": "Short wave infra-red band 2, roughly 2117nm to 2285nm",
"components": {
"red": {
"nbart_swir_3": 1.0
},
"green": {
"nbart_swir_3": 1.0
},
"blue": {
"nbart_swir_3": 1.0
}
},
"scale_range": [0.0, 3000.0]
}
],
# Default style (if request does not specify style)
# MUST be defined in the styles list above.
# (Looks like Terria assumes this is the first style in the list, but this is
# not required by the standard.)
"default_style": "simple_rgb",
},
],
}
]
| 46.051852
| 370
| 0.32493
| 4,936
| 68,387
| 4.380267
| 0.112237
| 0.012488
| 0.032468
| 0.021646
| 0.903705
| 0.902456
| 0.896443
| 0.896443
| 0.892281
| 0.888719
| 0
| 0.060809
| 0.581587
| 68,387
| 1,484
| 371
| 46.082884
| 0.69466
| 0.085338
| 0
| 0.591219
| 0
| 0.011355
| 0.290345
| 0.007804
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
fe2604afe1aa0bce8f8c9912532c655319cdfeed
| 3,301
|
py
|
Python
|
17. deep_add/deep_add_solutions.py
|
jeury301/python-morsels
|
fdbe0b1c80120e2d1388808816538fea5dab8892
|
[
"MIT"
] | 2
|
2018-08-21T10:29:57.000Z
|
2019-04-17T07:05:17.000Z
|
17. deep_add/deep_add_solutions.py
|
jeury301/python-morsels
|
fdbe0b1c80120e2d1388808816538fea5dab8892
|
[
"MIT"
] | null | null | null |
17. deep_add/deep_add_solutions.py
|
jeury301/python-morsels
|
fdbe0b1c80120e2d1388808816538fea5dab8892
|
[
"MIT"
] | null | null | null |
def v0_deep_add(list_or_number):
    """Return the sum of all numbers in an arbitrarily nested list.

    :param list_or_number: a (possibly nested) list of numbers, or a number
    :return: the deep sum; a bare number is returned unchanged
    """
    # BUG FIX: the recursive call referenced the undefined name ``deep_add``
    # (not defined anywhere in this file), raising NameError on nested input.
    # Recurse through this function itself instead.
    total = 0
    if isinstance(list_or_number, list):
        for item in list_or_number:
            total += v0_deep_add(item)
        return total
    else:
        return list_or_number
def v1_deep_add(lists):
    """Return sum of values in given list, iterating deeply.

    Iterative version: keeps an explicit work stack instead of recursing.
    """
    stack = list(lists)
    running = 0
    while stack:
        current = stack.pop()
        if isinstance(current, list):
            # Defer nested elements; they will be visited later.
            stack.extend(current)
        else:
            running += current
    return running
def v2_deep_add(list_or_number):
    """Return sum of values in given list, iterating deeply.

    :param list_or_number: a (possibly nested) list of numbers, or a number
    :return: the deep sum; a bare number is returned unchanged
    """
    # BUG FIX: recursion previously called the undefined name ``deep_add``.
    if isinstance(list_or_number, list):
        return sum(v2_deep_add(x) for x in list_or_number)
    else:
        return list_or_number
def v3_deep_add(list_or_number):
    """Return sum of values in given list, iterating deeply.

    Same contract as the other versions, written as a single conditional
    expression.
    """
    # BUG FIX: recursion previously called the undefined name ``deep_add``.
    return (
        sum(v3_deep_add(x) for x in list_or_number)
        if isinstance(list_or_number, list)
        else list_or_number
    )
def v4_deep_add(iterable_or_number):
    """Return sum of values in given iterable, iterating deeply.

    EAFP version: try to iterate; a TypeError means the argument is a
    plain (non-iterable) number, which is returned unchanged.
    """
    # BUG FIX: recursion previously called the undefined name ``deep_add``.
    try:
        return sum(v4_deep_add(x) for x in iterable_or_number)
    except TypeError:
        return iterable_or_number
def v5_deep_add(iterable_or_number):
    """Return sum of values in given iterable, iterating deeply.

    Type-check version: anything that is not an int/float/complex is
    assumed to be iterable and is summed recursively.
    """
    # BUG FIX: recursion previously called the undefined name ``deep_add``.
    if isinstance(iterable_or_number, (int, float, complex)):
        return iterable_or_number
    else:
        return sum(v5_deep_add(x) for x in iterable_or_number)
from numbers import Number
def v6_deep_add(iterable_or_number):
    """Return sum of values in given iterable, iterating deeply.

    Uses the ``numbers.Number`` ABC so any numeric type (including
    Decimal, Fraction) is treated as a leaf value.
    """
    # BUG FIX: recursion previously called the undefined name ``deep_add``.
    if isinstance(iterable_or_number, Number):
        return iterable_or_number
    else:
        return sum(v6_deep_add(x) for x in iterable_or_number)
def v7_deep_add(iterable_or_number, start=0):
    """Return sum of values in given iterable, iterating deeply.

    :param iterable_or_number: nested iterable of numbers, or a number
    :param start: value added to the top-level total (like ``sum(..., start)``)
    :return: the deep sum
    """
    # BUG FIX: recursion previously called the undefined name ``deep_add``.
    # Note the recursive calls intentionally use the default start of 0,
    # so *start* is only applied once at the top level.
    if isinstance(iterable_or_number, Number):
        return iterable_or_number
    else:
        total = start
        for x in iterable_or_number:
            total += v7_deep_add(x)
        return total
def v8_deep_add(iterable_or_number, start=0):
    """Return sum of values in given iterable, iterating deeply.

    Same as v7 but delegates the accumulation to ``sum(..., start)``.
    """
    # BUG FIX: recursion previously called the undefined name ``deep_add``.
    if isinstance(iterable_or_number, Number):
        return iterable_or_number
    else:
        return sum((v8_deep_add(x) for x in iterable_or_number), start)
def v9_deep_add(iterable_or_number, start=0):
    """Return sum of values in given iterable, iterating deeply.

    EAFP iterability probe: ``iter`` raises TypeError for non-iterables,
    which are returned unchanged as leaf values.
    """
    # BUG FIX: recursion previously called the undefined name ``deep_add``.
    try:
        iter(iterable_or_number)
    except TypeError:
        return iterable_or_number
    else:
        return sum((v9_deep_add(x) for x in iterable_or_number), start)
def v10_deep_add(iterable_or_number, start=0):
    """Return sum of values in given iterable, iterating deeply.

    LBYL version: probes for the ``__iter__`` protocol attribute.
    """
    # BUG FIX: recursion previously called the undefined name ``deep_add``.
    if hasattr(iterable_or_number, '__iter__'):
        return sum((v10_deep_add(x) for x in iterable_or_number), start)
    else:
        return iterable_or_number
from collections.abc import Iterable
def v11_deep_add(iterable_or_number, start=0):
    """Return sum of values in given iterable, iterating deeply.

    Uses the ``collections.abc.Iterable`` ABC to detect branch nodes.
    """
    # BUG FIX: recursion previously called the undefined name ``deep_add``.
    if isinstance(iterable_or_number, Iterable):
        return sum((v11_deep_add(x) for x in iterable_or_number), start)
    else:
        return iterable_or_number
| 32.362745
| 68
| 0.677673
| 481
| 3,301
| 4.390852
| 0.112266
| 0.162879
| 0.234848
| 0.096591
| 0.861742
| 0.861742
| 0.795455
| 0.795455
| 0.765152
| 0.732955
| 0
| 0.008333
| 0.236595
| 3,301
| 101
| 69
| 32.683168
| 0.829762
| 0.205695
| 0
| 0.565789
| 0
| 0
| 0.003124
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.157895
| false
| 0
| 0.026316
| 0
| 0.473684
| 0
| 0
| 0
| 0
| null | 0
| 1
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
fe4138a7b55ac09923e599ca658fe9660267463e
| 10,686
|
py
|
Python
|
tests/test_dual.py
|
The-Pyoneers/cs107-FinalProject
|
76e9d7a0fd5418ece35458df3b379602e699388a
|
[
"MIT"
] | 1
|
2020-11-16T01:48:46.000Z
|
2020-11-16T01:48:46.000Z
|
tests/test_dual.py
|
The-Pyoneers/cs107-FinalProject
|
76e9d7a0fd5418ece35458df3b379602e699388a
|
[
"MIT"
] | 7
|
2020-11-16T02:48:00.000Z
|
2020-12-12T03:25:41.000Z
|
tests/test_dual.py
|
The-Pyoneers/cs107-FinalProject
|
76e9d7a0fd5418ece35458df3b379602e699388a
|
[
"MIT"
] | 2
|
2021-02-25T00:09:27.000Z
|
2021-11-15T02:29:06.000Z
|
import pytest
from farad.dual import Dual
import numpy as np
def test_add():
    """Test of addition special method (__add__) of Dual class."""
    # Plain asserts: pytest reports failures with full introspection; the
    # former try/except-print-reraise wrapper discarded the failing detail.
    # Test for addition with scalar Dual object and float value
    fx = Dual(2) + 3.5
    assert fx.val == 5.5
    assert fx.der == 1.0
    # Test for addition with two scalar Dual objects
    fx = Dual(2.0) + Dual(1.0)
    assert fx.val == 3.0
    assert fx.der == 2.0
def test_radd():
    """Test of reverse addition special method (__radd__) of Dual class."""
    # float + Dual exercises __radd__; plain asserts replace the old
    # try/except wrapper that hid the failing expression.
    fx = 1.5 + Dual(1.5)
    assert fx.val == 3.0
    assert fx.der == 1.0
def test_sub():
    """Test of subtraction special method (__sub__) of Dual class."""
    # Plain asserts replace the redundant try/except-print-reraise wrapper.
    # Test for subtraction with scalar Dual object and float value
    fx = Dual(5) - 0.5
    assert fx.val == 4.5
    assert fx.der == 0.5
    # Test for subtraction with two scalar Dual objects
    fx = Dual(2.0) - Dual(1.0)
    assert fx.val == 1.0
    assert fx.der == 0
def test_rsub():
    """Test of reverse subtraction special method (__rsub__) of Dual class."""
    # float - Dual exercises __rsub__; plain asserts for clear failures.
    fx = 5.5 - Dual(5)
    assert fx.val == 0.5
    assert fx.der == 4.5
def test_mul():
    """Test of multiplication special method (__mul__) of Dual class."""
    # Plain asserts replace the redundant try/except-print-reraise wrapper.
    # Test for multiplication with scalar Dual object and float value
    fx = Dual(5) * 0.5
    assert fx.val == 2.5
    assert fx.der == 0.5
    # Test for multiplication with two scalar Dual objects
    fx = Dual(2.0) * Dual(1.0)
    assert fx.val == 2.0
    assert fx.der == 3.0
def test_rmul():
    """Test of reverse multiplication special method (__rmul__) of Dual class."""
    # float * Dual exercises __rmul__; plain asserts for clear failures.
    fx = 0.5 * Dual(5)
    assert fx.val == 2.5
    assert fx.der == 0.5
def test_truediv():
    """Test of the division special method (__truediv__) of Dual class."""
    # Plain asserts replace the redundant try/except-print-reraise wrapper.
    # Test for division with scalar Dual object and float value
    fx = Dual(5) / 2
    assert fx.val == 2.5
    assert fx.der == 0.5
    # Test for division with two scalar Dual objects
    fx = Dual(2.0) / Dual(1.0)
    assert fx.val == 2.0
    assert fx.der == -1.0
def test_rtruediv():
    """Test of the reverse division special method (__rtruediv__) of Dual class."""
    # float / Dual exercises __rtruediv__; d(1/x)/dx at x=5 is -1/25 = -0.04.
    fx = 1 / Dual(5)
    assert fx.val == 0.2
    assert fx.der == -0.04
def test_neg():
    """Test of the negation special method (__neg__) of Dual class."""
    # Plain asserts replace the redundant try/except-print-reraise wrapper.
    fx = -Dual(5)
    assert fx.val == -5.0
    assert fx.der == -1.0
def test_pos():
    """Test of the positive special method (__pos__) of Dual class."""
    # Plain asserts replace the redundant try/except-print-reraise wrapper.
    fx = +Dual(5)
    assert fx.val == 5.0
    assert fx.der == 1.0
def test_pow():
    """Test of the power special method (__pow__) of Dual class."""
    # Test for power with scalar Dual object and an int exponent
    fx = Dual(2) ** 2
    assert fx.val == 4.0
    assert fx.der == 4.0
    # Test for power with two scalar Dual objects:
    # d(x**x)/dx at x=2 is 4*(ln 2 + 1) ~= 6.77
    x = Dual(2)
    fx = x ** x
    assert fx.val == 4.0
    assert fx.der == pytest.approx(6.77, 0.001)
def test_rpow():
    """Test of the reverse power special method (__rpow__) of Dual class."""
    # float ** Dual exercises __rpow__: d(2**x)/dx at x=2 is 4*ln 2 ~= 2.77
    fx = 2 ** Dual(2)
    assert fx.val == 4.0
    assert fx.der == pytest.approx(2.77, 0.001)
def test_eq():
    """Test of the equality special method (__eq__) of Dual class."""
    # Comparisons are checked against True/False explicitly so a truthy
    # non-bool return from __eq__ would still be caught.
    x = Dual(2)
    assert (x == 2) == True
    assert (x == 1) == False
    # Test for equality with two scalar Dual objects
    x = Dual(2, [1, 0])
    y = Dual(2, [1, 0])
    z = Dual(2, [0, 1])
    assert (x == y) == True
    assert (x == z) == False
def test_neq():
    """Test of the not equal special method (__ne__) of Dual class."""
    # Comparisons checked against True/False explicitly (see test_eq).
    x = Dual(2)
    assert (x != 2) == False
    assert (x != 1) == True
    # Test for inequality with two scalar Dual objects
    x = Dual(2, [1, 0])
    y = Dual(2, [1, 0])
    z = Dual(2, [0, 1])
    assert (x != y) == False
    assert (x != z) == True
def test_lt():
    """Test of the less than special method (__lt__) of Dual class."""
    # Comparisons checked against True/False explicitly (see test_eq).
    x = Dual(2)
    assert (x < 3) == True
    assert (x < 1) == False
    # Test for less than with two scalar Dual objects
    a = Dual(2, [1, 0])
    b = Dual(2, [1, 0])
    c = Dual(2, [0, 1])
    d = Dual(1, [0, 1])
    assert (a < b) == False
    assert (a < c) == False
    assert (d < a) == True
def test_le():
    """Test of the less than or equal to special method (__le__) of Dual class."""
    # Comparisons checked against True/False explicitly (see test_eq).
    x = Dual(2)
    assert (x <= 3) == True
    assert (x <= 2) == True
    assert (x <= 1) == False
    # Test for less than or equal to with two scalar Dual objects
    a = Dual(2, [1, 0])
    b = Dual(2, [1, 0])
    c = Dual(2, [0, 1])
    d = Dual(1, [0, 1])
    assert (a <= b) == True
    assert (a <= c) == True
    assert (a <= d) == False
def test_gt():
    """Test of the greater than special method (__gt__) of Dual class."""
    # Comparisons checked against True/False explicitly (see test_eq).
    x = Dual(2)
    assert (x > 3) == False
    assert (x > 1) == True
    # Test for greater than with two scalar Dual objects
    a = Dual(2, [1, 0])
    b = Dual(2, [1, 0])
    c = Dual(2, [0, 1])
    d = Dual(1, [0, 1])
    assert (a > b) == False
    assert (a > c) == False
    assert (a > d) == True
def test_ge():
    """Test of the greater than or equal to special method (__ge__) of Dual class."""
    # Comparisons checked against True/False explicitly (see test_eq).
    x = Dual(2)
    assert (x >= 3) == False
    assert (x >= 1) == True
    # Test for greater than or equal to with two scalar Dual objects
    a = Dual(2, [1, 0])
    b = Dual(2, [1, 0])
    c = Dual(2, [0, 1])
    d = Dual(1, [0, 1])
    assert (a >= b) == True
    assert (a >= c) == True
    assert (d >= a) == False
def test_repr():
    """Test of the representation special method (__repr__) of Dual class."""
    # Plain asserts replace the redundant try/except-print-reraise wrapper.
    x = Dual(2)
    y = Dual(2, [0, 1])
    assert repr(x) == 'Dual(2,1)'
    assert repr(y) == 'Dual(2,[0, 1])'
def test_str():
    """Test of the string special method (__str__) of Dual class."""
    # Plain asserts replace the redundant try/except-print-reraise wrapper.
    x = Dual(2)
    y = Dual(2, [0, 1])
    assert str(x) == 'Forward-mode Dual Object ( Values: 2, Derivatives: 1 )'
    assert str(y) == 'Forward-mode Dual Object ( Values: 2, Derivatives: [0, 1] )'
def test_len():
    """Test of the length special method (__len__) of Dual class."""
    # Plain asserts replace the redundant try/except-print-reraise wrapper.
    x = Dual(2)
    y = Dual(2, [0, 1])
    assert len(x) == 1
    assert len(y) == 1
| 26.715
| 94
| 0.576923
| 1,545
| 10,686
| 3.92233
| 0.06343
| 0.085809
| 0.116172
| 0.121452
| 0.836964
| 0.796205
| 0.762871
| 0.727888
| 0.727888
| 0.710726
| 0
| 0.032076
| 0.320232
| 10,686
| 399
| 95
| 26.781955
| 0.802175
| 0.319671
| 0
| 0.686007
| 0
| 0
| 0.019066
| 0
| 0
| 0
| 0
| 0
| 0.453925
| 1
| 0.071672
| false
| 0
| 0.010239
| 0
| 0.081911
| 0.109215
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
fe6d5fea2299ffad5f178e3eeb98604c63524b57
| 15,412
|
py
|
Python
|
testcases/generated/vm_test.py
|
Tanc009/jdcloud-cli
|
4e11de77c68501f44e7026c0ad1c24e5d043197e
|
[
"Apache-2.0"
] | null | null | null |
testcases/generated/vm_test.py
|
Tanc009/jdcloud-cli
|
4e11de77c68501f44e7026c0ad1c24e5d043197e
|
[
"Apache-2.0"
] | null | null | null |
testcases/generated/vm_test.py
|
Tanc009/jdcloud-cli
|
4e11de77c68501f44e7026c0ad1c24e5d043197e
|
[
"Apache-2.0"
] | null | null | null |
# coding=utf8
# Copyright 2018 JDCLOUD.COM
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# NOTE: This class is auto generated by the jdcloud code generator program.
import unittest
import os
import json
class VmTest(unittest.TestCase):
    """Auto-generated smoke tests for the ``vm`` sub-commands of the CLI.

    Each test shells out to ``main.py``, echoes the raw stdout and checks
    that it parses as a JSON object.  The identical popen/read/parse
    boilerplate that was repeated in every test is factored into a single
    helper; the command strings are preserved byte-for-byte.
    """

    def _assert_cmd_returns_json_object(self, cmd):
        """Run *cmd* in a shell, print its stdout and assert it parses as a JSON dict.

        :param cmd: full command line to execute via ``os.popen``
        """
        # NOTE(review): os.popen goes through the shell; acceptable here
        # because every command string is generated, not user-supplied.
        with os.popen(cmd) as f:
            content = f.read()
            print(content)
            result = json.loads(content)
            self.assertIsInstance(result, dict)

    def test_describe_image(self):
        self._assert_cmd_returns_json_object("""python ../../main.py vm describe-image --image-id 'xxx'""")

    def test_delete_image(self):
        self._assert_cmd_returns_json_object("""python ../../main.py vm delete-image --image-id 'xxx'""")

    def test_describe_images(self):
        self._assert_cmd_returns_json_object("""python ../../main.py vm describe-images """)

    def test_describe_image_constraints(self):
        self._assert_cmd_returns_json_object("""python ../../main.py vm describe-image-constraints --image-id 'xxx'""")

    def test_describe_image_constraints_batch(self):
        self._assert_cmd_returns_json_object("""python ../../main.py vm describe-image-constraints-batch """)

    def test_share_image(self):
        self._assert_cmd_returns_json_object("""python ../../main.py vm share-image --image-id 'xxx'""")

    def test_un_share_image(self):
        self._assert_cmd_returns_json_object("""python ../../main.py vm un-share-image --image-id 'xxx'""")

    def test_release_image(self):
        self._assert_cmd_returns_json_object("""python ../../main.py vm release-image --image-id 'xxx'""")

    def test_un_release_image(self):
        self._assert_cmd_returns_json_object("""python ../../main.py vm un-release-image --image-id 'xxx'""")

    def test_describe_image_members(self):
        self._assert_cmd_returns_json_object("""python ../../main.py vm describe-image-members --image-id 'xxx'""")

    def test_copy_images(self):
        self._assert_cmd_returns_json_object("""python ../../main.py vm copy-images --destination-region 'xxx'""")

    def test_modify_image_attribute(self):
        self._assert_cmd_returns_json_object("""python ../../main.py vm modify-image-attribute --image-id 'xxx'""")

    def test_import_image(self):
        self._assert_cmd_returns_json_object("""python ../../main.py vm import-image --architecture 'xxx' --os-type 'xxx' --platform 'xxx' --disk-format 'xxx' --system-disk-size-gb '5' --image-url 'xxx' --image-name 'xxx'""")

    def test_image_tasks(self):
        self._assert_cmd_returns_json_object("""python ../../main.py vm image-tasks --task-action 'xxx'""")

    def test_describe_instances(self):
        self._assert_cmd_returns_json_object("""python ../../main.py vm describe-instances """)

    def test_create_instances(self):
        self._assert_cmd_returns_json_object("""python ../../main.py vm create-instances --instance-spec '{"":""}'""")

    def test_describe_instance(self):
        self._assert_cmd_returns_json_object("""python ../../main.py vm describe-instance --instance-id 'xxx'""")

    def test_delete_instance(self):
        self._assert_cmd_returns_json_object("""python ../../main.py vm delete-instance --instance-id 'xxx'""")

    def test_describe_instance_status(self):
        self._assert_cmd_returns_json_object("""python ../../main.py vm describe-instance-status """)

    def test_describe_instance_private_ip_address(self):
        self._assert_cmd_returns_json_object("""python ../../main.py vm describe-instance-private-ip-address """)

    def test_stop_instance(self):
        self._assert_cmd_returns_json_object("""python ../../main.py vm stop-instance --instance-id 'xxx'""")

    def test_start_instance(self):
        self._assert_cmd_returns_json_object("""python ../../main.py vm start-instance --instance-id 'xxx'""")

    def test_reboot_instance(self):
        self._assert_cmd_returns_json_object("""python ../../main.py vm reboot-instance --instance-id 'xxx'""")

    def test_attach_network_interface(self):
        self._assert_cmd_returns_json_object("""python ../../main.py vm attach-network-interface --instance-id 'xxx' --network-interface-id 'xxx'""")

    def test_detach_network_interface(self):
        self._assert_cmd_returns_json_object("""python ../../main.py vm detach-network-interface --instance-id 'xxx' --network-interface-id 'xxx'""")

    def test_modify_instance_network_attribute(self):
        self._assert_cmd_returns_json_object("""python ../../main.py vm modify-instance-network-attribute --instance-id 'xxx'""")

    def test_associate_elastic_ip(self):
        self._assert_cmd_returns_json_object("""python ../../main.py vm associate-elastic-ip --instance-id 'xxx' --elastic-ip-id 'xxx'""")

    def test_disassociate_elastic_ip(self):
        self._assert_cmd_returns_json_object("""python ../../main.py vm disassociate-elastic-ip --instance-id 'xxx' --elastic-ip-id 'xxx'""")

    def test_create_image(self):
        self._assert_cmd_returns_json_object("""python ../../main.py vm create-image --instance-id 'xxx' --name 'xxx'""")

    def test_attach_disk(self):
        self._assert_cmd_returns_json_object("""python ../../main.py vm attach-disk --instance-id 'xxx' --disk-id 'xxx'""")

    def test_detach_disk(self):
        self._assert_cmd_returns_json_object("""python ../../main.py vm detach-disk --instance-id 'xxx' --disk-id 'xxx'""")

    def test_modify_instance_disk_attribute(self):
        self._assert_cmd_returns_json_object("""python ../../main.py vm modify-instance-disk-attribute --instance-id 'xxx'""")

    def test_modify_instance_attribute(self):
        self._assert_cmd_returns_json_object("""python ../../main.py vm modify-instance-attribute --instance-id 'xxx'""")

    def test_modify_instance_password(self):
        self._assert_cmd_returns_json_object("""python ../../main.py vm modify-instance-password --instance-id 'xxx' --password 'xxx'""")

    def test_describe_instance_vnc_url(self):
        self._assert_cmd_returns_json_object("""python ../../main.py vm describe-instance-vnc-url --instance-id 'xxx'""")

    def test_resize_instance(self):
        self._assert_cmd_returns_json_object("""python ../../main.py vm resize-instance --instance-id 'xxx' --instance-type 'xxx'""")

    def test_rebuild_instance(self):
        self._assert_cmd_returns_json_object("""python ../../main.py vm rebuild-instance --instance-id 'xxx' --password 'xxx'""")

    def test_describe_instance_templates(self):
        self._assert_cmd_returns_json_object("""python ../../main.py vm describe-instance-templates """)

    def test_create_instance_template(self):
        self._assert_cmd_returns_json_object("""python ../../main.py vm create-instance-template --instance-template-data '{"":""}' --name 'xxx'""")

    def test_describe_instance_template(self):
        self._assert_cmd_returns_json_object("""python ../../main.py vm describe-instance-template --instance-template-id 'xxx'""")

    def test_update_instance_template(self):
        self._assert_cmd_returns_json_object("""python ../../main.py vm update-instance-template --instance-template-id 'xxx'""")

    def test_delete_instance_template(self):
        self._assert_cmd_returns_json_object("""python ../../main.py vm delete-instance-template --instance-template-id 'xxx'""")

    def test_verify_instance_template(self):
        self._assert_cmd_returns_json_object("""python ../../main.py vm verify-instance-template --instance-template-id 'xxx'""")

    def test_describe_instance_types(self):
        self._assert_cmd_returns_json_object("""python ../../main.py vm describe-instance-types """)

    def test_describe_keypairs(self):
        self._assert_cmd_returns_json_object("""python ../../main.py vm describe-keypairs """)

    def test_create_keypair(self):
        self._assert_cmd_returns_json_object("""python ../../main.py vm create-keypair --key-name 'xxx'""")

    def test_import_keypair(self):
        self._assert_cmd_returns_json_object("""python ../../main.py vm import-keypair --key-name 'xxx' --public-key 'xxx'""")

    def test_delete_keypair(self):
        self._assert_cmd_returns_json_object("""python ../../main.py vm delete-keypair --key-name 'xxx'""")

    def test_describe_quotas(self):
        self._assert_cmd_returns_json_object("""python ../../main.py vm describe-quotas """)
| 33.002141
| 194
| 0.590319
| 1,870
| 15,412
| 4.796791
| 0.084492
| 0.038239
| 0.071014
| 0.092865
| 0.869565
| 0.865998
| 0.863657
| 0.794314
| 0.702453
| 0.677703
| 0
| 0.000889
| 0.270114
| 15,412
| 466
| 195
| 33.072961
| 0.796515
| 0.041137
| 0
| 0.706052
| 0
| 0.037464
| 0.226491
| 0.048713
| 0
| 0
| 0
| 0
| 0.14121
| 1
| 0.14121
| false
| 0.008646
| 0.020173
| 0
| 0.164265
| 0.14121
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
fe8a0e2a9ddc0a0e9ede58870c874a446d086c1a
| 143
|
py
|
Python
|
fsetools/tests/test_libstd_ec_1991_1_2.py
|
fsepy/fsetools
|
6b6c647912551680109a84d8640b9cfbe7970970
|
[
"Apache-2.0"
] | 1
|
2020-02-25T21:47:56.000Z
|
2020-02-25T21:47:56.000Z
|
fsetools/tests/test_libstd_ec_1991_1_2.py
|
fsepy/fsetools
|
6b6c647912551680109a84d8640b9cfbe7970970
|
[
"Apache-2.0"
] | 12
|
2020-02-24T10:10:57.000Z
|
2020-09-18T11:18:08.000Z
|
fsetools/tests/test_libstd_ec_1991_1_2.py
|
fsepy/fsetools
|
6b6c647912551680109a84d8640b9cfbe7970970
|
[
"Apache-2.0"
] | null | null | null |
# Smoke-test entry point: re-export the library's built-in self-check for the
# EC 1991-1-2 Appendix A parametric fire under a pytest-discoverable name,
# then run it once on import as well.
from fsetools.libstd.ec_1991_1_2 import _test_appendix_a_parametric_fire as test_appendix_a_parametric_fire
test_appendix_a_parametric_fire()
| 35.75
| 107
| 0.916084
| 24
| 143
| 4.791667
| 0.583333
| 0.313043
| 0.33913
| 0.6
| 0.704348
| 0
| 0
| 0
| 0
| 0
| 0
| 0.044444
| 0.055944
| 143
| 3
| 108
| 47.666667
| 0.807407
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.5
| 0
| 0.5
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
|
0
| 7
|
fe9491846c8146dcfa480bb2e7bf9cb2cce89350
| 1,015
|
py
|
Python
|
util/cfg_reader.py
|
shartoo/merlin-tf-slim
|
4c7d48d5f634273dd51d2e29562d3ed1195d9151
|
[
"Apache-2.0"
] | null | null | null |
util/cfg_reader.py
|
shartoo/merlin-tf-slim
|
4c7d48d5f634273dd51d2e29562d3ed1195d9151
|
[
"Apache-2.0"
] | null | null | null |
util/cfg_reader.py
|
shartoo/merlin-tf-slim
|
4c7d48d5f634273dd51d2e29562d3ed1195d9151
|
[
"Apache-2.0"
] | null | null | null |
"""
a configuration file reader
"""
# Fix: this text previously sat AFTER the import, so it was a plain string
# expression rather than the module docstring; it must come first to be
# picked up as ``__doc__``.
import configparser

# Shared parser instance used by all the get_section_* helpers below.
# NOTE(review): nothing here calls ``cf.read``; callers are presumably
# expected to load a file into ``cf`` first -- confirm.
cf = configparser.ConfigParser()
def get_section_string(section, name):
    """Read a string-typed value from the loaded cfg file.

    :param section: section in configuration file
    :param name: the key of value you want to read
    :return: variable value
    """
    value = cf.get(section, name)
    return value
def get_section_int(section, name):
    """Read an int-typed value from the loaded cfg file.

    :param section: section in configuration file
    :param name: the key of value you want to read
    :return: variable value
    """
    value = cf.getint(section, name)
    return value
def get_section_bool(section, name):
    """Read a boolean-typed value from the loaded cfg file.

    :param section: section in configuration file
    :param name: the key of value you want to read
    :return: variable value
    """
    value = cf.getboolean(section, name)
    return value
| 27.432432
| 64
| 0.634483
| 129
| 1,015
| 4.945736
| 0.263566
| 0.103448
| 0.061129
| 0.10815
| 0.780564
| 0.705329
| 0.705329
| 0.705329
| 0.705329
| 0.705329
| 0
| 0
| 0.296552
| 1,015
| 36
| 65
| 28.194444
| 0.893557
| 0.578325
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.375
| false
| 0
| 0.125
| 0
| 0.875
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 1
| 0
|
0
| 7
|
fe992a28722880df3aa72c9e70c7117b3c4fb4a2
| 191
|
py
|
Python
|
swagger_client/swagger_client/api/__init__.py
|
AleksandrVin/SpeedtestService
|
9385513e656a4935de0b29212ee9d62037db6ccc
|
[
"BSD-3-Clause"
] | null | null | null |
swagger_client/swagger_client/api/__init__.py
|
AleksandrVin/SpeedtestService
|
9385513e656a4935de0b29212ee9d62037db6ccc
|
[
"BSD-3-Clause"
] | 18
|
2021-08-04T17:13:33.000Z
|
2021-09-22T14:00:44.000Z
|
swagger_client/swagger_client/api/__init__.py
|
AleksandrVin/SpeedtestService
|
9385513e656a4935de0b29212ee9d62037db6ccc
|
[
"BSD-3-Clause"
] | 3
|
2021-08-11T10:28:01.000Z
|
2021-08-17T14:30:08.000Z
|
from __future__ import absolute_import
# flake8: noqa
# import apis into api package
from swagger_client.api.client_api import ClientApi
from swagger_client.api.server_api import ServerApi
| 23.875
| 51
| 0.842932
| 28
| 191
| 5.428571
| 0.535714
| 0.177632
| 0.223684
| 0.263158
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.005952
| 0.120419
| 191
| 7
| 52
| 27.285714
| 0.89881
| 0.21466
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
228afcfec0d117f466d20b7500e6487e3927dc13
| 124
|
py
|
Python
|
src/Bond.py
|
jaweej/pms
|
c44f082ab926b0b8227235fcc4466738fd70d26b
|
[
"MIT"
] | null | null | null |
src/Bond.py
|
jaweej/pms
|
c44f082ab926b0b8227235fcc4466738fd70d26b
|
[
"MIT"
] | 11
|
2018-02-13T13:11:45.000Z
|
2022-03-01T23:11:21.000Z
|
src/Bond.py
|
jaweej/pms
|
c44f082ab926b0b8227235fcc4466738fd70d26b
|
[
"MIT"
] | null | null | null |
from airflow import DAG
from airflow.operators.bash_operator import BashOperator
class Bond:
    """Empty placeholder for the Bond entity."""
| 17.714286
| 56
| 0.709677
| 15
| 124
| 5.8
| 0.733333
| 0.252874
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.201613
| 124
| 6
| 57
| 20.666667
| 0.878788
| 0.032258
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0.25
| 0.5
| 0
| 0.75
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 1
| 0
| 1
| 0
|
0
| 7
|
22bb1af60aedaf083174c1031a60013b3cdd78cb
| 17,575
|
py
|
Python
|
test/test_md004.py
|
scop/pymarkdown
|
562ba8f7857d99ba09e86e42de5a37ec6d9b2c30
|
[
"MIT"
] | null | null | null |
test/test_md004.py
|
scop/pymarkdown
|
562ba8f7857d99ba09e86e42de5a37ec6d9b2c30
|
[
"MIT"
] | null | null | null |
test/test_md004.py
|
scop/pymarkdown
|
562ba8f7857d99ba09e86e42de5a37ec6d9b2c30
|
[
"MIT"
] | null | null | null |
"""
Module to provide tests related to the MD004 rule.
"""
from test.markdown_scanner import MarkdownScanner
import pytest
@pytest.mark.rules
def test_md004_bad_configuration_style():
    """
    Test to make sure that supplying an invalid value for the
    plugins.md004.style configuration item is rejected with a
    BadPluginError under --strict-config.
    """

    # Arrange
    scanner = MarkdownScanner()
    supplied_arguments = [
        "--set",
        "plugins.md004.style=bad",
        "--strict-config",
        "scan",
        "test/resources/rules/md004/good_list_asterisk_single_level.md",
    ]

    expected_return_code = 1
    expected_output = ""
    expected_error = (
        "BadPluginError encountered while configuring plugins:\n"
        + "The value for property 'plugins.md004.style' is not valid: Allowable values: ['consistent', 'asterisk', 'plus', 'dash', 'sublist']"
    )

    # Act
    execute_results = scanner.invoke_main(arguments=supplied_arguments)

    # Assert
    execute_results.assert_results(
        expected_output, expected_error, expected_return_code
    )
@pytest.mark.rules
def test_md004_good_asterisk_single_level():
    """
    Test to make sure that scanning a single level list that uses asterisk
    markers, with the style explicitly set to 'asterisk', reports no issues.
    """

    # Arrange
    scanner = MarkdownScanner()
    supplied_arguments = [
        "--set",
        "plugins.md004.style=asterisk",
        "scan",
        "test/resources/rules/md004/good_list_asterisk_single_level.md",
    ]

    expected_return_code = 0
    expected_output = ""
    expected_error = ""

    # Act
    execute_results = scanner.invoke_main(arguments=supplied_arguments)

    # Assert
    execute_results.assert_results(
        expected_output, expected_error, expected_return_code
    )
@pytest.mark.rules
def test_md004_good_asterisk_single_level_consistent():
    """
    Test to make sure that scanning a single level list that uses asterisk
    markers, with the default 'consistent' style, reports no issues.
    """

    # Arrange
    scanner = MarkdownScanner()
    supplied_arguments = [
        "scan",
        "test/resources/rules/md004/good_list_asterisk_single_level.md",
    ]

    expected_return_code = 0
    expected_output = ""
    expected_error = ""

    # Act
    execute_results = scanner.invoke_main(arguments=supplied_arguments)

    # Assert
    execute_results.assert_results(
        expected_output, expected_error, expected_return_code
    )
@pytest.mark.rules
def test_md004_bad_asterisk_dash_single_level():
    """
    Test to make sure that scanning a single level list that uses dash
    markers, with the style set to 'asterisk', reports an MD004 violation.
    """

    # Arrange
    scanner = MarkdownScanner()
    supplied_arguments = [
        "--set",
        "plugins.md004.style=asterisk",
        "scan",
        "test/resources/rules/md004/good_list_dash_single_level.md",
    ]

    expected_return_code = 1
    expected_output = (
        "test/resources/rules/md004/good_list_dash_single_level.md:1:1: "
        + "MD004: Inconsistent Unordered List Start style "
        + "[Expected: asterisk; Actual: dash] (ul-style)"
    )
    expected_error = ""

    # Act
    execute_results = scanner.invoke_main(arguments=supplied_arguments)

    # Assert
    execute_results.assert_results(
        expected_output, expected_error, expected_return_code
    )
@pytest.mark.rules
def test_md004_bad_asterisk_plus_single_level():
    """
    Test to make sure that scanning a single level list that uses plus
    markers, with the style set to 'asterisk', reports an MD004 violation.
    """

    # Arrange
    scanner = MarkdownScanner()
    supplied_arguments = [
        "--set",
        "plugins.md004.style=asterisk",
        "scan",
        "test/resources/rules/md004/good_list_plus_single_level.md",
    ]

    expected_return_code = 1
    expected_output = (
        "test/resources/rules/md004/good_list_plus_single_level.md:1:1: "
        + "MD004: Inconsistent Unordered List Start style "
        + "[Expected: asterisk; Actual: plus] (ul-style)"
    )
    expected_error = ""

    # Act
    execute_results = scanner.invoke_main(arguments=supplied_arguments)

    # Assert
    execute_results.assert_results(
        expected_output, expected_error, expected_return_code
    )
@pytest.mark.rules
def test_md004_good_dash_single_level():
    """
    Test to make sure that scanning a single level list that uses dash
    markers, with the style explicitly set to 'dash', reports no issues.
    """

    # Arrange
    scanner = MarkdownScanner()
    supplied_arguments = [
        "--set",
        "plugins.md004.style=dash",
        "scan",
        "test/resources/rules/md004/good_list_dash_single_level.md",
    ]

    expected_return_code = 0
    expected_output = ""
    expected_error = ""

    # Act
    execute_results = scanner.invoke_main(arguments=supplied_arguments)

    # Assert
    execute_results.assert_results(
        expected_output, expected_error, expected_return_code
    )
@pytest.mark.rules
def test_md004_good_dash_single_level_consistent():
    """
    Test to make sure that scanning a single level list that uses dash
    markers, with the default 'consistent' style, reports no issues.
    """

    # Arrange
    scanner = MarkdownScanner()
    supplied_arguments = [
        "scan",
        "test/resources/rules/md004/good_list_dash_single_level.md",
    ]

    expected_return_code = 0
    expected_output = ""
    expected_error = ""

    # Act
    execute_results = scanner.invoke_main(arguments=supplied_arguments)

    # Assert
    execute_results.assert_results(
        expected_output, expected_error, expected_return_code
    )
@pytest.mark.rules
def test_md004_bad_dash_asterisk_single_level():
    """
    Test to make sure that scanning a single level list that uses asterisk
    markers, with the style set to 'dash', reports an MD004 violation.
    """

    # Arrange
    scanner = MarkdownScanner()
    supplied_arguments = [
        "--set",
        "plugins.md004.style=dash",
        "scan",
        "test/resources/rules/md004/good_list_asterisk_single_level.md",
    ]

    expected_return_code = 1
    expected_output = (
        "test/resources/rules/md004/good_list_asterisk_single_level.md:1:1: "
        + "MD004: Inconsistent Unordered List Start style "
        + "[Expected: dash; Actual: asterisk] (ul-style)"
    )
    expected_error = ""

    # Act
    execute_results = scanner.invoke_main(arguments=supplied_arguments)

    # Assert
    execute_results.assert_results(
        expected_output, expected_error, expected_return_code
    )
@pytest.mark.rules
def test_md004_bad_dash_plus_single_level():
    """
    Test to make sure that scanning a single level list that uses plus
    markers, with the style set to 'dash', reports an MD004 violation.
    """

    # Arrange
    scanner = MarkdownScanner()
    supplied_arguments = [
        "--set",
        "plugins.md004.style=dash",
        "scan",
        "test/resources/rules/md004/good_list_plus_single_level.md",
    ]

    expected_return_code = 1
    expected_output = (
        "test/resources/rules/md004/good_list_plus_single_level.md:1:1: "
        + "MD004: Inconsistent Unordered List Start style "
        + "[Expected: dash; Actual: plus] (ul-style)"
    )
    expected_error = ""

    # Act
    execute_results = scanner.invoke_main(arguments=supplied_arguments)

    # Assert
    execute_results.assert_results(
        expected_output, expected_error, expected_return_code
    )
@pytest.mark.rules
def test_md004_good_plus_single_level():
    """
    Test to make sure that scanning a single level list that uses plus
    markers, with the style explicitly set to 'plus', reports no issues.
    """

    # Arrange
    scanner = MarkdownScanner()
    supplied_arguments = [
        "--set",
        "plugins.md004.style=plus",
        "scan",
        "test/resources/rules/md004/good_list_plus_single_level.md",
    ]

    expected_return_code = 0
    expected_output = ""
    expected_error = ""

    # Act
    execute_results = scanner.invoke_main(arguments=supplied_arguments)

    # Assert
    execute_results.assert_results(
        expected_output, expected_error, expected_return_code
    )
@pytest.mark.rules
def test_md004_good_plus_single_level_consistent():
    """
    Test to make sure that scanning a single level list that uses plus
    markers, with the default 'consistent' style, reports no issues.
    """

    # Arrange
    scanner = MarkdownScanner()
    supplied_arguments = [
        "scan",
        "test/resources/rules/md004/good_list_plus_single_level.md",
    ]

    expected_return_code = 0
    expected_output = ""
    expected_error = ""

    # Act
    execute_results = scanner.invoke_main(arguments=supplied_arguments)

    # Assert
    execute_results.assert_results(
        expected_output, expected_error, expected_return_code
    )
@pytest.mark.rules
def test_md004_bad_plus_asterisk_single_level():
    """
    Test to make sure that scanning a single level list that uses asterisk
    markers, with the style set to 'plus', reports an MD004 violation.
    """

    # Arrange
    scanner = MarkdownScanner()
    supplied_arguments = [
        "--set",
        "plugins.md004.style=plus",
        "scan",
        "test/resources/rules/md004/good_list_asterisk_single_level.md",
    ]

    expected_return_code = 1
    expected_output = (
        "test/resources/rules/md004/good_list_asterisk_single_level.md:1:1: "
        + "MD004: Inconsistent Unordered List Start style "
        + "[Expected: plus; Actual: asterisk] (ul-style)"
    )
    expected_error = ""

    # Act
    execute_results = scanner.invoke_main(arguments=supplied_arguments)

    # Assert
    execute_results.assert_results(
        expected_output, expected_error, expected_return_code
    )
@pytest.mark.rules
def test_md004_bad_plus_dash_single_level():
    """
    Test to make sure that scanning a single level list that uses dash
    markers, with the style set to 'plus', reports an MD004 violation.
    """

    # Arrange
    scanner = MarkdownScanner()
    supplied_arguments = [
        "--set",
        "plugins.md004.style=plus",
        "scan",
        "test/resources/rules/md004/good_list_dash_single_level.md",
    ]

    expected_return_code = 1
    expected_output = (
        "test/resources/rules/md004/good_list_dash_single_level.md:1:1: "
        + "MD004: Inconsistent Unordered List Start style "
        + "[Expected: plus; Actual: dash] (ul-style)"
    )
    expected_error = ""

    # Act
    execute_results = scanner.invoke_main(arguments=supplied_arguments)

    # Assert
    execute_results.assert_results(
        expected_output, expected_error, expected_return_code
    )
@pytest.mark.rules
def test_md004_bad_single_level_consistent():
    """
    Test to make sure that scanning a single level list that mixes asterisk,
    plus, and dash markers, with the default 'consistent' style, reports an
    MD004 violation for each marker that differs from the first one seen.
    """

    # Arrange
    scanner = MarkdownScanner()
    supplied_arguments = [
        "--disable-rules",
        "md032",
        "scan",
        "test/resources/rules/md004/bad_list_different_single_level.md",
    ]

    expected_return_code = 1
    expected_output = (
        "test/resources/rules/md004/bad_list_different_single_level.md:2:1: "
        + "MD004: Inconsistent Unordered List Start style "
        + "[Expected: asterisk; Actual: plus] (ul-style)\n"
        + "test/resources/rules/md004/bad_list_different_single_level.md:3:1: "
        + "MD004: Inconsistent Unordered List Start style "
        + "[Expected: asterisk; Actual: dash] (ul-style)"
    )
    expected_error = ""

    # Act
    execute_results = scanner.invoke_main(arguments=supplied_arguments)

    # Assert
    execute_results.assert_results(
        expected_output, expected_error, expected_return_code
    )
@pytest.mark.rules
def test_md004_good_multi_level_sublevel():
    """
    Test to make sure that scanning a multiple level list whose markers are
    consistent within each level, with the style set to 'sublist', reports
    no issues.
    """

    # Arrange
    scanner = MarkdownScanner()
    supplied_arguments = [
        "--set",
        "plugins.md004.style=sublist",
        "scan",
        "test/resources/rules/md004/good_multi_level_sublevel.md",
    ]

    expected_return_code = 0
    expected_output = ""
    expected_error = ""

    # Act
    execute_results = scanner.invoke_main(arguments=supplied_arguments)

    # Assert
    execute_results.assert_results(
        expected_output, expected_error, expected_return_code
    )
@pytest.mark.rules
def test_md004_good_multi_level_sublevel_complex():
    """
    Test to make sure that scanning a complex multiple level list whose
    markers are consistent within each level, with the style set to
    'sublist', reports no issues.
    """

    # Arrange
    scanner = MarkdownScanner()
    supplied_arguments = [
        "--disable-rules",
        "md032",
        "--set",
        "plugins.md004.style=sublist",
        "scan",
        "test/resources/rules/md004/good_multi_level_complex.md",
    ]

    expected_return_code = 0
    expected_output = ""
    expected_error = ""

    # Act
    execute_results = scanner.invoke_main(arguments=supplied_arguments)

    # Assert
    execute_results.assert_results(
        expected_output, expected_error, expected_return_code
    )
@pytest.mark.rules
def test_md004_bad_multi_level_sublevel_complex():
    """
    Test to make sure that scanning a complex multiple level list whose
    markers are NOT consistent within one sublevel, with the style set to
    'sublist', reports an MD004 violation at the inconsistent marker.
    """

    # Arrange
    scanner = MarkdownScanner()
    supplied_arguments = [
        "--disable-rules",
        "md032",
        "--set",
        "plugins.md004.style=sublist",
        "scan",
        "test/resources/rules/md004/bad_multi_level_complex.md",
    ]

    expected_return_code = 1
    expected_output = (
        "test/resources/rules/md004/bad_multi_level_complex.md:6:6: "
        + "MD004: Inconsistent Unordered List Start style "
        + "[Expected: dash; Actual: plus] (ul-style)"
    )
    expected_error = ""

    # Act
    execute_results = scanner.invoke_main(arguments=supplied_arguments)

    # Assert
    execute_results.assert_results(
        expected_output, expected_error, expected_return_code
    )
@pytest.mark.rules
def test_md004_bad_multi_level_sublevel_complex_asterisk():
    """
    Test to make sure that scanning a complex multiple level list with mixed
    markers, with the style set to 'asterisk', reports an MD004 violation
    for every non-asterisk marker.
    """

    # Arrange
    scanner = MarkdownScanner()
    supplied_arguments = [
        "--set",
        "plugins.md004.style=asterisk",
        "--disable-rules",
        "md032",
        "scan",
        "test/resources/rules/md004/bad_multi_level_complex.md",
    ]

    expected_return_code = 1
    expected_output = (
        "test/resources/rules/md004/bad_multi_level_complex.md:1:1: "
        + "MD004: Inconsistent Unordered List Start style "
        + "[Expected: asterisk; Actual: plus] (ul-style)\n"
        + "test/resources/rules/md004/bad_multi_level_complex.md:3:6: "
        + "MD004: Inconsistent Unordered List Start style "
        + "[Expected: asterisk; Actual: dash] (ul-style)\n"
        + "test/resources/rules/md004/bad_multi_level_complex.md:6:6: "
        + "MD004: Inconsistent Unordered List Start style "
        + "[Expected: asterisk; Actual: plus] (ul-style)"
    )
    expected_error = ""

    # Act
    execute_results = scanner.invoke_main(arguments=supplied_arguments)

    # Assert
    execute_results.assert_results(
        expected_output, expected_error, expected_return_code
    )
@pytest.mark.rules
def test_md004_bad_dual_lists_with_separator():
    """
    Test to make sure that scanning two separated lists at the same level
    that use different markers, with the style set to 'sublist', reports an
    MD004 violation at the start of the second list.
    """

    # Arrange
    scanner = MarkdownScanner()
    supplied_arguments = [
        "--set",
        "plugins.md004.style=sublist",
        "scan",
        "test/resources/rules/md004/bad_dual_lists_with_separator.md",
    ]

    expected_return_code = 1
    expected_output = (
        "test/resources/rules/md004/bad_dual_lists_with_separator.md:6:1: "
        + "MD004: Inconsistent Unordered List Start style "
        + "[Expected: plus; Actual: asterisk] (ul-style)"
    )
    expected_error = ""

    # Act
    execute_results = scanner.invoke_main(arguments=supplied_arguments)

    # Assert
    execute_results.assert_results(
        expected_output, expected_error, expected_return_code
    )
| 28.12
| 142
| 0.67926
| 2,077
| 17,575
| 5.509389
| 0.048628
| 0.057939
| 0.080224
| 0.102508
| 0.972909
| 0.97221
| 0.970288
| 0.967054
| 0.967054
| 0.963733
| 0
| 0.026405
| 0.230725
| 17,575
| 624
| 143
| 28.165064
| 0.81997
| 0.215135
| 0
| 0.752089
| 0
| 0.002786
| 0.294825
| 0.173665
| 0
| 0
| 0
| 0
| 0.052925
| 1
| 0.052925
| false
| 0
| 0.005571
| 0
| 0.058496
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
fe0c38400a3886e348c04d964796a23182a67c2f
| 18,624
|
py
|
Python
|
src/classification_aux/dataset.py
|
yellowdolphin/SIIM-COVID19-Detection
|
31e8653b467ac35a8b1d92330ad5f15a12622676
|
[
"MIT"
] | null | null | null |
src/classification_aux/dataset.py
|
yellowdolphin/SIIM-COVID19-Detection
|
31e8653b467ac35a8b1d92330ad5f15a12622676
|
[
"MIT"
] | null | null | null |
src/classification_aux/dataset.py
|
yellowdolphin/SIIM-COVID19-Detection
|
31e8653b467ac35a8b1d92330ad5f15a12622676
|
[
"MIT"
] | null | null | null |
import os
import numpy as np
import cv2
import torch
from torch.utils.data import Dataset
import albumentations as albu
from albumentations.pytorch import ToTensorV2
from timm.data import IMAGENET_DEFAULT_MEAN, IMAGENET_DEFAULT_STD
# Disable OpenCV's internal threading and OpenCL so it does not contend with
# multi-worker data loading (presumably to avoid DataLoader deadlocks -- confirm).
cv2.setNumThreads(0)
cv2.ocl.setUseOpenCL(False)

# Label columns of the NIH ChestX-ray14 external dataset.
chest14_classes = [
    'Atelectasis',
    'Cardiomegaly',
    'Consolidation',
    'Edema',
    'Effusion',
    'Emphysema',
    'Fibrosis',
    'Hernia',
    'Infiltration',
    'Mass',
    'No Finding',
    'Nodule',
    'Pleural_Thickening',
    'Pneumonia',
    'Pneumothorax',
]

# Label columns of the CheXpert external dataset.
chexpert_classes = [
    'Enlarged Cardiomediastinum',
    'Cardiomegaly',
    'Lung Opacity',
    'Lung Lesion',
    'Edema',
    'Consolidation',
    'Pneumonia',
    'Atelectasis',
    'Pneumothorax',
    'Pleural Effusion',
    'Pleural Other',
    'Fracture',
    'Support Devices'
]

# Binary label columns of the RSNA Pneumonia external dataset.
rsnapneumonia_classes = ['normal', 'pneumonia']

# The four SIIM COVID-19 study-level target classes (dataframe column names).
classes = [
    'Negative for Pneumonia',
    'Typical Appearance',
    'Indeterminate Appearance',
    'Atypical Appearance'
]

# Maps each study-level class name to its submission-file label token.
study_submission_classes = {
    'Negative for Pneumonia': 'negative',
    'Typical Appearance': 'typical',
    'Indeterminate Appearance': 'indeterminate',
    'Atypical Appearance': 'atypical'
}
class ExternalDataset(Dataset):
    """Classification dataset for external chest X-ray data.

    Yields ``(image, label)`` pairs, where ``label`` is a float tensor built
    from the *classes* columns of *df*.
    """

    def __init__(self, df, images_dir, image_size, mode, classes):
        # df: dataframe with an 'image_path' column plus one column per class.
        # images_dir: root the image paths are relative to ('.' when None).
        # image_size: square side length for the transform pipeline.
        # mode: 'train' enables heavy augmentation; 'valid' only resizes/normalizes.
        # classes: list of df column names used as the multi-label target.
        super(ExternalDataset,self).__init__()
        self.df = df.reset_index(drop=True)
        self.images_dir = images_dir or '.'
        self.image_size = image_size
        assert mode in ['train', 'valid']
        self.mode = mode
        self.classes = classes
        if images_dir is not None: print("images_dir:", images_dir)
        if self.mode == 'train':
            # Shuffle once up front; the augmentations below supply the
            # per-sample randomness during training.
            self.df = self.df.sample(frac=1).reset_index(drop=True)
            self.transform = albu.Compose([
                albu.RandomResizedCrop(height=self.image_size, width=self.image_size, scale=(0.25, 1.0), ratio=(0.75, 1.3333333333333333), interpolation=1, p=1.0),
                albu.ShiftScaleRotate(shift_limit=0.05, scale_limit=0.1, rotate_limit=30, interpolation=1, border_mode=0, value=0, p=0.25),
                albu.HorizontalFlip(p=0.5),
                albu.VerticalFlip(p=0.5),
                albu.OneOf([
                    albu.MotionBlur(p=.2),
                    albu.MedianBlur(blur_limit=3, p=0.1),
                    albu.Blur(blur_limit=3, p=0.1),
                ], p=0.25),
                albu.OneOf([
                    albu.CLAHE(clip_limit=2),
                    albu.IAASharpen(),
                    albu.IAAEmboss(),
                    albu.RandomBrightnessContrast(),
                ], p=0.25),
                albu.Cutout(num_holes=8, max_h_size=32, max_w_size=32, fill_value=0, p=0.25),
                albu.Normalize(mean=IMAGENET_DEFAULT_MEAN, std=IMAGENET_DEFAULT_STD),
                ToTensorV2(),
            ])
        else:
            # Validation: deterministic resize + normalize only.
            self.transform = albu.Compose([
                albu.Resize(self.image_size, self.image_size),
                albu.Normalize(mean=IMAGENET_DEFAULT_MEAN, std=IMAGENET_DEFAULT_STD),
                ToTensorV2(),
            ])

    def __len__(self):
        return len(self.df)

    def __getitem__(self, index):
        img_path = os.path.join(self.images_dir, self.df.loc[index, 'image_path'])
        assert os.path.exists(img_path), f'{img_path} not found'
        # Read grayscale and replicate to 3 channels (ImageNet-style input).
        image = cv2.imread(img_path, cv2.IMREAD_GRAYSCALE)
        image = np.stack([image, image, image], axis=-1)
        image = self.transform(image=image)['image']
        label = torch.FloatTensor(self.df.loc[index, self.classes])
        return image, label
class RSNAPneuAuxDataset(Dataset):
    """RSNA Pneumonia dataset with an auxiliary segmentation target.

    Yields ``(image, mask, label)`` where ``mask`` is a binary opacity mask
    rasterized from the bounding boxes stored in the 'label' column.
    """

    def __init__(self, df, images_dir, image_size, mode):
        # df: dataframe with 'image_path', 'hasbox', 'label' and the
        #     rsnapneumonia_classes columns.
        # images_dir: root the image paths are relative to ('.' when None).
        # image_size: square side length for the transform pipeline.
        # mode: 'train' enables heavy augmentation; 'valid' only resizes/normalizes.
        super(RSNAPneuAuxDataset, self).__init__()
        self.df = df.reset_index(drop=True)
        self.images_dir = images_dir or '.'
        self.image_size = image_size
        assert mode in ['train', 'valid']
        self.mode = mode
        if self.mode == 'train':
            # Shuffle once up front; augmentations supply per-sample randomness.
            self.df = self.df.sample(frac=1).reset_index(drop=True)
            self.transform = albu.Compose([
                albu.RandomResizedCrop(height=self.image_size, width=self.image_size, scale=(0.25, 1.0), ratio=(0.75, 1.3333333333333333), interpolation=1, p=1.0),
                albu.ShiftScaleRotate(shift_limit=0.05, scale_limit=0.1, rotate_limit=30, interpolation=1, border_mode=0, value=0, p=0.25),
                albu.HorizontalFlip(p=0.5),
                albu.VerticalFlip(p=0.5),
                albu.OneOf([
                    albu.MotionBlur(p=.2),
                    albu.MedianBlur(blur_limit=3, p=0.1),
                    albu.Blur(blur_limit=3, p=0.1),
                ], p=0.25),
                albu.OneOf([
                    albu.CLAHE(clip_limit=2),
                    albu.IAASharpen(),
                    albu.IAAEmboss(),
                    albu.RandomBrightnessContrast(),
                ], p=0.25),
                albu.Cutout(num_holes=8, max_h_size=32, max_w_size=32, fill_value=0, p=0.25),
                albu.Normalize(mean=IMAGENET_DEFAULT_MEAN, std=IMAGENET_DEFAULT_STD),
                ToTensorV2(),
            ])
        else:
            # Validation: deterministic resize + normalize only.
            self.transform = albu.Compose([
                albu.Resize(self.image_size, self.image_size),
                albu.Normalize(mean=IMAGENET_DEFAULT_MEAN, std=IMAGENET_DEFAULT_STD),
                ToTensorV2(),
            ])

    def __len__(self):
        return len(self.df)

    def __getitem__(self, index):
        img_path = os.path.join(self.images_dir, self.df.loc[index, 'image_path'])
        assert os.path.exists(img_path), f'{img_path} not found'
        # Read grayscale and replicate to 3 channels (ImageNet-style input).
        image = cv2.imread(img_path, cv2.IMREAD_GRAYSCALE)
        image = np.stack([image, image, image], axis=-1)
        label = torch.FloatTensor(self.df.loc[index, rsnapneumonia_classes])
        height, width = image.shape[0:2]
        mask = np.zeros((height, width), dtype=np.uint8)
        if self.df.loc[index, 'hasbox']:
            # 'label' holds 6 whitespace-separated tokens per box: the class
            # name, one extra field (presumably a confidence -- confirm),
            # then x1 y1 x2 y2.
            arr = self.df.loc[index, 'label'].split(' ')
            assert len(arr) >= 6
            arr = np.array(arr).reshape(-1, 6)
            class_ids, xyxys = arr[:, 0], arr[:, 2:].copy()
            assert (class_ids == 'opacity').all()
            xyxys = xyxys.astype(float).astype(int)
            # Clip the boxes to the image bounds, then paint each non-empty
            # box into the binary mask.
            xyxys[:, [0, 2]] = xyxys[:, [0, 2]].clip(min=0, max=width)
            xyxys[:, [1, 3]] = xyxys[:, [1, 3]].clip(min=0, max=height)
            for x1, y1, x2, y2 in xyxys:
                if x1 >= x2 or y1 >= y2: continue
                mask[y1:y2,x1:x2] = 1
        # Augment image and mask together so they stay spatially aligned.
        transformed = self.transform(image=image, mask=mask)
        image = transformed["image"]
        mask = transformed["mask"]
        mask = mask.float()
        mask = torch.unsqueeze(mask, 0)
        return image, mask, label
class SiimCovidAuxDataset(Dataset):
    """SIIM COVID-19 study-level dataset with an auxiliary segmentation target.

    Yields ``(image, mask, label)`` in train mode and additionally the
    imageid in valid mode.
    """

    def __init__(self, df, images_dir, images_suffix, image_size, mode):
        # df: dataframe with 'imageid', 'hasbox', 'label' and the module-level
        #     `classes` columns.
        # images_dir: root directory of the images ('.' when None).
        # images_suffix: file extension (without dot) appended to each imageid.
        # image_size: square side length for the transform pipeline.
        # mode: 'train' enables heavy augmentation; 'valid' only resizes/normalizes.
        super(SiimCovidAuxDataset, self).__init__()
        self.df = df.reset_index(drop=True)
        self.images_dir = images_dir or '.'
        self.images_suffix = images_suffix
        self.image_size = image_size
        assert mode in ['train', 'valid']
        self.mode = mode
        if self.mode == 'train':
            # Shuffle once up front; augmentations supply per-sample randomness.
            self.df = self.df.sample(frac=1).reset_index(drop=True)
            self.transform = albu.Compose([
                albu.RandomResizedCrop(height=self.image_size, width=self.image_size, scale=(0.25, 1.0), ratio=(0.75, 1.3333333333333333), interpolation=1, p=1.0),
                albu.ShiftScaleRotate(shift_limit=0.05, scale_limit=0.1, rotate_limit=30, interpolation=1, border_mode=0, value=0, p=0.25),
                albu.HorizontalFlip(p=0.5),
                albu.VerticalFlip(p=0.5),
                albu.OneOf([
                    albu.MotionBlur(p=.2),
                    albu.MedianBlur(blur_limit=3, p=0.1),
                    albu.Blur(blur_limit=3, p=0.1),
                ], p=0.25),
                albu.OneOf([
                    albu.CLAHE(clip_limit=2),
                    albu.IAASharpen(),
                    albu.IAAEmboss(),
                    albu.RandomBrightnessContrast(),
                ], p=0.25),
                albu.Cutout(num_holes=8, max_h_size=32, max_w_size=32, fill_value=0, p=0.25),
                albu.Normalize(mean=IMAGENET_DEFAULT_MEAN, std=IMAGENET_DEFAULT_STD),
                ToTensorV2(),
            ])
        else:
            # Validation: deterministic resize + normalize only.
            self.transform = albu.Compose([
                albu.Resize(self.image_size, self.image_size),
                albu.Normalize(mean=IMAGENET_DEFAULT_MEAN, std=IMAGENET_DEFAULT_STD),
                ToTensorV2(),
            ])

    def __len__(self):
        return len(self.df)

    def __getitem__(self, index):
        img_path = os.path.join(self.images_dir, self.df.loc[index, 'imageid'] + '.' + self.images_suffix)
        assert os.path.exists(img_path), f'{img_path} not found'
        # Read grayscale and replicate to 3 channels (ImageNet-style input).
        image = cv2.imread(img_path, cv2.IMREAD_GRAYSCALE)
        image = np.stack([image, image, image], axis=-1)
        label = torch.FloatTensor(self.df.loc[index, classes])
        height, width = image.shape[0:2]
        mask = np.zeros((height, width), dtype=np.uint8)
        if self.df.loc[index, 'hasbox']:
            # 'label' holds 6 whitespace-separated tokens per box: the class
            # name, one extra field (presumably a confidence -- confirm),
            # then x1 y1 x2 y2.
            arr = self.df.loc[index, 'label'].split(' ')
            assert len(arr) >= 6
            arr = np.array(arr).reshape(-1, 6)
            class_ids, xyxys = arr[:, 0], arr[:, 2:].copy()
            assert (class_ids == 'opacity').all()
            xyxys = xyxys.astype(float).astype(int)
            # Clip the boxes to the image bounds, then paint each non-empty
            # box into the binary mask.
            xyxys[:, [0, 2]] = xyxys[:, [0, 2]].clip(min=0, max=width)
            xyxys[:, [1, 3]] = xyxys[:, [1, 3]].clip(min=0, max=height)
            for x1, y1, x2, y2 in xyxys:
                if x1 >= x2 or y1 >= y2: continue
                mask[y1:y2,x1:x2] = 1
        # Augment image and mask together so they stay spatially aligned.
        transformed = self.transform(image=image, mask=mask)
        image = transformed["image"]
        mask = transformed["mask"]
        mask = mask.float()
        mask = torch.unsqueeze(mask, 0)
        if self.mode == 'train':
            return image, mask, label
        else:
            # Validation also returns the imageid for per-image bookkeeping.
            return image, mask, label, self.df.loc[index, 'imageid']
class SiimCovidCLSTestDataset(Dataset):
    """Test-time dataset yielding the full image plus a center (or lung) crop.

    Each item is ``(imageid, image, image_center_crop)``, with the original
    ``height`` and ``width`` appended when ``seg`` is True.
    """

    def __init__(self, df, images_dir, image_size, seg=False, lung_crop=False):
        # df: dataframe with an 'imageid' column; images are <images_dir>/<imageid>.png.
        # seg: when True, also return original height/width (presumably so a
        #      predicted mask can be resized back -- confirm).
        # lung_crop: crop with precomputed YOLOv5 lung boxes instead of a
        #      fixed central crop.
        super(SiimCovidCLSTestDataset, self).__init__()
        self.df = df.reset_index(drop=True)
        self.images_dir = images_dir
        self.image_size = image_size
        self.seg = seg
        self.lung_crop = lung_crop
        if lung_crop:
            # Precomputed lung bounding boxes keyed by imageid.
            lung_pred_path = '../detection_lung_yolov5/predictions/yolov5_lungcrop_test_pred_fold3.pth'
            print('Load lung prediction from {}'.format(lung_pred_path))
            self.lung_crop_dict = torch.load(lung_pred_path)
        # Test time: deterministic resize + normalize only.
        self.transform = albu.Compose([
            albu.Resize(self.image_size, self.image_size),
            albu.Normalize(mean=IMAGENET_DEFAULT_MEAN, std=IMAGENET_DEFAULT_STD),
            ToTensorV2(),
        ])

    def __len__(self):
        return len(self.df)

    def __getitem__(self, index):
        img_path = '{}/{}.png'.format(self.images_dir, self.df.loc[index, 'imageid'])
        # Read grayscale and replicate to 3 channels (ImageNet-style input).
        image = cv2.imread(img_path, cv2.IMREAD_GRAYSCALE)
        image = np.stack([image, image, image], axis=-1)
        height, width = image.shape[0:2]
        if self.lung_crop:
            x1, y1, x2, y2 = self.lung_crop_dict[self.df.loc[index, 'imageid']]
            image_center_crop = image[y1:y2, x1:x2, :]
        else:
            # Fixed central crop covering 80% of the shorter side.
            new_size = int(0.8*min(height, width))
            x1 = (width - new_size)//2
            y1 = (height - new_size)//2
            image_center_crop = image[y1:y1+new_size, x1:x1+new_size, :]
        image = self.transform(image=image)['image']
        image_center_crop = self.transform(image=image_center_crop)['image']
        if self.seg:
            return self.df.loc[index, 'imageid'], image, image_center_crop, height, width
        else:
            return self.df.loc[index, 'imageid'], image, image_center_crop
class SiimCovidCLSExtTestDataset(Dataset):
    """Test-time dataset for external images addressed by full 'image_path'.

    Returns ``(img_file, image, height, width)`` when ``seg`` is True, and
    ``(img_path, image, image_center_crop)`` otherwise.
    NOTE(review): the seg branch returns the basename while the non-seg
    branch returns the full path -- confirm this asymmetry is intentional.
    """

    def __init__(self, df, image_size, seg=False):
        # df: dataframe with an 'image_path' column of absolute/relative paths.
        # image_size: square side length for the transform pipeline.
        # seg: when True, skip the center crop and return original dimensions.
        super(SiimCovidCLSExtTestDataset, self).__init__()
        self.df = df.reset_index(drop=True)
        self.image_size = image_size
        self.seg = seg
        # Test time: deterministic resize + normalize only.
        self.transform = albu.Compose([
            albu.Resize(self.image_size, self.image_size),
            albu.Normalize(mean=IMAGENET_DEFAULT_MEAN, std=IMAGENET_DEFAULT_STD),
            ToTensorV2(),
        ])

    def __len__(self):
        return len(self.df)

    def __getitem__(self, index):
        img_path = self.df.loc[index, 'image_path']
        img_file = img_path.split('/')[-1]
        # Read grayscale and replicate to 3 channels (ImageNet-style input).
        image = cv2.imread(img_path, cv2.IMREAD_GRAYSCALE)
        image = np.stack([image, image, image], axis=-1)
        height, width = image.shape[0:2]
        if not self.seg:
            # Fixed central crop covering 80% of the shorter side.
            new_size = int(0.8*min(height, width))
            x1 = (width - new_size)//2
            y1 = (height - new_size)//2
            image_center_crop = image[y1:y1+new_size, x1:x1+new_size, :]
        image = self.transform(image=image)['image']
        if not self.seg:
            image_center_crop = self.transform(image=image_center_crop)['image']
        if self.seg:
            return img_file, image, height, width
        else:
            return img_path, image, image_center_crop
class SiimCovidAuxPseudoDataset(Dataset):
    """SIIM COVID-19 auxiliary dataset mixing labeled rows and pseudo-labeled rows.

    Rows flagged 'pseudo' load a precomputed mask image from 'mask_path';
    other rows rasterize their bounding boxes into a binary mask, as in
    SiimCovidAuxDataset.  Yields ``(image, mask, label)`` in train mode, plus
    the imageid in valid mode.
    """

    def __init__(self, df, images_dir, image_size, mode):
        # df: dataframe with 'pseudo', 'imageid'/'image_path', 'mask_path',
        #     'hasbox', 'label' and the module-level `classes` columns.
        # images_dir: directory holding <imageid>.png for non-pseudo rows.
        # image_size: square side length for the transform pipeline.
        # mode: 'train' enables heavy augmentation; 'valid' only resizes/normalizes.
        super(SiimCovidAuxPseudoDataset, self).__init__()
        self.df = df.reset_index(drop=True)
        self.images_dir = images_dir
        self.image_size = image_size
        assert mode in ['train', 'valid']
        self.mode = mode
        if self.mode == 'train':
            # Shuffle once up front; augmentations supply per-sample randomness.
            self.df = self.df.sample(frac=1).reset_index(drop=True)
            self.transform = albu.Compose([
                albu.RandomResizedCrop(height=self.image_size, width=self.image_size, scale=(0.25, 1.0), ratio=(0.75, 1.3333333333333333), interpolation=1, p=1.0),
                albu.ShiftScaleRotate(shift_limit=0.05, scale_limit=0.1, rotate_limit=30, interpolation=1, border_mode=0, value=0, p=0.25),
                albu.HorizontalFlip(p=0.5),
                albu.VerticalFlip(p=0.5),
                albu.OneOf([
                    albu.MotionBlur(p=.2),
                    albu.MedianBlur(blur_limit=3, p=0.1),
                    albu.Blur(blur_limit=3, p=0.1),
                ], p=0.25),
                albu.OneOf([
                    albu.CLAHE(clip_limit=2),
                    albu.IAASharpen(),
                    albu.IAAEmboss(),
                    albu.RandomBrightnessContrast(),
                ], p=0.25),
                albu.Cutout(num_holes=8, max_h_size=32, max_w_size=32, fill_value=0, p=0.25),
                albu.Normalize(mean=IMAGENET_DEFAULT_MEAN, std=IMAGENET_DEFAULT_STD),
                ToTensorV2(),
            ])
        else:
            # Validation: deterministic resize + normalize only.
            self.transform = albu.Compose([
                albu.Resize(self.image_size, self.image_size),
                albu.Normalize(mean=IMAGENET_DEFAULT_MEAN, std=IMAGENET_DEFAULT_STD),
                ToTensorV2(),
            ])

    def __len__(self):
        return len(self.df)

    def __getitem__(self, index):
        # Pseudo-labeled rows carry a full path; labeled rows are addressed
        # by imageid inside images_dir.
        if self.df.loc[index, 'pseudo'] == True:
            img_path = self.df.loc[index, 'image_path']
        else:
            img_path = '{}/{}.png'.format(self.images_dir, self.df.loc[index, 'imageid'])
        # Read grayscale and replicate to 3 channels (ImageNet-style input).
        image = cv2.imread(img_path, cv2.IMREAD_GRAYSCALE)
        image = np.stack([image, image, image], axis=-1)
        label = torch.FloatTensor(self.df.loc[index, classes])
        height, width = image.shape[0:2]
        if self.df.loc[index, 'pseudo'] == True:
            # Pseudo rows: load the precomputed mask image and rescale its
            # 0..255 pixel values to 0..1 after augmentation.
            mask = cv2.imread(self.df.loc[index, 'mask_path'], cv2.IMREAD_GRAYSCALE)
            transformed = self.transform(image=image, mask=mask)
            image = transformed["image"]
            mask = transformed["mask"]
            mask = mask.float()
            mask /= 255.0
        else:
            # Labeled rows: rasterize the bounding boxes in 'label' into a
            # binary mask.  Each box is 6 whitespace-separated tokens: class
            # name, one extra field (presumably a confidence -- confirm),
            # then x1 y1 x2 y2.
            mask = np.zeros((height, width), dtype=np.uint8)
            if self.df.loc[index, 'hasbox']:
                arr = self.df.loc[index, 'label'].split(' ')
                nums = len(arr) // 6
                assert nums > 0
                for i in range(nums):
                    class_name = arr[6*i]
                    assert class_name == 'opacity'
                    x1 = int(float(arr[6*i+2]))
                    y1 = int(float(arr[6*i+3]))
                    x2 = int(float(arr[6*i+4]))
                    y2= int(float(arr[6*i+5]))
                    # Clamp the box to the image, skipping degenerate boxes.
                    x1 = min(max(0,x1),width)
                    x2 = min(max(0,x2),width)
                    y1 = min(max(0,y1),height)
                    y2 = min(max(0,y2),height)
                    if x1 >= x2 or y1 >= y2:
                        continue
                    mask[y1:y2,x1:x2] = np.ones((y2-y1, x2-x1), dtype=np.uint8)
            # Augment image and mask together so they stay spatially aligned.
            transformed = self.transform(image=image, mask=mask)
            image = transformed["image"]
            mask = transformed["mask"]
            mask = mask.float()
        mask = torch.unsqueeze(mask, 0)
        if self.mode == 'train':
            return image, mask, label
        else:
            # Validation also returns the imageid for per-image bookkeeping.
            return image, mask, label, self.df.loc[index, 'imageid']
class SiimCovidCLSDemoDataset(Dataset):
    """Demo classification dataset: yields the full image plus a crop of the
    predicted lung region for each study."""

    def __init__(self, df, lung_pred_path, images_dir, image_size):
        super(SiimCovidCLSDemoDataset, self).__init__()
        self.df = df.reset_index(drop=True)
        self.images_dir = images_dir
        self.image_size = image_size
        # Mapping imageid -> (x1, y1, x2, y2) predicted lung bounding box.
        self.lung_pred_dict = torch.load(lung_pred_path)
        self.transform = albu.Compose([
            albu.Resize(self.image_size, self.image_size),
            albu.Normalize(mean=IMAGENET_DEFAULT_MEAN, std=IMAGENET_DEFAULT_STD),
            ToTensorV2(),
        ])

    def __len__(self):
        return len(self.df)

    def __getitem__(self, index):
        image_id = self.df.loc[index, 'imageid']
        img_path = '{}/{}.png'.format(self.images_dir, image_id)
        gray = cv2.imread(img_path, cv2.IMREAD_GRAYSCALE)
        # Replicate the single channel so pretrained RGB backbones apply.
        image = np.stack([gray, gray, gray], axis=-1)
        # Crop to the predicted lung box before transforming.
        x1, y1, x2, y2 = self.lung_pred_dict[image_id]
        lung_crop = image[y1:y2, x1:x2, :]
        image = self.transform(image=image)['image']
        lung_crop = self.transform(image=lung_crop)['image']
        return image_id, image, lung_crop
| 39.044025
| 163
| 0.565936
| 2,275
| 18,624
| 4.450549
| 0.094505
| 0.033778
| 0.037235
| 0.038716
| 0.808395
| 0.796346
| 0.789926
| 0.779062
| 0.772148
| 0.764642
| 0
| 0.036365
| 0.3016
| 18,624
| 476
| 164
| 39.12605
| 0.742062
| 0
| 0
| 0.756098
| 0
| 0
| 0.057023
| 0.003866
| 0
| 0
| 0
| 0
| 0.031707
| 1
| 0.05122
| false
| 0
| 0.019512
| 0.017073
| 0.131707
| 0.004878
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
a3ca8cce4facf30d7b0b7158f737d50ea476a1b0
| 23,641
|
py
|
Python
|
script/data_handler/HousePricesTransformer.py
|
demetoir/MLtools
|
8c42fcd4cc71728333d9c116ade639fe57d50d37
|
[
"MIT"
] | null | null | null |
script/data_handler/HousePricesTransformer.py
|
demetoir/MLtools
|
8c42fcd4cc71728333d9c116ade639fe57d50d37
|
[
"MIT"
] | null | null | null |
script/data_handler/HousePricesTransformer.py
|
demetoir/MLtools
|
8c42fcd4cc71728333d9c116ade639fe57d50d37
|
[
"MIT"
] | null | null | null |
import numpy as np
import pandas as pd
from script.data_handler.Base.Base_df_transformer import Base_df_transformer
from script.util.numpy_utils import np_frequency_equal_bins
# Short aliases for the pandas types used throughout this module's
# transformer-hook signatures.
DF = pd.DataFrame
Series = pd.Series
def df_frequency_equal_bins(df: DF, col_key: str, n_bins: int) -> list:
    """Return ``n_bins`` bin edges for ``df[col_key]`` chosen so each bin
    holds roughly the same number of samples."""
    values = np.array(df[col_key])
    return list(np_frequency_equal_bins(values, n_bins))
def df_value_counts(df):
    """Return the per-column value counts of ``df`` as a list of Series,
    one entry per column, in column order."""
    return [df[col].value_counts() for col in df.columns]
def print_info(df, col_key, partial_df, series, Xs_keys, Ys_key):
    """Print quick exploratory diagnostics for a single column.

    Only ``col_key`` and ``partial_df`` are used; the other parameters are
    accepted so the signature matches the transformer hooks that call it.
    NOTE: ``DataFrame.info()`` prints to stdout and returns None, so the
    second ``print`` also emits a stray "None" line.
    """
    print(col_key)
    print(partial_df.info())
    print(df_value_counts(partial_df))
    print()
class HousePricesTransformer(Base_df_transformer):
    """Column-by-column feature transformer for the House Prices dataset.

    ``Base_df_transformer`` dispatches each ``col_XX_<name>`` hook with the
    full frame plus that column's partial frame/series; every hook returns
    the (possibly modified) full frame.  Three strategies are used:

    * drop columns judged uninformative,
    * bin numeric columns (frequency-equal or hand-tuned edges),
    * merge sparse categorical values into coarser groups.

    Hooks that return ``df`` unchanged deliberately keep the column as-is.
    """

    # ------------------------------------------------------------------
    # shared helpers
    # ------------------------------------------------------------------

    def _drop(self, df: DF, col_key: str) -> DF:
        # Remove a column that carries little signal.
        return df.drop(columns=col_key)

    def _bin_update(self, df: DF, source_df: DF, col_key: str, bins: list) -> DF:
        # Bin `col_key` of `source_df` and write the binned column back
        # into `df` in place of the raw values.
        binned_df = self.binning(source_df, col_key, bins)
        return self.df_update_col(df, col_key, binned_df)

    def _bin_replace(self, df: DF, col_key: str, bins: list) -> DF:
        # Bin `col_key` of the full frame, then drop the raw column and
        # append the binned one (the column ends up last, matching the
        # original drop+concat behavior).
        binned_df = self.binning(df, col_key, bins)
        df = df.drop(columns=col_key)
        return pd.concat([df, binned_df], axis=1)

    def _merge_values(self, df: DF, col_key: str, mapping: dict) -> DF:
        # Replace each key of `mapping` appearing in `col_key` with its
        # mapped (coarser) value.
        for old, new in mapping.items():
            df.loc[df.loc[:, col_key] == old, col_key] = new
        return df

    # ------------------------------------------------------------------
    # per-column hooks
    # ------------------------------------------------------------------

    def col_00_1stFlrSF(self, df: DF, col_key: str, partial_df: DF, series: Series, Xs_key: list, Ys_key: list):
        bins = df_frequency_equal_bins(partial_df, col_key, 10)
        return self._bin_replace(df, col_key, bins)

    def col_01_2ndFlrSF(self, df: DF, col_key: str, partial_df: DF, series: Series, Xs_key: list, Ys_key: list):
        # Hand-tuned edges: 0 sqft (no second floor) is kept as its own bin.
        bins = [-1, 0, 1, 423, 631, 767, 918, 2066]
        return self._bin_replace(df, col_key, bins)

    def col_02_3SsnPorch(self, df: DF, col_key: str, partial_df: DF, series: Series, Xs_key: list, Ys_key: list):
        return self._drop(df, col_key)

    def col_04_BedroomAbvGr(self, df: DF, col_key: str, partial_df: DF, series: Series, Xs_key: list, Ys_key: list):
        bins = [0, 2, 3, 4, 5, 8]
        return self._bin_update(df, partial_df, col_key, bins)

    def col_05_BldgType(self, df: DF, col_key: str, partial_df: DF, series: Series, Xs_key: list, Ys_key: list):
        return self._drop(df, col_key)

    def col_06_BsmtCond(self, df: DF, col_key: str, partial_df: DF, series: Series, Xs_key: list, Ys_key: list):
        # Kept as-is; merging the rare 'Po' level was considered but shelved.
        return df

    def col_07_BsmtExposure(self, df: DF, col_key: str, partial_df: DF, series: Series, Xs_key: list, Ys_key: list):
        return df

    def col_08_BsmtFinSF1(self, df: DF, col_key: str, partial_df: DF, series: Series, Xs_key: list, Ys_key: list):
        # Bin from the live frame rather than the passed partial_df, in case
        # earlier hooks already modified the column.
        bins = [-1.0, 0.0, 1, 196.0, 368.0, 512.0, 654.0, 808.0, 1047.0, 5645.0]
        return self._bin_update(df, df[[col_key]], col_key, bins)

    def col_09_BsmtFinSF2(self, df: DF, col_key: str, partial_df: DF, series: Series, Xs_key: list, Ys_key: list):
        return self._drop(df, col_key)

    def col_10_BsmtFinType1(self, df: DF, col_key: str, partial_df: DF, series: Series, Xs_key: list, Ys_key: list):
        return df

    def col_11_BsmtFinType2(self, df: DF, col_key: str, partial_df: DF, series: Series, Xs_key: list, Ys_key: list):
        return df

    def col_12_BsmtFullBath(self, df: DF, col_key: str, partial_df: DF, series: Series, Xs_key: list, Ys_key: list):
        return df

    def col_13_BsmtHalfBath(self, df: DF, col_key: str, partial_df: DF, series: Series, Xs_key: list, Ys_key: list):
        return df

    def col_14_BsmtQual(self, df: DF, col_key: str, partial_df: DF, series: Series, Xs_key: list, Ys_key: list):
        return df

    def col_15_BsmtUnfSF(self, df: DF, col_key: str, partial_df: DF, series: Series, Xs_key: list, Ys_key: list):
        # TODO: revisit these edges; frequency-equal may not be optimal here.
        bins = df_frequency_equal_bins(partial_df, col_key, 10)
        return self._bin_update(df, partial_df, col_key, bins)

    def col_16_CentralAir(self, df: DF, col_key: str, partial_df: DF, series: Series, Xs_key: list, Ys_key: list):
        return df

    def col_17_Condition1(self, df: DF, col_key: str, partial_df: DF, series: Series, Xs_key: list, Ys_key: list):
        return df

    def col_18_Condition2(self, df: DF, col_key: str, partial_df: DF, series: Series, Xs_key: list, Ys_key: list):
        return df

    def col_19_Electrical(self, df: DF, col_key: str, partial_df: DF, series: Series, Xs_key: list, Ys_key: list):
        return df

    def col_20_EnclosedPorch(self, df: DF, col_key: str, partial_df: DF, series: Series, Xs_key: list, Ys_key: list):
        # TODO: revisit these hand-tuned edges.
        bins = [-1.0, 0.0, 1.0, 100.0, 200.0, 300.0, 400.0, 1013.0]
        return self._bin_update(df, partial_df, col_key, bins)

    def col_21_ExterCond(self, df: DF, col_key: str, partial_df: DF, series: Series, Xs_key: list, Ys_key: list):
        return df

    def col_22_ExterQual(self, df: DF, col_key: str, partial_df: DF, series: Series, Xs_key: list, Ys_key: list):
        return df

    def col_23_Exterior1st(self, df: DF, col_key: str, partial_df: DF, series: Series, Xs_key: list, Ys_key: list):
        return df

    def col_24_Exterior2nd(self, df: DF, col_key: str, partial_df: DF, series: Series, Xs_key: list, Ys_key: list):
        return df

    def col_26_FireplaceQu(self, df: DF, col_key: str, partial_df: DF, series: Series, Xs_key: list, Ys_key: list):
        return df

    def col_27_Fireplaces(self, df: DF, col_key: str, partial_df: DF, series: Series, Xs_key: list, Ys_key: list):
        # Few houses have 2+ fireplaces; collapse the tail.
        return self._merge_values(df, col_key, {'2': '2~4', '3': '2~4', '4': '2~4'})

    def col_28_Foundation(self, df: DF, col_key: str, partial_df: DF, series: Series, Xs_key: list, Ys_key: list):
        return self._merge_values(df, col_key, {'Stone': 'Stone_and_Wood', 'Wood': 'Stone_and_Wood'})

    def col_29_FullBath(self, df: DF, col_key: str, partial_df: DF, series: Series, Xs_key: list, Ys_key: list):
        # Stringify first so the value merge below compares like-for-like.
        df[[col_key]] = df[[col_key]].astype(str)
        return self._merge_values(df, col_key, {'4': '3~4', '3': '3~4', '0': '0~1', '1': '0~1'})

    def col_30_Functional(self, df: DF, col_key: str, partial_df: DF, series: Series, Xs_key: list, Ys_key: list):
        return self._drop(df, col_key)

    def col_31_GarageArea(self, df: DF, col_key: str, partial_df: DF, series: Series, Xs_key: list, Ys_key: list):
        bins = df_frequency_equal_bins(df, col_key, 10)
        return self._bin_update(df, df, col_key, bins)

    def col_32_GarageCars(self, df: DF, col_key: str, partial_df: DF, series: Series, Xs_key: list, Ys_key: list):
        # Values arrive as stringified floats ('4.0', '5.0') for this column.
        return self._merge_values(df, col_key, {'5.0': '4~5', '4.0': '4~5'})

    def col_33_GarageCond(self, df: DF, col_key: str, partial_df: DF, series: Series, Xs_key: list, Ys_key: list):
        return self._drop(df, col_key)

    def col_34_GarageFinish(self, df: DF, col_key: str, partial_df: DF, series: Series, Xs_key: list, Ys_key: list):
        return df

    def col_35_GarageQual(self, df: DF, col_key: str, partial_df: DF, series: Series, Xs_key: list, Ys_key: list):
        return self._drop(df, col_key)

    def col_36_GarageType(self, df: DF, col_key: str, partial_df: DF, series: Series, Xs_key: list, Ys_key: list):
        # 'Basment' is the dataset's own spelling; do not "fix" it.
        return self._merge_values(df, col_key, {'Basment': 'etc', '2Types': 'etc', 'CarPort': 'etc'})

    def col_37_GarageYrBlt(self, df: DF, col_key: str, partial_df: DF, series: Series, Xs_key: list, Ys_key: list):
        # TODO: revisit binning granularity.
        bins = df_frequency_equal_bins(partial_df, col_key, 10)
        return self._bin_update(df, partial_df, col_key, bins)

    def col_38_GrLivArea(self, df: DF, col_key: str, partial_df: DF, series: Series, Xs_key: list, Ys_key: list):
        # TODO: revisit binning granularity.
        bins = df_frequency_equal_bins(partial_df, col_key, 10)
        return self._bin_update(df, partial_df, col_key, bins)

    def col_39_HalfBath(self, df: DF, col_key: str, partial_df: DF, series: Series, Xs_key: list, Ys_key: list):
        return df

    def col_40_Heating(self, df: DF, col_key: str, partial_df: DF, series: Series, Xs_key: list, Ys_key: list):
        return self._drop(df, col_key)

    def col_41_HeatingQC(self, df: DF, col_key: str, partial_df: DF, series: Series, Xs_key: list, Ys_key: list):
        # 'Po' is too rare to stand alone; fold it into 'TA'.
        return self._merge_values(df, col_key, {'Po': 'TA'})

    def col_42_HouseStyle(self, df: DF, col_key: str, partial_df: DF, series: Series, Xs_key: list, Ys_key: list):
        return self._merge_values(df, col_key, {
            'SFoyer': 'etc', '2.5Unf': 'etc', '1.5Unf': 'etc', '2.5Fin': 'etc',
        })

    def col_43_Id(self, df: DF, col_key: str, partial_df: DF, series: Series, Xs_key: list, Ys_key: list):
        # Row identifier carries no predictive signal.
        return self._drop(df, col_key)

    def col_44_KitchenAbvGr(self, df: DF, col_key: str, partial_df: DF, series: Series, Xs_key: list, Ys_key: list):
        return self._drop(df, col_key)

    def col_45_KitchenQual(self, df: DF, col_key: str, partial_df: DF, series: Series, Xs_key: list, Ys_key: list):
        return df

    def col_46_LandContour(self, df: DF, col_key: str, partial_df: DF, series: Series, Xs_key: list, Ys_key: list):
        return df

    def col_47_LandSlope(self, df: DF, col_key: str, partial_df: DF, series: Series, Xs_key: list, Ys_key: list):
        return self._drop(df, col_key)

    def col_48_LotArea(self, df: DF, col_key: str, partial_df: DF, series: Series, Xs_key: list, Ys_key: list):
        bins = df_frequency_equal_bins(partial_df, col_key, 10)
        return self._bin_update(df, partial_df, col_key, bins)

    def col_49_LotConfig(self, df: DF, col_key: str, partial_df: DF, series: Series, Xs_key: list, Ys_key: list):
        return self._drop(df, col_key)

    def col_50_LotFrontage(self, df: DF, col_key: str, partial_df: DF, series: Series, Xs_key: list, Ys_key: list):
        bins = df_frequency_equal_bins(partial_df, col_key, 10)
        return self._bin_update(df, partial_df, col_key, bins)

    def col_51_LotShape(self, df: DF, col_key: str, partial_df: DF, series: Series, Xs_key: list, Ys_key: list):
        return self._merge_values(df, col_key, {'IR2': 'IR12', 'IR3': 'IR23'})

    def col_52_LowQualFinSF(self, df: DF, col_key: str, partial_df: DF, series: Series, Xs_key: list, Ys_key: list):
        return self._drop(df, col_key)

    def col_53_MSSubClass(self, df: DF, col_key: str, partial_df: DF, series: Series, Xs_key: list, Ys_key: list):
        df = self.df_group_values([30, 40, 45], 40, df, col_key)
        df = self.df_group_values([150, 160, 180, 190], 155, df, col_key)
        return df

    def col_54_MSZoning(self, df: DF, col_key: str, partial_df: DF, series: Series, Xs_key: list, Ys_key: list):
        return df

    def col_55_MasVnrArea(self, df: DF, col_key: str, partial_df: DF, series: Series, Xs_key: list, Ys_key: list):
        # Hand-tuned edges: 0 (no veneer) is kept as its own bin.
        bins = [-1.0, 0.0, 85.0, 144.0, 200.0, 270.0, 408.0, 1601.0]
        return self._bin_update(df, partial_df, col_key, bins)

    def col_56_MasVnrType(self, df: DF, col_key: str, partial_df: DF, series: Series, Xs_key: list, Ys_key: list):
        return df

    def col_58_MiscVal(self, df: DF, col_key: str, partial_df: DF, series: Series, Xs_key: list, Ys_key: list):
        return self._drop(df, col_key)

    def col_59_MoSold(self, df: DF, col_key: str, partial_df: DF, series: Series, Xs_key: list, Ys_key: list):
        return df

    def col_60_Neighborhood(self, df: DF, col_key: str, partial_df: DF, series: Series, Xs_key: list, Ys_key: list):
        return df

    def col_61_OpenPorchSF(self, df: DF, col_key: str, partial_df: DF, series: Series, Xs_key: list, Ys_key: list):
        # Hand-tuned edges: 0 (no porch) is kept as its own bin.
        bins = [-1, 0, 20, 32, 40, 50, 64, 84, 112, 160, 743]
        return self._bin_update(df, partial_df, col_key, bins)

    def col_62_OverallCond(self, df: DF, col_key: str, partial_df: DF, series: Series, Xs_key: list, Ys_key: list):
        return self.df_group_values(['1', '2', '3'], '1~3', df, col_key)

    def col_63_OverallQual(self, df: DF, col_key: str, partial_df: DF, series: Series, Xs_key: list, Ys_key: list):
        return self.df_group_values(['1', '2', '3'], '1~3', df, col_key)

    def col_64_PavedDrive(self, df: DF, col_key: str, partial_df: DF, series: Series, Xs_key: list, Ys_key: list):
        return df

    def col_65_PoolArea(self, df: DF, col_key: str, partial_df: DF, series: Series, Xs_key: list, Ys_key: list):
        return self._drop(df, col_key)

    def col_67_RoofMatl(self, df: DF, col_key: str, partial_df: DF, series: Series, Xs_key: list, Ys_key: list):
        return self._drop(df, col_key)

    def col_68_RoofStyle(self, df: DF, col_key: str, partial_df: DF, series: Series, Xs_key: list, Ys_key: list):
        return df

    def col_69_SaleCondition(self, df: DF, col_key: str, partial_df: DF, series: Series, Xs_key: list,
                             Ys_key: list):
        return self._drop(df, col_key)

    def col_71_SaleType(self, df: DF, col_key: str, partial_df: DF, series: Series, Xs_key: list, Ys_key: list):
        rare = ['ConLD', 'CWD', 'ConLI', 'ConLw', 'Oth', 'Con', 'COD']
        return self.df_group_values(rare, 'etc', df, col_key)

    def col_72_ScreenPorch(self, df: DF, col_key: str, partial_df: DF, series: Series, Xs_key: list, Ys_key: list):
        # Almost all values are 0, so binning degenerates; drop instead.
        return self._drop(df, col_key)

    def col_73_Street(self, df: DF, col_key: str, partial_df: DF, series: Series, Xs_key: list, Ys_key: list):
        return self._drop(df, col_key)

    def col_74_TotRmsAbvGrd(self, df: DF, col_key: str, partial_df: DF, series: Series, Xs_key: list, Ys_key: list):
        df = self.df_group_values(['2', '3'], '2~3', df, col_key)
        df = self.df_group_values(['11', '12', '13', '14', '15'], '11~15', df, col_key)
        return df

    def col_75_TotalBsmtSF(self, df: DF, col_key: str, partial_df: DF, series: Series, Xs_key: list, Ys_key: list):
        bins = df_frequency_equal_bins(df, col_key, 10)
        return self._bin_update(df, df, col_key, bins)

    def col_77_WoodDeckSF(self, df: DF, col_key: str, partial_df: DF, series: Series, Xs_key: list, Ys_key: list):
        # Hand-tuned edges: 0 (no deck) is kept as its own bin.
        bins = [-1, 0, 1, 100, 144, 192, 256, 1425]
        return self._bin_update(df, df, col_key, bins)

    def col_78_YearBuilt(self, df: DF, col_key: str, partial_df: DF, series: Series, Xs_key: list, Ys_key: list):
        bins = df_frequency_equal_bins(df, col_key, 20)
        return self._bin_update(df, df, col_key, bins)

    def col_79_YearRemodAdd(self, df: DF, col_key: str, partial_df: DF, series: Series, Xs_key: list, Ys_key: list):
        # Fixed: a debug print_info(...) call was left enabled here, dumping
        # full column diagnostics to stdout on every transform run.
        bins = df_frequency_equal_bins(df, col_key, 10)
        return self._bin_update(df, df, col_key, bins)

    def col_80_YrSold(self, df: DF, col_key: str, partial_df: DF, series: Series, Xs_key: list, Ys_key: list):
        return self._drop(df, col_key)
| 41.842478
| 118
| 0.617275
| 3,774
| 23,641
| 3.578166
| 0.074987
| 0.120409
| 0.12263
| 0.059242
| 0.864188
| 0.850785
| 0.844342
| 0.838566
| 0.821905
| 0.817165
| 0
| 0.026476
| 0.257096
| 23,641
| 564
| 119
| 41.916667
| 0.742413
| 0.206886
| 0
| 0.549488
| 0
| 0
| 0.012292
| 0
| 0
| 0
| 0
| 0.001773
| 0
| 1
| 0.266212
| false
| 0
| 0.013652
| 0.09215
| 0.546075
| 0.020478
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
a3d66c1652cb668b80484e2fe1de572588c520d0
| 6,068
|
py
|
Python
|
tests/common/constants.py
|
inancg/Transcriptor
|
72782ddbefee1675060f6ec0c8303945f60aab8d
|
[
"MIT"
] | null | null | null |
tests/common/constants.py
|
inancg/Transcriptor
|
72782ddbefee1675060f6ec0c8303945f60aab8d
|
[
"MIT"
] | null | null | null |
tests/common/constants.py
|
inancg/Transcriptor
|
72782ddbefee1675060f6ec0c8303945f60aab8d
|
[
"MIT"
] | null | null | null |
import os
from src.common.transcribe_item import TranscribeItem, TranscribeItemType
# Root directory of the shared test fixture files, resolved relative to this
# module so the tests are independent of the current working directory.
TEST_RESOURCES_LOCATION = os.path.join(os.path.dirname(__file__),
                                       '../resources/')
# Provider-specific fixture subdirectories.
AWS_TEST_RESOURCES_LOCATION = TEST_RESOURCES_LOCATION + 'aws/'
WATSON_TEST_RESOURCES_LOCATION = TEST_RESOURCES_LOCATION + 'watson/'

# Confidence threshold used by the tests, plus one representative value on
# each side of it.
TEST_CONFIDENCE_THRESHOLD: float = 0.8
TEST_CONFIDENCE_SUSPICIOUS: float = 0.4
TEST_CONFIDENCE_NOT_SUSPICIOUS: float = 0.95

# AWS-style transcript containing several words whose confidence is below
# TEST_CONFIDENCE_THRESHOLD ("probably" 0.4, "test" 0.6, "Should" 0.2), so
# the transcript as a whole should be flagged as suspicious.
TRANSCRIBE_ITEMS_SUSPICIOUS_AWS = [
    TranscribeItem(content="This", confidence=1, start_time=1,
                   end_time=1.5, item_type=TranscribeItemType.WORD),
    TranscribeItem(content="is", confidence=0.9, start_time=1.5,
                   end_time=2, item_type=TranscribeItemType.WORD),
    TranscribeItem(content=",", confidence=1, start_time=2,
                   end_time=2, item_type=TranscribeItemType.PUNCTUATION),
    TranscribeItem(content="probably", confidence=0.4, start_time=2.5,
                   end_time=3, item_type=TranscribeItemType.WORD),
    TranscribeItem(content="a", confidence=0.8, start_time=3,
                   end_time=3.2, item_type=TranscribeItemType.WORD),
    TranscribeItem(content="test", confidence=0.6, start_time=3.5,
                   end_time=4, item_type=TranscribeItemType.WORD),
    TranscribeItem(content="case", confidence=0.96, start_time=4,
                   end_time=4.5, item_type=TranscribeItemType.WORD),
    TranscribeItem(content=".", confidence=1, start_time=4.5,
                   end_time=4.5, item_type=TranscribeItemType.PUNCTUATION),
    TranscribeItem(content="Should", confidence=0.2, start_time=6,
                   end_time=6.5, item_type=TranscribeItemType.WORD),
    TranscribeItem(content="it", confidence=0.85, start_time=6.5,
                   end_time=7, item_type=TranscribeItemType.WORD),
    TranscribeItem(content="pass", confidence=0.95, start_time=7,
                   end_time=7.5, item_type=TranscribeItemType.WORD),
    TranscribeItem(content="?", confidence=1, start_time=7.5,
                   end_time=7.5, item_type=TranscribeItemType.PUNCTUATION)
]

# AWS-style transcript where every word is at or above the threshold.
TRANSCRIBE_ITEMS_NOT_SUSPICIOUS_AWS = [
    TranscribeItem(content="This", confidence=1, start_time=1,
                   end_time=1.5, item_type=TranscribeItemType.WORD),
    TranscribeItem(content="is", confidence=0.9, start_time=1.5,
                   end_time=2, item_type=TranscribeItemType.WORD),
    TranscribeItem(content="it", confidence=1, start_time=2.1,
                   end_time=2.3, item_type=TranscribeItemType.WORD),
    TranscribeItem(content=".", confidence=1, start_time=2.3,
                   end_time=2.3, item_type=TranscribeItemType.PUNCTUATION)
]

# Watson-style transcript, one sentence, all confidences >= threshold.
TRANSCRIBE_ITEMS_NOT_SUSPICIOUS_WATSON = [
    TranscribeItem(content="this", confidence=1, start_time=0,
                   end_time=0.1, item_type=TranscribeItemType.WORD),
    TranscribeItem(content="is", confidence=0.99, start_time=0.1,
                   end_time=0.2, item_type=TranscribeItemType.WORD),
    TranscribeItem(content="a", confidence=0.9, start_time=0.2,
                   end_time=0.3, item_type=TranscribeItemType.WORD),
    TranscribeItem(content="test", confidence=0.91, start_time=0.3,
                   end_time=0.4, item_type=TranscribeItemType.WORD),
    TranscribeItem(content="and", confidence=0.92, start_time=0.4,
                   end_time=0.5, item_type=TranscribeItemType.WORD),
    TranscribeItem(content="nothing", confidence=0.96, start_time=0.5,
                   end_time=0.6, item_type=TranscribeItemType.WORD),
    TranscribeItem(content="suspicious", confidence=0.93, start_time=0.6,
                   end_time=0.7, item_type=TranscribeItemType.WORD),
    TranscribeItem(content="is", confidence=0.93, start_time=0.7,
                   end_time=0.8, item_type=TranscribeItemType.WORD),
    TranscribeItem(content="happening", confidence=0.96, start_time=0.9,
                   end_time=1.0, item_type=TranscribeItemType.WORD),
    TranscribeItem(content=".", confidence=1, start_time=1.0,
                   end_time=1.0, item_type=TranscribeItemType.PUNCTUATION)
]

# Watson-style transcript split into two sentences by a mid-list full stop,
# all confidences >= threshold.
TRANSCRIBE_ITEMS_NOT_SUSPICIOUS_TWO_SENTENCES_WATSON = [
    TranscribeItem(content="this", confidence=1, start_time=0,
                   end_time=0.1, item_type=TranscribeItemType.WORD),
    TranscribeItem(content="is", confidence=0.99, start_time=0.1,
                   end_time=0.2, item_type=TranscribeItemType.WORD),
    TranscribeItem(content="a", confidence=0.9, start_time=0.2,
                   end_time=0.3, item_type=TranscribeItemType.WORD),
    TranscribeItem(content="test", confidence=0.91, start_time=0.3,
                   end_time=0.4, item_type=TranscribeItemType.WORD),
    TranscribeItem(content=".", confidence=1, start_time=0.4,
                   end_time=0.4, item_type=TranscribeItemType.PUNCTUATION),
    TranscribeItem(content="absolutely", confidence=0.92, start_time=0.4,
                   end_time=0.5, item_type=TranscribeItemType.WORD),
    TranscribeItem(content="nothing", confidence=0.96, start_time=0.5,
                   end_time=0.6, item_type=TranscribeItemType.WORD),
    TranscribeItem(content="suspicious", confidence=0.93, start_time=0.6,
                   end_time=0.7, item_type=TranscribeItemType.WORD),
    TranscribeItem(content="is", confidence=0.93, start_time=0.7,
                   end_time=0.8, item_type=TranscribeItemType.WORD),
    TranscribeItem(content="happening", confidence=0.96, start_time=0.9,
                   end_time=1.0, item_type=TranscribeItemType.WORD),
    TranscribeItem(content=".", confidence=1, start_time=1.0,
                   end_time=1.0, item_type=TranscribeItemType.PUNCTUATION)
]

TRANSCRIBE_ITEMS_SUSPICIOUS_WATSON = []  # TODO fill
| 60.079208
| 79
| 0.64354
| 715
| 6,068
| 5.244755
| 0.08951
| 0.2072
| 0.256533
| 0.24
| 0.872267
| 0.863467
| 0.8064
| 0.7472
| 0.691733
| 0.657333
| 0
| 0.047598
| 0.24176
| 6,068
| 100
| 80
| 60.68
| 0.767442
| 0.001483
| 0
| 0.5
| 0
| 0
| 0.027241
| 0
| 0
| 0
| 0
| 0.01
| 0
| 1
| 0
| false
| 0.01087
| 0.021739
| 0
| 0.021739
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
431d7ceda38741cfcbe731075371efa22512e738
| 108
|
py
|
Python
|
sample-viewer-api/src/static/data/exploratory_scripts/archive/acute_patients/__init__.py
|
cvisb/cvisb_data
|
81ebf22782f2c44f8aa8ab9437cc4fb54248c3ed
|
[
"MIT"
] | 2
|
2020-02-18T08:16:45.000Z
|
2021-04-11T18:58:02.000Z
|
sample-viewer-api/src/static/data/exploratory_scripts/archive/acute_patients/__init__.py
|
cvisb/cvisb_data
|
81ebf22782f2c44f8aa8ab9437cc4fb54248c3ed
|
[
"MIT"
] | 47
|
2019-09-30T22:26:36.000Z
|
2021-11-17T00:34:38.000Z
|
sample-viewer-api/src/static/data/exploratory_scripts/archive/acute_patients/__init__.py
|
cvisb/cvisb_data
|
81ebf22782f2c44f8aa8ab9437cc4fb54248c3ed
|
[
"MIT"
] | 1
|
2020-07-01T21:15:18.000Z
|
2020-07-01T21:15:18.000Z
|
from .clean_lassa_acute import clean_lassa_acute
# from .clean_lassa_acute_ids import clean_lassa_acute_ids
| 36
| 58
| 0.888889
| 18
| 108
| 4.777778
| 0.333333
| 0.465116
| 0.697674
| 0.44186
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.083333
| 108
| 2
| 59
| 54
| 0.868687
| 0.518519
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 8
|
433dbe5a35b372d0da14c47eb010d5f8b68d2f90
| 20,951
|
py
|
Python
|
akshare/stock_feature/stock_em_hsgt.py
|
szj2ys/akshare
|
d61ccbff50539bff1e07ffd15b841921b4846958
|
[
"MIT"
] | 1
|
2021-11-30T06:33:53.000Z
|
2021-11-30T06:33:53.000Z
|
akshare/stock_feature/stock_em_hsgt.py
|
szj2ys/akshare
|
d61ccbff50539bff1e07ffd15b841921b4846958
|
[
"MIT"
] | null | null | null |
akshare/stock_feature/stock_em_hsgt.py
|
szj2ys/akshare
|
d61ccbff50539bff1e07ffd15b841921b4846958
|
[
"MIT"
] | 1
|
2021-01-21T06:04:15.000Z
|
2021-01-21T06:04:15.000Z
|
# -*- coding:utf-8 -*-
# /usr/bin/env python
"""
Date: 2020/7/15 19:12
Desc: 东方财富网-数据中心-沪深港通持股
http://data.eastmoney.com/hsgtcg/
沪深港通详情: http://finance.eastmoney.com/news/1622,20161118685370149.html
"""
import json
import demjson
import pandas as pd
import requests
from bs4 import BeautifulSoup
def stock_em_hsgt_north_net_flow_in(indicator: str = "沪股通") -> pd.DataFrame:
    """
    Daily northbound net capital inflow (沪深港通持股-净流入) from the
    Eastmoney data center, http://data.eastmoney.com/hsgtcg/
    :param indicator: one of {"沪股通", "深股通", "北上"}
    :type indicator: str
    :return: table with two columns, "date" and "value"
    :rtype: pandas.DataFrame
    """
    url = "http://push2his.eastmoney.com/api/qt/kamt.kline/get"
    params = {
        "fields1": "f1,f3,f5",
        "fields2": "f51,f52",
        "klt": "101",
        "lmt": "500",
        "ut": "b2884a393a59ad64002292a3e90d46a5",
        "cb": "jQuery18305732402561585701_1584961751919",
        "_": "1584962164273",
    }
    raw = requests.get(url, params=params).text
    # the endpoint answers JSONP; strip the callback wrapper before parsing
    payload = json.loads(raw[raw.find("{") : -2])
    series_key = {"沪股通": "hk2sh", "深股通": "hk2sz", "北上": "s2n"}.get(indicator)
    if series_key is None:
        return None  # unknown indicator: mirrors the original implicit None
    frame = (
        pd.DataFrame(payload["data"][series_key])
        .iloc[:, 0]
        .str.split(",", expand=True)
    )
    frame.columns = ["date", "value"]
    return frame
def stock_em_hsgt_north_cash(indicator: str = "沪股通") -> pd.DataFrame:
    """
    Daily northbound cash balance (沪深港通持股-资金余额) from the
    Eastmoney data center, http://data.eastmoney.com/hsgtcg/
    :param indicator: one of {"沪股通", "深股通", "北上"}
    :type indicator: str
    :return: table with two columns, "date" and "value"
    :rtype: pandas.DataFrame
    """
    url = "http://push2his.eastmoney.com/api/qt/kamt.kline/get"
    params = {
        "fields1": "f1,f3,f5",
        "fields2": "f51,f53",
        "klt": "101",
        "lmt": "500",
        "ut": "b2884a393a59ad64002292a3e90d46a5",
        "cb": "jQuery18305732402561585701_1584961751919",
        "_": "1584962164273",
    }
    raw = requests.get(url, params=params).text
    # the endpoint answers JSONP; strip the callback wrapper before parsing
    payload = json.loads(raw[raw.find("{") : -2])
    series_key = {"沪股通": "hk2sh", "深股通": "hk2sz", "北上": "s2n"}.get(indicator)
    if series_key is None:
        return None  # unknown indicator: mirrors the original implicit None
    frame = (
        pd.DataFrame(payload["data"][series_key])
        .iloc[:, 0]
        .str.split(",", expand=True)
    )
    frame.columns = ["date", "value"]
    return frame
def stock_em_hsgt_north_acc_flow_in(indicator: str = "沪股通") -> pd.DataFrame:
    """
    Cumulative northbound net inflow (沪深港通持股-累计净流入) from the
    Eastmoney data center, http://data.eastmoney.com/hsgtcg/
    :param indicator: one of {"沪股通", "深股通", "北上"}
    :type indicator: str
    :return: table with two columns, "date" and "value"
    :rtype: pandas.DataFrame
    """
    url = "http://push2his.eastmoney.com/api/qt/kamt.kline/get"
    params = {
        "fields1": "f1,f3,f5",
        "fields2": "f51,f54",
        "klt": "101",
        "lmt": "500",
        "ut": "b2884a393a59ad64002292a3e90d46a5",
        "cb": "jQuery18305732402561585701_1584961751919",
        "_": "1584962164273",
    }
    raw = requests.get(url, params=params).text
    # the endpoint answers JSONP; strip the callback wrapper before parsing
    payload = json.loads(raw[raw.find("{") : -2])
    series_key = {"沪股通": "hk2sh", "深股通": "hk2sz", "北上": "s2n"}.get(indicator)
    if series_key is None:
        return None  # unknown indicator: mirrors the original implicit None
    frame = (
        pd.DataFrame(payload["data"][series_key])
        .iloc[:, 0]
        .str.split(",", expand=True)
    )
    frame.columns = ["date", "value"]
    return frame
def stock_em_hsgt_south_net_flow_in(indicator: str = "沪股通") -> pd.DataFrame:
    """
    Daily southbound net capital inflow (沪深港通持股-南向概括-净流入) from
    the Eastmoney data center, http://data.eastmoney.com/hsgtcg/
    :param indicator: one of {"沪股通", "深股通", "南下"}
    :type indicator: str
    :return: table with two columns, "date" and "value"
    :rtype: pandas.DataFrame
    """
    url = "http://push2his.eastmoney.com/api/qt/kamt.kline/get"
    params = {
        "fields1": "f2,f4,f6",
        "fields2": "f51,f52",
        "klt": "101",
        "lmt": "500",
        "ut": "b2884a393a59ad64002292a3e90d46a5",
        "cb": "jQuery18307854355493858363_1584963487410",
        "_": "1584964176697",
    }
    raw = requests.get(url, params=params).text
    # the endpoint answers JSONP; strip the callback wrapper before parsing
    payload = json.loads(raw[raw.find("{") : -2])
    series_key = {"沪股通": "sh2hk", "深股通": "sz2hk", "南下": "n2s"}.get(indicator)
    if series_key is None:
        return None  # unknown indicator: mirrors the original implicit None
    frame = (
        pd.DataFrame(payload["data"][series_key])
        .iloc[:, 0]
        .str.split(",", expand=True)
    )
    frame.columns = ["date", "value"]
    return frame
def stock_em_hsgt_south_cash(indicator: str = "沪股通") -> pd.DataFrame:
    """
    Daily southbound cash balance (沪深港通持股-南向概括-资金余额) from the
    Eastmoney data center, http://data.eastmoney.com/hsgtcg/
    :param indicator: one of {"沪股通", "深股通", "南下"}
    :type indicator: str
    :return: table with two columns, "date" and "value"
    :rtype: pandas.DataFrame
    """
    url = "http://push2his.eastmoney.com/api/qt/kamt.kline/get"
    params = {
        "fields1": "f2,f4,f6",
        "fields2": "f51,f53",
        "klt": "101",
        "lmt": "500",
        "ut": "b2884a393a59ad64002292a3e90d46a5",
        "cb": "jQuery18307854355493858363_1584963487410",
        "_": "1584964176697",
    }
    raw = requests.get(url, params=params).text
    # the endpoint answers JSONP; strip the callback wrapper before parsing
    payload = json.loads(raw[raw.find("{") : -2])
    series_key = {"沪股通": "sh2hk", "深股通": "sz2hk", "南下": "n2s"}.get(indicator)
    if series_key is None:
        return None  # unknown indicator: mirrors the original implicit None
    frame = (
        pd.DataFrame(payload["data"][series_key])
        .iloc[:, 0]
        .str.split(",", expand=True)
    )
    frame.columns = ["date", "value"]
    return frame
def stock_em_hsgt_south_acc_flow_in(indicator: str = "沪股通") -> pd.DataFrame:
    """
    Cumulative southbound net inflow (沪深港通持股-南向概括-累计净流入) from
    the Eastmoney data center, http://data.eastmoney.com/hsgtcg/
    :param indicator: one of {"沪股通", "深股通", "南下"}
    :type indicator: str
    :return: table with two columns, "date" and "value"
    :rtype: pandas.DataFrame
    """
    url = "http://push2his.eastmoney.com/api/qt/kamt.kline/get"
    params = {
        "fields1": "f2,f4,f6",
        "fields2": "f51,f54",
        "klt": "101",
        "lmt": "500",
        "ut": "b2884a393a59ad64002292a3e90d46a5",
        "cb": "jQuery18307854355493858363_1584963487410",
        "_": "1584964176697",
    }
    raw = requests.get(url, params=params).text
    # the endpoint answers JSONP; strip the callback wrapper before parsing
    payload = json.loads(raw[raw.find("{") : -2])
    series_key = {"沪股通": "sh2hk", "深股通": "sz2hk", "南下": "n2s"}.get(indicator)
    if series_key is None:
        return None  # unknown indicator: mirrors the original implicit None
    frame = (
        pd.DataFrame(payload["data"][series_key])
        .iloc[:, 0]
        .str.split(",", expand=True)
    )
    frame.columns = ["date", "value"]
    return frame
def stock_em_hsgt_hold_stock(
    market: str = "沪股通", indicator: str = "年排行"
) -> pd.DataFrame:
    """
    Per-stock HSGT holding ranking (沪深港通持股-个股排行) from the Eastmoney
    data center, http://data.eastmoney.com/hsgtcg/list.html
    :param market: one of {"北向", "沪股通", "深股通"}
    :type market: str
    :param indicator: one of {"今日排行", "3日排行", "5日排行", "10日排行", "月排行", "季排行", "年排行"}
    :type indicator: str
    :return: ranking data for the requested market and period
    :rtype: pandas.DataFrame
    :raises ValueError: if market or indicator is not a supported value
    """
    # the list page's title carries the latest trade date inside "(...)"
    url = "http://data.eastmoney.com/hsgtcg/list.html"
    r = requests.get(url)
    r.encoding = "gb2312"
    soup = BeautifulSoup(r.text, "lxml")
    date = soup.find(attrs={"class": "tit"}).find("span").text.strip("(").strip(")")
    indicator_type_map = {
        "今日排行": "1",
        "3日排行": "3",
        "5日排行": "5",
        # BUGFIX: "10日排行" previously mapped to "5", silently returning the
        # 5-day ranking; "10" matches the map used by stock_em_hsgt_board_rank.
        "10日排行": "10",
        "月排行": "m",
        "季排行": "jd",
        "年排行": "y",
    }
    try:
        indicator_type = indicator_type_map[indicator]
    except KeyError:
        raise ValueError(f"unsupported indicator: {indicator}") from None
    # "北向" aggregates both markets, hence no Market clause in the filter
    market_code_map = {"北向": None, "沪股通": "001", "深股通": "003"}
    try:
        market_code = market_code_map[market]
    except KeyError:
        raise ValueError(f"unsupported market: {market}") from None
    if market_code is None:
        filter_str = f"(DateType='{indicator_type}' and HdDate='{date}')"
    else:
        filter_str = (
            f"(Market='{market_code}' and DateType='{indicator_type}'"
            f" and HdDate='{date}')"
        )
    url = "http://dcfm.eastmoney.com/EM_MutiSvcExpandInterface/api/js/get"
    params = {
        "type": "HSGT20_GGTJ_SUM",
        "token": "894050c76af8597a853f5b408b759f5d",
        "st": "ShareSZ_Chg_One",
        "sr": "-1",
        "p": "1",
        "ps": "5000",
        "js": "var orksULCQ={pages:(tp),data:(x)}",
        "filter": filter_str,
        "rt": "53001697",
    }
    r = requests.get(url, params=params)
    data_text = r.text
    # response is a JS assignment with unquoted keys; demjson tolerates that
    data_json = demjson.decode(data_text[data_text.find("{") :])
    return pd.DataFrame(data_json["data"])
def stock_em_hsgt_stock_statistics(
    market="南向持股", start_date="20200713", end_date="20200714"
):
    """
    Daily per-stock HSGT holding statistics (沪深港通持股-每日个股统计) from
    the Eastmoney data center,
    http://data.eastmoney.com/hsgtcg/StockStatistics.aspx
    market=001 -> 沪股通持股; market=003 -> 深股通持股
    :param market: one of {"北向持股", "南向持股"}
    :type market: str
    :param start_date: first day to fetch, e.g. "20200713"
    :type start_date: str
    :param end_date: last day to fetch, e.g. "20200715"
    :type end_date: str
    :return: per-stock daily statistics for the market and date range
    :rtype: pandas.DataFrame
    """
    # the endpoint expects ISO-style dates (YYYY-MM-DD)
    start_date = "-".join([start_date[:4], start_date[4:6], start_date[6:]])
    end_date = "-".join([end_date[:4], end_date[4:6], end_date[6:]])
    if market == "南向持股":
        market_filter = "(MARKET='S')"
    elif market == "北向持股":
        market_filter = "(MARKET in ('001','003'))"
    params = {
        "type": "HSGTHDSTA",
        "token": "70f12f2f4f091e459a279469fe49eca5",
        "st": "HDDATE,SHAREHOLDPRICE",
        "sr": "3",
        "p": "1",
        "ps": "10000",
        "js": "var AxDXinef={pages:(tp),data:(x)}",
        "filter": f"{market_filter}(HDDATE>=^{start_date}^ and HDDATE<=^{end_date}^)",
        "rt": "53160469",
    }
    url = "http://dcfm.eastmoney.com//em_mutisvcexpandinterface/api/js/get"
    raw = requests.get(url, params=params).text
    # response is a JS assignment with unquoted keys; demjson tolerates that
    payload = demjson.decode(raw[raw.find("{") :])
    return pd.DataFrame(payload["data"])
def stock_em_hsgt_institution_statistics(
    market="北向持股", start_date="20200713", end_date="20200714"
):
    """
    Daily per-institution HSGT holding statistics (沪深港通持股-每日机构统计)
    from the Eastmoney data center,
    http://data.eastmoney.com/hsgtcg/InstitutionStatistics.aspx
    market=001 -> 沪股通持股; market=003 -> 深股通持股
    :param market: one of {"北向持股", "南向持股"}
    :type market: str
    :param start_date: first day to fetch, e.g. "20200713"
    :type start_date: str
    :param end_date: last day to fetch, e.g. "20200715"
    :type end_date: str
    :return: per-institution daily statistics for the market and date range
    :rtype: pandas.DataFrame
    :raises ValueError: if market is not a supported value
    """
    # the endpoint expects ISO-style dates (YYYY-MM-DD)
    start_date = "-".join([start_date[:4], start_date[4:6], start_date[6:]])
    end_date = "-".join([end_date[:4], end_date[4:6], end_date[6:]])
    if market == "南向持股":
        # BUGFIX: this branch previously sent the northbound filter
        # (MARKET in ('001','003')), identical to the "北向持股" branch, so
        # southbound queries returned northbound data. MARKET='S' selects the
        # southbound market, consistent with stock_em_hsgt_stock_statistics.
        market_filter = "(MARKET='S')"
    elif market == "北向持股":
        market_filter = "(MARKET in ('001','003'))"
    else:
        raise ValueError("market must be one of {'北向持股', '南向持股'}")
    params = {
        "type": "HSGTCOMSTA",
        "token": "70f12f2f4f091e459a279469fe49eca5",
        "st": "HDDATE,SHAREHOLDCOUNT",
        "sr": "3",
        "p": "1",
        "ps": "5000",
        "js": "var gvfJjbLz={pages:(tp),data:(x)}",
        "filter": f"{market_filter}(HDDATE>=^{start_date}^ and HDDATE<=^{end_date}^)",
        "rt": "53160469",
    }
    headers = {
        "User-Agent": "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/83.0.4103.116 Safari/537.36"
    }
    url = "http://dcfm.eastmoney.com//em_mutisvcexpandinterface/api/js/get"
    r = requests.get(url, params=params, headers=headers)
    data_text = r.text
    # response is a JS assignment with unquoted keys; demjson tolerates that
    data_json = demjson.decode(data_text[data_text.find("{") :])
    temp_df = pd.DataFrame(data_json["data"])
    return temp_df
def stock_em_hsgt_hist(symbol: str = "港股通沪") -> pd.DataFrame:
    """
    Historical HSGT capital-flow data (沪深港通资金流向-历史数据) from the
    Eastmoney data center, http://data.eastmoney.com/hsgt/index.html
    :param symbol: one of {"沪股通", "深股通", "港股通沪", "港股通深"}
    :type symbol: str
    :return: history table with Chinese column names; 日期 parsed to datetime
    :rtype: pandas.DataFrame
    """
    market_type = {"沪股通": "1", "深股通": "3", "港股通沪": "2", "港股通深": "4"}[symbol]
    url = "http://dcfm.eastmoney.com/EM_MutiSvcExpandInterface/api/js/get"
    params = {
        "type": "HSGTHIS",
        "token": "70f12f2f4f091e459a279469fe49eca5",
        "filter": f"(MarketType={market_type})",
        "js": 'var VIIlLPMH={"data":(x),"pages":(tp)}',
        "ps": "2000",
        "p": "1",
        "sr": "-1",
        "st": "DetailDate",
        "rt": "53231355",
    }
    raw = requests.get(url, params=params).text
    # response is a JS assignment with unquoted keys; demjson tolerates that
    payload = demjson.decode(raw[raw.find("{") :])
    hist_df = pd.DataFrame(payload["data"])
    hist_df.columns = [
        "_",
        "日期",
        "当日资金流入",
        "当日余额",
        "历史资金累计流入",
        "当日成交净买额",
        "买入成交额",
        "卖出成交额",
        "_",
        "领涨股",
        "领涨股涨跌幅",
        "对应指数",
        "涨跌幅",
    ]
    # keep only the named columns, dropping the "_" placeholders
    keep_cols = [
        "日期",
        "当日资金流入",
        "当日余额",
        "历史资金累计流入",
        "当日成交净买额",
        "买入成交额",
        "卖出成交额",
        "领涨股",
        "领涨股涨跌幅",
        "对应指数",
        "涨跌幅",
    ]
    hist_df = hist_df[keep_cols]
    hist_df["日期"] = pd.to_datetime(hist_df["日期"])
    return hist_df
def stock_em_hsgt_board_rank(
    symbol: str = "北向资金增持行业板块排行", indicator: str = "今日"
) -> pd.DataFrame:
    """
    Board rankings by northbound-capital buying (沪深港通持股-行业板块排行)
    from the Eastmoney data center, http://data.eastmoney.com/hsgtcg/hy.html
    :param symbol: one of {"北向资金增持行业板块排行", "北向资金增持概念板块排行", "北向资金增持地域板块排行"}
    :type symbol: str
    :param indicator: one of {"今日", "3日", "5日", "10日", "1月", "1季", "1年"}
    :type indicator: str
    :return: board-ranking table for the requested board type and period
    :rtype: pandas.DataFrame
    """
    symbol_map = {
        "北向资金增持行业板块排行": "HSGT20_HYTJ_SUM",
        "北向资金增持概念板块排行": "HSGT20_GNTJ_SUM",
        "北向资金增持地域板块排行": "HSGT20_DQTJ_SUM",
    }
    indicator_map = {
        "今日": "1",
        "3日": "3",
        "5日": "5",
        "10日": "10",
        "1月": "m",
        "1季": "jd",
        "1年": "y",
    }
    url = "http://dcfm.eastmoney.com/EM_MutiSvcExpandInterface/api/js/get"
    params = {
        "type": symbol_map[symbol],
        "token": "894050c76af8597a853f5b408b759f5d",
        "st": "ShareSZ_ZC",
        "sr": "-1",
        "p": "1",
        "ps": "5000",
        "js": "var WCCFPIdQ={pages:(tp),data:(x)}",
        "filter": f"(DateType='{indicator_map[indicator]}')",
        "rt": "53477178",
    }
    raw = requests.get(url, params=params).text
    # response is a JS assignment with unquoted keys; demjson tolerates that
    payload = demjson.decode(raw[raw.find("{") :])
    rank_df = pd.DataFrame(payload["data"])
    rank_df.reset_index(inplace=True)
    # turn the index column into a 1-based 序号 (row number)
    rank_df["index"] = list(range(1, len(rank_df) + 1))
    rank_df.columns = [
        "序号",
        "_",
        "_",
        "名称",
        "_",
        "最新涨跌幅",
        "北向资金今日增持估计-股票只数",
        "北向资金今日持股-股票只数",
        "北向资金今日持股-占北向资金比",
        "北向资金今日增持估计-市值",
        "北向资金今日增持估计-市值增幅",
        "北向资金今日增持估计-占板块比",
        "北向资金今日增持估计-占北向资金比",
        "_",
        "北向资金今日持股-市值",
        "_",
        "今日增持最大股-市值",
        "_",
        "_",
        "_",
        "今日减持最大股-市值",
        "_",
        "_",
        "_",
        "_",
        "_",
        "_",
        "_",
        "北向资金今日持股-占板块比",
        "今日增持最大股-占股本比",
        "_",
        "今日减持最大股-占股本比",
        "_",
    ]
    # keep only the named columns, dropping the "_" placeholders
    keep_cols = [
        "序号",
        "名称",
        "最新涨跌幅",
        "北向资金今日持股-股票只数",
        "北向资金今日持股-市值",
        "北向资金今日持股-占板块比",
        "北向资金今日持股-占北向资金比",
        "北向资金今日增持估计-股票只数",
        "北向资金今日增持估计-市值",
        "北向资金今日增持估计-市值增幅",
        "北向资金今日增持估计-占板块比",
        "北向资金今日增持估计-占北向资金比",
        "今日增持最大股-市值",
        "今日增持最大股-占股本比",
        "今日减持最大股-占股本比",
        "今日减持最大股-市值",
    ]
    return rank_df[keep_cols]
if __name__ == "__main__":
    # Manual smoke test: hit each Eastmoney endpoint once and print the
    # resulting DataFrame. Requires network access; not part of the library API.
    stock_em_hsgt_north_net_flow_in_df = stock_em_hsgt_north_net_flow_in(
        indicator="沪股通"
    )
    print(stock_em_hsgt_north_net_flow_in_df)
    stock_em_hsgt_north_cash_df = stock_em_hsgt_north_cash(indicator="沪股通")
    print(stock_em_hsgt_north_cash_df)
    stock_em_hsgt_north_acc_flow_in_df = stock_em_hsgt_north_acc_flow_in(
        indicator="沪股通"
    )
    print(stock_em_hsgt_north_acc_flow_in_df)
    stock_em_hsgt_south_net_flow_in_df = stock_em_hsgt_south_net_flow_in(
        indicator="沪股通"
    )
    print(stock_em_hsgt_south_net_flow_in_df)
    stock_em_hsgt_south_cash_df = stock_em_hsgt_south_cash(indicator="沪股通")
    print(stock_em_hsgt_south_cash_df)
    stock_em_hsgt_south_acc_flow_in_df = stock_em_hsgt_south_acc_flow_in(
        indicator="沪股通"
    )
    print(stock_em_hsgt_south_acc_flow_in_df)
    stock_em_hsgt_hold_stock_df = stock_em_hsgt_hold_stock(
        market="北向", indicator="今日排行"
    )
    print(stock_em_hsgt_hold_stock_df)
    stock_em_hsgt_stock_statistics_df = stock_em_hsgt_stock_statistics(
        market="南向持股", start_date="20201022", end_date="20201022"
    )
    print(stock_em_hsgt_stock_statistics_df)
    stock_em_hsgt_institution_statistics_df = stock_em_hsgt_institution_statistics(
        market="北向持股", start_date="20201028", end_date="20201028"
    )
    print(stock_em_hsgt_institution_statistics_df)
    stock_em_hsgt_hist_df = stock_em_hsgt_hist(symbol="港股通沪")
    print(stock_em_hsgt_hist_df)
    stock_em_hsgt_industry_rank_df = stock_em_hsgt_board_rank(symbol="北向资金增持行业板块排行", indicator="今日")
    print(stock_em_hsgt_industry_rank_df)
| 30.810294
| 139
| 0.543984
| 2,359
| 20,951
| 4.626537
| 0.130988
| 0.040132
| 0.044347
| 0.04004
| 0.814733
| 0.773777
| 0.761957
| 0.749496
| 0.72604
| 0.669141
| 0
| 0.071094
| 0.286335
| 20,951
| 679
| 140
| 30.85567
| 0.658842
| 0.137034
| 0
| 0.70709
| 0
| 0.001866
| 0.242481
| 0.071275
| 0
| 0
| 0
| 0
| 0
| 1
| 0.020522
| false
| 0
| 0.009328
| 0
| 0.072761
| 0.020522
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
43442cb287a6887f5be57c1f85d13f81dbeb320c
| 108
|
py
|
Python
|
platform/hwconf_data/mgm11/PythonSnippet/__init__.py
|
lenloe1/v2.7
|
9ac9c4a7bb37987af382c80647f42d84db5f2e1d
|
[
"Zlib"
] | null | null | null |
platform/hwconf_data/mgm11/PythonSnippet/__init__.py
|
lenloe1/v2.7
|
9ac9c4a7bb37987af382c80647f42d84db5f2e1d
|
[
"Zlib"
] | 1
|
2020-08-25T02:36:22.000Z
|
2020-08-25T02:36:22.000Z
|
platform/hwconf_data/mgm11/PythonSnippet/__init__.py
|
lenloe1/v2.7
|
9ac9c4a7bb37987af382c80647f42d84db5f2e1d
|
[
"Zlib"
] | 1
|
2020-08-25T01:56:04.000Z
|
2020-08-25T01:56:04.000Z
|
from mgm11.halconfig import halconfig_types as types
from mgm11.halconfig import halconfig_dependency as dep
| 54
| 55
| 0.87963
| 16
| 108
| 5.8125
| 0.5
| 0.193548
| 0.387097
| 0.516129
| 0.709677
| 0
| 0
| 0
| 0
| 0
| 0
| 0.041237
| 0.101852
| 108
| 2
| 55
| 54
| 0.917526
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
4a571d39ffa8c3b4c7f11b4dbbab9237a171c6d8
| 76
|
py
|
Python
|
src/additional_package/inside_additional/methods.py
|
SharafutdinovRuslan/helloworld
|
9c549685a3de0bf8a6e4594561d738c374762d9e
|
[
"MIT"
] | null | null | null |
src/additional_package/inside_additional/methods.py
|
SharafutdinovRuslan/helloworld
|
9c549685a3de0bf8a6e4594561d738c374762d9e
|
[
"MIT"
] | null | null | null |
src/additional_package/inside_additional/methods.py
|
SharafutdinovRuslan/helloworld
|
9c549685a3de0bf8a6e4594561d738c374762d9e
|
[
"MIT"
] | null | null | null |
def get_inside_additional_package():
    """Print a marker showing this nested-package module was reached."""
    marker = 'Inside additional package'
    print(marker)
| 25.333333
| 38
| 0.789474
| 9
| 76
| 6.333333
| 0.666667
| 0.561404
| 0.807018
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.118421
| 76
| 2
| 39
| 38
| 0.850746
| 0
| 0
| 0
| 0
| 0
| 0.328947
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.5
| true
| 0
| 0
| 0
| 0.5
| 0.5
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| 0
| 1
|
0
| 7
|
4364f5a2e6d0e3ae69695fbe72028c2357b72d97
| 24,764
|
py
|
Python
|
abcTau/generative_models.py
|
roxana-zeraati/abcTau
|
ce4352062ee7821c80ac1c660641f41fef023e14
|
[
"BSD-3-Clause"
] | 8
|
2021-06-29T14:36:56.000Z
|
2022-03-27T18:18:10.000Z
|
abcTau/generative_models.py
|
roxana-zeraati/abcTau
|
ce4352062ee7821c80ac1c660641f41fef023e14
|
[
"BSD-3-Clause"
] | null | null | null |
abcTau/generative_models.py
|
roxana-zeraati/abcTau
|
ce4352062ee7821c80ac1c660641f41fef023e14
|
[
"BSD-3-Clause"
] | 4
|
2021-06-03T13:53:21.000Z
|
2022-03-27T18:18:01.000Z
|
"""
Module containing different generative models
"""
import numpy as np
from scipy import stats
from basic_functions import *
def oneTauOU(theta, deltaT, binSize, T, numTrials, data_mean, data_var):
    """Generate an OU process with a single timescale.

    Parameters
    ----------
    theta : 1d array
        [timescale].
    deltaT : float
        temporal resolution used when generating the OU process.
    binSize : float
        bin-size for binning the data and computing the autocorrelation.
    T : float
        trial duration.
    numTrials : float
        number of trials.
    data_mean : float
        target mean of the OU process (average firing rate).
    data_var : float
        target variance of the OU process (firing-rate variance).

    Returns
    -------
    syn_data : nd array
        generated OU process, shape (numTrials, int(T/binSize)).
    numBinData : int
        bins per trial (required downstream for the autocorrelation).
    """
    tau = np.array(theta[0])
    # unit-variance OU: diffusion constant D = 1/tau
    numBinData = len(np.arange(0, T + binSize, binSize)) - 1
    ou_all = OU_gen(tau, 1 / tau, deltaT, T, numTrials)
    # guard against NaNs / numerical blow-up in the generated trace
    peak = np.max(ou_all)
    if not np.isfinite(peak) or peak > 10 ** 10:
        return np.zeros((numTrials, numBinData)), numBinData
    # rescale to the requested mean and variance, then bin
    scaled = np.sqrt(data_var) * ou_all + data_mean
    return binData(scaled, [numTrials, numBinData]) * deltaT, numBinData
def twoTauOU(theta, deltaT, binSize, T, numTrials, data_mean, data_var):
    """Generate a two-timescale OU process as a weighted sum of two OU components.

    Parameters
    ----------
    theta : 1d array
        [timescale1, timescale2, coefficient for timescale1].
    deltaT : float
        temporal resolution used when generating the OU components.
    binSize : float
        bin-size for binning the data and computing the autocorrelation.
    T : float
        trial duration.
    numTrials : float
        number of trials.
    data_mean : float
        target mean of the process (average firing rate).
    data_var : float
        target variance of the process (firing-rate variance).

    Returns
    -------
    syn_data : nd array
        generated process, shape (numTrials, int(T/binSize)).
    numBinData : int
        bins per trial (required downstream for the autocorrelation).
    """
    tau1 = np.array(theta[0])
    tau2 = np.array(theta[1])
    coeff = np.array(theta[2])
    numBinData = len(np.arange(0, T + binSize, binSize)) - 1
    # two unit-variance OU components (diffusion D = 1/tau each)
    ou_comp1 = OU_gen(tau1, 1 / tau1, deltaT, T, numTrials)
    ou_comp2 = OU_gen(tau2, 1 / tau2, deltaT, T, numTrials)
    # variance-preserving mixture of the two components
    ou_all = np.sqrt(coeff) * ou_comp1 + np.sqrt(1 - coeff) * ou_comp2
    # guard against NaNs / numerical blow-up in the generated trace
    peak = np.max(ou_all)
    if not np.isfinite(peak) or peak > 10 ** 10:
        return np.zeros((numTrials, numBinData)), numBinData
    # rescale to the requested mean and variance, then bin
    scaled = np.sqrt(data_var) * ou_all + data_mean
    return binData(scaled, [numTrials, numBinData]) * deltaT, numBinData
def oneTauOU_oscil(theta, deltaT, binSize, T, numTrials, data_mean, data_var):
    """Generate a one-timescale OU process with an additive oscillation.

    Parameters
    ----------
    theta : 1d array
        [timescale of OU, frequency of oscillation, coefficient for OU].
    deltaT : float
        temporal resolution for the OU process generation.
    binSize : float
        bin-size for binning data and computing the autocorrelation.
    T : float
        duration of trials.
    numTrials : float
        number of trials.
    data_mean : float
        mean value of the process (average of firing rate).
    data_var : float
        variance of the process (variance of firing rate).

    Returns
    -------
    syn_data : nd array
        array of binned data (numTrials * int(T/binSize)).
    numBinData : int
        number of bins/samples per trial (required for computing autocorrelation).
    """
    # load params
    tau = np.array(theta[0])
    f = np.array(theta[1])
    coeff = np.array(theta[2])
    # setting params for OU (unit variance: D = 1/tau)
    v = 1
    D = v / tau
    binsData = np.arange(0, T + binSize, binSize)
    binsData_sin = np.arange(0, T, deltaT)
    numBinData = len(binsData) - 1
    # generate OU
    ou_all = OU_gen(tau, D, deltaT, T, numTrials)
    # CONSISTENCY FIX: guard against NaN / numerical blow-up in the OU trace,
    # matching the check every other generative model in this module performs.
    ou_check = np.max(ou_all)
    if not np.isfinite(ou_check) or ou_check > 10 ** 10:  # check for nan values
        return np.zeros((numTrials, numBinData)), numBinData
    # additive oscillation with an independent random phase per trial
    # (the 0.001 factor presumably converts f to the time units used here —
    # TODO confirm against OU_gen's time units)
    time_mat = np.tile(binsData_sin, (numTrials, 1))
    phases = np.random.rand(numTrials, 1) * 2 * np.pi
    oscil = np.sqrt(2) * np.sin(phases + 2 * np.pi * 0.001 * f * time_mat)
    data = np.sqrt(1 - coeff) * oscil + np.sqrt(coeff) * ou_all
    # fit mean and var
    ou_std = np.sqrt(data_var)
    data_meanVar = ou_std * data + data_mean
    # bin rate
    syn_data = binData(data_meanVar, [numTrials, numBinData]) * deltaT
    return syn_data, numBinData
def oneTauOU_twooscil(theta, deltaT, binSize, T, numTrials, data_mean, data_var):
    """Generate a one-timescale OU process with two additive oscillations.

    Parameters
    ----------
    theta : 1d array
        [timescale of OU, frequency of oscillation1, frequency of oscillation2,
        coefficient for oscillation1, coefficient for oscillation2].
    deltaT : float
        temporal resolution for the OU process generation.
    binSize : float
        bin-size for binning data and computing the autocorrelation.
    T : float
        duration of trials.
    numTrials : float
        number of trials.
    data_mean : float
        mean value of the process (average of firing rate).
    data_var : float
        variance of the process (variance of firing rate).

    Returns
    -------
    syn_data : nd array
        array of binned data (numTrials * int(T/binSize)).
    numBinData : int
        number of bins/samples per trial (required for computing autocorrelation).
    """
    # load params
    tau = np.array(theta[0])
    f1 = np.array(theta[1])
    f2 = np.array(theta[2])
    coeff1 = np.array(theta[3])
    coeff2 = np.array(theta[4])
    # setting params for OU (unit variance: D = 1/tau)
    v = 1
    D = v / tau
    binsData = np.arange(0, T + binSize, binSize)
    binsData_sin = np.arange(0, T, deltaT)
    numBinData = len(binsData) - 1
    # generate OU
    ou_all = OU_gen(tau, D, deltaT, T, numTrials)
    # CONSISTENCY FIX: guard against NaN / numerical blow-up in the OU trace,
    # matching the check every other generative model in this module performs.
    ou_check = np.max(ou_all)
    if not np.isfinite(ou_check) or ou_check > 10 ** 10:  # check for nan values
        return np.zeros((numTrials, numBinData)), numBinData
    # each oscillation gets an independent random phase per trial
    time_mat = np.tile(binsData_sin, (numTrials, 1))
    phases = np.random.rand(numTrials, 1) * 2 * np.pi
    oscil1 = np.sqrt(2) * np.sin(phases + 2 * np.pi * 0.001 * f1 * time_mat)
    phases = np.random.rand(numTrials, 1) * 2 * np.pi
    oscil2 = np.sqrt(2) * np.sin(phases + 2 * np.pi * 0.001 * f2 * time_mat)
    # variance-preserving mixture of the two oscillations and the OU component
    data = np.sqrt(coeff1) * oscil1 + np.sqrt(coeff2) * oscil2 + np.sqrt(1 - coeff1 - coeff2) * ou_all
    # fit mean and var
    ou_std = np.sqrt(data_var)
    data_meanVar = ou_std * data + data_mean
    # bin rate
    syn_data = binData(data_meanVar, [numTrials, numBinData]) * deltaT
    return syn_data, numBinData
def oneTauOU_poissonSpikes(theta, deltaT, binSize, T, numTrials, data_mean, data_var):
    """Generate a one-timescale OU rate and draw Poisson-distributed spike counts.

    Parameters
    ----------
    theta : 1d array
        [timescale].
    deltaT : float
        temporal resolution for the OU process generation.
    binSize : float
        bin-size for binning data and computing the autocorrelation.
    T : float
        trial duration.
    numTrials : float
        number of trials.
    data_mean : float
        mean of the spike counts.
    data_var : float
        variance of the spike counts.

    Returns
    -------
    syn_data : nd array
        binned spike counts, shape (numTrials, int(T/binSize)).
    numBinData : int
        bins per trial (required downstream for the autocorrelation).
    """
    tau = np.array(theta[0])
    # Poisson sampling contributes variance equal to the mean, so the latent
    # OU rate only carries the remainder (law of total variance)
    ou_std = np.sqrt(data_var - data_mean)
    numBinData = len(np.arange(0, T + binSize, binSize)) - 1
    # unit-variance OU (diffusion D = 1/tau)
    ou_all = OU_gen(tau, 1 / tau, deltaT, T, numTrials)
    # guard against NaNs / numerical blow-up in the generated trace
    peak = np.max(ou_all)
    if not np.isfinite(peak) or peak > 10 ** 10:
        return np.zeros((numTrials, numBinData)), numBinData
    # rescale, clip negative rates, bin, then sample counts
    rate = ou_std * ou_all + data_mean
    rate[rate < 0] = 0
    rate_sum = binData(rate, [numTrials, numBinData]) * deltaT
    return np.random.poisson(rate_sum), numBinData
def oneTauOU_gammaSpikes(theta, deltaT, binSize, T, numTrials, data_mean, data_var, disp):
    """Generate a one-timescale OU rate and draw Gamma-distributed spike counts.

    The dispersion (Fano factor) of the spike-generation step is assumed known.

    Parameters
    ----------
    theta : 1d array
        [timescale].
    deltaT : float
        temporal resolution for the OU process generation.
    binSize : float
        bin-size for binning data and computing the autocorrelation.
    T : float
        trial duration.
    numTrials : float
        number of trials.
    data_mean : float
        mean of the spike counts.
    data_var : float
        variance of the spike counts.
    disp : float
        dispersion parameter (Fano factor) of the spike-generation function.

    Returns
    -------
    syn_data : nd array
        binned spike counts, shape (numTrials, int(T/binSize)).
    numBinData : int
        bins per trial (required downstream for the autocorrelation).
    """
    tau = np.array(theta[0])
    # spike generation contributes disp * mean of variance, so the latent
    # OU rate only carries the remainder (law of total variance)
    ou_std = np.sqrt(data_var - disp * data_mean)
    numBinData = len(np.arange(0, T + binSize, binSize)) - 1
    # unit-variance OU (diffusion D = 1/tau)
    ou_all = OU_gen(tau, 1 / tau, deltaT, T, numTrials)
    # guard against NaNs / numerical blow-up in the generated trace
    peak = np.max(ou_all)
    if not np.isfinite(peak) or peak > 10 ** 10:
        return np.zeros((numTrials, numBinData)), numBinData
    # rescale, clip negative rates, bin, then sample counts
    rate = ou_std * ou_all + data_mean
    rate[rate < 0] = 0
    rate_sum = binData(rate, [numTrials, numBinData]) * deltaT
    return gamma_sp(rate_sum, disp), numBinData
def oneTauOU_gaussianSpikes(theta, deltaT, binSize, T, numTrials, data_mean, data_var, disp):
    """Generate a one-timescale OU rate and draw Gaussian-distributed spike counts.

    The dispersion (Fano factor) of the spike-generation step is assumed known.

    Parameters
    ----------
    theta : 1d array
        [timescale].
    deltaT : float
        temporal resolution for the OU process generation.
    binSize : float
        bin-size for binning data and computing the autocorrelation.
    T : float
        trial duration.
    numTrials : float
        number of trials.
    data_mean : float
        mean of the spike counts.
    data_var : float
        variance of the spike counts.
    disp : float
        dispersion parameter (Fano factor) of the spike-generation function.

    Returns
    -------
    syn_data : nd array
        binned spike counts, shape (numTrials, int(T/binSize)).
    numBinData : int
        bins per trial (required downstream for the autocorrelation).
    """
    tau = np.array(theta[0])
    # spike generation contributes disp * mean of variance, so the latent
    # OU rate only carries the remainder (law of total variance)
    ou_std = np.sqrt(data_var - disp * data_mean)
    numBinData = len(np.arange(0, T + binSize, binSize)) - 1
    # unit-variance OU (diffusion D = 1/tau)
    ou_all = OU_gen(tau, 1 / tau, deltaT, T, numTrials)
    # guard against NaNs / numerical blow-up in the generated trace
    peak = np.max(ou_all)
    if not np.isfinite(peak) or peak > 10 ** 10:
        return np.zeros((numTrials, numBinData)), numBinData
    # rescale, clip negative rates, bin, then sample counts
    rate = ou_std * ou_all + data_mean
    rate[rate < 0] = 0
    rate_sum = binData(rate, [numTrials, numBinData]) * deltaT
    return gaussian_sp(rate_sum, disp), numBinData
def twoTauOU_poissonSpikes(theta, deltaT, binSize, T, numTrials, data_mean, data_var):
    """Generate a two-timescale OU rate and draw Poisson-distributed spike counts.

    Parameters
    ----------
    theta : 1d array
        [timescale1, timescale2, coefficient for timescale1].
    deltaT : float
        temporal resolution for the OU process generation.
    binSize : float
        bin-size for binning data and computing the autocorrelation.
    T : float
        trial duration.
    numTrials : float
        number of trials.
    data_mean : float
        mean of the spike counts.
    data_var : float
        variance of the spike counts.

    Returns
    -------
    syn_data : nd array
        binned spike counts, shape (numTrials, int(T/binSize)).
    numBinData : int
        bins per trial (required downstream for the autocorrelation).
    """
    tau1 = np.array(theta[0])
    tau2 = np.array(theta[1])
    coeff = np.array(theta[2])
    # Poisson sampling contributes variance equal to the mean, so the latent
    # OU rate only carries the remainder (law of total variance)
    ou_std = np.sqrt(data_var - data_mean)
    numBinData = len(np.arange(0, T + binSize, binSize)) - 1
    # two unit-variance OU components (diffusion D = 1/tau each)
    ou_comp1 = OU_gen(tau1, 1 / tau1, deltaT, T, numTrials)
    ou_comp2 = OU_gen(tau2, 1 / tau2, deltaT, T, numTrials)
    # variance-preserving mixture of the two components
    ou_all = np.sqrt(coeff) * ou_comp1 + np.sqrt(1 - coeff) * ou_comp2
    # guard against NaNs / numerical blow-up in the generated trace
    peak = np.max(ou_all)
    if not np.isfinite(peak) or peak > 10 ** 10:
        return np.zeros((numTrials, numBinData)), numBinData
    # rescale, clip negative rates, bin, then sample counts
    rate = ou_std * ou_all + data_mean
    rate[rate < 0] = 0
    rate_sum = binData(rate, [numTrials, numBinData]) * deltaT
    return np.random.poisson(rate_sum), numBinData
def twoTauOU_gammaSpikes(theta, deltaT, binSize, T, numTrials, data_mean, data_var, disp):
    """Generate a two-timescales process with spike counts sampled from a
    Gamma distribution, assuming the dispersion parameter (Fano factor) of
    the spike-generation function is known.

    Parameters
    ----------
    theta : 1d array
        [timescale1, timescale2, coefficient for timescale1].
    deltaT : float
        temporal resolution for the OU process generation.
    binSize : float
        bin-size for binning data and computing the autocorrelation.
    T : float
        duration of trials.
    numTrials : float
        number of trials.
    data_mean : float
        mean of the spike counts.
    data_var : float
        variance of the spike counts.
    disp : float
        dispersion parameter (Fano factor) of the spike-generation function.

    Returns
    -------
    syn_data : nd array
        array of binned spike-counts (numTrials * int(T/binSize)).
    numBinData : int
        number of bins/samples per trial (required for computing
        autocorrelation).
    """
    # Unpack the two timescales and the mixing coefficient.
    tau_a = np.array(theta[0])
    tau_b = np.array(theta[1])
    mix = np.array(theta[2])

    # Diffusion constants D = variance / tau, with unit variance.
    diff_a = 1 / tau_a
    diff_b = 1 / tau_b

    # Rate std via the law of total variance with dispersion `disp`.
    rate_std = np.sqrt(data_var - disp * data_mean)
    numBinData = len(np.arange(0, T + binSize, binSize)) - 1

    # Generate and mix the two OU processes; the sqrt weights split the
    # variance between them.
    mixed = (np.sqrt(mix) * OU_gen(tau_a, diff_a, deltaT, T, numTrials)
             + np.sqrt(1 - mix) * OU_gen(tau_b, diff_b, deltaT, T, numTrials))

    # Guard against NaN or diverged generation: return an all-zero surrogate.
    peak = np.max(mixed)
    if not np.isfinite(peak) or peak > 10**10:
        return np.zeros((numTrials, numBinData)), numBinData

    # Rescale to the target mean/std and clip negative rates to zero.
    mixed = rate_std * mixed + data_mean
    mixed[mixed < 0] = 0

    # Bin the rate and draw Gamma-distributed spike counts per bin.
    rate_sum = binData(mixed, [numTrials, numBinData]) * deltaT
    return gamma_sp(rate_sum, disp), numBinData
def twoTauOU_gaussianSpikes(theta, deltaT, binSize, T, numTrials, data_mean, data_var, disp):
    """Generate a two-timescales process with spike counts sampled from a
    Gaussian distribution, assuming the dispersion parameter (Fano factor)
    of the spike-generation function is known.

    Parameters
    ----------
    theta : 1d array
        [timescale1, timescale2, coefficient for timescale1].
    deltaT : float
        temporal resolution for the OU process generation.
    binSize : float
        bin-size for binning data and computing the autocorrelation.
    T : float
        duration of trials.
    numTrials : float
        number of trials.
    data_mean : float
        mean of the spike counts.
    data_var : float
        variance of the spike counts.
    disp : float
        dispersion parameter (Fano factor) of the spike-generation function.

    Returns
    -------
    syn_data : nd array
        array of binned spike-counts (numTrials * int(T/binSize)).
    numBinData : int
        number of bins/samples per trial (required for computing
        autocorrelation).
    """
    # Unpack the two timescales and the mixing coefficient.
    tau_a = np.array(theta[0])
    tau_b = np.array(theta[1])
    mix = np.array(theta[2])

    # Diffusion constants D = variance / tau, with unit variance.
    diff_a = 1 / tau_a
    diff_b = 1 / tau_b

    # Rate std via the law of total variance with dispersion `disp`.
    rate_std = np.sqrt(data_var - disp * data_mean)
    numBinData = len(np.arange(0, T + binSize, binSize)) - 1

    # Generate and mix the two OU processes; the sqrt weights split the
    # variance between them.
    mixed = (np.sqrt(mix) * OU_gen(tau_a, diff_a, deltaT, T, numTrials)
             + np.sqrt(1 - mix) * OU_gen(tau_b, diff_b, deltaT, T, numTrials))

    # Guard against NaN or diverged generation: return an all-zero surrogate.
    peak = np.max(mixed)
    if not np.isfinite(peak) or peak > 10**10:
        return np.zeros((numTrials, numBinData)), numBinData

    # Rescale to the target mean/std and clip negative rates to zero.
    mixed = rate_std * mixed + data_mean
    mixed[mixed < 0] = 0

    # Bin the rate and draw Gaussian-distributed spike counts per bin.
    rate_sum = binData(mixed, [numTrials, numBinData]) * deltaT
    return gaussian_sp(rate_sum, disp), numBinData
def twoTauOU_gammaSpikes_withDispersion(theta, deltaT, binSize, T, numTrials, data_mean, data_var):
    """Generate a two-timescales process with spike counts sampled from a
    Gamma distribution. The dispersion parameter (Fano factor) of the
    spike-generation function is fitted with ABC and supplied in `theta`.

    Parameters
    ----------
    theta : 1d array
        [timescale1, timescale2, coefficient for timescale1,
        dispersion_parameter].
    deltaT : float
        temporal resolution for the OU process generation.
    binSize : float
        bin-size for binning data and computing the autocorrelation.
    T : float
        duration of trials.
    numTrials : float
        number of trials.
    data_mean : float
        mean of the spike counts.
    data_var : float
        variance of the spike counts.

    Returns
    -------
    syn_data : nd array
        array of binned spike-counts (numTrials * int(T/binSize)).
    numBinData : int
        number of bins/samples per trial (required for computing
        autocorrelation).
    """
    # Unpack timescales, mixing coefficient and fitted dispersion.
    tau_a = np.array(theta[0])
    tau_b = np.array(theta[1])
    mix = np.array(theta[2])
    disp = np.array(theta[3])

    # Diffusion constants D = variance / tau, with unit variance.
    diff_a = 1 / tau_a
    diff_b = 1 / tau_b

    # Rate std via the law of total variance with dispersion `disp`.
    rate_std = np.sqrt(data_var - disp * data_mean)
    numBinData = len(np.arange(0, T + binSize, binSize)) - 1

    # Generate and mix the two OU processes; the sqrt weights split the
    # variance between them.
    mixed = (np.sqrt(mix) * OU_gen(tau_a, diff_a, deltaT, T, numTrials)
             + np.sqrt(1 - mix) * OU_gen(tau_b, diff_b, deltaT, T, numTrials))

    # Guard against NaN or diverged generation: return an all-zero surrogate.
    peak = np.max(mixed)
    if not np.isfinite(peak) or peak > 10**10:
        return np.zeros((numTrials, numBinData)), numBinData

    # Rescale to the target mean/std and clip negative rates to zero.
    mixed = rate_std * mixed + data_mean
    mixed[mixed < 0] = 0

    # Bin the rate and draw Gamma-distributed spike counts per bin.
    rate_sum = binData(mixed, [numTrials, numBinData]) * deltaT
    return gamma_sp(rate_sum, disp), numBinData
def twoTauOU_gaussianSpikes_withDispersion(theta, deltaT, binSize, T, numTrials, data_mean, data_var):
    """Generate a two-timescales process with spike counts sampled from a
    Gaussian distribution. The dispersion parameter (Fano factor) of the
    spike-generation function is fitted with ABC and supplied in `theta`.

    Parameters
    ----------
    theta : 1d array
        [timescale1, timescale2, coefficient for timescale1,
        dispersion_parameter].
    deltaT : float
        temporal resolution for the OU process generation.
    binSize : float
        bin-size for binning data and computing the autocorrelation.
    T : float
        duration of trials.
    numTrials : float
        number of trials.
    data_mean : float
        mean of the spike counts.
    data_var : float
        variance of the spike counts.

    Returns
    -------
    syn_data : nd array
        array of binned spike-counts (numTrials * int(T/binSize)).
    numBinData : int
        number of bins/samples per trial (required for computing
        autocorrelation).
    """
    # Unpack timescales, mixing coefficient and fitted dispersion.
    tau_a = np.array(theta[0])
    tau_b = np.array(theta[1])
    mix = np.array(theta[2])
    disp = np.array(theta[3])

    # Diffusion constants D = variance / tau, with unit variance.
    diff_a = 1 / tau_a
    diff_b = 1 / tau_b

    # Rate std via the law of total variance with dispersion `disp`.
    rate_std = np.sqrt(data_var - disp * data_mean)
    numBinData = len(np.arange(0, T + binSize, binSize)) - 1

    # Generate and mix the two OU processes; the sqrt weights split the
    # variance between them.
    mixed = (np.sqrt(mix) * OU_gen(tau_a, diff_a, deltaT, T, numTrials)
             + np.sqrt(1 - mix) * OU_gen(tau_b, diff_b, deltaT, T, numTrials))

    # Guard against NaN or diverged generation: return an all-zero surrogate.
    peak = np.max(mixed)
    if not np.isfinite(peak) or peak > 10**10:
        return np.zeros((numTrials, numBinData)), numBinData

    # Rescale to the target mean/std and clip negative rates to zero.
    mixed = rate_std * mixed + data_mean
    mixed[mixed < 0] = 0

    # Bin the rate and draw Gaussian-distributed spike counts per bin.
    rate_sum = binData(mixed, [numTrials, numBinData]) * deltaT
    return gaussian_sp(rate_sum, disp), numBinData
def oneTauOU_oneF(theta, deltaT, binSize, T, numTrials, data_mean, data_var):
    """Generate a one-timescale OU process augmented with an additive 1/f
    spectrum.

    Parameters
    ----------
    theta : 1d array
        [timescale, 1/f exponent, coefficient for timescale].
    deltaT : float
        temporal resolution for the OU process generation.
    binSize : float
        bin-size for binning data and computing the autocorrelation.
    T : float
        duration of trials.
    numTrials : float
        number of trials.
    data_mean : float
        mean of the spike counts.
    data_var : float
        variance of the spike counts.

    Returns
    -------
    syn_data : nd array
        array of binned spike-counts (numTrials * int(T/binSize)).
    numBinData : int
        number of bins/samples per trial (required for computing
        autocorrelation).
    """
    # Unpack timescale, 1/f exponent and mixing coefficient.
    tau = np.array(theta[0])
    expon = np.array(theta[1])
    mix = np.array(theta[2])

    # Frequency grid for the 1/f component: 1..fmax in steps of fmax/fs.
    fs = T / deltaT
    fmax = fs / 2
    deltaF = fmax / fs
    freqs = np.arange(1, fmax + 1, deltaF)

    # Build the 1/f^expon power spectrum, convert it to time series,
    # and z-score each trial so the component has zero mean / unit std.
    onef = psd_to_timeseries(1 / (freqs ** expon), numTrials)
    onef = stats.zscore(onef, axis=1)

    # NOTE: binSize is deliberately overridden with deltaT here, so the
    # output is binned at the OU resolution rather than the caller's
    # binSize (matches the original implementation).
    binSize = deltaT
    D = 1 / tau  # diffusion constant for unit variance
    numBinData = len(np.arange(0, T + binSize, binSize)) - 1

    # Mix OU and 1/f components; the sqrt weights split the variance.
    mixed = (np.sqrt(mix) * OU_gen(tau, D, deltaT, T, numTrials)
             + np.sqrt(1 - mix) * onef)

    # Guard against NaN or diverged generation: return an all-zero surrogate.
    peak = np.max(mixed)
    if not np.isfinite(peak) or peak > 10**10:
        return np.zeros((numTrials, numBinData)), numBinData

    # Rescale to the target mean/std, then bin the rate (no spike sampling).
    mixed = np.sqrt(data_var) * mixed + data_mean
    syn_data = binData(mixed, [numTrials, numBinData]) * deltaT
    return syn_data, numBinData
| 31.228247
| 140
| 0.641294
| 3,397
| 24,764
| 4.569326
| 0.052988
| 0.024804
| 0.027058
| 0.018554
| 0.951939
| 0.945819
| 0.944788
| 0.943564
| 0.943564
| 0.939892
| 0
| 0.01721
| 0.265587
| 24,764
| 793
| 141
| 31.228247
| 0.836257
| 0.481425
| 0
| 0.859375
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.050781
| false
| 0
| 0.011719
| 0
| 0.15625
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
437de8194be125486716e14125b8fe13d8674e5c
| 1,923
|
py
|
Python
|
modules/complicated_wires/test_solver.py
|
Ao-Re/keep-typing-and-nobody-explodes
|
deff70c384b3271315acd49bcbfd62c05ed9a7ce
|
[
"MIT"
] | null | null | null |
modules/complicated_wires/test_solver.py
|
Ao-Re/keep-typing-and-nobody-explodes
|
deff70c384b3271315acd49bcbfd62c05ed9a7ce
|
[
"MIT"
] | null | null | null |
modules/complicated_wires/test_solver.py
|
Ao-Re/keep-typing-and-nobody-explodes
|
deff70c384b3271315acd49bcbfd62c05ed9a7ce
|
[
"MIT"
] | 2
|
2020-10-04T17:04:31.000Z
|
2020-10-20T16:59:50.000Z
|
import unittest
from .solver import solve_complicated_wire_letter
from .solver import solve_complicated_wire
class TestComplicatedWires(unittest.TestCase):
    """Truth-table test for the complicated-wires solver."""

    def test_cut(self):
        # Each case is (solver arguments, expected verdict). The tuples are
        # copied verbatim from the module's truth table: six booleans plus
        # one integer (argument semantics live in solver.py — not visible
        # here, so they are not named).
        cases = [
            ((True, True, True, True, False, False, 0), "Do not cut the wire"),
            ((True, True, True, False, True, False, 0), "Cut the wire"),
            ((True, True, False, True, False, True, 0), "Cut the wire"),
            ((True, True, False, False, False, True, 0), "Cut the wire"),
            ((True, False, True, True, False, False, 2), "Cut the wire"),
            ((True, False, True, False, False, False, 0), "Cut the wire"),
            ((True, False, False, True, False, False, 2), "Cut the wire"),
            ((True, False, False, False, False, True, 0), "Cut the wire"),
            ((False, True, True, True, True, False, 0), "Cut the wire"),
            ((False, True, True, False, False, False, 0), "Do not cut the wire"),
            ((False, True, False, True, True, False, 0), "Cut the wire"),
            ((False, True, False, False, False, True, 0), "Cut the wire"),
            ((False, False, True, True, False, False, 2), "Cut the wire"),
            ((False, False, True, False, False, False, 0), "Cut the wire"),
            ((False, False, False, True, False, False, 0), "Do not cut the wire"),
            ((False, False, False, False, False, False, 0), "Cut the wire"),
        ]
        for args, expected in cases:
            self.assertEqual(solve_complicated_wire(*args), expected)
| 83.608696
| 115
| 0.716069
| 269
| 1,923
| 4.977695
| 0.085502
| 0.209111
| 0.268857
| 0.370426
| 0.943988
| 0.933532
| 0.879761
| 0.860344
| 0.822255
| 0.822255
| 0
| 0.009938
| 0.162767
| 1,923
| 22
| 116
| 87.409091
| 0.821739
| 0
| 0
| 0
| 0
| 0
| 0.110764
| 0
| 0
| 0
| 0
| 0
| 0.761905
| 1
| 0.047619
| false
| 0
| 0.142857
| 0
| 0.238095
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 10
|
438140f0fef4d4313612918e50759a21309e5269
| 3,889
|
py
|
Python
|
locust-server/scripts/input_data.py
|
0shimax/load-testing-of-api-server
|
207db5ff9ce0c561eda8e1cf2a69f84baa2c1391
|
[
"MIT"
] | null | null | null |
locust-server/scripts/input_data.py
|
0shimax/load-testing-of-api-server
|
207db5ff9ce0c561eda8e1cf2a69f84baa2c1391
|
[
"MIT"
] | null | null | null |
locust-server/scripts/input_data.py
|
0shimax/load-testing-of-api-server
|
207db5ff9ce0c561eda8e1cf2a69f84baa2c1391
|
[
"MIT"
] | null | null | null |
def _make_record(engines, passenger_capacity, crew, review_scores_rating,
                 d_check_complete, iata_approved):
    """Build one load-test payload with the fixed fields filled in."""
    return {
        "float_features": {
            "engines": engines,
            "passenger_capacity": passenger_capacity,
            "crew": crew,
            "company_rating": 1,
            "review_scores_rating": review_scores_rating,
        },
        "categorical_features": {
            "d_check_complete": d_check_complete,
            "moon_clearance_complete": "False",
            "iata_approved": iata_approved,
        },
    }


# Ten payloads: five copies each of the two distinct records, alternating,
# exactly as in the original literal list. Each call creates fresh dict
# objects, so entries remain independently mutable.
test_data = []
for _ in range(5):
    test_data.append(_make_record(2, 4, 3, 96, "False", "True"))
    test_data.append(_make_record(4, 8, 5, 100, "True", "False"))
| 24.006173
| 47
| 0.438159
| 292
| 3,889
| 5.455479
| 0.10274
| 0.122411
| 0.125549
| 0.125549
| 0.994978
| 0.994978
| 0.994978
| 0.994978
| 0.994978
| 0.994978
| 0
| 0.029465
| 0.432759
| 3,889
| 162
| 48
| 24.006173
| 0.692656
| 0
| 0
| 0.617284
| 0
| 0
| 0.419023
| 0.059126
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0.061728
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
|
0
| 7
|
4382d457ad0260468ad6651ad1b7952edb0191c7
| 1,936
|
py
|
Python
|
aerosandbox/performance/test_performance/test_operating_point_convert_axes_specific_rotations.py
|
raihaan123/AeroSandbox
|
1e7c78f04b066415f671237a4833ba98901bb9ec
|
[
"MIT"
] | 1
|
2021-11-01T22:48:12.000Z
|
2021-11-01T22:48:12.000Z
|
aerosandbox/performance/test_performance/test_operating_point_convert_axes_specific_rotations.py
|
raihaan123/AeroSandbox
|
1e7c78f04b066415f671237a4833ba98901bb9ec
|
[
"MIT"
] | null | null | null |
aerosandbox/performance/test_performance/test_operating_point_convert_axes_specific_rotations.py
|
raihaan123/AeroSandbox
|
1e7c78f04b066415f671237a4833ba98901bb9ec
|
[
"MIT"
] | null | null | null |
import aerosandbox as asb
import aerosandbox.numpy as np
from typing import List
import copy
import pytest
def test_alpha_wind():
    """geometry -> wind at alpha=90: the z unit vector should map to -x."""
    op_point = asb.OperatingPoint(alpha=90, beta=0)
    x, y, z = op_point.convert_axes(0, 0, 1, "geometry", "wind")
    assert x == pytest.approx(-1)
    assert y == pytest.approx(0)
    assert z == pytest.approx(0)
def test_beta_wind():
    """geometry -> wind at beta=90: the y unit vector should map to +x."""
    op_point = asb.OperatingPoint(alpha=0, beta=90)
    x, y, z = op_point.convert_axes(0, 1, 0, "geometry", "wind")
    assert x == pytest.approx(1)
    assert y == pytest.approx(0)
    assert z == pytest.approx(0)
def test_beta_wind_body():
    """body -> wind at beta=90: the y unit vector should map to +x."""
    op_point = asb.OperatingPoint(alpha=0, beta=90)
    x, y, z = op_point.convert_axes(0, 1, 0, "body", "wind")
    assert x == pytest.approx(1)
    assert y == pytest.approx(0)
    assert z == pytest.approx(0)
def test_alpha_stability_body():
    """body -> stability at alpha=90: the z unit vector should map to +x."""
    op_point = asb.OperatingPoint(alpha=90, beta=0)
    x, y, z = op_point.convert_axes(0, 0, 1, "body", "stability")
    assert x == pytest.approx(1)
    assert y == pytest.approx(0)
    assert z == pytest.approx(0)
def test_beta_stability_body():
    """body -> stability at beta=90: the y unit vector should be unchanged."""
    op_point = asb.OperatingPoint(alpha=0, beta=90)
    x, y, z = op_point.convert_axes(0, 1, 0, "body", "stability")
    assert x == pytest.approx(0)
    assert y == pytest.approx(1)
    assert z == pytest.approx(0)
def test_order_wind_body():
    """body -> wind with alpha=90 AND beta=90: checks the rotation order,
    the y unit vector should map to +x."""
    op_point = asb.OperatingPoint(alpha=90, beta=90)
    x, y, z = op_point.convert_axes(0, 1, 0, "body", "wind")
    assert x == pytest.approx(1)
    assert y == pytest.approx(0)
    assert z == pytest.approx(0)
# Allow running this test module directly, outside of a pytest invocation.
if __name__ == '__main__':
    pytest.main()
| 19.36
| 36
| 0.546488
| 260
| 1,936
| 3.907692
| 0.138462
| 0.212598
| 0.153543
| 0.141732
| 0.849409
| 0.849409
| 0.805118
| 0.75689
| 0.718504
| 0.718504
| 0
| 0.04221
| 0.326963
| 1,936
| 99
| 37
| 19.555556
| 0.737529
| 0
| 0
| 0.658824
| 0
| 0
| 0.038223
| 0
| 0
| 0
| 0
| 0
| 0.211765
| 1
| 0.070588
| false
| 0
| 0.058824
| 0
| 0.129412
| 0
| 0
| 0
| 0
| null | 1
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
43b0cad1808544d06499007ea03c193d96d0c3dc
| 159
|
py
|
Python
|
src/ec2_spot_price/__init__.py
|
susumuota/ec2-spot-price
|
398e628aa6dfc0d080837a5a52cc097a6c1b5918
|
[
"Apache-2.0"
] | null | null | null |
src/ec2_spot_price/__init__.py
|
susumuota/ec2-spot-price
|
398e628aa6dfc0d080837a5a52cc097a6c1b5918
|
[
"Apache-2.0"
] | null | null | null |
src/ec2_spot_price/__init__.py
|
susumuota/ec2-spot-price
|
398e628aa6dfc0d080837a5a52cc097a6c1b5918
|
[
"Apache-2.0"
] | null | null | null |
# Re-export the package version and the public API from the implementation
# module so users can do `from ec2_spot_price import spot_prices`.
from .ec2_spot_price import __version__
from .ec2_spot_price import spot_prices, print_csv, print_table

# Names exported by `from ec2_spot_price import *`.
__all__ = ['spot_prices', 'print_csv', 'print_table']
| 31.8
| 63
| 0.805031
| 24
| 159
| 4.583333
| 0.458333
| 0.127273
| 0.2
| 0.290909
| 0.909091
| 0.509091
| 0
| 0
| 0
| 0
| 0
| 0.013986
| 0.100629
| 159
| 4
| 64
| 39.75
| 0.755245
| 0
| 0
| 0
| 0
| 0
| 0.194969
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.666667
| 0
| 0.666667
| 0.666667
| 1
| 0
| 0
| null | 0
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 1
|
0
| 8
|
43b2bcf3d3f55029308b5381d41a5de250c3f8ce
| 5,469
|
py
|
Python
|
proxydownloader.py
|
DiabloAkar/proxydownloader
|
f1a12a78537368a5988a7d00f264ed6ddd96ae09
|
[
"MIT"
] | 1
|
2021-08-16T23:18:22.000Z
|
2021-08-16T23:18:22.000Z
|
proxydownloader.py
|
DiabloAkar/proxydownloader
|
f1a12a78537368a5988a7d00f264ed6ddd96ae09
|
[
"MIT"
] | null | null | null |
proxydownloader.py
|
DiabloAkar/proxydownloader
|
f1a12a78537368a5988a7d00f264ed6ddd96ae09
|
[
"MIT"
] | null | null | null |
import subprocess
# NOTE(review): importing colorama through pip's private vendored copy is
# fragile; prefer `from colorama import Fore` if colorama is installed.
from pip._vendor.colorama import Fore
import threading
import time
import os

# Open the author's Discord/GitHub pages via the Windows `start` command.
subprocess.call('start https://discord.gg/DDcXf472BF', shell=True)
subprocess.call('start https://github.com/DiabloAkar', shell=True)

# ASCII-art banner (reproduced verbatim).
bammer = '''
████████▄ ▄█ ▄████████ ▀█████████▄ ▄█ ▄██████▄
███ ▀███ ███ ███ ███ ███ ███ ███ ███ ███
███ ███ ███▌ ███ ███ ███ ███ ███ ███ ███
███ ███ ███▌ ███ ███ ▄███▄▄▄██▀ ███ ███ ███
███ ███ ███▌ ▀███████████ ▀▀███▀▀▀██▄ ███ ███ ███
███ ███ ███ ███ ███ ███ ██▄ ███ ███ ███
███ ▄███ ███ ███ ███ ███ ███ ███▌ ▄ ███ ███
████████▀ █▀ ███ █▀ ▄█████████▀ █████▄▄██ ▀██████▀
▀
'''


def _menu_row(num, label):
    """Render one '|n| > | label|' menu row with the original colouring."""
    return (Fore.WHITE + '|' + Fore.GREEN + num + Fore.WHITE + '| > '
            + Fore.WHITE + '|' + Fore.GREEN + ' ' + label + Fore.WHITE + '|')


def _prompt(text):
    """Render a coloured '|text| > ' prompt and return the user's input."""
    return input(Fore.WHITE + '|' + Fore.GREEN + text + Fore.WHITE + '| > ')


print()
print(Fore.RED + bammer)
print(_menu_row('1', 'Http'))
print(_menu_row('2', 'Socks4'))
print(_menu_row('3', 'Socks5'))
print()
site = _prompt('Proxy Tür Seçimini Yap')

# BUGFIX: the branches below were independent `if` statements, so the
# trailing `else` bound only to `site == '3'` and the "invalid input"
# countdown also ran after a valid choice of '1' or '2'. An elif chain
# fixes that. The 4th menu row was also mislabeled '3' (the handler
# checks '4'); the label is corrected to '4'.
if site == '1':
    print()
    print(_menu_row('1', 'Tümü'))
    print(_menu_row('2', 'Elit'))
    print(_menu_row('3', 'Anonim'))
    print(_menu_row('4', 'Seffaf'))
    print()
    Anonimlik = _prompt('Proxy Özellik Seçimini Yap')
    if Anonimlik == '1':
        os.system("start https://api.proxyscrape.com/v2/?request=getproxies&protocol=http&timeout=5000&country=all&ssl=all&anonymity=all&simplified=true")
    elif Anonimlik == '2':
        os.system("start https://api.proxyscrape.com/v2/?request=getproxies&protocol=http&timeout=5000&country=all&ssl=all&anonymity=elite&simplified=true")
    elif Anonimlik == '3':
        os.system("start https://api.proxyscrape.com/v2/?request=getproxies&protocol=http&timeout=5000&country=all&ssl=all&anonymity=anonymous&simplified=true")
    elif Anonimlik == '4':
        os.system("start https://api.proxyscrape.com/v2/?request=getproxies&protocol=http&timeout=5000&country=all&ssl=all&anonymity=transparent&simplified=true")
elif site == '2':
    print()
    print(_menu_row('1', 'Tümü'))
    print(_menu_row('2', 'Turkey'))
    print(_menu_row('3', 'United State of America'))
    print(_menu_row('4', 'GER'))
    print()
    Ulke = _prompt('Ülke Seçimini Yap')
    if Ulke == '1':
        # NOTE(review): this URL requests protocol=http, not socks4 —
        # looks like a copy-paste slip in the original; kept as-is to
        # preserve behavior, confirm intent before changing.
        os.system("start https://api.proxyscrape.com/v2/?request=getproxies&protocol=http&timeout=5000&country=all&ssl=all&anonymity=all&simplified=true")
    elif Ulke == '2':
        os.system("start https://api.proxyscrape.com/v2/?request=getproxies&protocol=socks4&timeout=5000&country=TR&simplified=true")
    elif Ulke == '3':
        os.system("start https://api.proxyscrape.com/v2/?request=getproxies&protocol=socks4&timeout=5000&country=US&simplified=true")
    elif Ulke == '4':
        os.system("start https://api.proxyscrape.com/v2/?request=getproxies&protocol=socks4&timeout=5000&country=ger&simplified=true")
elif site == '3':
    print()
    print(_menu_row('1', 'Tümü'))
    print(_menu_row('2', 'Turkey'))
    print(_menu_row('3', 'United State of America'))
    print(_menu_row('4', 'GER'))
    print()
    Ulke = _prompt('Ülke Seçimini Yap')
    if Ulke == '1':
        os.system("start https://api.proxyscrape.com/v2/?request=getproxies&protocol=socks5&timeout=5000&country=all&simplified=true")
    elif Ulke == '2':
        os.system("start https://api.proxyscrape.com/v2/?request=getproxies&protocol=socks5&timeout=5000&country=TR&simplified=true")
    elif Ulke == '3':
        os.system("start https://api.proxyscrape.com/v2/?request=getproxies&protocol=socks5&timeout=5000&country=US&simplified=true")
    elif Ulke == '4':
        os.system("start https://api.proxyscrape.com/v2/?request=getproxies&protocol=socks5&timeout=5000&country=ger&simplified=true")
else:
    # Invalid choice: count down five seconds, then announce shutdown.
    print('Yanlış tuşlama yaptınız program kendisini 5 saniye sonra kapatacaktır!')
    for saniye in range(1, 6):
        time.sleep(1)
        print(str(saniye))
    time.sleep(1)
    print(Fore.RED + 'PROGRAM KENDİNİ KAPATIYOR')
    time.sleep(3)
| 52.085714
| 163
| 0.577802
| 730
| 5,469
| 4.724658
| 0.149315
| 0.177443
| 0.184691
| 0.177443
| 0.764859
| 0.752102
| 0.71499
| 0.71499
| 0.71499
| 0.706292
| 0
| 0.025353
| 0.170598
| 5,469
| 104
| 164
| 52.586538
| 0.671076
| 0.005668
| 0
| 0.397727
| 0
| 0.136364
| 0.483399
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.056818
| 0
| 0.056818
| 0.352273
| 0
| 0
| 0
| null | 0
| 1
| 1
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
78f0aae3eae31cc227a3c680c0f5bb1ebf194db1
| 115,967
|
py
|
Python
|
tests/unit_test_external.py
|
lionel42/pysd
|
5405b22f448ffff0945f1254a8a0a40ad3914402
|
[
"MIT"
] | null | null | null |
tests/unit_test_external.py
|
lionel42/pysd
|
5405b22f448ffff0945f1254a8a0a40ad3914402
|
[
"MIT"
] | null | null | null |
tests/unit_test_external.py
|
lionel42/pysd
|
5405b22f448ffff0945f1254a8a0a40ad3914402
|
[
"MIT"
] | null | null | null |
import sys
import unittest
import warnings
from pathlib import Path
from importlib.machinery import SourceFileLoader
import numpy as np
import xarray as xr
# Directory containing this test file; used to resolve test-data paths.
_root = Path(__file__).parent

# Load tests/data/expected_data.py as a module holding the expected results.
_exp = SourceFileLoader(
    'expected_data',
    str(_root.joinpath('data/expected_data.py'))
).load_module()
class TestExcels(unittest.TestCase):
    """
    Tests for Excels class
    """

    def test_read_clean(self):
        """
        Test for reading files with pandas
        """
        from pysd.py_backend.external import Excels

        file_name = _root.joinpath("data/input.xlsx")
        sheet_name = "Vertical"
        sheet_name2 = "Horizontal"

        # reading a file returns the sheet content as an array
        excel = Excels.read(file_name, sheet_name)
        self.assertTrue(isinstance(excel, np.ndarray))

        # check if it is in the dictionary
        # (read file/sheet pairs are cached in the class-level _Excels dict)
        self.assertTrue(file_name.joinpath(sheet_name) in
                        list(Excels._Excels))

        Excels.read(file_name, sheet_name2)
        self.assertTrue(file_name.joinpath(sheet_name2) in
                        list(Excels._Excels))

        # clean must empty the cache
        Excels.clean()
        self.assertEqual(list(Excels._Excels),
                         [])

    def test_read_clean_opyxl(self):
        """
        Test for reading files with openpyxl
        """
        from pysd.py_backend.external import Excels
        from openpyxl import Workbook

        file_name = _root.joinpath("data/input.xlsx")

        # reading a file returns an openpyxl Workbook
        excel = Excels.read_opyxl(file_name)
        self.assertTrue(isinstance(excel, Workbook))

        # check if it is in the dictionary; re-reading must not duplicate
        # the cache entry
        self.assertEqual(list(Excels._Excels_opyxl),
                         [file_name])

        Excels.read_opyxl(file_name)
        self.assertEqual(list(Excels._Excels_opyxl),
                         [file_name])

        # clean must empty the cache
        Excels.clean()
        self.assertEqual(list(Excels._Excels_opyxl),
                         [])

    @unittest.skipIf(sys.platform.startswith("win"), "not working on Windows")
    def test_close_file(self):
        """
        Test for checking if excel files were closed
        """
        from pysd.py_backend.external import Excels
        import psutil

        p = psutil.Process()

        # number of files already open before the test touches anything
        n_files = len(p.open_files())

        file_name = _root.joinpath("data/input.xlsx")
        sheet_name = "Vertical"
        sheet_name2 = "Horizontal"

        # reading files must raise the open-file count
        Excels.read(file_name, sheet_name)
        Excels.read(file_name, sheet_name2)
        Excels.read_opyxl(file_name)
        self.assertGreater(len(p.open_files()), n_files)

        # clean must close everything again, restoring the original count
        Excels.clean()
        self.assertEqual(len(p.open_files()), n_files)
class TestExternalMethods(unittest.TestCase):
    """
    Test for simple methods of External
    """

    def test_col_to_num(self):
        """
        External._num_to_col and External._col_to_num test
        """
        from pysd.py_backend.external import External

        col_to_num = External._col_to_num

        # Check col_to_num: letters map to 0-based column indices,
        # case-insensitively, and multi-letter columns follow "Z" < "AA".
        self.assertEqual(col_to_num("A"), 0)
        self.assertEqual(col_to_num("Z"), 25)
        self.assertEqual(col_to_num("a"), col_to_num("B")-1)
        self.assertEqual(col_to_num("Z"), col_to_num("aa")-1)
        self.assertEqual(col_to_num("Zz"), col_to_num("AaA")-1)

    def test_split_excel_cell(self):
        """
        External._split_excel_cell test
        """
        from pysd.py_backend.external import External

        ext = External('external')

        # No cells, function must return nothing (falsy) for non-cell strings
        nocells = ["A2A", "H0", "0", "5A", "A_1", "ZZZZ1", "A"]
        for nocell in nocells:
            self.assertFalse(ext._split_excel_cell(nocell))

        # Cells: expected (row, col) pairs, 0-based column / 0-based row
        # offsets as encoded in the expected tuples below
        cells = [(1, 0, "A2"), (573, 7, "h574"),
                 (1, 572, "Va2"), (1, 728, "ABA2")]
        for row, col, cell in cells:
            self.assertEqual((row, col), ext._split_excel_cell(cell))

    def test_reshape(self):
        """
        External._reshape test
        """
        from pysd.py_backend.external import External
        import pandas as pd

        reshape = External._reshape

        data1d = np.array([2, 3, 5, 6])
        data2d = np.array([[2, 3, 5, 6],
                           [1, 7, 5, 8]])
        series1d = pd.Series(data1d)
        df2d = pd.DataFrame(data2d)

        # target shapes that preserve the element count of each input
        shapes1d = [(4,), (4, 1, 1), (1, 1, 4), (1, 4, 1)]
        shapes2d = [(2, 4), (2, 4, 1), (1, 2, 4), (2, 1, 4)]

        # reshape must honor the requested shape for arrays and for
        # pandas objects alike
        for shape_i in shapes1d:
            self.assertEqual(reshape(data1d, shape_i).shape, shape_i)
            self.assertEqual(reshape(series1d, shape_i).shape, shape_i)

        for shape_i in shapes2d:
            self.assertEqual(reshape(data2d, shape_i).shape, shape_i)
            self.assertEqual(reshape(df2d, shape_i).shape, shape_i)

    def test_series_selector(self):
        """
        External._series_selector test
        """
        from pysd.py_backend.external import External

        ext = External('external')

        # row selector: numeric row label plus a cell reference
        self.assertEqual(ext._series_selector("12", "A5"), "row")

        # column selector: column letters plus a cell reference
        self.assertEqual(ext._series_selector("A", "a44"), "column")
        self.assertEqual(ext._series_selector("A", "AC44"), "column")
        self.assertEqual(ext._series_selector("A", "Bae2"), "column")

        # name selector: anything that is not a valid row/column pattern
        self.assertEqual(ext._series_selector("Att", "a44b"), "name")
        self.assertEqual(ext._series_selector("Adfs", "a0"), "name")
        self.assertEqual(ext._series_selector("Ae_23", "aa_44"), "name")
        self.assertEqual(ext._series_selector("Aeee3", "3a"), "name")
        self.assertEqual(ext._series_selector("Aeee", "aajh2"), "name")

    def test_fill_missing(self):
        from pysd.py_backend.external import External

        # simple casses are tested with 1 dimensional data
        # 1 and 2 dimensional data is tested with test-models
        ext = External("external")

        series = np.arange(12)
        data = np.array([np.nan, np.nan, 1., 3., np.nan, 4.,
                         np.nan, np.nan, 7., 8., np.nan, np.nan])

        # expected results for each interpolation mode (NaNs filled
        # in place from the neighbouring valid samples)
        hold_back = np.array([1., 1., 1., 3., 3., 4.,
                              4., 4., 7., 8., 8., 8.])
        look_for = np.array([1., 1., 1., 3., 4., 4.,
                             7., 7., 7., 8., 8., 8.])
        interp = np.array([1., 1., 1., 3., 3.5, 4.,
                           5., 6., 7., 8., 8., 8.])

        # _fill_missing mutates the data array in place, so a fresh copy
        # is used for each mode
        ext.interp = "hold backward"
        datac = data.copy()
        ext._fill_missing(series, datac)
        self.assertTrue(np.all(hold_back == datac))

        ext.interp = "look forward"
        datac = data.copy()
        ext._fill_missing(series, datac)
        self.assertTrue(np.all(look_for == datac))

        ext.interp = "interpolate"
        datac = data.copy()
        ext._fill_missing(series, datac)
        self.assertTrue(np.all(interp == datac))

    def test_resolve_file(self):
        """
        External._resolve_file
        """
        from pysd.py_backend.external import External

        root = Path(__file__).parent

        # a relative file is resolved against the given root
        ext = External('external')
        ext.file = 'data/input.xlsx'
        ext._resolve_file(root=root)
        self.assertEqual(ext.file, root.joinpath('data/input.xlsx'))

        root = root.joinpath('data')
        ext.file = 'input.xlsx'
        ext._resolve_file(root=root)
        self.assertEqual(ext.file, root.joinpath('input.xlsx'))

        # a missing file raises FileNotFoundError with the resolved path
        ext.file = 'input2.xlsx'
        with self.assertRaises(FileNotFoundError) as err:
            ext._resolve_file(root=root)
        self.assertIn(
            "File '%s' not found." % root.joinpath('input2.xlsx'),
            str(err.exception))

        # TODO in the future we may add an option to include indirect
        # references with ?. By the moment an error is raised
        ext.file = '?input.xlsx'
        with self.assertRaises(ValueError) as err:
            ext._resolve_file(root=root)
        self.assertIn(
            "Indirect reference to file: ?input.xlsx",
            str(err.exception))
class TestData(unittest.TestCase):
    """
    Test for the full working procedure of ExtData
    class when the data is properly given in the Excel file
    For 1D data all cases are computed.
    For 2D, 3D only some cases are computed as the complete set of
    test will cover all the possibilities.
    """

    def _ext_data(self, sheet, time_row_or_col, cell, coords, interp,
                  py_name):
        """Build (without initializing) an ExtData over data/input.xlsx."""
        import pysd
        return pysd.external.ExtData(file_name="data/input.xlsx",
                                     sheet=sheet,
                                     time_row_or_col=time_row_or_col,
                                     root=_root,
                                     cell=cell,
                                     coords=coords,
                                     interp=interp,
                                     py_name=py_name)

    def _assert_1d(self, data, expected):
        """Compare scalar outputs of `data` with `expected` at _exp.xpts."""
        with warnings.catch_warnings():
            warnings.simplefilter("ignore")
            for x, y in zip(_exp.xpts, expected):
                self.assertEqual(y, data(x), "Wrong result at X=" + str(x))

    def _assert_nd(self, data, expected):
        """Compare xarray outputs of `data` with `expected` at _exp.xpts."""
        with warnings.catch_warnings():
            warnings.simplefilter("ignore")
            for x, y in zip(_exp.xpts, expected):
                self.assertTrue(y.equals(data(x)),
                                "Wrong result at X=" + str(x))

    # The first two tests use length-1 series: only the retrieved data is
    # checked as the interpolation result will be constant

    def test_data_interp_h1d_1(self):
        """
        ExtData test for 1d horizontal series interpolation with len 1
        """
        data = self._ext_data("Horizontal", "16", "B17", {}, None,
                              "test_data_interp_h1d_1")
        data.initialize()
        # test the __str__ method
        print(data)
        expected = xr.DataArray([5], {'time': [4]}, ['time'])
        self.assertTrue(data.data.equals(expected))

    def test_data_interp_hn1d_1(self):
        """
        ExtData test for 1d horizontal series interpolation with len 1
        by cellrange names
        """
        # py_name fixed: it was copy-pasted from test_data_interp_h1d_1
        data = self._ext_data("Horizontal", "time_1", "data_1", {}, None,
                              "test_data_interp_hn1d_1")
        data.initialize()
        expected = xr.DataArray([5], {'time': [4]}, ['time'])
        self.assertTrue(data.data.equals(expected))

    def test_data_interp_h1d(self):
        """
        ExtData test for 1d horizontal series interpolation
        """
        data = self._ext_data("Horizontal", "4", "C5", {}, None,
                              "test_data_interp_h1d")
        data.initialize()
        self._assert_1d(data, _exp.interp_1d)

    def test_data_interp_v1d(self):
        """
        ExtData test for 1d vertical series interpolation
        """
        data = self._ext_data("Vertical", "B", "C5", {}, None,
                              "test_data_interp_v1d")
        data.initialize()
        self._assert_1d(data, _exp.interp_1d)

    def test_data_interp_hn1d(self):
        """
        ExtData test for 1d horizontal series interpolation by cellrange
        names
        """
        # py_name fixed: it was misspelled "test_data_interp_h1nd"
        data = self._ext_data("Horizontal", "time", "data_1d", {}, None,
                              "test_data_interp_hn1d")
        data.initialize()
        self._assert_1d(data, _exp.interp_1d)

    def test_data_interp_vn1d(self):
        """
        ExtData test for 1d vertical series interpolation by cellrange names
        """
        data = self._ext_data("Vertical", "time", "data_1d", {}, None,
                              "test_data_interp_vn1d")
        data.initialize()
        self._assert_1d(data, _exp.interp_1d)

    def test_data_forward_h1d(self):
        """
        ExtData test for 1d horizontal series look forward
        """
        data = self._ext_data("Horizontal", "4", "C5", {}, "look forward",
                              "test_data_forward_h1d")
        data.initialize()
        self._assert_1d(data, _exp.forward_1d)

    def test_data_forward_v1d(self):
        """
        ExtData test for 1d vertical series look forward
        """
        data = self._ext_data("Vertical", "B", "C5", {}, "look forward",
                              "test_data_forward_v1d")
        data.initialize()
        self._assert_1d(data, _exp.forward_1d)

    def test_data_forward_hn1d(self):
        """
        ExtData test for 1d horizontal series look forward by cell range
        names
        """
        data = self._ext_data("Horizontal", "time", "data_1d", {},
                              "look forward", "test_data_forward_hn1d")
        data.initialize()
        self._assert_1d(data, _exp.forward_1d)

    def test_data_forward_vn1d(self):
        """
        ExtData test for 1d vertical series look forward by cell range names
        """
        data = self._ext_data("Vertical", "time", "data_1d", {},
                              "look forward", "test_data_forward_vn1d")
        data.initialize()
        self._assert_1d(data, _exp.forward_1d)

    def test_data_backward_h1d(self):
        """
        ExtData test for 1d horizontal series hold backward
        """
        data = self._ext_data("Horizontal", "4", "C5", {}, "hold backward",
                              "test_data_backward_h1d")
        data.initialize()
        self._assert_1d(data, _exp.backward_1d)

    def test_data_backward_v1d(self):
        """
        ExtData test for 1d vertical series hold backward
        """
        data = self._ext_data("Vertical", "B", "C5", {}, "hold backward",
                              "test_data_backward_v1d")
        data.initialize()
        self._assert_1d(data, _exp.backward_1d)

    def test_data_backward_hn1d(self):
        """
        ExtData test for 1d horizontal series hold backward by cell range
        names
        """
        data = self._ext_data("Horizontal", "time", "data_1d", {},
                              "hold backward", "test_data_backward_hn1d")
        data.initialize()
        self._assert_1d(data, _exp.backward_1d)

    def test_data_backward_vn1d(self):
        """
        ExtData test for 1d vertical series hold backward by cell range
        names
        """
        data = self._ext_data("Vertical", "time", "data_1d", {},
                              "hold backward", "test_data_backward_vn1d")
        data.initialize()
        self._assert_1d(data, _exp.backward_1d)

    def test_data_interp_vn2d(self):
        """
        ExtData test for 2d vertical series interpolation by cell range
        names
        """
        data = self._ext_data("Vertical", "time", "data_2d",
                              {'ABC': ['A', 'B', 'C']}, None,
                              "test_data_interp_vn2d")
        data.initialize()
        self._assert_nd(data, _exp.interp_2d)

    def test_data_forward_hn2d(self):
        """
        ExtData test for 2d horizontal series look forward by cell range
        names
        """
        # docstring fixed: this test reads the Horizontal sheet
        data = self._ext_data("Horizontal", "time", "data_2d",
                              {'ABC': ['A', 'B', 'C']}, "look forward",
                              "test_data_forward_hn2d")
        data.initialize()
        self._assert_nd(data, _exp.forward_2d)

    def test_data_backward_v2d(self):
        """
        ExtData test for 2d vertical series hold backward
        """
        data = self._ext_data("Vertical", "B", "C5",
                              {'ABC': ['A', 'B', 'C']}, "hold backward",
                              "test_data_backward_v2d")
        data.initialize()
        self._assert_nd(data, _exp.backward_2d)

    def test_data_interp_h3d(self):
        """
        ExtData test for 3d horizontal series interpolation
        """
        data = self._ext_data("Horizontal", "4", "C5",
                              {'XY': ['X'], 'ABC': ['A', 'B', 'C']}, None,
                              "test_data_interp_h3d")
        data.add(file_name="data/input.xlsx",
                 sheet="Horizontal",
                 time_row_or_col="4",
                 cell="C8",
                 coords={'XY': ['Y'], 'ABC': ['A', 'B', 'C']},
                 interp=None)
        data.initialize()
        self._assert_nd(data, _exp.interp_3d)

    def test_data_forward_v3d(self):
        """
        ExtData test for 3d vertical series look forward
        """
        data = self._ext_data("Vertical", "B", "C5",
                              {'XY': ['X'], 'ABC': ['A', 'B', 'C']},
                              "look forward", "test_data_forward_v3d")
        data.add(file_name="data/input.xlsx",
                 sheet="Vertical",
                 time_row_or_col="B",
                 cell="F5",
                 coords={'XY': ['Y'], 'ABC': ['A', 'B', 'C']},
                 interp="look forward")
        data.initialize()
        self._assert_nd(data, _exp.forward_3d)

    def test_data_backward_hn3d(self):
        """
        ExtData test for 3d horizontal series hold backward by cellrange
        names
        """
        data = self._ext_data("Horizontal", "time", "data_2d",
                              {'XY': ['X'], 'ABC': ['A', 'B', 'C']},
                              "hold backward", "test_data_backward_hn3d")
        data.add(file_name="data/input.xlsx",
                 sheet="Horizontal",
                 time_row_or_col="time",
                 cell="data_2db",
                 coords={'XY': ['Y'], 'ABC': ['A', 'B', 'C']},
                 interp="hold backward")
        data.initialize()
        self._assert_nd(data, _exp.backward_3d)

    def test_data_raw_h1d(self):
        """
        ExtData test for 1d horizontal series raw
        """
        # py_name fixed: it was copy-pasted from test_data_forward_h1d
        data = self._ext_data("Horizontal", "4", "C5", {}, "raw",
                              "test_data_raw_h1d")
        data.initialize()
        for x, y in zip(_exp.xpts, _exp.raw_1d):
            data_x = data(x)
            # NaN is expected between series points in raw mode; NaN never
            # compares equal, so check it explicitly
            if np.isnan(y):
                equal = np.isnan(data_x)
            else:
                equal = y == data_x
            self.assertTrue(equal, "Wrong result at X=" + str(x))
class TestLookup(unittest.TestCase):
    """
    Test for the full working procedure of ExtLookup
    class when the data is properly given in the Excel file
    For 1D data all cases are computed.
    For 2D, 3D only some cases are computed as the complete set of
    test will cover all the possibilities.
    """

    def _ext_lookup(self, sheet, x_row_or_col, cell, coords, py_name):
        """Build (without initializing) an ExtLookup over data/input.xlsx."""
        import pysd
        return pysd.external.ExtLookup(file_name="data/input.xlsx",
                                       sheet=sheet,
                                       x_row_or_col=x_row_or_col,
                                       root=_root,
                                       cell=cell,
                                       coords=coords,
                                       py_name=py_name)

    def _assert_1d(self, data, expected):
        """Compare scalar outputs of `data` with `expected` at _exp.xpts."""
        with warnings.catch_warnings():
            warnings.simplefilter("ignore")
            for x, y in zip(_exp.xpts, expected):
                self.assertEqual(y, data(x), "Wrong result at X=" + str(x))

    def _assert_nd(self, data, expected):
        """Compare xarray outputs of `data` with `expected` at _exp.xpts."""
        with warnings.catch_warnings():
            warnings.simplefilter("ignore")
            for x, y in zip(_exp.xpts, expected):
                self.assertTrue(y.equals(data(x)),
                                "Wrong result at X=" + str(x))

    def test_lookup_h1d(self):
        """
        ExtLookup test for 1d horizontal series
        """
        data = self._ext_lookup("Horizontal", "4", "C5", {},
                                "test_lookup_h1d")
        data.initialize()
        self._assert_1d(data, _exp.interp_1d)

    def test_lookup_v1d(self):
        """
        ExtLookup test for 1d vertical series
        """
        data = self._ext_lookup("Vertical", "B", "C5", {},
                                "test_lookup_v1d")
        data.initialize()
        self._assert_1d(data, _exp.interp_1d)

    def test_lookup_hn1d(self):
        """
        ExtLookup test for 1d horizontal series by cellrange names
        """
        # py_name fixed: it was misspelled "test_lookup_h1nd"
        data = self._ext_lookup("Horizontal", "time", "data_1d", {},
                                "test_lookup_hn1d")
        data.initialize()
        self._assert_1d(data, _exp.interp_1d)

    def test_lookup_vn1d(self):
        """
        ExtLookup test for 1d vertical series by cellrange names
        """
        data = self._ext_lookup("Vertical", "time", "data_1d", {},
                                "test_lookup_vn1d")
        data.initialize()
        self._assert_1d(data, _exp.interp_1d)

    def test_lookup_h2d(self):
        """
        ExtLookup test for 2d horizontal series
        """
        data = self._ext_lookup("Horizontal", "4", "C5",
                                {'ABC': ['A', 'B', 'C']},
                                "test_lookup_h2d")
        data.initialize()
        self._assert_nd(data, _exp.interp_2d)

    def test_lookup_vn3d(self):
        """
        ExtLookup test for 3d vertical series by cellrange names
        """
        data = self._ext_lookup("Vertical", "time", "data_2d",
                                {'XY': ['X'], 'ABC': ['A', 'B', 'C']},
                                "test_lookup_vn3d")
        data.add(file_name="data/input.xlsx",
                 sheet="Vertical",
                 x_row_or_col="time",
                 cell="data_2db",
                 coords={'XY': ['Y'], 'ABC': ['A', 'B', 'C']})
        data.initialize()
        self._assert_nd(data, _exp.interp_3d)

    def test_lookup_vn3d_shape0(self):
        """
        ExtLookup test for 3d vertical series by cellrange names
        passing shape 0 xarray as argument
        """
        data = self._ext_lookup("Vertical", "time", "data_2d",
                                {'XY': ['X'], 'ABC': ['A', 'B', 'C']},
                                "test_lookup_vn3d_shape0")
        data.add(file_name="data/input.xlsx",
                 sheet="Vertical",
                 x_row_or_col="time",
                 cell="data_2db",
                 coords={'XY': ['Y'], 'ABC': ['A', 'B', 'C']})
        data.initialize()
        with warnings.catch_warnings():
            warnings.simplefilter("ignore")
            for x, y in zip(_exp.xpts, _exp.interp_3d):
                # wrap x in a 0-dimensional DataArray before the lookup
                self.assertTrue(y.equals(data(xr.DataArray(x))),
                                "Wrong result at X=" + str(x))

    def test_lookup_vn2d_xarray(self):
        """
        ExtLookup test for 2d vertical series by cellrange names
        using xarray for interpolation
        """
        data = self._ext_lookup("Vertical", "time", "data_2d",
                                {'ABC': ['A', 'B', 'C']},
                                "test_lookup_vn2d_xarray")
        data.initialize()
        # lookup arguments: all below range, all above range, all inside,
        # mixed, and a full 2d combination
        all_smaller = xr.DataArray([-1, -10], {'XY': ['X', 'Y']}, ['XY'])
        all_bigger = xr.DataArray([9, 20, 30], {'ABC': ['A', 'B', 'C']},
                                  ['ABC'])
        all_inside = xr.DataArray([3.5, 5.5], {'XY': ['X', 'Y']}, ['XY'])
        mixed = xr.DataArray([1.5, 20, -30], {'ABC': ['A', 'B', 'C']}, ['ABC'])
        full = xr.DataArray([[1.5, -30], [-10, 2.5], [4., 5.]],
                            {'ABC': ['A', 'B', 'C'], 'XY': ['X', 'Y']},
                            ['ABC', 'XY'])
        all_smaller_out = data.data[0].reset_coords('lookup_dim', drop=True)\
            + 0*all_smaller
        all_bigger_out = data.data[-1].reset_coords('lookup_dim', drop=True)
        all_inside_out = xr.DataArray([[0.5, -1],
                                       [-1, -0.5],
                                       [-0.75, 0]],
                                      {'ABC': ['A', 'B', 'C'],
                                       'XY': ['X', 'Y']},
                                      ['ABC', 'XY'])
        mixed_out = xr.DataArray([0.5, 0, 1],
                                 {'ABC': ['A', 'B', 'C']},
                                 ['ABC'])
        full_out = xr.DataArray([[0.5, 0],
                                 [0, 0],
                                 [-0.5, 0]],
                                {'ABC': ['A', 'B', 'C'], 'XY': ['X', 'Y']},
                                ['ABC', 'XY'])
        with warnings.catch_warnings():
            warnings.simplefilter("ignore")
            self.assertTrue(data(all_smaller).equals(all_smaller_out))
            self.assertTrue(data(all_bigger).equals(all_bigger_out))
            self.assertTrue(data(all_inside).equals(all_inside_out))
            self.assertTrue(data(mixed).equals(mixed_out))
            self.assertTrue(data(full).equals(full_out))

    def test_lookup_vn3d_xarray(self):
        """
        ExtLookup test for 3d vertical series by cellrange names
        using xarray for interpolation
        """
        data = self._ext_lookup("Vertical", "time", "data_2d",
                                {'XY': ['X'], 'ABC': ['A', 'B', 'C']},
                                "test_lookup_vn3d_xarray")
        data.add(file_name="data/input.xlsx",
                 sheet="Vertical",
                 x_row_or_col="time",
                 cell="data_2db",
                 coords={'XY': ['Y'], 'ABC': ['A', 'B', 'C']})
        data.initialize()
        # lookup arguments: all below range, all above range, all inside,
        # mixed, and a full 2d combination
        all_smaller = xr.DataArray([-1, -10], {'XY': ['X', 'Y']}, ['XY'])
        all_bigger = xr.DataArray([9, 20, 30], {'ABC': ['A', 'B', 'C']},
                                  ['ABC'])
        all_inside = xr.DataArray([3.5, 7.5], {'XY': ['X', 'Y']}, ['XY'])
        mixed = xr.DataArray([1.5, 20, -30], {'ABC': ['A', 'B', 'C']}, ['ABC'])
        full = xr.DataArray([[1.5, -30], [-10, 2.5], [4., 5.]],
                            {'ABC': ['A', 'B', 'C'], 'XY': ['X', 'Y']},
                            ['ABC', 'XY'])
        all_smaller_out = data.data[0].reset_coords('lookup_dim', drop=True)
        all_bigger_out = data.data[-1].reset_coords('lookup_dim', drop=True)
        all_inside_out = xr.DataArray([[0.5, -1, -0.75],
                                       [0.5, 1, 0]],
                                      {'XY': ['X', 'Y'],
                                       'ABC': ['A', 'B', 'C']},
                                      ['XY', 'ABC'])
        mixed_out = xr.DataArray([[0.5, 0, 1],
                                  [-1, 1, -1]],
                                 {'XY': ['X', 'Y'], 'ABC': ['A', 'B', 'C']},
                                 ['XY', 'ABC'])
        full_out = xr.DataArray([[0.5, 0, -0.5],
                                 [1, 0, 0]],
                                {'XY': ['X', 'Y'], 'ABC': ['A', 'B', 'C']},
                                ['XY', 'ABC'])
        with warnings.catch_warnings():
            warnings.simplefilter("ignore")
            self.assertTrue(data(all_smaller).equals(all_smaller_out))
            self.assertTrue(data(all_bigger).equals(all_bigger_out))
            self.assertTrue(data(all_inside).equals(all_inside_out))
            self.assertTrue(data(mixed).equals(mixed_out))
            self.assertTrue(data(full).equals(full_out))
class TestConstant(unittest.TestCase):
    """
    Test for the full working procedure of ExtConstant
    class when the data is properly given in the Excel file
    For 1D, 2D and 3D all cases are computed.
    """

    def _ext_constant(self, sheet, cell, coords, py_name):
        """Build (without initializing) an ExtConstant over data/input.xlsx."""
        import pysd
        return pysd.external.ExtConstant(file_name="data/input.xlsx",
                                         sheet=sheet,
                                         root=_root,
                                         cell=cell,
                                         coords=coords,
                                         py_name=py_name)

    def test_constant_0d(self):
        """
        ExtConstant test for 0d data
        """
        data = self._ext_constant("Horizontal", "F7", {},
                                  "test_constant_0d")
        data2 = self._ext_constant("Horizontal", "C5", {},
                                   "test_constant_0d")
        data.initialize()
        data2.initialize()
        self.assertEqual(data(), -1)
        self.assertEqual(data2(), 0)

    def test_constant_n0d(self):
        """
        ExtConstant test for 0d data by cellrange names
        """
        # py_name fixed: it was copy-pasted from test_constant_0d
        data = self._ext_constant("Horizontal", "constant", {},
                                  "test_constant_n0d")
        data2 = self._ext_constant("Horizontal", "constant2", {},
                                   "test_constant_n0d")
        data.initialize()
        data2.initialize()
        self.assertEqual(data(), -1)
        self.assertEqual(data2(), 0)

    def test_constant_h1d(self):
        """
        ExtConstant test for horizontal 1d data
        """
        data = self._ext_constant("Horizontal", "C5",
                                  {'val': [0, 1, 2, 3, 5, 6, 7, 8]},
                                  "test_constant_h1d")
        data.initialize()
        self.assertTrue(data().equals(_exp.constant_1d))

    def test_constant_v1d(self):
        """
        ExtConstant test for vertical 1d data
        """
        # trailing "*" on the cell marks a vertical (transposed) read
        data = self._ext_constant("Vertical", "C5*",
                                  {'val': [0, 1, 2, 3, 5, 6, 7, 8]},
                                  "test_constant_v1d")
        data.initialize()
        self.assertTrue(data().equals(_exp.constant_1d))

    def test_constant_hn1d(self):
        """
        ExtConstant test for horizontal 1d data by cellrange names
        """
        data = self._ext_constant("Horizontal", "data_1d",
                                  {'val': [0, 1, 2, 3, 5, 6, 7, 8]},
                                  "test_constant_hn1d")
        data.initialize()
        self.assertTrue(data().equals(_exp.constant_1d))

    def test_constant_vn1d(self):
        """
        ExtConstant test for vertical 1d data by cellrange names
        """
        data = self._ext_constant("Vertical", "data_1d*",
                                  {'val': [0, 1, 2, 3, 5, 6, 7, 8]},
                                  "test_constant_vn1d")
        data.initialize()
        self.assertTrue(data().equals(_exp.constant_1d))

    def test_constant_h2d(self):
        """
        ExtConstant test for horizontal 2d data
        """
        data = self._ext_constant("Horizontal", "C5",
                                  {'ABC': ['A', 'B', 'C'],
                                   'val': [0, 1, 2, 3, 5, 6, 7, 8]},
                                  "test_constant_h2d")
        data.initialize()
        self.assertTrue(data().equals(_exp.constant_2d))

    def test_constant_v2d(self):
        """
        ExtConstant test for vertical 2d data
        """
        data = self._ext_constant("Vertical", "C5*",
                                  {'ABC': ['A', 'B', 'C'],
                                   'val': [0, 1, 2, 3, 5, 6, 7, 8]},
                                  "test_constant_v2d")
        data.initialize()
        self.assertTrue(data().equals(_exp.constant_2d))

    def test_constant_hn2d(self):
        """
        ExtConstant test for horizontal 2d data by cellrange names
        """
        data = self._ext_constant("Horizontal", "data_2d",
                                  {'ABC': ['A', 'B', 'C'],
                                   'val': [0, 1, 2, 3, 5, 6, 7, 8]},
                                  "test_constant_hn2d")
        data.initialize()
        self.assertTrue(data().equals(_exp.constant_2d))

    def test_constant_vn2d(self):
        """
        ExtConstant test for vertical 2d data by cellrange names
        """
        data = self._ext_constant("Vertical", "data_2d*",
                                  {'ABC': ['A', 'B', 'C'],
                                   'val': [0, 1, 2, 3, 5, 6, 7, 8]},
                                  "test_constant_vn2d")
        data.initialize()
        self.assertTrue(data().equals(_exp.constant_2d))

    def test_constant_h3d(self):
        """
        ExtConstant test for horizontal 3d data
        """
        data = self._ext_constant("Horizontal", "C5",
                                  {'ABC': ['A', 'B', 'C'],
                                   'XY': ['X'],
                                   'val': [0, 1, 2, 3, 5, 6, 7, 8]},
                                  "test_constant_h3d")
        data.add(file_name="data/input.xlsx",
                 sheet="Horizontal",
                 cell="C8",
                 coords={'ABC': ['A', 'B', 'C'],
                         'XY': ['Y'],
                         'val': [0, 1, 2, 3, 5, 6, 7, 8]})
        data.initialize()
        self.assertTrue(data().equals(_exp.constant_3d))

    def test_constant_v3d(self):
        """
        ExtConstant test for vertical 3d data
        """
        data = self._ext_constant("Vertical", "C5*",
                                  {'ABC': ['A', 'B', 'C'],
                                   'XY': ['X'],
                                   'val': [0, 1, 2, 3, 5, 6, 7, 8]},
                                  "test_constant_v3d")
        data.add(file_name="data/input.xlsx",
                 sheet="Vertical",
                 cell="F5*",
                 coords={'ABC': ['A', 'B', 'C'],
                         'XY': ['Y'],
                         'val': [0, 1, 2, 3, 5, 6, 7, 8]})
        data.initialize()
        self.assertTrue(data().equals(_exp.constant_3d))

    def test_constant_hn3d(self):
        """
        ExtConstant test for horizontal 3d data by cellrange names
        """
        data = self._ext_constant("Horizontal", "data_2d",
                                  {'ABC': ['A', 'B', 'C'],
                                   'XY': ['X'],
                                   'val': [0, 1, 2, 3, 5, 6, 7, 8]},
                                  "test_constant_hn3d")
        data.add(file_name="data/input.xlsx",
                 sheet="Horizontal",
                 cell="data_2db",
                 coords={'ABC': ['A', 'B', 'C'],
                         'XY': ['Y'],
                         'val': [0, 1, 2, 3, 5, 6, 7, 8]})
        data.initialize()
        self.assertTrue(data().equals(_exp.constant_3d))

    def test_constant_vn3d(self):
        """
        ExtConstant test for vertical 3d data by cellrange names
        """
        # py_name fixed: it was copy-pasted from test_constant_vn2d
        data = self._ext_constant("Vertical", "data_2d*",
                                  {'ABC': ['A', 'B', 'C'],
                                   'XY': ['X'],
                                   'val': [0, 1, 2, 3, 5, 6, 7, 8]},
                                  "test_constant_vn3d")
        data.add(file_name="data/input.xlsx",
                 sheet="Vertical",
                 cell="data_2db*",
                 coords={'ABC': ['A', 'B', 'C'],
                         'XY': ['Y'],
                         'val': [0, 1, 2, 3, 5, 6, 7, 8]})
        data.initialize()
        self.assertTrue(data().equals(_exp.constant_3d))
class TestSubscript(unittest.TestCase):
    """
    Test for the full working procedure of ExtSubscript
    class when the data is properly given in the Excel file
    """

    def test_subscript_h(self):
        """
        ExtSubscript test for horizontal subscripts
        """
        import pysd
        expected = ['val0', 'val1', 'val2', 'val3',
                    'val5', 'val6', 'val7', 'val8']
        data = pysd.external.ExtSubscript(file_name="data/input.xlsx",
                                          sheet="Horizontal",
                                          root=_root,
                                          firstcell="C4",
                                          lastcell="J4",
                                          prefix='val')
        # bug fix: assertTrue(a, b) only checks that `a` is truthy and
        # uses `b` as the failure message; compare the values instead
        self.assertEqual(data.subscript, expected)

    def test_subscript_v(self):
        """
        ExtSubscript test for vertical subscripts
        """
        import pysd
        expected = ['A', 'B', 'C']
        data = pysd.external.ExtSubscript(file_name="data/input.xlsx",
                                          sheet="Horizontal",
                                          root=_root,
                                          firstcell="B5",
                                          lastcell="B7",
                                          prefix='')
        # bug fix: same assertTrue-with-message misuse as above
        self.assertEqual(data.subscript, expected)
class TestWarningsErrors(unittest.TestCase):
"""
Test for the warnings and errors of External and its subclasses
"""
def test_not_implemented_file(self):
    """
    Test for not implemented file
    """
    import pysd
    # an .ods spreadsheet is not a supported input format
    ext_data = pysd.external.ExtData(
        file_name="data/not_implemented_file.ods",
        sheet="Horizontal",
        time_row_or_col="4",
        root=_root,
        cell="C5",
        coords={},
        interp=None,
        py_name="test_not_implemented_file")
    with self.assertRaises(NotImplementedError):
        ext_data.initialize()
def test_non_existent_file(self):
    """
    Test for non-existent file
    """
    import pysd
    # the error must be raised at initialization time, not construction
    ext_data = pysd.external.ExtData(
        file_name="data/non_existent.xls",
        sheet="Horizontal",
        time_row_or_col="4",
        root=_root,
        cell="C5",
        coords={},
        interp=None,
        py_name="test_non_existent_file")
    with self.assertRaises(FileNotFoundError):
        ext_data.initialize()
def test_non_existent_sheet_pyxl(self):
    """
    Test for non-existent sheet with openpyxl
    """
    import pysd
    ext_data = pysd.external.ExtData(
        file_name="data/input.xlsx",
        sheet="Non-Existent",
        time_row_or_col="time",
        cell="data_1d",
        root=_root,
        coords={},
        interp=None,
        py_name="test_non_existent_sheet_pyxl")
    with self.assertRaises(ValueError):
        ext_data.initialize()
def test_non_existent_cellrange_name_pyxl(self):
"""
Test for non-existent cellrange name with openpyxl
"""
import pysd
file_name = "data/input.xlsx"
sheet = "Horizontal"
time_row_or_col = "time"
cell = "non_exixtent"
coords = {}
interp = None
py_name = "est_non_existent_cellrange_name_pyxl"
data = pysd.external.ExtData(file_name=file_name,
sheet=sheet,
time_row_or_col=time_row_or_col,
root=_root,
cell=cell,
coords=coords,
interp=interp,
py_name=py_name)
with self.assertRaises(AttributeError):
data.initialize()
def test_non_existent_cellrange_name_in_sheet_pyxl(self):
"""
Test for non-existent cellrange name with openpyxl
"""
import pysd
file_name = "data/input.xlsx"
sheet = "Horizontal missing"
cell = "constant"
coords = {}
py_name = "est_non_existent_cellrange_name_in_sheet_pyxl"
data = pysd.external.ExtConstant(file_name=file_name,
sheet=sheet,
root=_root,
cell=cell,
coords=coords,
py_name=py_name)
with self.assertRaises(AttributeError):
data.initialize()
# Following test are for ExtData class only
# as the initialization of ExtLookup uses the same function
def test_data_interp_h1dm_row(self):
"""
Test for warning 1d horizontal series interpolation when series
has missing or NaN data
"""
import pysd
file_name = "data/input.xlsx"
sheet = "Horizontal missing"
time_row_or_col = "time_missing"
cell = "len_0"
coords = {}
interp = None
py_name = "test_data_interp_h1dm_row"
pysd.external.External.missing = "warning"
data = pysd.external.ExtData(file_name=file_name,
sheet=sheet,
time_row_or_col=time_row_or_col,
root=_root,
cell=cell,
coords=coords,
interp=interp,
py_name=py_name)
with warnings.catch_warnings(record=True) as ws:
data.initialize()
wu = [w for w in ws if issubclass(w.category, UserWarning)]
self.assertTrue("Not able to interpolate" in str(wu[-1].message))
self.assertTrue(all(np.isnan(data.data.values)))
def test_data_interp_h1dm_row2(self):
"""
Test for warning 1d horizontal series interpolation when series
has missing or NaN data
"""
import pysd
file_name = "data/input.xlsx"
sheet = "Horizontal missing"
time_row_or_col = "4"
cell = "C9"
coords = {"dim": ["B", "C", "D"]}
interp = None
py_name = "test_data_interp_h1dm_row2"
pysd.external.External.missing = "warning"
data = pysd.external.ExtData(file_name=file_name,
sheet=sheet,
time_row_or_col=time_row_or_col,
root=_root,
cell=cell,
coords=coords,
interp=interp,
py_name=py_name)
with warnings.catch_warnings(record=True) as ws:
data.initialize()
wu = [w for w in ws if issubclass(w.category, UserWarning)]
self.assertTrue("Not able to interpolate" in str(wu[-1].message))
self.assertFalse(any(np.isnan(data.data.loc[:, "B"].values)))
self.assertFalse(any(np.isnan(data.data.loc[:, "C"].values)))
self.assertTrue(all(np.isnan(data.data.loc[:, "D"].values)))
def test_data_interp_h1dm(self):
"""
Test for warning 1d horizontal series interpolation when series
has missing or NaN data
"""
import pysd
file_name = "data/input.xlsx"
sheet = "Horizontal missing"
time_row_or_col = "4"
cell = "C5"
coords = {}
interp = None
py_name = "test_data_interp_h1dm"
pysd.external.External.missing = "warning"
data = pysd.external.ExtData(file_name=file_name,
sheet=sheet,
time_row_or_col=time_row_or_col,
root=_root,
cell=cell,
coords=coords,
interp=interp,
py_name=py_name)
with warnings.catch_warnings(record=True) as ws:
data.initialize()
# use only user warnings
wu = [w for w in ws if issubclass(w.category, UserWarning)]
self.assertEqual(len(wu), 1)
self.assertIn("missing", str(wu[0].message))
with warnings.catch_warnings(record=True) as ws:
for x, y in zip(_exp.xpts, _exp.interp_1d):
self.assertEqual(y, data(x), "Wrong result at X=" + str(x))
# use only user warnings
wu = [w for w in ws if issubclass(w.category, UserWarning)]
self.assertEqual(len(wu), 2)
self.assertTrue("extrapolating data below the minimum value"
+ " of the time" in str(wu[0].message))
self.assertTrue("extrapolating data above the maximum value"
+ " of the time" in str(wu[1].message))
def test_data_interp_h1dm_ignore(self):
"""
Test ignore warning 1d horizontal series interpolation when series
has missing or NaN data
"""
import pysd
file_name = "data/input.xlsx"
sheet = "Horizontal missing"
time_row_or_col = "4"
cell = "C5"
coords = {}
interp = None
py_name = "test_data_interp_h1dm_ignore"
pysd.external.External.missing = "ignore"
data = pysd.external.ExtData(file_name=file_name,
sheet=sheet,
time_row_or_col=time_row_or_col,
root=_root,
cell=cell,
coords=coords,
interp=interp,
py_name=py_name)
with warnings.catch_warnings(record=True) as ws:
data.initialize()
# use only user warnings
wu = [w for w in ws if issubclass(w.category, UserWarning)]
self.assertEqual(len(wu), 0)
with warnings.catch_warnings(record=True) as ws:
for x, y in zip(_exp.xpts, _exp.interp_1d):
self.assertEqual(y, data(x), "Wrong result at X=" + str(x))
# use only user warnings
wu = [w for w in ws if issubclass(w.category, UserWarning)]
self.assertEqual(len(wu), 2)
self.assertTrue("extrapolating data below the minimum value"
+ " of the time" in str(wu[0].message))
self.assertTrue("extrapolating data above the maximum value"
+ " of the time" in str(wu[1].message))
def test_data_interp_h1dm_raise(self):
"""
Test error 1d horizontal series interpolation when series
has missing or NaN data
"""
import pysd
file_name = "data/input.xlsx"
sheet = "Horizontal missing"
time_row_or_col = "4"
cell = "C5"
coords = {}
interp = None
py_name = "test_data_interp_h1dm_ignore"
pysd.external.External.missing = "raise"
data = pysd.external.ExtData(file_name=file_name,
sheet=sheet,
time_row_or_col=time_row_or_col,
root=_root,
cell=cell,
coords=coords,
interp=interp,
py_name=py_name)
with self.assertRaises(ValueError):
data.initialize()
def test_data_interp_v1dm(self):
"""
Test for warning 1d vertical series interpolation when series has
missing or NaN data
"""
import pysd
file_name = "data/input.xlsx"
sheet = "Vertical missing"
time_row_or_col = "B"
cell = "C5"
coords = {}
interp = None
py_name = "test_data_interp_v1dm"
pysd.external.External.missing = "warning"
data = pysd.external.ExtData(file_name=file_name,
sheet=sheet,
time_row_or_col=time_row_or_col,
root=_root,
cell=cell,
coords=coords,
interp=interp,
py_name=py_name)
with warnings.catch_warnings(record=True) as ws:
data.initialize()
# use only user warnings
wu = [w for w in ws if issubclass(w.category, UserWarning)]
self.assertEqual(len(wu), 1)
self.assertTrue("missing" in str(wu[0].message))
with warnings.catch_warnings(record=True) as ws:
for x, y in zip(_exp.xpts, _exp.interp_1d):
self.assertEqual(y, data(x), "Wrong result at X=" + str(x))
# use only user warnings
wu = [w for w in ws if issubclass(w.category, UserWarning)]
self.assertEqual(len(wu), 2)
self.assertTrue("extrapolating data below the minimum value"
+ " of the time" in str(wu[0].message))
self.assertTrue("extrapolating data above the maximum value"
+ " of the time" in str(wu[1].message))
def test_data_interp_v1dm_ignore(self):
"""
Test ignore warning 1d vertical series interpolation when series has
missing or NaN data
"""
import pysd
file_name = "data/input.xlsx"
sheet = "Vertical missing"
time_row_or_col = "B"
cell = "C5"
coords = {}
interp = None
py_name = "test_data_interp_v1dm_ignore"
pysd.external.External.missing = "ignore"
data = pysd.external.ExtData(file_name=file_name,
sheet=sheet,
time_row_or_col=time_row_or_col,
root=_root,
cell=cell,
coords=coords,
interp=interp,
py_name=py_name)
with warnings.catch_warnings(record=True) as ws:
data.initialize()
# use only user warnings
wu = [w for w in ws if issubclass(w.category, UserWarning)]
self.assertEqual(len(wu), 0)
with warnings.catch_warnings(record=True) as ws:
for x, y in zip(_exp.xpts, _exp.interp_1d):
self.assertEqual(y, data(x), "Wrong result at X=" + str(x))
# use only user warnings
wu = [w for w in ws if issubclass(w.category, UserWarning)]
self.assertEqual(len(wu), 2)
self.assertTrue("extrapolating data below the minimum value"
+ " of the time" in str(wu[0].message))
self.assertTrue("extrapolating data above the maximum value"
+ " of the time" in str(wu[1].message))
def test_data_interp_v1dm_raise(self):
"""
Test error 1d vertical series interpolation when series has
missing or NaN data
"""
import pysd
file_name = "data/input.xlsx"
sheet = "Vertical missing"
time_row_or_col = "B"
cell = "C5"
coords = {}
interp = None
py_name = "test_data_interp_v1dm_ignore"
pysd.external.External.missing = "raise"
data = pysd.external.ExtData(file_name=file_name,
sheet=sheet,
time_row_or_col=time_row_or_col,
root=_root,
cell=cell,
coords=coords,
interp=interp,
py_name=py_name)
with self.assertRaises(ValueError):
data.initialize()
def test_data_interp_hn1dm(self):
"""
Test for warning 1d horizontal series by cellrange names
when series has missing or NaN data
"""
import pysd
file_name = "data/input.xlsx"
sheet = "Horizontal missing"
time_row_or_col = "time_missing"
cell = "data_1d"
coords = {}
interp = None
py_name = "test_data_interp_h1dm"
pysd.external.External.missing = "warning"
data = pysd.external.ExtData(file_name=file_name,
sheet=sheet,
time_row_or_col=time_row_or_col,
root=_root,
cell=cell,
coords=coords,
interp=interp,
py_name=py_name)
with warnings.catch_warnings(record=True) as ws:
data.initialize()
# use only user warnings
wu = [w for w in ws if issubclass(w.category, UserWarning)]
self.assertEqual(len(wu), 1)
self.assertTrue("missing" in str(wu[0].message))
with warnings.catch_warnings(record=True) as ws:
for x, y in zip(_exp.xpts, _exp.interp_1d):
self.assertEqual(y, data(x), "Wrong result at X=" + str(x))
# use only user warnings
wu = [w for w in ws if issubclass(w.category, UserWarning)]
self.assertEqual(len(wu), 2)
self.assertTrue("extrapolating data below the minimum value"
+ " of the time" in str(wu[0].message))
self.assertTrue("extrapolating data above the maximum value"
+ " of the time" in str(wu[1].message))
def test_data_interp_hn1dm_ignore(self):
"""
Test ignore warning 1d horizontal series by cellrange names
when series has missing or NaN data
"""
import pysd
file_name = "data/input.xlsx"
sheet = "Horizontal missing"
time_row_or_col = "time_missing"
cell = "data_1d"
coords = {}
interp = None
py_name = "test_data_interp_h1dm_ignore"
pysd.external.External.missing = "ignore"
data = pysd.external.ExtData(file_name=file_name,
sheet=sheet,
time_row_or_col=time_row_or_col,
root=_root,
cell=cell,
coords=coords,
interp=interp,
py_name=py_name)
with warnings.catch_warnings(record=True) as ws:
data.initialize()
# use only user warnings
wu = [w for w in ws if issubclass(w.category, UserWarning)]
self.assertEqual(len(wu), 0)
with warnings.catch_warnings(record=True) as ws:
for x, y in zip(_exp.xpts, _exp.interp_1d):
self.assertEqual(y, data(x), "Wrong result at X=" + str(x))
# use only user warnings
wu = [w for w in ws if issubclass(w.category, UserWarning)]
self.assertEqual(len(wu), 2)
self.assertTrue("extrapolating data below the minimum value"
+ " of the time" in str(wu[0].message))
self.assertTrue("extrapolating data above the maximum value"
+ " of the time" in str(wu[1].message))
def test_data_interp_hn1dm_raise(self):
"""
Test for error 1d horizontal series by cellrange names
when series has missing or NaN data
"""
import pysd
file_name = "data/input.xlsx"
sheet = "Horizontal missing"
time_row_or_col = "time_missing"
cell = "data_1d"
coords = {}
interp = None
py_name = "test_data_interp_h1dm_raise"
pysd.external.External.missing = "raise"
data = pysd.external.ExtData(file_name=file_name,
sheet=sheet,
time_row_or_col=time_row_or_col,
root=_root,
cell=cell,
coords=coords,
interp=interp,
py_name=py_name)
with self.assertRaises(ValueError):
data.initialize()
def test_data_interp_hn3dmd(self):
"""
Test for warning 3d horizontal series interpolation by cellrange names
with missing data values. More cases are tested with test-models
"""
import pysd
file_name = "data/input.xlsx"
sheet = "Horizontal missing"
time_row_or_col = "time"
cell_1 = "data_2d"
cell_2 = "data_2db"
coords_1 = {'XY': ['X'], 'ABC': ['A', 'B', 'C']}
coords_2 = {'XY': ['Y'], 'ABC': ['A', 'B', 'C']}
interp = "interpolate"
py_name = "test_data_interp_hn3dmd"
pysd.external.External.missing = "warning"
data = pysd.external.ExtData(file_name=file_name,
sheet=sheet,
time_row_or_col=time_row_or_col,
root=_root,
cell=cell_1,
interp=interp,
coords=coords_1,
py_name=py_name)
data.add(file_name=file_name,
sheet=sheet,
time_row_or_col=time_row_or_col,
cell=cell_2,
interp=interp,
coords=coords_2)
with warnings.catch_warnings(record=True) as ws:
data.initialize()
# use only user warnings
wu = [w for w in ws if issubclass(w.category, UserWarning)]
self.assertEqual(len(wu), 2)
self.assertTrue(np.all(
["missing" in str(w.message) for w in wu]
))
self.assertTrue(np.all(
["will be filled" in str(w.message) for w in wu]
))
with warnings.catch_warnings(record=True) as ws:
for x, y in zip(_exp.xpts, _exp.interp_3d):
self.assertTrue(y.equals(data(x)),
"Wrong result at X=" + str(x))
# use only user warnings
wu = [w for w in ws if issubclass(w.category, UserWarning)]
self.assertEqual(len(wu), 2)
self.assertTrue("extrapolating data below the minimum value"
+ " of the time" in str(wu[0].message))
self.assertTrue("extrapolating data above the maximum value"
+ " of the time" in str(wu[1].message))
def test_data_interp_hn3dmd_raw(self):
"""
Test for warning 1d horizontal series interpolation when series
has missing or NaN data
"""
import pysd
file_name = "data/input.xlsx"
sheet = "Horizontal missing"
time_row_or_col = "time"
cell_1 = "data_2d"
cell_2 = "data_2db"
coords_1 = {'XY': ['X'], 'ABC': ['A', 'B', 'C']}
coords_2 = {'XY': ['Y'], 'ABC': ['A', 'B', 'C']}
interp = "raw"
py_name = "test_data_interp_hn3dmd_raw"
pysd.external.External.missing = "warning"
data = pysd.external.ExtData(file_name=file_name,
sheet=sheet,
time_row_or_col=time_row_or_col,
root=_root,
cell=cell_1,
interp=interp,
coords=coords_1,
py_name=py_name)
data.add(file_name=file_name,
sheet=sheet,
time_row_or_col=time_row_or_col,
cell=cell_2,
interp=interp,
coords=coords_2)
with warnings.catch_warnings(record=True) as ws:
data.initialize()
# use only user warnings
wu = [w for w in ws if issubclass(w.category, UserWarning)]
self.assertEqual(len(wu), 2)
self.assertTrue(np.all(
["missing" in str(w.message) for w in wu]
))
self.assertTrue(np.all(
["will be filled" not in str(w.message) for w in wu]
))
def test_lookup_hn3dmd_raise(self):
"""
Test for error 3d horizontal series interpolation with missing data
values.
"""
import pysd
file_name = "data/input.xlsx"
sheet = "Horizontal missing"
x_row_or_col = "15"
cell_1 = "C16"
cell_2 = "C19"
coords_1 = {'XY': ['X'], 'ABC': ['A', 'B', 'C']}
coords_2 = {'XY': ['Y'], 'ABC': ['A', 'B', 'C']}
py_name = "test_lookup_hn3dmd_raise"
pysd.external.External.missing = "raise"
data = pysd.external.ExtLookup(file_name=file_name,
sheet=sheet,
x_row_or_col=x_row_or_col,
root=_root,
cell=cell_1,
coords=coords_1,
py_name=py_name)
data.add(file_name=file_name,
sheet=sheet,
x_row_or_col=x_row_or_col,
cell=cell_2,
coords=coords_2)
with self.assertRaises(ValueError):
data.initialize()
def test_lookup_hn3dmd_ignore(self):
"""
Test for ignore warnings 3d horizontal series interpolation with
missing data values.
"""
import pysd
file_name = "data/input.xlsx"
sheet = "Horizontal missing"
x_row_or_col = "15"
cell_1 = "C16"
cell_2 = "C19"
coords_1 = {'XY': ['X'], 'ABC': ['A', 'B', 'C']}
coords_2 = {'XY': ['Y'], 'ABC': ['A', 'B', 'C']}
py_name = "test_lookup_hn3dmd_ignore"
pysd.external.External.missing = "ignore"
data = pysd.external.ExtLookup(file_name=file_name,
sheet=sheet,
x_row_or_col=x_row_or_col,
root=_root,
cell=cell_1,
coords=coords_1,
py_name=py_name)
data.add(file_name=file_name,
sheet=sheet,
x_row_or_col=x_row_or_col,
cell=cell_2,
coords=coords_2)
with warnings.catch_warnings(record=True) as ws:
data.initialize()
# use only user warnings
wu = [w for w in ws if issubclass(w.category, UserWarning)]
self.assertEqual(len(wu), 0)
with warnings.catch_warnings(record=True) as ws:
for x, y in zip(_exp.xpts, _exp.interp_3d):
self.assertTrue(y.equals(data(x)),
"Wrong result at X=" + str(x))
# use only user warnings
wu = [w for w in ws if issubclass(w.category, UserWarning)]
self.assertEqual(len(wu), 2)
self.assertTrue("extrapolating data below the minimum value"
+ " of the series" in str(wu[0].message))
self.assertTrue("extrapolating data above the maximum value"
+ " of the series" in str(wu[1].message))
def test_constant_h3dm(self):
"""
Test for warning in 3d horizontal series interpolation with missing
values.
"""
import pysd
file_name = "data/input.xlsx"
sheet = "Horizontal missing"
cell_1 = "C16"
cell_2 = "C19"
coords_1 = {'XY': ['X'], 'ABC': ['A', 'B', 'C'],
'val': [0, 1, 2, 3, 5, 6, 7, 8]}
coords_2 = {'XY': ['Y'], 'ABC': ['A', 'B', 'C'],
'val': [0, 1, 2, 3, 5, 6, 7, 8]}
py_name = "test_constant_h3dm"
pysd.external.External.missing = "warning"
data = pysd.external.ExtConstant(file_name=file_name,
sheet=sheet,
root=_root,
cell=cell_1,
coords=coords_1,
py_name=py_name)
data.add(file_name=file_name,
sheet=sheet,
cell=cell_2,
coords=coords_2)
with warnings.catch_warnings(record=True) as ws:
data.initialize()
# use only user warnings
wu = [w for w in ws if issubclass(w.category, UserWarning)]
self.assertEqual(len(wu), 2)
self.assertTrue(np.all(
["missing" in str(w.message) for w in wu]
))
def test_constant_h3dm_ignore(self):
"""
Test for ignore in 3d horizontal series interpolation with missing
values.
"""
import pysd
file_name = "data/input.xlsx"
sheet = "Horizontal missing"
cell_1 = "C16"
cell_2 = "C19"
coords_1 = {'XY': ['X'], 'ABC': ['A', 'B', 'C'],
'val': [0, 1, 2, 3, 5, 6, 7, 8]}
coords_2 = {'XY': ['Y'], 'ABC': ['A', 'B', 'C'],
'val': [0, 1, 2, 3, 5, 6, 7, 8]}
py_name = "test_constant_h3dm_ignore"
pysd.external.External.missing = "ignore"
data = pysd.external.ExtConstant(file_name=file_name,
sheet=sheet,
root=_root,
cell=cell_1,
coords=coords_1,
py_name=py_name)
data.add(file_name=file_name,
sheet=sheet,
cell=cell_2,
coords=coords_2)
with warnings.catch_warnings(record=True) as ws:
data.initialize()
# use only user warnings
wu = [w for w in ws if issubclass(w.category, UserWarning)]
self.assertEqual(len(wu), 0)
def test_constant_h3dm_raise(self):
"""
Test for error 3d horizontal constants with missing values.
"""
import pysd
file_name = "data/input.xlsx"
sheet = "Horizontal missing"
cell_1 = "C16"
cell_2 = "C19"
coords_1 = {'XY': ['X'], 'ABC': ['A', 'B', 'C'],
'val': [0, 1, 2, 3, 5, 6, 7, 8]}
coords_2 = {'XY': ['Y'], 'ABC': ['A', 'B', 'C'],
'val': [0, 1, 2, 3, 5, 6, 7, 8]}
py_name = "test_constant_h3dm_raise"
pysd.external.External.missing = "raise"
data = pysd.external.ExtConstant(file_name=file_name,
sheet=sheet,
root=_root,
cell=cell_1,
coords=coords_1,
py_name=py_name)
data.add(file_name=file_name,
sheet=sheet,
cell=cell_2,
coords=coords_2)
with self.assertRaises(ValueError):
data.initialize()
def test_constant_hn3dm_raise(self):
"""
Test for error 3d horizontal constants with missing values by cellrange
name.
"""
import pysd
file_name = "data/input.xlsx"
sheet = "Horizontal missing"
cell_1 = "data_2d"
cell_2 = "data_2db"
coords_1 = {'XY': ['X'], 'ABC': ['A', 'B', 'C'],
'val': [0, 1, 2, 3, 5, 6, 7, 8]}
coords_2 = {'XY': ['Y'], 'ABC': ['A', 'B', 'C'],
'val': [0, 1, 2, 3, 5, 6, 7, 8]}
py_name = "test_constant_hn3dm_raise"
pysd.external.External.missing = "raise"
data = pysd.external.ExtConstant(file_name=file_name,
sheet=sheet,
root=_root,
cell=cell_1,
coords=coords_1,
py_name=py_name)
data.add(file_name=file_name,
sheet=sheet,
cell=cell_2,
coords=coords_2)
with self.assertRaises(ValueError):
data.initialize()
def test_data_interp_h1d0(self):
"""
Test for error 1d horizontal series for len 0 series
"""
import pysd
file_name = "data/input.xlsx"
sheet = "Horizontal missing"
time_row_or_col = "3"
cell = "C5"
coords = {}
interp = None
py_name = "test_data_interp_h1d0"
data = pysd.external.ExtData(file_name=file_name,
sheet=sheet,
time_row_or_col=time_row_or_col,
root=_root,
cell=cell,
coords=coords,
interp=interp,
py_name=py_name)
with self.assertRaises(ValueError):
data.initialize()
def test_data_interp_v1d0(self):
"""
Test for error 1d vertical series for len 0 series
"""
import pysd
file_name = "data/input.xlsx"
sheet = "Vertical missing"
time_row_or_col = "A"
cell = "C5"
coords = {}
interp = None
py_name = "test_data_interp_v1d0"
data = pysd.external.ExtData(file_name=file_name,
sheet=sheet,
time_row_or_col=time_row_or_col,
root=_root,
cell=cell,
coords=coords,
interp=interp,
py_name=py_name)
with self.assertRaises(ValueError):
data.initialize()
def test_data_interp_hn1d0(self):
"""
Test for error in series by cellrange names
when series has length 0
"""
import pysd
file_name = "data/input.xlsx"
sheet = "Horizontal missing"
time_row_or_col = "len_0"
cell = "data_1d"
coords = {}
interp = None
py_name = "test_data_interp_h1d0"
pysd.external.External.missing = "warning"
data = pysd.external.ExtData(file_name=file_name,
sheet=sheet,
time_row_or_col=time_row_or_col,
root=_root,
cell=cell,
coords=coords,
interp=interp,
py_name=py_name)
with self.assertRaises(ValueError):
data.initialize()
def test_data_interp_hn1dt(self):
"""
Test for error in series by cellrange names
when series is a sheetle
"""
import pysd
file_name = "data/input.xlsx"
sheet = "Horizontal"
time_row_or_col = "data_2d"
cell = "data_1d"
coords = {}
interp = None
py_name = "test_data_interp_h1dt"
data = pysd.external.ExtData(file_name=file_name,
sheet=sheet,
time_row_or_col=time_row_or_col,
root=_root,
cell=cell,
coords=coords,
interp=interp,
py_name=py_name)
with self.assertRaises(ValueError):
data.initialize()
def test_data_interp_hns(self):
"""
Test for error in data when it doen't have the same
shape as the given coordinates
"""
import pysd
file_name = "data/input.xlsx"
sheet = "Horizontal"
time_row_or_col = "time"
cell = "data_2d"
coords = {'ABC': ['A', 'B']}
interp = None
py_name = "test_data_interp_hns"
data = pysd.external.ExtData(file_name=file_name,
sheet=sheet,
time_row_or_col=time_row_or_col,
root=_root,
cell=cell,
coords=coords,
interp=interp,
py_name=py_name)
with self.assertRaises(ValueError):
data.initialize()
def test_data_interp_vnss(self):
"""
Test for error in data when it doen't have the same
shape in the first dimension as the length of series
"""
import pysd
file_name = "data/input.xlsx"
sheet = "Vertical missing"
time_row_or_col = "time_short"
cell = "data_2d_short"
coords = {'ABC': ['A', 'B', 'C']}
interp = None
py_name = "test_data_interp_vnss"
data = pysd.external.ExtData(file_name=file_name,
sheet=sheet,
time_row_or_col=time_row_or_col,
root=_root,
cell=cell,
coords=coords,
interp=interp,
py_name=py_name)
with self.assertRaises(ValueError):
data.initialize()
# Following test are independent of the reading option
def test_data_interp_hnnwd(self):
"""
Test for error in series when the series is not
well defined
"""
import pysd
file_name = "data/input.xlsx"
sheet = "No monotonous"
time_row_or_col = "time"
cell = "data_1d"
coords = {}
interp = None
py_name = "test_data_interp_hnnwd"
data = pysd.external.ExtData(file_name=file_name,
sheet=sheet,
time_row_or_col=time_row_or_col,
root=_root,
cell=cell,
coords=coords,
interp=interp,
py_name=py_name)
with self.assertRaises(ValueError) as err:
data.initialize()
self.assertIn("has repeated values", str(err.exception))
def test_data_raw_hnnm(self):
"""
Test for error in series when the series is not monotonous
"""
import pysd
file_name = "data/input.xlsx"
sheet = "No monotonous"
time_row_or_col = "10"
cell = "C12"
coords = {}
interp = None
py_name = "test_data_interp_hnnm"
data = pysd.external.ExtData(file_name=file_name,
sheet=sheet,
time_row_or_col=time_row_or_col,
root=_root,
cell=cell,
coords=coords,
interp=interp,
py_name=py_name)
data.initialize()
expected = {-1: 2, 0: 2, 1: 2, 2: 3,
3: -1, 4: -1, 5: 1, 6: 1,
7: 0, 8: 0, 9: 0}
with warnings.catch_warnings():
warnings.simplefilter("ignore")
for i in range(-1, 9):
self.assertEqual(data(i), expected[i])
time_row_or_col = "11"
py_name = "test_data_interp_hnnnm2"
data = pysd.external.ExtData(file_name=file_name,
sheet=sheet,
time_row_or_col=time_row_or_col,
root=_root,
cell=cell,
coords=coords,
interp=interp,
py_name=py_name)
data.initialize()
expected = {-1: 0, 0: 0, 1: 0, 2: 1,
3: 2, 4: 3, 5: -1, 6: -1,
7: 1, 8: 2, 9: 2}
with warnings.catch_warnings():
warnings.simplefilter("ignore")
for i in range(-1, 9):
self.assertEqual(data(i), expected[i])
def test_data_h3d_interpnv(self):
"""
ExtData test for error when the interpolation method is not valid
"""
import pysd
file_name = "data/input.xlsx"
sheet = "Horizontal"
time_row_or_col = "4"
cell = "C5"
coords = {'ABC': ['A', 'B', 'C'], 'XY': ['X']}
interp = "hold forward"
py_name = "test_data_h3d_interpnv"
with self.assertRaises(ValueError):
pysd.external.ExtData(file_name=file_name,
sheet=sheet,
time_row_or_col=time_row_or_col,
root=_root,
cell=cell,
coords=coords,
interp=interp,
py_name=py_name)
def test_data_h3d_interp(self):
"""
ExtData test for error when the interpolation method is different
"""
import pysd
file_name = "data/input.xlsx"
sheet = "Horizontal"
time_row_or_col = "4"
cell_1 = "C5"
cell_2 = "C8"
coords_1 = {'ABC': ['A', 'B', 'C'], 'XY': ['X']}
coords_2 = {'ABC': ['A', 'B', 'C'], 'XY': ['Y']}
interp = None
interp2 = "look forward"
py_name = "test_data_h3d_interp"
data = pysd.external.ExtData(file_name=file_name,
sheet=sheet,
time_row_or_col=time_row_or_col,
root=_root,
cell=cell_1,
coords=coords_1,
interp=interp,
py_name=py_name)
with self.assertRaises(ValueError):
data.add(file_name=file_name,
sheet=sheet,
time_row_or_col=time_row_or_col,
cell=cell_2,
coords=coords_2,
interp=interp2)
def test_data_h3d_add(self):
"""
ExtData test for error when add doesn't have the same dim
"""
import pysd
file_name = "data/input.xlsx"
sheet = "Horizontal"
time_row_or_col = "4"
cell_1 = "C5"
cell_2 = "C8"
coords_1 = {'ABC': ['A', 'B', 'C'], 'XY': ['X']}
coords_2 = {'XY': ['Y'], 'ABC': ['A', 'B', 'C']}
interp = None
py_name = "test_data_h3d_add"
data = pysd.external.ExtData(file_name=file_name,
sheet=sheet,
time_row_or_col=time_row_or_col,
root=_root,
cell=cell_1,
coords=coords_1,
interp=interp,
py_name=py_name)
with self.assertRaises(ValueError):
data.add(file_name=file_name,
sheet=sheet,
time_row_or_col=time_row_or_col,
cell=cell_2,
coords=coords_2,
interp=interp)
def test_lookup_h3d_add(self):
"""
ExtLookup test for error when add doesn't have the same dim
"""
import pysd
file_name = "data/input.xlsx"
sheet = "Horizontal"
x_row_or_col = "4"
cell_1 = "C5"
cell_2 = "C8"
coords_1 = {'ABC': ['A', 'B', 'C'], 'XY': ['X']}
coords_2 = {'ABC': ['A', 'B', 'C']}
py_name = "test_lookup_h3d_add"
data = pysd.external.ExtLookup(file_name=file_name,
sheet=sheet,
x_row_or_col=x_row_or_col,
root=_root,
cell=cell_1,
coords=coords_1,
py_name=py_name)
with self.assertRaises(ValueError):
data.add(file_name=file_name,
sheet=sheet,
x_row_or_col=x_row_or_col,
cell=cell_2,
coords=coords_2)
def test_constant_h3d_add(self):
"""
ExtConstant test for error when add doesn't have the same dim
"""
import pysd
file_name = "data/input.xlsx"
sheet = "Horizontal"
cell = "C5"
cell2 = "C8"
coords = {'XY': ['X'],
'ABC': ['A', 'B', 'C'],
'val': [0, 1, 2, 3, 5, 6, 7, 8]}
coords2 = {'XY': ['Y'],
'ABC': ['A', 'B', 'C'],
'val2': [0, 1, 2, 3, 5, 6, 7, 8]}
py_name = "test_constant_h3d_add"
data = pysd.external.ExtConstant(file_name=file_name,
sheet=sheet,
root=_root,
cell=cell,
coords=coords,
py_name=py_name)
with self.assertRaises(ValueError):
data.add(file_name=file_name,
sheet=sheet,
cell=cell2,
coords=coords2)
def test_constant_hns(self):
"""
Test for error in data when it doen't have the same
shape as the given coordinates
"""
import pysd
file_name = "data/input.xlsx"
sheet = "Horizontal"
cell = "data_2d"
coords = {'ABC': ['A', 'B']}
py_name = "test_constant_hns"
data = pysd.external.ExtConstant(file_name=file_name,
sheet=sheet,
root=_root,
cell=cell,
coords=coords,
py_name=py_name)
with self.assertRaises(ValueError):
data.initialize()
def text_openpyxl_str(self):
"""
Test for reading data with strings with openpyxl
"""
import pysd
pysd.external.External.missing = "keep"
file_name = "data/input.xlsx"
sheet = "CASE AND NON V" # test case insensitivity
cell = "series"
x_row_or_col = "unit"
coords = {}
py_name = "test_openpyxl_str"
data = pysd.external.ExtLookup(file_name=file_name,
sheet=sheet,
x_row_or_col=x_row_or_col,
root=_root,
cell=cell,
coords=coords,
py_name=py_name)
expected = xr.DataArray(
[np.nan, 1, 2, 3, 4, 5],
{'lookup_dim': [10., 11., 12., 13., 14., 15.]},
['lookup_dim'])
data.initialize()
self.assertTrue(data.data.equals(expected))
cell = "no_constant"
sheet = "caSE anD NON V" # test case insensitivity
data = pysd.external.ExtConstant(file_name=file_name,
sheet=sheet,
root=_root,
cell=cell,
coords=coords,
py_name=py_name)
data.initialize()
self.assertTrue(np.isnan(data.data))
class DownwardCompatibility(unittest.TestCase):
"""
These tests are defined to make the external objects compatible
with SDQC library. If any change in PySD breaks these tests it
should be checked with SDQC library and correct it.
"""
def test_constant_hn3dm_keep(self):
"""
Test for keep 3d horizontal constants with missing values by cellrange
name.
"""
import pysd
file_name = "data/input.xlsx"
sheet = "Horizontal missing"
cell_1 = "data_2d"
cell_2 = "data_2db"
coords_1 = {'XY': ['X'], 'ABC': ['A', 'B', 'C'],
'val': [0, 1, 2, 3, 5, 6, 7, 8]}
coords_2 = {'XY': ['Y'], 'ABC': ['A', 'B', 'C'],
'val': [0, 1, 2, 3, 5, 6, 7, 8]}
py_name = "test_constant_hn3dm_raise"
pysd.external.External.missing = "keep"
expected = xr.DataArray(
[[[0, 0, 1, 1, -1, -1, 0, np.nan],
[0, 1, 1, -1, -1, 0, np.nan, np.nan],
[np.nan, 1, -1, -1, 0, np.nan, np.nan, 0]],
[[1, -1, -1, 0, 0, 0, 0, 1],
[-1, -1., 0, np.nan, 0, 0, 1, np.nan],
[-1, 0, np.nan, np.nan, 0, 1, 1, -1]]],
{'XY': ['X', 'Y'], 'ABC': ['A', 'B', 'C'],
'val': [0, 1, 2, 3, 5, 6, 7, 8]},
['XY', 'ABC', 'val'])
data = pysd.external.ExtConstant(file_name=file_name,
sheet=sheet,
root=_root,
cell=cell_1,
coords=coords_1,
py_name=py_name)
data.add(file_name=file_name,
sheet=sheet,
cell=cell_2,
coords=coords_2)
data.initialize()
self.assertTrue(data().equals(expected))
def test_lookup_hn3dmd_keep(self):
"""
Test for keep 3d horizontal series interpolation with
missing data values.
"""
import pysd
file_name = "data/input.xlsx"
sheet = "Horizontal missing"
x_row_or_col = "15"
cell_1 = "C16"
cell_2 = "C19"
coords_1 = {'XY': ['X'], 'ABC': ['A', 'B', 'C']}
coords_2 = {'XY': ['Y'], 'ABC': ['A', 'B', 'C']}
py_name = "test_lookup_hn3dmd_ignore"
pysd.external.External.missing = "keep"
expected = xr.DataArray(
[[[0, 0, np.nan],
[1, -1, -1]],
[[0, 1, 1],
[-1, -1, 0]],
[[1, 1, -1],
[-1, 0, np.nan]],
[[1, -1, -1],
[0., np.nan, np.nan]],
[[-1, -1, 0],
[0, 0, 0]],
[[-1, 0, np.nan],
[0, 0, 1]],
[[0, np.nan, np.nan],
[0, 1, 1]],
[[np.nan, np.nan, 0],
[1, np.nan, -1]]],
{'XY': ['X', 'Y'], 'ABC': ['A', 'B', 'C'],
'lookup_dim': [0., 1., 2., 3., 5., 6., 7., 8.]},
['lookup_dim', 'XY', 'ABC'])
data = pysd.external.ExtLookup(file_name=file_name,
sheet=sheet,
x_row_or_col=x_row_or_col,
root=_root,
cell=cell_1,
coords=coords_1,
py_name=py_name)
data.add(file_name=file_name,
sheet=sheet,
x_row_or_col=x_row_or_col,
cell=cell_2,
coords=coords_2)
data.initialize()
self.assertTrue(data.data.equals(expected))
def test_data_interp_v1dm_keep(self):
    """
    Test "keep" policy for 1d vertical series interpolation when the
    series has missing or NaN data.

    The NaN in the time dimension and the raw cell values must be
    preserved (no interpolation of the missing entries).
    """
    import pysd
    file_name = "data/input.xlsx"
    sheet = "Vertical missing"
    time_row_or_col = "B"
    cell = "C5"
    coords = {}
    interp = None
    # Fixed: py_name previously read "test_data_interp_v1dm_ignore"
    # although this test exercises the "keep" missing-data policy.
    py_name = "test_data_interp_v1dm_keep"
    pysd.external.External.missing = "keep"
    expected = xr.DataArray(
        [0, 0, 1, 1, 3, -1, -1, 0, 0],
        {'time': [0., 1., 2., 3., np.nan, 5., 6., 7., 8.]},
        ['time'])
    data = pysd.external.ExtData(file_name=file_name,
                                 sheet=sheet,
                                 time_row_or_col=time_row_or_col,
                                 root=_root,
                                 cell=cell,
                                 coords=coords,
                                 interp=interp,
                                 py_name=py_name)
    data.initialize()
    self.assertTrue(data.data.equals(expected))
def test_data_interp_hnnm_keep(self):
    """
    "keep" policy for a horizontal series whose time axis is not
    strictly monotonous: values and time labels must be returned
    exactly as read from the sheet.
    """
    import pysd

    xlsx_file = "data/input.xlsx"
    sheet_name = "No monotonous"
    time_range = "time"
    data_range = "data_1d"
    pysd.external.External.missing = "keep"
    expected = xr.DataArray(
        [0, 0, 1, 1, -1, -1, 0, 0],
        {'time': [0., 1., 2., 7., 5., 6., 7., 8.]},
        ['time'])
    ext_data = pysd.external.ExtData(file_name=xlsx_file,
                                     sheet=sheet_name,
                                     time_row_or_col=time_range,
                                     root=_root,
                                     cell=data_range,
                                     coords={},
                                     interp=None,
                                     py_name="test_data_interp_hnnm")
    ext_data.initialize()
    self.assertTrue(ext_data.data.equals(expected))
def test_lookup_data_attr(self):
    """
    Check that after construction ExtData exposes a
    ``time_row_or_cols`` attribute and ExtLookup exposes an
    ``x_row_or_cols`` attribute (the constructor arguments are
    pluralised internally).
    """
    import pysd
    file_name = "data/input.xlsx"
    sheet = "No monotonous"
    time_row_or_col = "time"
    cell = "data_1d"
    coords = {}
    interp = None
    py_name = "test_data_interp_hnnm"
    # Same sheet/cell feeds both an ExtData and an ExtLookup object.
    datD = pysd.external.ExtData(file_name=file_name,
                                 sheet=sheet,
                                 time_row_or_col=time_row_or_col,
                                 root=_root,
                                 cell=cell,
                                 coords=coords,
                                 interp=interp,
                                 py_name=py_name)
    datL = pysd.external.ExtLookup(file_name=file_name,
                                   sheet=sheet,
                                   x_row_or_col=time_row_or_col,
                                   root=_root,
                                   cell=cell,
                                   coords=coords,
                                   py_name=py_name)
    datD.initialize()
    datL.initialize()
    self.assertTrue(hasattr(datD, 'time_row_or_cols'))
    self.assertTrue(hasattr(datL, 'x_row_or_cols'))
| 35.248328
| 79
| 0.454379
| 11,919
| 115,967
| 4.205722
| 0.037755
| 0.05458
| 0.036866
| 0.041653
| 0.903687
| 0.89076
| 0.863869
| 0.836339
| 0.813079
| 0.799394
| 0
| 0.021445
| 0.447507
| 115,967
| 3,289
| 80
| 35.259045
| 0.760937
| 0.072503
| 0
| 0.823504
| 0
| 0
| 0.083688
| 0.01408
| 0
| 0
| 0
| 0.000304
| 0.079915
| 1
| 0.042308
| false
| 0
| 0.046581
| 0
| 0.092308
| 0.000427
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
60818a6a802685c76e40ef6e81c2fc498e468a70
| 23,991
|
py
|
Python
|
models/BERT/bert.py
|
hansheng0512/LateTemporalModeling3DCNN
|
71c1d3fae9781c55059f0518e0b39781a535e153
|
[
"MIT"
] | 144
|
2020-08-06T02:18:49.000Z
|
2022-03-16T23:03:56.000Z
|
models/BERT/bert.py
|
hansheng0512/LateTemporalModeling3DCNN
|
71c1d3fae9781c55059f0518e0b39781a535e153
|
[
"MIT"
] | 26
|
2020-08-12T01:07:48.000Z
|
2022-01-11T16:28:08.000Z
|
models/BERT/bert.py
|
hansheng0512/LateTemporalModeling3DCNN
|
71c1d3fae9781c55059f0518e0b39781a535e153
|
[
"MIT"
] | 55
|
2020-08-13T03:00:17.000Z
|
2022-03-28T06:38:08.000Z
|
import torch.nn as nn
import torch
from .transformer import TransformerBlock, TransformerBlock2
from .embedding import BERTEmbedding, BERTEmbedding2, BERTEmbedding3, BERTEmbedding4
class BERT(nn.Module):
    """
    BERT model: Bidirectional Encoder Representations from Transformers.

    Prepends a learnable CLS token (affinely scaled by learnable ``a_2``
    and ``b_2``) to the input sequence, embeds it with BERTEmbedding4,
    and runs the result through a stack of Transformer blocks.  During
    training a Bernoulli sample randomly masks out input positions in
    the attention mask (the CLS position is always kept).
    """

    def __init__(self, input_dim, max_len, hidden=768, n_layers=12,
                 attn_heads=12, dropout=0.1, mask_prob=0.8):
        """
        :param input_dim: dimensionality of each input feature vector
        :param max_len: number of input positions (excluding CLS token)
        :param hidden: BERT model hidden size
        :param n_layers: number of Transformer blocks (layers)
        :param attn_heads: number of attention heads
        :param dropout: dropout rate
        :param mask_prob: probability of *keeping* an input position
            in the training-time attention mask
        """
        super().__init__()
        self.hidden = hidden
        self.n_layers = n_layers
        self.attn_heads = attn_heads
        self.max_len = max_len
        self.input_dim = input_dim
        self.mask_prob = mask_prob
        # NOTE(review): hard-coded .cuda() prevents CPU-only use — confirm
        # the training pipeline always runs on GPU.
        clsToken = torch.zeros(1, 1, self.input_dim).float().cuda()
        # Fixed: the original set ``clsToken.require_grad`` (a typo that
        # only created a dead attribute).  nn.Parameter already has
        # requires_grad=True by default, so no flag is needed here.
        self.clsToken = nn.Parameter(clsToken)
        torch.nn.init.normal_(self.clsToken, std=hidden ** -0.5)
        # Learnable per-channel scale/shift applied to the CLS token.
        self.a_2 = nn.Parameter(torch.ones_like(self.clsToken))
        self.b_2 = nn.Parameter(torch.zeros_like(self.clsToken))
        # paper noted they used 4*hidden_size for ff_network_hidden_size
        self.feed_forward_hidden = hidden * 4
        # embedding for BERT: max_len+1 positions to account for CLS.
        self.embedding = BERTEmbedding4(input_dim=input_dim, max_len=max_len + 1)
        # multi-layers transformer blocks, deep network
        self.transformer_blocks = nn.ModuleList(
            [TransformerBlock(hidden, attn_heads, self.feed_forward_hidden, dropout)
             for _ in range(n_layers)])
        for module in self.modules():
            if isinstance(module, nn.Linear):
                nn.init.normal_(module.weight, mean=0, std=0.02)
                if hasattr(module, "bias") and module.bias is not None:
                    nn.init.constant_(module.bias, 0.0)

    def forward(self, input_vectors):
        """
        :param input_vectors: (batch, max_len, input_dim) feature sequence
        :return: (transformer output, Bernoulli sample or None in eval mode)
        """
        batch_size = input_vectors.shape[0]
        sample = None
        if self.training:
            # Keep the CLS position with prob 1 and every other position
            # with prob mask_prob; build a (batch, 1, seq, seq) mask.
            bernolliMatrix = torch.cat(
                (torch.tensor([1]).float().cuda(),
                 (torch.tensor([self.mask_prob]).float().cuda()).repeat(self.max_len)),
                0).unsqueeze(0).repeat([batch_size, 1])
            self.bernolliDistributor = torch.distributions.Bernoulli(bernolliMatrix)
            sample = self.bernolliDistributor.sample()
            mask = (sample > 0).unsqueeze(1).repeat(1, sample.size(1), 1).unsqueeze(1)
        else:
            mask = torch.ones(batch_size, 1, self.max_len + 1, self.max_len + 1).cuda()
        # Scale/shift the CLS token, prepend it, then embed the sequence.
        clstoken_scales = self.clsToken * self.a_2 + self.b_2
        x = torch.cat((clstoken_scales.repeat(batch_size, 1, 1), input_vectors), 1)
        x = self.embedding(x)
        for transformer in self.transformer_blocks:
            x = transformer.forward(x, mask)
        return x, sample
class BERT2(nn.Module):
    """
    BERT variant using BERTEmbedding3, with explicit initialisation of
    Embedding (uniform), Linear (normal) and LayerNorm modules.
    """

    def __init__(self, input_dim, max_len, hidden=768, n_layers=12,
                 attn_heads=12, dropout=0.1, mask_prob=0.8):
        """
        :param input_dim: dimensionality of each input feature vector
        :param max_len: number of input positions (excluding CLS token)
        :param hidden: BERT model hidden size
        :param n_layers: number of Transformer blocks (layers)
        :param attn_heads: number of attention heads
        :param dropout: dropout rate
        :param mask_prob: probability of keeping an input position
            in the training-time attention mask
        """
        super().__init__()
        self.hidden = hidden
        self.n_layers = n_layers
        self.attn_heads = attn_heads
        self.max_len = max_len
        self.input_dim = input_dim
        self.mask_prob = mask_prob
        # NOTE(review): hard-coded .cuda() prevents CPU-only use.
        clsToken = torch.zeros(1, 1, self.input_dim).float().cuda()
        # Fixed: original set ``require_grad`` (typo, a dead attribute);
        # nn.Parameter is trainable by default.
        self.clsToken = nn.Parameter(clsToken)
        torch.nn.init.normal_(self.clsToken, std=hidden ** -0.5)
        # paper noted they used 4*hidden_size for ff_network_hidden_size
        self.feed_forward_hidden = hidden * 4
        self.embedding = BERTEmbedding3(input_dim=input_dim, max_len=max_len + 1)
        self.transformer_blocks = nn.ModuleList(
            [TransformerBlock(hidden, attn_heads, self.feed_forward_hidden, dropout)
             for _ in range(n_layers)])
        for module in self.modules():
            if isinstance(module, nn.Embedding):
                nn.init.uniform_(module.weight, -0.06, 0.06)
            if isinstance(module, nn.Linear):
                nn.init.normal_(module.weight, mean=0, std=0.02)
                if hasattr(module, "bias") and module.bias is not None:
                    nn.init.constant_(module.bias, 0.0)
            if isinstance(module, nn.LayerNorm):
                module.bias.data.zero_()
                module.weight.data.fill_(1.0)

    def forward(self, input_vectors):
        """
        :param input_vectors: (batch, max_len, input_dim) feature sequence
        :return: (transformer output, Bernoulli sample or None in eval mode)
        """
        batch_size = input_vectors.shape[0]
        sample = None
        if self.training:
            # CLS position always kept; others kept with prob mask_prob.
            bernolliMatrix = torch.cat(
                (torch.tensor([1]).float().cuda(),
                 (torch.tensor([self.mask_prob]).float().cuda()).repeat(self.max_len)),
                0).unsqueeze(0).repeat([batch_size, 1])
            self.bernolliDistributor = torch.distributions.Bernoulli(bernolliMatrix)
            sample = self.bernolliDistributor.sample()
            mask = (sample > 0).unsqueeze(1).repeat(1, sample.size(1), 1).unsqueeze(1)
        else:
            mask = torch.ones(batch_size, 1, self.max_len + 1, self.max_len + 1).cuda()
        x = torch.cat((self.clsToken.repeat(batch_size, 1, 1), input_vectors), 1)
        x = self.embedding(x)
        for transformer in self.transformer_blocks:
            x = transformer.forward(x, mask)
        return x, sample
class BERT3(nn.Module):
    """
    BERT variant using BERTEmbedding2 with default module initialisation
    and a feed-forward width equal to ``hidden`` (not 4*hidden).
    """

    def __init__(self, input_dim, max_len, hidden=768, n_layers=12,
                 attn_heads=12, dropout=0.1, mask_prob=0.8):
        """
        :param input_dim: dimensionality of each input feature vector
        :param max_len: number of input positions (excluding CLS token)
        :param hidden: BERT model hidden size
        :param n_layers: number of Transformer blocks (layers)
        :param attn_heads: number of attention heads
        :param dropout: dropout rate
        :param mask_prob: probability of keeping an input position
            in the training-time attention mask
        """
        super().__init__()
        self.hidden = hidden
        self.n_layers = n_layers
        self.attn_heads = attn_heads
        self.max_len = max_len
        self.input_dim = input_dim
        self.mask_prob = mask_prob
        # NOTE(review): hard-coded .cuda() prevents CPU-only use.
        clsToken = torch.zeros(1, 1, self.input_dim).float().cuda()
        # Fixed: original set ``require_grad`` (typo, a dead attribute);
        # nn.Parameter is trainable by default.
        self.clsToken = nn.Parameter(clsToken)
        torch.nn.init.normal_(self.clsToken, std=0.02)
        # Fixed misleading comment: unlike the sibling classes, this
        # variant deliberately(?) uses a feed-forward width of ``hidden``
        # — the old comment claimed 4*hidden. TODO confirm intent.
        self.feed_forward_hidden = hidden
        self.embedding = BERTEmbedding2(input_dim=input_dim, max_len=max_len + 1)
        self.transformer_blocks = nn.ModuleList(
            [TransformerBlock(hidden, attn_heads, self.feed_forward_hidden, dropout)
             for _ in range(n_layers)])

    def forward(self, input_vectors):
        """
        :param input_vectors: (batch, max_len, input_dim) feature sequence
        :return: (transformer output, Bernoulli sample or None in eval mode)
        """
        batch_size = input_vectors.shape[0]
        sample = None
        if self.training:
            # CLS position always kept; others kept with prob mask_prob.
            bernolliMatrix = torch.cat(
                (torch.tensor([1]).float().cuda(),
                 (torch.tensor([self.mask_prob]).float().cuda()).repeat(self.max_len)),
                0).unsqueeze(0).repeat([batch_size, 1])
            self.bernolliDistributor = torch.distributions.Bernoulli(bernolliMatrix)
            sample = self.bernolliDistributor.sample()
            mask = (sample > 0).unsqueeze(1).repeat(1, sample.size(1), 1).unsqueeze(1)
        else:
            mask = torch.ones(batch_size, 1, self.max_len + 1, self.max_len + 1).cuda()
        x = torch.cat((self.clsToken.repeat(batch_size, 1, 1), input_vectors), 1)
        x = self.embedding(x)
        for transformer in self.transformer_blocks:
            x = transformer.forward(x, mask)
        return x, sample
class BERT4(nn.Module):
    """
    BERT variant using BERTEmbedding2 with explicit normal initialisation
    of Embedding and Linear weights and standard LayerNorm resets.
    """

    def __init__(self, input_dim, max_len, hidden=768, n_layers=12,
                 attn_heads=12, dropout=0.1, mask_prob=0.8):
        """
        :param input_dim: dimensionality of each input feature vector
        :param max_len: number of input positions (excluding CLS token)
        :param hidden: BERT model hidden size
        :param n_layers: number of Transformer blocks (layers)
        :param attn_heads: number of attention heads
        :param dropout: dropout rate
        :param mask_prob: probability of keeping an input position
            in the training-time attention mask
        """
        super().__init__()
        self.hidden = hidden
        self.n_layers = n_layers
        self.attn_heads = attn_heads
        self.max_len = max_len
        self.input_dim = input_dim
        self.mask_prob = mask_prob
        # NOTE(review): hard-coded .cuda() prevents CPU-only use.
        clsToken = torch.zeros(1, 1, self.input_dim).float().cuda()
        # Fixed: original set ``require_grad`` (typo, a dead attribute);
        # nn.Parameter is trainable by default.
        self.clsToken = nn.Parameter(clsToken)
        torch.nn.init.normal_(self.clsToken, std=0.02)
        # paper noted they used 4*hidden_size for ff_network_hidden_size
        self.feed_forward_hidden = hidden * 4
        self.embedding = BERTEmbedding2(input_dim=input_dim, max_len=max_len + 1)
        self.transformer_blocks = nn.ModuleList(
            [TransformerBlock(hidden, attn_heads, self.feed_forward_hidden, dropout)
             for _ in range(n_layers)])
        for module in self.modules():
            if isinstance(module, nn.Embedding):
                nn.init.normal_(module.weight, mean=0, std=0.02)
            if isinstance(module, nn.Linear):
                nn.init.normal_(module.weight, mean=0, std=0.02)
                if hasattr(module, "bias") and module.bias is not None:
                    nn.init.constant_(module.bias, 0.0)
            if isinstance(module, nn.LayerNorm):
                module.bias.data.zero_()
                module.weight.data.fill_(1.0)

    def forward(self, input_vectors):
        """
        :param input_vectors: (batch, max_len, input_dim) feature sequence
        :return: (transformer output, Bernoulli sample or None in eval mode)
        """
        batch_size = input_vectors.shape[0]
        sample = None
        if self.training:
            # CLS position always kept; others kept with prob mask_prob.
            bernolliMatrix = torch.cat(
                (torch.tensor([1]).float().cuda(),
                 (torch.tensor([self.mask_prob]).float().cuda()).repeat(self.max_len)),
                0).unsqueeze(0).repeat([batch_size, 1])
            self.bernolliDistributor = torch.distributions.Bernoulli(bernolliMatrix)
            sample = self.bernolliDistributor.sample()
            mask = (sample > 0).unsqueeze(1).repeat(1, sample.size(1), 1).unsqueeze(1)
        else:
            mask = torch.ones(batch_size, 1, self.max_len + 1, self.max_len + 1).cuda()
        x = torch.cat((self.clsToken.repeat(batch_size, 1, 1), input_vectors), 1)
        x = self.embedding(x)
        for transformer in self.transformer_blocks:
            x = transformer.forward(x, mask)
        return x, sample
class BERT5(nn.Module):
    """
    BERT variant using BERTEmbedding2, default module initialisation and
    the standard 4*hidden feed-forward width.
    """

    def __init__(self, input_dim, max_len, hidden=768, n_layers=12,
                 attn_heads=12, dropout=0.1, mask_prob=0.8):
        """
        :param input_dim: dimensionality of each input feature vector
        :param max_len: number of input positions (excluding CLS token)
        :param hidden: BERT model hidden size
        :param n_layers: number of Transformer blocks (layers)
        :param attn_heads: number of attention heads
        :param dropout: dropout rate
        :param mask_prob: probability of keeping an input position
            in the training-time attention mask
        """
        super().__init__()
        self.hidden = hidden
        self.n_layers = n_layers
        self.attn_heads = attn_heads
        self.max_len = max_len
        self.input_dim = input_dim
        self.mask_prob = mask_prob
        # NOTE(review): hard-coded .cuda() prevents CPU-only use.
        clsToken = torch.zeros(1, 1, self.input_dim).float().cuda()
        # Fixed: original set ``require_grad`` (typo, a dead attribute);
        # nn.Parameter is trainable by default.
        self.clsToken = nn.Parameter(clsToken)
        torch.nn.init.normal_(self.clsToken, std=0.02)
        # paper noted they used 4*hidden_size for ff_network_hidden_size
        self.feed_forward_hidden = hidden * 4
        self.embedding = BERTEmbedding2(input_dim=input_dim, max_len=max_len + 1)
        self.transformer_blocks = nn.ModuleList(
            [TransformerBlock(hidden, attn_heads, self.feed_forward_hidden, dropout)
             for _ in range(n_layers)])

    def forward(self, input_vectors):
        """
        :param input_vectors: (batch, max_len, input_dim) feature sequence
        :return: (transformer output, Bernoulli sample or None in eval mode)
        """
        batch_size = input_vectors.shape[0]
        sample = None
        if self.training:
            # CLS position always kept; others kept with prob mask_prob.
            bernolliMatrix = torch.cat(
                (torch.tensor([1]).float().cuda(),
                 (torch.tensor([self.mask_prob]).float().cuda()).repeat(self.max_len)),
                0).unsqueeze(0).repeat([batch_size, 1])
            self.bernolliDistributor = torch.distributions.Bernoulli(bernolliMatrix)
            sample = self.bernolliDistributor.sample()
            mask = (sample > 0).unsqueeze(1).repeat(1, sample.size(1), 1).unsqueeze(1)
        else:
            mask = torch.ones(batch_size, 1, self.max_len + 1, self.max_len + 1).cuda()
        x = torch.cat((self.clsToken.repeat(batch_size, 1, 1), input_vectors), 1)
        x = self.embedding(x)
        for transformer in self.transformer_blocks:
            x = transformer.forward(x, mask)
        return x, sample
class BERT6(nn.Module):
    """
    BERT variant using the base BERTEmbedding with explicit normal
    initialisation of Embedding/Linear weights and LayerNorm resets.
    """

    def __init__(self, input_dim, max_len, hidden=768, n_layers=12,
                 attn_heads=12, dropout=0.1, mask_prob=0.8):
        """
        :param input_dim: dimensionality of each input feature vector
        :param max_len: number of input positions (excluding CLS token)
        :param hidden: BERT model hidden size
        :param n_layers: number of Transformer blocks (layers)
        :param attn_heads: number of attention heads
        :param dropout: dropout rate
        :param mask_prob: probability of keeping an input position
            in the training-time attention mask
        """
        super().__init__()
        self.hidden = hidden
        self.n_layers = n_layers
        self.attn_heads = attn_heads
        self.max_len = max_len
        self.input_dim = input_dim
        self.mask_prob = mask_prob
        # NOTE(review): hard-coded .cuda() prevents CPU-only use.
        clsToken = torch.zeros(1, 1, self.input_dim).float().cuda()
        # Fixed: original set ``require_grad`` (typo, a dead attribute);
        # nn.Parameter is trainable by default.
        self.clsToken = nn.Parameter(clsToken)
        torch.nn.init.normal_(self.clsToken, std=0.02)
        # paper noted they used 4*hidden_size for ff_network_hidden_size
        self.feed_forward_hidden = hidden * 4
        self.embedding = BERTEmbedding(input_dim=input_dim, max_len=max_len + 1)
        self.transformer_blocks = nn.ModuleList(
            [TransformerBlock(hidden, attn_heads, self.feed_forward_hidden, dropout)
             for _ in range(n_layers)])
        for module in self.modules():
            if isinstance(module, nn.Embedding):
                nn.init.normal_(module.weight, mean=0, std=0.02)
            if isinstance(module, nn.Linear):
                nn.init.normal_(module.weight, mean=0, std=0.02)
                if hasattr(module, "bias") and module.bias is not None:
                    nn.init.constant_(module.bias, 0.0)
            if isinstance(module, nn.LayerNorm):
                module.bias.data.zero_()
                module.weight.data.fill_(1.0)

    def forward(self, input_vectors):
        """
        :param input_vectors: (batch, max_len, input_dim) feature sequence
        :return: (transformer output, Bernoulli sample or None in eval mode)
        """
        batch_size = input_vectors.shape[0]
        sample = None
        if self.training:
            # CLS position always kept; others kept with prob mask_prob.
            bernolliMatrix = torch.cat(
                (torch.tensor([1]).float().cuda(),
                 (torch.tensor([self.mask_prob]).float().cuda()).repeat(self.max_len)),
                0).unsqueeze(0).repeat([batch_size, 1])
            self.bernolliDistributor = torch.distributions.Bernoulli(bernolliMatrix)
            sample = self.bernolliDistributor.sample()
            mask = (sample > 0).unsqueeze(1).repeat(1, sample.size(1), 1).unsqueeze(1)
        else:
            mask = torch.ones(batch_size, 1, self.max_len + 1, self.max_len + 1).cuda()
        x = torch.cat((self.clsToken.repeat(batch_size, 1, 1), input_vectors), 1)
        x = self.embedding(x)
        for transformer in self.transformer_blocks:
            x = transformer.forward(x, mask)
        return x, sample
class BERT7(nn.Module):
    """
    BERT variant that masks by *replacing dropped inputs* with a learnable
    mask token instead of zeroing attention: the attention mask is always
    all-ones, and during training positions sampled as 0 have their input
    vectors overwritten with ``maskToken``.
    """

    def __init__(self, input_dim, max_len, hidden=768, n_layers=12,
                 attn_heads=12, dropout=0.1, mask_prob=0.8):
        """
        :param input_dim: dimensionality of each input feature vector
        :param max_len: number of input positions (excluding CLS token)
        :param hidden: BERT model hidden size
        :param n_layers: number of Transformer blocks (layers)
        :param attn_heads: number of attention heads
        :param dropout: dropout rate
        :param mask_prob: probability of keeping an input position
            (positions sampled as 0 are replaced by the mask token)
        """
        super().__init__()
        self.hidden = hidden
        self.n_layers = n_layers
        self.attn_heads = attn_heads
        self.max_len = max_len
        self.input_dim = input_dim
        self.mask_prob = mask_prob
        # NOTE(review): hard-coded .cuda() prevents CPU-only use.
        # Fixed: original set ``require_grad`` on both tokens (typo, a dead
        # attribute); nn.Parameter is trainable by default.
        clsToken = torch.zeros(1, 1, self.input_dim).float().cuda()
        torch.nn.init.normal_(clsToken, std=0.02)
        self.clsToken = nn.Parameter(clsToken)
        maskToken = torch.zeros(1, 1, self.input_dim).float().cuda()
        torch.nn.init.normal_(maskToken, std=0.02)
        self.maskToken = nn.Parameter(maskToken)
        # paper noted they used 4*hidden_size for ff_network_hidden_size
        self.feed_forward_hidden = hidden * 4
        self.embedding = BERTEmbedding2(input_dim=input_dim, max_len=max_len + 1)
        self.transformer_blocks = nn.ModuleList(
            [TransformerBlock(hidden, attn_heads, self.feed_forward_hidden, dropout)
             for _ in range(n_layers)])

    def forward(self, input_vectors):
        """
        :param input_vectors: (batch, max_len, input_dim) feature sequence
        :return: (transformer output, Bernoulli sample or None in eval mode)
        """
        batch_size = input_vectors.shape[0]
        sample = None
        # Attention mask is all-ones here; masking happens on the inputs.
        mask = torch.ones(batch_size, 1, self.max_len + 1, self.max_len + 1).cuda()
        x = torch.cat((self.clsToken.repeat(batch_size, 1, 1), input_vectors), 1)
        if self.training:
            # CLS position always kept; others kept with prob mask_prob.
            bernolliMatrix = torch.cat(
                (torch.tensor([1]).float().cuda(),
                 (torch.tensor([self.mask_prob]).float().cuda()).repeat(self.max_len)),
                0).unsqueeze(0).repeat([batch_size, 1])
            self.bernolliDistributor = torch.distributions.Bernoulli(bernolliMatrix)
            sample = self.bernolliDistributor.sample()
            # NOTE(review): assigns a (1, 1, input_dim) parameter through a
            # boolean mask on a 3-D tensor — verify the broadcast works as
            # intended, and that mutating ``x`` in place (blocking gradient
            # flow to masked positions' inputs) is desired.
            x[sample == 0] = self.maskToken
        x = self.embedding(x)
        for transformer in self.transformer_blocks:
            x = transformer.forward(x, mask)
        return x, sample
class BERT5_BOTH(nn.Module):
    """
    Two-stream (RGB + optical-flow) BERT: each stream gets its own CLS
    token and embedding, and the streams are processed jointly by a stack
    of TransformerBlock2 layers that attend across both.
    """

    def __init__(self, input_dim, max_len, hidden=768, n_layers=12,
                 attn_heads=12, dropout=0.1, mask_prob=0.8):
        """
        :param input_dim: dimensionality of each input feature vector
        :param max_len: number of input positions (excluding CLS token)
        :param hidden: BERT model hidden size
        :param n_layers: number of Transformer blocks (layers)
        :param attn_heads: number of attention heads
        :param dropout: dropout rate
        :param mask_prob: probability of keeping an input position
            in the training-time attention mask
        """
        super().__init__()
        self.hidden = hidden
        self.n_layers = n_layers
        self.attn_heads = attn_heads
        self.max_len = max_len
        self.input_dim = input_dim
        self.mask_prob = mask_prob
        # NOTE(review): hard-coded .cuda() prevents CPU-only use.
        # Fixed: original set ``require_grad`` on both tokens (typo, a dead
        # attribute); nn.Parameter is trainable by default.
        clsToken_rgb = torch.zeros(1, 1, self.input_dim).float().cuda()
        torch.nn.init.normal_(clsToken_rgb, std=0.02)
        self.clsToken_rgb = nn.Parameter(clsToken_rgb)
        clsToken_flow = torch.zeros(1, 1, self.input_dim).float().cuda()
        torch.nn.init.normal_(clsToken_flow, std=0.02)
        self.clsToken_flow = nn.Parameter(clsToken_flow)
        # paper noted they used 4*hidden_size for ff_network_hidden_size
        self.feed_forward_hidden = hidden * 4
        # One embedding per stream.
        self.embedding1 = BERTEmbedding2(input_dim=input_dim, max_len=max_len + 1)
        self.embedding2 = BERTEmbedding2(input_dim=input_dim, max_len=max_len + 1)
        self.transformer_blocks = nn.ModuleList(
            [TransformerBlock2(hidden, attn_heads, self.feed_forward_hidden, dropout)
             for _ in range(n_layers)])

    def forward(self, input_vectors_rgb, input_vectors_flow):
        """
        :param input_vectors_rgb: (batch, max_len, input_dim) RGB features
        :param input_vectors_flow: (batch, max_len, input_dim) flow features
        :return: (rgb output, flow output, Bernoulli sample or None)
        """
        batch_size = input_vectors_rgb.shape[0]
        sample = None
        if self.training:
            # CLS position always kept; others kept with prob mask_prob.
            bernolliMatrix = torch.cat(
                (torch.tensor([1]).float().cuda(),
                 (torch.tensor([self.mask_prob]).float().cuda()).repeat(self.max_len)),
                0).unsqueeze(0).repeat([batch_size, 1])
            self.bernolliDistributor = torch.distributions.Bernoulli(bernolliMatrix)
            sample = self.bernolliDistributor.sample()
            mask = (sample > 0).unsqueeze(1).repeat(1, sample.size(1), 1).unsqueeze(1)
        else:
            mask = torch.ones(batch_size, 1, self.max_len + 1, self.max_len + 1).cuda()
        x_rgb = torch.cat((self.clsToken_rgb.repeat(batch_size, 1, 1),
                           input_vectors_rgb), 1)
        x_rgb = self.embedding1(x_rgb)
        x_flow = torch.cat((self.clsToken_flow.repeat(batch_size, 1, 1),
                            input_vectors_flow), 1)
        # Fixed: the flow stream was embedded with embedding1, leaving
        # embedding2 completely unused.
        x_flow = self.embedding2(x_flow)
        # Fixed: chain each block's outputs into the next block; previously
        # every block received the same initial embeddings and only the last
        # block's output was returned (all but one layer were wasted).
        for transformer in self.transformer_blocks:
            x_rgb, x_flow = transformer.forward(x_rgb, x_flow, mask)
        return x_rgb, x_flow, sample
| 40.525338
| 181
| 0.639657
| 3,068
| 23,991
| 4.813885
| 0.04987
| 0.026813
| 0.022344
| 0.016115
| 0.940213
| 0.935676
| 0.930192
| 0.923285
| 0.91577
| 0.913535
| 0
| 0.021844
| 0.255804
| 23,991
| 591
| 182
| 40.593909
| 0.805366
| 0.223667
| 0
| 0.851133
| 0
| 0
| 0.000889
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.05178
| false
| 0
| 0.012945
| 0
| 0.116505
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
60954006b59bdd74f41c9e7160d407652c134f87
| 10,046
|
py
|
Python
|
asana/resources/gen/teams.py
|
tirkarthi/python-asana
|
8b3f0677d8fcca81b5757d586a388ef9aeb428eb
|
[
"MIT"
] | 266
|
2015-02-13T18:14:08.000Z
|
2022-03-29T22:03:33.000Z
|
asana/resources/gen/teams.py
|
tirkarthi/python-asana
|
8b3f0677d8fcca81b5757d586a388ef9aeb428eb
|
[
"MIT"
] | 77
|
2015-02-13T00:22:11.000Z
|
2022-02-20T07:56:14.000Z
|
asana/resources/gen/teams.py
|
tirkarthi/python-asana
|
8b3f0677d8fcca81b5757d586a388ef9aeb428eb
|
[
"MIT"
] | 95
|
2015-03-18T23:28:57.000Z
|
2022-02-20T23:28:58.000Z
|
# coding=utf-8
class _Teams:
def __init__(self, client=None):
self.client = client
def add_user_for_team(self, team_gid, params=None, **options):
"""Add a user to a team
:param str team_gid: (required) Globally unique identifier for the team.
:param Object params: Parameters for the request
:param **options
- opt_fields {list[str]}: Defines fields to return. Some requests return *compact* representations of objects in order to conserve resources and complete the request more efficiently. Other times requests return more information than you may need. This option allows you to list the exact set of fields that the API should be sure to return for the objects. The field names should be provided as paths, described below. The id of included objects will always be returned, regardless of the field options.
- opt_pretty {bool}: Provides “pretty” output. Provides the response in a “pretty” format. In the case of JSON this means doing proper line breaking and indentation to make it readable. This will take extra time and increase the response size so it is advisable only to use this during debugging.
:return: Object
"""
if params is None:
params = {}
path = "/teams/{team_gid}/addUser".replace("{team_gid}", team_gid)
return self.client.post(path, params, **options)
def create_team(self, params=None, **options):
"""Create a team
:param Object params: Parameters for the request
:param **options
- offset {str}: Offset token. An offset to the next page returned by the API. A pagination request will return an offset token, which can be used as an input parameter to the next request. If an offset is not passed in, the API will return the first page of results. 'Note: You can only pass in an offset that was returned to you via a previously paginated request.'
- limit {int}: Results per page. The number of objects to return per page. The value must be between 1 and 100.
- opt_fields {list[str]}: Defines fields to return. Some requests return *compact* representations of objects in order to conserve resources and complete the request more efficiently. Other times requests return more information than you may need. This option allows you to list the exact set of fields that the API should be sure to return for the objects. The field names should be provided as paths, described below. The id of included objects will always be returned, regardless of the field options.
- opt_pretty {bool}: Provides “pretty” output. Provides the response in a “pretty” format. In the case of JSON this means doing proper line breaking and indentation to make it readable. This will take extra time and increase the response size so it is advisable only to use this during debugging.
:return: Object
"""
if params is None:
params = {}
path = "/teams"
return self.client.post(path, params, **options)
def get_team(self, team_gid, params=None, **options):
"""Get a team
:param str team_gid: (required) Globally unique identifier for the team.
:param Object params: Parameters for the request
:param **options
- offset {str}: Offset token. An offset to the next page returned by the API. A pagination request will return an offset token, which can be used as an input parameter to the next request. If an offset is not passed in, the API will return the first page of results. 'Note: You can only pass in an offset that was returned to you via a previously paginated request.'
- limit {int}: Results per page. The number of objects to return per page. The value must be between 1 and 100.
- opt_fields {list[str]}: Defines fields to return. Some requests return *compact* representations of objects in order to conserve resources and complete the request more efficiently. Other times requests return more information than you may need. This option allows you to list the exact set of fields that the API should be sure to return for the objects. The field names should be provided as paths, described below. The id of included objects will always be returned, regardless of the field options.
- opt_pretty {bool}: Provides “pretty” output. Provides the response in a “pretty” format. In the case of JSON this means doing proper line breaking and indentation to make it readable. This will take extra time and increase the response size so it is advisable only to use this during debugging.
:return: Object
"""
if params is None:
params = {}
path = "/teams/{team_gid}".replace("{team_gid}", team_gid)
return self.client.get(path, params, **options)
def get_teams_for_organization(self, workspace_gid, params=None, **options):
"""Get teams in an organization
:param str workspace_gid: (required) Globally unique identifier for the workspace or organization.
:param Object params: Parameters for the request
:param **options
- offset {str}: Offset token. An offset to the next page returned by the API. A pagination request will return an offset token, which can be used as an input parameter to the next request. If an offset is not passed in, the API will return the first page of results. 'Note: You can only pass in an offset that was returned to you via a previously paginated request.'
- limit {int}: Results per page. The number of objects to return per page. The value must be between 1 and 100.
- opt_fields {list[str]}: Defines fields to return. Some requests return *compact* representations of objects in order to conserve resources and complete the request more efficiently. Other times requests return more information than you may need. This option allows you to list the exact set of fields that the API should be sure to return for the objects. The field names should be provided as paths, described below. The id of included objects will always be returned, regardless of the field options.
- opt_pretty {bool}: Provides “pretty” output. Provides the response in a “pretty” format. In the case of JSON this means doing proper line breaking and indentation to make it readable. This will take extra time and increase the response size so it is advisable only to use this during debugging.
:return: Object
"""
if params is None:
params = {}
path = "/organizations/{workspace_gid}/teams".replace("{workspace_gid}", workspace_gid)
return self.client.get_collection(path, params, **options)
def get_teams_for_user(self, user_gid, params=None, **options):
"""Get teams for a user
:param str user_gid: (required) A string identifying a user. This can either be the string \"me\", an email, or the gid of a user.
:param Object params: Parameters for the request
- organization {str}: (required) The workspace or organization to filter teams on.
:param **options
- offset {str}: Offset token. An offset to the next page returned by the API. A pagination request will return an offset token, which can be used as an input parameter to the next request. If an offset is not passed in, the API will return the first page of results. 'Note: You can only pass in an offset that was returned to you via a previously paginated request.'
- limit {int}: Results per page. The number of objects to return per page. The value must be between 1 and 100.
- opt_fields {list[str]}: Defines fields to return. Some requests return *compact* representations of objects in order to conserve resources and complete the request more efficiently. Other times requests return more information than you may need. This option allows you to list the exact set of fields that the API should be sure to return for the objects. The field names should be provided as paths, described below. The id of included objects will always be returned, regardless of the field options.
- opt_pretty {bool}: Provides “pretty” output. Provides the response in a “pretty” format. In the case of JSON this means doing proper line breaking and indentation to make it readable. This will take extra time and increase the response size so it is advisable only to use this during debugging.
:return: Object
"""
if params is None:
params = {}
path = "/users/{user_gid}/teams".replace("{user_gid}", user_gid)
return self.client.get_collection(path, params, **options)
def remove_user_for_team(self, team_gid, params=None, **options):
    """Remove a user from a team.

    :param str team_gid: (required) Globally unique identifier for the team.
    :param dict params: Parameters for the request.
    :param options: Extra request options forwarded to the client, e.g.
        ``opt_fields`` (list[str]) exact fields to return,
        ``opt_pretty`` (bool) pretty-printed response.
    :return: Object
    """
    body = {} if params is None else params
    path = "/teams/{team_gid}/removeUser".replace("{team_gid}", team_gid)
    return self.client.post(path, body, **options)
| 102.510204
| 517
| 0.715708
| 1,522
| 10,046
| 4.68594
| 0.105782
| 0.01346
| 0.010095
| 0.022715
| 0.929753
| 0.928491
| 0.920639
| 0.898906
| 0.887689
| 0.876753
| 0
| 0.002186
| 0.225762
| 10,046
| 97
| 518
| 103.56701
| 0.91476
| 0.795341
| 0
| 0.515152
| 0
| 0
| 0.117502
| 0.069264
| 0
| 0
| 0
| 0
| 0
| 1
| 0.212121
| false
| 0
| 0
| 0
| 0.424242
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
60c5d0c141159b229b5e7b645fbcebcf23d73822
| 166
|
py
|
Python
|
golr_schema_generator/__init__.py
|
deepakunni3/golr-schema-generator
|
a3a20e53b35471f02889caecefe0309af45f9361
|
[
"BSD-3-Clause"
] | null | null | null |
golr_schema_generator/__init__.py
|
deepakunni3/golr-schema-generator
|
a3a20e53b35471f02889caecefe0309af45f9361
|
[
"BSD-3-Clause"
] | 5
|
2020-02-18T01:49:23.000Z
|
2020-02-19T00:11:31.000Z
|
golr_schema_generator/__init__.py
|
deepakunni3/golr-schema-generator
|
a3a20e53b35471f02889caecefe0309af45f9361
|
[
"BSD-3-Clause"
] | null | null | null |
# Package entry point: re-export the public generator classes so callers can
# write `from golr_schema_generator import SchemaGenerator, GolrSchemaGenerator`.
from golr_schema_generator.schema_generator import SchemaGenerator
from golr_schema_generator.golr_schema_generator import GolrSchemaGenerator

# Package version string (keep in sync with release tags / packaging metadata).
__version__ = '0.0.1'
| 33.2
| 75
| 0.885542
| 21
| 166
| 6.47619
| 0.47619
| 0.441176
| 0.419118
| 0.338235
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.019481
| 0.072289
| 166
| 4
| 76
| 41.5
| 0.863636
| 0
| 0
| 0
| 0
| 0
| 0.03012
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.666667
| 0
| 0.666667
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
60d91e9dc358b591ace168a1dad57f6a3116d1da
| 353
|
py
|
Python
|
taller10/p.py
|
RobelisRuiz/Programacion-4
|
8c979ad862430801dd2608298a0369ec8457bde9
|
[
"Apache-2.0"
] | null | null | null |
taller10/p.py
|
RobelisRuiz/Programacion-4
|
8c979ad862430801dd2608298a0369ec8457bde9
|
[
"Apache-2.0"
] | null | null | null |
taller10/p.py
|
RobelisRuiz/Programacion-4
|
8c979ad862430801dd2608298a0369ec8457bde9
|
[
"Apache-2.0"
] | null | null | null |
# Product records: "nombre" = name, "significado" = value.
# All seven entries are the same record, so generate them in a comprehension
# instead of repeating the literal.  dict(...) makes each entry an independent
# dict (same as the original literals), so mutating one does not affect the rest.
_PRODUCT = {'nombre': 'portátil', 'significado': 800}
p = [dict(_PRODUCT) for _ in range(7)]
| 35.3
| 48
| 0.558074
| 29
| 353
| 6.793103
| 0.172414
| 0.497462
| 0.888325
| 0.994924
| 0.994924
| 0.994924
| 0.994924
| 0.994924
| 0.994924
| 0.994924
| 0
| 0.073944
| 0.195467
| 353
| 10
| 49
| 35.3
| 0.619718
| 0
| 0
| 0.777778
| 0
| 0
| 0.507246
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 11
|
60f6e94ca24272fcb0c9d4c12cdebb05e7d38a53
| 525
|
py
|
Python
|
eval_covid20cases_timm-regnetx_002_Clahe.py
|
BrunoKrinski/segtool
|
cb604b5f38104c43a76450136e37c3d1c4b6d275
|
[
"MIT"
] | null | null | null |
eval_covid20cases_timm-regnetx_002_Clahe.py
|
BrunoKrinski/segtool
|
cb604b5f38104c43a76450136e37c3d1c4b6d275
|
[
"MIT"
] | null | null | null |
eval_covid20cases_timm-regnetx_002_Clahe.py
|
BrunoKrinski/segtool
|
cb604b5f38104c43a76450136e37c3d1c4b6d275
|
[
"MIT"
] | null | null | null |
import os

# Evaluate each of the 5 cross-validation folds of the
# covid20cases / unetplusplus / timm-regnetx_002 / Clahe experiment.
# The five commands differ only in the fold index, so generate them
# instead of repeating the literal five times.
ls = [
    "python main.py --configs configs/"
    "eval_covid20cases_unetplusplus_timm-regnetx_002_{}_Clahe.yml".format(fold)
    for fold in range(5)
]
for cmd in ls:
    # NOTE(review): os.system kept for parity with the original script;
    # subprocess.run([...], check=True) would surface failures explicitly.
    os.system(cmd)
| 47.727273
| 99
| 0.841905
| 80
| 525
| 5.15
| 0.3
| 0.121359
| 0.145631
| 0.230583
| 0.902913
| 0.902913
| 0.902913
| 0.902913
| 0.902913
| 0.902913
| 0
| 0.060729
| 0.059048
| 525
| 11
| 100
| 47.727273
| 0.773279
| 0
| 0
| 0
| 0
| 0
| 0.874525
| 0.636882
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.111111
| 0
| 0.111111
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 9
|
716e249fd4eb2ebdb6619fdbc272903ce94d263c
| 141
|
py
|
Python
|
cellsino/__init__.py
|
RI-imaging/cellsino
|
868466da8e9a4b4c5640764d0ace19e4b638f2e9
|
[
"BSD-3-Clause"
] | 2
|
2019-03-28T01:27:51.000Z
|
2019-06-06T00:09:25.000Z
|
cellsino/__init__.py
|
RI-imaging/cellsino
|
868466da8e9a4b4c5640764d0ace19e4b638f2e9
|
[
"BSD-3-Clause"
] | 4
|
2019-02-06T16:58:22.000Z
|
2021-04-12T09:00:23.000Z
|
cellsino/__init__.py
|
RI-imaging/cellsino
|
868466da8e9a4b4c5640764d0ace19e4b638f2e9
|
[
"BSD-3-Clause"
] | null | null | null |
# Flat public API for the cellsino package: expose the version string, the
# phantom definitions submodule, and the Sinogram generator at package level.
from ._version import version as __version__  # noqa: F401
from . import phantoms  # noqa: F401
from .sinogram import Sinogram  # noqa: F401
| 35.25
| 58
| 0.751773
| 19
| 141
| 5.315789
| 0.421053
| 0.237624
| 0.237624
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.078261
| 0.184397
| 141
| 3
| 59
| 47
| 0.8
| 0.22695
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
718703fe3a6463219ad76e3fad974cc986b1bff4
| 3,059
|
py
|
Python
|
test/test_util.py
|
ncloudioj/hustle
|
890793b996d5ba0660f4f16dd71c88abc86ae5b5
|
[
"MIT"
] | 88
|
2015-01-07T16:57:29.000Z
|
2021-05-31T15:11:45.000Z
|
test/test_util.py
|
ncloudioj/hustle
|
890793b996d5ba0660f4f16dd71c88abc86ae5b5
|
[
"MIT"
] | 3
|
2015-08-17T09:42:20.000Z
|
2018-01-12T18:31:12.000Z
|
test/test_util.py
|
ncloudioj/hustle
|
890793b996d5ba0660f4f16dd71c88abc86ae5b5
|
[
"MIT"
] | 10
|
2015-04-05T14:41:32.000Z
|
2018-12-02T20:46:57.000Z
|
import unittest
from hustle.core.util import SortedIterator
class TestSortedIterator(unittest.TestCase):
    """Tests for SortedIterator, a k-way merge over pre-sorted input streams."""

    VALUE = 'some_value'

    def _pairs(self, *keys):
        # Build ((1, k), 'some_value') records for each secondary sort key k.
        return [((1, k), self.VALUE) for k in keys]

    def test_merges_sorted_inputs(self):
        streams = [
            self._pairs(1, 2, 3),
            self._pairs(100, 200, 300),
            self._pairs(10, 20, 30),
            self._pairs(4, 40, 400),
        ]
        merged = SortedIterator(streams)
        expected = self._pairs(1, 2, 3, 4, 10, 20, 30, 40, 100, 200, 300, 400)
        self.assertListEqual(list(merged), expected)

    def test_assumes_individual_inputs_are_already_sorted(self):
        # Streams are taken at face value: the iterator does not re-sort
        # within a stream, so out-of-order input stays out of order.
        streams = [
            [((2, 1), self.VALUE), ((1, 1), self.VALUE)],
            [((4, 1), self.VALUE), ((3, 1), self.VALUE)],
        ]
        merged = SortedIterator(streams)
        expected = [
            ((2, 1), self.VALUE),
            ((1, 1), self.VALUE),
            ((4, 1), self.VALUE),
            ((3, 1), self.VALUE),
        ]
        self.assertListEqual(list(merged), expected)

    def test_handles_duplicates(self):
        # Equal keys appearing in different streams must all be emitted.
        streams = [
            self._pairs(1, 2),
            self._pairs(1, 2, 3),
            self._pairs(3),
        ]
        merged = SortedIterator(streams)
        expected = self._pairs(1, 1, 2, 2, 3, 3)
        self.assertListEqual(list(merged), expected)

    def test_handles_empty_input(self):
        streams = [
            self._pairs(1),
            [],  # an empty stream must simply be skipped
            [((2, 1), self.VALUE)],
        ]
        merged = SortedIterator(streams)
        expected = [((1, 1), self.VALUE), ((2, 1), self.VALUE)]
        self.assertListEqual(list(merged), expected)
| 30.59
| 64
| 0.396862
| 280
| 3,059
| 4.085714
| 0.153571
| 0.377622
| 0.297203
| 0.096154
| 0.829545
| 0.829545
| 0.807692
| 0.784965
| 0.749126
| 0.701049
| 0
| 0.068415
| 0.42661
| 3,059
| 99
| 65
| 30.89899
| 0.583808
| 0.005884
| 0
| 0.606383
| 0
| 0
| 0.157947
| 0
| 0
| 0
| 0
| 0
| 0.042553
| 1
| 0.042553
| false
| 0
| 0.021277
| 0
| 0.074468
| 0
| 0
| 0
| 0
| null | 1
| 1
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
7199fd6828f784f4d676e6f6c1ec5ad37078f292
| 65,043
|
py
|
Python
|
src/oci/database_management/sql_tuning_client.py
|
LaudateCorpus1/oci-python-sdk
|
b0d3ce629d5113df4d8b83b7a6502b2c5bfa3015
|
[
"Apache-2.0",
"BSD-3-Clause"
] | null | null | null |
src/oci/database_management/sql_tuning_client.py
|
LaudateCorpus1/oci-python-sdk
|
b0d3ce629d5113df4d8b83b7a6502b2c5bfa3015
|
[
"Apache-2.0",
"BSD-3-Clause"
] | null | null | null |
src/oci/database_management/sql_tuning_client.py
|
LaudateCorpus1/oci-python-sdk
|
b0d3ce629d5113df4d8b83b7a6502b2c5bfa3015
|
[
"Apache-2.0",
"BSD-3-Clause"
] | null | null | null |
# coding: utf-8
# Copyright (c) 2016, 2022, Oracle and/or its affiliates. All rights reserved.
# This software is dual-licensed to you under the Universal Permissive License (UPL) 1.0 as shown at https://oss.oracle.com/licenses/upl or Apache License 2.0 as shown at http://www.apache.org/licenses/LICENSE-2.0. You may choose either license.
from __future__ import absolute_import
from oci._vendor import requests # noqa: F401
from oci._vendor import six
from oci import retry, circuit_breaker # noqa: F401
from oci.base_client import BaseClient
from oci.config import get_config_value_or_default, validate_config
from oci.signer import Signer
from oci.util import Sentinel, get_signer_from_authentication_type, AUTHENTICATION_TYPE_FIELD_NAME
from .models import database_management_type_mapping
# Sentinel meaning "argument not supplied", used throughout this client so an
# explicit None can be distinguished from an omitted header/query value.
missing = Sentinel("Missing")
class SqlTuningClient(object):
"""
Use the Database Management API to perform tasks such as obtaining performance and resource usage metrics
for a fleet of Managed Databases or a specific Managed Database, creating Managed Database Groups, and
running a SQL job on a Managed Database or Managed Database Group.
"""
def __init__(self, config, **kwargs):
    """
    Creates a new service client

    :param dict config:
        Configuration keys and values as per `SDK and Tool Configuration <https://docs.cloud.oracle.com/Content/API/Concepts/sdkconfig.htm>`__.
        The :py:meth:`~oci.config.from_file` method can be used to load configuration from a file. Alternatively, a ``dict`` can be passed. You can validate_config
        the dict using :py:meth:`~oci.config.validate_config`

    :param str service_endpoint: (optional)
        The endpoint of the service to call using this client. For example ``https://iaas.us-ashburn-1.oraclecloud.com``. If this keyword argument is
        not provided then it will be derived using the region in the config parameter. You should only provide this keyword argument if you have an explicit
        need to specify a service endpoint.

    :param timeout: (optional)
        The connection and read timeouts for the client. The default values are connection timeout 10 seconds and read timeout 60 seconds. This keyword argument can be provided
        as a single float, in which case the value provided is used for both the read and connection timeouts, or as a tuple of two floats. If
        a tuple is provided then the first value is used as the connection timeout and the second value as the read timeout.
    :type timeout: float or tuple(float, float)

    :param signer: (optional)
        The signer to use when signing requests made by the service client. The default is to use a :py:class:`~oci.signer.Signer` based on the values
        provided in the config parameter.

        One use case for this parameter is for `Instance Principals authentication <https://docs.cloud.oracle.com/Content/Identity/Tasks/callingservicesfrominstances.htm>`__
        by passing an instance of :py:class:`~oci.auth.signers.InstancePrincipalsSecurityTokenSigner` as the value for this keyword argument
    :type signer: :py:class:`~oci.signer.AbstractBaseSigner`

    :param obj retry_strategy: (optional)
        A retry strategy to apply to all calls made by this service client (i.e. at the client level). There is no retry strategy applied by default.
        Retry strategies can also be applied at the operation level by passing a ``retry_strategy`` keyword argument as part of calling the operation.
        Any value provided at the operation level will override whatever is specified at the client level.

        This should be one of the strategies available in the :py:mod:`~oci.retry` module. A convenience :py:data:`~oci.retry.DEFAULT_RETRY_STRATEGY`
        is also available. The specifics of the default retry strategy are described `here <https://docs.oracle.com/en-us/iaas/tools/python/latest/sdk_behaviors/retries.html>`__.

    :param obj circuit_breaker_strategy: (optional)
        A circuit breaker strategy to apply to all calls made by this service client (i.e. at the client level).
        This client uses :py:data:`~oci.circuit_breaker.DEFAULT_CIRCUIT_BREAKER_STRATEGY` as default if no circuit breaker strategy is provided.
        The specifics of circuit breaker strategy are described `here <https://docs.oracle.com/en-us/iaas/tools/python/latest/sdk_behaviors/circuit_breakers.html>`__.

    :param function circuit_breaker_callback: (optional)
        Callback function to receive any exceptions triggerred by the circuit breaker.

    :param allow_control_chars: (optional)
        allow_control_chars is a boolean to indicate whether or not this client should allow control characters in the response object. By default, the client will not
        allow control characters to be in the response object.
    """
    validate_config(config, signer=kwargs.get('signer'))
    # Signer precedence: explicit `signer` kwarg > authentication type named in
    # the config > API-key Signer assembled from the individual config values.
    if 'signer' in kwargs:
        signer = kwargs['signer']

    elif AUTHENTICATION_TYPE_FIELD_NAME in config:
        signer = get_signer_from_authentication_type(config)

    else:
        signer = Signer(
            tenancy=config["tenancy"],
            user=config["user"],
            fingerprint=config["fingerprint"],
            private_key_file_location=config.get("key_file"),
            pass_phrase=get_config_value_or_default(config, "pass_phrase"),
            private_key_content=config.get("key_content")
        )

    # Keyword arguments forwarded to BaseClient; base_path pins the service
    # API version (20201101) and the template derives the regional endpoint.
    base_client_init_kwargs = {
        'regional_client': True,
        'service_endpoint': kwargs.get('service_endpoint'),
        'base_path': '/20201101',
        'service_endpoint_template': 'https://dbmgmt.{region}.oci.{secondLevelDomain}',
        'skip_deserialization': kwargs.get('skip_deserialization', False),
        'circuit_breaker_strategy': kwargs.get('circuit_breaker_strategy', circuit_breaker.GLOBAL_CIRCUIT_BREAKER_STRATEGY)
    }
    if 'timeout' in kwargs:
        base_client_init_kwargs['timeout'] = kwargs.get('timeout')

    # A caller may pass circuit_breaker_strategy=None explicitly; fall back to
    # the SDK default strategy in that case rather than disabling it.
    if base_client_init_kwargs.get('circuit_breaker_strategy') is None:
        base_client_init_kwargs['circuit_breaker_strategy'] = circuit_breaker.DEFAULT_CIRCUIT_BREAKER_STRATEGY

    if 'allow_control_chars' in kwargs:
        base_client_init_kwargs['allow_control_chars'] = kwargs.get('allow_control_chars')

    self.base_client = BaseClient("sql_tuning", config, signer, database_management_type_mapping, **base_client_init_kwargs)
    # Client-level retry strategy and circuit-breaker callback; individual
    # operations may override the retry strategy per call.
    self.retry_strategy = kwargs.get('retry_strategy')
    self.circuit_breaker_callback = kwargs.get('circuit_breaker_callback')
def clone_sql_tuning_task(self, managed_database_id, clone_sql_tuning_task_details, **kwargs):
    """
    Clones and runs a SQL tuning task in the database.

    :param str managed_database_id: (required)
        The `OCID`__ of the Managed Database.

        __ https://docs.cloud.oracle.com/Content/General/Concepts/identifiers.htm

    :param oci.database_management.models.CloneSqlTuningTaskDetails clone_sql_tuning_task_details: (required)
        The detailed inputs required to clone a SQL tuning task.

    :param str opc_request_id: (optional)
        The client request ID for tracing.

    :param str opc_retry_token: (optional)
        A token that uniquely identifies a request so it can be retried in case of a timeout or
        server error without risk of executing that same action again. Retry tokens expire after 24
        hours, but can be invalidated before then due to conflicting operations. For example, if a resource
        has been deleted and purged from the system, then a retry of the original creation request
        might be rejected.

    :param obj retry_strategy: (optional)
        A retry strategy to apply to this specific operation/call. This will override any retry strategy set at the client-level.

        This should be one of the strategies available in the :py:mod:`~oci.retry` module. This operation will not retry by default, users can also use the convenient :py:data:`~oci.retry.DEFAULT_RETRY_STRATEGY` provided by the SDK to enable retries for it.
        The specifics of the default retry strategy are described `here <https://docs.oracle.com/en-us/iaas/tools/python/latest/sdk_behaviors/retries.html>`__.

        To have this operation explicitly not perform any retries, pass an instance of :py:class:`~oci.retry.NoneRetryStrategy`.

    :param bool allow_control_chars: (optional)
        allow_control_chars is a boolean to indicate whether or not this request should allow control characters in the response object.
        By default, the response will not allow control characters in strings

    :return: A :class:`~oci.response.Response` object with data of type :class:`~oci.database_management.models.SqlTuningTaskReturn`
    :rtype: :class:`~oci.response.Response`

    :example:
        Click `here <https://docs.cloud.oracle.com/en-us/iaas/tools/python-sdk-examples/latest/databasemanagement/clone_sql_tuning_task.py.html>`__ to see an example of how to use clone_sql_tuning_task API.
    """
    resource_path = "/managedDatabases/{managedDatabaseId}/actions/cloneSqlTuningTask"
    method = "POST"

    # Don't accept unknown kwargs
    expected_kwargs = [
        "allow_control_chars",
        "retry_strategy",
        "opc_request_id",
        "opc_retry_token"
    ]
    extra_kwargs = [_key for _key in six.iterkeys(kwargs) if _key not in expected_kwargs]
    if extra_kwargs:
        raise ValueError(
            "clone_sql_tuning_task got unknown kwargs: {!r}".format(extra_kwargs))

    path_params = {
        "managedDatabaseId": managed_database_id
    }

    path_params = {k: v for (k, v) in six.iteritems(path_params) if v is not missing}

    # Path parameters must be non-empty strings; anything else would produce a
    # malformed request URL, so fail fast here.
    for (k, v) in six.iteritems(path_params):
        if v is None or (isinstance(v, six.string_types) and len(v.strip()) == 0):
            raise ValueError('Parameter {} cannot be None, whitespace or empty string'.format(k))

    header_params = {
        "accept": "application/json",
        "content-type": "application/json",
        "opc-request-id": kwargs.get("opc_request_id", missing),
        "opc-retry-token": kwargs.get("opc_retry_token", missing)
    }
    # Drop headers the caller did not supply (still the `missing` sentinel).
    header_params = {k: v for (k, v) in six.iteritems(header_params) if v is not missing and v is not None}

    # Operation-level retry strategy (kwarg) takes precedence over the
    # client-level strategy configured in __init__.
    retry_strategy = self.base_client.get_preferred_retry_strategy(
        operation_retry_strategy=kwargs.get('retry_strategy'),
        client_retry_strategy=self.retry_strategy
    )

    if retry_strategy:
        if not isinstance(retry_strategy, retry.NoneRetryStrategy):
            # Real retries: attach a retry token and retry headers so the
            # POST stays idempotent across attempts.
            self.base_client.add_opc_retry_token_if_needed(header_params)
            self.base_client.add_opc_client_retries_header(header_params)
            retry_strategy.add_circuit_breaker_callback(self.circuit_breaker_callback)
        return retry_strategy.make_retrying_call(
            self.base_client.call_api,
            resource_path=resource_path,
            method=method,
            path_params=path_params,
            header_params=header_params,
            body=clone_sql_tuning_task_details,
            response_type="SqlTuningTaskReturn")
    else:
        return self.base_client.call_api(
            resource_path=resource_path,
            method=method,
            path_params=path_params,
            header_params=header_params,
            body=clone_sql_tuning_task_details,
            response_type="SqlTuningTaskReturn")
def drop_sql_tuning_task(self, managed_database_id, drop_sql_tuning_task_details, **kwargs):
    """
    Drops a SQL tuning task and its related results from the database.

    :param str managed_database_id: (required)
        The `OCID`__ of the Managed Database.

        __ https://docs.cloud.oracle.com/Content/General/Concepts/identifiers.htm

    :param oci.database_management.models.DropSqlTuningTaskDetails drop_sql_tuning_task_details: (required)
        The detailed inputs required to drop a SQL tuning task.

    :param str opc_request_id: (optional)
        The client request ID for tracing.

    :param str opc_retry_token: (optional)
        A token that uniquely identifies a request so it can be retried in case of a timeout or
        server error without risk of executing that same action again. Retry tokens expire after 24
        hours, but can be invalidated before then due to conflicting operations. For example, if a resource
        has been deleted and purged from the system, then a retry of the original creation request
        might be rejected.

    :param obj retry_strategy: (optional)
        A retry strategy to apply to this specific operation/call. This will override any retry strategy set at the client-level.

        This should be one of the strategies available in the :py:mod:`~oci.retry` module. This operation will not retry by default, users can also use the convenient :py:data:`~oci.retry.DEFAULT_RETRY_STRATEGY` provided by the SDK to enable retries for it.
        The specifics of the default retry strategy are described `here <https://docs.oracle.com/en-us/iaas/tools/python/latest/sdk_behaviors/retries.html>`__.

        To have this operation explicitly not perform any retries, pass an instance of :py:class:`~oci.retry.NoneRetryStrategy`.

    :param bool allow_control_chars: (optional)
        allow_control_chars is a boolean to indicate whether or not this request should allow control characters in the response object.
        By default, the response will not allow control characters in strings

    :return: A :class:`~oci.response.Response` object with data of type None
    :rtype: :class:`~oci.response.Response`

    :example:
        Click `here <https://docs.cloud.oracle.com/en-us/iaas/tools/python-sdk-examples/latest/databasemanagement/drop_sql_tuning_task.py.html>`__ to see an example of how to use drop_sql_tuning_task API.
    """
    resource_path = "/managedDatabases/{managedDatabaseId}/actions/dropSqlTuningTask"
    method = "POST"

    # Don't accept unknown kwargs
    expected_kwargs = [
        "allow_control_chars",
        "retry_strategy",
        "opc_request_id",
        "opc_retry_token"
    ]
    extra_kwargs = [_key for _key in six.iterkeys(kwargs) if _key not in expected_kwargs]
    if extra_kwargs:
        raise ValueError(
            "drop_sql_tuning_task got unknown kwargs: {!r}".format(extra_kwargs))

    path_params = {
        "managedDatabaseId": managed_database_id
    }

    path_params = {k: v for (k, v) in six.iteritems(path_params) if v is not missing}

    # Path parameters must be non-empty strings; anything else would produce a
    # malformed request URL, so fail fast here.
    for (k, v) in six.iteritems(path_params):
        if v is None or (isinstance(v, six.string_types) and len(v.strip()) == 0):
            raise ValueError('Parameter {} cannot be None, whitespace or empty string'.format(k))

    header_params = {
        "accept": "application/json",
        "content-type": "application/json",
        "opc-request-id": kwargs.get("opc_request_id", missing),
        "opc-retry-token": kwargs.get("opc_retry_token", missing)
    }
    # Drop headers the caller did not supply (still the `missing` sentinel).
    header_params = {k: v for (k, v) in six.iteritems(header_params) if v is not missing and v is not None}

    # Operation-level retry strategy (kwarg) takes precedence over the
    # client-level strategy configured in __init__.
    retry_strategy = self.base_client.get_preferred_retry_strategy(
        operation_retry_strategy=kwargs.get('retry_strategy'),
        client_retry_strategy=self.retry_strategy
    )

    if retry_strategy:
        if not isinstance(retry_strategy, retry.NoneRetryStrategy):
            # Real retries: attach a retry token and retry headers so the
            # POST stays idempotent across attempts.
            self.base_client.add_opc_retry_token_if_needed(header_params)
            self.base_client.add_opc_client_retries_header(header_params)
            retry_strategy.add_circuit_breaker_callback(self.circuit_breaker_callback)
        return retry_strategy.make_retrying_call(
            self.base_client.call_api,
            resource_path=resource_path,
            method=method,
            path_params=path_params,
            header_params=header_params,
            body=drop_sql_tuning_task_details)
    else:
        return self.base_client.call_api(
            resource_path=resource_path,
            method=method,
            path_params=path_params,
            header_params=header_params,
            body=drop_sql_tuning_task_details)
def get_execution_plan_stats_comparision(self, managed_database_id, sql_tuning_advisor_task_id, sql_object_id, execution_id, **kwargs):
    """
    Retrieves a comparison of the existing SQL execution plan and a new plan.
    A SQL tuning task may suggest a new execution plan for a SQL,
    and this API retrieves the comparison report of the statistics of the two plans.

    :param str managed_database_id: (required)
        The `OCID`__ of the Managed Database.

        __ https://docs.cloud.oracle.com/Content/General/Concepts/identifiers.htm

    :param int sql_tuning_advisor_task_id: (required)
        The SQL tuning task identifier. This is not the `OCID`__.

        __ https://docs.cloud.oracle.com/Content/General/Concepts/identifiers.htm

    :param int sql_object_id: (required)
        The SQL object ID for the SQL tuning task. This is not the `OCID`__.

        __ https://docs.cloud.oracle.com/Content/General/Concepts/identifiers.htm

    :param int execution_id: (required)
        The execution ID for an execution of a SQL tuning task. This is not the `OCID`__.

        __ https://docs.cloud.oracle.com/Content/General/Concepts/identifiers.htm

    :param str opc_request_id: (optional)
        The client request ID for tracing.

    :param obj retry_strategy: (optional)
        A retry strategy to apply to this specific operation/call. This will override any retry strategy set at the client-level.

        This should be one of the strategies available in the :py:mod:`~oci.retry` module. This operation will not retry by default, users can also use the convenient :py:data:`~oci.retry.DEFAULT_RETRY_STRATEGY` provided by the SDK to enable retries for it.
        The specifics of the default retry strategy are described `here <https://docs.oracle.com/en-us/iaas/tools/python/latest/sdk_behaviors/retries.html>`__.

        To have this operation explicitly not perform any retries, pass an instance of :py:class:`~oci.retry.NoneRetryStrategy`.

    :param bool allow_control_chars: (optional)
        allow_control_chars is a boolean to indicate whether or not this request should allow control characters in the response object.
        By default, the response will not allow control characters in strings

    :return: A :class:`~oci.response.Response` object with data of type :class:`~oci.database_management.models.ExecutionPlanStatsComparision`
    :rtype: :class:`~oci.response.Response`

    :example:
        Click `here <https://docs.cloud.oracle.com/en-us/iaas/tools/python-sdk-examples/latest/databasemanagement/get_execution_plan_stats_comparision.py.html>`__ to see an example of how to use get_execution_plan_stats_comparision API.
    """
    resource_path = "/managedDatabases/{managedDatabaseId}/sqlTuningAdvisorTasks/{sqlTuningAdvisorTaskId}/executionPlanStatsComparision"
    method = "GET"

    # Don't accept unknown kwargs
    expected_kwargs = [
        "allow_control_chars",
        "retry_strategy",
        "opc_request_id"
    ]
    extra_kwargs = [_key for _key in six.iterkeys(kwargs) if _key not in expected_kwargs]
    if extra_kwargs:
        raise ValueError(
            "get_execution_plan_stats_comparision got unknown kwargs: {!r}".format(extra_kwargs))

    path_params = {
        "managedDatabaseId": managed_database_id,
        "sqlTuningAdvisorTaskId": sql_tuning_advisor_task_id
    }

    path_params = {k: v for (k, v) in six.iteritems(path_params) if v is not missing}

    # Path parameters must be non-empty strings; anything else would produce a
    # malformed request URL, so fail fast here.
    for (k, v) in six.iteritems(path_params):
        if v is None or (isinstance(v, six.string_types) and len(v.strip()) == 0):
            raise ValueError('Parameter {} cannot be None, whitespace or empty string'.format(k))

    # sql_object_id / execution_id travel in the query string, not the path.
    query_params = {
        "sqlObjectId": sql_object_id,
        "executionId": execution_id
    }
    query_params = {k: v for (k, v) in six.iteritems(query_params) if v is not missing and v is not None}

    header_params = {
        "accept": "application/json",
        "content-type": "application/json",
        "opc-request-id": kwargs.get("opc_request_id", missing)
    }
    # Drop headers the caller did not supply (still the `missing` sentinel).
    header_params = {k: v for (k, v) in six.iteritems(header_params) if v is not missing and v is not None}

    # Operation-level retry strategy (kwarg) takes precedence over the
    # client-level strategy configured in __init__.
    retry_strategy = self.base_client.get_preferred_retry_strategy(
        operation_retry_strategy=kwargs.get('retry_strategy'),
        client_retry_strategy=self.retry_strategy
    )

    if retry_strategy:
        if not isinstance(retry_strategy, retry.NoneRetryStrategy):
            self.base_client.add_opc_client_retries_header(header_params)
            retry_strategy.add_circuit_breaker_callback(self.circuit_breaker_callback)
        return retry_strategy.make_retrying_call(
            self.base_client.call_api,
            resource_path=resource_path,
            method=method,
            path_params=path_params,
            query_params=query_params,
            header_params=header_params,
            response_type="ExecutionPlanStatsComparision")
    else:
        return self.base_client.call_api(
            resource_path=resource_path,
            method=method,
            path_params=path_params,
            query_params=query_params,
            header_params=header_params,
            response_type="ExecutionPlanStatsComparision")
def get_sql_execution_plan(self, managed_database_id, sql_tuning_advisor_task_id, sql_object_id, attribute, **kwargs):
    """
    Retrieves a SQL execution plan for the SQL being tuned.

    :param str managed_database_id: (required)
        The `OCID`__ of the Managed Database.

        __ https://docs.cloud.oracle.com/Content/General/Concepts/identifiers.htm

    :param int sql_tuning_advisor_task_id: (required)
        The SQL tuning task identifier. This is not the `OCID`__.

        __ https://docs.cloud.oracle.com/Content/General/Concepts/identifiers.htm

    :param int sql_object_id: (required)
        The SQL object ID for the SQL tuning task. This is not the `OCID`__.

        __ https://docs.cloud.oracle.com/Content/General/Concepts/identifiers.htm

    :param str attribute: (required)
        The attribute of the SQL execution plan.

        Allowed values are: "ORIGINAL", "ORIGINAL_WITH_ADJUSTED_COST", "USING_SQL_PROFILE", "USING_NEW_INDICES"

    :param str opc_request_id: (optional)
        The client request ID for tracing.

    :param obj retry_strategy: (optional)
        A retry strategy to apply to this specific operation/call. This will override any retry strategy set at the client-level.

        This should be one of the strategies available in the :py:mod:`~oci.retry` module. This operation will not retry by default, users can also use the convenient :py:data:`~oci.retry.DEFAULT_RETRY_STRATEGY` provided by the SDK to enable retries for it.
        The specifics of the default retry strategy are described `here <https://docs.oracle.com/en-us/iaas/tools/python/latest/sdk_behaviors/retries.html>`__.

        To have this operation explicitly not perform any retries, pass an instance of :py:class:`~oci.retry.NoneRetryStrategy`.

    :param bool allow_control_chars: (optional)
        allow_control_chars is a boolean to indicate whether or not this request should allow control characters in the response object.
        By default, the response will not allow control characters in strings

    :return: A :class:`~oci.response.Response` object with data of type :class:`~oci.database_management.models.SqlTuningAdvisorTaskSqlExecutionPlan`
    :rtype: :class:`~oci.response.Response`

    :example:
        Click `here <https://docs.cloud.oracle.com/en-us/iaas/tools/python-sdk-examples/latest/databasemanagement/get_sql_execution_plan.py.html>`__ to see an example of how to use get_sql_execution_plan API.
    """
    resource_path = "/managedDatabases/{managedDatabaseId}/sqlTuningAdvisorTasks/{sqlTuningAdvisorTaskId}/sqlExecutionPlan"
    method = "GET"

    # Reject any keyword argument this operation does not recognize.
    expected_kwargs = [
        "allow_control_chars",
        "retry_strategy",
        "opc_request_id"
    ]
    unrecognized = [name for name in six.iterkeys(kwargs) if name not in expected_kwargs]
    if unrecognized:
        raise ValueError(
            "get_sql_execution_plan got unknown kwargs: {!r}".format(unrecognized))

    # Substitutions for the templated resource path; each must be a
    # non-empty, non-whitespace value.
    path_params = {}
    for template_key, template_value in (
            ("managedDatabaseId", managed_database_id),
            ("sqlTuningAdvisorTaskId", sql_tuning_advisor_task_id)):
        if template_value is missing:
            continue
        if template_value is None or (isinstance(template_value, six.string_types) and len(template_value.strip()) == 0):
            raise ValueError('Parameter {} cannot be None, whitespace or empty string'.format(template_key))
        path_params[template_key] = template_value

    # Client-side enum validation for the required `attribute` parameter.
    attribute_allowed_values = ["ORIGINAL", "ORIGINAL_WITH_ADJUSTED_COST", "USING_SQL_PROFILE", "USING_NEW_INDICES"]
    if attribute not in attribute_allowed_values:
        raise ValueError(
            "Invalid value for `attribute`, must be one of {0}".format(attribute_allowed_values)
        )

    # Only send query parameters that carry an actual value.
    query_params = {}
    for query_key, query_value in (
            ("sqlObjectId", sql_object_id),
            ("attribute", attribute)):
        if query_value is not missing and query_value is not None:
            query_params[query_key] = query_value

    header_params = {
        "accept": "application/json",
        "content-type": "application/json"
    }
    opc_request_id = kwargs.get("opc_request_id")
    if opc_request_id is not None and opc_request_id is not missing:
        header_params["opc-request-id"] = opc_request_id

    # Operation-level retry strategy wins over the client-level one.
    retry_strategy = self.base_client.get_preferred_retry_strategy(
        operation_retry_strategy=kwargs.get('retry_strategy'),
        client_retry_strategy=self.retry_strategy
    )

    if not retry_strategy:
        return self.base_client.call_api(
            resource_path=resource_path,
            method=method,
            path_params=path_params,
            query_params=query_params,
            header_params=header_params,
            response_type="SqlTuningAdvisorTaskSqlExecutionPlan")

    if not isinstance(retry_strategy, retry.NoneRetryStrategy):
        self.base_client.add_opc_client_retries_header(header_params)
        retry_strategy.add_circuit_breaker_callback(self.circuit_breaker_callback)
    return retry_strategy.make_retrying_call(
        self.base_client.call_api,
        resource_path=resource_path,
        method=method,
        path_params=path_params,
        query_params=query_params,
        header_params=header_params,
        response_type="SqlTuningAdvisorTaskSqlExecutionPlan")
def get_sql_tuning_advisor_task_summary_report(self, managed_database_id, sql_tuning_advisor_task_id, **kwargs):
    """
    Gets the summary report for the specified SQL Tuning Advisor task.

    :param str managed_database_id: (required)
        The `OCID`__ of the Managed Database.

        __ https://docs.cloud.oracle.com/Content/General/Concepts/identifiers.htm

    :param int sql_tuning_advisor_task_id: (required)
        The SQL tuning task identifier. This is not the `OCID`__.

        __ https://docs.cloud.oracle.com/Content/General/Concepts/identifiers.htm

    :param str search_period: (optional)
        How far back the API will search for begin and end exec id. Unused if neither exec ids nor time filter query params are supplied. This is applicable only for Auto SQL Tuning tasks.

        Allowed values are: "LAST_24HR", "LAST_7DAY", "LAST_31DAY", "SINCE_LAST", "ALL"

    :param datetime time_greater_than_or_equal_to: (optional)
        The optional greater than or equal to query parameter to filter the timestamp. This is applicable only for Auto SQL Tuning tasks.

    :param datetime time_less_than_or_equal_to: (optional)
        The optional less than or equal to query parameter to filter the timestamp. This is applicable only for Auto SQL Tuning tasks.

    :param int begin_exec_id_greater_than_or_equal_to: (optional)
        The optional greater than or equal to filter on the execution ID related to a specific SQL Tuning Advisor task. This is applicable only for Auto SQL Tuning tasks.

    :param int end_exec_id_less_than_or_equal_to: (optional)
        The optional less than or equal to query parameter to filter on the execution ID related to a specific SQL Tuning Advisor task. This is applicable only for Auto SQL Tuning tasks.

    :param str opc_request_id: (optional)
        The client request ID for tracing.

    :param obj retry_strategy: (optional)
        A retry strategy to apply to this specific operation/call. This will override any retry strategy set at the client-level.

        This should be one of the strategies available in the :py:mod:`~oci.retry` module. This operation will not retry by default, users can also use the convenient :py:data:`~oci.retry.DEFAULT_RETRY_STRATEGY` provided by the SDK to enable retries for it.
        The specifics of the default retry strategy are described `here <https://docs.oracle.com/en-us/iaas/tools/python/latest/sdk_behaviors/retries.html>`__.

        To have this operation explicitly not perform any retries, pass an instance of :py:class:`~oci.retry.NoneRetryStrategy`.

    :param bool allow_control_chars: (optional)
        allow_control_chars is a boolean to indicate whether or not this request should allow control characters in the response object.
        By default, the response will not allow control characters in strings

    :return: A :class:`~oci.response.Response` object with data of type :class:`~oci.database_management.models.SqlTuningAdvisorTaskSummaryReport`
    :rtype: :class:`~oci.response.Response`

    :example:
        Click `here <https://docs.cloud.oracle.com/en-us/iaas/tools/python-sdk-examples/latest/databasemanagement/get_sql_tuning_advisor_task_summary_report.py.html>`__ to see an example of how to use get_sql_tuning_advisor_task_summary_report API.
    """
    resource_path = "/managedDatabases/{managedDatabaseId}/sqlTuningAdvisorTasks/{sqlTuningAdvisorTaskId}/summaryReport"
    method = "GET"

    # Reject any keyword argument this operation does not recognize.
    expected_kwargs = [
        "allow_control_chars",
        "retry_strategy",
        "search_period",
        "time_greater_than_or_equal_to",
        "time_less_than_or_equal_to",
        "begin_exec_id_greater_than_or_equal_to",
        "end_exec_id_less_than_or_equal_to",
        "opc_request_id"
    ]
    unrecognized = [name for name in six.iterkeys(kwargs) if name not in expected_kwargs]
    if unrecognized:
        raise ValueError(
            "get_sql_tuning_advisor_task_summary_report got unknown kwargs: {!r}".format(unrecognized))

    # Substitutions for the templated resource path; each must be a
    # non-empty, non-whitespace value.
    path_params = {}
    for template_key, template_value in (
            ("managedDatabaseId", managed_database_id),
            ("sqlTuningAdvisorTaskId", sql_tuning_advisor_task_id)):
        if template_value is missing:
            continue
        if template_value is None or (isinstance(template_value, six.string_types) and len(template_value.strip()) == 0):
            raise ValueError('Parameter {} cannot be None, whitespace or empty string'.format(template_key))
        path_params[template_key] = template_value

    # Client-side enum validation for the optional `search_period` filter.
    if 'search_period' in kwargs:
        search_period_allowed_values = ["LAST_24HR", "LAST_7DAY", "LAST_31DAY", "SINCE_LAST", "ALL"]
        if kwargs['search_period'] not in search_period_allowed_values:
            raise ValueError(
                "Invalid value for `search_period`, must be one of {0}".format(search_period_allowed_values)
            )

    # Only send query parameters that carry an actual value.
    query_params = {}
    for query_key, kwarg_name in (
            ("searchPeriod", "search_period"),
            ("timeGreaterThanOrEqualTo", "time_greater_than_or_equal_to"),
            ("timeLessThanOrEqualTo", "time_less_than_or_equal_to"),
            ("beginExecIdGreaterThanOrEqualTo", "begin_exec_id_greater_than_or_equal_to"),
            ("endExecIdLessThanOrEqualTo", "end_exec_id_less_than_or_equal_to")):
        query_value = kwargs.get(kwarg_name, missing)
        if query_value is not missing and query_value is not None:
            query_params[query_key] = query_value

    header_params = {
        "accept": "application/json",
        "content-type": "application/json"
    }
    opc_request_id = kwargs.get("opc_request_id")
    if opc_request_id is not None and opc_request_id is not missing:
        header_params["opc-request-id"] = opc_request_id

    # Operation-level retry strategy wins over the client-level one.
    retry_strategy = self.base_client.get_preferred_retry_strategy(
        operation_retry_strategy=kwargs.get('retry_strategy'),
        client_retry_strategy=self.retry_strategy
    )

    if not retry_strategy:
        return self.base_client.call_api(
            resource_path=resource_path,
            method=method,
            path_params=path_params,
            query_params=query_params,
            header_params=header_params,
            response_type="SqlTuningAdvisorTaskSummaryReport")

    if not isinstance(retry_strategy, retry.NoneRetryStrategy):
        self.base_client.add_opc_client_retries_header(header_params)
        retry_strategy.add_circuit_breaker_callback(self.circuit_breaker_callback)
    return retry_strategy.make_retrying_call(
        self.base_client.call_api,
        resource_path=resource_path,
        method=method,
        path_params=path_params,
        query_params=query_params,
        header_params=header_params,
        response_type="SqlTuningAdvisorTaskSummaryReport")
def list_sql_tuning_advisor_task_findings(self, managed_database_id, sql_tuning_advisor_task_id, **kwargs):
    """
    Gets an array of the details of the findings that match specific filters.

    :param str managed_database_id: (required)
        The `OCID`__ of the Managed Database.

        __ https://docs.cloud.oracle.com/Content/General/Concepts/identifiers.htm

    :param int sql_tuning_advisor_task_id: (required)
        The SQL tuning task identifier. This is not the `OCID`__.

        __ https://docs.cloud.oracle.com/Content/General/Concepts/identifiers.htm

    :param int begin_exec_id: (optional)
        The optional greater than or equal to filter on the execution ID related to a specific SQL Tuning Advisor task.

    :param int end_exec_id: (optional)
        The optional less than or equal to query parameter to filter on the execution ID related to a specific SQL Tuning Advisor task.

    :param str search_period: (optional)
        The search period during which the API will search for begin and end exec id, if not supplied.
        Unused if beginExecId and endExecId optional query params are both supplied.

        Allowed values are: "LAST_24HR", "LAST_7DAY", "LAST_31DAY", "SINCE_LAST", "ALL"

    :param str finding_filter: (optional)
        The filter used to display specific findings in the report.

        Allowed values are: "none", "FINDINGS", "NOFINDINGS", "ERRORS", "PROFILES", "INDICES", "STATS", "RESTRUCTURE", "ALTERNATIVE", "AUTO_PROFILES", "OTHER_PROFILES"

    :param str stats_hash_filter: (optional)
        The hash value of the object for the statistic finding search.

    :param str index_hash_filter: (optional)
        The hash value of the index table name.

    :param str sort_by: (optional)
        The possible sortBy values of an object's recommendations.

        Allowed values are: "DBTIME_BENEFIT", "PARSING_SCHEMA", "SQL_ID", "STATS", "PROFILES", "SQL_BENEFIT", "DATE", "INDICES", "RESTRUCTURE", "ALTERNATIVE", "MISC", "ERROR", "TIMEOUTS"

    :param str sort_order: (optional)
        The option to sort information in ascending (\u2018ASC\u2019) or descending (\u2018DESC\u2019) order. Descending order is the default order.

        Allowed values are: "ASC", "DESC"

    :param str page: (optional)
        The page token representing the page from where the next set of paginated results
        are retrieved. This is usually retrieved from a previous list call.

    :param int limit: (optional)
        The maximum number of records returned in the paginated response.

    :param str opc_request_id: (optional)
        The client request ID for tracing.

    :param obj retry_strategy: (optional)
        A retry strategy to apply to this specific operation/call. This will override any retry strategy set at the client-level.

        This should be one of the strategies available in the :py:mod:`~oci.retry` module. This operation will not retry by default, users can also use the convenient :py:data:`~oci.retry.DEFAULT_RETRY_STRATEGY` provided by the SDK to enable retries for it.
        The specifics of the default retry strategy are described `here <https://docs.oracle.com/en-us/iaas/tools/python/latest/sdk_behaviors/retries.html>`__.

        To have this operation explicitly not perform any retries, pass an instance of :py:class:`~oci.retry.NoneRetryStrategy`.

    :param bool allow_control_chars: (optional)
        allow_control_chars is a boolean to indicate whether or not this request should allow control characters in the response object.
        By default, the response will not allow control characters in strings

    :return: A :class:`~oci.response.Response` object with data of type :class:`~oci.database_management.models.SqlTuningAdvisorTaskFindingCollection`
    :rtype: :class:`~oci.response.Response`

    :example:
        Click `here <https://docs.cloud.oracle.com/en-us/iaas/tools/python-sdk-examples/latest/databasemanagement/list_sql_tuning_advisor_task_findings.py.html>`__ to see an example of how to use list_sql_tuning_advisor_task_findings API.
    """
    resource_path = "/managedDatabases/{managedDatabaseId}/sqlTuningAdvisorTasks/{sqlTuningAdvisorTaskId}/findings"
    method = "GET"

    # Reject any keyword argument this operation does not recognize.
    expected_kwargs = [
        "allow_control_chars",
        "retry_strategy",
        "begin_exec_id",
        "end_exec_id",
        "search_period",
        "finding_filter",
        "stats_hash_filter",
        "index_hash_filter",
        "sort_by",
        "sort_order",
        "page",
        "limit",
        "opc_request_id"
    ]
    unrecognized = [name for name in six.iterkeys(kwargs) if name not in expected_kwargs]
    if unrecognized:
        raise ValueError(
            "list_sql_tuning_advisor_task_findings got unknown kwargs: {!r}".format(unrecognized))

    # Substitutions for the templated resource path; each must be a
    # non-empty, non-whitespace value.
    path_params = {}
    for template_key, template_value in (
            ("managedDatabaseId", managed_database_id),
            ("sqlTuningAdvisorTaskId", sql_tuning_advisor_task_id)):
        if template_value is missing:
            continue
        if template_value is None or (isinstance(template_value, six.string_types) and len(template_value.strip()) == 0):
            raise ValueError('Parameter {} cannot be None, whitespace or empty string'.format(template_key))
        path_params[template_key] = template_value

    # Client-side enum validation for each optional enum-typed filter.
    if 'search_period' in kwargs:
        search_period_allowed_values = ["LAST_24HR", "LAST_7DAY", "LAST_31DAY", "SINCE_LAST", "ALL"]
        if kwargs['search_period'] not in search_period_allowed_values:
            raise ValueError(
                "Invalid value for `search_period`, must be one of {0}".format(search_period_allowed_values)
            )

    if 'finding_filter' in kwargs:
        finding_filter_allowed_values = ["none", "FINDINGS", "NOFINDINGS", "ERRORS", "PROFILES", "INDICES", "STATS", "RESTRUCTURE", "ALTERNATIVE", "AUTO_PROFILES", "OTHER_PROFILES"]
        if kwargs['finding_filter'] not in finding_filter_allowed_values:
            raise ValueError(
                "Invalid value for `finding_filter`, must be one of {0}".format(finding_filter_allowed_values)
            )

    if 'sort_by' in kwargs:
        sort_by_allowed_values = ["DBTIME_BENEFIT", "PARSING_SCHEMA", "SQL_ID", "STATS", "PROFILES", "SQL_BENEFIT", "DATE", "INDICES", "RESTRUCTURE", "ALTERNATIVE", "MISC", "ERROR", "TIMEOUTS"]
        if kwargs['sort_by'] not in sort_by_allowed_values:
            raise ValueError(
                "Invalid value for `sort_by`, must be one of {0}".format(sort_by_allowed_values)
            )

    if 'sort_order' in kwargs:
        sort_order_allowed_values = ["ASC", "DESC"]
        if kwargs['sort_order'] not in sort_order_allowed_values:
            raise ValueError(
                "Invalid value for `sort_order`, must be one of {0}".format(sort_order_allowed_values)
            )

    # Only send query parameters that carry an actual value.
    query_params = {}
    for query_key, kwarg_name in (
            ("beginExecId", "begin_exec_id"),
            ("endExecId", "end_exec_id"),
            ("searchPeriod", "search_period"),
            ("findingFilter", "finding_filter"),
            ("statsHashFilter", "stats_hash_filter"),
            ("indexHashFilter", "index_hash_filter"),
            ("sortBy", "sort_by"),
            ("sortOrder", "sort_order"),
            ("page", "page"),
            ("limit", "limit")):
        query_value = kwargs.get(kwarg_name, missing)
        if query_value is not missing and query_value is not None:
            query_params[query_key] = query_value

    header_params = {
        "accept": "application/json",
        "content-type": "application/json"
    }
    opc_request_id = kwargs.get("opc_request_id")
    if opc_request_id is not None and opc_request_id is not missing:
        header_params["opc-request-id"] = opc_request_id

    # Operation-level retry strategy wins over the client-level one.
    retry_strategy = self.base_client.get_preferred_retry_strategy(
        operation_retry_strategy=kwargs.get('retry_strategy'),
        client_retry_strategy=self.retry_strategy
    )

    if not retry_strategy:
        return self.base_client.call_api(
            resource_path=resource_path,
            method=method,
            path_params=path_params,
            query_params=query_params,
            header_params=header_params,
            response_type="SqlTuningAdvisorTaskFindingCollection")

    if not isinstance(retry_strategy, retry.NoneRetryStrategy):
        self.base_client.add_opc_client_retries_header(header_params)
        retry_strategy.add_circuit_breaker_callback(self.circuit_breaker_callback)
    return retry_strategy.make_retrying_call(
        self.base_client.call_api,
        resource_path=resource_path,
        method=method,
        path_params=path_params,
        query_params=query_params,
        header_params=header_params,
        response_type="SqlTuningAdvisorTaskFindingCollection")
def list_sql_tuning_advisor_task_recommendations(self, managed_database_id, sql_tuning_advisor_task_id, sql_object_id, execution_id, **kwargs):
    """
    Gets the findings and possible actions for a given object in a SQL tuning task.
    The task ID and object ID are used to retrieve the findings and recommendations.

    :param str managed_database_id: (required)
        The `OCID`__ of the Managed Database.

        __ https://docs.cloud.oracle.com/Content/General/Concepts/identifiers.htm

    :param int sql_tuning_advisor_task_id: (required)
        The SQL tuning task identifier. This is not the `OCID`__.

        __ https://docs.cloud.oracle.com/Content/General/Concepts/identifiers.htm

    :param int sql_object_id: (required)
        The SQL object ID for the SQL tuning task. This is not the `OCID`__.

        __ https://docs.cloud.oracle.com/Content/General/Concepts/identifiers.htm

    :param int execution_id: (required)
        The execution ID for an execution of a SQL tuning task. This is not the `OCID`__.

        __ https://docs.cloud.oracle.com/Content/General/Concepts/identifiers.htm

    :param str sort_by: (optional)
        The possible sortBy values of an object's recommendations.

        Allowed values are: "RECOMMENDATION_TYPE", "BENEFIT"

    :param str sort_order: (optional)
        The option to sort information in ascending (\u2018ASC\u2019) or descending (\u2018DESC\u2019) order. Descending order is the default order.

        Allowed values are: "ASC", "DESC"

    :param str page: (optional)
        The page token representing the page from where the next set of paginated results
        are retrieved. This is usually retrieved from a previous list call.

    :param int limit: (optional)
        The maximum number of records returned in the paginated response.

    :param str opc_request_id: (optional)
        The client request ID for tracing.

    :param obj retry_strategy: (optional)
        A retry strategy to apply to this specific operation/call. This will override any retry strategy set at the client-level.

        This should be one of the strategies available in the :py:mod:`~oci.retry` module. This operation will not retry by default, users can also use the convenient :py:data:`~oci.retry.DEFAULT_RETRY_STRATEGY` provided by the SDK to enable retries for it.
        The specifics of the default retry strategy are described `here <https://docs.oracle.com/en-us/iaas/tools/python/latest/sdk_behaviors/retries.html>`__.

        To have this operation explicitly not perform any retries, pass an instance of :py:class:`~oci.retry.NoneRetryStrategy`.

    :param bool allow_control_chars: (optional)
        allow_control_chars is a boolean to indicate whether or not this request should allow control characters in the response object.
        By default, the response will not allow control characters in strings

    :return: A :class:`~oci.response.Response` object with data of type :class:`~oci.database_management.models.SqlTuningAdvisorTaskRecommendationCollection`
    :rtype: :class:`~oci.response.Response`

    :example:
        Click `here <https://docs.cloud.oracle.com/en-us/iaas/tools/python-sdk-examples/latest/databasemanagement/list_sql_tuning_advisor_task_recommendations.py.html>`__ to see an example of how to use list_sql_tuning_advisor_task_recommendations API.
    """
    resource_path = "/managedDatabases/{managedDatabaseId}/sqlTuningAdvisorTasks/{sqlTuningAdvisorTaskId}/recommendations"
    method = "GET"

    # Reject any keyword argument this operation does not recognize.
    expected_kwargs = [
        "allow_control_chars",
        "retry_strategy",
        "sort_by",
        "sort_order",
        "page",
        "limit",
        "opc_request_id"
    ]
    unrecognized = [name for name in six.iterkeys(kwargs) if name not in expected_kwargs]
    if unrecognized:
        raise ValueError(
            "list_sql_tuning_advisor_task_recommendations got unknown kwargs: {!r}".format(unrecognized))

    # Substitutions for the templated resource path; each must be a
    # non-empty, non-whitespace value.
    path_params = {}
    for template_key, template_value in (
            ("managedDatabaseId", managed_database_id),
            ("sqlTuningAdvisorTaskId", sql_tuning_advisor_task_id)):
        if template_value is missing:
            continue
        if template_value is None or (isinstance(template_value, six.string_types) and len(template_value.strip()) == 0):
            raise ValueError('Parameter {} cannot be None, whitespace or empty string'.format(template_key))
        path_params[template_key] = template_value

    # Client-side enum validation for each optional enum-typed filter.
    if 'sort_by' in kwargs:
        sort_by_allowed_values = ["RECOMMENDATION_TYPE", "BENEFIT"]
        if kwargs['sort_by'] not in sort_by_allowed_values:
            raise ValueError(
                "Invalid value for `sort_by`, must be one of {0}".format(sort_by_allowed_values)
            )

    if 'sort_order' in kwargs:
        sort_order_allowed_values = ["ASC", "DESC"]
        if kwargs['sort_order'] not in sort_order_allowed_values:
            raise ValueError(
                "Invalid value for `sort_order`, must be one of {0}".format(sort_order_allowed_values)
            )

    # Only send query parameters that carry an actual value.
    query_params = {}
    for query_key, query_value in (
            ("sqlObjectId", sql_object_id),
            ("executionId", execution_id),
            ("sortBy", kwargs.get("sort_by", missing)),
            ("sortOrder", kwargs.get("sort_order", missing)),
            ("page", kwargs.get("page", missing)),
            ("limit", kwargs.get("limit", missing))):
        if query_value is not missing and query_value is not None:
            query_params[query_key] = query_value

    header_params = {
        "accept": "application/json",
        "content-type": "application/json"
    }
    opc_request_id = kwargs.get("opc_request_id")
    if opc_request_id is not None and opc_request_id is not missing:
        header_params["opc-request-id"] = opc_request_id

    # Operation-level retry strategy wins over the client-level one.
    retry_strategy = self.base_client.get_preferred_retry_strategy(
        operation_retry_strategy=kwargs.get('retry_strategy'),
        client_retry_strategy=self.retry_strategy
    )

    if not retry_strategy:
        return self.base_client.call_api(
            resource_path=resource_path,
            method=method,
            path_params=path_params,
            query_params=query_params,
            header_params=header_params,
            response_type="SqlTuningAdvisorTaskRecommendationCollection")

    if not isinstance(retry_strategy, retry.NoneRetryStrategy):
        self.base_client.add_opc_client_retries_header(header_params)
        retry_strategy.add_circuit_breaker_callback(self.circuit_breaker_callback)
    return retry_strategy.make_retrying_call(
        self.base_client.call_api,
        resource_path=resource_path,
        method=method,
        path_params=path_params,
        query_params=query_params,
        header_params=header_params,
        response_type="SqlTuningAdvisorTaskRecommendationCollection")
def list_sql_tuning_advisor_tasks(self, managed_database_id, **kwargs):
    """
    Lists the SQL Tuning Advisor tasks for the specified Managed Database.

    :param str managed_database_id: (required)
        The `OCID`__ of the Managed Database.

        __ https://docs.cloud.oracle.com/Content/General/Concepts/identifiers.htm

    :param str name: (optional)
        The optional query parameter to filter the SQL Tuning Advisor task list by name.

    :param str status: (optional)
        The optional query parameter to filter the SQL Tuning Advisor task list by status.

        Allowed values are: "INITIAL", "EXECUTING", "INTERRUPTED", "COMPLETED", "ERROR"

    :param datetime time_greater_than_or_equal_to: (optional)
        The optional greater than or equal to query parameter to filter the timestamp.

    :param datetime time_less_than_or_equal_to: (optional)
        The optional less than or equal to query parameter to filter the timestamp.

    :param str page: (optional)
        The page token representing the page from where the next set of paginated results
        are retrieved. This is usually retrieved from a previous list call.

    :param int limit: (optional)
        The maximum number of records returned in the paginated response.

    :param str sort_by: (optional)
        The option to sort the SQL Tuning Advisor task summary data.

        Allowed values are: "NAME", "START_TIME"

    :param str sort_order: (optional)
        The option to sort information in ascending (\u2018ASC\u2019) or descending (\u2018DESC\u2019) order. Descending order is the default order.

        Allowed values are: "ASC", "DESC"

    :param str opc_request_id: (optional)
        The client request ID for tracing.

    :param obj retry_strategy: (optional)
        A retry strategy to apply to this specific operation/call. This will override any retry strategy set at the client-level.

        This should be one of the strategies available in the :py:mod:`~oci.retry` module. This operation will not retry by default, users can also use the convenient :py:data:`~oci.retry.DEFAULT_RETRY_STRATEGY` provided by the SDK to enable retries for it.
        The specifics of the default retry strategy are described `here <https://docs.oracle.com/en-us/iaas/tools/python/latest/sdk_behaviors/retries.html>`__.

        To have this operation explicitly not perform any retries, pass an instance of :py:class:`~oci.retry.NoneRetryStrategy`.

    :param bool allow_control_chars: (optional)
        allow_control_chars is a boolean to indicate whether or not this request should allow control characters in the response object.
        By default, the response will not allow control characters in strings

    :return: A :class:`~oci.response.Response` object with data of type :class:`~oci.database_management.models.SqlTuningAdvisorTaskCollection`
    :rtype: :class:`~oci.response.Response`

    :example:
        Click `here <https://docs.cloud.oracle.com/en-us/iaas/tools/python-sdk-examples/latest/databasemanagement/list_sql_tuning_advisor_tasks.py.html>`__ to see an example of how to use list_sql_tuning_advisor_tasks API.
    """
    resource_path = "/managedDatabases/{managedDatabaseId}/sqlTuningAdvisorTasks"
    method = "GET"

    # Reject any keyword argument this operation does not recognize.
    expected_kwargs = [
        "allow_control_chars",
        "retry_strategy",
        "name",
        "status",
        "time_greater_than_or_equal_to",
        "time_less_than_or_equal_to",
        "page",
        "limit",
        "sort_by",
        "sort_order",
        "opc_request_id"
    ]
    unrecognized = [name for name in six.iterkeys(kwargs) if name not in expected_kwargs]
    if unrecognized:
        raise ValueError(
            "list_sql_tuning_advisor_tasks got unknown kwargs: {!r}".format(unrecognized))

    # The single templated path parameter must be a non-empty,
    # non-whitespace value.
    path_params = {}
    for template_key, template_value in (
            ("managedDatabaseId", managed_database_id),):
        if template_value is missing:
            continue
        if template_value is None or (isinstance(template_value, six.string_types) and len(template_value.strip()) == 0):
            raise ValueError('Parameter {} cannot be None, whitespace or empty string'.format(template_key))
        path_params[template_key] = template_value

    # Client-side enum validation for each optional enum-typed filter.
    if 'status' in kwargs:
        status_allowed_values = ["INITIAL", "EXECUTING", "INTERRUPTED", "COMPLETED", "ERROR"]
        if kwargs['status'] not in status_allowed_values:
            raise ValueError(
                "Invalid value for `status`, must be one of {0}".format(status_allowed_values)
            )

    if 'sort_by' in kwargs:
        sort_by_allowed_values = ["NAME", "START_TIME"]
        if kwargs['sort_by'] not in sort_by_allowed_values:
            raise ValueError(
                "Invalid value for `sort_by`, must be one of {0}".format(sort_by_allowed_values)
            )

    if 'sort_order' in kwargs:
        sort_order_allowed_values = ["ASC", "DESC"]
        if kwargs['sort_order'] not in sort_order_allowed_values:
            raise ValueError(
                "Invalid value for `sort_order`, must be one of {0}".format(sort_order_allowed_values)
            )

    # Only send query parameters that carry an actual value.
    query_params = {}
    for query_key, kwarg_name in (
            ("name", "name"),
            ("status", "status"),
            ("timeGreaterThanOrEqualTo", "time_greater_than_or_equal_to"),
            ("timeLessThanOrEqualTo", "time_less_than_or_equal_to"),
            ("page", "page"),
            ("limit", "limit"),
            ("sortBy", "sort_by"),
            ("sortOrder", "sort_order")):
        query_value = kwargs.get(kwarg_name, missing)
        if query_value is not missing and query_value is not None:
            query_params[query_key] = query_value

    header_params = {
        "accept": "application/json",
        "content-type": "application/json"
    }
    opc_request_id = kwargs.get("opc_request_id")
    if opc_request_id is not None and opc_request_id is not missing:
        header_params["opc-request-id"] = opc_request_id

    # Operation-level retry strategy wins over the client-level one.
    retry_strategy = self.base_client.get_preferred_retry_strategy(
        operation_retry_strategy=kwargs.get('retry_strategy'),
        client_retry_strategy=self.retry_strategy
    )

    if not retry_strategy:
        return self.base_client.call_api(
            resource_path=resource_path,
            method=method,
            path_params=path_params,
            query_params=query_params,
            header_params=header_params,
            response_type="SqlTuningAdvisorTaskCollection")

    if not isinstance(retry_strategy, retry.NoneRetryStrategy):
        self.base_client.add_opc_client_retries_header(header_params)
        retry_strategy.add_circuit_breaker_callback(self.circuit_breaker_callback)
    return retry_strategy.make_retrying_call(
        self.base_client.call_api,
        resource_path=resource_path,
        method=method,
        path_params=path_params,
        query_params=query_params,
        header_params=header_params,
        response_type="SqlTuningAdvisorTaskCollection")
def start_sql_tuning_task(self, managed_database_id, start_sql_tuning_task_details, **kwargs):
    """
    Starts a SQL tuning task for a given set of SQL statements from the active session history top SQL statements.

    :param str managed_database_id: (required)
        The `OCID`__ of the Managed Database.

        __ https://docs.cloud.oracle.com/Content/General/Concepts/identifiers.htm

    :param oci.database_management.models.StartSqlTuningTaskDetails start_sql_tuning_task_details: (required)
        The detailed inputs required to start a SQL tuning task.

    :param str opc_request_id: (optional)
        The client request ID for tracing.

    :param str opc_retry_token: (optional)
        A token that uniquely identifies a request so it can be retried in case of a timeout or
        server error without risk of executing that same action again. Retry tokens expire after 24
        hours, but can be invalidated before then due to conflicting operations. For example, if a resource
        has been deleted and purged from the system, then a retry of the original creation request
        might be rejected.

    :param obj retry_strategy: (optional)
        A retry strategy to apply to this specific operation/call. This will override any retry strategy set at the client-level.
        This should be one of the strategies available in the :py:mod:`~oci.retry` module. This operation will not retry by default, users can also use the convenient :py:data:`~oci.retry.DEFAULT_RETRY_STRATEGY` provided by the SDK to enable retries for it.
        The specifics of the default retry strategy are described `here <https://docs.oracle.com/en-us/iaas/tools/python/latest/sdk_behaviors/retries.html>`__.
        To have this operation explicitly not perform any retries, pass an instance of :py:class:`~oci.retry.NoneRetryStrategy`.

    :param bool allow_control_chars: (optional)
        allow_control_chars is a boolean to indicate whether or not this request should allow control characters in the response object.
        By default, the response will not allow control characters in strings

    :return: A :class:`~oci.response.Response` object with data of type :class:`~oci.database_management.models.SqlTuningTaskReturn`
    :rtype: :class:`~oci.response.Response`

    :example:
        Click `here <https://docs.cloud.oracle.com/en-us/iaas/tools/python-sdk-examples/latest/databasemanagement/start_sql_tuning_task.py.html>`__ to see an example of how to use start_sql_tuning_task API.
    """
    resource_path = "/managedDatabases/{managedDatabaseId}/actions/startSqlTuningTask"
    method = "POST"

    # Don't accept unknown kwargs
    expected_kwargs = [
        "allow_control_chars",
        "retry_strategy",
        "opc_request_id",
        "opc_retry_token"
    ]
    extra_kwargs = [_key for _key in six.iterkeys(kwargs) if _key not in expected_kwargs]
    if extra_kwargs:
        raise ValueError(
            "start_sql_tuning_task got unknown kwargs: {!r}".format(extra_kwargs))

    path_params = {
        "managedDatabaseId": managed_database_id
    }

    path_params = {k: v for (k, v) in six.iteritems(path_params) if v is not missing}

    # Path params are interpolated into the request URL, so None or blank
    # values would produce a malformed path; fail fast instead.
    for (k, v) in six.iteritems(path_params):
        if v is None or (isinstance(v, six.string_types) and len(v.strip()) == 0):
            raise ValueError('Parameter {} cannot be None, whitespace or empty string'.format(k))

    header_params = {
        "accept": "application/json",
        "content-type": "application/json",
        "opc-request-id": kwargs.get("opc_request_id", missing),
        "opc-retry-token": kwargs.get("opc_retry_token", missing)
    }
    # Strip headers the caller did not supply (sentinel `missing`) or set to None.
    header_params = {k: v for (k, v) in six.iteritems(header_params) if v is not missing and v is not None}

    # An operation-level retry strategy passed via kwargs overrides the
    # client-level strategy configured on this client instance.
    retry_strategy = self.base_client.get_preferred_retry_strategy(
        operation_retry_strategy=kwargs.get('retry_strategy'),
        client_retry_strategy=self.retry_strategy
    )

    if retry_strategy:
        if not isinstance(retry_strategy, retry.NoneRetryStrategy):
            # This is a POST that may be retried: inject an opc-retry-token
            # (if the caller did not provide one) so retries stay idempotent
            # on the server side, and advertise client-side retries.
            self.base_client.add_opc_retry_token_if_needed(header_params)
            self.base_client.add_opc_client_retries_header(header_params)
            retry_strategy.add_circuit_breaker_callback(self.circuit_breaker_callback)
        return retry_strategy.make_retrying_call(
            self.base_client.call_api,
            resource_path=resource_path,
            method=method,
            path_params=path_params,
            header_params=header_params,
            body=start_sql_tuning_task_details,
            response_type="SqlTuningTaskReturn")
    else:
        return self.base_client.call_api(
            resource_path=resource_path,
            method=method,
            path_params=path_params,
            header_params=header_params,
            body=start_sql_tuning_task_details,
            response_type="SqlTuningTaskReturn")
| 51.417391
| 261
| 0.664299
| 8,146
| 65,043
| 5.094279
| 0.064817
| 0.047617
| 0.013157
| 0.016868
| 0.852957
| 0.834884
| 0.819268
| 0.798882
| 0.792857
| 0.783773
| 0
| 0.00265
| 0.257384
| 65,043
| 1,264
| 262
| 51.45807
| 0.856486
| 0.438249
| 0
| 0.751174
| 0
| 0
| 0.199133
| 0.065804
| 0
| 0
| 0
| 0
| 0
| 1
| 0.015649
| false
| 0.001565
| 0.014085
| 0
| 0.059468
| 0.001565
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
71c2a3c9124856703e674a6da41157fb794c8e04
| 2,834
|
py
|
Python
|
cocluster/cocluster.py
|
ashish-code/co-clustering-visual-categorization
|
0e6d97858793c6562d28b2cefd9f61bc2fa079d0
|
[
"MIT"
] | null | null | null |
cocluster/cocluster.py
|
ashish-code/co-clustering-visual-categorization
|
0e6d97858793c6562d28b2cefd9f61bc2fa079d0
|
[
"MIT"
] | null | null | null |
cocluster/cocluster.py
|
ashish-code/co-clustering-visual-categorization
|
0e6d97858793c6562d28b2cefd9f61bc2fa079d0
|
[
"MIT"
] | null | null | null |
'''
Created on 27 Jul 2011
@author: ag00087
'''
import os
import numpy as np
import time
# Base directory containing all dataset folders (site-specific absolute path).
rootDir = '/vol/vssp/diplecs/ash/Data/'
# Name of the scratch sub-directory used for intermediate co-cluster files.
tempDir = 'Temp'
def Oldcoclust(data, dataset, nRowCluster=1, nColCluster=1, ccType='i'):
    """Run the external 'cocluster-linux' binary on *data* (last column dropped).

    Writes the matrix and its dimensions to fixed temp files under
    ``rootDir + dataset + tempDir``, invokes the binary there, and returns
    the raw lines of the resulting co-cluster output file.

    :param data: 2-D numpy array; the final column (presumably labels —
        TODO confirm against callers) is excluded from the written matrix.
    :param dataset: dataset name used to build the temp file prefix.
    :param nRowCluster: number of row clusters (-R flag).
    :param nColCluster: number of column clusters (-C flag).
    :param ccType: co-clustering algorithm code passed as -A (e.g. 'i').
    :return: list of str — lines read from the co-cluster output file.
    """
    # NOTE: path pieces are concatenated without separators to match the
    # historical layout (e.g. '<root><dataset>Temptempdata'); changing this
    # would break existing on-disk artifacts.
    tempPath = rootDir + dataset + tempDir
    tempdataPath = tempPath + 'tempdata'
    tempdataDimPath = tempPath + 'tempdata_dim'
    tempCCFilePath = tempPath + 'tempCCFile.txt'
    np.savetxt(tempdataPath, data[:, :-1], fmt='%d', delimiter=' ')
    np.savetxt(tempdataDimPath, data[:, :-1].shape, fmt='%d', delimiter=' ')
    cmdPath = '/vol/vssp/diplecs/ash/code/cocluster/'
    cmd = 'cocluster-linux'
    args = ' -A %s -R %d -C %d -I d s %s -O c s 0 o %s' % (ccType, nRowCluster, nColCluster, tempdataPath, tempCCFilePath)
    cwd = os.getcwd()
    os.chdir(cmdPath)
    try:
        # SECURITY NOTE: os.system runs through the shell; acceptable only
        # because every argument above is program-controlled, not user input.
        os.system(cmd + args)
    finally:
        # Restore the caller's working directory even if the command raises.
        os.chdir(cwd)
    # 'with' guarantees the handle is closed even if readlines() fails.
    with open(tempCCFilePath, 'r') as tempCCFile:
        tempFileData = tempCCFile.readlines()
    return tempFileData
def coclust(data, dataset, nRowCluster=1, nColCluster=1, ccType='i'):
    """Run 'cocluster-linux' on *data* using a fresh per-call temp directory.

    Unlike :func:`Oldcoclust`, every call creates its own scratch directory
    (named after the current Unix timestamp) under ``rootDir + tempDir`` so
    separate runs do not clobber each other's files.

    :param data: 2-D numpy array; the final column is excluded.
    :param dataset: unused here, kept for signature parity with Oldcoclust.
    :param nRowCluster: number of row clusters (-R flag).
    :param nColCluster: number of column clusters (-C flag).
    :param ccType: co-clustering algorithm code passed as -A (e.g. 'i').
    :return: list of str — lines read from the co-cluster output file.
    """
    # NOTE(review): second-resolution timestamps can collide if two runs
    # start within the same second — os.mkdir would then raise. Consider
    # tempfile.mkdtemp if concurrent launches are expected.
    tempTimeDir = str(int(time.time()))
    tempPath = rootDir + tempDir + '/' + tempTimeDir + '/'
    tempdataPath = tempPath + 'tempdata'
    os.mkdir(tempPath)
    tempdataDimPath = tempPath + 'tempdata_dim'
    tempCCFilePath = tempPath + 'tempCCFile.txt'
    np.savetxt(tempdataPath, data[:, :-1], fmt='%d', delimiter=' ')
    np.savetxt(tempdataDimPath, data[:, :-1].shape, fmt='%d', delimiter=' ')
    cmdPath = '/vol/vssp/diplecs/ash/code/cocluster/'
    cmd = 'cocluster-linux'
    args = ' -A %s -R %d -C %d -I d s %s -O c s 0 o %s' % (ccType, nRowCluster, nColCluster, tempdataPath, tempCCFilePath)
    cwd = os.getcwd()
    os.chdir(cmdPath)
    try:
        # SECURITY NOTE: shell invocation; safe only with program-controlled args.
        os.system(cmd + args)
    finally:
        # Restore the caller's working directory even if the command raises.
        os.chdir(cwd)
    # 'with' guarantees the handle is closed even if readlines() fails.
    with open(tempCCFilePath, 'r') as tempCCFile:
        tempFileData = tempCCFile.readlines()
    return tempFileData
def coclustWord(data, dataset, nRowCluster=1, nColCluster=1, ccType='i'):
    """Run 'cocluster-linux' on the FULL *data* matrix (no column dropped).

    Temp-file names embed the algorithm type and cluster counts so different
    parameterizations of the same dataset can coexist on disk.

    :param data: 2-D numpy array written verbatim (unlike Oldcoclust/coclust,
        the final column is NOT excluded).
    :param dataset: dataset name used to build the temp file prefix.
    :param nRowCluster: number of row clusters (-R flag).
    :param nColCluster: number of column clusters (-C flag).
    :param ccType: co-clustering algorithm code passed as -A (e.g. 'i').
    :return: list of str — lines read from the co-cluster output file.
    """
    tempPath = rootDir + dataset + tempDir
    # Encode (type, rows, cols) in the names so parallel configurations
    # of the same dataset do not overwrite each other.
    tempdataPath = tempPath + 'tempdata' + str(ccType) + str(nRowCluster) + str(nColCluster)
    tempdataDimPath = tempPath + 'tempdata' + str(ccType) + str(nRowCluster) + str(nColCluster) + '_dim'
    tempCCFilePath = tempPath + 'tempCCFile' + str(ccType) + str(nRowCluster) + str(nColCluster)
    np.savetxt(tempdataPath, data, fmt='%d', delimiter=' ')
    np.savetxt(tempdataDimPath, data.shape, fmt='%d', delimiter=' ')
    cmdPath = '/vol/vssp/diplecs/ash/code/cocluster/'
    cmd = 'cocluster-linux'
    args = ' -A %s -R %d -C %d -I d s %s -O c s 0 o %s' % (ccType, nRowCluster, nColCluster, tempdataPath, tempCCFilePath)
    cwd = os.getcwd()
    os.chdir(cmdPath)
    try:
        # SECURITY NOTE: shell invocation; safe only with program-controlled args.
        os.system(cmd + args)
    finally:
        # Restore the caller's working directory even if the command raises.
        os.chdir(cwd)
    # 'with' guarantees the handle is closed even if readlines() fails.
    with open(tempCCFilePath, 'r') as tempCCFile:
        tempFileData = tempCCFile.readlines()
    return tempFileData
if __name__ == '__main__':
    # Module is import-only; no CLI entry point is defined.
    pass
| 33.738095
| 118
| 0.666902
| 344
| 2,834
| 5.462209
| 0.212209
| 0.051091
| 0.041511
| 0.036189
| 0.827568
| 0.827568
| 0.807877
| 0.786056
| 0.715806
| 0.715806
| 0
| 0.010278
| 0.176076
| 2,834
| 84
| 119
| 33.738095
| 0.794433
| 0.014114
| 0
| 0.714286
| 0
| 0.047619
| 0.15967
| 0.049516
| 0
| 0
| 0
| 0
| 0
| 1
| 0.047619
| false
| 0.015873
| 0.047619
| 0
| 0.142857
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
71d1f10eff7b2569ae979a952797c4e28b9b4ed6
| 3,708
|
py
|
Python
|
irrumatio_lab/migrations/0002_auto_20181122_1340.py
|
ycycorona/djangohelloworld
|
f3e8ca816f75ad2d9a8e0e14d485c144dfb818cf
|
[
"MIT"
] | null | null | null |
irrumatio_lab/migrations/0002_auto_20181122_1340.py
|
ycycorona/djangohelloworld
|
f3e8ca816f75ad2d9a8e0e14d485c144dfb818cf
|
[
"MIT"
] | null | null | null |
irrumatio_lab/migrations/0002_auto_20181122_1340.py
|
ycycorona/djangohelloworld
|
f3e8ca816f75ad2d9a8e0e14d485c144dfb818cf
|
[
"MIT"
] | null | null | null |
# Generated by Django 2.1.2 on 2018-11-22 05:40
from django.db import migrations, models
class Migration(migrations.Migration):
    """Auto-generated schema migration (Django 2.1.2, created 2018-11-22).

    Adds two new optional mark fields to ProductionEvaluation, and relaxes
    existing fields on Production, ProductionAddons and ProductionEvaluation
    to accept blank/null values (integer fields also gain a default of 0).

    NOTE: produced by `makemigrations`; avoid hand-editing the operations —
    generate a follow-up migration for further schema changes instead.
    """

    dependencies = [
        ('irrumatio_lab', '0001_initial'),
    ]

    operations = [
        # New optional integer mark columns (default 0).
        migrations.AddField(
            model_name='productionevaluation',
            name='actress_mark',
            field=models.SmallIntegerField(blank=True, default=0, null=True),
        ),
        migrations.AddField(
            model_name='productionevaluation',
            name='overall_mark',
            field=models.SmallIntegerField(blank=True, default=0, null=True),
        ),
        # Existing columns relaxed to allow blank/null.
        migrations.AlterField(
            model_name='production',
            name='lab_review',
            field=models.TextField(blank=True, null=True),
        ),
        migrations.AlterField(
            model_name='production',
            name='official_review',
            field=models.TextField(blank=True, null=True),
        ),
        migrations.AlterField(
            model_name='production',
            name='pub_date',
            field=models.DateField(blank=True, null=True),
        ),
        migrations.AlterField(
            model_name='productionaddons',
            name='cover_img',
            field=models.CharField(blank=True, max_length=200, null=True),
        ),
        migrations.AlterField(
            model_name='productionaddons',
            name='dmm_link',
            field=models.CharField(blank=True, max_length=200, null=True),
        ),
        migrations.AlterField(
            model_name='productionevaluation',
            name='cock_level',
            field=models.SmallIntegerField(blank=True, default=0, null=True),
        ),
        migrations.AlterField(
            model_name='productionevaluation',
            name='depth',
            field=models.SmallIntegerField(blank=True, default=0, null=True),
        ),
        migrations.AlterField(
            model_name='productionevaluation',
            name='irrumatio_cum_times',
            field=models.IntegerField(blank=True, default=0, null=True),
        ),
        migrations.AlterField(
            model_name='productionevaluation',
            name='irrumatio_duration_min',
            field=models.IntegerField(blank=True, default=0, null=True),
        ),
        migrations.AlterField(
            model_name='productionevaluation',
            name='irrumatio_throat_cum_times',
            field=models.IntegerField(blank=True, default=0, null=True),
        ),
        migrations.AlterField(
            model_name='productionevaluation',
            name='irrumatio_times',
            field=models.IntegerField(blank=True, default=0, null=True),
        ),
        migrations.AlterField(
            model_name='productionevaluation',
            name='puke_amount',
            field=models.SmallIntegerField(blank=True, default=0, null=True),
        ),
        migrations.AlterField(
            model_name='productionevaluation',
            name='puke_reaction',
            field=models.SmallIntegerField(blank=True, default=0, null=True),
        ),
        migrations.AlterField(
            model_name='productionevaluation',
            name='speed',
            field=models.SmallIntegerField(blank=True, default=0, null=True),
        ),
        migrations.AlterField(
            model_name='productionevaluation',
            name='spit_amount',
            field=models.SmallIntegerField(blank=True, default=0, null=True),
        ),
        migrations.AlterField(
            model_name='productionevaluation',
            name='spit_sound',
            field=models.SmallIntegerField(blank=True, default=0, null=True),
        ),
    ]
| 35.653846
| 77
| 0.587648
| 332
| 3,708
| 6.436747
| 0.210843
| 0.075807
| 0.143191
| 0.20964
| 0.877398
| 0.877398
| 0.834347
| 0.834347
| 0.776322
| 0.751521
| 0
| 0.014644
| 0.300162
| 3,708
| 103
| 78
| 36
| 0.808863
| 0.012136
| 0
| 0.731959
| 1
| 0
| 0.155149
| 0.013111
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.010309
| 0
| 0.041237
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 9
|
e0847ff47340311b2a15daa37d2da1880ab44b79
| 2,077
|
py
|
Python
|
volttrontesting/testutils/test_single_instance.py
|
rmay-intwine/volttron
|
a449f70e32f73ff0136a838d0feddb928ede6298
|
[
"Apache-2.0"
] | 1
|
2020-06-08T16:54:28.000Z
|
2020-06-08T16:54:28.000Z
|
volttrontesting/testutils/test_single_instance.py
|
rmay-intwine/volttron
|
a449f70e32f73ff0136a838d0feddb928ede6298
|
[
"Apache-2.0"
] | 8
|
2016-10-07T22:49:28.000Z
|
2022-02-23T00:57:58.000Z
|
volttrontesting/testutils/test_single_instance.py
|
rmay-intwine/volttron
|
a449f70e32f73ff0136a838d0feddb928ede6298
|
[
"Apache-2.0"
] | null | null | null |
import pytest
import gevent
from volttron.platform import get_examples
@pytest.mark.wrapper
def test_can_install_listeners(volttron_instance):
    """Install several ListenerAgents and verify the platform lists them all.

    Cleanup removes every installed agent even when assertions fail; removal
    errors are reported but do not mask the original test failure.
    """
    assert volttron_instance.is_running()
    uuids = []
    num_listeners = 5

    try:
        for x in range(num_listeners):
            identity = "listener_" + str(x)
            auuid = volttron_instance.install_agent(
                agent_dir=get_examples("ListenerAgent"), config_file={
                    "agentid": identity,
                    "message": "So Happpy"})
            assert auuid
            uuids.append(auuid)
            gevent.sleep(0.5)

        for u in uuids:
            assert volttron_instance.is_agent_running(u)

        agent = volttron_instance.build_agent()
        agent_list = agent.vip.rpc('control', 'list_agents').get(timeout=5)
        print('Agent List: {}'.format(agent_list))
        assert len(agent_list) == num_listeners
    finally:
        for x in uuids:
            try:
                volttron_instance.remove_agent(x)
            except Exception as err:
                # Was a bare `except:` that also swallowed SystemExit /
                # KeyboardInterrupt and hid the failure cause entirely.
                print("COULDN'T REMOVE AGENT {}: {}".format(x, err))
@pytest.mark.wrapper
def test_can_install_listeners_vi(volttron_instance):
    """Variant of test_can_install_listeners that passes start=True to
    install_agent, then verifies the platform lists every agent.

    Cleanup removes every installed agent even when assertions fail; removal
    errors are reported but do not mask the original test failure.
    """
    assert volttron_instance.is_running()
    uuids = []
    num_listeners = 5

    try:
        for x in range(num_listeners):
            identity = "listener_" + str(x)
            auuid = volttron_instance.install_agent(
                agent_dir=get_examples("ListenerAgent"),
                start=True,
                config_file={
                    "agentid": identity,
                    "message": "So Happpy"})
            assert auuid
            uuids.append(auuid)
            gevent.sleep(0.5)

        agent = volttron_instance.build_agent()
        agent_list = agent.vip.rpc('control', 'list_agents').get(timeout=5)
        print('Agent List: {}'.format(agent_list))
        assert len(agent_list) == num_listeners
    finally:
        for x in uuids:
            try:
                volttron_instance.remove_agent(x)
            except Exception as err:
                # Was a bare `except:` that also swallowed SystemExit /
                # KeyboardInterrupt and hid the failure cause entirely.
                print("COULDN'T REMOVE AGENT {}: {}".format(x, err))
| 30.544118
| 75
| 0.578238
| 226
| 2,077
| 5.088496
| 0.274336
| 0.153043
| 0.02087
| 0.062609
| 0.895652
| 0.895652
| 0.895652
| 0.895652
| 0.82087
| 0.82087
| 0
| 0.005714
| 0.325951
| 2,077
| 67
| 76
| 31
| 0.815714
| 0
| 0
| 0.807018
| 0
| 0
| 0.094367
| 0
| 0
| 0
| 0
| 0
| 0.122807
| 1
| 0.035088
| false
| 0
| 0.052632
| 0
| 0.087719
| 0.070175
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
e0be16e8bed9660dec1216a6c26aef027eed9ba2
| 280,620
|
py
|
Python
|
experiments/variables.py
|
antoniojsp/RoadTurns
|
e535d1407676a40e68da709fb1c7bba5000be760
|
[
"Unlicense"
] | null | null | null |
experiments/variables.py
|
antoniojsp/RoadTurns
|
e535d1407676a40e68da709fb1c7bba5000be760
|
[
"Unlicense"
] | 1
|
2020-10-28T17:13:54.000Z
|
2020-10-29T00:00:27.000Z
|
experiments/variables.py
|
antoniojsp/RoadTurns
|
e535d1407676a40e68da709fb1c7bba5000be760
|
[
"Unlicense"
] | null | null | null |
calles = ["NW Spruce","NW Spruce","NW Spruce","NW Spruce","goa","goa","goa","NW Spruce","NW Spruce","NW Spruce","NW Spruce","NW Spruce","NW Spruce","NW Spruce","NW Spruce","NW Spruce","NW Spruce","NW Spruce","NW Spruce","NW Spruce","NW Spruce","NW Spruce","NW Spruce","NW Spruce","NW Spruce","NW Spruce","NW Spruce","NW Spruce","NW Spruce","NW Spruce","NW Spruce","NW Spruce","NW Spruce","NW Spruce","NW Spruce","NW Spruce","NW Highland","NW Highland","NW Highland","NW Highland","NW Highland","NW Highland","NW Highland","NW Highland","NW Highland","NW Highland","NW goa","NW Highland","NW Highland","NW Highland","NW Highland","NW Highland","NW Highland","NW Highland","NW Highland","NW Highland","NW Highland","NW Highland","NW Highland","NW Highland","NW Highland","NW Highland","NW Highland","NW Highland","NW Highland",'NW Crescent Valley Dr','NW Crescent Valley Dr','NW Crescent Valley Dr','NW Crescent Valley Dr','NW Crescent Valley Dr','NW Crescent Valley Dr','NW Crescent Valley Dr','NW Crescent Valley Dr','NW Crescent Valley Dr','NW Crescent Valley Dr','NW Crescent Valley Dr','NW Crescent Valley Dr','NW Crescent Valley Dr','NW Crescent Valley Dr','NW Crescent Valley Dr','NW Crescent Valley Dr','NW Crescent Valley Dr','NW Crescent Valley Dr','NW Crescent Valley Dr','NW Crescent Valley Dr','Hoffman Rd','Hoffman Rd','Hoffman Rd','Hoffman Rd','Hoffman Rd','Hoffman Rd','Hoffman Rd','Hoffman Rd','Hoffman Rd','Hoffman Rd','Hoffman Rd','Hoffman Rd','Hoffman Rd','Hoffman Rd','Hoffman Rd','Hoffman Rd','Hoffman Rd','Hoffman Rd','Hoffman Rd','Hoffman Rd','Hoffman Rd','Hoffman Rd','Hoffman Rd','Hoffman Rd','Hoffman Rd','Hoffman Rd','Hoffman Rd','Hoffman Rd','Hoffman Rd','Hoffman Rd','Hoffman Rd','Hoffman Rd','Hoffman Rd','Hoffman Rd','Hoffman Rd','Hoffman Rd','Hoffman Rd','Hoffman Rd','Hoffman Rd','Hoffman Rd','Hoffman Rd','Hoffman Rd','Hoffman Rd','Hoffman Rd','Hoffman Rd','Hoffman Rd','Hoffman Rd','Hoffman Rd','Hoffman Rd','Hoffman Rd','Hoffman Rd','Hoffman Rd','Hoffman 
Rd','Hoffman Rd','Hoffman Rd','NW Crescent Valley Dr','NW Crescent Valley Dr','NW Crescent Valley Dr','NW Crescent Valley Dr','NW Crescent Valley Dr','NW Crescent Valley Dr','NW Crescent Valley Dr','NW Crescent Valley Dr','NW Crescent Valley Dr','NW Crescent Valley Dr','NW Crescent Valley Dr','NW Crescent Valley Dr','NW Crescent Valley Dr','NW Crescent Valley Dr','NW Crescent Valley Dr','NW Crescent Valley Dr','NW Crescent Valley Dr','NW Crescent Valley Dr','NW Crescent Valley Dr','NW Crescent Valley Dr','NW Crescent Valley Dr','NW Crescent Valley Dr','NW Crescent Valley Dr','NW Crescent Valley Dr','NW Crescent Valley Dr','NW Crescent Valley Dr','NW Crescent Valley Dr','NW Crescent Valley Dr','NW Crescent Valley Dr','NW Crescent Valley Dr','NW Crescent Valley Dr','NW Crescent Valley Dr','NW Crescent Valley Dr','NW Crescent Valley Dr','NW Crescent Valley Dr','NW Lewisburg Ave','NW Lewisburg Ave','NW Lewisburg Ave','NW Lewisburg Ave','NW Lewisburg Ave','NW Lewisburg Ave','NW Lewisburg Ave','NW Lewisburg Ave','NW Lewisburg Ave','NW Lewisburg Ave','NW Lewisburg Ave','NW Lewisburg Ave','NW Lewisburg Ave','NW Lewisburg Ave','NW Lewisburg Ave','NW Lewisburg Ave','NW Lewisburg Ave','NW Lewisburg Ave','NW Lewisburg Ave','NW Lewisburg Ave','NW Lewisburg Ave','NW Lewisburg Ave','NW Lewisburg Ave','NW Lewisburg Ave','NW Lewisburg Ave','NW Lewisburg Ave','NW Lewisburg Ave','NW Lewisburg Ave','NW Lewisburg Ave','NW Lewisburg Ave','NW Lewisburg Ave','NW Lewisburg Ave','NW Lewisburg Ave','NW Lewisburg Ave','NW Lewisburg Ave','NW Lewisburg Ave','NW Lewisburg Ave','NW Lewisburg Ave','NW Lewisburg Ave','NW Lewisburg Ave','NW Lewisburg Ave','NW Lewisburg Ave','NW Lewisburg Ave','NW Lewisburg Ave','NW Lewisburg Ave','NW Lewisburg Ave','NW Lewisburg Ave','NW Lewisburg Ave','NW Lewisburg Ave','NW Lewisburg Ave','NW Lewisburg Ave','NW Lewisburg Ave','NW Lewisburg Ave','NW Lewisburg Ave','NW Lewisburg Ave','NW Lewisburg Ave','NW Lewisburg Ave','NW Lewisburg Ave','NW Lewisburg Ave','NW 
Lewisburg Ave','NW Lewisburg Ave','NW Lewisburg Ave','NW Lewisburg Ave','NW Lewisburg Ave','NW Sulphur Springs Rd','NW Sulphur Springs Rd','NW Sulphur Springs Rd','NW Sulphur Springs Rd','NW Sulphur Springs Rd','NW Sulphur Springs Rd','NW Sulphur Springs Rd','NW Sulphur Springs Rd','NW Sulphur Springs Rd','NW Sulphur Springs Rd','NW Sulphur Springs Rd','NW Sulphur Springs Rd','NW Sulphur Springs Rd','NW Sulphur Springs Rd','NW Sulphur Springs Rd','NW Sulphur Springs Rd','NW Sulphur Springs Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico 
Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico 
Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico 
Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd', 'Airlie Rd', 'Airlie Rd', 'Airlie Rd', 'Airlie Rd', 'Airlie Rd', 'Airlie Rd', 'Airlie Rd', 'Airlie Rd', 'Airlie Rd', 'Airlie Rd', 'Airlie Rd', 'Airlie Rd', 'Airlie Rd', 'Airlie Rd', 'Airlie Rd', 'Airlie Rd', 'Airlie Rd', 'Airlie Rd', 'Airlie Rd', 'Airlie Rd', 'Airlie Rd', 'Airlie Rd', 'Airlie Rd', 'Airlie Rd', 'Airlie Rd', 'Airlie Rd', 'Airlie Rd', 'Airlie Rd', 'Airlie Rd', 'Airlie Rd', 'Airlie Rd', 'Airlie Rd', 'Airlie Rd', 'Airlie Rd', 'Airlie Rd', 'Airlie Rd', 'Airlie Rd', 'Airlie Rd', 'Airlie Rd', 'Airlie Rd', 'Airlie Rd', 'Airlie Rd', 'Airlie Rd', 'Airlie Rd', 'Airlie Rd', 'Airlie Rd', 'Airlie Rd', 'Airlie Rd', 'Airlie Rd', 'Airlie Rd', 'Airlie Rd', 'Airlie Rd', 'Airlie Rd', 'Airlie Rd', 'Airlie Rd', 'Airlie Rd', 'Airlie Rd', 'Airlie Rd', 'Airlie Rd', 'Airlie Rd', 'Airlie Rd', 'Airlie Rd', 'Airlie Rd', 'Airlie Rd', 'Airlie Rd', 'Airlie Rd', 'Airlie Rd', 'Airlie Rd', 'Airlie Rd', 'Airlie Rd', 'Airlie Rd', 'Airlie Rd', 'Airlie Rd', 'Airlie Rd', 'Airlie Rd', 'Airlie Rd', 'Airlie Rd', 'Airlie Rd', 'Airlie Rd', 
'Airlie Rd', 'Airlie Rd', 'Airlie Rd', 'Airlie Rd', 'Airlie Rd', 'Airlie Rd', 'Airlie Rd', 'Airlie Rd', 'Airlie Rd', 'Airlie Rd', 'Airlie Rd', 'Airlie Rd', 'Airlie Rd', 'Airlie Rd', 'Airlie Rd', 'Airlie Rd', 'Airlie Rd', 'Airlie Rd', 'Airlie Rd', 'Airlie Rd', 'Airlie Rd', 'Airlie Rd', 'Airlie Rd', 'Airlie Rd', 'Airlie Rd', 'Airlie Rd', 'Airlie Rd', 'Airlie Rd', 'Airlie Rd', 'Airlie Rd', 'Airlie Rd', 'Airlie Rd', 'Airlie Rd', 'Airlie Rd', 'Airlie Rd', 'Airlie Rd', 'Airlie Rd', 'Airlie Rd', 'Airlie Rd', 'Airlie Rd', 'Airlie Rd', 'Airlie Rd', 'Airlie Rd', 'Airlie Rd', 'Airlie Rd', 'Airlie Rd', 'Airlie Rd', 'Airlie Rd', 'Airlie Rd', 'Airlie Rd', 'Airlie Rd', 'Airlie Rd', 'Airlie Rd', 'Airlie Rd', 'Airlie Rd', 'Airlie Rd', 'Airlie Rd', 'Airlie Rd', 'Airlie Rd', 'Airlie Rd', 'Airlie Rd', 'Airlie Rd', 'Airlie Rd', 'Airlie Rd', 'Airlie Rd', 'Airlie Rd', 'Airlie Rd', 'Airlie Rd', 'Airlie Rd', 'Airlie Rd', 'Airlie Rd', 'Airlie Rd', 'Airlie Rd', 'Airlie Rd', 'Airlie Rd', 'Airlie Rd', 'Airlie Rd', 'Airlie Rd', 'Airlie Rd', 'Airlie Rd', 'Airlie Rd', 'Airlie Rd', 'Airlie Rd', 'Airlie Rd', 'Airlie Rd', 'Airlie Rd', 'Airlie Rd', 'Airlie Rd', 'Airlie Rd', 'Airlie Rd', 'Airlie Rd', 'Airlie Rd', 'Airlie Rd', 'Airlie Rd', 'Airlie Rd', 'Airlie Rd', 'Airlie Rd', 'Airlie Rd', 'Airlie Rd', 'Airlie Rd', 'Airlie Rd', 'Airlie Rd', 'Airlie Rd', 'Airlie Rd', 'Airlie Rd', 'Airlie Rd', 'Airlie Rd', 'Airlie Rd', 'Airlie Rd', 'Airlie Rd', 'Airlie Rd', 'Airlie Rd', 'Airlie Rd', 'Airlie Rd', 'Airlie Rd', 'Airlie Rd', 'Airlie Rd', 'Airlie Rd', 'Airlie Rd', 'Airlie Rd', 'Airlie Rd', 'Airlie Rd', 'Airlie Rd', 'Airlie Rd', 'Airlie Rd', 'Airlie Rd', 'Airlie Rd', 'Airlie Rd', 'Airlie Rd', 'Airlie Rd', 'Airlie Rd', 'Airlie Rd', 'Airlie Rd', 'Airlie Rd', 'Airlie Rd', 'Airlie Rd', 'Airlie Rd', 'Airlie Rd', 'Airlie Rd', 'Airlie Rd', 'Airlie Rd', 'Airlie Rd', 'Airlie Rd', 'Airlie Rd', 'Airlie Rd', 'Knox St S', 'Knox St S', 'Knox St S', 'Knox St S', 'Knox St S', 'Knox St S', 'Knox St S', 'Knox St S', 'Knox St 
S', 'Knox St S', 'Knox St S', 'Knox St S', 'Knox St S', 'Knox St S', 'Knox St S', 'Knox St S', 'Knox St S', 'Knox St S', 'Knox St S', 'Knox St S', 'Knox St S', 'Knox St S', 'Knox St S', 'Knox St S', 'Knox St S', 'Knox St S', 'Knox St S', 'Knox St S', 'Knox St S', 'Knox St S', 'Knox St S', 'Knox St S', 'Knox St S', 'Knox St S', 'Knox St S', 'Knox St S', 'Knox St S', 'Knox St S', 'Knox St S', 'Knox St S', 'Knox St S', 'Knox St S', 'Knox St S', 'Knox St S', 'Knox St S', 'Knox St S', 'Knox St S', 'Knox St S', 'Knox St S', 'Knox St S', 'Knox St S', 'Knox St S', 'Knox St S', 'Knox St S', 'Knox St S', 'Knox St S', 'Knox St S', 'Knox St S', 'Knox St S', 'Knox St S', 'Knox St S', 'Knox St S', 'Knox St S', 'Knox St S', 'Knox St S', 'Knox St S', 'Knox St S', 'Knox St S', 'Knox St S', 'Knox St S', 'Knox St S', 'Knox St S', 'Knox St S', 'Knox St S', 'Knox St S', 'Knox St S', 'Knox St S', 'Knox St S', 'Knox St S', 'Knox St S', 'Knox St S', 'Knox St S', 'Knox St S', 'Knox St S', 'Knox St S', 'Knox St S', 'Knox St S', 'Knox St S', 'Knox St S', 'Knox St S', 'Knox St S', 'Knox St S', 'Knox St S', 'Knox St S', 'Knox St S', 'Knox St S', 'Knox St S', 'Knox St S', 'Knox St S', 'Knox St S', 'Knox St S', 'Knox St S', 'Knox St S', 'Knox St S', 'Knox St S', 'Knox St S', 'Knox St S', 'Knox St S', 'Knox St S', 'Knox St S', 'Knox St S', 'Knox St S', 'Knox St S', 'Knox St S', 'Knox St S', 'Knox St S', 'Knox St S', 'Knox St S', 'Knox St S', 'Knox St S', 'Knox St S', 'Knox St S', 'Knox St S', 'Knox St S', 'Knox St S', 'Knox St S', 'Knox St S', 'Knox St S', 'Knox St S', 'Knox St S', 'Knox St S', 'Knox St S', 'Knox St S', 'Knox St S', 'Knox St S', 'Knox St S', 'Knox St S', 'Knox St S', 'Knox St S', 'Knox St S', 'Knox St S', 'Knox St S', 'Knox St S', 'Knox St S', 'Knox St S', 'Knox St S', 'Knox St S', 'Knox St S', 'Knox St S', 'Knox St S', 'Knox St S', 'Knox St S', 'Knox St S', 'Knox St S', 'Knox St S', 'Knox St S', 'Knox St S', 'Knox St S', 'Knox St S', 'Knox St S', 'Knox St S', 'Knox St S', 'Knox 
St S', 'Knox St S', 'Knox St S', 'Knox St S', 'Knox St S', 'Knox St S', 'Knox St S', 'Knox St S', 'Knox St S', 'Knox St S', 'Knox St S', 'Knox St S', 'Knox St S', 'Knox St S', 'Knox St S', 'Knox St S', 'Knox St S', 'Knox St S', 'Knox St S', 'Knox St S', 'Knox St S', 'Knox St S', 'Knox St S', 'Knox St S', 'Knox St S', 'Knox St S', 'Knox St S', 'Knox St S', 'Knox St S', 'Knox St S', 'Knox St S', 'Knox St S', 'Knox St S', 'Knox St S', 'Knox St S', 'Knox St S', 'Knox St S', 'Knox St S', 'Knox St S', 'Knox St S', 'Knox St S', 'Knox St S', 'Knox St S', 'Knox St S', 'Knox St S', 'Knox St S', 'Knox St S', 'Knox St S', 'Knox St S', 'Knox St S', 'Knox St S', 'Knox St S', 'Knox St S', 'Knox St S', 'Knox St S', 'Knox St S','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Knox St S', 'Knox St S', 'Knox St S', 'Knox St S', 'Knox St S', 'Knox St S', 'Knox St S', 'Knox St S', 'Knox St S', 'Knox St S', 'Knox St S', 'Knox St S', 'Knox St S', 'Knox St S', 'Knox St S', 'Knox St S', 'Knox St S', 'Knox St S', 'Knox St S', 'Knox St S', 'Knox St S', 'Knox St S', 'Knox St S', 'Knox St S', 'Knox St S', 'Knox St S', 'Knox St S', 'Knox St S', 'Knox St S', 'Knox St S', 'Knox St S', 'Knox St S', 'Knox St S', 'Knox St S', 'Knox St S', 'Knox St S', 'Knox St S', 'Knox St S', 'Knox St S', 'Knox St S', 'Knox St S', 'Knox St S', 'Knox St S', 'Knox St S', 'Knox St S', 'Knox St S', 'Knox St S', 'Knox St S', 'Knox St S', 'Knox St S', 'Knox St S', 'Knox St S', 'Knox St S', 
'Knox St S', 'Knox St S', 'Knox St S', 'Knox St S', 'Knox St S', 'Knox St S', 'Knox St S', 'Knox St S', 'Knox St S', 'Knox St S', 'Knox St S', 'Knox St S', 'Knox St S', 'Knox St S', 'Knox St S', 'Knox St S', 'Knox St S', 'Knox St S', 'Knox St S', 'Knox St S', 'Knox St S', 'Knox St S', 'Knox St S', 'Knox St S', 'Knox St S', 'Knox St S', 'Knox St S', 'Knox St S', 'Knox St S', 'Knox St S', 'Knox St S', 'Knox St S', 'Knox St S', 'Knox St S', 'Knox St S', 'Knox St S', 'Knox St S', 'Knox St S', 'Knox St S', 'Knox St S', 'Knox St S', 'Knox St S', 'Knox St S', 'Knox St S', 'Knox St S', 'Knox St S', 'Knox St S', 'Knox St S', 'Knox St S', 'Knox St S', 'Knox St S', 'Knox St S', 'Knox St S', 'Knox St S', 'Knox St S', 'Knox St S', 'Knox St S', 'Knox St S', 'Knox St S', 'Knox St S', 'Knox St S', 'Knox St S', 'Knox St S', 'Knox St S', 'Knox St S', 'Knox St S', 'Knox St S', 'Knox St S', 'Knox St S', 'Knox St S', 'Knox St S', 'Knox St S', 'Knox St S', 'Knox St S', 'Knox St S', 'Knox St S', 'Knox St S', 'Knox St S', 'Knox St S', 'Knox St S', 'Knox St S', 'Knox St S', 'Knox St S', 'Knox St S', 'Knox St S', 'Knox St S', 'Knox St S', 'Knox St S', 'Knox St S', 'Warren St S', 'Warren St S', 'Warren St S', 'Warren St S', 'Warren St S', 'Warren St S', 'Warren St S', 'Warren St S', 'Warren St S', 'Warren St S', 'Warren St S', 'Warren St S', 'Warren St S', 'Warren St S', 'Warren St S', 'Warren St S', 'Warren St S', 'Warren St S', 'Warren St S', 'Warren St S', 'Warren St S', 'Warren St S', 'Warren St S', 'Warren St S', 'Warren St S', 'Warren St S', 'Warren St S', 'Warren St S', 'Warren St S', 'Warren St S', 'Warren St S', 'Warren St S', 'Warren St S', 'Warren St S', 'Warren St S', 'Warren St S', 'Warren St S', 'Warren St S', 'Warren St S', 'Warren St S', 'Warren St S', 'Warren St S', 'Warren St S', 'Warren St S', 'Warren St S', 'Warren St S', 'Warren St S', 'Warren St S', 'Warren St S', 'Warren St S', 'Warren St S', 'Warren St S', 'Warren St S', 'Warren St S', 'Warren St S', 'Warren St S', 
'Warren St S', 'Warren St S', 'Warren St S', 'Warren St S', 'Warren St S', 'Warren St S', 'Warren St S', 'Warren St S', 'Warren St S', 'Warren St S', 'Warren St S', 'Warren St S', 'Warren St S', 'Warren St S', 'Warren St S', 'Warren St S', 'Warren St S', 'Warren St S', 'Warren St S', 'Warren St S', 'Warren St S', 'Warren St S', 'Warren St S', 'Warren St S', 'Warren St S', 'Warren St S', 'Warren St S', 'Warren St S', 'Warren St S', 'Warren St S', 'Warren St S', 'Warren St S', 'Warren St S', 'Warren St S', 'Warren St S', 'Warren St S', 'Warren St S', 'Warren St S', 'Warren St S', 'Warren St S', 'Warren St S', 'Warren St S', 'Warren St S', 'Warren St S', 'Warren St S', 'Warren St S', 'Warren St S', 'Warren St S', 'Warren St S', 'Warren St S', 'Warren St S', 'Warren St S', 'Warren St S', 'Warren St S', 'Warren St S', 'Warren St S', 'Warren St S', 'Warren St S', 'Warren St S', 'Warren St S', 'Warren St S', 'Warren St S', 'Warren St S', 'Warren St S', 'Warren St S', 'Warren St S', 'Warren St S', 'Warren St S', 'Warren St S', 'Warren St S', 'Warren St S', 'Warren St S', 'Warren St S', 'Warren St S', 'Warren St S', 'Warren St S', 'Warren St S', 'Warren St S', 'Warren St S', 'Warren St S', 'Warren St S', 'Warren St S', 'Warren St S', 'Warren St S', 'Warren St S', 'Warren St S', 'Warren St S', 'Warren St S', 'Warren St S', 'Warren St S', 'Warren St S', 'Warren St S', 'Warren St S', 'Warren St S', 'Warren St S', 'Warren St S', 'Warren St S', 'Warren St S', 'Warren St S', 'Warren St S', 'Warren St S', 'Warren St S', 'Warren St S', 'Warren St S', 'Warren St S', 'Warren St S', 'Warren St S', 'Warren St S', 'Warren St S', 'Warren St S', 'Warren St S', 'Warren St S', 'Warren St S', 'Warren St S', 'Warren St S', 'Warren St S', 'Warren St S', 'Warren St S', 'Warren St S', 'Warren St S', 'Warren St S', 'Warren St S', 'Warren St S', 'Warren St S', 'Warren St S', 'Warren St S', 'Warren St S', 'Warren St S', 'Warren St S', 'Warren St S', 'Warren St S', 'Warren St S', 'Warren St S', 
'Warren St S', 'Warren St S', 'Warren St S', 'Warren St S', 'Warren St S', 'Warren St S', 'Warren St S', 'Warren St S', 'Warren St S', 'Warren St S', 'Warren St S', 'Warren St S', 'Warren St S', 'Warren St S', 'Warren St S', 'Warren St S', 'Warren St S', 'Warren St S', 'Warren St S', 'Warren St S', 'Warren St S', 'Warren St S', 'Warren St S', 'Warren St S', 'Warren St S', 'Warren St S', 'Warren St S', 'Warren St S', 'Warren St S', 'Warren St S', 'Warren St S', 'Warren St S', 'Warren St S', 'Warren St S', 'Warren St S', 'Warren St S', 'Warren St S', 'Warren St S', 'Warren St S', 'Warren St S', 'Warren St S', 'Warren St S', 'Warren St S', 'Warren St S', 'Warren St S', 'Warren St S', 'Warren St S', 'Warren St S', 'Warren St S', 'Warren St S', 'Warren St S', 'Warren St S', 'Warren St S', 'Warren St S', 'Warren St S', 'Warren St S', 'Warren St S', 'Warren St S', 'Warren St S', 'Warren St S', 'Warren St S', 'Warren St S', 'Warren St S', 'Warren St S', 'Warren St S', 'Warren St S', 'Warren St S', 'Warren St S', 'Warren St S', 'Warren St S', 'Warren St S', 'Warren St S', 'Warren St S', 'Warren St S', 'Warren St S', 'Warren St S', 'Warren St S', 'Warren St S', 'Warren St S', 'Warren St S', 'Warren St S', 'Warren St S', 'Warren St S', 'Warren St S', 'Warren St S', 'Warren St S', 'Warren St S', 'Warren St S', 'Warren St S', 'Warren St S', 'Warren St S', 'Warren St S', 'Warren St S', 'Warren St S', 'Warren St S', 'Warren St S', 'Warren St S', 'Warren St S', 'Warren St S', 'Warren St S', 'Warren St S', 'Warren St S', 'Warren St S', 'Warren St S', 'Warren St S', 'Warren St S', 'Warren St S', 'Warren St S', 'Warren St S', 'Warren St S', 'Warren St S', 'Warren St S', 'Warren St S', 'Warren St S', 'Warren St S', 'Warren St S', 'Warren St S', 'Warren St S', 'Warren St S', 'Warren St S', 'Warren St S', 'Warren St S', 'Warren St S', 'Warren St S', 'Warren St S', 'Warren St S', 'Riddell Rd', 'Riddell Rd', 'Riddell Rd', 'Riddell Rd', 'Riddell Rd', 'Riddell Rd', 'Riddell Rd', 'Riddell 
Rd', 'Riddell Rd', 'Riddell Rd', 'Riddell Rd', 'Riddell Rd', 'Riddell Rd', 'Riddell Rd', 'Riddell Rd', 'Riddell Rd', 'Riddell Rd', 'Riddell Rd', 'Riddell Rd', 'Riddell Rd', 'Riddell Rd', 'Riddell Rd', 'Riddell Rd', 'Riddell Rd', 'Riddell Rd', 'Riddell Rd', 'Riddell Rd', 'Riddell Rd', 'Riddell Rd', 'Riddell Rd', 'Riddell Rd', 'Riddell Rd', 'Riddell Rd', 'Riddell Rd', 'Riddell Rd', 'Riddell Rd', 'Riddell Rd', 'Riddell Rd', 'Riddell Rd', 'Riddell Rd', 'Riddell Rd', 'Riddell Rd', 'Riddell Rd', 'Riddell Rd', 'Riddell Rd', 'Riddell Rd', 'Riddell Rd', 'Riddell Rd', 'Riddell Rd', 'Riddell Rd', 'Riddell Rd', 'Riddell Rd', 'Riddell Rd', 'Riddell Rd', 'Riddell Rd', 'Riddell Rd', 'Riddell Rd', 'Riddell Rd', 'Riddell Rd', 'Riddell Rd', 'Riddell Rd', 'Riddell Rd', 'Riddell Rd', 'Riddell Rd', 'Riddell Rd', 'Riddell Rd', 'Riddell Rd', 'Riddell Rd', 'Riddell Rd', 'Riddell Rd', 'Riddell Rd', 'Riddell Rd', 'Riddell Rd', 'Riddell Rd', 'Riddell Rd', 'Riddell Rd', 'Riddell Rd', 'Riddell Rd', 'Riddell Rd', 'Riddell Rd', 'Riddell Rd', 'Riddell Rd', 'Riddell Rd', 'Riddell Rd', 'Riddell Rd', 'Riddell Rd', 'Riddell Rd', 'Riddell Rd', 'Riddell Rd', 'Riddell Rd', 'Riddell Rd', 'Riddell Rd', 'Riddell Rd', 'Riddell Rd', 'Riddell Rd', 'Riddell Rd', 'Riddell Rd', 'Riddell Rd', 'Riddell Rd', 'Riddell Rd', 'Riddell Rd', 'Riddell Rd', 'Riddell Rd', 'Riddell Rd', 'Riddell Rd', 'Riddell Rd', 'Riddell Rd', 'Riddell Rd', 'Riddell Rd', 'Riddell Rd', 'Riddell Rd', 'Riddell Rd', 'Riddell Rd', 'Riddell Rd', 'Riddell Rd', 'Riddell Rd', 'Riddell Rd', 'Riddell Rd', 'Riddell Rd', 'Riddell Rd', 'Riddell Rd', 'Riddell Rd', 'Riddell Rd', 'Riddell Rd', 'Riddell Rd', 'Riddell Rd', 'Riddell Rd', 'Riddell Rd', 'Riddell Rd', 'Riddell Rd', 'Riddell Rd', 'Riddell Rd', 'Riddell Rd', 'Riddell Rd', 'Riddell Rd', 'Riddell Rd', 'Riddell Rd', 'Riddell Rd', 'Riddell Rd', 'Riddell Rd', 'Riddell Rd', 'Riddell Rd', 'Riddell Rd', 'Riddell Rd', 'Riddell Rd', 'Riddell Rd', 'Riddell Rd', 'Riddell Rd', 'Riddell Rd', 'Riddell Rd', 
'Riddell Rd', 'Riddell Rd', 'Riddell Rd', 'Riddell Rd', 'Riddell Rd', 'Riddell Rd', 'Riddell Rd', 'Riddell Rd', 'Riddell Rd', 'Riddell Rd', 'Riddell Rd', 'Riddell Rd', 'Riddell Rd', 'Riddell Rd', 'Riddell Rd', 'Riddell Rd', 'Riddell Rd', 'Riddell Rd', 'Riddell Rd', 'Riddell Rd', 'Riddell Rd', 'Riddell Rd', 'Riddell Rd', 'Riddell Rd', 'Riddell Rd', 'Riddell Rd', 'Riddell Rd', 'Riddell Rd', 'Riddell Rd', 'Riddell Rd', 'Riddell Rd', 'Riddell Rd', 'Riddell Rd', 'Riddell Rd', 'Riddell Rd', 'Riddell Rd', 'Riddell Rd', 'Riddell Rd', 'Riddell Rd','Main St','Main St','Main St','Main St','Main St','Main St','Main St','Main St','Main St','Main St','Main St','Main St','Main St','Main St','Main St','Main St','Main St','Main St','Main St','Main St','Main St','Main St','Main St','Main St','Main St','Main St','Main St','Main St','Main St','Main St','Main St','Main St','Main St','Main St','Main St','Main St','Main St','Main St','Main St','Main St','Main St','Main St','Main St','Main St','Main St','Main St','Main St','Main St','Main St','Main St','Main St','Main St','Main St','Main St','Main St','Main St','Main St','Main St','Main St','Main St','Main St','Main St','Main St','Main St','Main St','Main St','Main St','Main St','Main St','Main St','Main St','Main St','Main St','Main St','Main St','Main St','Main St','Main St','Main St','Main St','Main St','Main St','Main St','Main St','Main St','Main St','Main St','Main St','Main St','Main St','Main St','Main St','Main St','Main St','Main St','Main St','Main St','Main St','Main St','Main St','Main St','Main St','Main St','Main St','Main St','Main St','Main St','Main St','Main St','Main St','Main St','Main St','Main St','Main St','Main St','Main St','Main St','Main St','Main St','Main St','Main St','Main St','Main St','Main St','Main St','Main St','Main St','Main St','Main St','Main St','Main St','Main St','Main St','Main St','Main St','Main St','Main St','Main St','Main St','Main St','Main St','Main St','Main St','Main St','Main 
St','Main St','Main St','Main St','Main St','Main St','Main St','Main St','Main St','Main St','Main St','Main St','Main St','Main St','Main St','Main St', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd 
S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River 
Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 
'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 
'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 
'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 
'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 
'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S','Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 
'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S','Willamette Ferry St','Willamette Ferry St','Willamette Ferry St','Willamette Ferry St','Willamette Ferry St','Willamette Ferry St','Willamette Ferry St','Willamette Ferry St','Willamette Ferry St','Willamette Ferry St','Willamette Ferry St','Willamette Ferry St','Willamette Ferry St','Willamette Ferry St','Willamette Ferry St','Willamette Ferry St','Willamette Ferry St','Willamette Ferry St','Willamette Ferry St','Willamette Ferry St','Willamette Ferry St','Willamette Ferry St','Willamette Ferry St','Willamette Ferry St','Willamette Ferry St','Willamette Ferry St','Willamette Ferry St','Willamette Ferry St','Willamette Ferry St','Willamette Ferry St','Willamette Ferry St','Willamette Ferry St','Willamette Ferry St','Willamette Ferry St','Willamette Ferry St','Willamette Ferry St','Willamette Ferry St','Willamette Ferry St','Willamette Ferry St','Willamette Ferry St','Willamette Ferry St','Willamette Ferry St','Willamette Ferry St','Willamette Ferry St','Willamette Ferry St','Willamette Ferry St','Willamette Ferry St','Willamette Ferry St','Willamette Ferry St','Willamette Ferry St','Willamette Ferry St','Willamette Ferry St','Willamette Ferry St','Willamette Ferry St','Willamette Ferry St','Willamette Ferry St','Willamette Ferry St','Willamette Ferry St','Willamette Ferry St','Willamette Ferry St','Willamette Ferry St','Willamette Ferry St','Willamette Ferry St','Willamette Ferry St','Willamette Ferry St','Willamette Ferry St','Willamette Ferry St','Willamette Ferry St','Willamette Ferry St','Willamette Ferry St','Willamette Ferry St','Willamette Ferry St','Willamette Ferry St','Willamette Ferry St','Willamette Ferry St','Willamette Ferry St','Willamette Ferry St','Willamette Ferry St','Willamette Ferry St','Willamette Ferry St','Willamette Ferry St','Willamette Ferry St','Willamette Ferry St','Willamette Ferry St','Willamette Ferry St','Willamette Ferry St','Willamette Ferry St','Willamette 
Ferry St','Willamette Ferry St','Willamette Ferry St','Willamette Ferry St','Willamette Ferry St','Willamette Ferry St','Willamette Ferry St','Willamette Ferry St','Willamette Ferry St','Willamette Ferry St','Willamette Ferry St','Willamette Ferry St','Willamette Ferry St','Willamette Ferry St','Willamette Ferry St','Willamette Ferry St','Willamette Ferry St','Willamette Ferry St','Willamette Ferry St','Willamette Ferry St','Willamette Ferry St','Willamette Ferry St','Willamette Ferry St','Willamette Ferry St','Willamette Ferry St','Willamette Ferry St','Willamette Ferry St','Willamette Ferry St','Willamette Ferry St','Willamette Ferry St','Willamette Ferry St','Willamette Ferry St','Willamette Ferry St','Willamette Ferry St','Willamette Ferry St','Willamette Ferry St','Willamette Ferry St','Willamette Ferry St','Willamette Ferry St','Willamette Ferry St','Willamette Ferry St','Willamette Ferry St','Willamette Ferry St','Willamette Ferry St','Willamette Ferry St','Willamette Ferry St','Willamette Ferry St','Willamette Ferry St','Willamette Ferry St','Willamette Ferry St','Willamette Ferry St','Willamette Ferry St','Willamette Ferry St','Willamette Ferry St','Willamette Ferry St','Willamette Ferry St','Willamette Ferry St','Willamette Ferry St','Willamette Ferry St','Willamette Ferry St','Willamette Ferry St','Willamette Ferry St','Willamette Ferry St','Willamette Ferry St','Willamette Ferry St','Willamette Ferry St','Willamette Ferry St','Willamette Ferry St','Willamette Ferry St','Willamette Ferry St','Willamette Ferry St','Willamette Ferry St','Willamette Ferry St','Willamette Ferry St','Willamette Ferry St','Willamette Ferry St','Willamette Ferry St','Willamette Ferry St','Willamette Ferry St','Willamette Ferry St','Willamette Ferry St','Willamette Ferry St','Willamette Ferry St','Willamette Ferry St','Willamette Ferry St','Willamette Ferry St','Willamette Ferry St','Willamette Ferry St','Willamette Ferry St','Willamette Ferry St','Willamette Ferry 
St','Willamette Ferry St','Willamette Ferry St','Willamette Ferry St','Willamette Ferry St','Willamette Ferry St','Willamette Ferry St','Willamette Ferry St','Willamette Ferry St','Willamette Ferry St','Willamette Ferry St','Willamette Ferry St','Willamette Ferry St','Willamette Ferry St','Willamette Ferry St','Willamette Ferry St','Willamette Ferry St','Willamette Ferry St','Willamette Ferry St','Willamette Ferry St','Willamette Ferry St','Willamette Ferry St','Willamette Ferry St','Willamette Ferry St','Willamette Ferry St','Willamette Ferry St','Willamette Ferry St','Willamette Ferry St','Willamette Ferry St','Willamette Ferry St','Willamette Ferry St','Willamette Ferry St','Willamette Ferry St','Willamette Ferry St','Willamette Ferry St','Willamette Ferry St','Willamette Ferry St','Willamette Ferry St','Willamette Ferry St','Willamette Ferry St','Willamette Ferry St','Willamette Ferry St','Willamette Ferry St','Willamette Ferry St','Willamette Ferry St','Willamette Ferry St','Willamette Ferry St','Willamette Ferry St','Willamette Ferry St','Willamette Ferry St','Willamette Ferry St','Willamette Ferry St','Willamette Ferry St','Willamette Ferry St','Willamette Ferry St','Willamette Ferry St','Willamette Ferry St','Willamette Ferry St','Willamette Ferry St','Willamette Ferry St','Willamette Ferry St','Willamette Ferry St','Willamette Ferry St','Willamette Ferry St','Willamette Ferry St','Willamette Ferry St','Willamette Ferry St','Willamette Ferry St','Willamette Ferry St','Willamette Ferry St','Willamette Ferry St','Willamette Ferry St','Willamette Ferry St','Willamette Ferry St','Willamette Ferry St','Willamette Ferry St','Willamette Ferry St','Willamette Ferry St','Willamette Ferry St','Willamette Ferry St','Willamette Ferry St','Willamette Ferry St','Willamette Ferry St','Willamette Ferry St','Willamette Ferry St','Willamette Ferry St','Willamette Ferry St','Willamette Ferry St','Willamette Ferry St','Willamette Ferry St','Willamette Ferry St','Willamette 
Ferry St','Willamette Ferry St','Willamette Ferry St','Willamette Ferry St','Willamette Ferry St','Willamette Ferry St','Willamette Ferry St','Willamette Ferry St','Willamette Ferry St','Willamette Ferry St','Willamette Ferry St','Willamette Ferry St','Willamette Ferry St','Willamette Ferry St','Willamette Ferry St','Willamette Ferry St','Willamette Ferry St','Willamette Ferry St','Willamette Ferry St','Willamette Ferry St','Willamette Ferry St','Willamette Ferry St','Willamette Ferry St','Willamette Ferry St','Willamette Ferry St','Willamette Ferry St','Willamette Ferry St','Willamette Ferry St','Willamette Ferry St','Willamette Ferry St','Willamette Ferry St','Willamette Ferry St','Willamette Ferry St','Willamette Ferry St','Willamette Ferry St','Willamette Ferry St','Willamette Ferry St','Willamette Ferry St','Willamette Ferry St','Willamette Ferry St','Willamette Ferry St','Willamette Ferry St','Willamette Ferry St','Willamette Ferry St','Willamette Ferry St','Willamette Ferry St','Willamette Ferry St','Willamette Ferry St','Willamette Ferry St','Willamette Ferry St','Willamette Ferry St','Willamette Ferry St','Willamette Ferry St','Willamette Ferry St','Willamette Ferry St','Willamette Ferry St','Willamette Ferry St','Willamette Ferry St','Willamette Ferry St','Willamette Ferry St','Willamette Ferry St','Willamette Ferry St','Willamette Ferry St','Willamette Ferry St','Willamette Ferry St','Willamette Ferry St','Willamette Ferry St','Willamette Ferry St','Willamette Ferry St','Willamette Ferry St','Willamette Ferry St','Willamette Ferry St','Willamette Ferry St','Willamette Ferry St','Willamette Ferry St','Willamette Ferry St','Willamette Ferry St','Willamette Ferry St','Willamette Ferry St','Willamette Ferry St','Willamette Ferry St','Willamette Ferry St','Willamette Ferry St','Willamette Ferry St','Willamette Ferry St','Willamette Ferry St','Willamette Ferry St','Willamette Ferry St','Willamette Ferry St','Willamette Ferry St','Willamette Ferry 
St','Willamette Ferry St','Willamette Ferry St','Willamette Ferry St','Willamette Ferry St','Willamette Ferry St','Willamette Ferry St','Willamette Ferry St','Willamette Ferry St','Willamette Ferry St','Willamette Ferry St','Willamette Ferry St','Willamette Ferry St','Willamette Ferry St','Willamette Ferry St','Willamette Ferry St','Willamette Ferry St','Willamette Ferry St','Willamette Ferry St','Willamette Ferry St','Willamette Ferry St','Willamette Ferry St','Willamette Ferry St','Willamette Ferry St','Willamette Ferry St','Willamette Ferry St','Willamette Ferry St','Willamette Ferry St','Willamette Ferry St','Willamette Ferry St','Willamette Ferry St','Willamette Ferry St','Willamette Ferry St','Willamette Ferry St','Willamette Ferry St','Willamette Ferry St','Willamette Ferry St','Willamette Ferry St','Willamette Ferry St','Willamette Ferry St','Willamette Ferry St','Willamette Ferry St','Willamette Ferry St','Willamette Ferry St','Willamette Ferry St','Willamette Ferry St','Willamette Ferry St','Willamette Ferry St','Willamette Ferry St','Willamette Ferry St','Willamette Ferry St','Willamette Ferry St','Willamette Ferry St','Willamette Ferry St','Willamette Ferry St','Willamette Ferry St','Willamette Ferry St','Willamette Ferry St','Willamette Ferry St','Willamette Ferry St','Willamette Ferry St','Willamette Ferry St','Willamette Ferry St','Willamette Ferry St','Willamette Ferry St','Willamette Ferry St','Willamette Ferry St','Willamette Ferry St','Willamette Ferry St','Willamette Ferry St','Willamette Ferry St','Willamette Ferry St','Willamette Ferry St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 
'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main 
St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 
'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main 
St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 
'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main 
St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista 
Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 
'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW 
Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW 
Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW 
Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW 
Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW 
Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW 
Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW 
Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW 
Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW 
Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW 
Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW 
Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW 
Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW 
Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW 
Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW 
Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd' , 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW 
Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW 
Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW 
Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW 
Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW 
Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW 
Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW s Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW 
Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr','NE Independence Hwy','NE Independence Hwy','NE Independence Hwy','NE Independence Hwy','NE Independence Hwy','NE Independence Hwy','NE Independence Hwy','NE Independence Hwy','NE Independence Hwy','NE Independence Hwy','NE Independence Hwy','NE Independence Hwy','NE Independence Hwy','NE Independence Hwy','NE Independence Hwy','NE Independence Hwy','NE Independence Hwy','NE 
Independence Hwy','NE Independence Hwy','NE Independence Hwy','NE Independence Hwy','NE Independence Hwy','NE Independence Hwy','NE Independence Hwy','NE Independence Hwy','NE Independence Hwy','NE Independence Hwy','NE Independence Hwy','NE Independence Hwy','NE Independence Hwy','NE Independence Hwy','NE Independence Hwy','NE Independence Hwy','NE Independence Hwy','NE Independence Hwy','NE Independence Hwy','NE Independence Hwy','NE Independence Hwy','NE Independence Hwy','NE Independence Hwy','NE Independence Hwy','NE Independence Hwy','NE Independence Hwy','NE Independence Hwy','NE Independence Hwy','NE Independence Hwy','NE Independence Hwy','NE Independence Hwy','NE Independence Hwy','NE Independence Hwy','NE Independence Hwy','NE Independence Hwy','NE Independence Hwy','NE Independence Hwy','NE Independence Hwy','NE Independence Hwy','NE Independence Hwy','NE Independence Hwy','NE Independence Hwy','NE Independence Hwy','NE Independence Hwy','NE Independence Hwy','NE Independence Hwy','NE Independence Hwy','NE Independence Hwy','NE Independence Hwy','NE Independence Hwy','NE Independence Hwy','NE Independence Hwy','NE Independence Hwy','NE Independence Hwy','NE Independence Hwy','NE Independence Hwy','NE Independence Hwy','NE Independence Hwy','NE Independence Hwy','NE Independence Hwy','NE Independence Hwy','NE Independence Hwy','NE Independence Hwy','NE Independence Hwy','NE Independence Hwy','NE Independence Hwy','NE Independence Hwy','NE Independence Hwy','NE Independence Hwy','NE Independence Hwy','NE Independence Hwy','NE Independence Hwy','NE Independence Hwy','NE Independence Hwy','NE Independence Hwy','NE Independence Hwy','NE Independence Hwy','NE Independence Hwy','NE Independence Hwy','NE Independence Hwy','NE Independence Hwy','NE Independence Hwy','NE Independence Hwy','NE Independence Hwy','NE Independence Hwy','NE Independence Hwy','NE Independence Hwy','NE Independence Hwy','NE Independence Hwy','NE Independence Hwy','NE Independence 
Hwy','NE Independence Hwy','NE Independence Hwy','NE Independence Hwy','NE Independence Hwy','NE Independence Hwy','NE Independence Hwy','NE Independence Hwy','NE Independence Hwy','NE Independence Hwy','NE Independence Hwy','NE Independence Hwy','NE Independence Hwy','NE Independence Hwy','NE Independence Hwy','NE Independence Hwy','NE Independence Hwy','NE Independence Hwy','NE Independence Hwy','NE Independence Hwy','NE Independence Hwy','NE Independence Hwy','NE Independence Hwy','NE Independence Hwy','NE Independence Hwy', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 
'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE a Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE 
Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE 
Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE 
Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE 
Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE 
Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE 
Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE 
Pettibone Dr', 'NE Pettibone Dr', 'NE Granger Ave', 'NE Granger Ave', 'NE Granger Ave', 'NE Granger Ave', 'NE Granger Ave', 'NE Granger Ave', 'NE Granger Ave', 'NE Granger Ave', 'NE Granger Ave', 'NE Granger Ave', 'NE Granger Ave', 'NE Granger Ave', 'NE Granger Ave', 'NE Granger Ave', 'NE Granger Ave', 'NE Granger Ave', 'NE Granger Ave', 'NE Granger Ave', 'NE Granger Ave', 'NE Granger Ave', 'NE Granger Ave', 'NE Granger Ave', 'NE Granger Ave', 'NE Granger Ave', 'NE Granger Ave', 'NE Granger Ave', 'NE Granger Ave', 'NE Granger Ave', 'NE Granger Ave', 'NE Granger Ave', 'NE Granger Ave', 'NE Granger Ave', 'NE Granger Ave', 'NE Granger Ave', 'NE Granger Ave', 'NE Granger Ave', 'NE Granger Ave', 'NE Granger Ave', 'NE Granger Ave', 'NE Granger Ave', 'NE Granger Ave', 'NE Granger Ave', 'NE Granger Ave', 'NE Granger Ave', 'NE Granger Ave', 'NE Granger Ave', 'NE Granger Ave', 'NE Granger Ave', 'NE Granger Ave', 'NE Granger Ave', 'NE Granger Ave', 'NE Granger Ave', 'NE Granger Ave', 'NE Granger Ave', 'NE Granger Ave', 'NE Granger Ave', 'NE Granger Ave', 'NE Granger Ave', 'NE Granger Ave', 'NE Granger Ave', 'NE Granger Ave', 'NE Granger Ave', 'NE Granger Ave', 'NE Granger Ave', 'NE Granger Ave', 'NE Granger Ave', 'NE Granger Ave', 'NE Granger Ave', 'NE Granger Ave', 'NE Granger Ave', 'NE Granger Ave', 'NE Granger Ave', 'NE Granger Ave', 'NE Granger Ave', 'NE Granger Ave', 'NE Granger Ave', 'NE Granger Ave', 'NE Granger Ave', 'NE Granger Ave', 'NE Granger Ave', 'NE Granger Ave', 'NE Granger Ave', 'NE Granger Ave', 'NE Granger Ave', 'NE Granger Ave', 'NE Granger Ave', 'NE Granger Ave', 'NE Granger Ave', 'NE Granger Ave', 'NE Granger Ave', 'NE Granger Ave', 'NE Granger Ave', 'NE Granger Ave', 'NE Granger Ave', 'NE Granger Ave', 'NE Granger Ave', 'NE Granger Ave', 'NE Granger Ave', 'NE Granger Ave', 'NE Granger Ave', 'NE Granger Ave', 'NE Granger Ave', 'NE Granger Ave', 'NE Granger Ave', 'NE Granger Ave', 'NE Granger Ave', 'NE Granger Ave', 'NE Granger Ave', 'NE Granger Ave', 'NE 
Granger Ave', 'NE Granger Ave', 'NE Granger Ave', 'NE Granger Ave', 'NE Granger Ave', 'NE Granger Ave', 'NE Granger Ave', 'NE Granger Ave', 'NE Granger Ave', 'NE Granger Ave', 'NE Granger Ave', 'NE Granger Ave', 'NE Granger Ave', 'NE Granger Ave', 'NE Granger Ave', 'NE Granger Ave', 'NE Granger Ave', 'NE Granger Ave', 'NE Granger Ave', 'NE Granger Ave', 'NE Granger Ave', 'NE Granger Ave', 'NE Granger Ave', 'NE Granger Ave', 'NE Granger Ave', 'NE Granger Ave', 'NE Granger Ave', 'NE Granger Ave', 'NE Granger Ave', 'NE Granger Ave', 'NE Granger Ave', 'NE Granger Ave', 'NE Granger Ave', 'NE Granger Ave', 'NE Granger Ave', 'NE Granger Ave', 'NE Granger Ave', 'NE Granger Ave', 'NE Granger Ave', 'NE Granger Ave', 'NE Granger Ave', 'NE Granger Ave', 'NE Granger Ave', 'NE Granger Ave', 'NE Granger Ave', 'NE Granger Ave', 'NE Granger Ave', 'NE Granger Ave', 'NE Granger Ave', 'NE Granger Ave', 'NE Granger Ave', 'NE Granger Ave', 'NE Granger Ave', 'NE Granger Ave', 'NE Granger Ave', 'NE Granger Ave', 'NE Granger Ave', 'NE Granger Ave', 'NE Granger Ave', 'NE Granger Ave', 'NE Granger Ave', 'NE Granger Ave', 'NE Granger Ave', 'NE Granger Ave', 'NE Granger Ave', 'NE Granger Ave', 'NE Granger Ave', 'NE Granger Ave', 'NE Granger Ave', 'NE Granger Ave', 'NE Granger Ave', 'NE Granger Ave', 'NE Granger Ave', 'NE Granger Ave', 'NE Granger Ave', 'NE Granger Ave', 'NE Granger Ave', 'NE Granger Ave', 'NE Granger Ave', 'NE Granger Ave', 'NE Granger Ave', 'NE Granger Ave', 'NE Granger Ave', 'NE Granger Ave', 'NE Granger Ave', 'NE Granger Ave', 'NE Granger Ave', 'NE Granger Ave', 'NE Granger Ave', 'NE Granger Ave', 'NE Granger Ave', 'NE Granger Ave', 'NE Granger Ave', 'NE Granger Ave', 'NE Granger Ave', 'NE Granger Ave', 'NE Granger Ave', 'NE Granger Ave', 'NE Granger Ave', 'NE Granger Ave', 'NE Granger Ave', 'NE Granger Ave', 'NE Granger Ave', 'NE Granger Ave', 'NE Granger Ave', 'NE Granger Ave', 'NE Granger Ave', 'NE Granger Ave', 'NE Granger Ave', 'NE Granger Ave', 'NE Granger Ave', 'NE 
Granger Ave', 'NE Granger Ave', 'NE Granger Ave', 'NE Granger Ave', 'NE Granger Ave', 'NE Granger Ave', 'NE Granger Ave', 'NE Granger Ave', 'NE Granger Ave', 'NE Granger Ave', 'NE Granger Ave', 'NE Granger Ave', 'NE Granger Ave', 'NE Granger Ave', 'NE Granger Ave', 'NE Granger Ave', 'NE Granger Ave', 'NE Granger Ave', 'NE Granger Ave', 'NE Granger Ave', 'NE Granger Ave', 'NE Granger Ave', 'NE Granger Ave', 'NE Granger Ave', 'NE Granger Ave', 'NE Granger Ave', 'NE Granger Ave', 'NE Granger Ave', 'NE Granger Ave', 'NE Granger Ave', 'NE Granger Ave', 'NE Granger Ave', 'NE Granger Ave', 'NE Granger Ave', 'NE Granger Ave', 'NE Granger Ave', 'NE Granger Ave', 'NE Granger Ave', 'NE Granger Ave', 'NE Granger Ave', 'NE Granger Ave', 'NE Granger Ave', 'NE Granger Ave', 'NE Granger Ave', 'NE Granger Ave', 'NE Granger Ave', 'NE Granger Ave', 'NE Granger Ave', 'NE Granger Ave', 'NE Granger Ave', 'NE Granger Ave', 'NE Granger Ave', 'NE Granger Ave', 'NE Granger Ave', 'NE Granger Ave', 'NE Granger Ave', 'NE Granger Ave', 'NE Granger Ave', 'NE Granger Ave', 'NE Granger Ave', 'NE Granger Ave', 'NE Granger Ave', 'NE Granger Ave', 'NE Granger Ave', 'NE Granger Ave', 'NE Granger Ave', 'NE Granger Ave', 'NE Granger Ave', 'NE Granger Ave', 'NE Granger Ave', 'NE Granger Ave', 'NE Granger Ave', 'NE Granger Ave', 'NE Granger Ave', 'NE Granger Ave', 'NE Granger Ave', 'NE Granger Ave', 'NE Granger Ave', 'NE Granger Ave', 'NE Granger Ave', 'NE Granger Ave', 'NE Granger Ave', 'NE Granger Ave', 'NE Granger Ave', 'NE Granger Ave', 'NE Granger Ave', 'NE Granger Ave', 'NE Granger Ave', 'NE Granger Ave', 'NE Granger Ave', 'NE Granger Ave', 'NE Granger Ave', 'NE Granger Ave', 'NE Granger Ave', 'NE Granger Ave', 'NE Granger Ave', 'NE Granger Ave', 'NE Granger Ave', 'NE Granger Ave', 'NE Granger Ave', 'NE Granger Ave', 'NE Granger Ave', 'NE Granger Ave', 'NE Granger Ave', 'NE Granger Ave', 'NE Granger Ave', 'NE Granger Ave', 'NE Granger Ave', 'NE Granger Ave', 'NE Granger Ave', 'NE Granger Ave', 'NE 
Granger Ave', 'NE Granger Ave', 'NE Granger Ave', 'NE Granger Ave', 'NE Granger Ave', 'NE Granger Ave', 'NE Granger Ave', 'NE Granger Ave', 'NE Granger Ave', 'NE Granger Ave', 'NE Granger Ave', 'NE Granger Ave', 'NE Granger Ave', 'NE Granger Ave', 'NE Granger Ave', 'NE Granger Ave', 'NE Granger Ave', 'NE Granger Ave', 'NE Granger Ave', 'NE Granger Ave', 'NE Granger Ave', 'NE Granger Ave', 'NE Granger Ave', 'NE Granger Ave', 'NE Granger Ave', 'NE Granger Ave', 'NE Granger Ave', 'NE Granger Ave', 'NE Granger Ave', 'NE Granger Ave', 'NE Granger Ave', 'NE Granger Ave', 'NE Granger Ave', 'NE Granger Ave', 'NE Granger Ave', 'NE Granger Ave', 'NE Granger Ave', 'NE Granger Ave', 'NE Granger Ave', 'NE Granger Ave', 'NE Granger Ave', 'NE Granger Ave', 'NE Granger Ave', 'NE Granger Ave', 'NE Granger Ave', 'NE Granger Ave', 'NE Granger Ave', 'NE Granger Ave', 'NE Granger Ave', 'NE Granger Ave', 'NE Granger Ave', 'NE Granger Ave', 'NE Granger Ave', 'NE Granger Ave', 'NE Granger Ave', 'NE Granger Ave', 'NE Granger Ave', 'NE Granger Ave', 'NE Granger Ave', 'NE Granger Ave', 'NE Granger Ave', 'NE Granger Ave', 'NE Granger Ave', 'NE Granger Ave', 'NE Granger Ave', 'NE Granger Ave', 'NE Granger Ave', 'NE Granger Ave', 'NE Granger Ave', 'NE Granger Ave', 'NE Granger Ave', 'NE Granger Ave', 'NE Granger Ave', 'NE Granger Ave', 'NE Granger Ave', 'NE Granger Ave', 'NE Granger Ave', 'NE Granger Ave', 'NE Granger Ave', 'NE Granger Ave', 'NE Granger Ave', 'NE Granger Ave', 'NE Granger Ave', 'NE Granger Ave', 'NE Granger Ave', 'NE Granger Ave', 'NE Granger Ave', 'NE Granger Ave', 'NE Granger Ave', 'NE Granger Ave', 'NE Granger Ave', 'NE Granger Ave', 'NE Granger Ave', 'NE Granger Ave', 'NE Granger Ave', 'NE Granger Ave', 'NE Granger Ave', 'NE Granger Ave', 'NE Granger Ave', 'NE Granger Ave', 'NE Granger Ave', 'NE Granger Ave', 'NE Granger Ave', 'NE Granger Ave', 'NE Granger Ave', 'NE Granger Ave', 'NE Granger Ave', 'NE Granger Ave', 'NE Granger Ave', 'NE Granger Ave', 'NE Granger Ave', 'NE 
Granger Ave', 'NE Granger Ave', 'NE Granger Ave', 'NE Granger Ave', 'NE Granger Ave', 'NE Granger Ave', 'NE Granger Ave', 'NE Granger Ave', 'NE Granger Ave', 'NE Granger Ave', 'NE Granger Ave', 'NE Granger Ave', 'NE Granger Ave', 'NE Granger Ave', 'NE Granger Ave', 'NE Granger Ave', 'NE Granger Ave', 'NE Granger Ave', 'NE Granger Ave', 'NE Granger Ave', 'NE Granger Ave', 'NE Granger Ave', 'NE Granger Ave', 'NE Granger Ave', 'NE Granger Ave', 'NE Granger Ave', 'NE Granger Ave', 'NE Granger Ave', 'NE Granger Ave', 'NE Granger Ave', 'NE Granger Ave', 'NE Granger Ave', 'NE Granger Ave', 'NE Granger Ave', 'NE Granger Ave', 'NE Granger Ave', 'NE Granger Ave', 'NE Granger Ave', 'NE Granger Ave', 'NE Granger Ave', 'NE Granger Ave', 'NE Granger Ave', 'NE Granger Ave', 'NE Granger Ave', 'NE Granger Ave', 'NE Granger Ave', 'NE Granger Ave', 'NE Granger Ave', 'NE Granger Ave', 'NE Granger Ave', 'NE Granger Ave', 'NE Granger Ave', 'NE Granger Ave', 'NE Granger Ave', 'NE Granger Ave', 'NE Granger Ave', 'NE Granger Ave', 'NE Granger Ave', 'NE Granger Ave', 'NE Granger Ave', 'NE Granger Ave', 'NE Granger Ave', 'NE Granger Ave', 'NE Granger Ave', 'NE Granger Ave', 'NE Granger Ave', 'NE Granger Ave', 'NE Granger Ave', 'NE Granger Ave', 'NE Granger Ave', 'NE Granger Ave', 'NE Granger Ave', 'NE Granger Ave', 'NE Granger Ave', 'NE Granger Ave', 'NE Granger Ave', 'NE Granger Ave', 'NE Granger Ave', 'NE Granger Ave', 'NE Granger Ave', 'NE Granger Ave', 'NE Granger Ave', 'NE Granger Ave', 'NE Granger Ave', 'NE Granger Ave', 'NE Granger Ave', 'NE Granger Ave', 'NE Granger Ave', 'NE Granger Ave', 'NE Granger Ave', 'NE Granger Ave', 'NE Granger Ave', 'NE Granger Ave', 'NE Granger Ave', 'NE Granger Ave', 'NE Granger Ave', 'NE Granger Ave', 'NE Granger Ave', 'NE Elliott Cir', 'NE Elliott Cir', 'NE Elliott Cir', 'NE Elliott Cir', 'NE Elliott Cir', 'NE Elliott Cir', 'NE Elliott Cir', 'NE Elliott Cir', 'NE Elliott Cir', 'NE Elliott Cir', 'NE Elliott Cir', 'NE Elliott Cir', 'NE Elliott Cir', 'NE 
Elliott Cir', 'NE Elliott Cir', 'NE Elliott Cir', 'NE Elliott Cir', 'NE Elliott Cir', 'NE Elliott Cir', 'NE Elliott Cir', 'NE Elliott Cir', 'NE Elliott Cir', 'NE Elliott Cir', 'NE Elliott Cir', 'NE Elliott Cir', 'NE Elliott Cir', 'NE Elliott Cir', 'NE Elliott Cir', 'NE Elliott Cir', 'NE Elliott Cir', 'NE Elliott Cir', 'NE Elliott Cir', 'NE Elliott Cir', 'NE Elliott Cir', 'NE Elliott Cir', 'NE Elliott Cir', 'NE Elliott Cir', 'NE Elliott Cir', 'NE Elliott Cir', 'NE Elliott Cir', 'NE Elliott Cir', 'NE Elliott Cir', 'NE Elliott Cir', 'NE Elliott Cir', 'NE Elliott Cir', 'NE Elliott Cir', 'NE Elliott Cir', 'NE Elliott Cir', 'NE Elliott Cir', 'NE Elliott Cir', 'NE Elliott Cir', 'NE Elliott Cir', 'NE Elliott Cir', 'NE Elliott Cir', 'NE Elliott Cir', 'NE Elliott Cir', 'NE Elliott Cir', 'NE Elliott Cir', 'NE Elliott Cir', 'NE Elliott Cir', 'NE Elliott Cir', 'NE Elliott Cir', 'NE Elliott Cir', 'NE Elliott Cir', 'NE Elliott Cir', 'NE Elliott Cir', 'NE Elliott Cir', 'NE Elliott Cir', 'NE Elliott Cir', 'NE Elliott Cir', 'NE Elliott Cir', 'NE Elliott Cir', 'NE Elliott Cir', 'NE Elliott Cir', 'NE Elliott Cir', 'NE Elliott Cir', 'NE Elliott Cir', 'NE Elliott Cir', 'NE Elliott Cir', 'NE Elliott Cir', 'NE Elliott Cir', 'NE Elliott Cir', 'NE Elliott Cir', 'NE Elliott Cir', 'NE Elliott Cir', 'NE Elliott Cir', 'NE Elliott Cir', 'NE Elliott Cir', 'NE Elliott Cir', 'NE Elliott Cir', 'NE Elliott Cir', 'NE Elliott Cir', 'NE Elliott Cir', 'NE Elliott Cir', 'NE Elliott Cir', 'NE Elliott Cir', 'NE Elliott Cir', 'NE Elliott Cir', 'NE Elliott Cir', 'NE Elliott Cir', 'NE Elliott Cir', 'NE Elliott Cir', 'NE Elliott Cir', 'NE Elliott Cir', 'NE Elliott Cir', 'NE Elliott Cir', 'NE Elliott Cir', 'NE Elliott Cir', 'NE Elliott Cir', 'NE Elliott Cir', 'NE Elliott Cir', 'NE Elliott Cir', 'NE Elliott Cir', 'NE Elliott Cir', 'NE Elliott Cir', 'NE Elliott Cir', 'NE Elliott Cir', 'NE Elliott Cir', 'NE Elliott Cir', 'NE Elliott Cir', 'NE Elliott Cir', 'NE Elliott Cir', 'NE Elliott Cir', 'NE Elliott Cir', 'NE 
Elliott Cir', 'NE Elliott Cir', 'NE Elliott Cir', 'NE Elliott Cir', 'NE Elliott Cir', 'NE Elliott Cir', 'NE Elliott Cir', 'NE Elliott Cir', 'NE Elliott Cir', 'NE Elliott Cir', 'NE Elliott Cir', 'NE Elliott Cir', 'NE Elliott Cir', 'NE Elliott Cir', 'NE Elliott Cir', 'NE Elliott Cir', 'NE Elliott Cir', 'NE Elliott Cir', 'NE Elliott Cir', 'NE Elliott Cir', 'NE Elliott Cir', 'NE Elliott Cir', 'NE Elliott Cir', 'NE Elliott Cir', 'NE Elliott Cir', 'NE Elliott Cir', 'NE Elliott Cir', 'NE Elliott Cir', 'NE Elliott Cir', 'NE Elliott Cir', 'NE Elliott Cir', 'NE Elliott Cir', 'NE Elliott Cir', 'NE Elliott Cir', 'NE Elliott Cir', 'NE Elliott Cir', 'NE Elliott Cir', 'NE Elliott Cir', 'NE Elliott Cir', 'NE Elliott Cir', 'NE Elliott Cir', 'NE Elliott Cir', 'NE Elliott Cir', 'NE Elliott Cir', 'NE Elliott Cir', 'NE Elliott Cir', 'NE Elliott Cir', 'NE Elliott Cir', 'NE Elliott Cir', 'NE Elliott Cir', 'NE Elliott Cir', 'NE Elliott Cir', 'NE Elliott Cir', 'NE Elliott Cir', 'NE Elliott Cir', 'NE Elliott Cir', 'NE Elliott Cir', 'NE Elliott Cir', 'NE Elliott Cir', 'NE Elliott Cir', 'NE Elliott Cir', 'NE Elliott Cir', 'NE Elliott Cir', 'NE Elliott Cir', 'NE Elliott Cir', 'NE Elliott Cir', 'NE Elliott Cir', 'NE Elliott Cir', 'NE Elliott Cir', 'NE Elliott Cir', 'NE Cir', 'NE Elliott Cir', 'NE Elliott Cir', 'NE Elliott Cir', 'NE Elliott Cir', 'NE Elliott Cir', 'NE Elliott Cir', 'NE Elliott Cir', 'NE Elliott Cir', 'NE Elliott Cir', 'NE Elliott Cir', 'NE Elliott Cir', 'NE Elliott Cir', 'NE Elliott Cir', 'NE Elliott Cir', 'NE Elliott Cir', 'NE Elliott Cir', 'NE Elliott Cir', 'NE Elliott Cir', 'NE Elliott Cir', 'NE Elliott Cir', 'NE Elliott Cir', 'NE Elliott Cir', 'NE Elliott Cir', 'NE Elliott Cir', 'NE Elliott Cir', 'NE Elliott Cir', 'NE Elliott Cir', 'NE Elliott Cir', 'NE Elliott Cir', 'NE Elliott Cir', 'NE Elliott Cir', 'NE Elliott Cir', 'NE Elliott Cir', 'NE Elliott Cir', 'NE Elliott Cir', 'NE Elliott Cir', 'NE Elliott Cir', 'NE Elliott Cir', 'NE Elliott Cir', 'NE Elliott Cir', 'NE Elliott 
Cir', 'NE Elliott Cir', 'NE Elliott Cir', 'NE Elliott Cir', 'NE Elliott Cir', 'NE Elliott Cir', 'NE Elliott Cir', 'NE Elliott Cir', 'NE Elliott Cir', 'NE Elliott Cir', 'NE Elliott Cir', 'NE Elliott Cir', 'NE Elliott Cir', 'NE Elliott Cir', 'NE Elliott Cir', 'NE Elliott Cir', 'NE Elliott Cir', 'NE Elliott Cir', 'NE Elliott Cir', 'NE Elliott Cir', 'NE Elliott Cir', 'NE Elliott Cir', 'NE Elliott Cir', 'NE Elliott Cir', 'NE Elliott Cir', 'NE Elliott Cir', 'NE Elliott Cir', 'NE Elliott Cir', 'NE Elliott Cir', 'NE Elliott Cir', 'NE Elliott Cir', 'NE Elliott Cir', 'NE Elliott Cir', 'NE Elliott Cir', 'NE Elliott Cir', 'NE Elliott Cir', 'NE Elliott Cir', 'NE Elliott Cir', 'NE Elliott Cir', 'NE Elliott Cir', 'NE Elliott Cir', 'NE Elliott Cir', 'NE Elliott Cir', 'NE Elliott Cir', 'NE Elliott Cir', 'NE Elliott Cir', 'NE Elliott Cir', 'NE Elliott Cir', 'NE Elliott Cir', 'NE Elliott Cir', 'NE Elliott Cir', 'NE Elliott Cir', 'NE Elliott Cir', 'NE Elliott Cir', 'NE Elliott Cir', 'NE Elliott Cir', 'NE Elliott Cir', 'NE Elliott Cir', 'NE Elliott Cir', 'NE Elliott Cir', 'NE Elliott Cir', 'NE Elliott Cir', 'NE Elliott Cir', 'NE Elliott Cir', 'NE Elliott Cir', 'NE Elliott Cir', 'NE Elliott Cir', 'NE Elliott Cir', 'NE Elliott Cir', 'NE Elliott Cir', 'NE Elliott Cir', 'NE Elliott Cir', 'NE Elliott Cir', 'NE Elliott Cir', 'NE Elliott Cir', 'NE Elliott Cir', 'NE Elliott Cir', 'NE Elliott Cir', 'NE Elliott Cir', "NW Spruce","NW Spruce","NW Spruce","NW Spruce","goa","goa","goa","NW Spruce","NW Spruce","NW Spruce","NW Spruce","NW Spruce","NW Spruce","NW Spruce","NW Spruce","NW Spruce","NW Spruce","NW Spruce","NW Spruce","NW Spruce","NW Spruce","NW Spruce","NW Spruce","NW Spruce","NW Spruce","NW Spruce","NW Spruce","NW Spruce","NW Spruce","NW Spruce","NW Spruce","NW Spruce","NW Spruce","NW Spruce","NW Spruce","NW Spruce","NW Highland","NW Highland","NW Highland","NW Highland","NW Highland","NW Highland","NW Highland","NW Highland","NW Highland","NW Highland","NW goa","NW Highland","NW 
Highland","NW Highland","NW Highland","NW Highland","NW Highland","NW Highland","NW Highland","NW Highland","NW Highland","NW Highland","NW Highland","NW Highland","NW Highland","NW Highland","NW Highland","NW Highland","NW Highland",'NW Crescent Valley Dr','NW Crescent Valley Dr','NW Crescent Valley Dr','NW Crescent Valley Dr','NW Crescent Valley Dr','NW Crescent Valley Dr','NW Crescent Valley Dr','NW Crescent Valley Dr','NW Crescent Valley Dr','NW Crescent Valley Dr','NW Crescent Valley Dr','NW Crescent Valley Dr','NW Crescent Valley Dr','NW Crescent Valley Dr','NW Crescent Valley Dr','NW Crescent Valley Dr','NW Crescent Valley Dr','NW Crescent Valley Dr','NW Crescent Valley Dr','NW Crescent Valley Dr','Hoffman Rd','Hoffman Rd','Hoffman Rd','Hoffman Rd','Hoffman Rd','Hoffman Rd','Hoffman Rd','Hoffman Rd','Hoffman Rd','Hoffman Rd','Hoffman Rd','Hoffman Rd','Hoffman Rd','Hoffman Rd','Hoffman Rd','Hoffman Rd','Hoffman Rd','Hoffman Rd','Hoffman Rd','Hoffman Rd','Hoffman Rd','Hoffman Rd','Hoffman Rd','Hoffman Rd','Hoffman Rd','Hoffman Rd','Hoffman Rd','Hoffman Rd','Hoffman Rd','Hoffman Rd','Hoffman Rd','Hoffman Rd','Hoffman Rd','Hoffman Rd','Hoffman Rd','Hoffman Rd','Hoffman Rd','Hoffman Rd','Hoffman Rd','Hoffman Rd','Hoffman Rd','Hoffman Rd','Hoffman Rd','Hoffman Rd','Hoffman Rd','Hoffman Rd','Hoffman Rd','Hoffman Rd','Hoffman Rd','Hoffman Rd','Hoffman Rd','Hoffman Rd','Hoffman Rd','Hoffman Rd','Hoffman Rd','NW Crescent Valley Dr','NW Crescent Valley Dr','NW Crescent Valley Dr','NW Crescent Valley Dr','NW Crescent Valley Dr','NW Crescent Valley Dr','NW Crescent Valley Dr','NW Crescent Valley Dr','NW Crescent Valley Dr','NW Crescent Valley Dr','NW Crescent Valley Dr','NW Crescent Valley Dr','NW Crescent Valley Dr','NW Crescent Valley Dr','NW Crescent Valley Dr','NW Crescent Valley Dr','NW Crescent Valley Dr','NW Crescent Valley Dr','NW Crescent Valley Dr','NW Crescent Valley Dr','NW Crescent Valley Dr','NW Crescent Valley Dr','NW Crescent Valley Dr','NW Crescent 
Valley Dr','NW Crescent Valley Dr','NW Crescent Valley Dr','NW Crescent Valley Dr','NW Crescent Valley Dr','NW Crescent Valley Dr','NW Crescent Valley Dr','NW Crescent Valley Dr','NW Crescent Valley Dr','NW Crescent Valley Dr','NW Crescent Valley Dr','NW Crescent Valley Dr','NW Lewisburg Ave','NW Lewisburg Ave','NW Lewisburg Ave','NW Lewisburg Ave','NW Lewisburg Ave','NW Lewisburg Ave','NW Lewisburg Ave','NW Lewisburg Ave','NW Lewisburg Ave','NW Lewisburg Ave','NW Lewisburg Ave','NW Lewisburg Ave','NW Lewisburg Ave','NW Lewisburg Ave','NW Lewisburg Ave','NW Lewisburg Ave','NW Lewisburg Ave','NW Lewisburg Ave','NW Lewisburg Ave','NW Lewisburg Ave','NW Lewisburg Ave','NW Lewisburg Ave','NW Lewisburg Ave','NW Lewisburg Ave','NW Lewisburg Ave','NW Lewisburg Ave','NW Lewisburg Ave','NW Lewisburg Ave','NW Lewisburg Ave','NW Lewisburg Ave','NW Lewisburg Ave','NW Lewisburg Ave','NW Lewisburg Ave','NW Lewisburg Ave','NW Lewisburg Ave','NW Lewisburg Ave','NW Lewisburg Ave','NW Lewisburg Ave','NW Lewisburg Ave','NW Lewisburg Ave','NW Lewisburg Ave','NW Lewisburg Ave','NW Lewisburg Ave','NW Lewisburg Ave','NW Lewisburg Ave','NW Lewisburg Ave','NW Lewisburg Ave','NW Lewisburg Ave','NW Lewisburg Ave','NW Lewisburg Ave','NW Lewisburg Ave','NW Lewisburg Ave','NW Lewisburg Ave','NW Lewisburg Ave','NW Lewisburg Ave','NW Lewisburg Ave','NW Lewisburg Ave','NW Lewisburg Ave','NW Lewisburg Ave','NW Lewisburg Ave','NW Lewisburg Ave','NW Lewisburg Ave','NW Lewisburg Ave','NW Lewisburg Ave','NW Sulphur Springs Rd','NW Sulphur Springs Rd','NW Sulphur Springs Rd','NW Sulphur Springs Rd','NW Sulphur Springs Rd','NW Sulphur Springs Rd','NW Sulphur Springs Rd','NW Sulphur Springs Rd','NW Sulphur Springs Rd','NW Sulphur Springs Rd','NW Sulphur Springs Rd','NW Sulphur Springs Rd','NW Sulphur Springs Rd','NW Sulphur Springs Rd','NW Sulphur Springs Rd','NW Sulphur Springs Rd','NW Sulphur Springs Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico 
Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico 
Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico 
Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico 
Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd', 'Airlie Rd', 'Airlie Rd', 'Airlie Rd', 'Airlie Rd', 'Airlie Rd', 'Airlie Rd', 'Airlie Rd', 'Airlie Rd', 'Airlie Rd', 'Airlie Rd', 'Airlie Rd', 'Airlie Rd', 'Airlie Rd', 'Airlie Rd', 'Airlie Rd', 'Airlie Rd', 'Airlie Rd', 'Airlie Rd', 'Airlie Rd', 'Airlie Rd', 'Airlie Rd', 'Airlie Rd', 'Airlie Rd', 'Airlie Rd', 'Airlie Rd', 'Airlie Rd', 'Airlie Rd', 'Airlie Rd', 'Airlie Rd', 'Airlie Rd', 'Airlie Rd', 'Airlie Rd', 'Airlie Rd', 'Airlie Rd', 'Airlie Rd', 'Airlie Rd', 'Airlie Rd', 'Airlie Rd', 'Airlie Rd', 'Airlie Rd', 'Airlie Rd', 'Airlie Rd', 'Airlie Rd', 'Airlie Rd', 'Airlie Rd', 'Airlie Rd', 'Airlie Rd', 'Airlie Rd', 'Airlie Rd', 'Airlie Rd', 'Airlie Rd', 'Airlie Rd', 'Airlie Rd', 'Airlie Rd', 'Airlie Rd', 'Airlie Rd', 'Airlie Rd', 'Airlie Rd', 'Airlie Rd', 'Airlie Rd', 'Airlie Rd', 'Airlie Rd', 'Airlie Rd', 'Airlie Rd', 'Airlie Rd', 'Airlie Rd', 'Airlie Rd', 'Airlie Rd', 'Airlie Rd', 'Airlie Rd', 'Airlie Rd', 'Airlie Rd', 'Airlie Rd', 'Airlie Rd', 'Airlie Rd', 'Airlie Rd', 'Airlie Rd', 'Airlie Rd', 'Airlie Rd', 'Airlie Rd', 'Airlie Rd', 'Airlie Rd', 'Airlie Rd', 'Airlie Rd', 'Airlie Rd', 'Airlie Rd', 'Airlie Rd', 'Airlie Rd', 'Airlie Rd', 'Airlie Rd', 'Airlie Rd', 'Airlie Rd', 'Airlie Rd', 'Airlie Rd', 'Airlie Rd', 'Airlie Rd', 'Airlie Rd', 'Airlie Rd', 'Airlie Rd', 'Airlie Rd', 'Airlie Rd', 'Airlie Rd', 'Airlie Rd', 'Airlie Rd', 'Airlie Rd', 'Airlie Rd', 'Airlie Rd', 'Airlie Rd', 'Airlie Rd', 'Airlie Rd', 'Airlie Rd', 'Airlie Rd', 'Airlie Rd', 'Airlie Rd', 'Airlie Rd', 'Airlie Rd', 'Airlie Rd', 'Airlie Rd', 'Airlie Rd', 'Airlie Rd', 'Airlie Rd', 'Airlie Rd', 'Airlie Rd', 'Airlie Rd', 'Airlie Rd', 
'Airlie Rd', 'Airlie Rd', 'Airlie Rd', 'Airlie Rd', 'Airlie Rd', 'Airlie Rd', 'Airlie Rd', 'Airlie Rd', 'Airlie Rd', 'Airlie Rd', 'Airlie Rd', 'Airlie Rd', 'Airlie Rd', 'Airlie Rd', 'Airlie Rd', 'Airlie Rd', 'Airlie Rd', 'Airlie Rd', 'Airlie Rd', 'Airlie Rd', 'Airlie Rd', 'Airlie Rd', 'Airlie Rd', 'Airlie Rd', 'Airlie Rd', 'Airlie Rd', 'Airlie Rd', 'Airlie Rd', 'Airlie Rd', 'Airlie Rd', 'Airlie Rd', 'Airlie Rd', 'Airlie Rd', 'Airlie Rd', 'Airlie Rd', 'Airlie Rd', 'Airlie Rd', 'Airlie Rd', 'Airlie Rd', 'Airlie Rd', 'Airlie Rd', 'Airlie Rd', 'Airlie Rd', 'Airlie Rd', 'Airlie Rd', 'Knox St S', 'Knox St S', 'Knox St S', 'Knox St S', 'Knox St S', 'Knox St S', 'Knox St S', 'Knox St S', 'Knox St S', 'Knox St S', 'Knox St S', 'Knox St S', 'Knox St S', 'Knox St S', 'Knox St S', 'Knox St S', 'Knox St S', 'Knox St S', 'Knox St S', 'Knox St S', 'Knox St S', 'Knox St S', 'Knox St S', 'Knox St S', 'Knox St S', 'Knox St S', 'Knox St S', 'Knox St S', 'Knox St S', 'Knox St S', 'Knox St S', 'Knox St S', 'Knox St S', 'Knox St S', 'Knox St S', 'Knox St S', 'Knox St S', 'Knox St S', 'Knox St S', 'Knox St S', 'Knox St S', 'Knox St S', 'Knox St S', 'Knox St S', 'Knox St S', 'Knox St S', 'Knox St S', 'Knox St S', 'Knox St S', 'Knox St S', 'Knox St S', 'Knox St S', 'Knox St S', 'Knox St S', 'Knox St S', 'Knox St S', 'Knox St S', 'Knox St S', 'Knox St S', 'Knox St S', 'Knox St S', 'Knox St S', 'Knox St S', 'Knox St S', 'Knox St S', 'Knox St S', 'Knox St S', 'Knox St S', 'Knox St S', 'Knox St S', 'Knox St S', 'Knox St S', 'Knox St S', 'Knox St S', 'Knox St S', 'Knox St S', 'Knox St S', 'Knox St S', 'Knox St S', 'Knox St S', 'Knox St S', 'Knox St S', 'Knox St S', 'Knox St S', 'Knox St S', 'Knox St S', 'Knox St S', 'Knox St S', 'Knox St S', 'Knox St S', 'Knox St S', 'Knox St S', 'Knox St S', 'Knox St S', 'Knox St S', 'Knox St S', 'Knox St S', 'Knox St S', 'Knox St S', 'Knox St S', 'Knox St S', 'Knox St S', 'Knox St S', 'Knox St S', 'Knox St S', 'Knox St S', 'Knox St S', 'Knox St S', 'Knox St 
S', 'Knox St S', 'Knox St S', 'Knox St S', 'Knox St S', 'Knox St S', 'Knox St S', 'Knox St S', 'Knox St S', 'Knox St S', 'Knox St S', 'Knox St S', 'Knox St S', 'Knox St S', 'Knox St S', 'Knox St S', 'Knox St S', 'Knox St S', 'Knox St S', 'Knox St S', 'Knox St S', 'Knox St S', 'Knox St S', 'Knox St S', 'Knox St S', 'Knox St S', 'Knox St S', 'Knox St S', 'Knox St S', 'Knox St S', 'Knox St S', 'Knox St S', 'Knox St S', 'Knox St S', 'Knox St S', 'Knox St S', 'Knox St S', 'Knox St S', 'Knox St S', 'Knox St S', 'Knox St S', 'Knox St S', 'Knox St S', 'Knox St S', 'Knox St S', 'Knox St S', 'Knox St S', 'Knox St S', 'Knox St S', 'Knox St S', 'Knox St S', 'Knox St S', 'Knox St S', 'Knox St S', 'Knox St S', 'Knox St S', 'Knox St S', 'Knox St S', 'Knox St S', 'Knox St S', 'Knox St S', 'Knox St S', 'Knox St S', 'Knox St S', 'Knox St S', 'Knox St S', 'Knox St S', 'Knox St S', 'Knox St S', 'Knox St S', 'Knox St S', 'Knox St S', 'Knox St S', 'Knox St S', 'Knox St S', 'Knox St S', 'Knox St S', 'Knox St S', 'Knox St S', 'Knox St S', 'Knox St S', 'Knox St S', 'Knox St S', 'Knox St S', 'Knox St S', 'Knox St S', 'Knox St S', 'Knox St S', 'Knox St S', 'Knox St S', 'Knox St S', 'Knox St S', 'Knox St S', 'Knox St S', 'Knox St S', 'Knox St S', 'Knox St S', 'Knox St S', 'Knox St S', 'Knox St S', 'Knox St S', 'Knox St S', 'Knox St S', 'Knox St S', 'Knox St S', 'Knox St S', 'Knox St S', 'Knox St S', 'Knox St S', 'Knox St S','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico Rd','Tampico 
Rd','Tampico Rd','Knox St S', 'Knox St S', 'Knox St S', 'Knox St S', 'Knox St S', 'Knox St S', 'Knox St S', 'Knox St S', 'Knox St S', 'Knox St S', 'Knox St S', 'Knox St S', 'Knox St S', 'Knox St S', 'Knox St S', 'Knox St S', 'Knox St S', 'Knox St S', 'Knox St S', 'Knox St S', 'Knox St S', 'Knox St S', 'Knox St S', 'Knox St S', 'Knox St S', 'Knox St S', 'Knox St S', 'Knox St S', 'Knox St S', 'Knox St S', 'Knox St S', 'Knox St S', 'Knox St S', 'Knox St S', 'Knox St S', 'Knox St S', 'Knox St S', 'Knox St S', 'Knox St S', 'Knox St S', 'Knox St S', 'Knox St S', 'Knox St S', 'Knox St S', 'Knox St S', 'Knox St S', 'Knox St S', 'Knox St S', 'Knox St S', 'Knox St S', 'Knox St S', 'Knox St S', 'Knox St S', 'Knox St S', 'Knox St S', 'Knox St S', 'Knox St S', 'Knox St S', 'Knox St S', 'Knox St S', 'Knox St S', 'Knox St S', 'Knox St S', 'Knox St S', 'Knox St S', 'Knox St S', 'Knox St S', 'Knox St S', 'Knox St S', 'Knox St S', 'Knox St S', 'Knox St S', 'Knox St S', 'Knox St S', 'Knox St S', 'Knox St S', 'Knox St S', 'Knox St S', 'Knox St S', 'Knox St S', 'Knox St S', 'Knox St S', 'Knox St S', 'Knox St S', 'Knox St S', 'Knox St S', 'Knox St S', 'Knox St S', 'Knox St S', 'Knox St S', 'Knox St S', 'Knox St S', 'Knox St S', 'Knox St S', 'Knox St S', 'Knox St S', 'Knox St S', 'Knox St S', 'Knox St S', 'Knox St S', 'Knox St S', 'Knox St S', 'Knox St S', 'Knox St S', 'Knox St S', 'Knox St S', 'Knox St S', 'Knox St S', 'Knox St S', 'Knox St S', 'Knox St S', 'Knox St S', 'Knox St S', 'Knox St S', 'Knox St S', 'Knox St S', 'Knox St S', 'Knox St S', 'Knox St S', 'Knox St S', 'Knox St S', 'Knox St S', 'Knox St S', 'Knox St S', 'Knox St S', 'Knox St S', 'Knox St S', 'Knox St S', 'Knox St S', 'Knox St S', 'Knox St S', 'Knox St S', 'Knox St S', 'Knox St S', 'Knox St S', 'Knox St S', 'Knox St S', 'Knox St S', 'Knox St S', 'Knox St S', 'Knox St S', 'Knox St S', 'Warren St S', 'Warren St S', 'Warren St S', 'Warren St S', 'Warren St S', 'Warren St S', 'Warren St S', 'Warren St S', 'Warren St S', 
'Warren St S', 'Warren St S', 'Warren St S', 'Warren St S', 'Warren St S', 'Warren St S', 'Warren St S', 'Warren St S', 'Warren St S', 'Warren St S', 'Warren St S', 'Warren St S', 'Warren St S', 'Warren St S', 'Warren St S', 'Warren St S', 'Warren St S', 'Warren St S', 'Warren St S', 'Warren St S', 'Warren St S', 'Warren St S', 'Warren St S', 'Warren St S', 'Warren St S', 'Warren St S', 'Warren St S', 'Warren St S', 'Warren St S', 'Warren St S', 'Warren St S', 'Warren St S', 'Warren St S', 'Warren St S', 'Warren St S', 'Warren St S', 'Warren St S', 'Warren St S', 'Warren St S', 'Warren St S', 'Warren St S', 'Warren St S', 'Warren St S', 'Warren St S', 'Warren St S', 'Warren St S', 'Warren St S', 'Warren St S', 'Warren St S', 'Warren St S', 'Warren St S', 'Warren St S', 'Warren St S', 'Warren St S', 'Warren St S', 'Warren St S', 'Warren St S', 'Warren St S', 'Warren St S', 'Warren St S', 'Warren St S', 'Warren St S', 'Warren St S', 'Warren St S', 'Warren St S', 'Warren St S', 'Warren St S', 'Warren St S', 'Warren St S', 'Warren St S', 'Warren St S', 'Warren St S', 'Warren St S', 'Warren St S', 'Warren St S', 'Warren St S', 'Warren St S', 'Warren St S', 'Warren St S', 'Warren St S', 'Warren St S', 'Warren St S', 'Warren St S', 'Warren St S', 'Warren St S', 'Warren St S', 'Warren St S', 'Warren St S', 'Warren St S', 'Warren St S', 'Warren St S', 'Warren St S', 'Warren St S', 'Warren St S', 'Warren St S', 'Warren St S', 'Warren St S', 'Warren St S', 'Warren St S', 'Warren St S', 'Warren St S', 'Warren St S', 'Warren St S', 'Warren St S', 'Warren St S', 'Warren St S', 'Warren St S', 'Warren St S', 'Warren St S', 'Warren St S', 'Warren St S', 'Warren St S', 'Warren St S', 'Warren St S', 'Warren St S', 'Warren St S', 'Warren St S', 'Warren St S', 'Warren St S', 'Warren St S', 'Warren St S', 'Warren St S', 'Warren St S', 'Warren St S', 'Warren St S', 'Warren St S', 'Warren St S', 'Warren St S', 'Warren St S', 'Warren St S', 'Warren St S', 'Warren St S', 'Warren St S', 
'Warren St S', 'Warren St S', 'Warren St S', 'Warren St S', 'Warren St S', 'Warren St S', 'Warren St S', 'Warren St S', 'Warren St S', 'Warren St S', 'Warren St S', 'Warren St S', 'Warren St S', 'Warren St S', 'Warren St S', 'Warren St S', 'Warren St S', 'Warren St S', 'Warren St S', 'Warren St S', 'Warren St S', 'Warren St S', 'Warren St S', 'Warren St S', 'Warren St S', 'Warren St S', 'Warren St S', 'Warren St S', 'Warren St S', 'Warren St S', 'Warren St S', 'Warren St S', 'Warren St S', 'Warren St S', 'Warren St S', 'Warren St S', 'Warren St S', 'Warren St S', 'Warren St S', 'Warren St S', 'Warren St S', 'Warren St S', 'Warren St S', 'Warren St S', 'Warren St S', 'Warren St S', 'Warren St S', 'Warren St S', 'Warren St S', 'Warren St S', 'Warren St S', 'Warren St S', 'Warren St S', 'Warren St S', 'Warren St S', 'Warren St S', 'Warren St S', 'Warren St S', 'Warren St S', 'Warren St S', 'Warren St S', 'Warren St S', 'Warren St S', 'Warren St S', 'Warren St S', 'Warren St S', 'Warren St S', 'Warren St S', 'Warren St S', 'Warren St S', 'Warren St S', 'Warren St S', 'Warren St S', 'Warren St S', 'Warren St S', 'Warren St S', 'Warren St S', 'Warren St S', 'Warren St S', 'Warren St S', 'Warren St S', 'Warren St S', 'Warren St S', 'Warren St S', 'Warren St S', 'Warren St S', 'Warren St S', 'Warren St S', 'Warren St S', 'Warren St S', 'Warren St S', 'Warren St S', 'Warren St S', 'Warren St S', 'Warren St S', 'Warren St S', 'Warren St S', 'Warren St S', 'Warren St S', 'Warren St S', 'Warren St S', 'Warren St S', 'Warren St S', 'Warren St S', 'Warren St S', 'Warren St S', 'Warren St S', 'Warren St S', 'Warren St S', 'Warren St S', 'Warren St S', 'Warren St S', 'Warren St S', 'Warren St S', 'Warren St S', 'Warren St S', 'Warren St S', 'Warren St S', 'Warren St S', 'Warren St S', 'Warren St S', 'Warren St S', 'Warren St S', 'Warren St S', 'Warren St S', 'Warren St S', 'Warren St S', 'Warren St S', 'Warren St S', 'Warren St S', 'Warren St S', 'Warren St S', 'Warren St S', 
'Warren St S', 'Warren St S', 'Warren St S', 'Warren St S', 'Warren St S', 'Warren St S', 'Warren St S', 'Warren St S', 'Warren St S', 'Warren St S', 'Warren St S', 'Warren St S', 'Warren St S', 'Warren St S', 'Warren St S', 'Warren St S', 'Warren St S', 'Warren St S', 'Warren St S', 'Warren St S', 'Warren St S', 'Warren St S', 'Warren St S', 'Warren St S', 'Warren St S', 'Warren St S', 'Warren St S', 'Warren St S', 'Warren St S', 'Warren St S', 'Warren St S', 'Warren St S', 'Warren St S', 'Warren St S', 'Warren St S', 'Warren St S', 'Warren St S', 'Warren St S', 'Warren St S', 'Warren St S', 'Riddell Rd', 'Riddell Rd', 'Riddell Rd', 'Riddell Rd', 'Riddell Rd', 'Riddell Rd', 'Riddell Rd', 'Riddell Rd', 'Riddell Rd', 'Riddell Rd', 'Riddell Rd', 'Riddell Rd', 'Riddell Rd', 'Riddell Rd', 'Riddell Rd', 'Riddell Rd', 'Riddell Rd', 'Riddell Rd', 'Riddell Rd', 'Riddell Rd', 'Riddell Rd', 'Riddell Rd', 'Riddell Rd', 'Riddell Rd', 'Riddell Rd', 'Riddell Rd', 'Riddell Rd', 'Riddell Rd', 'Riddell Rd', 'Riddell Rd', 'Riddell Rd', 'Riddell Rd', 'Riddell Rd', 'Riddell Rd', 'Riddell Rd', 'Riddell Rd', 'Riddell Rd', 'Riddell Rd', 'Riddell Rd', 'Riddell Rd', 'Riddell Rd', 'Riddell Rd', 'Riddell Rd', 'Riddell Rd', 'Riddell Rd', 'Riddell Rd', 'Riddell Rd', 'Riddell Rd', 'Riddell Rd', 'Riddell Rd', 'Riddell Rd', 'Riddell Rd', 'Riddell Rd', 'Riddell Rd', 'Riddell Rd', 'Riddell Rd', 'Riddell Rd', 'Riddell Rd', 'Riddell Rd', 'Riddell Rd', 'Riddell Rd', 'Riddell Rd', 'Riddell Rd', 'Riddell Rd', 'Riddell Rd', 'Riddell Rd', 'Riddell Rd', 'Riddell Rd', 'Riddell Rd', 'Riddell Rd', 'Riddell Rd', 'Riddell Rd', 'Riddell Rd', 'Riddell Rd', 'Riddell Rd', 'Riddell Rd', 'Riddell Rd', 'Riddell Rd', 'Riddell Rd', 'Riddell Rd', 'Riddell Rd', 'Riddell Rd', 'Riddell Rd', 'Riddell Rd', 'Riddell Rd', 'Riddell Rd', 'Riddell Rd', 'Riddell Rd', 'Riddell Rd', 'Riddell Rd', 'Riddell Rd', 'Riddell Rd', 'Riddell Rd', 'Riddell Rd', 'Riddell Rd', 'Riddell Rd', 'Riddell Rd', 'Riddell Rd', 'Riddell Rd', 'Riddell Rd', 
'Riddell Rd', 'Riddell Rd', 'Riddell Rd', 'Riddell Rd', 'Riddell Rd', 'Riddell Rd', 'Riddell Rd', 'Riddell Rd', 'Riddell Rd', 'Riddell Rd', 'Riddell Rd', 'Riddell Rd', 'Riddell Rd', 'Riddell Rd', 'Riddell Rd', 'Riddell Rd', 'Riddell Rd', 'Riddell Rd', 'Riddell Rd', 'Riddell Rd', 'Riddell Rd', 'Riddell Rd', 'Riddell Rd', 'Riddell Rd', 'Riddell Rd', 'Riddell Rd', 'Riddell Rd', 'Riddell Rd', 'Riddell Rd', 'Riddell Rd', 'Riddell Rd', 'Riddell Rd', 'Riddell Rd', 'Riddell Rd', 'Riddell Rd', 'Riddell Rd', 'Riddell Rd', 'Riddell Rd', 'Riddell Rd', 'Riddell Rd', 'Riddell Rd', 'Riddell Rd', 'Riddell Rd', 'Riddell Rd', 'Riddell Rd', 'Riddell Rd', 'Riddell Rd', 'Riddell Rd', 'Riddell Rd', 'Riddell Rd', 'Riddell Rd', 'Riddell Rd', 'Riddell Rd', 'Riddell Rd', 'Riddell Rd', 'Riddell Rd', 'Riddell Rd', 'Riddell Rd', 'Riddell Rd', 'Riddell Rd', 'Riddell Rd', 'Riddell Rd', 'Riddell Rd', 'Riddell Rd', 'Riddell Rd', 'Riddell Rd', 'Riddell Rd', 'Riddell Rd', 'Riddell Rd', 'Riddell Rd', 'Riddell Rd', 'Riddell Rd', 'Riddell Rd', 'Riddell Rd', 'Riddell Rd', 'Riddell Rd', 'Riddell Rd', 'Riddell Rd', 'Riddell Rd', 'Riddell Rd', 'Riddell Rd', 'Riddell Rd', 'Riddell Rd', 'Riddell Rd', 'Riddell Rd', 'Riddell Rd', 'Riddell Rd', 'Riddell Rd', 'Riddell Rd','Main St','Main St','Main St','Main St','Main St','Main St','Main St','Main St','Main St','Main St','Main St','Main St','Main St','Main St','Main St','Main St','Main St','Main St','Main St','Main St','Main St','Main St','Main St','Main St','Main St','Main St','Main St','Main St','Main St','Main St','Main St','Main St','Main St','Main St','Main St','Main St','Main St','Main St','Main St','Main St','Main St','Main St','Main St','Main St','Main St','Main St','Main St','Main St','Main St','Main St','Main St','Main St','Main St','Main St','Main St','Main St','Main St','Main St','Main St','Main St','Main St','Main St','Main St','Main St','Main St','Main St','Main St','Main St','Main St','Main St','Main St','Main St','Main St','Main St','Main 
St','Main St','Main St','Main St','Main St','Main St','Main St','Main St','Main St','Main St','Main St','Main St','Main St','Main St','Main St','Main St','Main St','Main St','Main St','Main St','Main St','Main St','Main St','Main St','Main St','Main St','Main St','Main St','Main St','Main St','Main St','Main St','Main St','Main St','Main St','Main St','Main St','Main St','Main St','Main St','Main St','Main St','Main St','Main St','Main St','Main St','Main St','Main St','Main St','Main St','Main St','Main St','Main St','Main St','Main St','Main St','Main St','Main St','Main St','Main St','Main St','Main St','Main St','Main St','Main St','Main St','Main St','Main St','Main St','Main St','Main St','Main St','Main St','Main St','Main St','Main St','Main St','Main St','Main St','Main St','Main St','Main St','Main St','Main St','Main St','Main St', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd 
S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River 
Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 
'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd 
S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'River Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd 
S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 
'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 
'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S','Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 
'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S', 'Sidney Rd S','Willamette Ferry St','Willamette Ferry St','Willamette Ferry St','Willamette Ferry St','Willamette Ferry St','Willamette Ferry St','Willamette Ferry St','Willamette Ferry St','Willamette Ferry St','Willamette Ferry St','Willamette Ferry St','Willamette Ferry St','Willamette Ferry St','Willamette Ferry St','Willamette Ferry St','Willamette Ferry St','Willamette Ferry St','Willamette Ferry St','Willamette Ferry St','Willamette Ferry St','Willamette Ferry St','Willamette Ferry St','Willamette Ferry St','Willamette Ferry St','Willamette Ferry St','Willamette Ferry St','Willamette Ferry St','Willamette Ferry St','Willamette Ferry St','Willamette Ferry St','Willamette Ferry St','Willamette Ferry St','Willamette Ferry St','Willamette Ferry St','Willamette Ferry St','Willamette Ferry St','Willamette Ferry St','Willamette Ferry St','Willamette Ferry St','Willamette Ferry St','Willamette Ferry St','Willamette Ferry St','Willamette Ferry St','Willamette Ferry St','Willamette Ferry St','Willamette Ferry St','Willamette Ferry St','Willamette Ferry St','Willamette Ferry St','Willamette Ferry St','Willamette Ferry St','Willamette Ferry St','Willamette Ferry St','Willamette Ferry St','Willamette Ferry St','Willamette Ferry 
St','Willamette Ferry St','Willamette Ferry St','Willamette Ferry St','Willamette Ferry St','Willamette Ferry St','Willamette Ferry St','Willamette Ferry St','Willamette Ferry St','Willamette Ferry St','Willamette Ferry St','Willamette Ferry St','Willamette Ferry St','Willamette Ferry St','Willamette Ferry St','Willamette Ferry St','Willamette Ferry St','Willamette Ferry St','Willamette Ferry St','Willamette Ferry St','Willamette Ferry St','Willamette Ferry St','Willamette Ferry St','Willamette Ferry St','Willamette Ferry St','Willamette Ferry St','Willamette Ferry St','Willamette Ferry St','Willamette Ferry St','Willamette Ferry St','Willamette Ferry St','Willamette Ferry St','Willamette Ferry St','Willamette Ferry St','Willamette Ferry St','Willamette Ferry St','Willamette Ferry St','Willamette Ferry St','Willamette Ferry St','Willamette Ferry St','Willamette Ferry St','Willamette Ferry St','Willamette Ferry St','Willamette Ferry St','Willamette Ferry St','Willamette Ferry St','Willamette Ferry St','Willamette Ferry St','Willamette Ferry St','Willamette Ferry St','Willamette Ferry St','Willamette Ferry St','Willamette Ferry St','Willamette Ferry St','Willamette Ferry St','Willamette Ferry St','Willamette Ferry St','Willamette Ferry St','Willamette Ferry St','Willamette Ferry St','Willamette Ferry St','Willamette Ferry St','Willamette Ferry St','Willamette Ferry St','Willamette Ferry St','Willamette Ferry St','Willamette Ferry St','Willamette Ferry St','Willamette Ferry St','Willamette Ferry St','Willamette Ferry St','Willamette Ferry St','Willamette Ferry St','Willamette Ferry St','Willamette Ferry St','Willamette Ferry St','Willamette Ferry St','Willamette Ferry St','Willamette Ferry St','Willamette Ferry St','Willamette Ferry St','Willamette Ferry St','Willamette Ferry St','Willamette Ferry St','Willamette Ferry St','Willamette Ferry St','Willamette Ferry St','Willamette Ferry St','Willamette Ferry St','Willamette Ferry St','Willamette Ferry St','Willamette 
Ferry St','Willamette Ferry St','Willamette Ferry St','Willamette Ferry St','Willamette Ferry St','Willamette Ferry St','Willamette Ferry St','Willamette Ferry St','Willamette Ferry St','Willamette Ferry St','Willamette Ferry St','Willamette Ferry St','Willamette Ferry St','Willamette Ferry St','Willamette Ferry St','Willamette Ferry St','Willamette Ferry St','Willamette Ferry St','Willamette Ferry St','Willamette Ferry St','Willamette Ferry St','Willamette Ferry St','Willamette Ferry St','Willamette Ferry St','Willamette Ferry St','Willamette Ferry St','Willamette Ferry St','Willamette Ferry St','Willamette Ferry St','Willamette Ferry St','Willamette Ferry St','Willamette Ferry St','Willamette Ferry St','Willamette Ferry St','Willamette Ferry St','Willamette Ferry St','Willamette Ferry St','Willamette Ferry St','Willamette Ferry St','Willamette Ferry St','Willamette Ferry St','Willamette Ferry St','Willamette Ferry St','Willamette Ferry St','Willamette Ferry St','Willamette Ferry St','Willamette Ferry St','Willamette Ferry St','Willamette Ferry St','Willamette Ferry St','Willamette Ferry St','Willamette Ferry St','Willamette Ferry St','Willamette Ferry St','Willamette Ferry St','Willamette Ferry St','Willamette Ferry St','Willamette Ferry St','Willamette Ferry St','Willamette Ferry St','Willamette Ferry St','Willamette Ferry St','Willamette Ferry St','Willamette Ferry St','Willamette Ferry St','Willamette Ferry St','Willamette Ferry St','Willamette Ferry St','Willamette Ferry St','Willamette Ferry St','Willamette Ferry St','Willamette Ferry St','Willamette Ferry St','Willamette Ferry St','Willamette Ferry St','Willamette Ferry St','Willamette Ferry St','Willamette Ferry St','Willamette Ferry St','Willamette Ferry St','Willamette Ferry St','Willamette Ferry St','Willamette Ferry St','Willamette Ferry St','Willamette Ferry St','Willamette Ferry St','Willamette Ferry St','Willamette Ferry St','Willamette Ferry St','Willamette Ferry St','Willamette Ferry 
St','Willamette Ferry St','Willamette Ferry St','Willamette Ferry St','Willamette Ferry St','Willamette Ferry St','Willamette Ferry St','Willamette Ferry St','Willamette Ferry St','Willamette Ferry St','Willamette Ferry St','Willamette Ferry St','Willamette Ferry St','Willamette Ferry St','Willamette Ferry St','Willamette Ferry St','Willamette Ferry St','Willamette Ferry St','Willamette Ferry St','Willamette Ferry St','Willamette Ferry St','Willamette Ferry St','Willamette Ferry St','Willamette Ferry St','Willamette Ferry St','Willamette Ferry St','Willamette Ferry St','Willamette Ferry St','Willamette Ferry St','Willamette Ferry St','Willamette Ferry St','Willamette Ferry St','Willamette Ferry St','Willamette Ferry St','Willamette Ferry St','Willamette Ferry St','Willamette Ferry St','Willamette Ferry St','Willamette Ferry St','Willamette Ferry St','Willamette Ferry St','Willamette Ferry St','Willamette Ferry St','Willamette Ferry St','Willamette Ferry St','Willamette Ferry St','Willamette Ferry St','Willamette Ferry St','Willamette Ferry St','Willamette Ferry St','Willamette Ferry St','Willamette Ferry St','Willamette Ferry St','Willamette Ferry St','Willamette Ferry St','Willamette Ferry St','Willamette Ferry St','Willamette Ferry St','Willamette Ferry St','Willamette Ferry St','Willamette Ferry St','Willamette Ferry St','Willamette Ferry St','Willamette Ferry St','Willamette Ferry St','Willamette Ferry St','Willamette Ferry St','Willamette Ferry St','Willamette Ferry St','Willamette Ferry St','Willamette Ferry St','Willamette Ferry St','Willamette Ferry St','Willamette Ferry St','Willamette Ferry St','Willamette Ferry St','Willamette Ferry St','Willamette Ferry St','Willamette Ferry St','Willamette Ferry St','Willamette Ferry St','Willamette Ferry St','Willamette Ferry St','Willamette Ferry St','Willamette Ferry St','Willamette Ferry St','Willamette Ferry St','Willamette Ferry St','Willamette Ferry St','Willamette Ferry St','Willamette Ferry St','Willamette 
Ferry St','Willamette Ferry St','Willamette Ferry St','Willamette Ferry St','Willamette Ferry St','Willamette Ferry St','Willamette Ferry St','Willamette Ferry St','Willamette Ferry St','Willamette Ferry St','Willamette Ferry St','Willamette Ferry St','Willamette Ferry St','Willamette Ferry St','Willamette Ferry St','Willamette Ferry St','Willamette Ferry St','Willamette Ferry St','Willamette Ferry St','Willamette Ferry St','Willamette Ferry St','Willamette Ferry St','Willamette Ferry St','Willamette Ferry St','Willamette Ferry St','Willamette Ferry St','Willamette Ferry St','Willamette Ferry St','Willamette Ferry St','Willamette Ferry St','Willamette Ferry St','Willamette Ferry St','Willamette Ferry St','Willamette Ferry St','Willamette Ferry St','Willamette Ferry St','Willamette Ferry St','Willamette Ferry St','Willamette Ferry St','Willamette Ferry St','Willamette Ferry St','Willamette Ferry St','Willamette Ferry St','Willamette Ferry St','Willamette Ferry St','Willamette Ferry St','Willamette Ferry St','Willamette Ferry St','Willamette Ferry St','Willamette Ferry St','Willamette Ferry St','Willamette Ferry St','Willamette Ferry St','Willamette Ferry St','Willamette Ferry St','Willamette Ferry St','Willamette Ferry St','Willamette Ferry St','Willamette Ferry St','Willamette Ferry St','Willamette Ferry St','Willamette Ferry St','Willamette Ferry St','Willamette Ferry St','Willamette Ferry St','Willamette Ferry St','Willamette Ferry St','Willamette Ferry St','Willamette Ferry St','Willamette Ferry St','Willamette Ferry St','Willamette Ferry St','Willamette Ferry St','Willamette Ferry St','Willamette Ferry St','Willamette Ferry St','Willamette Ferry St','Willamette Ferry St','Willamette Ferry St','Willamette Ferry St','Willamette Ferry St','Willamette Ferry St','Willamette Ferry St','Willamette Ferry St','Willamette Ferry St','Willamette Ferry St','Willamette Ferry St','Willamette Ferry St','Willamette Ferry St','Willamette Ferry St','Willamette Ferry 
St','Willamette Ferry St','Willamette Ferry St','Willamette Ferry St','Willamette Ferry St','Willamette Ferry St','Willamette Ferry St','Willamette Ferry St','Willamette Ferry St','Willamette Ferry St','Willamette Ferry St','Willamette Ferry St','Willamette Ferry St','Willamette Ferry St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 
'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main 
St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 
'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main 
St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 
'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main 
St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'Main St', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena 
Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena 
Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena 
Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena 
Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena 
Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena 
Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena 
Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena 
Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena 
Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena 
Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena 
Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena 
Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena 
Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena 
Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena 
Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena 
Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd', 'NW Buena Vista Rd' , 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW 
Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW 
Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW 
Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW 
Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW 
Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW 
Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW s Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW 
Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW 
Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr', 'NW Springhill Dr','NE Independence Hwy','NE Independence Hwy','NE Independence Hwy','NE Independence Hwy','NE Independence Hwy','NE Independence Hwy','NE Independence Hwy','NE Independence Hwy','NE Independence Hwy','NE Independence Hwy','NE Independence Hwy','NE Independence Hwy','NE Independence Hwy','NE Independence Hwy','NE Independence Hwy','NE Independence Hwy','NE Independence Hwy','NE Independence Hwy','NE Independence Hwy','NE Independence Hwy','NE Independence Hwy','NE Independence Hwy','NE Independence Hwy','NE Independence Hwy','NE Independence Hwy','NE Independence Hwy','NE Independence Hwy','NE Independence Hwy','NE Independence Hwy','NE Independence Hwy','NE Independence Hwy','NE Independence Hwy','NE Independence Hwy','NE Independence Hwy','NE Independence Hwy','NE Independence Hwy','NE Independence Hwy','NE Independence Hwy','NE Independence Hwy','NE Independence Hwy','NE Independence Hwy','NE Independence Hwy','NE Independence Hwy','NE Independence Hwy','NE Independence Hwy','NE Independence Hwy','NE Independence Hwy','NE Independence Hwy','NE Independence Hwy','NE Independence Hwy','NE Independence Hwy','NE Independence Hwy','NE Independence Hwy','NE Independence Hwy','NE Independence Hwy','NE Independence Hwy','NE Independence Hwy','NE Independence Hwy','NE Independence Hwy','NE Independence Hwy','NE Independence Hwy','NE Independence Hwy','NE Independence Hwy','NE Independence Hwy','NE Independence Hwy','NE Independence Hwy','NE Independence Hwy','NE Independence Hwy','NE Independence Hwy','NE Independence Hwy','NE Independence Hwy','NE Independence Hwy','NE Independence Hwy','NE Independence Hwy','NE Independence Hwy','NE Independence Hwy','NE 
Independence Hwy','NE Independence Hwy','NE Independence Hwy','NE Independence Hwy','NE Independence Hwy','NE Independence Hwy','NE Independence Hwy','NE Independence Hwy','NE Independence Hwy','NE Independence Hwy','NE Independence Hwy','NE Independence Hwy','NE Independence Hwy','NE Independence Hwy','NE Independence Hwy','NE Independence Hwy','NE Independence Hwy','NE Independence Hwy','NE Independence Hwy','NE Independence Hwy','NE Independence Hwy','NE Independence Hwy','NE Independence Hwy','NE Independence Hwy','NE Independence Hwy','NE Independence Hwy','NE Independence Hwy','NE Independence Hwy','NE Independence Hwy','NE Independence Hwy','NE Independence Hwy','NE Independence Hwy','NE Independence Hwy','NE Independence Hwy','NE Independence Hwy','NE Independence Hwy','NE Independence Hwy','NE Independence Hwy','NE Independence Hwy','NE Independence Hwy','NE Independence Hwy','NE Independence Hwy','NE Independence Hwy','NE Independence Hwy','NE Independence Hwy','NE Independence Hwy','NE Independence Hwy','NE Independence Hwy','NE Independence Hwy','NE Independence Hwy','NE Independence Hwy','NE Independence Hwy','NE Independence Hwy','NE Independence Hwy','NE Independence Hwy','NE Independence Hwy', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE 
Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE a Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone 
Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 
'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE 
Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE 
Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE 
Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE 
Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE 
Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Pettibone Dr', 'NE Granger Ave', 'NE Granger Ave', 'NE Elliott Cir', 'NE Elliott Cir', 'NE Elliott Cir', 'NE Elliott Cir', 'NE Elliott Cir', 'NE Elliott Cir', 'NE Elliott Cir', 'NE Elliott Cir', 'NE Elliott Cir', 'NE Elliott Cir', 'NE Elliott Cir', 'NE Elliott Cir', 'NE Elliott Cir', 'NE Elliott Cir', 'NE Elliott Cir', 'NE Elliott Cir', 'NE Elliott Cir', 'NE Elliott Cir', 'NE Elliott Cir', 'NE Cir', 'NE Elliott Cir', 'NE Elliott Cir', 'NE Elliott Cir', 'NE Elliott Cir', 'NE Elliott Cir', 'NE Elliott Cir', 'NE Elliott Cir', 'NE Elliott Cir', 'NE Elliott Cir', 'NE Elliott Cir', 'NE Elliott Cir', 'NE Elliott Cir', 'NE Elliott Cir', 'NE Elliott Cir', 'NE Elliott Cir', 'NE Elliott Cir', 'NE Elliott Cir', 'NE Elliott Cir', 'NE Elliott Cir', 'NE Elliott Cir', 'NE Elliott Cir', 'NE Elliott Cir', 'NE Elliott Cir', 'NE Elliott Cir', 'NE Elliott Cir', 'NE Elliott Cir', 'NE Elliott Cir', 'NE Elliott Cir', 'NE Elliott Cir', 'NE Elliott Cir', 'NE Elliott Cir', 'NE Elliott Cir', 'NE Elliott Cir', 'NE Elliott Cir', 'NE Elliott Cir', 'NE Elliott Cir', 'NE Elliott Cir', 'NE Elliott Cir', 'NE Elliott Cir', 'NE Elliott Cir', 'NE Elliott Cir', 'NE Elliott Cir', 'NE Elliott Cir', 'NE Elliott Cir', 'NE Elliott Cir', 'NE Elliott Cir', 'NE Elliott Cir', 'NE Elliott Cir', 'NE Elliott Cir', 'NE Elliott 
Cir', 'NE Elliott Cir', 'NE Elliott Cir', 'NE Elliott Cir', 'NE Elliott Cir', 'NE Elliott Cir', 'NE Elliott Cir', 'NE Elliott Cir', 'NE Elliott Cir', 'NE Elliott Cir', 'NE Elliott Cir', 'NE Elliott Cir', 'NE Elliott Cir', 'NE Elliott Cir', 'NE Elliott Cir', 'NE Elliott Cir', 'NE Elliott Cir', 'NE Elliott Cir', 'NE Elliott Cir', 'NE Elliott Cir', 'NE Elliott Cir', 'NE Elliott Cir', 'NE Elliott Cir', 'NE Elliott Cir', 'NE Elliott Cir', 'NE Elliott Cir', 'NE Elliott Cir', 'NE Elliott Cir', 'NE Elliott Cir', 'NE Elliott Cir', 'NE Elliott Cir', 'NE Elliott Cir', 'NE Elliott Cir', 'NE Elliott Cir', 'NE Elliott Cir', 'NE Elliott Cir', 'NE Elliott Cir', 'NE Elliott Cir', 'NE Elliott Cir', 'NE Elliott Cir', 'NE Elliott Cir', 'NE Elliott Cir', 'NE Elliott Cir', 'NE Elliott Cir', 'NE Elliott Cir', 'NE Elliott Cir', 'NE Elliott Cir', 'NE Elliott Cir', 'NE Elliott Cir', 'NE Elliott Cir', 'NE Elliott Cir', 'NE Elliott Cir', 'NE Elliott Cir', 'NE Elliott Cir', 'NE Elliott Cir', 'NE Elliott Cir', 'NE Elliott Cir', 'NE Elliott Cir', 'NE Elliott Cir', 'NE Elliott Cir', 'NE Elliott Cir', 'NE Elliott Cir', 'NE Elliott Cir', 'NE Elliott Cir', 'NE Elliott Cir', 'NE Elliott Cir', 'NE Elliott Cir', 'NE Elliott Cir', 'NE Elliott Cir', 'NE Elliott Cir']
| 140,310
| 280,619
| 0.656454
| 49,044
| 280,620
| 3.756097
| 0.000754
| 0.068659
| 0.203763
| 0.237724
| 0.999967
| 0.999967
| 0.999967
| 0.999967
| 0.999967
| 0.999967
| 0
| 0
| 0.163969
| 280,620
| 1
| 280,620
| 280,620
| 0.785202
| 0
| 0
| 0
| 0
| 0
| 0.771349
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 12
|
e0f3f0352db3c34c691c270241d54736142fec35
| 39,405
|
py
|
Python
|
snaps_k8s/ansible_p/ansible_utils/ansible_playbook_launcher.py
|
chiraggupta06/snaps-kubernetes
|
037660aa7a91b681e57f13f1775d388d35586b9e
|
[
"Apache-2.0"
] | null | null | null |
snaps_k8s/ansible_p/ansible_utils/ansible_playbook_launcher.py
|
chiraggupta06/snaps-kubernetes
|
037660aa7a91b681e57f13f1775d388d35586b9e
|
[
"Apache-2.0"
] | null | null | null |
snaps_k8s/ansible_p/ansible_utils/ansible_playbook_launcher.py
|
chiraggupta06/snaps-kubernetes
|
037660aa7a91b681e57f13f1775d388d35586b9e
|
[
"Apache-2.0"
] | null | null | null |
# Copyright 2018 ARICENT HOLDINGS LUXEMBOURG SARL and Cable Television
# Laboratories, Inc.
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import subprocess
import logging
import os
__author__ = '_ARICENT'
logger = logging.getLogger('ansible_playbook_operations')
ANSIBLE_EXE = 'ansible-playbook'
def create_extra_var_str(vars_dict):
    """
    Encode a dict of variables as an ansible-playbook --extra-vars argument.

    :param vars_dict: mapping of variable names to values; note that values
        are interpolated without escaping, so quotes inside a value would
        break the generated JSON
    :return: a string such as --extra-vars='{"key":"value"}' that can be
        appended to the ansible-playbook command line, or '' when the
        mapping is empty
    """
    if not vars_dict:
        return ''
    # Build the JSON-ish body in dict insertion order, matching the command
    # string the rest of this module expects.
    body = ','.join('"{}":"{}"'.format(key, value)
                    for key, value in vars_dict.items())
    return "--extra-vars='{{{}}}'".format(body)
def execute_system_command(playbook, extra_var_str):
    """
    Run an ansible-playbook command through os.system.

    :param playbook: path of the playbook to execute
    :param extra_var_str: pre-built --extra-vars argument string
    :return: True when the shell exit status is zero, otherwise False
    """
    cmd = ' '.join((ANSIBLE_EXE, playbook, extra_var_str))
    logger.info(cmd)
    # os.system returns the raw wait status; zero means success.
    return os.system(cmd) == 0
def execute_system_cmd_subprocess(playbook, extra_var_str):
    """
    Run an ansible-playbook command through subprocess.check_output.

    :param playbook: path of the playbook to execute
    :param extra_var_str: pre-built --extra-vars argument string
    :return: True when the playbook exits successfully, otherwise False
    """
    cmd = ' '.join((ANSIBLE_EXE, playbook, extra_var_str))
    logger.info(cmd)
    try:
        output = subprocess.check_output(cmd, shell=True)
    except subprocess.CalledProcessError as err:
        logger.info(err)
        logger.error('Failed Execution for playbook %s', playbook)
        return False
    logger.info(output)
    return True
def kubespray_play(playbook, proxy_data_file, var_file, src_pkg_path,
                   git_branch, project_name):
    """
    Run the playbook that clones the kubespray code.

    :param playbook: path of the playbook file
    :return: True if the playbook succeeded, otherwise False
    """
    play_vars = {
        'PROXY_DATA_FILE': proxy_data_file,
        'VARIABLE_FILE': var_file,
        'SRC_PACKAGE_PATH': src_pkg_path,
        'Git_branch': git_branch,
        'Project_name': project_name,
    }
    arg_str = create_extra_var_str(play_vars)
    logger.info("Arguments are %s", arg_str)
    success = execute_system_command(playbook, arg_str)
    logger.info('Exit')
    return success
def clone_packages(playbook, proxy_data_file, var_file, src_pkg_path,
                   git_branch):
    """
    Run the playbook that clones the required packages.

    :param playbook: path of the playbook file
    :return: True if the playbook succeeded, otherwise False
    """
    play_vars = {
        'PROXY_DATA_FILE': proxy_data_file,
        'VARIABLE_FILE': var_file,
        'SRC_PACKAGE_PATH': src_pkg_path,
        'Git_branch': git_branch,
    }
    arg_str = create_extra_var_str(play_vars)
    logger.info("Arguments are %s", arg_str)
    success = execute_system_command(playbook, arg_str)
    logger.info('Exit')
    return success
def enable_loggings(playbook, proxy_data_file, var_file, logging, project_name,
                    log_level, file_path, logging_port):
    """
    Run the playbook that enables cluster logging.

    NOTE(review): the ``logging`` parameter shadows the stdlib ``logging``
    module inside this function; it cannot be renamed without breaking
    keyword callers.

    :param playbook: path of the playbook file
    :param logging: whether logging is enabled or disabled
    :param log_level: level to configure (error, warning, critical, info,
        debug)
    :return: True if the playbook succeeded, otherwise False
    """
    play_vars = {
        'PROXY_DATA_FILE': proxy_data_file,
        'VARIABLE_FILE': var_file,
        "logging": logging,
        'Project_name': project_name,
        "log_level": log_level,
        "file_path": file_path,
        "logging_port": logging_port,
    }
    arg_str = create_extra_var_str(play_vars)
    logger.info("Arguments are %s", arg_str)
    success = execute_system_command(playbook, arg_str)
    logger.info('Exit')
    return success
def cpu_manager_configuration(playbook, proxy_data_file, var_file):
    """
    Run the playbook that configures CPU management policies in the cluster.

    :param playbook: path of the playbook file
    :return: True if the playbook succeeded, otherwise False
    """
    play_vars = {
        'PROXY_DATA_FILE': proxy_data_file,
        'VARIABLE_FILE': var_file,
    }
    arg_str = create_extra_var_str(play_vars)
    logger.info("Arguments are %s", arg_str)
    # This one deliberately uses the subprocess-based runner.
    success = execute_system_cmd_subprocess(playbook, arg_str)
    logger.info('Exit')
    return success
def launch_k8s(playbook, service_subnet, pod_subnet, networking_plugin,
               proxy_data_file, var_file, src_pkg_path, cwd, git_branch,
               project_name):
    """
    Run the playbook that launches Kubernetes on the configured hosts.

    :param playbook: path of the playbook file
    :return: True if the playbook succeeded, otherwise False
    """
    play_vars = {
        'service_subnet': service_subnet,
        'pod_subnet': pod_subnet,
        'networking_plugin': networking_plugin,
        'PROXY_DATA_FILE': proxy_data_file,
        'VARIABLE_FILE': var_file,
        'SRC_PACKAGE_PATH': src_pkg_path,
        'Git_branch': git_branch,
        'Project_name': project_name,
        'CURRENT_DIR': cwd,
    }
    arg_str = create_extra_var_str(play_vars)
    logger.info("Arguments are %s", arg_str)
    success = execute_system_command(playbook, arg_str)
    logger.info('Exit')
    return success
def launch_inventory(playbook, node_type, host_name, src_pkg_path, var_file,
                     proj_name):
    """
    Run the playbook that adds a host of the given node type to the
    inventory.

    :param playbook: path of the playbook file
    :param node_type: type of the node being added
    :param host_name: name of the host being added
    :return: True if the playbook succeeded, otherwise False
    """
    extra_var_str = create_extra_var_str({
        'node_type': node_type,
        'host_name': host_name,
        'SRC_PACKAGE_PATH': src_pkg_path,
        'VARIABLE_FILE': var_file,
        'Project_name': proj_name,
    })
    # Fixed: a dead local ("command") was previously built here and never
    # used; execute_system_command builds the command itself.
    logger.info("Arguments are %s", extra_var_str)
    retval = execute_system_command(playbook, extra_var_str)
    logger.info('Exit')
    return retval
def launch_new_inventory(playbook, ip, host_name, src_pkg_path, var_file, cwd,
                         proj_name):
    """
    Run the playbook that adds a new host/IP pair to the inventory.

    :param playbook: path of the playbook file
    :return: True if the playbook succeeded, otherwise False
    """
    play_vars = {
        'ip': ip,
        'host_name': host_name,
        'SRC_PACKAGE_PATH': src_pkg_path,
        'VARIABLE_FILE': var_file,
        'CURRENT_DIR': cwd,
        'Project_name': proj_name,
    }
    arg_str = create_extra_var_str(play_vars)
    logger.info("Arguments are %s", arg_str)
    success = execute_system_command(playbook, arg_str)
    logger.info('Exit')
    return success
def update_user_list(playbook, user_name, user_password, user_id,
                     src_pkg_path):
    """
    Run the playbook that updates the user list with the given credentials.

    :param playbook: path of the playbook file
    :return: True if the playbook succeeded, otherwise False
    """
    play_vars = {
        'user_name': user_name,
        'user_password': user_password,
        'user_id': user_id,
        'SRC_PACKAGE_PATH': src_pkg_path,
    }
    arg_str = create_extra_var_str(play_vars)
    logger.info("Arguments are %s", arg_str)
    success = execute_system_command(playbook, arg_str)
    logger.info('Exit')
    return success
def launch_authentication(playbook, host_name, src_pkg_path, var_file):
    """
    Run the authentication playbook against the given host.

    :param playbook: path of the playbook file
    :return: True if the playbook succeeded, otherwise False
    """
    play_vars = {
        'host_name': host_name,
        'SRC_PACKAGE_PATH': src_pkg_path,
        'VARIABLE_FILE': var_file,
    }
    arg_str = create_extra_var_str(play_vars)
    logger.info("Arguments are %s", arg_str)
    success = execute_system_command(playbook, arg_str)
    logger.info('Exit')
    return success
def etcd_changes(playbook, host_name, ip, src_pkg_path, var_file):
    """
    Run the playbook that applies etcd changes for the given host/IP.

    :param playbook: path of the playbook file
    :return: True if the playbook succeeded, otherwise False
    """
    play_vars = {
        'host_name': host_name,
        'ip': ip,
        'SRC_PACKAGE_PATH': src_pkg_path,
        'VARIABLE_FILE': var_file,
    }
    arg_str = create_extra_var_str(play_vars)
    logger.info("Arguments are %s", arg_str)
    success = execute_system_command(playbook, arg_str)
    logger.info('Exit')
    return success
def ceph_storage(playbook, host_name, master_host_name, src_pkg_path,
                 var_file, storage, proxy_data_file, node_type):
    """
    Run the Ceph storage playbook for the given host.

    :param playbook: path of the playbook file
    :return: True if the playbook succeeded, otherwise False
    """
    play_vars = {
        'host_name': host_name,
        'master_host_name': master_host_name,
        'SRC_PACKAGE_PATH': src_pkg_path,
        'VARIABLE_FILE': var_file,
        'PROXY_DATA_FILE': proxy_data_file,
        'storage': storage,
        'node_type': node_type,
    }
    arg_str = create_extra_var_str(play_vars)
    logger.info("Arguments are %s", arg_str)
    success = execute_system_command(playbook, arg_str)
    logger.info('Exit')
    return success
def ceph_volume2(playbook, host_name, src_pkg_path, var_file,
                 ceph_storage_size, ceph_claim_name, proxy_data_file,
                 controller_host_name, ceph_controller_ip):
    """
    Run the second Ceph volume playbook for the given host and claim.

    :param playbook: path of the playbook file
    :return: True if the playbook succeeded, otherwise False
    """
    play_vars = {
        'host_name': host_name,
        'SRC_PACKAGE_PATH': src_pkg_path,
        'VARIABLE_FILE': var_file,
        'ceph_storage_size': ceph_storage_size,
        'ceph_claim_name': ceph_claim_name,
        'PROXY_DATA_FILE': proxy_data_file,
        'controller_host_name': controller_host_name,
        'ceph_controller_ip': ceph_controller_ip,
    }
    arg_str = create_extra_var_str(play_vars)
    logger.info("Arguments are %s", arg_str)
    success = execute_system_command(playbook, arg_str)
    logger.info('Exit')
    return success
def ceph_volume_first(playbook, host_name, src_pkg_path, var_file,
                      proxy_data_file, host_ip):
    """
    Run the first Ceph volume playbook for the given host.

    :param playbook: path of the playbook file
    :return: True if the playbook succeeded, otherwise False
    """
    play_vars = {
        'host_name': host_name,
        'SRC_PACKAGE_PATH': src_pkg_path,
        'VARIABLE_FILE': var_file,
        'PROXY_DATA_FILE': proxy_data_file,
        'host_ip': host_ip,
    }
    arg_str = create_extra_var_str(play_vars)
    logger.info("Arguments are %s", arg_str)
    success = execute_system_command(playbook, arg_str)
    logger.info('Exit')
    return success
def ceph_mon(playbook, master_host_name, var_file, proxy_data_file):
    """
    Run the Ceph monitor playbook against the master host.

    :param playbook: path of the playbook file
    :return: True if the playbook succeeded, otherwise False
    """
    play_vars = {
        'master_host_name': master_host_name,
        'VARIABLE_FILE': var_file,
        'PROXY_DATA_FILE': proxy_data_file,
    }
    arg_str = create_extra_var_str(play_vars)
    logger.info("Arguments are %s", arg_str)
    success = execute_system_command(playbook, arg_str)
    logger.info('Exit')
    return success
def ceph_deploy(playbook, host_name, master_host_name, var_file,
                proxy_data_file, user_id, passwd):
    """
    Run the ceph-deploy playbook for the given host and credentials.

    :param playbook: path of the playbook file
    :return: True if the playbook succeeded, otherwise False
    """
    play_vars = {
        'host_name': host_name,
        'master_host_name': master_host_name,
        'VARIABLE_FILE': var_file,
        'PROXY_DATA_FILE': proxy_data_file,
        'user_id': user_id,
        'passwd': passwd,
    }
    arg_str = create_extra_var_str(play_vars)
    logger.info("Arguments are %s", arg_str)
    success = execute_system_command(playbook, arg_str)
    logger.info('Exit')
    return success
def ceph_deploy_admin(playbook, host_name, master_host_name, var_file,
                      proxy_data_file):
    """
    Run the ceph-deploy admin playbook for the given host.

    :param playbook: path of the playbook file
    :return: True if the playbook succeeded, otherwise False
    """
    play_vars = {
        'host_name': host_name,
        'master_host_name': master_host_name,
        'VARIABLE_FILE': var_file,
        'PROXY_DATA_FILE': proxy_data_file,
    }
    arg_str = create_extra_var_str(play_vars)
    logger.info("Arguments are %s", arg_str)
    success = execute_system_command(playbook, arg_str)
    logger.info('Exit')
    return success
def ceph_volume(playbook, host_name, src_pkg_path, var_file, proxy_data_file,
                osd_host_name, user_id, passwd, osd_ip):
    """
    Run the Ceph volume playbook for the given OSD host.

    :param playbook: path of the playbook file
    :return: True if the playbook succeeded, otherwise False
    """
    play_vars = {
        'host_name': host_name,
        'SRC_PACKAGE_PATH': src_pkg_path,
        'VARIABLE_FILE': var_file,
        'PROXY_DATA_FILE': proxy_data_file,
        'osd_host_name': osd_host_name,
        'user_id': user_id,
        'passwd': passwd,
        'osd_ip': osd_ip,
    }
    arg_str = create_extra_var_str(play_vars)
    logger.info("Arguments are %s", arg_str)
    success = execute_system_command(playbook, arg_str)
    logger.info('Exit')
    return success
def node_labeling(playbook, master_hostname, hostname, label_key, label_value,
                  proxy_data_file):
    """
    Run the playbook that applies a key/value label to a node.

    :param playbook: path of the playbook file
    :return: True if the playbook succeeded, otherwise False
    """
    play_vars = {
        'master_hostname': master_hostname,
        'hostname': hostname,
        'label_key': label_key,
        'label_value': label_value,
        'PROXY_DATA_FILE': proxy_data_file,
    }
    arg_str = create_extra_var_str(play_vars)
    logger.info("Arguments are %s", arg_str)
    success = execute_system_command(playbook, arg_str)
    logger.info('Exit')
    return success
def delete_secret(playbook, master_hostname, proxy_data_file):
    """
    Run the playbook that deletes a secret via the master host.

    :param playbook: path of the playbook file
    :return: True if the playbook succeeded, otherwise False
    """
    play_vars = {
        'master_hostname': master_hostname,
        'PROXY_DATA_FILE': proxy_data_file,
    }
    arg_str = create_extra_var_str(play_vars)
    logger.info("Arguments are %s", arg_str)
    success = execute_system_command(playbook, arg_str)
    logger.info('Exit')
    return success
def persistent_volume(playbook, host_name, src_pkg_path, var_file,
                      storage_size, claim_name, proxy_data_file):
    """
    Run the playbook that creates a persistent volume and claim.

    :param playbook: path of the playbook file
    :return: True if the playbook succeeded, otherwise False
    """
    play_vars = {
        'host_name': host_name,
        'SRC_PACKAGE_PATH': src_pkg_path,
        'VARIABLE_FILE': var_file,
        'storage_size': storage_size,
        'claim_name': claim_name,
        'PROXY_DATA_FILE': proxy_data_file,
    }
    arg_str = create_extra_var_str(play_vars)
    logger.info("Arguments are %s", arg_str)
    success = execute_system_command(playbook, arg_str)
    logger.info('Exit')
    return success
def weave_scope(playbook, host_name, src_pkg_path, var_file, proxy_data_file):
    """
    Run the Weave Scope playbook for the given host.

    :param playbook: path of the playbook file
    :return: True if the playbook succeeded, otherwise False
    """
    play_vars = {
        'host_name': host_name,
        'SRC_PACKAGE_PATH': src_pkg_path,
        'VARIABLE_FILE': var_file,
        'PROXY_DATA_FILE': proxy_data_file,
    }
    arg_str = create_extra_var_str(play_vars)
    logger.info("Arguments are %s", arg_str)
    success = execute_system_command(playbook, arg_str)
    logger.info('Exit')
    return success
def delete_node(playbook, host_name, src_pkg_path, var_file, project_name):
    """
    Run the playbook that deletes a node from the cluster.

    :param playbook: path of the playbook file
    :return: True if the playbook succeeded, otherwise False
    """
    play_vars = {
        'host_name': host_name,
        'SRC_PACKAGE_PATH': src_pkg_path,
        'VARIABLE_FILE': var_file,
        'Project_name': project_name,
    }
    arg_str = create_extra_var_str(play_vars)
    logger.info("Arguments are %s", arg_str)
    success = execute_system_command(playbook, arg_str)
    logger.info('Exit')
    return success
def kube_proxy(playbook, host_name, src_pkg_path, var_file, proxy_data_file):
    """
    Run the kube-proxy playbook for the given host.

    :param playbook: path of the playbook file
    :return: True if the playbook succeeded, otherwise False
    """
    play_vars = {
        'host_name': host_name,
        'SRC_PACKAGE_PATH': src_pkg_path,
        'VARIABLE_FILE': var_file,
        'PROXY_DATA_FILE': proxy_data_file,
    }
    arg_str = create_extra_var_str(play_vars)
    logger.info("Arguments are %s", arg_str)
    success = execute_system_command(playbook, arg_str)
    logger.info('Exit')
    return success
def delete_host_k8(playbook, ip, host_name, host_file_path,
                   ansible_host_file_path, var_file, project_name,
                   multus_enabled):
    """
    Run the playbook that removes a host from the Kubernetes cluster.

    :param playbook: path of the playbook file
    :return: True if the playbook succeeded, otherwise False
    """
    play_vars = {
        'ip': ip,
        'host_name': host_name,
        'HOST_FILE_PATH': host_file_path,
        'ANSIBLE_HOST_FILE_PATH': ansible_host_file_path,
        'VARIABLE_FILE': var_file,
        'Project_name': project_name,
        'multus_enabled': multus_enabled,
    }
    arg_str = create_extra_var_str(play_vars)
    logger.info("Arguments are %s", arg_str)
    success = execute_system_command(playbook, arg_str)
    logger.info('Exit')
    return success
def delete_project_folder(playbook, var_file, src_pkg_path, project_name,
                          proxy_data_file):
    """
    Run the playbook that deletes the project folder.

    :param playbook: path of the playbook file
    :return: True if the playbook succeeded, otherwise False
    """
    play_vars = {
        'SRC_PACKAGE_PATH': src_pkg_path,
        'VARIABLE_FILE': var_file,
        'Project_name': project_name,
        'PROXY_DATA_FILE': proxy_data_file,
    }
    arg_str = create_extra_var_str(play_vars)
    logger.info("Arguments are %s", arg_str)
    success = execute_system_command(playbook, arg_str)
    logger.info('Exit')
    return success
def clean_k8(playbook, src_pkg_path, var_file, proxy_data_file, git_branch,
             project_name):
    """
    Run the playbook that cleans up the Kubernetes deployment.

    :param playbook: path of the playbook file
    :return: True if the playbook succeeded, otherwise False
    """
    play_vars = {
        'SRC_PACKAGE_PATH': src_pkg_path,
        'VARIABLE_FILE': var_file,
        'PROXY_DATA_FILE': proxy_data_file,
        'Git_branch': git_branch,
        'Project_name': project_name,
    }
    arg_str = create_extra_var_str(play_vars)
    logger.info("Arguments are %s", arg_str)
    success = execute_system_command(playbook, arg_str)
    logger.info('Exit')
    return success
def set_k8s_packages(playbook, target, host_name, proxy_data_file, var_file,
                     apt_arch_src, src_pkg_path, registry_port):
    """
    Run the playbook that installs Kubernetes packages on a target host.

    :param playbook: path of the playbook file
    :return: True if the playbook succeeded, otherwise False
    """
    play_vars = {
        'target': target,
        'host_name': host_name,
        'PROXY_DATA_FILE': proxy_data_file,
        'VARIABLE_FILE': var_file,
        'APT_ARCHIVES_SRC': apt_arch_src,
        'SRC_PACKAGE_PATH': src_pkg_path,
        'registry_port': registry_port,
    }
    arg_str = create_extra_var_str(play_vars)
    logger.info("Arguments are %s", arg_str)
    success = execute_system_command(playbook, arg_str)
    logger.info('Exit')
    return success
def creating_docker_repo(playbook, proxy_data_file, var_file, docker_ip,
                         docker_port, apt_arch_src, src_pkg_path):
    """
    Run the playbook that creates the private Docker repository.

    :param playbook: path of the playbook file
    :return: True if the playbook succeeded, otherwise False
    """
    play_vars = {
        'PROXY_DATA_FILE': proxy_data_file,
        'VARIABLE_FILE': var_file,
        'docker_ip': docker_ip,
        'docker_port': docker_port,
        'APT_ARCHIVES_SRC': apt_arch_src,
        'SRC_PACKAGE_PATH': src_pkg_path,
    }
    arg_str = create_extra_var_str(play_vars)
    logger.info("Arguments are %s", arg_str)
    success = execute_system_command(playbook, arg_str)
    logger.info('Exit')
    return success
def create_inventory_file(playbook, src_pkg_path, var_file, cwd, project_name):
    """
    Run the playbook that creates the inventory file for the project.

    :param playbook: path of the playbook file
    :return: True if the playbook succeeded, otherwise False
    """
    play_vars = {
        'SRC_PACKAGE_PATH': src_pkg_path,
        'VARIABLE_FILE': var_file,
        'CURRENT_DIR': cwd,
        'Project_name': project_name,
    }
    arg_str = create_extra_var_str(play_vars)
    logger.info("Arguments are %s", arg_str)
    success = execute_system_command(playbook, arg_str)
    logger.info('Exit')
    return success
def docker_conf(playbook, target, host_name, proxy_data_file, var_file,
                docker_ip, docker_port):
    """
    Run the playbook that configures Docker on a target host.

    :param playbook: path of the playbook file
    :return: True if the playbook succeeded, otherwise False
    """
    play_vars = {
        'target': target,
        'host_name': host_name,
        'PROXY_DATA_FILE': proxy_data_file,
        'VARIABLE_FILE': var_file,
        'docker_ip': docker_ip,
        'docker_port': docker_port,
    }
    arg_str = create_extra_var_str(play_vars)
    logger.info("Arguments are %s", arg_str)
    success = execute_system_command(playbook, arg_str)
    logger.info('Exit')
    return success
def create_crd_network(playbook, ip, host_name, src_pkg_path, proxy_data_file):
    """
    Run the playbook that creates the CRD network for the given host.

    :param playbook: path of the playbook file
    :return: True if the playbook succeeded, otherwise False
    """
    play_vars = {
        'ip': ip,
        'host_name': host_name,
        'SRC_PACKAGE_PATH': src_pkg_path,
        'PROXY_DATA_FILE': proxy_data_file,
    }
    arg_str = create_extra_var_str(play_vars)
    logger.info("Arguments are %s", arg_str)
    success = execute_system_command(playbook, arg_str)
    logger.info('Exit')
    return success
def master_multus(playbook, ip, host_name, networking_plugin, src_pkg_path,
                  proxy_data_file):
    """
    Run the Multus playbook against the master host.

    :param playbook: path of the playbook file
    :return: True if the playbook succeeded, otherwise False
    """
    play_vars = {
        'ip': ip,
        'host_name': host_name,
        'networking_plugin': networking_plugin,
        'SRC_PACKAGE_PATH': src_pkg_path,
        'PROXY_DATA_FILE': proxy_data_file,
    }
    arg_str = create_extra_var_str(play_vars)
    logger.info("Arguments are %s", arg_str)
    success = execute_system_command(playbook, arg_str)
    logger.info('Exit')
    return success
def copy_multus(playbook, ip, host_name, networking_plugin, src_pkg_path):
    """
    Run the playbook that copies Multus artifacts to the given host.

    :param playbook: path of the playbook file
    :return: True if the playbook succeeded, otherwise False
    """
    play_vars = {
        'ip': ip,
        'host_name': host_name,
        'networking_plugin': networking_plugin,
        'SRC_PACKAGE_PATH': src_pkg_path,
    }
    arg_str = create_extra_var_str(play_vars)
    logger.info("Arguments are %s", arg_str)
    success = execute_system_command(playbook, arg_str)
    logger.info('Exit')
    return success
def node_multus(playbook, ip, host_name, networking_plugin, src_pkg_path):
    """
    Run the Multus playbook against a worker node.

    :param playbook: path of the playbook file
    :return: True if the playbook succeeded, otherwise False
    """
    play_vars = {
        'ip': ip,
        'host_name': host_name,
        'networking_plugin': networking_plugin,
        'SRC_PACKAGE_PATH': src_pkg_path,
    }
    arg_str = create_extra_var_str(play_vars)
    logger.info("Arguments are %s", arg_str)
    success = execute_system_command(playbook, arg_str)
    logger.info('Exit')
    return success
def master_flannel(playbook, ip, host_name, networking_plugin, network,
                   subnet_len, vni, src_pkg_path):
    """
    Run the Flannel playbook against the master host.

    :param playbook: path of the playbook file
    :return: True if the playbook succeeded, otherwise False
    """
    play_vars = {
        'ip': ip,
        'host_name': host_name,
        'networking_plugin': networking_plugin,
        'network': network,
        'subnetLen': subnet_len,
        'vni': vni,
        'SRC_PACKAGE_PATH': src_pkg_path,
    }
    arg_str = create_extra_var_str(play_vars)
    logger.info("Arguments are %s", arg_str)
    success = execute_system_command(playbook, arg_str)
    logger.info('Exit')
    return success
def node_flannel(playbook, ip, host_name, networking_plugin, network,
                 subnet_len, vni, master_ip, src_pkg_path):
    """
    Run the Flannel playbook against a worker node.

    :param playbook: path of the playbook file
    :return: True if the playbook succeeded, otherwise False
    """
    play_vars = {
        'ip': ip,
        'host_name': host_name,
        'networking_plugin': networking_plugin,
        'network': network,
        'subnetLen': subnet_len,
        'vni': vni,
        'master_ip': master_ip,
        'SRC_PACKAGE_PATH': src_pkg_path,
    }
    arg_str = create_extra_var_str(play_vars)
    logger.info("Arguments are %s", arg_str)
    success = execute_system_command(playbook, arg_str)
    logger.info('Exit')
    return success
def create_flannel_networks(playbook, ip, host_name, net_name, vni, vni_temp,
                            src_pkg_path, proxy_data_file):
    """
    Run the playbook that creates Flannel networks on the given host.

    :param playbook: path of the playbook file
    :return: True if the playbook succeeded, otherwise False
    """
    play_vars = {
        'ip': ip,
        'host_name': host_name,
        'networkName': net_name,
        'vni': vni,
        'vniTemp': vni_temp,
        'SRC_PACKAGE_PATH': src_pkg_path,
        'PROXY_DATA_FILE': proxy_data_file,
    }
    arg_str = create_extra_var_str(play_vars)
    logger.info("Arguments are %s", arg_str)
    success = execute_system_command(playbook, arg_str)
    logger.info('Exit')
    return success
def enable_sriov(playbook, host_name, intf, script, networking_plugin):
    """
    Run the playbook that enables SR-IOV on the given interface.

    :param playbook: path of the playbook file
    :return: True if the playbook succeeded, otherwise False
    """
    play_vars = {
        'host_name': host_name,
        'sriov_intf': intf,
        'script_path': script,
        'networking_plugin': networking_plugin,
    }
    arg_str = create_extra_var_str(play_vars)
    logger.info("Arguments are %s", arg_str)
    success = execute_system_command(playbook, arg_str)
    logger.info('Exit')
    return success
def build_sriov(playbook, src_pkg_path, proxy_data_file):
    """
    Run the playbook that builds the SR-IOV components.

    :param playbook: path of the playbook file
    :return: True if the playbook succeeded, otherwise False
    """
    play_vars = {
        'SRC_PACKAGE_PATH': src_pkg_path,
        'PROXY_DATA_FILE': proxy_data_file,
    }
    arg_str = create_extra_var_str(play_vars)
    logger.info("Arguments are %s", arg_str)
    success = execute_system_command(playbook, arg_str)
    logger.info('Exit')
    return success
def build_sriov_dpdk(playbook, src_pkg_path, proxy_data_file):
    """
    Run the playbook that builds the SR-IOV DPDK components.

    :param playbook: path of the playbook file
    :return: True if the playbook succeeded, otherwise False
    """
    play_vars = {
        'SRC_PACKAGE_PATH': src_pkg_path,
        'PROXY_DATA_FILE': proxy_data_file,
    }
    arg_str = create_extra_var_str(play_vars)
    logger.info("Arguments are %s", arg_str)
    success = execute_system_command(playbook, arg_str)
    logger.info('Exit')
    return success
def sriov_install(playbook, host_name, src_pkg_path):
    """Launch *playbook* with the host name and package path as extra vars."""
    payload = {
        'host_name': host_name,
        'SRC_PACKAGE_PATH': src_pkg_path,
    }
    extra_vars = create_extra_var_str(payload)
    logger.info("Arguments are %s", extra_vars)
    result = execute_system_command(playbook, extra_vars)
    logger.info('Exit')
    return result
def sriov_dpdk_install(playbook, host_name, src_pkg_path):
    """Launch *playbook* with the host name and package path as extra vars."""
    payload = {
        'host_name': host_name,
        'SRC_PACKAGE_PATH': src_pkg_path,
    }
    extra_vars = create_extra_var_str(payload)
    logger.info("Arguments are %s", extra_vars)
    result = execute_system_command(playbook, extra_vars)
    logger.info('Exit')
    return result
def dpdk_driver_load(playbook, host_name, dpdk_driver):
    """Launch *playbook* with the host name and DPDK driver as extra vars."""
    payload = {
        'host_name': host_name,
        'dpdk_driver': dpdk_driver,
    }
    extra_vars = create_extra_var_str(payload)
    logger.info("Arguments are %s", extra_vars)
    result = execute_system_command(playbook, extra_vars)
    logger.info('Exit')
    return result
def sriov_dpdk_crd_nw(playbook, sriov_intf,
                      host_name, nw_name, dpdk_driver, dpdk_tool,
                      node_hostname, master_plugin, proxy_data_file):
    """Launch *playbook* with the SR-IOV/DPDK CRD network values as extra vars."""
    payload = {
        'host_name': host_name,
        'intf': sriov_intf,
        'network_name': nw_name,
        'dpdk_driver': dpdk_driver,
        'dpdk_tool': dpdk_tool,
        'node_hostname': node_hostname,
        'masterPlugin': master_plugin,
        'PROXY_DATA_FILE': proxy_data_file,
    }
    extra_vars = create_extra_var_str(payload)
    logger.info("Arguments are %s", extra_vars)
    result = execute_system_command(playbook, extra_vars)
    logger.info('Exit')
    return result
def sriov_dhcp_crd_nw(playbook, sriov_intf,
                      host_name, nw_name, proxy_data_file):
    """Launch *playbook* with the SR-IOV DHCP CRD network values as extra vars."""
    payload = {
        'host_name': host_name,
        'intf': sriov_intf,
        'PROXY_DATA_FILE': proxy_data_file,
        'network_name': nw_name,
    }
    extra_vars = create_extra_var_str(payload)
    logger.info("Arguments are %s", extra_vars)
    result = execute_system_command(playbook, extra_vars)
    logger.info('Exit')
    return result
def sriov_crd_nw(playbook, sriov_intf, host_name,
                 nw_name, s_rng, e_rng, subnet, gw, master_plugin,
                 proxy_data_file):
    """Launch *playbook* with the SR-IOV CRD network values as extra vars."""
    payload = {
        'host_name': host_name,
        'intf': sriov_intf,
        'network_name': nw_name,
        'rangeStart': s_rng,
        'rangeEnd': e_rng,
        'subnet': subnet,
        'gateway': gw,
        'masterPlugin': master_plugin,
        'PROXY_DATA_FILE': proxy_data_file,
    }
    extra_vars = create_extra_var_str(payload)
    logger.info("Arguments are %s", extra_vars)
    result = execute_system_command(playbook, extra_vars)
    logger.info('Exit')
    return result
def vlantag_interface(playbook, host, parent_intf, vlan_id, ip):
    """Launch *playbook* with the VLAN-tag interface values as extra vars."""
    payload = {
        'host': host,
        'parentInterface': parent_intf,
        'vlanId': str(vlan_id),  # stringified so any int id serializes cleanly
        'ip': ip,
    }
    extra_vars = create_extra_var_str(payload)
    logger.info("Arguments are %s", extra_vars)
    result = execute_system_command(playbook, extra_vars)
    logger.info('Exit')
    return result
def vlantag_interface_removal(playbook, host, parent_intf, vlan_id):
    """Launch *playbook* with the VLAN-tag removal values as extra vars."""
    payload = {
        'host': host,
        'parentInterface': parent_intf,
        'vlanId': str(vlan_id),  # stringified so any int id serializes cleanly
    }
    extra_vars = create_extra_var_str(payload)
    logger.info("Arguments are %s", extra_vars)
    result = execute_system_command(playbook, extra_vars)
    logger.info('Exit')
    return result
def network_creation(playbook, host, network_name, interface_node, subnet,
                     range_start, range_end, dst, gateway, proxy_data_file):
    """Launch *playbook* with the network-creation values as extra vars."""
    payload = {
        'host': host,
        'network_name': network_name,
        'interface_node': interface_node,
        'subnet': subnet,
        'rangeStart': range_start,
        'rangeEnd': range_end,
        'dst': dst,
        'gateway': gateway,
        'PROXY_DATA_FILE': proxy_data_file,
    }
    extra_vars = create_extra_var_str(payload)
    logger.info("Arguments are %s", extra_vars)
    result = execute_system_command(playbook, extra_vars)
    logger.info('Exit')
    return result
def network_removal(playbook, host, network_name, proxy_data_file):
    """Launch *playbook* with the network-removal values as extra vars."""
    payload = {
        'host': host,
        'network_name': network_name,
        'PROXY_DATA_FILE': proxy_data_file,
    }
    extra_vars = create_extra_var_str(payload)
    logger.info("Arguments are %s", extra_vars)
    result = execute_system_command(playbook, extra_vars)
    logger.info('Exit')
    return result
def network_dhcp_creation(playbook, host, network_name, interface_node,
                          proxy_data_file):
    """Launch *playbook* with the DHCP network-creation values as extra vars."""
    payload = {
        'host': host,
        'network_name': network_name,
        'interface_node': interface_node,
        'PROXY_DATA_FILE': proxy_data_file,
    }
    extra_vars = create_extra_var_str(payload)
    logger.info("Arguments are %s", extra_vars)
    result = execute_system_command(playbook, extra_vars)
    logger.info('Exit')
    return result
def dhcp_daemon_creation(playbook, host):
    """Launch *playbook* with *host* as its only extra var."""
    extra_vars = create_extra_var_str({'host': host})
    logger.info("Arguments are %s", extra_vars)
    result = execute_system_command(playbook, extra_vars)
    logger.info('Exit')
    return result
def dhcp_daemon_removal(playbook, host):
    """Launch *playbook* with *host* as its only extra var."""
    extra_vars = create_extra_var_str({'host': host})
    logger.info("Arguments are %s", extra_vars)
    result = execute_system_command(playbook, extra_vars)
    logger.info('Exit')
    return result
def clean_docker(playbook, host_name):
    """Launch *playbook* with *host_name* as its only extra var.

    :param playbook: playbook to execute
    :param host_name: target host name
    :return: exit status from execute_system_command
    """
    extra_var_str = create_extra_var_str({'host_name': host_name})
    # Dropped the redundant str() around extra_var_str: %s stringifies the
    # argument anyway, and every sibling helper logs the value directly.
    logger.info("Arguments are %s", extra_var_str)
    retval = execute_system_command(playbook, extra_var_str)
    logger.info('Exit')
    return retval
def create_weave_network(playbook, ip, host_name, network_name, subnet,
                         master_plugin, src_pkg_path, proxy_data_file):
    """Launch *playbook* with the weave network values as extra vars."""
    payload = {
        'ip': ip,
        'host_name': host_name,
        'networkName': network_name,
        'subnet': subnet,
        'masterPlugin': master_plugin,
        'SRC_PACKAGE_PATH': src_pkg_path,
        'PROXY_DATA_FILE': proxy_data_file,
    }
    extra_vars = create_extra_var_str(payload)
    logger.info("Arguments are %s", extra_vars)
    result = execute_system_command(playbook, extra_vars)
    logger.info('Exit')
    return result
def delete_weave_conf(playbook, ip, host_name, networking_plugin,
                      src_pkg_path):
    """Launch *playbook* with the weave configuration values as extra vars."""
    payload = {
        'ip': ip,
        'host_name': host_name,
        'networking_plugin': networking_plugin,
        'SRC_PACKAGE_PATH': src_pkg_path,
    }
    extra_vars = create_extra_var_str(payload)
    logger.info("Arguments are %s", extra_vars)
    result = execute_system_command(playbook, extra_vars)
    logger.info('Exit')
    return result
def copy_weave_cni(playbook, ip, host_name, subnet, src_pkg_path):
    """Launch *playbook* with the weave CNI values as extra vars."""
    payload = {
        'ip': ip,
        'host_name': host_name,
        'subnet': subnet,
        'SRC_PACKAGE_PATH': src_pkg_path,
    }
    extra_vars = create_extra_var_str(payload)
    logger.info("Arguments are %s", extra_vars)
    result = execute_system_command(playbook, extra_vars)
    logger.info('Exit')
    return result
def delete_conf_files(playbook, ip, host_name, networking_plugin,
                      src_pkg_path):
    """Launch *playbook* with the configuration-file values as extra vars."""
    payload = {
        'ip': ip,
        'host_name': host_name,
        'networking_plugin': networking_plugin,
        'SRC_PACKAGE_PATH': src_pkg_path,
    }
    extra_vars = create_extra_var_str(payload)
    logger.info("Arguments are %s", extra_vars)
    result = execute_system_command(playbook, extra_vars)
    logger.info('Exit')
    return result
def metrics_server(playbook, ip, host_name, proxy_data_file):
    """Launch the metrics-server *playbook* with the values below as extra vars."""
    payload = {
        'ip': ip,
        'host_name': host_name,
        'PROXY_DATA_FILE': proxy_data_file,
    }
    extra_vars = create_extra_var_str(payload)
    logger.info("Arguments are %s", extra_vars)
    result = execute_system_command(playbook, extra_vars)
    logger.info('Exit')
    return result
def metrics_server_clean(playbook, ip, host_name, proxy_data_file):
    """Launch the metrics-server removal *playbook* with the values below."""
    payload = {
        'ip': ip,
        'host_name': host_name,
        'PROXY_DATA_FILE': proxy_data_file,
    }
    extra_vars = create_extra_var_str(payload)
    logger.info("Arguments are %s", extra_vars)
    result = execute_system_command(playbook, extra_vars)
    logger.info('Exit')
    return result
def create_default_network(playbook, ip, host_name, network_name, subnet,
                           networking_plugin, master_plugin, src_pkg_path,
                           proxy_data_file):
    """Launch *playbook* with the default network values as extra vars."""
    payload = {
        'ip': ip,
        'host_name': host_name,
        'networkName': network_name,
        'subnet': subnet,
        'networking_plugin': networking_plugin,
        'masterPlugin': master_plugin,
        'SRC_PACKAGE_PATH': src_pkg_path,
        'PROXY_DATA_FILE': proxy_data_file,
    }
    extra_vars = create_extra_var_str(payload)
    logger.info("Arguments are %s", extra_vars)
    result = execute_system_command(playbook, extra_vars)
    logger.info('Exit')
    return result
def flannel_daemon(playbook, ip, network, cidr, master_plugin, src_pkg_path):
    """Launch *playbook* with the flannel daemon values as extra vars."""
    payload = {
        'ip': ip,
        'network': network,
        'cidr': str(cidr),  # stringified so a non-str cidr serializes cleanly
        'masterPlugin': master_plugin,
        'SRC_PACKAGE_PATH': src_pkg_path,
    }
    extra_vars = create_extra_var_str(payload)
    logger.info("Arguments are %s", extra_vars)
    result = execute_system_command(playbook, extra_vars)
    logger.info('Exit')
    return result
def create_flannel_interface(playbook, ip, host_name, network_name, network,
                             master_plugin, src_pkg_path, proxy_data_file):
    """Launch *playbook* with the flannel interface values as extra vars."""
    payload = {
        'ip': ip,
        'host_name': host_name,
        'networkName': network_name,
        'network': network,
        'masterPlugin': master_plugin,
        'SRC_PACKAGE_PATH': src_pkg_path,
        'PROXY_DATA_FILE': proxy_data_file,
    }
    extra_vars = create_extra_var_str(payload)
    logger.info("Arguments are %s", extra_vars)
    result = execute_system_command(playbook, extra_vars)
    logger.info('Exit')
    return result
def copy_flannel_cni(playbook, ip, host_name, network, src_pkg_path):
    """Launch *playbook* with the flannel CNI values as extra vars."""
    payload = {
        'ip': ip,
        'host_name': host_name,
        'network': network,
        'SRC_PACKAGE_PATH': src_pkg_path,
    }
    extra_vars = create_extra_var_str(payload)
    logger.info("Arguments are %s", extra_vars)
    result = execute_system_command(playbook, extra_vars)
    logger.info('Exit')
    return result
def clean_sriov_rc_local(playbook, host_name, sriov_intf):
    """Launch *playbook* with the host name and SR-IOV interface as extra vars."""
    payload = {
        'host_name': host_name,
        'sriov_intf': sriov_intf,
    }
    extra_vars = create_extra_var_str(payload)
    logger.info("Arguments are %s", extra_vars)
    result = execute_system_command(playbook, extra_vars)
    logger.info('Exit')
    return result
def weave_reclaim_ip(playbook, ip, host_name, node_hostname,
                     src_pkg_path):
    """Launch *playbook* with the weave reclaim values as extra vars."""
    payload = {
        'ip': ip,
        'host_name': host_name,
        'node_hostname': node_hostname,
        'SRC_PACKAGE_PATH': src_pkg_path,
    }
    extra_vars = create_extra_var_str(payload)
    logger.info("Arguments are %s", extra_vars)
    result = execute_system_command(playbook, extra_vars)
    logger.info('Exit')
    return result
def weave_forget_ip(playbook, ip, host_name, node_hostname1, src_pkg_path):
    """Launch *playbook* with the weave forget values as extra vars."""
    payload = {
        'ip': ip,
        'host_name': host_name,
        'node_hostname1': node_hostname1,
        'SRC_PACKAGE_PATH': src_pkg_path,
    }
    extra_vars = create_extra_var_str(payload)
    logger.info("Arguments are %s", extra_vars)
    result = execute_system_command(playbook, extra_vars)
    logger.info('Exit')
    return result
def install_dhcp_daemon(playbook, host):
    """Launch *playbook* with *host* as its only extra var."""
    extra_vars = create_extra_var_str({'host': host})
    logger.info("Arguments are %s", extra_vars)
    result = execute_system_command(playbook, extra_vars)
    logger.info('Exit')
    return result
def dhcp_cleanup_network(playbook, host):
    """Launch *playbook* with *host* as its only extra var."""
    extra_vars = create_extra_var_str({'host': host})
    logger.info("Arguments are %s", extra_vars)
    result = execute_system_command(playbook, extra_vars)
    logger.info('Exit')
    return result
def remove_sriov_networks(playbook, host, network_name):
    """Launch *playbook* with the host and network name as extra vars."""
    payload = {
        'host': host,
        'networkName': network_name,
    }
    extra_vars = create_extra_var_str(payload)
    logger.info("Arguments are %s", extra_vars)
    result = execute_system_command(playbook, extra_vars)
    logger.info('Exit')
    return result
class KubectlPlayBookLauncher(object):
    """Stateless holder for the kubectl-related playbook launchers."""

    def __init__(self):
        pass

    def launch_install_kubectl(self, playbook, ip, host_name, ha_enabled,
                               project_name, lb_ip, var_file, src_pkg_path,
                               proxy_data_file):
        """Run the kubectl installation playbook.

        :param playbook: playbook to execute
        :param ip: target ip
        :param host_name: target host name
        :param ha_enabled: HA flag passed through as an extra var
        :param project_name: project name (extra var ``Project_name``)
        :param lb_ip: load-balancer ip
        :param var_file: variable file (extra var ``VARIABLE_FILE``)
        :param src_pkg_path: source package path
        :param proxy_data_file: proxy data file
        :return: exit status from execute_system_command
        """
        payload = {
            'ip': ip,
            'host_name': host_name,
            'ha_enabled': ha_enabled,
            'Project_name': project_name,
            'lb_ip': lb_ip,
            'VARIABLE_FILE': var_file,
            'SRC_PACKAGE_PATH': src_pkg_path,
            'PROXY_DATA_FILE': proxy_data_file,
        }
        extra_vars = create_extra_var_str(payload)
        logger.info("Arguments are %s", extra_vars)
        result = execute_system_command(playbook, extra_vars)
        logger.info('Exit')
        return result

    def launch_set_kubectl_context(self, playbook, project_name, var_file,
                                   src_pkg_path, proxy_data_file):
        """Run the playbook that sets the kubectl context.

        :param playbook: playbook to execute
        :param project_name: project name (extra var ``Project_name``)
        :param var_file: variable file (extra var ``VARIABLE_FILE``)
        :param src_pkg_path: source package path
        :param proxy_data_file: proxy data file
        :return: exit status from execute_system_command
        """
        payload = {
            'Project_name': project_name,
            'VARIABLE_FILE': var_file,
            'SRC_PACKAGE_PATH': src_pkg_path,
            'PROXY_DATA_FILE': proxy_data_file,
        }
        extra_vars = create_extra_var_str(payload)
        logger.info("Arguments are %s", extra_vars)
        result = execute_system_command(playbook, extra_vars)
        logger.info('Exit')
        return result
class CleanUpMultusPlayBookLauncher(object):
    """Stateless holder for the multus clean-up playbook launchers.

    Unlike the module-level helpers, these run playbooks through
    execute_system_cmd_subprocess.
    """

    def __init__(self):
        pass

    def launch_delete_flannel_interfaces(self, playbook, ip, host_name,
                                         node_type,
                                         network_name, src_pkg_path,
                                         proxy_data_file):
        """Run the playbook deleting flannel interfaces on a node."""
        payload = {
            'ip': ip,
            'host_name': host_name,
            'node_type': node_type,
            'networkName': network_name,
            'SRC_PACKAGE_PATH': src_pkg_path,
            'PROXY_DATA_FILE': proxy_data_file,
        }
        extra_vars = create_extra_var_str(payload)
        logger.info("Arguments are %s", extra_vars)
        result = execute_system_cmd_subprocess(playbook, extra_vars)
        logger.info('Exit')
        return result

    def launch_delete_weave_interface(self, playbook, ip, host_name, node_type,
                                      network_name, src_pkg_path,
                                      proxy_data_file):
        """Run the playbook deleting the weave interface on a node."""
        payload = {
            'ip': ip,
            'host_name': host_name,
            'node_type': node_type,
            'networkName': network_name,
            'SRC_PACKAGE_PATH': src_pkg_path,
            'PROXY_DATA_FILE': proxy_data_file,
        }
        extra_vars = create_extra_var_str(payload)
        logger.info("Arguments are %s", extra_vars)
        result = execute_system_cmd_subprocess(playbook, extra_vars)
        logger.info('Exit')
        return result
| 31.150198
| 79
| 0.663799
| 5,095
| 39,405
| 4.717174
| 0.06104
| 0.100524
| 0.138221
| 0.062453
| 0.851377
| 0.830573
| 0.801198
| 0.786594
| 0.769452
| 0.747691
| 0
| 0.000633
| 0.238041
| 39,405
| 1,264
| 80
| 31.174842
| 0.799833
| 0.061084
| 0
| 0.816737
| 0
| 0
| 0.147492
| 0.001339
| 0
| 0
| 0
| 0
| 0
| 1
| 0.083686
| false
| 0.008475
| 0.003178
| 0
| 0.172669
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
1ce0adad7c652c9d4cc100096a0beda4b590fda0
| 1,379
|
py
|
Python
|
app/quinoas/migrations/0008_auto_20190926_0218.py
|
ZeroPaul/Agrain-US
|
afa4055eaae47d33d8bfeeee59647465a219cbd5
|
[
"Apache-2.0"
] | null | null | null |
app/quinoas/migrations/0008_auto_20190926_0218.py
|
ZeroPaul/Agrain-US
|
afa4055eaae47d33d8bfeeee59647465a219cbd5
|
[
"Apache-2.0"
] | null | null | null |
app/quinoas/migrations/0008_auto_20190926_0218.py
|
ZeroPaul/Agrain-US
|
afa4055eaae47d33d8bfeeee59647465a219cbd5
|
[
"Apache-2.0"
] | null | null | null |
# Generated by Django 2.2.4 on 2019-09-26 02:18
from django.db import migrations, models
class Migration(migrations.Migration):
    """Redefine six Sample decimal fields with identical definitions.

    NOTE(review): decimal_places == max_digits leaves no integer digits,
    so only values in (-1, 1) fit — confirm this is intended.
    """

    dependencies = [
        ('quinoas', '0007_sample'),
    ]

    # All six grain columns receive the same field definition, so build the
    # AlterField operations from one template instead of repeating it.
    operations = [
        migrations.AlterField(
            model_name='sample',
            name=column,
            field=models.DecimalField(decimal_places=9, default=0.0,
                                      max_digits=9),
        )
        for column in ('broken_grain', 'coated_grain', 'damaged_grain',
                       'germinated_grain', 'immature_grain', 'whole_grain')
    ]
| 31.340909
| 83
| 0.585207
| 147
| 1,379
| 5.319728
| 0.285714
| 0.153453
| 0.191816
| 0.222506
| 0.767263
| 0.767263
| 0.717391
| 0.717391
| 0.717391
| 0.717391
| 0
| 0.044284
| 0.295867
| 1,379
| 43
| 84
| 32.069767
| 0.761071
| 0.032632
| 0
| 0.648649
| 1
| 0
| 0.099099
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.027027
| 0
| 0.108108
| 0
| 0
| 0
| 0
| null | 0
| 1
| 1
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
e8133842d217fc5be6a4717a7d86ebf492d27634
| 30,562
|
py
|
Python
|
commissioning/wfc3_uvis2_spec_18_cases.py
|
dobos/pysynphot
|
5d2e0b52ceda78890940ac9239c2d88e149e0bed
|
[
"BSD-3-Clause"
] | 24
|
2015-01-04T23:38:21.000Z
|
2022-02-01T00:11:07.000Z
|
commissioning/wfc3_uvis2_spec_18_cases.py
|
dobos/pysynphot
|
5d2e0b52ceda78890940ac9239c2d88e149e0bed
|
[
"BSD-3-Clause"
] | 126
|
2015-01-29T14:50:37.000Z
|
2022-02-15T01:58:13.000Z
|
commissioning/wfc3_uvis2_spec_18_cases.py
|
dobos/pysynphot
|
5d2e0b52ceda78890940ac9239c2d88e149e0bed
|
[
"BSD-3-Clause"
] | 25
|
2015-02-09T12:12:02.000Z
|
2021-09-09T13:06:54.000Z
|
from pytools import testutil
import sys
import basecase
class calcspecCase1(basecase.calcspecCase):
    """Generated calcspec case (no obsmode)."""
    def setUp(self):
        """Assign the case parameters, then execute immediately via runpy()."""
        self.obsmode = "None"
        self.spectrum = "icat(k93models,9230,0.0,4.1)"
        self.subset = False
        self.etcid = "None"
        self.setglobal(__file__)
        self.runpy()
class countrateCase1(basecase.countrateCase):
    """Generated countrate case covering uvsp2006 rows 0000-0149."""
    def setUp(self):
        """Assign the case parameters, then execute immediately via runpy()."""
        self.obsmode = "wfc3,uvis2,g280,bkg"
        self.spectrum = "spec(earthshine.fits)*0.5+rn(spec(Zodi.fits),band(johnson,v),22.7,vegamag)+(spec(el1215a.fits)+spec(el1302a.fits)+spec(el1356a.fits)+spec(el2471a.fits))"
        self.subset = False
        # etcid is the repr of the full uvsp2006 row-id list (0000..0149);
        # building it programmatically yields the byte-identical string.
        self.etcid = str(['uvsp2006.tab:%04d' % row for row in range(150)])
        self.setglobal(__file__)
        self.runpy()
class calcphotCase1(basecase.calcphotCase):
    """Generated calcphot case covering uvsp2006 rows 0000-0149."""
    def setUp(self):
        """Assign the case parameters, then execute immediately via runpy()."""
        self.obsmode = "wfc3,uvis2,g280,bkg"
        self.spectrum = "spec(earthshine.fits)*0.5+rn(spec(Zodi.fits),band(johnson,v),22.7,vegamag)+(spec(el1215a.fits)+spec(el1302a.fits)+spec(el1356a.fits)+spec(el2471a.fits))"
        self.subset = False
        # etcid is the repr of the full uvsp2006 row-id list (0000..0149);
        # building it programmatically yields the byte-identical string.
        self.etcid = str(['uvsp2006.tab:%04d' % row for row in range(150)])
        self.setglobal(__file__)
        self.runpy()
class SpecSourcerateSpecCase1(basecase.SpecSourcerateSpecCase):
    """Generated case over uvsp2006 rows 0000-0101 minus the Case2 subset."""
    def setUp(self):
        """Assign the case parameters, then execute immediately via runpy()."""
        self.obsmode = "wfc3,uvis2,g280"
        self.spectrum = "rn(icat(k93models,9230,0.0,4.1),band(johnson,v),23.0,vegamag)"
        self.subset = False
        # etcid is the repr of rows 0000..0101 excluding the six rows owned
        # by SpecSourcerateSpecCase2; the generated list is byte-identical
        # to the original literal.
        self.etcid = str(['uvsp2006.tab:%04d' % row for row in range(102)
                          if row not in (1, 6, 11, 16, 21, 26)])
        self.setglobal(__file__)
        self.runpy()
class SpecSourcerateSpecCase2(basecase.SpecSourcerateSpecCase):
    """Generated subset case over six uvsp2006 rows."""
    def setUp(self):
        """Assign the case parameters, then execute immediately via runpy()."""
        self.obsmode = "wfc3,uvis2,g280"
        self.spectrum = "rn(icat(k93models,9230,0.0,4.1),band(johnson,v),18.0,vegamag)"
        self.subset = True
        self.etcid = "['uvsp2006.tab:0001', 'uvsp2006.tab:0006', 'uvsp2006.tab:0011', 'uvsp2006.tab:0016', 'uvsp2006.tab:0021', 'uvsp2006.tab:0026']"
        self.setglobal(__file__)
        self.runpy()
class SpecSourcerateSpecCase4(basecase.SpecSourcerateSpecCase):
    """Generated case over uvsp2006 rows 0102-0125."""
    def setUp(self):
        """Assign the case parameters, then execute immediately via runpy()."""
        self.obsmode = "wfc3,uvis2,g280"
        self.spectrum = "rn(icat(k93models,9230,0.0,4.1),band(johnson,v),17.0,vegamag)"
        self.subset = False
        # etcid is the repr of rows 0102..0125; the generated list is
        # byte-identical to the original literal.
        self.etcid = str(['uvsp2006.tab:%04d' % row for row in range(102, 126)])
        self.setglobal(__file__)
        self.runpy()
class calcspecCase127(basecase.calcspecCase):
    """Generated calcspec case (no obsmode)."""
    def setUp(self):
        """Assign the case parameters, then execute immediately via runpy()."""
        self.obsmode = "None"
        self.spectrum = "icat(k93models,30000,0.0,4.0)"
        self.subset = False
        self.etcid = "None"
        self.setglobal(__file__)
        self.runpy()
class SpecSourcerateSpecCase5(basecase.SpecSourcerateSpecCase):
    """Generated case for uvsp2006 row 0126."""
    def setUp(self):
        """Assign the case parameters, then execute immediately via runpy()."""
        self.obsmode = "wfc3,uvis2,g280"
        self.spectrum = "rn(icat(k93models,30000,0.0,4.0)*ebmvx(0.04,gal1),band(johnson,b),23.0,vegamag)"
        self.subset = False
        self.etcid = "uvsp2006.tab:0126"
        self.setglobal(__file__)
        self.runpy()
class calcspecCase128(basecase.calcspecCase):
    """Generated calcspec case (no obsmode)."""
    def setUp(self):
        """Assign the case parameters, then execute immediately via runpy()."""
        self.obsmode = "None"
        self.spectrum = "icat(k93models,25400,0.0,3.9)"
        self.subset = False
        self.etcid = "None"
        self.setglobal(__file__)
        self.runpy()
class SpecSourcerateSpecCase6(basecase.SpecSourcerateSpecCase):
    """Generated case with no associated etc row."""
    def setUp(self):
        """Assign the case parameters, then execute immediately via runpy()."""
        self.obsmode = "wfc3,uvis2,g280"
        self.spectrum = "rn(icat(k93models,25400,0.0,3.9)*ebmvx(0.08,gal1),band(johnson,b),23.0,vegamag)"
        self.subset = False
        self.etcid = "None"
        self.setglobal(__file__)
        self.runpy()
class calcspecCase129(basecase.calcspecCase):
    """Generated calcspec case (no obsmode)."""
    def setUp(self):
        """Assign the case parameters, then execute immediately via runpy()."""
        self.obsmode = "None"
        self.spectrum = "icat(k93models,18700,0.0,3.9)"
        self.subset = False
        self.etcid = "None"
        self.setglobal(__file__)
        self.runpy()
class SpecSourcerateSpecCase7(basecase.SpecSourcerateSpecCase):
    """Generated case for uvsp2006 row 0128."""
    def setUp(self):
        """Assign the case parameters, then execute immediately via runpy()."""
        self.obsmode = "wfc3,uvis2,g280"
        self.spectrum = "rn(icat(k93models,18700,0.0,3.9)*ebmvx(0.12,gal3),band(johnson,b),23.0,vegamag)"
        self.subset = False
        self.etcid = "uvsp2006.tab:0128"
        self.setglobal(__file__)
        self.runpy()
class calcspecCase130(basecase.calcspecCase):
    """Generated calcspec case (no obsmode)."""
    def setUp(self):
        """Assign the case parameters, then execute immediately via runpy()."""
        self.obsmode = "None"
        self.spectrum = "icat(k93models,15400,0.0,3.9)"
        self.subset = False
        self.etcid = "None"
        self.setglobal(__file__)
        self.runpy()
class SpecSourcerateSpecCase8(basecase.SpecSourcerateSpecCase):
    """Generated case for uvsp2006 row 0129."""
    def setUp(self):
        """Assign the case parameters, then execute immediately via runpy()."""
        self.obsmode = "wfc3,uvis2,g280"
        self.spectrum = "rn(icat(k93models,15400,0.0,3.9)*ebmvx(0.16,smc),band(johnson,b),23.0,vegamag)"
        self.subset = False
        self.etcid = "uvsp2006.tab:0129"
        self.setglobal(__file__)
        self.runpy()
class calcspecCase131(basecase.calcspecCase):
    """Generated calcspec case (no obsmode)."""
    def setUp(self):
        """Assign the case parameters, then execute immediately via runpy()."""
        self.obsmode = "None"
        self.spectrum = "icat(k93models,11900,0.0,4.0)"
        self.subset = False
        self.etcid = "None"
        self.setglobal(__file__)
        self.runpy()
class SpecSourcerateSpecCase9(basecase.SpecSourcerateSpecCase):
    """Generated case for uvsp2006 row 0130."""
    def setUp(self):
        """Assign the case parameters, then execute immediately via runpy()."""
        self.obsmode = "wfc3,uvis2,g280"
        self.spectrum = "rn(icat(k93models,11900,0.0,4.0)*ebmvx(0.2,lmc),band(johnson,b),23.0,vegamag)"
        self.subset = False
        self.etcid = "uvsp2006.tab:0130"
        self.setglobal(__file__)
        self.runpy()
class SpecSourcerateSpecCase10(basecase.SpecSourcerateSpecCase):
    """Generated case for uvsp2006 row 0131."""
    def setUp(self):
        """Assign the case parameters, then execute immediately via runpy()."""
        self.obsmode = "wfc3,uvis2,g280"
        self.spectrum = "rn(icat(k93models,9230,0.0,4.1)*ebmvx(0.24,xgal),band(johnson,b),23.0,vegamag)"
        self.subset = False
        self.etcid = "uvsp2006.tab:0131"
        self.setglobal(__file__)
        self.runpy()
class SpecSourcerateSpecCase11(basecase.SpecSourcerateSpecCase):
    """Generated case for uvsp2006 row 0132."""
    def setUp(self):
        """Assign the case parameters, then execute immediately via runpy()."""
        self.obsmode = "wfc3,uvis2,g280"
        self.spectrum = "rn(spec(/grp/hst/cdbs//grid/pickles/dat_uvk/pickles_uk_1.fits),band(cousins,i),23.0,vegamag)*ebmvx(0.04,gal1)"
        self.subset = False
        self.etcid = "uvsp2006.tab:0132"
        self.setglobal(__file__)
        self.runpy()
class SpecSourcerateSpecCase12(basecase.SpecSourcerateSpecCase):
    """Generated case with no associated etc row."""
    def setUp(self):
        """Assign the case parameters, then execute immediately via runpy()."""
        self.obsmode = "wfc3,uvis2,g280"
        self.spectrum = "rn(spec(/grp/hst/cdbs//grid/pickles/dat_uvk/pickles_uk_1.fits),band(cousins,i),23.0,vegamag)*ebmvx(0.08,gal1)"
        self.subset = False
        self.etcid = "None"
        self.setglobal(__file__)
        self.runpy()
class SpecSourcerateSpecCase13(basecase.SpecSourcerateSpecCase):
    """Generated case for uvsp2006 row 0134."""
    def setUp(self):
        """Assign the case parameters, then execute immediately via runpy()."""
        self.obsmode = "wfc3,uvis2,g280"
        self.spectrum = "rn(spec(/grp/hst/cdbs//grid/pickles/dat_uvk/pickles_uk_2.fits),band(cousins,i),23.0,vegamag)*ebmvx(0.12,gal3)"
        self.subset = False
        self.etcid = "uvsp2006.tab:0134"
        self.setglobal(__file__)
        self.runpy()
class SpecSourcerateSpecCase14(basecase.SpecSourcerateSpecCase):
    """Generated case for uvsp2006 row 0135."""
    def setUp(self):
        """Assign the case parameters, then execute immediately via runpy()."""
        self.obsmode = "wfc3,uvis2,g280"
        self.spectrum = "rn(spec(/grp/hst/cdbs//grid/pickles/dat_uvk/pickles_uk_2.fits),band(cousins,i),23.0,vegamag)*ebmvx(0.16,smc)"
        self.subset = False
        self.etcid = "uvsp2006.tab:0135"
        self.setglobal(__file__)
        self.runpy()
class SpecSourcerateSpecCase15(basecase.SpecSourcerateSpecCase):
    """Generated case for uvsp2006 row 0136."""
    def setUp(self):
        """Assign the case parameters, then execute immediately via runpy()."""
        self.obsmode = "wfc3,uvis2,g280"
        self.spectrum = "rn(spec(/grp/hst/cdbs//grid/pickles/dat_uvk/pickles_uk_3.fits),band(cousins,i),23.0,vegamag)*ebmvx(0.2,lmc)"
        self.subset = False
        self.etcid = "uvsp2006.tab:0136"
        self.setglobal(__file__)
        self.runpy()
class SpecSourcerateSpecCase16(basecase.SpecSourcerateSpecCase):
    """Generated case for uvsp2006 row 0137."""
    def setUp(self):
        """Assign the case parameters, then execute immediately via runpy()."""
        self.obsmode = "wfc3,uvis2,g280"
        self.spectrum = "rn(spec(/grp/hst/cdbs//grid/pickles/dat_uvk/pickles_uk_4.fits),band(cousins,i),23.0,vegamag)*ebmvx(0.24,xgal)"
        self.subset = False
        self.etcid = "uvsp2006.tab:0137"
        self.setglobal(__file__)
        self.runpy()
class SpecSourcerateSpecCase17(basecase.SpecSourcerateSpecCase):
    """Generated case for uvsp2006 row 0138."""
    def setUp(self):
        """Assign the case parameters, then execute immediately via runpy()."""
        self.obsmode = "wfc3,uvis2,g280"
        self.spectrum = "rn(spec(/grp/hst/cdbs//grid/pickles/dat_uvk/pickles_uk_5.fits),band(cousins,i),23.0,vegamag)*ebmvx(0.04,gal1)"
        self.subset = False
        self.etcid = "uvsp2006.tab:0138"
        self.setglobal(__file__)
        self.runpy()
class SpecSourcerateSpecCase18(basecase.SpecSourcerateSpecCase):
    """Generated case with no associated etc row."""
    def setUp(self):
        """Assign the case parameters, then execute immediately via runpy()."""
        self.obsmode = "wfc3,uvis2,g280"
        self.spectrum = "rn(spec(/grp/hst/cdbs//grid/pickles/dat_uvk/pickles_uk_5.fits),band(cousins,i),23.0,vegamag)*ebmvx(0.08,gal1)"
        self.subset = False
        self.etcid = "None"
        self.setglobal(__file__)
        self.runpy()
class SpecSourcerateSpecCase19(basecase.SpecSourcerateSpecCase):
def setUp(self):
self.obsmode="wfc3,uvis2,g280"
self.spectrum="rn(spec(/grp/hst/cdbs//grid/pickles/dat_uvk/pickles_uk_6.fits),band(cousins,i),23.0,vegamag)*ebmvx(0.12,gal3)"
self.subset=False
self.etcid="uvsp2006.tab:0140"
self.setglobal(__file__)
self.runpy()
class SpecSourcerateSpecCase20(basecase.SpecSourcerateSpecCase):
def setUp(self):
self.obsmode="wfc3,uvis2,g280"
self.spectrum="rn(spec(/grp/hst/cdbs//grid/pickles/dat_uvk/pickles_uk_10.fits),band(cousins,i),23.0,vegamag)*ebmvx(0.16,smc)"
self.subset=False
self.etcid="None"
self.setglobal(__file__)
self.runpy()
class SpecSourcerateSpecCase21(basecase.SpecSourcerateSpecCase):
def setUp(self):
self.obsmode="wfc3,uvis2,g280"
self.spectrum="rn(spec(/grp/hst/cdbs//grid/pickles/dat_uvk/pickles_uk_11.fits),band(cousins,i),23.0,vegamag)*ebmvx(0.2,lmc)"
self.subset=False
self.etcid="None"
self.setglobal(__file__)
self.runpy()
class SpecSourcerateSpecCase22(basecase.SpecSourcerateSpecCase):
def setUp(self):
self.obsmode="wfc3,uvis2,g280"
self.spectrum="rn(spec(/grp/hst/cdbs//grid/pickles/dat_uvk/pickles_uk_12.fits),band(cousins,i),23.0,vegamag)*ebmvx(0.24,xgal)"
self.subset=False
self.etcid="None"
self.setglobal(__file__)
self.runpy()
class SpecSourcerateSpecCase23(basecase.SpecSourcerateSpecCase):
def setUp(self):
self.obsmode="wfc3,uvis2,g280"
self.spectrum="rn(spec(/grp/hst/cdbs//grid/pickles/dat_uvk/pickles_uk_9.fits),band(cousins,i),23.0,vegamag)*ebmvx(0.04,gal1)"
self.subset=False
self.etcid="uvsp2006.tab:0144"
self.setglobal(__file__)
self.runpy()
class SpecSourcerateSpecCase24(basecase.SpecSourcerateSpecCase):
def setUp(self):
self.obsmode="wfc3,uvis2,g280"
self.spectrum="rn(spec(/grp/hst/cdbs//grid/pickles/dat_uvk/pickles_uk_14.fits),band(cousins,i),23.0,vegamag)*ebmvx(0.08,gal1)"
self.subset=False
self.etcid="None"
self.setglobal(__file__)
self.runpy()
class SpecSourcerateSpecCase25(basecase.SpecSourcerateSpecCase):
def setUp(self):
self.obsmode="wfc3,uvis2,g280"
self.spectrum="rn(z(spec(/grp/hst/cdbs//calspec/g191b2b_mod_004.fits),0.05),band(johnson,b),23.0,vegamag)"
self.subset=False
self.etcid="uvsp2006.tab:0146"
self.setglobal(__file__)
self.runpy()
class SpecSourcerateSpecCase26(basecase.SpecSourcerateSpecCase):
def setUp(self):
self.obsmode="wfc3,uvis2,g280"
self.spectrum="rn(z(spec(/grp/hst/cdbs//calspec/gd153_mod_004.fits),0.1),band(johnson,b),23.0,vegamag)"
self.subset=False
self.etcid="uvsp2006.tab:0147"
self.setglobal(__file__)
self.runpy()
class SpecSourcerateSpecCase27(basecase.SpecSourcerateSpecCase):
def setUp(self):
self.obsmode="wfc3,uvis2,g280"
self.spectrum="rn(z(spec(/grp/hst/cdbs//calspec/gd71_mod_005.fits),0.15),band(johnson,b),23.0,vegamag)"
self.subset=False
self.etcid="uvsp2006.tab:0148"
self.setglobal(__file__)
self.runpy()
class calcspecCase133(basecase.calcspecCase):
def setUp(self):
self.obsmode="None"
self.spectrum="bb(10000)"
self.subset=False
self.etcid="None"
self.setglobal(__file__)
self.runpy()
class SpecSourcerateSpecCase28(basecase.SpecSourcerateSpecCase):
def setUp(self):
self.obsmode="wfc3,uvis2,g280"
self.spectrum="rn(bb(10000),band(johnson,u),23.0,vegamag)"
self.subset=False
self.etcid="uvsp2006.tab:0149"
self.setglobal(__file__)
self.runpy()
class countrateCase3(basecase.countrateCase):
def setUp(self):
self.obsmode="wfc3,uvis2,g280,bkg"
self.spectrum="spec(earthshine.fits)*0.5+rn(spec(Zodi.fits),band(johnson,v),22.1,vegamag)+(spec(el1215a.fits)+spec(el1302a.fits)+spec(el1356a.fits)+spec(el2471a.fits))"
self.subset=True
self.etcid="uvsp2006.tab:0150"
self.setglobal(__file__)
self.runpy()
class calcphotCase3(basecase.calcphotCase):
def setUp(self):
self.obsmode="wfc3,uvis2,g280,bkg"
self.spectrum="spec(earthshine.fits)*0.5+rn(spec(Zodi.fits),band(johnson,v),22.1,vegamag)+(spec(el1215a.fits)+spec(el1302a.fits)+spec(el1356a.fits)+spec(el2471a.fits))"
self.subset=True
self.etcid="uvsp2006.tab:0150"
self.setglobal(__file__)
self.runpy()
class SpecSourcerateSpecCase29(basecase.SpecSourcerateSpecCase):
def setUp(self):
self.obsmode="wfc3,uvis2,g280"
self.spectrum="rn(bb(10000),band(johnson,r),23.0,vegamag)"
self.subset=True
self.etcid="uvsp2006.tab:0150"
self.setglobal(__file__)
self.runpy()
class countrateCase4(basecase.countrateCase):
def setUp(self):
self.obsmode="wfc3,uvis2,g280,bkg"
self.spectrum="spec(earthshine.fits)*0.5+rn(spec(Zodi.fits),band(johnson,v),23.3,vegamag)+(spec(el1215a.fits)+spec(el1302a.fits)+spec(el1356a.fits)+spec(el2471a.fits))"
self.subset=False
self.etcid="uvsp2006.tab:0151"
self.setglobal(__file__)
self.runpy()
class calcphotCase4(basecase.calcphotCase):
def setUp(self):
self.obsmode="wfc3,uvis2,g280,bkg"
self.spectrum="spec(earthshine.fits)*0.5+rn(spec(Zodi.fits),band(johnson,v),23.3,vegamag)+(spec(el1215a.fits)+spec(el1302a.fits)+spec(el1356a.fits)+spec(el2471a.fits))"
self.subset=False
self.etcid="uvsp2006.tab:0151"
self.setglobal(__file__)
self.runpy()
class SpecSourcerateSpecCase30(basecase.SpecSourcerateSpecCase):
def setUp(self):
self.obsmode="wfc3,uvis2,g280"
self.spectrum="rn(bb(10000),band(johnson,i),23.0,vegamag)"
self.subset=False
self.etcid="uvsp2006.tab:0151"
self.setglobal(__file__)
self.runpy()
class countrateCase5(basecase.countrateCase):
def setUp(self):
self.obsmode="wfc3,uvis2,g280,bkg"
self.spectrum="spec(earthshine.fits)*0.5+rn(spec(Zodi.fits),band(johnson,v),21.7,vegamag)+(spec(el1215a.fits)+spec(el1302a.fits)+spec(el1356a.fits)+spec(el2471a.fits))"
self.subset=False
self.etcid="uvsp2006.tab:0152"
self.setglobal(__file__)
self.runpy()
class calcphotCase5(basecase.calcphotCase):
def setUp(self):
self.obsmode="wfc3,uvis2,g280,bkg"
self.spectrum="spec(earthshine.fits)*0.5+rn(spec(Zodi.fits),band(johnson,v),21.7,vegamag)+(spec(el1215a.fits)+spec(el1302a.fits)+spec(el1356a.fits)+spec(el2471a.fits))"
self.subset=False
self.etcid="uvsp2006.tab:0152"
self.setglobal(__file__)
self.runpy()
class SpecSourcerateSpecCase31(basecase.SpecSourcerateSpecCase):
def setUp(self):
self.obsmode="wfc3,uvis2,g280"
self.spectrum="rn(bb(10000),band(johnson,j),23.0,vegamag)"
self.subset=False
self.etcid="uvsp2006.tab:0152"
self.setglobal(__file__)
self.runpy()
class countrateCase6(basecase.countrateCase):
def setUp(self):
self.obsmode="wfc3,uvis2,g280,bkg"
self.spectrum="spec(earthshine.fits)*0.5+rn(spec(Zodi.fits),band(johnson,v),22.424602593467696,vegamag)+(spec(el1215a.fits)+spec(el1302a.fits)+spec(el1356a.fits)+spec(el2471a.fits))"
self.subset=False
self.etcid="uvsp2006.tab:0153"
self.setglobal(__file__)
self.runpy()
class calcphotCase6(basecase.calcphotCase):
def setUp(self):
self.obsmode="wfc3,uvis2,g280,bkg"
self.spectrum="spec(earthshine.fits)*0.5+rn(spec(Zodi.fits),band(johnson,v),22.424602593467696,vegamag)+(spec(el1215a.fits)+spec(el1302a.fits)+spec(el1356a.fits)+spec(el2471a.fits))"
self.subset=False
self.etcid="uvsp2006.tab:0153"
self.setglobal(__file__)
self.runpy()
class SpecSourcerateSpecCase32(basecase.SpecSourcerateSpecCase):
def setUp(self):
self.obsmode="wfc3,uvis2,g280"
self.spectrum="rn(bb(10000),band(johnson,k),23.0,vegamag)"
self.subset=False
self.etcid="uvsp2006.tab:0153"
self.setglobal(__file__)
self.runpy()
class countrateCase7(basecase.countrateCase):
def setUp(self):
self.obsmode="wfc3,uvis2,g280,bkg"
self.spectrum="spec(earthshine.fits)*0.5+spec(Zodi.fits)*0.5+(spec(el1215a.fits)+spec(el1302a.fits)+spec(el1356a.fits)+spec(el2471a.fits))"
self.subset=False
self.etcid="uvsp2006.tab:0154"
self.setglobal(__file__)
self.runpy()
class calcphotCase7(basecase.calcphotCase):
def setUp(self):
self.obsmode="wfc3,uvis2,g280,bkg"
self.spectrum="spec(earthshine.fits)*0.5+spec(Zodi.fits)*0.5+(spec(el1215a.fits)+spec(el1302a.fits)+spec(el1356a.fits)+spec(el2471a.fits))"
self.subset=False
self.etcid="uvsp2006.tab:0154"
self.setglobal(__file__)
self.runpy()
class SpecSourcerateSpecCase33(basecase.SpecSourcerateSpecCase):
def setUp(self):
self.obsmode="wfc3,uvis2,g280"
self.spectrum="rn(bb(10000),band(cousins,r),23.0,vegamag)"
self.subset=False
self.etcid="uvsp2006.tab:0154"
self.setglobal(__file__)
self.runpy()
class calcspecCase139(basecase.calcspecCase):
def setUp(self):
self.obsmode="None"
self.spectrum="pl(4000.0,-2.0,flam)"
self.subset=True
self.etcid="None"
self.setglobal(__file__)
self.runpy()
class countrateCase8(basecase.countrateCase):
def setUp(self):
self.obsmode="wfc3,uvis2,g280,bkg"
self.spectrum="spec(earthshine.fits)*2.0+rn(spec(Zodi.fits),band(johnson,v),22.7,vegamag)+(spec(el1215a.fits)+spec(el1302a.fits)+spec(el1356a.fits)+spec(el2471a.fits))"
self.subset=False
self.etcid="uvsp2006.tab:0155"
self.setglobal(__file__)
self.runpy()
class calcphotCase8(basecase.calcphotCase):
def setUp(self):
self.obsmode="wfc3,uvis2,g280,bkg"
self.spectrum="spec(earthshine.fits)*2.0+rn(spec(Zodi.fits),band(johnson,v),22.7,vegamag)+(spec(el1215a.fits)+spec(el1302a.fits)+spec(el1356a.fits)+spec(el2471a.fits))"
self.subset=False
self.etcid="uvsp2006.tab:0155"
self.setglobal(__file__)
self.runpy()
class SpecSourcerateSpecCase34(basecase.SpecSourcerateSpecCase):
def setUp(self):
self.obsmode="wfc3,uvis2,g280"
self.spectrum="rn(pl(4000.0,-2.0,flam),band(bessell,h),23.0,vegamag)"
self.subset=True
self.etcid="uvsp2006.tab:0155"
self.setglobal(__file__)
self.runpy()
class countrateCase9(basecase.countrateCase):
def setUp(self):
self.obsmode="wfc3,uvis2,g280,bkg"
self.spectrum="spec(earthshine.fits)+rn(spec(Zodi.fits),band(johnson,v),22.7,vegamag)+(spec(el1215a.fits)+spec(el1302a.fits)+spec(el1356a.fits)+spec(el2471a.fits))"
self.subset=False
self.etcid="uvsp2006.tab:0156"
self.setglobal(__file__)
self.runpy()
class calcphotCase9(basecase.calcphotCase):
def setUp(self):
self.obsmode="wfc3,uvis2,g280,bkg"
self.spectrum="spec(earthshine.fits)+rn(spec(Zodi.fits),band(johnson,v),22.7,vegamag)+(spec(el1215a.fits)+spec(el1302a.fits)+spec(el1356a.fits)+spec(el2471a.fits))"
self.subset=False
self.etcid="uvsp2006.tab:0156"
self.setglobal(__file__)
self.runpy()
class SpecSourcerateSpecCase35(basecase.SpecSourcerateSpecCase):
def setUp(self):
self.obsmode="wfc3,uvis2,g280"
self.spectrum="rn(pl(4000.0,-2.0,flam),band(Bessell,j),23.0,vegamag)"
self.subset=False
self.etcid="uvsp2006.tab:0156"
self.setglobal(__file__)
self.runpy()
class countrateCase10(basecase.countrateCase):
def setUp(self):
self.obsmode="wfc3,uvis2,g280,bkg"
self.spectrum="rn(spec(Zodi.fits),band(johnson,v),22.7,vegamag)+(spec(el1215a.fits)+spec(el1302a.fits)+spec(el1356a.fits)+spec(el2471a.fits))"
self.subset=False
self.etcid="uvsp2006.tab:0157"
self.setglobal(__file__)
self.runpy()
class calcphotCase10(basecase.calcphotCase):
def setUp(self):
self.obsmode="wfc3,uvis2,g280,bkg"
self.spectrum="rn(spec(Zodi.fits),band(johnson,v),22.7,vegamag)+(spec(el1215a.fits)+spec(el1302a.fits)+spec(el1356a.fits)+spec(el2471a.fits))"
self.subset=False
self.etcid="uvsp2006.tab:0157"
self.setglobal(__file__)
self.runpy()
class SpecSourcerateSpecCase36(basecase.SpecSourcerateSpecCase):
def setUp(self):
self.obsmode="wfc3,uvis2,g280"
self.spectrum="rn(pl(4000.0,-2.0,flam),band(bessell,k),23.0,vegamag)"
self.subset=False
self.etcid="uvsp2006.tab:0157"
self.setglobal(__file__)
self.runpy()
if __name__ == '__main__':
if 'debug' in sys.argv:
testutil.debug(__name__)
else:
testutil.testall(__name__,2)
#calcspec:141 - 133 dup =8
#thermback:0 - 0 dup =0
#calcphot:10 - 1 dup =9
#countrate:10 - 1 dup =9
#SpecSourcerateSpec:36 - 1 dup =35
| 60.518812
| 3,171
| 0.683529
| 4,065
| 30,562
| 5.063223
| 0.086101
| 0.249587
| 0.035565
| 0.04742
| 0.922699
| 0.918375
| 0.887572
| 0.882179
| 0.882179
| 0.874745
| 0
| 0.188869
| 0.139847
| 30,562
| 504
| 3,172
| 60.638889
| 0.594073
| 0.00409
| 0
| 0.733871
| 0
| 0.104839
| 0.536113
| 0.1898
| 0
| 0
| 0
| 0
| 0
| 1
| 0.122984
| false
| 0
| 0.006048
| 0
| 0.252016
| 0
| 0
| 0
| 0
| null | 1
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 9
|
e814ecb99afd9bd86775f35c4341ec2f82877045
| 79
|
py
|
Python
|
projections/hpd/__init__.py
|
ricardog/raster-project
|
37d508ca329d31d4b1d21614371596f4c1bca526
|
[
"Apache-2.0"
] | 1
|
2018-02-23T14:26:17.000Z
|
2018-02-23T14:26:17.000Z
|
projections/hpd/__init__.py
|
NaturalHistoryMuseum/raster-project
|
319a0f633de8cf2317eba5d82396036f01ce5262
|
[
"Apache-2.0"
] | null | null | null |
projections/hpd/__init__.py
|
NaturalHistoryMuseum/raster-project
|
319a0f633de8cf2317eba5d82396036f01ce5262
|
[
"Apache-2.0"
] | 1
|
2017-10-11T15:49:18.000Z
|
2017-10-11T15:49:18.000Z
|
from .wpp import WPP
from . import wpp
from . import sps
from . import hyde
| 13.166667
| 20
| 0.708861
| 13
| 79
| 4.307692
| 0.384615
| 0.535714
| 0.464286
| 0.678571
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.240506
| 79
| 5
| 21
| 15.8
| 0.933333
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 8
|
e816253affce8cbd29efa21f26f26fc5579b460b
| 6,139
|
py
|
Python
|
tests/integration/test_log_events_it.py
|
corylevine/okta-sdk-python
|
c86b8fdc4525e84199143c27213c0aebc6b2af8f
|
[
"Apache-2.0"
] | 145
|
2017-06-13T21:54:04.000Z
|
2022-02-25T05:44:34.000Z
|
tests/integration/test_log_events_it.py
|
corylevine/okta-sdk-python
|
c86b8fdc4525e84199143c27213c0aebc6b2af8f
|
[
"Apache-2.0"
] | 146
|
2017-06-02T17:46:12.000Z
|
2022-03-29T15:52:15.000Z
|
tests/integration/test_log_events_it.py
|
corylevine/okta-sdk-python
|
c86b8fdc4525e84199143c27213c0aebc6b2af8f
|
[
"Apache-2.0"
] | 98
|
2017-06-27T03:44:51.000Z
|
2022-03-23T04:58:18.000Z
|
import pytest
from tests.mocks import MockOktaClient
import okta.models as models
from okta.constants import DATETIME_FORMAT
import datetime as dt
class TestLogEventsResource:
"""
Integration Tests for the Log Events Resource
"""
SDK_PREFIX = "python_sdk"
@pytest.mark.asyncio
@pytest.mark.skip
async def test_get_logs(self):
# Instantiate Mock Client
client = MockOktaClient(fs)
# Create Bookmark Application Object
APP_URL = "https://example.com/bookmark.htm"
APP_LABEL = "AddBookmarkApp-GetLogs"
app_settings_app = models.BookmarkApplicationSettingsApplication({
"requestIntegration": False,
"url": APP_URL
})
app_settings = models.BookmarkApplicationSettings({
"app": app_settings_app
})
bookmark_app_obj = models.BookmarkApplication({
"label": APP_LABEL,
"settings": app_settings
})
try:
# Create App in org
app, _, err = await client.create_application(bookmark_app_obj)
assert err is None
assert isinstance(app, models.Application)
assert isinstance(app, models.BookmarkApplication)
logs, _, err = await client.get_logs()
assert err is None
assert logs is not None
assert isinstance(logs, list)
if logs[0]:
assert isinstance(logs[0], models.LogEvent)
finally:
errors = []
# Deactivate & Delete created app
try:
_, err = await client.deactivate_application(app.id)
assert err is None
except Exception as exc:
errors.append(exc)
try:
_, err = await client.delete_application(app.id)
assert err is None
except Exception as exc:
errors.append(exc)
assert len(errors) == 0
@pytest.mark.asyncio
@pytest.mark.skip
async def test_get_logs_polling(self):
# Instantiate Mock Client
client = MockOktaClient(fs)
# Create Bookmark Application Object
APP_URL = "https://example.com/bookmark.htm"
APP_LABEL = "AddBookmarkApp-GetLogs"
app_settings_app = models.BookmarkApplicationSettingsApplication({
"requestIntegration": False,
"url": APP_URL
})
app_settings = models.BookmarkApplicationSettings({
"app": app_settings_app
})
bookmark_app_obj = models.BookmarkApplication({
"label": APP_LABEL,
"settings": app_settings
})
try:
# Create App in org
app, _, err = await client.create_application(bookmark_app_obj)
assert err is None
assert isinstance(app, models.Application)
assert isinstance(app, models.BookmarkApplication)
now = dt.datetime.now(dt.timezone.utc)
now = now.replace(microsecond=0)
one_minute_after = now + dt.timedelta(minutes=1)
log_query_params = {
"sortOrder": "ASCENDING",
"until": one_minute_after.strftime(DATETIME_FORMAT)
}
logs, _, err = await client.get_logs(log_query_params)
assert err is None
assert logs is not None
assert isinstance(logs, list)
if logs[0]:
assert isinstance(logs[0], models.LogEvent)
finally:
errors = []
# Deactivate & Delete created app
try:
_, err = await client.deactivate_application(app.id)
assert err is None
except Exception as exc:
errors.append(exc)
try:
_, err = await client.delete_application(app.id)
assert err is None
except Exception as exc:
errors.append(exc)
assert len(errors) == 0
@pytest.mark.asyncio
@pytest.mark.skip
async def test_get_logs_bounded(self):
# Instantiate Mock Client
client = MockOktaClient(fs)
# Create Bookmark Application Object
APP_URL = "https://example.com/bookmark.htm"
APP_LABEL = "AddBookmarkApp-GetLogs"
app_settings_app = models.BookmarkApplicationSettingsApplication({
"requestIntegration": False,
"url": APP_URL
})
app_settings = models.BookmarkApplicationSettings({
"app": app_settings_app
})
bookmark_app_obj = models.BookmarkApplication({
"label": APP_LABEL,
"settings": app_settings
})
try:
# Create App in org
app, _, err = await client.create_application(bookmark_app_obj)
assert err is None
assert isinstance(app, models.Application)
assert isinstance(app, models.BookmarkApplication)
# Retrieve logs
now = dt.datetime.now(dt.timezone.utc)
now = now.replace(microsecond=0)
one_hour_before = now - dt.timedelta(hours=1)
log_query_params = {
"since": one_hour_before.strftime(DATETIME_FORMAT),
"until": now.strftime(DATETIME_FORMAT)
}
logs, _, err = await client.get_logs(log_query_params)
assert err is None
assert logs is not None
assert isinstance(logs, list)
if logs[0]:
assert isinstance(logs[0], models.LogEvent)
finally:
errors = []
# Deactivate & Delete created app
try:
_, err = await client.deactivate_application(app.id)
assert err is None
except Exception as exc:
errors.append(exc)
try:
_, err = await client.delete_application(app.id)
assert err is None
except Exception as exc:
errors.append(exc)
assert len(errors) == 0
| 33.917127
| 75
| 0.568659
| 610
| 6,139
| 5.565574
| 0.17377
| 0.038881
| 0.049485
| 0.053019
| 0.879234
| 0.879234
| 0.87187
| 0.87187
| 0.87187
| 0.87187
| 0
| 0.003286
| 0.355595
| 6,139
| 180
| 76
| 34.105556
| 0.854904
| 0.06304
| 0
| 0.868056
| 0
| 0
| 0.055206
| 0.01153
| 0
| 0
| 0
| 0
| 0.208333
| 1
| 0
| false
| 0
| 0.034722
| 0
| 0.048611
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
98e0ade28f70fe684689fc3177164d2be029204d
| 4,659
|
py
|
Python
|
tests/api/v1/test_flags.py
|
xmsec/LanCTFD
|
77659cca2aae4ab68cf25ddb5a7cbe63f597a9af
|
[
"Apache-2.0"
] | 3
|
2020-05-13T14:02:11.000Z
|
2022-03-12T08:09:34.000Z
|
tests/api/v1/test_flags.py
|
xmsec/LanCTFD
|
77659cca2aae4ab68cf25ddb5a7cbe63f597a9af
|
[
"Apache-2.0"
] | 6
|
2019-01-26T15:06:07.000Z
|
2019-02-11T01:48:20.000Z
|
tests/api/v1/test_flags.py
|
xmsec/LanCTFD
|
77659cca2aae4ab68cf25ddb5a7cbe63f597a9af
|
[
"Apache-2.0"
] | 4
|
2019-08-01T02:16:44.000Z
|
2022-03-12T08:09:35.000Z
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
from tests.helpers import *
def test_api_flags_get_non_admin():
"""Can a user get /api/v1/flags if not admin"""
app = create_ctfd()
with app.app_context():
with app.test_client() as client:
r = client.get('/api/v1/flags', json="")
assert r.status_code == 403
destroy_ctfd(app)
def test_api_flags_get_admin():
"""Can a user get /api/v1/flags if admin"""
app = create_ctfd()
with app.app_context():
with login_as_user(app, 'admin') as client:
r = client.get('/api/v1/flags', json="")
assert r.status_code == 200
destroy_ctfd(app)
def test_api_flags_post_non_admin():
"""Can a user post /api/v1/flags if not admin"""
app = create_ctfd()
with app.app_context():
with app.test_client() as client:
r = client.post('/api/v1/flags')
assert r.status_code == 403
destroy_ctfd(app)
def test_api_flags_post_admin():
"""Can a user post /api/v1/flags if admin"""
app = create_ctfd()
with app.app_context():
gen_challenge(app.db)
with login_as_user(app, name="admin") as client:
r = client.post('/api/v1/flags', json={"content": "flag",
"type": "static",
"challenge": 1})
assert r.status_code == 200
destroy_ctfd(app)
def test_api_flag_types_get_non_admin():
"""Can a user get /api/v1/flags/types[/<type_name>] if not admin"""
app = create_ctfd()
with app.app_context():
with app.test_client() as client:
r = client.get('/api/v1/flags/types', json="")
assert r.status_code == 403
destroy_ctfd(app)
def test_api_flag_types_get_admin():
"""Can a user get /api/v1/flags/types[/<type_name>] if admin"""
app = create_ctfd()
with app.app_context():
with login_as_user(app, 'admin') as client:
r = client.get('/api/v1/flags/types', json="")
assert r.status_code == 200
r = client.get('/api/v1/flags/types/static', json="")
assert r.status_code == 200
destroy_ctfd(app)
def test_api_flag_get_non_admin():
"""Can a user get /api/v1/flags/<flag_id> if not admin"""
app = create_ctfd()
with app.app_context():
with app.test_client() as client:
r = client.get('/api/v1/flags/1', json="")
assert r.status_code == 403
destroy_ctfd(app)
def test_api_flag_get_admin():
"""Can a user get /api/v1/flags/<flag_id> if admin"""
app = create_ctfd()
with app.app_context():
gen_challenge(app.db)
gen_flag(app.db, 1)
with login_as_user(app, 'admin') as client:
r = client.get('/api/v1/flags/1', json="")
assert r.status_code == 200
destroy_ctfd(app)
def test_api_flag_patch_non_admin():
"""Can a user patch /api/v1/flags/<flag_id> if not admin"""
app = create_ctfd()
with app.app_context():
gen_challenge(app.db)
gen_flag(app.db, 1)
with app.test_client() as client:
r = client.patch('/api/v1/flags/1', json="")
assert r.status_code == 403
destroy_ctfd(app)
def test_api_flag_patch_admin():
"""Can a user patch /api/v1/flags/<flag_id> if admin"""
app = create_ctfd()
with app.app_context():
gen_challenge(app.db)
gen_flag(app.db, 1)
with login_as_user(app, 'admin') as client:
r = client.patch('/api/v1/flags/1', json={
"content": "flag_edit",
"data": "",
"type": "static",
"id": "1"})
assert r.status_code == 200
assert r.get_json()['data']['content'] == "flag_edit"
destroy_ctfd(app)
def test_api_flag_delete_non_admin():
"""Can a user delete /api/v1/flags/<flag_id> if not admin"""
app = create_ctfd()
with app.app_context():
gen_challenge(app.db)
gen_flag(app.db, 1)
with app.test_client() as client:
r = client.delete('/api/v1/flags/1', json="")
assert r.status_code == 403
destroy_ctfd(app)
def test_api_flag_delete_admin():
"""Can a user patch /api/v1/flags/<flag_id> if admin"""
app = create_ctfd()
with app.app_context():
gen_challenge(app.db)
gen_flag(app.db, 1)
with login_as_user(app, 'admin') as client:
r = client.delete('/api/v1/flags/1', json="")
assert r.status_code == 200
assert r.get_json().get('data') is None
destroy_ctfd(app)
| 31.910959
| 71
| 0.578021
| 672
| 4,659
| 3.790179
| 0.08631
| 0.049077
| 0.098155
| 0.066353
| 0.931684
| 0.908127
| 0.906164
| 0.896349
| 0.854731
| 0.808795
| 0
| 0.02318
| 0.277742
| 4,659
| 145
| 72
| 32.131034
| 0.73373
| 0.135866
| 0
| 0.764151
| 0
| 0
| 0.081483
| 0.006559
| 0
| 0
| 0
| 0
| 0.141509
| 1
| 0.113208
| false
| 0
| 0.009434
| 0
| 0.122642
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
c7aec9d440e7eb04c80f684216487d067bb8d3a2
| 4,760
|
py
|
Python
|
src/wavestate/iirrational/fitters_ZPK/codings_cmn/base.py
|
wavestate/wavestate-iirrational
|
01d6dba8b2131fa2a099a74f17e6540f30cee606
|
[
"Apache-2.0"
] | null | null | null |
src/wavestate/iirrational/fitters_ZPK/codings_cmn/base.py
|
wavestate/wavestate-iirrational
|
01d6dba8b2131fa2a099a74f17e6540f30cee606
|
[
"Apache-2.0"
] | null | null | null |
src/wavestate/iirrational/fitters_ZPK/codings_cmn/base.py
|
wavestate/wavestate-iirrational
|
01d6dba8b2131fa2a099a74f17e6540f30cee606
|
[
"Apache-2.0"
] | null | null | null |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
# SPDX-License-Identifier: Apache-2.0
# SPDX-FileCopyrightText: © 2021 Massachusetts Institute of Technology.
# SPDX-FileCopyrightText: © 2021 Lee McCuller <mcculler@mit.edu>
# NOTICE: authors should document their contributions in concisely in NOTICE
# with details inline in source files, comments, and docstrings.
"""
"""
import numpy as np
from ... import TFmath
Ipi = np.pi * 1j
I2pi = np.pi * 2j
class BranchCutAmbiguity(Exception):
pass
class EmptyCopy(object):
pass
class CodingType(object):
disable = False
coding_id = None
gain_effect = 1
def __init__(self, sys):
# preserved through deepcopy so that codings_ignore set can be maintained
self.coding_id = id(self)
self.sys = sys
def clone(self, sys):
new = EmptyCopy()
d = dict(self.__dict__)
d["sys"] = sys
new.__dict__.update(d)
new.__class__ = self.__class__
return new
def roots(self):
rc = self.roots_c()
return self.roots_r() + rc + [r.conjugate() for r in rc]
def roots_r(self):
return []
def roots_c(self):
return []
def roots_Sf(self):
return self.roots()
def roots_r_Sf(self):
return self.roots_r()
def roots_c_Sf(self):
return self.roots_c()
def update_roots_Sf(self, *rs):
return self.update_roots(*rs)
def option_set(self, **kwargs):
return
def transfer_abs_sq(self):
# real/imaginary part of root
return TFmath.abs_sq(self.transfer())
def derivative_wtrans(self):
return self.transfer(), self.derivative()
def derivative_abs_sq_wtrans(self):
xfer = self.transfer()
jac = self.derivative()
jac_abs_sq = []
for der in jac:
jac_abs_sq.append(2 * (der.real * xfer.real + der.imag * xfer.imag))
return TFmath.abs_sq(xfer), jac_abs_sq
@property
def derivative_deadzoned(self):
return False
class CodingTypeZ(object):
disable = False
coding_id = None
gain_effect = 1
def __init__(self, sys):
# preserved through deepcopy so that codings_ignore set can be maintained
self.coding_id = id(self)
self.sys = sys
def clone(self, sys):
new = EmptyCopy()
d = dict(self.__dict__)
d["sys"] = sys
new.__dict__.update(d)
new.__class__ = self.__class__
return new
def roots(self):
rc = self.roots_c()
return self.roots_r() + rc + [r.conjugate() for r in rc]
def roots_r(self):
return []
def roots_c(self):
return []
def roots_Sf(self):
rs = []
for r in self.roots():
if r.imag == 0:
if r.real > 0:
r_Sf = (r.real - 1) * self.sys.F_nyquist_Hz
else:
raise BranchCutAmbiguity()
else:
F_Hz = np.angle(r) / np.pi * self.sys.F_nyquist_Hz
amp = abs(r)
BW = (amp - 1) * self.sys.F_nyquist_Hz
r_Sf = BW + 1j * F_Hz
rs.append(r_Sf)
return rs
def roots_r_Sf(self):
rs = []
for r in self.roots_r():
if r.real > 0:
r_Sf = (r.real - 1) * self.sys.F_nyquist_Hz
else:
raise BranchCutAmbiguity()
rs.append(r_Sf)
return rs
def roots_c_Sf(self):
rs = []
for r in self.roots_c():
F_Hz = np.angle(r) / np.pi * self.sys.F_nyquist_Hz
amp = abs(r)
BW = (amp - 1) * self.sys.F_nyquist_Hz
r_Sf = BW + 1j * F_Hz
rs.append(r_Sf)
return rs
def update_roots_Sf(self, *rs):
rZs = []
for r in rs:
F_Hz = r.imag
if F_Hz > self.sys.F_nyquist_Hz:
raise BranchCutAmbiguity()
amp = 1 + r.real / self.sys.F_nyquist_Hz
rZ = amp * np.exp(F_Hz / self.sys.F_nyquist_Hz * np.pi * 1j)
rZs.append(rZ)
return self.update_roots(*rZs)
def option_set(self, **kwargs):
return
def transfer_abs_sq(self):
# real/imaginary part of root
return TFmath.abs_sq(self.transfer())
def derivative_wtrans(self):
return self.transfer(), self.derivative()
def derivative_abs_sq_wtrans(self):
xfer = self.transfer()
jac = self.derivative()
jac_abs_sq = []
for der in jac:
jac_abs_sq.append(2 * (der.real * xfer.real + der.imag * xfer.imag))
return TFmath.abs_sq(xfer), jac_abs_sq
@property
def derivative_deadzoned(self):
return False
| 25.185185
| 81
| 0.562185
| 639
| 4,760
| 3.971831
| 0.195618
| 0.041371
| 0.028369
| 0.053191
| 0.777778
| 0.740741
| 0.724586
| 0.708826
| 0.671395
| 0.671395
| 0
| 0.00912
| 0.331933
| 4,760
| 188
| 82
| 25.319149
| 0.788365
| 0.115336
| 0
| 0.819549
| 0
| 0
| 0.001431
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.210526
| false
| 0.015038
| 0.015038
| 0.120301
| 0.496241
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 1
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 0
| 0
|
0
| 7
|
c7f2cd35d36cab9d3a0aca92b34aab1f128634a4
| 71,247
|
py
|
Python
|
Util/metrics.py
|
LamannaLeonardo/OLAM
|
7a6611912ebb40d39a934dd454efec4cbb7913d3
|
[
"MIT"
] | null | null | null |
Util/metrics.py
|
LamannaLeonardo/OLAM
|
7a6611912ebb40d39a934dd454efec4cbb7913d3
|
[
"MIT"
] | null | null | null |
Util/metrics.py
|
LamannaLeonardo/OLAM
|
7a6611912ebb40d39a934dd454efec4cbb7913d3
|
[
"MIT"
] | null | null | null |
# Copyright (c) 2022, Leonardo Lamanna
# All rights reserved.
# This source code is licensed under the MIT-style license found in the
# LICENSE file in the root directory of this source tree.
import os
import re
from collections import defaultdict
import Configuration
def action_model_statistics():
real_precs_size, learned_precs_size = action_model_preconditions_size()
real_eff_pos_size, learned_eff_pos_size = action_model_eff_pos_size()
real_eff_neg_size, learned_eff_neg_size = action_model_eff_neg_size()
ins_pre, del_pre = action_model_preconditions_statistics()
ins_eff_pos, del_eff_pos = action_model_eff_pos_statistics()
ins_eff_neg, del_eff_neg = action_model_eff_neg_statistics()
precs_recall = action_model_prec_recall()
eff_pos_recall = action_model_eff_pos_recall()
eff_neg_recall = action_model_eff_neg_recall()
precs_precision = action_model_prec_precision()
eff_pos_precision = action_model_eff_pos_precision()
eff_neg_precision = action_model_eff_neg_precision()
overall_recall = action_model_overall_recall()
overall_precision = action_model_overall_precision()
return real_precs_size, learned_precs_size, real_eff_pos_size, learned_eff_pos_size, \
real_eff_neg_size, learned_eff_neg_size, ins_pre, del_pre, ins_eff_pos, del_eff_pos, \
ins_eff_neg, del_eff_neg, precs_recall, eff_pos_recall, eff_neg_recall, precs_precision, \
eff_pos_precision, eff_neg_precision, overall_recall, overall_precision
def action_model_statistics_with_uncertain_neg(uncert_neg_effects):
real_precs_size, learned_precs_size = action_model_preconditions_size()
real_eff_pos_size, learned_eff_pos_size = action_model_eff_pos_size()
real_eff_neg_size, learned_eff_neg_size = action_model_eff_neg_size()
uncert_neg_eff_count = 0
for k,v in uncert_neg_effects.items():
uncert_neg_eff_count += len(v)
learned_eff_neg_size += uncert_neg_eff_count
ins_pre, del_pre = action_model_preconditions_statistics()
ins_eff_pos, del_eff_pos = action_model_eff_pos_statistics()
ins_eff_neg, del_eff_neg = action_model_eff_neg_statistics_with_uncertain(uncert_neg_effects)
precs_recall = action_model_prec_recall()
eff_pos_recall = action_model_eff_pos_recall()
eff_neg_recall = action_model_eff_neg_recall_with_uncertain(uncert_neg_effects)
precs_precision = action_model_prec_precision()
eff_pos_precision = action_model_eff_pos_precision()
eff_neg_precision = action_model_eff_neg_precision_with_uncertain(uncert_neg_effects)
overall_recall = action_model_overall_recall_with_uncertain_neg(uncert_neg_effects)
overall_precision = action_model_overall_precision_with_uncertain_neg(uncert_neg_effects)
return real_precs_size, learned_precs_size, real_eff_pos_size, learned_eff_pos_size, \
real_eff_neg_size, learned_eff_neg_size, ins_pre, del_pre, ins_eff_pos, del_eff_pos, \
ins_eff_neg, del_eff_neg, precs_recall, eff_pos_recall, eff_neg_recall, precs_precision, \
eff_pos_precision, eff_neg_precision, overall_recall, overall_precision
def action_model_prec_recall():
tp_precs, fp_precs, fn_precs = action_model_preconditions_predictions()
if (tp_precs + fn_precs) == 0:
return 0
return tp_precs / (tp_precs + fn_precs)
def action_model_prec_precision():
    """Precision of the learned preconditions w.r.t. the real action model."""
    tp_precs, fp_precs, _fn_precs = action_model_preconditions_predictions()
    denom = tp_precs + fp_precs
    # 0 by convention when nothing was learned (avoids ZeroDivisionError).
    return tp_precs / denom if denom else 0
def action_model_eff_recall():
    """Recall over all (positive and negative) effects of the learned model."""
    tp_eff, _fp_eff, fn_eff = action_model_eff_predictions()
    denom = tp_eff + fn_eff
    return tp_eff / denom if denom else 0
def action_model_eff_pos_recall():
    """Recall over positive effects.

    Returns 1 by convention when the real model has no positive effects,
    otherwise tp / (tp + fn), or 0 when that denominator is empty.
    """
    real_size, _learned_size = action_model_eff_pos_size()
    if real_size == 0:
        return 1
    tp, _fp, fn = action_model_eff_pos_predictions()
    denom = tp + fn
    return tp / denom if denom else 0
def action_model_eff_neg_recall():
    """Recall over negative (delete) effects.

    Returns 1 by convention when the real model has no negative effects,
    otherwise tp / (tp + fn), or 0 when that denominator is empty.
    """
    real_size, _learned_size = action_model_eff_neg_size()
    if real_size == 0:
        return 1
    tp, _fp, fn = action_model_eff_neg_predictions()
    denom = tp + fn
    return tp / denom if denom else 0
def action_model_eff_neg_recall_with_uncertain(uncert_neg_eff):
    """Recall over negative effects, counting the uncertain negatives in
    ``uncert_neg_eff`` (operator name -> list of atoms) as learned.

    Returns 1 by convention when the real model has no negative effects,
    otherwise tp / (tp + fn), or 0 when that denominator is empty.
    """
    real_eff_neg_size, _learned_size = action_model_eff_neg_size()
    if real_eff_neg_size == 0:
        return 1
    # Fix: the original also accumulated len(v) for every uncertain-effect
    # list into the learned size, which recall never uses (dead computation).
    tp, _fp, fn = action_model_eff_neg_predictions_with_uncert(uncert_neg_eff)
    denom = tp + fn
    return tp / denom if denom else 0
def action_model_eff_pos_precision():
    """Precision over positive effects.

    Returns 1 by convention when the real model has no positive effects,
    otherwise tp / (tp + fp), or 0 when that denominator is empty.
    """
    real_size, _learned_size = action_model_eff_pos_size()
    if real_size == 0:
        return 1
    tp, fp, _fn = action_model_eff_pos_predictions()
    denom = tp + fp
    return tp / denom if denom else 0
def action_model_eff_neg_precision():
    """Precision over negative (delete) effects.

    Returns 1 by convention when the real model has no negative effects,
    otherwise tp / (tp + fp), or 0 when that denominator is empty.
    """
    real_size, _learned_size = action_model_eff_neg_size()
    if real_size == 0:
        return 1
    tp, fp, _fn = action_model_eff_neg_predictions()
    denom = tp + fp
    return tp / denom if denom else 0
def action_model_eff_neg_precision_with_uncertain(uncert_neg_eff):
    """Precision over negative effects, counting the uncertain negatives in
    ``uncert_neg_eff`` (operator name -> list of atoms) as learned.

    Returns 1 by convention when the real model has no negative effects,
    otherwise tp / (tp + fp), or 0 when that denominator is empty.
    """
    real_eff_neg_size, _learned_size = action_model_eff_neg_size()
    if real_eff_neg_size == 0:
        return 1
    # Fix: the original also accumulated len(v) for every uncertain-effect
    # list into the learned size, which precision never uses (dead computation).
    tp, fp, _fn = action_model_eff_neg_predictions_with_uncert(uncert_neg_eff)
    denom = tp + fp
    return tp / denom if denom else 0
def action_model_eff_precision():
    """Precision over all (positive and negative) effects of the learned model."""
    tp_eff, fp_eff, _fn_eff = action_model_eff_predictions()
    denom = tp_eff + fp_eff
    return tp_eff / denom if denom else 0
def action_model_overall_precision():
    """Overall precision of the learned model across preconditions and effects.

    Returns tp / (tp + fp) pooled over both components, or 0 when no atoms
    were learned at all (avoids ZeroDivisionError).
    """
    tp_eff, fp_eff, _fn_eff = action_model_eff_predictions()
    tp_precs, fp_precs, _fn_precs = action_model_preconditions_predictions()
    all_tp = tp_precs + tp_eff
    all_fp = fp_eff + fp_precs
    # Fix: the original also computed all_fn, which precision never uses.
    denom = all_tp + all_fp
    return all_tp / denom if denom else 0
def action_model_overall_precision_with_uncertain_neg(uncert_neg_eff):
    """Overall precision across preconditions and effects, counting the
    uncertain negative effects in ``uncert_neg_eff`` as learned.

    Returns tp / (tp + fp) pooled over both components, or 0 when no atoms
    were learned at all (avoids ZeroDivisionError).
    """
    tp_eff, fp_eff, _fn_eff = action_model_eff_predictions_with_uncertain_neg(uncert_neg_eff)
    tp_precs, fp_precs, _fn_precs = action_model_preconditions_predictions()
    all_tp = tp_precs + tp_eff
    all_fp = fp_eff + fp_precs
    # Fix: the original also computed all_fn, which precision never uses.
    denom = all_tp + all_fp
    return all_tp / denom if denom else 0
def action_model_overall_recall():
    """Overall recall of the learned model across preconditions and effects.

    Returns tp / (tp + fn) pooled over both components, or 0 when the real
    model contributes no atoms at all (avoids ZeroDivisionError).
    """
    tp_eff, _fp_eff, fn_eff = action_model_eff_predictions()
    tp_precs, _fp_precs, fn_precs = action_model_preconditions_predictions()
    all_tp = tp_precs + tp_eff
    all_fn = fn_eff + fn_precs
    # Fix: the original also computed all_fp, which recall never uses.
    denom = all_tp + all_fn
    return all_tp / denom if denom else 0
def action_model_overall_recall_with_uncertain_neg(uncertain_neg_eff):
    """Overall recall across preconditions and effects, counting the
    uncertain negative effects in ``uncertain_neg_eff`` as learned.

    Returns tp / (tp + fn) pooled over both components, or 0 when the real
    model contributes no atoms at all (avoids ZeroDivisionError).
    """
    tp_eff, _fp_eff, fn_eff = action_model_eff_predictions_with_uncertain_neg(uncertain_neg_eff)
    tp_precs, _fp_precs, fn_precs = action_model_preconditions_predictions()
    all_tp = tp_precs + tp_eff
    all_fn = fn_eff + fn_precs
    # Fix: the original also computed all_fp, which recall never uses.
    denom = all_tp + all_fn
    return all_tp / denom if denom else 0
def action_model_eff_neg_predictions():
    """Count negative-effect predictions of the learned PDDL action model.

    Parses "PDDL/domain_learned.pddl" (learned model) and "PDDL/domain.pddl"
    (real/reference model), extracts the negative (delete) effects of every
    action schema, normalizes the real model's parameter names to
    "?param_<i>", and compares the per-operator sets.

    Returns:
        (tp_eff_neg, fp_eff_neg, fn_eff_neg): true-positive, false-positive
        and false-negative counts over negative-effect atoms.

    NOTE(review): the regex literals use non-raw strings with "\\(" escapes;
    on recent Pythons these emit deprecation warnings — prefer raw strings.
    """
    real_action_eff_neg = defaultdict(list)
    learned_action_eff_neg = defaultdict(list)
    # Compute action model coverage and overfitting
    with open("PDDL/domain_learned.pddl", "r") as f:
        learned_action_model = [el.lower() for el in f.read().split('\n') if el.strip() != ""]
    # Store learned action model effects.
    # Flatten the domain to one line and keep everything from the first ":action".
    all_action_schema = " ".join(learned_action_model)[" ".join(learned_action_model).index(":action "):]
    # Tempered regex: split the flattened text into one chunk per action schema
    # (runs of characters that never start a new ":action").
    action_schema = [el.strip() for el in re.findall("(?:(?!:action).)*", all_action_schema) if el.strip() != ""]
    for schema in action_schema:
        # Each chunk looks like "action <name> :parameters ... :effect (...)".
        op_name = schema.split()[1]
        # Effect text from ":effect" to the end, with the closing paren dropped.
        all_eff = re.findall(":effect.*", schema)[0].strip()[:-1].strip()
        cur_neg_effect = re.findall("\(not[^)]*\)\)", all_eff)
        # Positive effects (computed but unused in this function): atoms not
        # wrapped in (not ...) and not empty "(and)" / "()" artifacts.
        cur_pos_effect = [el for el in re.findall("\([^()]*\)", all_eff)
                          if el not in [el.replace("(not","").strip()[:-1] for el in cur_neg_effect]
                          and "".join(el.split()) != "(and)" and "".join(el.split()) != "()"]
        learned_action_eff_neg[op_name] = cur_neg_effect
    with open("PDDL/domain.pddl", "r") as f:
        real_action_model = [el.lower() for el in f.read().split('\n') if el.strip() != ""]
    # Store real action model effects (same flatten-and-split parse as above).
    all_action_schema = " ".join(real_action_model)[" ".join(real_action_model).index(":action "):]
    action_schema = [el.strip() for el in re.findall("(?:(?!:action).)*", all_action_schema) if
                     el.strip() != ""]
    for schema in action_schema:
        op_name = schema.split()[1]
        # Operator parameter names (tokens starting with "?") from ":parameters (...)".
        op_params = [el for el in re.findall("\([^()]*\)", re.findall(":parameters.*:precondition", schema)[0])[0].strip()[1:-1].split()
                     if el.startswith("?")]
        all_eff = re.findall(":effect.*", schema)[0].strip()[:-1].strip()
        cur_neg_effect = re.findall("\(not[^)]*\)\)", all_eff)
        cur_pos_effect = [el for el in re.findall("\([^()]*\)", all_eff)
                          if el not in [el.replace("(not", "").strip()[:-1] for el in cur_neg_effect]
                          and "".join(el.split()) != "(and)" and "".join(el.split()) != "()"]
        # Canonicalize the real model's parameter names to "?param_<i>" so the
        # atoms are string-comparable with the learned model (which presumably
        # already uses that naming — TODO confirm against the learner's output).
        for k in range(len(cur_neg_effect)):
            for j,param in enumerate(op_params):
                cur_neg_effect[k] = cur_neg_effect[k].replace(" " + param + " ", " ?param_{} ".format(j + 1))
                cur_neg_effect[k] = cur_neg_effect[k].replace("(" + param + " ", "(?param_{} ".format(j + 1))
                cur_neg_effect[k] = cur_neg_effect[k].replace(" " + param + ")", " ?param_{})".format(j + 1))
        real_action_eff_neg[op_name] = cur_neg_effect
    # Tally: a real atom missing from the learned model is a false negative;
    # a learned atom is a true positive if also real, otherwise a false positive.
    tp_eff_neg = 0
    fp_eff_neg = 0
    fn_eff_neg = 0
    for key, value in real_action_eff_neg.items():
        for pred in value:
            if pred not in learned_action_eff_neg[key]:
                fn_eff_neg += 1
    for key, value in learned_action_eff_neg.items():
        for pred in value:
            if pred in real_action_eff_neg[key]:
                tp_eff_neg += 1
            else:
                fp_eff_neg += 1
    return tp_eff_neg, fp_eff_neg, fn_eff_neg
def action_model_eff_neg_predictions_with_uncert(uncert_neg_eff):
    """Like action_model_eff_neg_predictions, but the uncertain negative
    effects in ``uncert_neg_eff`` (operator name -> list of atoms, without the
    "(not ...)" wrapper) are added to the learned model's negative effects.

    Returns:
        (tp_eff_neg, fp_eff_neg, fn_eff_neg) over negative-effect atoms.

    NOTE(review): the regex literals use non-raw strings with "\\(" escapes;
    on recent Pythons these emit deprecation warnings — prefer raw strings.
    """
    real_action_eff_neg = defaultdict(list)
    learned_action_eff_neg = defaultdict(list)
    # Compute action model coverage and overfitting
    with open("PDDL/domain_learned.pddl", "r") as f:
        learned_action_model = [el.lower() for el in f.read().split('\n') if el.strip() != ""]
    # Store learned action model effects: flatten the domain and keep
    # everything from the first ":action", then split into per-action chunks.
    all_action_schema = " ".join(learned_action_model)[" ".join(learned_action_model).index(":action "):]
    action_schema = [el.strip() for el in re.findall("(?:(?!:action).)*", all_action_schema) if el.strip() != ""]
    for schema in action_schema:
        op_name = schema.split()[1]
        all_eff = re.findall(":effect.*", schema)[0].strip()[:-1].strip()
        cur_neg_effect = re.findall("\(not[^)]*\)\)", all_eff)
        # Positive effects (computed but unused in this function).
        cur_pos_effect = [el for el in re.findall("\([^()]*\)", all_eff)
                          if el not in [el.replace("(not","").strip()[:-1] for el in cur_neg_effect]
                          and "".join(el.split()) != "(and)" and "".join(el.split()) != "()"]
        # Wrap each uncertain atom as "(not <atom>)" and append it to the
        # learned negative effects of this operator.
        learned_action_eff_neg[op_name] = cur_neg_effect + ["(not {})".format(el) for el in uncert_neg_eff[op_name]]
    with open("PDDL/domain.pddl", "r") as f:
        real_action_model = [el.lower() for el in f.read().split('\n') if el.strip() != ""]
    # Store real action model effects (same parse as for the learned model).
    all_action_schema = " ".join(real_action_model)[" ".join(real_action_model).index(":action "):]
    action_schema = [el.strip() for el in re.findall("(?:(?!:action).)*", all_action_schema) if
                     el.strip() != ""]
    for schema in action_schema:
        op_name = schema.split()[1]
        # Operator parameter names (tokens starting with "?").
        op_params = [el for el in re.findall("\([^()]*\)", re.findall(":parameters.*:precondition", schema)[0])[0].strip()[1:-1].split()
                     if el.startswith("?")]
        all_eff = re.findall(":effect.*", schema)[0].strip()[:-1].strip()
        cur_neg_effect = re.findall("\(not[^)]*\)\)", all_eff)
        cur_pos_effect = [el for el in re.findall("\([^()]*\)", all_eff)
                          if el not in [el.replace("(not", "").strip()[:-1] for el in cur_neg_effect]
                          and "".join(el.split()) != "(and)" and "".join(el.split()) != "()"]
        # Canonicalize real-model parameter names to "?param_<i>" for
        # string comparison with the learned model.
        for k in range(len(cur_neg_effect)):
            for j,param in enumerate(op_params):
                cur_neg_effect[k] = cur_neg_effect[k].replace(" " + param + " ", " ?param_{} ".format(j + 1))
                cur_neg_effect[k] = cur_neg_effect[k].replace("(" + param + " ", "(?param_{} ".format(j + 1))
                cur_neg_effect[k] = cur_neg_effect[k].replace(" " + param + ")", " ?param_{})".format(j + 1))
        real_action_eff_neg[op_name] = cur_neg_effect
    # Tally: real-but-not-learned -> FN; learned-and-real -> TP; else FP.
    tp_eff_neg = 0
    fp_eff_neg = 0
    fn_eff_neg = 0
    for key, value in real_action_eff_neg.items():
        for pred in value:
            if pred not in learned_action_eff_neg[key]:
                fn_eff_neg += 1
    for key, value in learned_action_eff_neg.items():
        for pred in value:
            if pred in real_action_eff_neg[key]:
                tp_eff_neg += 1
            else:
                fp_eff_neg += 1
    return tp_eff_neg, fp_eff_neg, fn_eff_neg
def action_model_eff_pos_predictions():
    """Count positive-effect predictions of the learned PDDL action model.

    Parses "PDDL/domain_learned.pddl" and "PDDL/domain.pddl", extracts the
    positive (add) effects of every action schema — dropping atoms that appear
    wrapped in "(not ...)" — normalizes the real model's parameter names to
    "?param_<i>", and compares the per-operator sets.

    Returns:
        (tp_eff_pos, fp_eff_pos, fn_eff_pos) over positive-effect atoms.

    NOTE(review): unlike the real-model branch, the learned-model branch does
    not filter "(and)" / "()" artifacts from cur_pos_effect — confirm whether
    the learned domains can contain them.
    """
    real_action_eff_pos = defaultdict(list)
    learned_action_eff_pos = defaultdict(list)
    # Compute action model coverage and overfitting
    with open("PDDL/domain_learned.pddl", "r") as f:
        learned_action_model = [el.lower() for el in f.read().split('\n') if el.strip() != ""]
    # Store learned action model effects: flatten the domain, keep everything
    # from the first ":action", split into per-action chunks.
    all_action_schema = " ".join(learned_action_model)[" ".join(learned_action_model).index(":action "):]
    action_schema = [el.strip() for el in re.findall("(?:(?!:action).)*", all_action_schema) if el.strip() != ""]
    for schema in action_schema:
        op_name = schema.split()[1]
        all_eff = re.findall(":effect.*", schema)[0].strip()[:-1].strip()
        cur_neg_effect = re.findall("\(not[^)]*\)\)", all_eff)
        cur_pos_effect = [el for el in re.findall("\([^()]*\)", all_eff)]
        # Remove atoms that actually belong to a negative effect: strip the
        # "(not" prefix and trailing ")" from each negative to get the inner atom.
        for neg in cur_neg_effect:
            if neg.replace("(not", "").strip()[:-1] in cur_pos_effect:
                cur_pos_effect.remove(neg.replace("(not", "").strip()[:-1])
        learned_action_eff_pos[op_name] = cur_pos_effect
    with open("PDDL/domain.pddl", "r") as f:
        real_action_model = [el.lower() for el in f.read().split('\n') if el.strip() != ""]
    # Store real action model effects (same parse as for the learned model).
    all_action_schema = " ".join(real_action_model)[" ".join(real_action_model).index(":action "):]
    action_schema = [el.strip() for el in re.findall("(?:(?!:action).)*", all_action_schema) if
                     el.strip() != ""]
    for schema in action_schema:
        op_name = schema.split()[1]
        # Operator parameter names (tokens starting with "?").
        op_params = [el for el in re.findall("\([^()]*\)", re.findall(":parameters.*:precondition", schema)[0])[0].strip()[1:-1].split()
                     if el.startswith("?")]
        all_eff = re.findall(":effect.*", schema)[0].strip()[:-1].strip()
        cur_neg_effect = re.findall("\(not[^)]*\)\)", all_eff)
        # Here empty "(and)" / "()" artifacts are filtered out as well.
        cur_pos_effect = [el for el in re.findall("\([^()]*\)", all_eff)
                          if "".join(el.split()) != "(and)" and "".join(el.split()) != "()"]
        for neg in cur_neg_effect:
            if neg.replace("(not", "").strip()[:-1] in cur_pos_effect:
                cur_pos_effect.remove(neg.replace("(not", "").strip()[:-1])
        # Canonicalize real-model parameter names to "?param_<i>" for
        # string comparison with the learned model.
        for k in range(len(cur_pos_effect)):
            for j,param in enumerate(op_params):
                cur_pos_effect[k] = cur_pos_effect[k].replace(" " + param + " ", " ?param_{} ".format(j + 1))
                cur_pos_effect[k] = cur_pos_effect[k].replace("(" + param + " ", "(?param_{} ".format(j + 1))
                cur_pos_effect[k] = cur_pos_effect[k].replace(" " + param + ")", " ?param_{})".format(j + 1))
        real_action_eff_pos[op_name] = cur_pos_effect
    # Tally: real-but-not-learned -> FN; learned-and-real -> TP; else FP.
    tp_eff_pos = 0
    fp_eff_pos = 0
    fn_eff_pos = 0
    for key, value in real_action_eff_pos.items():
        for pred in value:
            if pred not in learned_action_eff_pos[key]:
                fn_eff_pos += 1
    for key, value in learned_action_eff_pos.items():
        for pred in value:
            if pred in real_action_eff_pos[key]:
                tp_eff_pos += 1
            else:
                fp_eff_pos += 1
    return tp_eff_pos, fp_eff_pos, fn_eff_pos
def action_model_eff_predictions():
    """Count effect predictions (positive AND negative pooled) of the learned
    PDDL action model against the real one.

    Parses "PDDL/domain_learned.pddl" and "PDDL/domain.pddl", extracts both
    the negative effects ("(not ...)" wrappers) and the positive effects of
    every action schema, normalizes the real model's parameter names to
    "?param_<i>", and compares the per-operator union sets.

    Returns:
        (tp_eff, fp_eff, fn_eff) over all effect atoms.
    """
    real_action_eff = defaultdict(list)
    learned_action_eff = defaultdict(list)
    # Compute action model coverage and overfitting
    with open("PDDL/domain_learned.pddl", "r") as f:
        learned_action_model = [el.lower() for el in f.read().split('\n') if el.strip() != ""]
    # Store learned action model effects: flatten the domain, keep everything
    # from the first ":action", split into per-action chunks.
    all_action_schema = " ".join(learned_action_model)[" ".join(learned_action_model).index(":action "):]
    action_schema = [el.strip() for el in re.findall("(?:(?!:action).)*", all_action_schema) if el.strip() != ""]
    for schema in action_schema:
        op_name = schema.split()[1]
        all_eff = re.findall(":effect.*", schema)[0].strip()[:-1].strip()
        cur_neg_effect = re.findall("\(not[^)]*\)\)", all_eff)
        # Candidate positive effects, minus "(and)" / "()" artifacts ...
        cur_pos_effect = [el for el in re.findall("\([^()]*\)", all_eff)
                          if "".join(el.split()) != "(and)" and "".join(el.split()) != "()"]
        # ... and minus any atom that is actually inside a (not ...) wrapper.
        for neg in cur_neg_effect:
            if neg.replace("(not", "").strip()[:-1] in cur_pos_effect:
                cur_pos_effect.remove(neg.replace("(not", "").strip()[:-1])
        # Pool negatives and positives into a single per-operator list.
        learned_action_eff[op_name] = cur_neg_effect + cur_pos_effect
    with open("PDDL/domain.pddl", "r") as f:
        real_action_model = [el.lower() for el in f.read().split('\n') if el.strip() != ""]
    # Store real action model effects (same parse as for the learned model).
    all_action_schema = " ".join(real_action_model)[" ".join(real_action_model).index(":action "):]
    action_schema = [el.strip() for el in re.findall("(?:(?!:action).)*", all_action_schema) if
                     el.strip() != ""]
    for schema in action_schema:
        op_name = schema.split()[1]
        # Operator parameter names (tokens starting with "?").
        op_params = [el for el in re.findall("\([^()]*\)", re.findall(":parameters.*:precondition", schema)[0])[0].strip()[1:-1].split()
                     if el.startswith("?")]
        all_eff = re.findall(":effect.*", schema)[0].strip()[:-1].strip()
        cur_neg_effect = re.findall("\(not[^)]*\)\)", all_eff)
        cur_pos_effect = [el for el in re.findall("\([^()]*\)", all_eff)
                          if "".join(el.split()) != "(and)" and "".join(el.split()) != "()"]
        for neg in cur_neg_effect:
            if neg.replace("(not", "").strip()[:-1] in cur_pos_effect:
                cur_pos_effect.remove(neg.replace("(not", "").strip()[:-1])
        # Canonicalize real-model parameter names to "?param_<i>" in both the
        # negative and the positive effects.
        for k in range(len(cur_neg_effect)):
            for j,param in enumerate(op_params):
                cur_neg_effect[k] = cur_neg_effect[k].replace(" " + param + " ", " ?param_{} ".format(j + 1))
                cur_neg_effect[k] = cur_neg_effect[k].replace("(" + param + " ", "(?param_{} ".format(j + 1))
                cur_neg_effect[k] = cur_neg_effect[k].replace(" " + param + ")", " ?param_{})".format(j + 1))
        for k in range(len(cur_pos_effect)):
            for j,param in enumerate(op_params):
                cur_pos_effect[k] = cur_pos_effect[k].replace(" " + param + " ", " ?param_{} ".format(j + 1))
                cur_pos_effect[k] = cur_pos_effect[k].replace("(" + param + " ", "(?param_{} ".format(j + 1))
                cur_pos_effect[k] = cur_pos_effect[k].replace(" " + param + ")", " ?param_{})".format(j + 1))
        real_action_eff[op_name] = cur_neg_effect + cur_pos_effect
    # Tally: real-but-not-learned -> FN; learned-and-real -> TP; else FP.
    tp_eff = 0
    fp_eff = 0
    fn_eff = 0
    for key, value in real_action_eff.items():
        for pred in value:
            if pred not in learned_action_eff[key]:
                fn_eff += 1
    for key, value in learned_action_eff.items():
        for pred in value:
            if pred in real_action_eff[key]:
                tp_eff += 1
            else:
                fp_eff += 1
    return tp_eff, fp_eff, fn_eff
def action_model_eff_predictions_with_uncertain_neg(uncert_neg_eff):
    """Like action_model_eff_predictions, but the uncertain negative effects
    in ``uncert_neg_eff`` (operator name -> list of atoms, without the
    "(not ...)" wrapper) are added to the learned model's pooled effects.

    Returns:
        (tp_eff, fp_eff, fn_eff) over all effect atoms.
    """
    real_action_eff = defaultdict(list)
    learned_action_eff = defaultdict(list)
    # Compute action model coverage and overfitting
    with open("PDDL/domain_learned.pddl", "r") as f:
        learned_action_model = [el.lower() for el in f.read().split('\n') if el.strip() != ""]
    # Store learned action model effects: flatten the domain, keep everything
    # from the first ":action", split into per-action chunks.
    all_action_schema = " ".join(learned_action_model)[" ".join(learned_action_model).index(":action "):]
    action_schema = [el.strip() for el in re.findall("(?:(?!:action).)*", all_action_schema) if el.strip() != ""]
    for schema in action_schema:
        op_name = schema.split()[1]
        all_eff = re.findall(":effect.*", schema)[0].strip()[:-1].strip()
        cur_neg_effect = re.findall("\(not[^)]*\)\)", all_eff)
        # Candidate positive effects, minus "(and)" / "()" artifacts ...
        cur_pos_effect = [el for el in re.findall("\([^()]*\)", all_eff)
                          if "".join(el.split()) != "(and)" and "".join(el.split()) != "()"]
        # ... and minus any atom that is actually inside a (not ...) wrapper.
        for neg in cur_neg_effect:
            if neg.replace("(not", "").strip()[:-1] in cur_pos_effect:
                cur_pos_effect.remove(neg.replace("(not", "").strip()[:-1])
        # Pool: certain negatives + wrapped uncertain negatives + positives.
        learned_action_eff[op_name] = cur_neg_effect + ["(not {})".format(el) for el in uncert_neg_eff[op_name]] + cur_pos_effect
    with open("PDDL/domain.pddl", "r") as f:
        real_action_model = [el.lower() for el in f.read().split('\n') if el.strip() != ""]
    # Store real action model effects (same parse as for the learned model).
    all_action_schema = " ".join(real_action_model)[" ".join(real_action_model).index(":action "):]
    action_schema = [el.strip() for el in re.findall("(?:(?!:action).)*", all_action_schema) if
                     el.strip() != ""]
    for schema in action_schema:
        op_name = schema.split()[1]
        # Operator parameter names (tokens starting with "?").
        op_params = [el for el in re.findall("\([^()]*\)", re.findall(":parameters.*:precondition", schema)[0])[0].strip()[1:-1].split()
                     if el.startswith("?")]
        all_eff = re.findall(":effect.*", schema)[0].strip()[:-1].strip()
        cur_neg_effect = re.findall("\(not[^)]*\)\)", all_eff)
        cur_pos_effect = [el for el in re.findall("\([^()]*\)", all_eff)
                          if "".join(el.split()) != "(and)" and "".join(el.split()) != "()"]
        for neg in cur_neg_effect:
            if neg.replace("(not", "").strip()[:-1] in cur_pos_effect:
                cur_pos_effect.remove(neg.replace("(not", "").strip()[:-1])
        # Canonicalize real-model parameter names to "?param_<i>".
        for k in range(len(cur_neg_effect)):
            for j,param in enumerate(op_params):
                cur_neg_effect[k] = cur_neg_effect[k].replace(" " + param + " ", " ?param_{} ".format(j + 1))
                cur_neg_effect[k] = cur_neg_effect[k].replace("(" + param + " ", "(?param_{} ".format(j + 1))
                cur_neg_effect[k] = cur_neg_effect[k].replace(" " + param + ")", " ?param_{})".format(j + 1))
        for k in range(len(cur_pos_effect)):
            for j,param in enumerate(op_params):
                cur_pos_effect[k] = cur_pos_effect[k].replace(" " + param + " ", " ?param_{} ".format(j + 1))
                cur_pos_effect[k] = cur_pos_effect[k].replace("(" + param + " ", "(?param_{} ".format(j + 1))
                cur_pos_effect[k] = cur_pos_effect[k].replace(" " + param + ")", " ?param_{})".format(j + 1))
        real_action_eff[op_name] = cur_neg_effect + cur_pos_effect
    # Tally: real-but-not-learned -> FN; learned-and-real -> TP; else FP.
    tp_eff = 0
    fp_eff = 0
    fn_eff = 0
    for key, value in real_action_eff.items():
        for pred in value:
            if pred not in learned_action_eff[key]:
                fn_eff += 1
    for key, value in learned_action_eff.items():
        for pred in value:
            if pred in real_action_eff[key]:
                tp_eff += 1
            else:
                fp_eff += 1
    return tp_eff, fp_eff, fn_eff
def action_model_preconditions_predictions():
    """Count precondition predictions of the learned PDDL action model.

    Line-based parse (unlike the effect functions, which flatten the file):
    for each "(:action" line, the lines between ":precondition" and ":effect"
    are collected, atoms are extracted with a parenthesis regex, whitespace is
    stripped out of each atom, and the real model's parameter names are
    canonicalized to "?param_<i>" before comparison.

    Returns:
        (tp_precs, fp_precs, fn_precs) over precondition atoms.

    NOTE(review): the learned-model branch filters "(and)" / "()" artifacts
    but the real-model branch does not — confirm the real domains never
    produce such atoms.
    """
    # Compute action model coverage and overfitting
    with open("PDDL/domain_learned.pddl", "r") as f:
        learned_action_model = [el.lower() for el in f.read().split('\n') if el.strip() != ""]
    # NOTE(review): this binds the *builtin* `dir` function; it is overwritten
    # below and never used as a path — appears vestigial.
    benchmark_dir = dir
    real_action_precond = defaultdict(list)
    learned_action_precond = defaultdict(list)
    # Store learned action model preconditions
    for i in range(len(learned_action_model) - 2):
        line = learned_action_model[i]
        if line.strip().find("(:action ") != -1:
            found_precond = False
            action_name = line.strip().split()[1]
            action_precond = []
            # Scan forward for the ":precondition" section of this action,
            # then collect lines until ":effect".
            for j in range(i + 1, len(learned_action_model) - 1):
                if found_precond:
                    break
                if learned_action_model[j].strip().find(":precondition") != -1:
                    found_precond = True
                    action_precond.append(learned_action_model[j])
                    for k in range(j + 1, len(learned_action_model)):
                        if learned_action_model[k].strip().find(":effect") != -1:
                            break
                        action_precond.append(learned_action_model[k].strip())
            # Deduplicate atoms with all internal whitespace removed, dropping
            # empty "(and)" / "()" artifacts.
            learned_action_precond[action_name] = list(set([el.replace(" ", "")
                                                            for el in sorted(re.findall("\([^()]*\)", "".join(action_precond)))
                                                            if "".join(el.split()) != "(and)" and "".join(el.split()) != "()"]))
    # These two locals are not used below in this function.
    domain = Configuration.INSTANCE_DATA_PATH_PDDL.split("/")[-3]
    benchmark_dir = Configuration.INSTANCE_DATA_PATH_PDDL.split("/")[-2]
    with open("PDDL/domain.pddl") as r:
        real_action_model = [el.lower() for el in r.read().split('\n') if el.strip() != ""]
    for i in range(len(real_action_model) - 2):
        line = real_action_model[i]
        if line.strip().find("(:action ") != -1:
            found_precond = False
            action_name = line.strip().split()[1]
            # Parameter names (tokens starting with "?") — assumes the
            # ":parameters" clause sits on the line right after "(:action".
            action_params = [el for el in real_action_model[i + 1].replace("(","").replace(")","").strip().split()[1:]
                             if el.startswith("?")]
            action_precond = []
            for j in range(i + 1, len(real_action_model) - 1):
                if found_precond:
                    break
                if real_action_model[j].strip().find(":precondition") != -1:
                    found_precond = True
                    action_precond.append(real_action_model[j])
                    for k in range(j + 1, len(real_action_model)):
                        if real_action_model[k].strip().find(":effect") != -1:
                            break
                        action_precond.append(real_action_model[k])
            # Replace action precondition objects name with "param_#"
            for p in range(len(action_precond)):
                for el in action_params:
                    action_precond[p] = action_precond[p].replace(" " + el + " ", " ?param_{} ".format(
                        action_params.index(el) + 1))
                    action_precond[p] = action_precond[p].replace("(" + el + " ", "(?param_{} ".format(
                        action_params.index(el) + 1))
                    action_precond[p] = action_precond[p].replace(" " + el + ")", " ?param_{})".format(
                        action_params.index(el) + 1))
            real_action_precond[action_name] = list(set([el.replace(" ", "")
                                                         for el in sorted(
                    re.findall("\([^()]*\)", "".join(action_precond)))]))
    # Tally: real-but-not-learned -> FN; learned-and-real -> TP; else FP.
    tp_precs = 0
    fp_precs = 0
    fn_precs = 0
    for key, value in real_action_precond.items():
        for pred in value:
            if pred not in learned_action_precond[key]:
                fn_precs += 1
    for key, value in learned_action_precond.items():
        for pred in value:
            if pred in real_action_precond[key]:
                tp_precs += 1
            else:
                fp_precs += 1
    return tp_precs, fp_precs, fn_precs
def action_model_preconditions_statistics():
    """Count the precondition edits separating the learned model from the
    real one. Same parse as action_model_preconditions_predictions.

    Returns:
        (ins_pre, del_pre): ``ins_pre`` = real precondition atoms missing from
        the learned model (would need inserting); ``del_pre`` = learned atoms
        absent from the real model (would need deleting).
    """
    # Compute action model coverage and overfitting
    with open("PDDL/domain_learned.pddl", "r") as f:
        learned_action_model = [el.lower() for el in f.read().split('\n') if el.strip() != ""]
    # NOTE(review): binds the builtin `dir`; overwritten below, never used —
    # appears vestigial.
    benchmark_dir = dir
    real_action_precond = defaultdict(list)
    learned_action_precond = defaultdict(list)
    # Store learned action model preconditions
    for i in range(len(learned_action_model) - 2):
        line = learned_action_model[i]
        if line.strip().find("(:action ") != -1:
            found_precond = False
            action_name = line.strip().split()[1]
            action_precond = []
            # Collect the lines between ":precondition" and ":effect".
            for j in range(i + 1, len(learned_action_model) - 1):
                if found_precond:
                    break
                if learned_action_model[j].strip().find(":precondition") != -1:
                    found_precond = True
                    action_precond.append(learned_action_model[j])
                    for k in range(j + 1, len(learned_action_model)):
                        if learned_action_model[k].strip().find(":effect") != -1:
                            break
                        action_precond.append(learned_action_model[k].strip())
            # Deduplicate whitespace-stripped atoms, dropping "(and)" / "()".
            learned_action_precond[action_name] = list(set([el.replace(" ", "")
                                                            for el in sorted(re.findall("\([^()]*\)", "".join(action_precond)))
                                                            if "".join(el.split()) != "(and)" and "".join(el.split()) != "()"]))
    # These two locals are not used below in this function.
    domain = Configuration.INSTANCE_DATA_PATH_PDDL.split("/")[-3]
    benchmark_dir = Configuration.INSTANCE_DATA_PATH_PDDL.split("/")[-2]
    with open("PDDL/domain.pddl") as r:
        real_action_model = [el.lower() for el in r.read().split('\n') if el.strip() != ""]
    for i in range(len(real_action_model) - 2):
        line = real_action_model[i]
        if line.strip().find("(:action ") != -1:
            found_precond = False
            action_name = line.strip().split()[1]
            # Parameter names — assumes ":parameters" is on the next line.
            action_params = [el for el in real_action_model[i + 1].replace("(","").replace(")","").strip().split()[1:]
                             if el.startswith("?")]
            action_precond = []
            for j in range(i + 1, len(real_action_model) - 1):
                if found_precond:
                    break
                if real_action_model[j].strip().find(":precondition") != -1:
                    found_precond = True
                    action_precond.append(real_action_model[j])
                    for k in range(j + 1, len(real_action_model)):
                        if real_action_model[k].strip().find(":effect") != -1:
                            break
                        action_precond.append(real_action_model[k])
            # Replace action precondition objects name with "param_#"
            for p in range(len(action_precond)):
                for el in action_params:
                    action_precond[p] = action_precond[p].replace(" " + el + " ", " ?param_{} ".format(
                        action_params.index(el) + 1))
                    action_precond[p] = action_precond[p].replace("(" + el + " ", "(?param_{} ".format(
                        action_params.index(el) + 1))
                    action_precond[p] = action_precond[p].replace(" " + el + ")", " ?param_{})".format(
                        action_params.index(el) + 1))
            real_action_precond[action_name] = list(set([el.replace(" ", "")
                                                         for el in sorted(
                    re.findall("\([^()]*\)", "".join(action_precond)))]))
    ins_pre = 0
    del_pre = 0
    # Real atoms missing from the learned model.
    for key, value in real_action_precond.items():
        for pred in value:
            if pred not in learned_action_precond[key]:
                ins_pre += 1
    # Learned atoms not present in the real model.
    for key, value in learned_action_precond.items():
        for pred in value:
            if pred not in real_action_precond[key]:
                del_pre += 1
    return ins_pre, del_pre
def action_model_preconditions_size():
    """Count the total number of precondition atoms in the real and learned
    models. Same parse as action_model_preconditions_predictions.

    Returns:
        (real_precs_size, learned_precs_size): atom counts summed over all
        operators of the real and the learned domain respectively.
    """
    # Compute action model coverage and overfitting
    with open("PDDL/domain_learned.pddl", "r") as f:
        learned_action_model = [el.lower() for el in f.read().split('\n') if el.strip() != ""]
    # NOTE(review): binds the builtin `dir`; overwritten below, never used —
    # appears vestigial.
    benchmark_dir = dir
    real_action_precond = defaultdict(list)
    learned_action_precond = defaultdict(list)
    # Store learned action model preconditions
    for i in range(len(learned_action_model) - 2):
        line = learned_action_model[i]
        if line.strip().find("(:action ") != -1:
            found_precond = False
            action_name = line.strip().split()[1]
            action_precond = []
            # Collect the lines between ":precondition" and ":effect".
            for j in range(i + 1, len(learned_action_model) - 1):
                if found_precond:
                    break
                if learned_action_model[j].strip().find(":precondition") != -1:
                    found_precond = True
                    action_precond.append(learned_action_model[j])
                    for k in range(j + 1, len(learned_action_model)):
                        if learned_action_model[k].strip().find(":effect") != -1:
                            break
                        action_precond.append(learned_action_model[k].strip())
            # Deduplicate whitespace-stripped atoms, dropping "(and)" / "()".
            learned_action_precond[action_name] = list(set([el.replace(" ", "")
                                                            for el in sorted(re.findall("\([^()]*\)", "".join(action_precond)))
                                                            if "".join(el.split()) != "(and)" and "".join(el.split()) != "()"]))
    # These two locals are not used below in this function.
    domain = Configuration.INSTANCE_DATA_PATH_PDDL.split("/")[-3]
    benchmark_dir = Configuration.INSTANCE_DATA_PATH_PDDL.split("/")[-2]
    with open("PDDL/domain.pddl") as r:
        real_action_model = [el.lower() for el in r.read().split('\n') if el.strip() != ""]
    for i in range(len(real_action_model) - 2):
        line = real_action_model[i]
        if line.strip().find("(:action ") != -1:
            found_precond = False
            action_name = line.strip().split()[1]
            # Parameter names — assumes ":parameters" is on the next line.
            action_params = [el for el in real_action_model[i + 1].replace("(","").replace(")","").strip().split()[1:]
                             if el.startswith("?")]
            action_precond = []
            for j in range(i + 1, len(real_action_model) - 1):
                if found_precond:
                    break
                if real_action_model[j].strip().find(":precondition") != -1:
                    found_precond = True
                    action_precond.append(real_action_model[j])
                    for k in range(j + 1, len(real_action_model)):
                        if real_action_model[k].strip().find(":effect") != -1:
                            break
                        action_precond.append(real_action_model[k])
            # Replace action precondition objects name with "param_#"
            # (kept for consistency with the comparison functions even though
            # only the count is returned here).
            for p in range(len(action_precond)):
                for el in action_params:
                    action_precond[p] = action_precond[p].replace(" " + el + " ", " ?param_{} ".format(
                        action_params.index(el) + 1))
                    action_precond[p] = action_precond[p].replace("(" + el + " ", "(?param_{} ".format(
                        action_params.index(el) + 1))
                    action_precond[p] = action_precond[p].replace(" " + el + ")", " ?param_{})".format(
                        action_params.index(el) + 1))
            real_action_precond[action_name] = list(set([el.replace(" ", "")
                                                         for el in sorted(
                    re.findall("\([^()]*\)", "".join(action_precond)))]))
    # Sum the (deduplicated) atom counts over all operators.
    real_precs_size = 0
    learned_precs_size = 0
    for key, value in real_action_precond.items():
        for pred in value:
            real_precs_size += 1
    for key, value in learned_action_precond.items():
        for pred in value:
            learned_precs_size += 1
    return real_precs_size, learned_precs_size
def action_model_eff_pos_size():
    """Count the positive (add) effects of the real and learned PDDL models.

    Parses ``PDDL/domain_learned.pddl`` and ``PDDL/domain.pddl``, extracts
    the positive effects of every action schema, and — for the real model —
    rewrites parameter occurrences to the positional alias ``?param_<i>``
    (the naming scheme the learned model is presumably already using —
    TODO confirm against the learner's output format).

    Returns:
        tuple[int, int]: ``(real_size_eff_pos, learned_size_eff_pos)`` — the
        total number of positive-effect predicates over all operators in the
        real and in the learned action model, respectively.
    """
    real_action_eff_pos = defaultdict(list)
    learned_action_eff_pos = defaultdict(list)
    # --- Learned model: collect positive effects per operator ---
    with open("PDDL/domain_learned.pddl", "r") as f:
        learned_action_model = [el.lower() for el in f.read().split('\n') if el.strip() != ""]
    # Flatten to one string starting at the first ":action" schema.
    flat = " ".join(learned_action_model)
    all_action_schema = flat[flat.index(":action "):]
    # Tempered-dot regex: each match is a maximal run not containing
    # ":action", i.e. one chunk per action schema.
    action_schema = [el.strip() for el in re.findall(r"(?:(?!:action).)*", all_action_schema)
                     if el.strip() != ""]
    for schema in action_schema:
        op_name = schema.split()[1]
        # Effect section: everything after ":effect", minus the schema's
        # trailing closing parenthesis.
        all_eff = re.findall(r":effect.*", schema)[0].strip()[:-1].strip()
        cur_neg_effect = re.findall(r"\(not[^)]*\)\)", all_eff)
        # Innermost predicates, skipping "(and)" / "()" connective leftovers.
        cur_pos_effect = [el for el in re.findall(r"\([^()]*\)", all_eff)
                          if "".join(el.split()) not in ("(and)", "()")]
        # Predicates wrapped in (not ...) are negative, not positive, effects.
        for neg in cur_neg_effect:
            inner = neg.replace("(not", "").strip()[:-1]
            if inner in cur_pos_effect:
                cur_pos_effect.remove(inner)
        learned_action_eff_pos[op_name] = cur_pos_effect
    # --- Real model: same extraction, plus parameter normalization ---
    with open("PDDL/domain.pddl", "r") as f:
        real_action_model = [el.lower() for el in f.read().split('\n') if el.strip() != ""]
    flat = " ".join(real_action_model)
    all_action_schema = flat[flat.index(":action "):]
    action_schema = [el.strip() for el in re.findall(r"(?:(?!:action).)*", all_action_schema)
                     if el.strip() != ""]
    for schema in action_schema:
        op_name = schema.split()[1]
        # Declared operator parameters: the "?"-prefixed tokens of the
        # ":parameters (...)" list.
        op_params = [el for el in
                     re.findall(r"\([^()]*\)",
                                re.findall(r":parameters.*:precondition", schema)[0])[0].strip()[1:-1].split()
                     if el.startswith("?")]
        all_eff = re.findall(r":effect.*", schema)[0].strip()[:-1].strip()
        cur_neg_effect = re.findall(r"\(not[^)]*\)\)", all_eff)
        cur_pos_effect = [el for el in re.findall(r"\([^()]*\)", all_eff)
                          if "".join(el.split()) not in ("(and)", "()")]
        for neg in cur_neg_effect:
            inner = neg.replace("(not", "").strip()[:-1]
            if inner in cur_pos_effect:
                cur_pos_effect.remove(inner)
        # Rename each parameter occurrence to its positional "?param_<i>"
        # alias so real and learned predicates become comparable.
        for k in range(len(cur_pos_effect)):
            for j, param in enumerate(op_params):
                cur_pos_effect[k] = cur_pos_effect[k].replace(" " + param + " ", " ?param_{} ".format(j + 1))
                cur_pos_effect[k] = cur_pos_effect[k].replace("(" + param + " ", "(?param_{} ".format(j + 1))
                cur_pos_effect[k] = cur_pos_effect[k].replace(" " + param + ")", " ?param_{})".format(j + 1))
        real_action_eff_pos[op_name] = cur_pos_effect
    # Total positive effects over all operators.
    real_size_eff_pos = sum(len(preds) for preds in real_action_eff_pos.values())
    learned_size_eff_pos = sum(len(preds) for preds in learned_action_eff_pos.values())
    return real_size_eff_pos, learned_size_eff_pos
def action_model_eff_neg_size():
    """Count the negative (delete) effects of the real and learned PDDL models.

    Parses ``PDDL/domain_learned.pddl`` and ``PDDL/domain.pddl``, extracts
    the ``(not ...)`` effects of every action schema, and — for the real
    model only — rewrites parameter occurrences to the positional alias
    ``?param_<i>`` (matching the naming the learned model presumably uses —
    TODO confirm against the learner's output format).

    Returns:
        tuple[int, int]: ``(real_size_eff_neg, learned_size_eff_neg)`` — the
        total number of negative-effect predicates over all operators in the
        real and in the learned action model, respectively.
    """
    real_action_eff_neg = defaultdict(list)
    learned_action_eff_neg = defaultdict(list)
    # --- Learned model: collect negative effects per operator ---
    with open("PDDL/domain_learned.pddl", "r") as f:
        learned_action_model = [el.lower() for el in f.read().split('\n') if el.strip() != ""]
    # Flatten to one string starting at the first ":action" schema.
    flat = " ".join(learned_action_model)
    all_action_schema = flat[flat.index(":action "):]
    # Tempered-dot regex: one chunk per action schema.
    action_schema = [el.strip() for el in re.findall(r"(?:(?!:action).)*", all_action_schema)
                     if el.strip() != ""]
    for schema in action_schema:
        op_name = schema.split()[1]
        # Effect section: everything after ":effect", minus the schema's
        # trailing closing parenthesis.
        all_eff = re.findall(r":effect.*", schema)[0].strip()[:-1].strip()
        # Negative effects are the "(not (...))" sub-expressions.
        learned_action_eff_neg[op_name] = re.findall(r"\(not[^)]*\)\)", all_eff)
    # --- Real model: same extraction, plus parameter normalization ---
    with open("PDDL/domain.pddl", "r") as f:
        real_action_model = [el.lower() for el in f.read().split('\n') if el.strip() != ""]
    flat = " ".join(real_action_model)
    all_action_schema = flat[flat.index(":action "):]
    action_schema = [el.strip() for el in re.findall(r"(?:(?!:action).)*", all_action_schema)
                     if el.strip() != ""]
    for schema in action_schema:
        op_name = schema.split()[1]
        # Declared operator parameters ("?"-prefixed tokens).
        op_params = [el for el in
                     re.findall(r"\([^()]*\)",
                                re.findall(r":parameters.*:precondition", schema)[0])[0].strip()[1:-1].split()
                     if el.startswith("?")]
        all_eff = re.findall(r":effect.*", schema)[0].strip()[:-1].strip()
        cur_neg_effect = re.findall(r"\(not[^)]*\)\)", all_eff)
        # Rename each parameter occurrence to its positional "?param_<i>"
        # alias so real and learned predicates become comparable.
        for k in range(len(cur_neg_effect)):
            for j, param in enumerate(op_params):
                cur_neg_effect[k] = cur_neg_effect[k].replace(" " + param + " ", " ?param_{} ".format(j + 1))
                cur_neg_effect[k] = cur_neg_effect[k].replace("(" + param + " ", "(?param_{} ".format(j + 1))
                cur_neg_effect[k] = cur_neg_effect[k].replace(" " + param + ")", " ?param_{})".format(j + 1))
        real_action_eff_neg[op_name] = cur_neg_effect
    # Total negative effects over all operators.
    real_size_eff_neg = sum(len(preds) for preds in real_action_eff_neg.values())
    learned_size_eff_neg = sum(len(preds) for preds in learned_action_eff_neg.values())
    return real_size_eff_neg, learned_size_eff_neg
def action_model_eff_pos_statistics():
    """Compare positive (add) effects of the real vs. learned PDDL models.

    Parses ``PDDL/domain_learned.pddl`` and ``PDDL/domain.pddl``, extracts
    the positive effects of every action schema (parameters of the real
    model are normalized to ``?param_<i>``; the learned model is presumably
    already in that form — TODO confirm), then counts mismatches.

    Returns:
        tuple[int, int]: ``(ins_add, del_add)`` where ``ins_add`` is the
        number of real positive effects missing from the learned model
        (would need inserting) and ``del_add`` the number of learned
        positive effects absent from the real model (would need deleting).
    """
    real_action_eff_pos = defaultdict(list)
    learned_action_eff_pos = defaultdict(list)
    # --- Learned model: collect positive effects per operator ---
    with open("PDDL/domain_learned.pddl", "r") as f:
        learned_action_model = [el.lower() for el in f.read().split('\n') if el.strip() != ""]
    # Flatten to one string starting at the first ":action" schema.
    flat = " ".join(learned_action_model)
    all_action_schema = flat[flat.index(":action "):]
    # Tempered-dot regex: one chunk per action schema.
    action_schema = [el.strip() for el in re.findall(r"(?:(?!:action).)*", all_action_schema)
                     if el.strip() != ""]
    for schema in action_schema:
        op_name = schema.split()[1]
        # Effect section minus the schema's trailing closing parenthesis.
        all_eff = re.findall(r":effect.*", schema)[0].strip()[:-1].strip()
        cur_neg_effect = re.findall(r"\(not[^)]*\)\)", all_eff)
        # Innermost predicates, skipping "(and)" / "()" connective leftovers.
        cur_pos_effect = [el for el in re.findall(r"\([^()]*\)", all_eff)
                          if "".join(el.split()) not in ("(and)", "()")]
        # Predicates wrapped in (not ...) are negative, not positive, effects.
        for neg in cur_neg_effect:
            inner = neg.replace("(not", "").strip()[:-1]
            if inner in cur_pos_effect:
                cur_pos_effect.remove(inner)
        learned_action_eff_pos[op_name] = cur_pos_effect
    # --- Real model: same extraction, plus parameter normalization ---
    with open("PDDL/domain.pddl", "r") as f:
        real_action_model = [el.lower() for el in f.read().split('\n') if el.strip() != ""]
    flat = " ".join(real_action_model)
    all_action_schema = flat[flat.index(":action "):]
    action_schema = [el.strip() for el in re.findall(r"(?:(?!:action).)*", all_action_schema)
                     if el.strip() != ""]
    for schema in action_schema:
        op_name = schema.split()[1]
        # Declared operator parameters ("?"-prefixed tokens).
        op_params = [el for el in
                     re.findall(r"\([^()]*\)",
                                re.findall(r":parameters.*:precondition", schema)[0])[0].strip()[1:-1].split()
                     if el.startswith("?")]
        all_eff = re.findall(r":effect.*", schema)[0].strip()[:-1].strip()
        cur_neg_effect = re.findall(r"\(not[^)]*\)\)", all_eff)
        cur_pos_effect = [el for el in re.findall(r"\([^()]*\)", all_eff)
                          if "".join(el.split()) not in ("(and)", "()")]
        for neg in cur_neg_effect:
            inner = neg.replace("(not", "").strip()[:-1]
            if inner in cur_pos_effect:
                cur_pos_effect.remove(inner)
        # Rename each parameter occurrence to its positional "?param_<i>"
        # alias so real and learned predicates become comparable.
        for k in range(len(cur_pos_effect)):
            for j, param in enumerate(op_params):
                cur_pos_effect[k] = cur_pos_effect[k].replace(" " + param + " ", " ?param_{} ".format(j + 1))
                cur_pos_effect[k] = cur_pos_effect[k].replace("(" + param + " ", "(?param_{} ".format(j + 1))
                cur_pos_effect[k] = cur_pos_effect[k].replace(" " + param + ")", " ?param_{})".format(j + 1))
        real_action_eff_pos[op_name] = cur_pos_effect
    # Count mismatches in both directions. Use .get() so the lookup does not
    # silently insert empty entries into the other defaultdict.
    ins_add = sum(1 for key, preds in real_action_eff_pos.items()
                  for pred in preds if pred not in learned_action_eff_pos.get(key, []))
    del_add = sum(1 for key, preds in learned_action_eff_pos.items()
                  for pred in preds if pred not in real_action_eff_pos.get(key, []))
    return ins_add, del_add
def action_model_eff_neg_statistics():
    """Compare negative (delete) effects of the real vs. learned PDDL models.

    Parses ``PDDL/domain_learned.pddl`` and ``PDDL/domain.pddl``, extracts
    the ``(not ...)`` effects of every action schema (parameters of the real
    model are normalized to ``?param_<i>``; the learned model is presumably
    already in that form — TODO confirm), then counts mismatches.

    Returns:
        tuple[int, int]: ``(ins_del, del_del)`` where ``ins_del`` is the
        number of real negative effects missing from the learned model
        (would need inserting) and ``del_del`` the number of learned
        negative effects absent from the real model (would need deleting).
    """
    real_action_eff_neg = defaultdict(list)
    learned_action_eff_neg = defaultdict(list)
    # --- Learned model: collect negative effects per operator ---
    with open("PDDL/domain_learned.pddl", "r") as f:
        learned_action_model = [el.lower() for el in f.read().split('\n') if el.strip() != ""]
    # Flatten to one string starting at the first ":action" schema.
    flat = " ".join(learned_action_model)
    all_action_schema = flat[flat.index(":action "):]
    # Tempered-dot regex: one chunk per action schema.
    action_schema = [el.strip() for el in re.findall(r"(?:(?!:action).)*", all_action_schema)
                     if el.strip() != ""]
    for schema in action_schema:
        op_name = schema.split()[1]
        # Effect section minus the schema's trailing closing parenthesis.
        all_eff = re.findall(r":effect.*", schema)[0].strip()[:-1].strip()
        # Negative effects are the "(not (...))" sub-expressions.
        learned_action_eff_neg[op_name] = re.findall(r"\(not[^)]*\)\)", all_eff)
    # --- Real model: same extraction, plus parameter normalization ---
    with open("PDDL/domain.pddl", "r") as f:
        real_action_model = [el.lower() for el in f.read().split('\n') if el.strip() != ""]
    flat = " ".join(real_action_model)
    all_action_schema = flat[flat.index(":action "):]
    action_schema = [el.strip() for el in re.findall(r"(?:(?!:action).)*", all_action_schema)
                     if el.strip() != ""]
    for schema in action_schema:
        op_name = schema.split()[1]
        # Declared operator parameters ("?"-prefixed tokens).
        op_params = [el for el in
                     re.findall(r"\([^()]*\)",
                                re.findall(r":parameters.*:precondition", schema)[0])[0].strip()[1:-1].split()
                     if el.startswith("?")]
        all_eff = re.findall(r":effect.*", schema)[0].strip()[:-1].strip()
        cur_neg_effect = re.findall(r"\(not[^)]*\)\)", all_eff)
        # Rename each parameter occurrence to its positional "?param_<i>"
        # alias so real and learned predicates become comparable.
        for k in range(len(cur_neg_effect)):
            for j, param in enumerate(op_params):
                cur_neg_effect[k] = cur_neg_effect[k].replace(" " + param + " ", " ?param_{} ".format(j + 1))
                cur_neg_effect[k] = cur_neg_effect[k].replace("(" + param + " ", "(?param_{} ".format(j + 1))
                cur_neg_effect[k] = cur_neg_effect[k].replace(" " + param + ")", " ?param_{})".format(j + 1))
        real_action_eff_neg[op_name] = cur_neg_effect
    # Count mismatches in both directions. Use .get() so the lookup does not
    # silently insert empty entries into the other defaultdict.
    ins_del = sum(1 for key, preds in real_action_eff_neg.items()
                  for pred in preds if pred not in learned_action_eff_neg.get(key, []))
    del_del = sum(1 for key, preds in learned_action_eff_neg.items()
                  for pred in preds if pred not in real_action_eff_neg.get(key, []))
    return ins_del, del_del
def action_model_eff_neg_statistics_with_uncertain(uncert_neg_eff):
    """Compare negative effects of real vs. learned models, with uncertain ones.

    Same comparison as ``action_model_eff_neg_statistics``, but the learned
    model's negative effects are extended with the *uncertain* negative
    effects supplied per operator before counting mismatches.

    Args:
        uncert_neg_eff: mapping from operator name to an iterable of
            predicate strings; each is wrapped as ``(not <pred>)`` and added
            to that operator's learned negative effects. NOTE(review): a
            plain dict missing an operator key raises ``KeyError`` here —
            callers presumably pass a ``defaultdict(list)``; confirm.

    Returns:
        tuple[int, int]: ``(ins_del, del_del)`` — real negative effects
        missing from the learned model, and learned (incl. uncertain)
        negative effects absent from the real model.
    """
    real_action_eff_neg = defaultdict(list)
    learned_action_eff_neg = defaultdict(list)
    # --- Learned model: negative effects per operator + uncertain extras ---
    with open("PDDL/domain_learned.pddl", "r") as f:
        learned_action_model = [el.lower() for el in f.read().split('\n') if el.strip() != ""]
    # Flatten to one string starting at the first ":action" schema.
    flat = " ".join(learned_action_model)
    all_action_schema = flat[flat.index(":action "):]
    # Tempered-dot regex: one chunk per action schema.
    action_schema = [el.strip() for el in re.findall(r"(?:(?!:action).)*", all_action_schema)
                     if el.strip() != ""]
    for schema in action_schema:
        op_name = schema.split()[1]
        # Effect section minus the schema's trailing closing parenthesis.
        all_eff = re.findall(r":effect.*", schema)[0].strip()[:-1].strip()
        cur_neg_effect = re.findall(r"\(not[^)]*\)\)", all_eff)
        # Append the uncertain negative effects, wrapped in (not ...).
        learned_action_eff_neg[op_name] = cur_neg_effect + \
            ["(not {})".format(el) for el in uncert_neg_eff[op_name]]
    # --- Real model: same extraction, plus parameter normalization ---
    with open("PDDL/domain.pddl", "r") as f:
        real_action_model = [el.lower() for el in f.read().split('\n') if el.strip() != ""]
    flat = " ".join(real_action_model)
    all_action_schema = flat[flat.index(":action "):]
    action_schema = [el.strip() for el in re.findall(r"(?:(?!:action).)*", all_action_schema)
                     if el.strip() != ""]
    for schema in action_schema:
        op_name = schema.split()[1]
        # Declared operator parameters ("?"-prefixed tokens).
        op_params = [el for el in
                     re.findall(r"\([^()]*\)",
                                re.findall(r":parameters.*:precondition", schema)[0])[0].strip()[1:-1].split()
                     if el.startswith("?")]
        all_eff = re.findall(r":effect.*", schema)[0].strip()[:-1].strip()
        cur_neg_effect = re.findall(r"\(not[^)]*\)\)", all_eff)
        # Rename each parameter occurrence to its positional "?param_<i>"
        # alias so real and learned predicates become comparable.
        for k in range(len(cur_neg_effect)):
            for j, param in enumerate(op_params):
                cur_neg_effect[k] = cur_neg_effect[k].replace(" " + param + " ", " ?param_{} ".format(j + 1))
                cur_neg_effect[k] = cur_neg_effect[k].replace("(" + param + " ", "(?param_{} ".format(j + 1))
                cur_neg_effect[k] = cur_neg_effect[k].replace(" " + param + ")", " ?param_{})".format(j + 1))
        real_action_eff_neg[op_name] = cur_neg_effect
    # Count mismatches in both directions. Use .get() so the lookup does not
    # silently insert empty entries into the other defaultdict.
    ins_del = sum(1 for key, preds in real_action_eff_neg.items()
                  for pred in preds if pred not in learned_action_eff_neg.get(key, []))
    del_del = sum(1 for key, preds in learned_action_eff_neg.items()
                  for pred in preds if pred not in real_action_eff_neg.get(key, []))
    return ins_del, del_del
| 42.997586
| 145
| 0.520485
| 8,672
| 71,247
| 3.978321
| 0.015221
| 0.075565
| 0.046957
| 0.016696
| 0.979739
| 0.975507
| 0.964928
| 0.95713
| 0.955855
| 0.944464
| 0
| 0.009297
| 0.32668
| 71,247
| 1,656
| 146
| 43.023551
| 0.709872
| 0.191447
| 0
| 0.873223
| 0
| 0
| 0.05701
| 0.010264
| 0
| 0
| 0
| 0
| 0
| 1
| 0.03436
| false
| 0
| 0.004739
| 0
| 0.097156
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
4093bf069a7d7278b399ef5eb4ec212bdf81555b
| 45,705
|
py
|
Python
|
tests/sentry/db/test_parse_query.py
|
AlexWayfer/sentry
|
ef935cda2b2e960bd602fda590540882d1b0712d
|
[
"BSD-3-Clause"
] | 4
|
2019-05-27T13:55:07.000Z
|
2021-03-30T07:05:09.000Z
|
tests/sentry/db/test_parse_query.py
|
AlexWayfer/sentry
|
ef935cda2b2e960bd602fda590540882d1b0712d
|
[
"BSD-3-Clause"
] | 99
|
2019-05-20T14:16:33.000Z
|
2021-01-19T09:25:15.000Z
|
tests/sentry/db/test_parse_query.py
|
AlexWayfer/sentry
|
ef935cda2b2e960bd602fda590540882d1b0712d
|
[
"BSD-3-Clause"
] | 1
|
2020-08-10T07:55:40.000Z
|
2020-08-10T07:55:40.000Z
|
from __future__ import absolute_import
from sentry.testutils import TestCase
from sentry.testutils.helpers import parse_queries
class ParseQuery(TestCase):
def test_parse_query(self):
    """Check ``parse_queries`` aggregation on a captured sqlite query log.

    Feeds a list of ``{u'sql': ..., u'time': ...}`` records (the shape of
    Django's ``connection.queries``, with the raw statement embedded in a
    ``QUERY = ... - PARAMS = ...`` string) and asserts the per-table counts
    returned. NOTE(review): judging by the expected mapping, only
    data-modifying statements seem to be counted — the bare SELECTs add
    nothing, while ``sentry_projectcounter`` gets 2 from its raw insert +
    update — confirm against ``parse_queries``' implementation.
    """
    result = parse_queries(
        [
            {u'sql': u'QUERY = u\'INSERT INTO "sentry_useremail" ("user_id", "email", "validation_hash", "date_hash_added", "is_verified") VALUES (%s, %s, %s, %s, %s)\' - PARAMS = (1, u\'admin@localhost\', u\'i0NlOcwzPKoObK8uNfg7mowTlOnvvlSI\', u\'2018-05-16 08:02:39.022342\', False)',
             u'time': u'0.000'},
            {u'sql': u'QUERY = u\'INSERT INTO "sentry_email" ("email", "date_added") VALUES (%s, %s)\' - PARAMS = (u\'admin@localhost\', u\'2018-05-16 08:02:39.023101\')',
             u'time': u'0.000'},
            {u'sql': u'QUERY = u\'UPDATE "sentry_useremail" SET "is_verified" = %s WHERE ("sentry_useremail"."user_id" = %s AND "sentry_useremail"."email" = %s )\' - PARAMS = (True, 1, u\'admin@localhost\')',
             u'time': u'0.000'},
            {u'sql': u'QUERY = u\'DELETE * FROM "sentry_organization"\' - PARAMS = (u\'baz\', u\'baz\', 0, u\'2018-05-16 08:02:39.025899\', u\'member\', 1)',
             u'time': u'0.000'},
            {u'sql': u'QUERY = u\'INSERT INTO "sentry_organizationmember" ("organization_id", "user_id", "email", "role", "flags", "token", "date_added", "has_global_access", "type") VALUES (%s, %s, %s, %s, %s, %s, %s, %s, %s)\' - PARAMS = (2, 1, None, u\'owner\', 0, None, u\'2018-05-16 08:02:39.026919\', True, 50)',
             u'time': u'0.000'},
            {u'sql': u'QUERY = u\'UPDATE "sentry_projectoptions" SET "value" = %s WHERE ("sentry_projectoptions"."project_id" = %s AND "sentry_projectoptions"."key" = %s )\' - PARAMS = (u\'gAJYIAAAADgwNmQxZjQ1NThkZjExZTg5ZWExOGM4NTkwMGNhNWI3cQEu\', 2, u\'sentry:relay-rev\')',
             u'time': u'0.000'},
            {u'sql': u'QUERY = u\'UPDATE "sentry_projectoptions" SET "value" = %s WHERE ("sentry_projectoptions"."project_id" = %s AND "sentry_projectoptions"."key" = %s )\' - PARAMS = (u\'gAJjZGF0ZXRpbWUKZGF0ZXRpbWUKcQFVCgfiBRAIAicApBhjcHl0egpfVVRDCnECKVJxA4ZScQQu\', 2, u\'sentry:relay-rev-lastchange\')',
             u'time': u'0.000'},
            # Raw (non-ORM) SQL: counter insert/select/update — only the
            # modifying ones contribute to the sentry_projectcounter count.
            {u'sql': u"QUERY = '\\n insert or ignore into sentry_projectcounter\\n (project_id, value) values (%s, 0);\\n ' - PARAMS = (2,)",
             u'time': u'0.000'},
            {u'sql': u"QUERY = '\\n select value from sentry_projectcounter\\n where project_id = %s\\n ' - PARAMS = (2,)",
             u'time': u'0.000'},
            {u'sql': u"QUERY = '\\n update sentry_projectcounter\\n set value = value + %s\\n where project_id = %s;\\n ' - PARAMS = (1, 2)",
             u'time': u'0.000'},
            {u'sql': u"QUERY = '\\n select changes();\\n ' - PARAMS = ()",
             u'time': u'0.000'},
            {u'sql': u'QUERY = u\'INSERT INTO "sentry_groupedmessage" ("project_id", "logger", "level", "message", "view", "num_comments", "platform", "status", "times_seen", "last_seen", "first_seen", "first_release_id", "resolved_at", "active_at", "time_spent_total", "time_spent_count", "score", "is_public", "data", "short_id") VALUES (%s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s)\' - PARAMS = (2, u\'\', 40, u\'hello http://example.com\', u\'http://example.com\', 0, u\'javascript\', 0, 1, u\'2018-05-16 08:02:39\', u\'2018-05-16 08:02:39\', None, None, u\'2018-05-16 08:02:39\', 0, 0, 1526457759, False, u\'eJwVykEKg0AMheF9LjKuCk4dx16gFxDcSmgiHYgYOrHg7c0s//e+jrSHOQhWW3/84fJnCqAR3n2K45ByTi+oc7BL2fenW+INTzGvoT07GxIaeifoSEcnVkwaz7B8WeQAnaDWxw3kwCAZ\', 1)',
             u'time': u'0.000'},
            {u'sql': u'QUERY = u\'UPDATE "sentry_grouphash" SET "group_id" = %s WHERE ("sentry_grouphash"."id" IN (%s) AND NOT ("sentry_grouphash"."state" = %s AND "sentry_grouphash"."state" IS NOT NULL))\' - PARAMS = (1, 1, 1)',
             u'time': u'0.000'},
            {u'sql': u'QUERY = u\'UPDATE "sentry_userreport" SET "environment_id" = %s, "group_id" = %s WHERE ("sentry_userreport"."project_id" = %s AND "sentry_userreport"."event_id" = %s )\' - PARAMS = (1, 1, 2, u\'45b41f6d313c442393aaa0293853d70f\')',
             u'time': u'0.000'}]
    )
    # Expected number of modifying queries per table.
    assert result == {
        'sentry_email': 1,
        'sentry_groupedmessage': 1,
        'sentry_grouphash': 1,
        'sentry_organization': 1,
        'sentry_organizationmember': 1,
        'sentry_projectcounter': 2,
        'sentry_projectoptions': 2,
        'sentry_useremail': 2,
        'sentry_userreport': 1
    }
def test_parse_mysql_queries(self):
    """Feed ``parse_queries`` a captured MySQL query log and verify the
    per-table write counts it reports.

    The fixture uses MySQL conventions: backtick-quoted identifiers and
    SAVEPOINT / ROLLBACK TO SAVEPOINT / RELEASE SAVEPOINT statements.
    The expected mapping documents what the parser counts:

    * only tables hit by an INSERT or UPDATE appear — the SELECT-only
      tables in the log (sentry_rawevent, sentry_reprocessingreport,
      sentry_grouphash, sentry_project, sentry_filterkey,
      sentry_groupsnooze, sentry_grouprulestatus, ...) and the savepoint
      bookkeeping statements contribute nothing;
    * writes that are later rolled back (the sentry_eventuser and
      sentry_environmentproject INSERTs, each followed by a ROLLBACK TO
      SAVEPOINT) are still counted — presumably the parser tallies issued
      statements, not committed rows;
    * the multi-row sentry_eventtag INSERT counts as a single statement;
    * nodestore_node's count of 2 comes from one UPDATE plus one INSERT.
    """
    result = parse_queries(
        [{u'sql': u'SAVEPOINT `s47055674149248_x49`', u'time': u'0.000'},
         {u'sql': u'RELEASE SAVEPOINT `s47055674149248_x49`', u'time': u'0.000'},
         {u'sql': u"SELECT `sentry_rawevent`.`id`, `sentry_rawevent`.`project_id`, `sentry_rawevent`.`event_id`, `sentry_rawevent`.`datetime`, `sentry_rawevent`.`data` FROM `sentry_rawevent` WHERE (`sentry_rawevent`.`event_id` = '1fa6e7d1c2674273be07852952e1bafc' AND `sentry_rawevent`.`project_id` = 815 )",
          u'time': u'0.000'},
         {u'sql': u"SELECT `sentry_reprocessingreport`.`id`, `sentry_reprocessingreport`.`project_id`, `sentry_reprocessingreport`.`event_id`, `sentry_reprocessingreport`.`datetime` FROM `sentry_reprocessingreport` WHERE (`sentry_reprocessingreport`.`event_id` = '1fa6e7d1c2674273be07852952e1bafc' AND `sentry_reprocessingreport`.`project_id` = 815 )",
          u'time': u'0.000'},
         {u'sql': u"SELECT `sentry_message`.`id`, `sentry_message`.`group_id`, `sentry_message`.`message_id`, `sentry_message`.`project_id`, `sentry_message`.`message`, `sentry_message`.`platform`, `sentry_message`.`datetime`, `sentry_message`.`time_spent`, `sentry_message`.`data` FROM `sentry_message` WHERE (`sentry_message`.`message_id` = '1fa6e7d1c2674273be07852952e1bafc' AND `sentry_message`.`project_id` = 815 )",
          u'time': u'0.000'},
         {u'sql': u'SAVEPOINT `s47055674149248_x50`', u'time': u'0.000'},
         # INSERT rolled back just below — still expected to count as 1.
         {u'sql': u"INSERT INTO `sentry_eventuser` (`project_id`, `hash`, `ident`, `email`, `username`, `name`, `ip_address`, `date_added`) VALUES (815, 'f528764d624db129b32c21fbca0cb8d6', NULL, NULL, NULL, NULL, '127.0.0.1', '2018-05-22 10:54:14')",
          u'time': u'0.000'},
         {u'sql': u'ROLLBACK TO SAVEPOINT `s47055674149248_x50`', u'time': u'0.000'},
         {u'sql': u"SELECT `sentry_eventuser`.`id`, `sentry_eventuser`.`project_id`, `sentry_eventuser`.`hash`, `sentry_eventuser`.`ident`, `sentry_eventuser`.`email`, `sentry_eventuser`.`username`, `sentry_eventuser`.`name`, `sentry_eventuser`.`ip_address`, `sentry_eventuser`.`date_added` FROM `sentry_eventuser` WHERE (`sentry_eventuser`.`project_id` = 815 AND `sentry_eventuser`.`hash` = 'f528764d624db129b32c21fbca0cb8d6' )",
          u'time': u'0.000'},
         {u'sql': u"SELECT `sentry_grouphash`.`id`, `sentry_grouphash`.`project_id`, `sentry_grouphash`.`hash`, `sentry_grouphash`.`group_id`, `sentry_grouphash`.`group_tombstone_id`, `sentry_grouphash`.`state` FROM `sentry_grouphash` WHERE (`sentry_grouphash`.`project_id` = 815 AND `sentry_grouphash`.`hash` = '5d41402abc4b2a76b9719d911017c592' )",
          u'time': u'0.000'},
         {u'sql': u'SELECT `sentry_groupedmessage`.`id`, `sentry_groupedmessage`.`project_id`, `sentry_groupedmessage`.`logger`, `sentry_groupedmessage`.`level`, `sentry_groupedmessage`.`message`, `sentry_groupedmessage`.`view`, `sentry_groupedmessage`.`num_comments`, `sentry_groupedmessage`.`platform`, `sentry_groupedmessage`.`status`, `sentry_groupedmessage`.`times_seen`, `sentry_groupedmessage`.`last_seen`, `sentry_groupedmessage`.`first_seen`, `sentry_groupedmessage`.`first_release_id`, `sentry_groupedmessage`.`resolved_at`, `sentry_groupedmessage`.`active_at`, `sentry_groupedmessage`.`time_spent_total`, `sentry_groupedmessage`.`time_spent_count`, `sentry_groupedmessage`.`score`, `sentry_groupedmessage`.`is_public`, `sentry_groupedmessage`.`data`, `sentry_groupedmessage`.`short_id` FROM `sentry_groupedmessage` WHERE `sentry_groupedmessage`.`id` = 592 ',
          u'time': u'0.001'},
         {u'sql': u'SELECT `sentry_project`.`id`, `sentry_project`.`slug`, `sentry_project`.`name`, `sentry_project`.`forced_color`, `sentry_project`.`organization_id`, `sentry_project`.`public`, `sentry_project`.`date_added`, `sentry_project`.`status`, `sentry_project`.`first_event`, `sentry_project`.`flags`, `sentry_project`.`platform` FROM `sentry_project` WHERE `sentry_project`.`id` = 815 ',
          u'time': u'0.000'},
         {u'sql': u"UPDATE `sentry_groupedmessage` SET `times_seen` = `sentry_groupedmessage`.`times_seen` + 1, `score` = log(times_seen) * 600 + unix_timestamp(last_seen), `data` = 'eJwdyk0Kg0AMhuF9LjKuBH9mHC/gBQS3JZgUB1IanCj09k27fL/vaUg7WINgtcfJO5ebKYD2sHSxT3NOYxyhrsE+yr4PbomfeIl5/Z8XGxIaekdoSJMTKyY/PsF2sMgbNEOt7RfkkiAY', `last_seen` = '2018-05-22 10:54:14' WHERE `sentry_groupedmessage`.`id` = 592 ",
          u'time': u'0.000'},
         {u'sql': u'SAVEPOINT `s47055674149248_x51`', u'time': u'0.000'},
         # Also rolled back below — still expected to count as 1.
         {u'sql': u'INSERT INTO `sentry_environmentproject` (`project_id`, `environment_id`, `is_hidden`) VALUES (815, 96, NULL)',
          u'time': u'0.000'},
         {u'sql': u'ROLLBACK TO SAVEPOINT `s47055674149248_x51`', u'time': u'0.000'},
         {u'sql': u"UPDATE `sentry_userreport` SET `environment_id` = 96, `group_id` = 592 WHERE (`sentry_userreport`.`project_id` = 815 AND `sentry_userreport`.`event_id` = '1fa6e7d1c2674273be07852952e1bafc' )",
          u'time': u'0.000'},
         {u'sql': u'SAVEPOINT `s47055674149248_x52`', u'time': u'0.000'},
         {u'sql': u"UPDATE `nodestore_node` SET `timestamp` = '2018-05-22 10:54:14', `data` = 'eJxtU01v2zAMvetX6BYX2BxLtmSnOxUDtgz92CFdcgxUm3G0OLEgK127ov99pOKmOxQBjJB8pKj3npLGCbaYON//hjpMmJPsphLqhg2LyQCH4J9TewjgN6aGIf01gEdMzpLGFdhm3do0jYdhwKzChJBlmuFPYKzZ8OGQW4SbFhBR0pwK2/bn1Iwtt9B1PXMii/3wFLzBghAEFpIWW3vYrB/BD7Y/UClnP+THC89DcIQoYjNtePQdJTSeg7XL6RSezN51kNb9HvMlzdmCaXA64SqWdE7M6CsntiQCPl+1eA4yJZhZ3vZ/bdeZqUoznqzsoen/DPzunutUfuGrnytdXPArh/NX8HBtw1TlZZprnlzP729vPvHO7oB/h3rXX/CvW9/vYSor4k9lWVpKvjAb4+3YhUdKZgyxUvd4x6dAK8qohiQ5+hirGGuM3ymSJbvDtoPZE8mSOH9btqLELHL94DFxUjijIblgLXYuJuedqIRUt3HCaWFK5diO/Rt7aME7jwJQtiDWciL95YU3sDHHLvDXVyppZkYdKSr/81yzo0wVj59ha91ZZHttScZiY60m1PvFCsGWZLgM/0YPjFcscrZcu34IKxu286gnJot4TQ812EdoCKbYN6GknlW6UAX1g/d9lL7QtH4RDRFMG1PRDQW6YTHp4BHISSpjYxMFggWnUKTkzWgq/9hoeBgiVUSebHt5PFGv0JrWXZ5fEuHKiOuH9HS75Vk7KlaxOGo3IpzCh3TSBx9ihjCNbk1a1HVM87OmVMSVh5a4jPTu4HltiR6dozJSRmXCs6PBmow2ikmhotoegmlMoHeqNUmnyTXBhi62VKyNrzn9By1kSX8=' WHERE `nodestore_node`.`id` = '9cwO83agTCqM5QNjewZF+g==' ",
          u'time': u'0.000'},
         {u'sql': u'SAVEPOINT `s47055674149248_x53`', u'time': u'0.000'},
         {u'sql': u"INSERT INTO `nodestore_node` (`id`, `data`, `timestamp`) VALUES ('9cwO83agTCqM5QNjewZF+g==', 'eJxtU01v2zAMvetX6BYX2BxLtmSnOxUDtgz92CFdcgxUm3G0OLEgK127ov99pOKmOxQBjJB8pKj3npLGCbaYON//hjpMmJPsphLqhg2LyQCH4J9TewjgN6aGIf01gEdMzpLGFdhm3do0jYdhwKzChJBlmuFPYKzZ8OGQW4SbFhBR0pwK2/bn1Iwtt9B1PXMii/3wFLzBghAEFpIWW3vYrB/BD7Y/UClnP+THC89DcIQoYjNtePQdJTSeg7XL6RSezN51kNb9HvMlzdmCaXA64SqWdE7M6CsntiQCPl+1eA4yJZhZ3vZ/bdeZqUoznqzsoen/DPzunutUfuGrnytdXPArh/NX8HBtw1TlZZprnlzP729vPvHO7oB/h3rXX/CvW9/vYSor4k9lWVpKvjAb4+3YhUdKZgyxUvd4x6dAK8qohiQ5+hirGGuM3ymSJbvDtoPZE8mSOH9btqLELHL94DFxUjijIblgLXYuJuedqIRUt3HCaWFK5diO/Rt7aME7jwJQtiDWciL95YU3sDHHLvDXVyppZkYdKSr/81yzo0wVj59ha91ZZHttScZiY60m1PvFCsGWZLgM/0YPjFcscrZcu34IKxu286gnJot4TQ812EdoCKbYN6GknlW6UAX1g/d9lL7QtH4RDRFMG1PRDQW6YTHp4BHISSpjYxMFggWnUKTkzWgq/9hoeBgiVUSebHt5PFGv0JrWXZ5fEuHKiOuH9HS75Vk7KlaxOGo3IpzCh3TSBx9ihjCNbk1a1HVM87OmVMSVh5a4jPTu4HltiR6dozJSRmXCs6PBmow2ikmhotoegmlMoHeqNUmnyTXBhi62VKyNrzn9By1kSX8=', '2018-05-22 10:54:14')",
          u'time': u'0.000'},
         {u'sql': u'RELEASE SAVEPOINT `s47055674149248_x53`', u'time': u'0.000'},
         {u'sql': u"INSERT INTO `sentry_message` (`group_id`, `message_id`, `project_id`, `message`, `platform`, `datetime`, `time_spent`, `data`) VALUES (592, '1fa6e7d1c2674273be07852952e1bafc', 815, 'hello http://example.com', 'javascript', '2018-05-22 10:54:14', NULL, 'eJzTSCkw5ApWz8tPSY3PTFHnKjAC8iyTy/0tjBPTQ5wLfU0D/bJSy6PctNNtbYHSxlzFegCVlg8K')",
          u'time': u'0.000'},
         {u'sql': u'RELEASE SAVEPOINT `s47055674149248_x52`', u'time': u'0.000'},
         {u'sql': u"SELECT `sentry_filterkey`.`id`, `sentry_filterkey`.`project_id`, `sentry_filterkey`.`key`, `sentry_filterkey`.`values_seen`, `sentry_filterkey`.`label`, `sentry_filterkey`.`status` FROM `sentry_filterkey` WHERE (`sentry_filterkey`.`project_id` = 815 AND `sentry_filterkey`.`key` = 'level' )",
          u'time': u'0.001'},
         {u'sql': u"SELECT `sentry_filtervalue`.`id`, `sentry_filtervalue`.`project_id`, `sentry_filtervalue`.`key`, `sentry_filtervalue`.`value`, `sentry_filtervalue`.`data`, `sentry_filtervalue`.`times_seen`, `sentry_filtervalue`.`last_seen`, `sentry_filtervalue`.`first_seen` FROM `sentry_filtervalue` WHERE (`sentry_filtervalue`.`project_id` = 815 AND `sentry_filtervalue`.`value` = 'error' AND `sentry_filtervalue`.`key` = 'level' )",
          u'time': u'0.000'},
         {u'sql': u"SELECT `sentry_filterkey`.`id`, `sentry_filterkey`.`project_id`, `sentry_filterkey`.`key`, `sentry_filterkey`.`values_seen`, `sentry_filterkey`.`label`, `sentry_filterkey`.`status` FROM `sentry_filterkey` WHERE (`sentry_filterkey`.`project_id` = 815 AND `sentry_filterkey`.`key` = 'url' )",
          u'time': u'0.000'},
         {u'sql': u"SELECT `sentry_filtervalue`.`id`, `sentry_filtervalue`.`project_id`, `sentry_filtervalue`.`key`, `sentry_filtervalue`.`value`, `sentry_filtervalue`.`data`, `sentry_filtervalue`.`times_seen`, `sentry_filtervalue`.`last_seen`, `sentry_filtervalue`.`first_seen` FROM `sentry_filtervalue` WHERE (`sentry_filtervalue`.`project_id` = 815 AND `sentry_filtervalue`.`value` = 'http://example.com' AND `sentry_filtervalue`.`key` = 'url' )",
          u'time': u'0.000'},
         {u'sql': u"SELECT `sentry_filterkey`.`id`, `sentry_filterkey`.`project_id`, `sentry_filterkey`.`key`, `sentry_filterkey`.`values_seen`, `sentry_filterkey`.`label`, `sentry_filterkey`.`status` FROM `sentry_filterkey` WHERE (`sentry_filterkey`.`project_id` = 815 AND `sentry_filterkey`.`key` = 'sentry:user' )",
          u'time': u'0.000'},
         {u'sql': u"SELECT `sentry_filtervalue`.`id`, `sentry_filtervalue`.`project_id`, `sentry_filtervalue`.`key`, `sentry_filtervalue`.`value`, `sentry_filtervalue`.`data`, `sentry_filtervalue`.`times_seen`, `sentry_filtervalue`.`last_seen`, `sentry_filtervalue`.`first_seen` FROM `sentry_filtervalue` WHERE (`sentry_filtervalue`.`project_id` = 815 AND `sentry_filtervalue`.`value` = 'ip:127.0.0.1' AND `sentry_filtervalue`.`key` = 'sentry:user' )",
          u'time': u'0.000'},
         {u'sql': u"SELECT `sentry_filterkey`.`id`, `sentry_filterkey`.`project_id`, `sentry_filterkey`.`key`, `sentry_filterkey`.`values_seen`, `sentry_filterkey`.`label`, `sentry_filterkey`.`status` FROM `sentry_filterkey` WHERE (`sentry_filterkey`.`project_id` = 815 AND `sentry_filterkey`.`key` = 'os.name' )",
          u'time': u'0.000'},
         {u'sql': u"SELECT `sentry_filtervalue`.`id`, `sentry_filtervalue`.`project_id`, `sentry_filtervalue`.`key`, `sentry_filtervalue`.`value`, `sentry_filtervalue`.`data`, `sentry_filtervalue`.`times_seen`, `sentry_filtervalue`.`last_seen`, `sentry_filtervalue`.`first_seen` FROM `sentry_filtervalue` WHERE (`sentry_filtervalue`.`project_id` = 815 AND `sentry_filtervalue`.`value` = 'Windows 8' AND `sentry_filtervalue`.`key` = 'os.name' )",
          u'time': u'0.000'},
         {u'sql': u"SELECT `sentry_filterkey`.`id`, `sentry_filterkey`.`project_id`, `sentry_filterkey`.`key`, `sentry_filterkey`.`values_seen`, `sentry_filterkey`.`label`, `sentry_filterkey`.`status` FROM `sentry_filterkey` WHERE (`sentry_filterkey`.`project_id` = 815 AND `sentry_filterkey`.`key` = 'browser.name' )",
          u'time': u'0.000'},
         {u'sql': u"SELECT `sentry_filtervalue`.`id`, `sentry_filtervalue`.`project_id`, `sentry_filtervalue`.`key`, `sentry_filtervalue`.`value`, `sentry_filtervalue`.`data`, `sentry_filtervalue`.`times_seen`, `sentry_filtervalue`.`last_seen`, `sentry_filtervalue`.`first_seen` FROM `sentry_filtervalue` WHERE (`sentry_filtervalue`.`project_id` = 815 AND `sentry_filtervalue`.`value` = 'Chrome' AND `sentry_filtervalue`.`key` = 'browser.name' )",
          u'time': u'0.000'},
         {u'sql': u"SELECT `sentry_filterkey`.`id`, `sentry_filterkey`.`project_id`, `sentry_filterkey`.`key`, `sentry_filterkey`.`values_seen`, `sentry_filterkey`.`label`, `sentry_filterkey`.`status` FROM `sentry_filterkey` WHERE (`sentry_filterkey`.`project_id` = 815 AND `sentry_filterkey`.`key` = 'browser' )",
          u'time': u'0.000'},
         {u'sql': u"SELECT `sentry_filtervalue`.`id`, `sentry_filtervalue`.`project_id`, `sentry_filtervalue`.`key`, `sentry_filtervalue`.`value`, `sentry_filtervalue`.`data`, `sentry_filtervalue`.`times_seen`, `sentry_filtervalue`.`last_seen`, `sentry_filtervalue`.`first_seen` FROM `sentry_filtervalue` WHERE (`sentry_filtervalue`.`project_id` = 815 AND `sentry_filtervalue`.`value` = 'Chrome 28.0.1500' AND `sentry_filtervalue`.`key` = 'browser' )",
          u'time': u'0.000'},
         {u'sql': u'SAVEPOINT `s47055674149248_x54`', u'time': u'0.000'},
         # Multi-row INSERT — expected to count once, not per value tuple.
         {u'sql': u"INSERT INTO `sentry_eventtag` (`project_id`, `group_id`, `event_id`, `key_id`, `value_id`, `date_added`) VALUES (815, 592, 373, 43, 42, '2018-05-22 10:54:14'), (815, 592, 373, 44, 43, '2018-05-22 10:54:14'), (815, 592, 373, 45, 44, '2018-05-22 10:54:14'), (815, 592, 373, 46, 45, '2018-05-22 10:54:14'), (815, 592, 373, 47, 46, '2018-05-22 10:54:14'), (815, 592, 373, 48, 47, '2018-05-22 10:54:14')",
          u'time': u'0.000'},
         {u'sql': u'RELEASE SAVEPOINT `s47055674149248_x54`', u'time': u'0.000'},
         {u'sql': u"UPDATE `sentry_filtervalue` SET `times_seen` = `sentry_filtervalue`.`times_seen` + 1, `data` = NULL, `last_seen` = '2018-05-22 10:54:14' WHERE (`sentry_filtervalue`.`project_id` = 815 AND `sentry_filtervalue`.`value` = 'error' AND `sentry_filtervalue`.`key` = 'level' )",
          u'time': u'0.001'},
         {u'sql': u"UPDATE `sentry_messagefiltervalue` SET `times_seen` = `sentry_messagefiltervalue`.`times_seen` + 1, `project_id` = 815, `last_seen` = '2018-05-22 10:54:14' WHERE (`sentry_messagefiltervalue`.`group_id` = 592 AND `sentry_messagefiltervalue`.`value` = 'error' AND `sentry_messagefiltervalue`.`key` = 'level' )",
          u'time': u'0.001'},
         {u'sql': u"UPDATE `sentry_filtervalue` SET `times_seen` = `sentry_filtervalue`.`times_seen` + 1, `data` = NULL, `last_seen` = '2018-05-22 10:54:14' WHERE (`sentry_filtervalue`.`project_id` = 815 AND `sentry_filtervalue`.`value` = 'http://example.com' AND `sentry_filtervalue`.`key` = 'url' )",
          u'time': u'0.001'},
         {u'sql': u"UPDATE `sentry_messagefiltervalue` SET `times_seen` = `sentry_messagefiltervalue`.`times_seen` + 1, `project_id` = 815, `last_seen` = '2018-05-22 10:54:14' WHERE (`sentry_messagefiltervalue`.`group_id` = 592 AND `sentry_messagefiltervalue`.`value` = 'http://example.com' AND `sentry_messagefiltervalue`.`key` = 'url' )",
          u'time': u'0.001'},
         {u'sql': u"UPDATE `sentry_filtervalue` SET `times_seen` = `sentry_filtervalue`.`times_seen` + 1, `data` = NULL, `last_seen` = '2018-05-22 10:54:14' WHERE (`sentry_filtervalue`.`project_id` = 815 AND `sentry_filtervalue`.`value` = 'ip:127.0.0.1' AND `sentry_filtervalue`.`key` = 'sentry:user' )",
          u'time': u'0.001'},
         {u'sql': u"UPDATE `sentry_messagefiltervalue` SET `times_seen` = `sentry_messagefiltervalue`.`times_seen` + 1, `project_id` = 815, `last_seen` = '2018-05-22 10:54:14' WHERE (`sentry_messagefiltervalue`.`group_id` = 592 AND `sentry_messagefiltervalue`.`value` = 'ip:127.0.0.1' AND `sentry_messagefiltervalue`.`key` = 'sentry:user' )",
          u'time': u'0.001'},
         {u'sql': u"UPDATE `sentry_filtervalue` SET `times_seen` = `sentry_filtervalue`.`times_seen` + 1, `data` = NULL, `last_seen` = '2018-05-22 10:54:14' WHERE (`sentry_filtervalue`.`project_id` = 815 AND `sentry_filtervalue`.`value` = 'Windows 8' AND `sentry_filtervalue`.`key` = 'os.name' )",
          u'time': u'0.001'},
         {u'sql': u"UPDATE `sentry_messagefiltervalue` SET `times_seen` = `sentry_messagefiltervalue`.`times_seen` + 1, `project_id` = 815, `last_seen` = '2018-05-22 10:54:14' WHERE (`sentry_messagefiltervalue`.`group_id` = 592 AND `sentry_messagefiltervalue`.`value` = 'Windows 8' AND `sentry_messagefiltervalue`.`key` = 'os.name' )",
          u'time': u'0.001'},
         {u'sql': u"UPDATE `sentry_filtervalue` SET `times_seen` = `sentry_filtervalue`.`times_seen` + 1, `data` = NULL, `last_seen` = '2018-05-22 10:54:14' WHERE (`sentry_filtervalue`.`project_id` = 815 AND `sentry_filtervalue`.`value` = 'Chrome' AND `sentry_filtervalue`.`key` = 'browser.name' )",
          u'time': u'0.001'},
         {u'sql': u"UPDATE `sentry_messagefiltervalue` SET `times_seen` = `sentry_messagefiltervalue`.`times_seen` + 1, `project_id` = 815, `last_seen` = '2018-05-22 10:54:14' WHERE (`sentry_messagefiltervalue`.`group_id` = 592 AND `sentry_messagefiltervalue`.`value` = 'Chrome' AND `sentry_messagefiltervalue`.`key` = 'browser.name' )",
          u'time': u'0.001'},
         {u'sql': u"UPDATE `sentry_filtervalue` SET `times_seen` = `sentry_filtervalue`.`times_seen` + 1, `data` = NULL, `last_seen` = '2018-05-22 10:54:14' WHERE (`sentry_filtervalue`.`project_id` = 815 AND `sentry_filtervalue`.`value` = 'Chrome 28.0.1500' AND `sentry_filtervalue`.`key` = 'browser' )",
          u'time': u'0.001'},
         {u'sql': u"UPDATE `sentry_messagefiltervalue` SET `times_seen` = `sentry_messagefiltervalue`.`times_seen` + 1, `project_id` = 815, `last_seen` = '2018-05-22 10:54:14' WHERE (`sentry_messagefiltervalue`.`group_id` = 592 AND `sentry_messagefiltervalue`.`value` = 'Chrome 28.0.1500' AND `sentry_messagefiltervalue`.`key` = 'browser' )",
          u'time': u'0.001'},
         {u'sql': u'SELECT `sentry_groupedmessage`.`id`, `sentry_groupedmessage`.`project_id`, `sentry_groupedmessage`.`logger`, `sentry_groupedmessage`.`level`, `sentry_groupedmessage`.`message`, `sentry_groupedmessage`.`view`, `sentry_groupedmessage`.`num_comments`, `sentry_groupedmessage`.`platform`, `sentry_groupedmessage`.`status`, `sentry_groupedmessage`.`times_seen`, `sentry_groupedmessage`.`last_seen`, `sentry_groupedmessage`.`first_seen`, `sentry_groupedmessage`.`first_release_id`, `sentry_groupedmessage`.`resolved_at`, `sentry_groupedmessage`.`active_at`, `sentry_groupedmessage`.`time_spent_total`, `sentry_groupedmessage`.`time_spent_count`, `sentry_groupedmessage`.`score`, `sentry_groupedmessage`.`is_public`, `sentry_groupedmessage`.`data`, `sentry_groupedmessage`.`short_id` FROM `sentry_groupedmessage` WHERE `sentry_groupedmessage`.`id` = 592 ',
          u'time': u'0.001'},
         {u'sql': u'SELECT `sentry_groupsnooze`.`id`, `sentry_groupsnooze`.`group_id`, `sentry_groupsnooze`.`until`, `sentry_groupsnooze`.`count`, `sentry_groupsnooze`.`window`, `sentry_groupsnooze`.`user_count`, `sentry_groupsnooze`.`user_window`, `sentry_groupsnooze`.`state`, `sentry_groupsnooze`.`actor_id` FROM `sentry_groupsnooze` WHERE `sentry_groupsnooze`.`group_id` = 592 ',
          u'time': u'0.000'},
         {u'sql': u'SELECT `sentry_grouprulestatus`.`id`, `sentry_grouprulestatus`.`project_id`, `sentry_grouprulestatus`.`rule_id`, `sentry_grouprulestatus`.`group_id`, `sentry_grouprulestatus`.`status`, `sentry_grouprulestatus`.`date_added`, `sentry_grouprulestatus`.`last_active` FROM `sentry_grouprulestatus` WHERE (`sentry_grouprulestatus`.`group_id` = 592 AND `sentry_grouprulestatus`.`rule_id` = 827 )',
          u'time': u'0.001'},
         {u'sql': u'SAVEPOINT `s47055674149248_x55`', u'time': u'0.000'},
         {u'sql': u'RELEASE SAVEPOINT `s47055674149248_x55`', u'time': u'0.000'}]
    )
    # Counts are INSERT/UPDATE statements per table, as issued above.
    assert result == {
        'nodestore_node': 2,             # 1 UPDATE + 1 INSERT
        'sentry_environmentproject': 1,  # INSERT (rolled back, still counted)
        'sentry_eventtag': 1,            # one multi-row INSERT
        'sentry_eventuser': 1,           # INSERT (rolled back, still counted)
        'sentry_filtervalue': 6,         # one UPDATE per tag value
        'sentry_groupedmessage': 1,
        'sentry_message': 1,
        'sentry_messagefiltervalue': 6,  # one UPDATE per tag value
        'sentry_userreport': 1
    }
def test_parse_postgres_queries(self):
result = parse_queries([
{u'sql': u'SAVEPOINT "s47890194282880_x49"', u'time': u'0.000'},
{u'sql': u'RELEASE SAVEPOINT "s47890194282880_x49"', u'time': u'0.000'},
{u'sql': u'SELECT "sentry_rawevent"."id", "sentry_rawevent"."project_id", "sentry_rawevent"."event_id", "sentry_rawevent"."datetime", "sentry_rawevent"."data" FROM "sentry_rawevent" WHERE ("sentry_rawevent"."event_id" = \'1fba9e314001443b93285dc4411f1593\' AND "sentry_rawevent"."project_id" = 864 )',
u'time': u'0.000'},
{u'sql': u'SELECT "sentry_reprocessingreport"."id", "sentry_reprocessingreport"."project_id", "sentry_reprocessingreport"."event_id", "sentry_reprocessingreport"."datetime" FROM "sentry_reprocessingreport" WHERE ("sentry_reprocessingreport"."event_id" = \'1fba9e314001443b93285dc4411f1593\' AND "sentry_reprocessingreport"."project_id" = 864 )',
u'time': u'0.001'},
{u'sql': u'SELECT "sentry_message"."id", "sentry_message"."group_id", "sentry_message"."message_id", "sentry_message"."project_id", "sentry_message"."message", "sentry_message"."platform", "sentry_message"."datetime", "sentry_message"."time_spent", "sentry_message"."data" FROM "sentry_message" WHERE ("sentry_message"."message_id" = \'1fba9e314001443b93285dc4411f1593\' AND "sentry_message"."project_id" = 864 )',
u'time': u'0.001'},
{u'sql': u'SAVEPOINT "s47890194282880_x50"', u'time': u'0.000'},
{u'sql': u'INSERT INTO "sentry_eventuser" ("project_id", "hash", "ident", "email", "username", "name", "ip_address", "date_added") VALUES (864, \'f528764d624db129b32c21fbca0cb8d6\', NULL, NULL, NULL, NULL, \'127.0.0.1\', \'2018-05-22 09:12:12.357888+00:00\') RETURNING "sentry_eventuser"."id"',
u'time': u'0.000'},
{u'sql': u'ROLLBACK TO SAVEPOINT "s47890194282880_x50"', u'time': u'0.000'},
{u'sql': u'SELECT "sentry_eventuser"."id", "sentry_eventuser"."project_id", "sentry_eventuser"."hash", "sentry_eventuser"."ident", "sentry_eventuser"."email", "sentry_eventuser"."username", "sentry_eventuser"."name", "sentry_eventuser"."ip_address", "sentry_eventuser"."date_added" FROM "sentry_eventuser" WHERE ("sentry_eventuser"."project_id" = 864 AND "sentry_eventuser"."hash" = \'f528764d624db129b32c21fbca0cb8d6\' )',
u'time': u'0.000'},
{u'sql': u'SELECT "sentry_grouphash"."id", "sentry_grouphash"."project_id", "sentry_grouphash"."hash", "sentry_grouphash"."group_id", "sentry_grouphash"."group_tombstone_id", "sentry_grouphash"."state" FROM "sentry_grouphash" WHERE ("sentry_grouphash"."project_id" = 864 AND "sentry_grouphash"."hash" = \'5d41402abc4b2a76b9719d911017c592\' )',
u'time': u'0.000'},
{u'sql': u'SELECT "sentry_groupedmessage"."id", "sentry_groupedmessage"."project_id", "sentry_groupedmessage"."logger", "sentry_groupedmessage"."level", "sentry_groupedmessage"."message", "sentry_groupedmessage"."view", "sentry_groupedmessage"."num_comments", "sentry_groupedmessage"."platform", "sentry_groupedmessage"."status", "sentry_groupedmessage"."times_seen", "sentry_groupedmessage"."last_seen", "sentry_groupedmessage"."first_seen", "sentry_groupedmessage"."first_release_id", "sentry_groupedmessage"."resolved_at", "sentry_groupedmessage"."active_at", "sentry_groupedmessage"."time_spent_total", "sentry_groupedmessage"."time_spent_count", "sentry_groupedmessage"."score", "sentry_groupedmessage"."is_public", "sentry_groupedmessage"."data", "sentry_groupedmessage"."short_id" FROM "sentry_groupedmessage" WHERE "sentry_groupedmessage"."id" = 662 ',
u'time': u'0.000'},
{u'sql': u'SELECT "sentry_project"."id", "sentry_project"."slug", "sentry_project"."name", "sentry_project"."forced_color", "sentry_project"."organization_id", "sentry_project"."public", "sentry_project"."date_added", "sentry_project"."status", "sentry_project"."first_event", "sentry_project"."flags", "sentry_project"."platform" FROM "sentry_project" WHERE "sentry_project"."id" = 864 ',
u'time': u'0.000'},
{u'sql': u'UPDATE "sentry_groupedmessage" SET "times_seen" = "sentry_groupedmessage"."times_seen" + 1, "score" = 1526980332, "data" = \'eJwVyksKhEAMRdF5NlKORKv89QbcgOBUgokopOlgRcHddxze905BWsMUBLMtJ6983EwBNMJYt7H7DFVKEfIU7FH2Pbkl3vAS82re58uGhIbeLRSknRM7TF7ew7yzyA90gJzLP+FOIA0=\', "last_seen" = \'2018-05-22 09:12:12+00:00\' WHERE "sentry_groupedmessage"."id" = 662 ',
u'time': u'0.001'},
{u'sql': u'SAVEPOINT "s47890194282880_x51"', u'time': u'0.000'},
{u'sql': u'INSERT INTO "sentry_environmentproject" ("project_id", "environment_id", "is_hidden") VALUES (864, 165, NULL) RETURNING "sentry_environmentproject"."id"',
u'time': u'0.000'},
{u'sql': u'ROLLBACK TO SAVEPOINT "s47890194282880_x51"', u'time': u'0.000'},
{u'sql': u'UPDATE "sentry_userreport" SET "environment_id" = 165, "group_id" = 662 WHERE ("sentry_userreport"."project_id" = 864 AND "sentry_userreport"."event_id" = \'1fba9e314001443b93285dc4411f1593\' )',
u'time': u'0.000'},
{u'sql': u'SAVEPOINT "s47890194282880_x52"', u'time': u'0.000'},
{u'sql': u'UPDATE "nodestore_node" SET "timestamp" = \'2018-05-22 09:12:12.374085+00:00\', "data" = \'eJxtU8Fu2zAMvesrfIsLbI4lS7KTnYoBW4qu7SFdcgxUm3G0OLEgq127ov8+UnHTSxHACMn3KOo9Km0cZ8uJ8/0fqMOEOcGuKi3ZsJwMcAz+JbPHAH5rahiy3wN4hBQsbZxElnUb0zQehgGzChNclFmOP46xZsOnTW4QblpAREl9KqQdzqkZW+2g63rmeB758By8wQLnBOaCBtt42G6ewA+2P1KpYFfi84EXIThCyEimCR99RwmN52BtPp3Cszm4DrK6P2C+pD47MA12J1zF0s7xGX1FzlYkwNfLFs9BoTgzq5v+n+06M1VZnqRre2z6v0Nye5/oTHxL1ndrLS+SS4f91/BwbcNUFWVW6CS9Xtzf/PqSdHYPyU+o9/1F8n3n+wNMRUX6qTzPSpEszdZ4O7LwSMGMIVXqHu/4HGhEEd0QZEcfYxVjjfGHRKJkt0g7mgOJLEjz92ErSsyi1g8eEyeHc2pScNYiczk5z0QllLqNHU4DU6pAOvK39tiCdx4NoKwk1QoS/fU1aWBrHruQvL1RSTMz+khR+bFyzZ4SVTx9hsy6syj2xpKLMmetJtTHvSRnK9q3HP/GFRhvKAu22rh+CGsbdotoJyZlvKWHGuwTNART7AdXQs+qvCgiH7zvo/NS0/Qy7kMwbUzFZZC4DMtJB09Ai6RyNpIo4Cw4hR6l73umis/3TElCqog8be388aS8ws20bn5+SIQrI64fstPtVmfrqFjF4mjdiHAK39HJHnyHOcI0Lmvaoq1jOjlbSkUceWhJyyjvHl42luTR+LBmyCEJXhz11bRmo5UUKqodIJjGBHqlWpNzmnYm2NBFSsXa+Jaz/zqNSI8=\' WHERE "nodestore_node"."id" = \'u9iv1Ih4RDqz5GtlwX3+TA==\' ',
u'time': u'0.000'},
{u'sql': u'SAVEPOINT "s47890194282880_x53"', u'time': u'0.000'},
{u'sql': u'INSERT INTO "nodestore_node" ("id", "data", "timestamp") VALUES (\'u9iv1Ih4RDqz5GtlwX3+TA==\', \'eJxtU8Fu2zAMvesrfIsLbI4lS7KTnYoBW4qu7SFdcgxUm3G0OLEgq127ov8+UnHTSxHACMn3KOo9Km0cZ8uJ8/0fqMOEOcGuKi3ZsJwMcAz+JbPHAH5rahiy3wN4hBQsbZxElnUb0zQehgGzChNclFmOP46xZsOnTW4QblpAREl9KqQdzqkZW+2g63rmeB758By8wQLnBOaCBtt42G6ewA+2P1KpYFfi84EXIThCyEimCR99RwmN52BtPp3Cszm4DrK6P2C+pD47MA12J1zF0s7xGX1FzlYkwNfLFs9BoTgzq5v+n+06M1VZnqRre2z6v0Nye5/oTHxL1ndrLS+SS4f91/BwbcNUFWVW6CS9Xtzf/PqSdHYPyU+o9/1F8n3n+wNMRUX6qTzPSpEszdZ4O7LwSMGMIVXqHu/4HGhEEd0QZEcfYxVjjfGHRKJkt0g7mgOJLEjz92ErSsyi1g8eEyeHc2pScNYiczk5z0QllLqNHU4DU6pAOvK39tiCdx4NoKwk1QoS/fU1aWBrHruQvL1RSTMz+khR+bFyzZ4SVTx9hsy6syj2xpKLMmetJtTHvSRnK9q3HP/GFRhvKAu22rh+CGsbdotoJyZlvKWHGuwTNART7AdXQs+qvCgiH7zvo/NS0/Qy7kMwbUzFZZC4DMtJB09Ai6RyNpIo4Cw4hR6l73umis/3TElCqog8be388aS8ws20bn5+SIQrI64fstPtVmfrqFjF4mjdiHAK39HJHnyHOcI0Lmvaoq1jOjlbSkUceWhJyyjvHl42luTR+LBmyCEJXhz11bRmo5UUKqodIJjGBHqlWpNzmnYm2NBFSsXa+Jaz/zqNSI8=\', \'2018-05-22 09:12:12.374085+00:00\')',
u'time': u'0.000'},
{u'sql': u'RELEASE SAVEPOINT "s47890194282880_x53"', u'time': u'0.000'},
{u'sql': u'INSERT INTO "sentry_message" ("group_id", "message_id", "project_id", "message", "platform", "datetime", "time_spent", "data") VALUES (662, \'1fba9e314001443b93285dc4411f1593\', 864, \'hello http://example.com\', \'javascript\', \'2018-05-22 09:12:12+00:00\', NULL, \'eJzTSCkw5ApWz8tPSY3PTFHnKjAC8kotM8sMPTNMglwKq0zdS3LKI4y1QxxtbYHSxlzFegCZxA8W\') RETURNING "sentry_message"."id"',
u'time': u'0.000'},
{u'sql': u'RELEASE SAVEPOINT "s47890194282880_x52"', u'time': u'0.000'},
{u'sql': u'SELECT "sentry_filterkey"."id", "sentry_filterkey"."project_id", "sentry_filterkey"."key", "sentry_filterkey"."values_seen", "sentry_filterkey"."label", "sentry_filterkey"."status" FROM "sentry_filterkey" WHERE ("sentry_filterkey"."project_id" = 864 AND "sentry_filterkey"."key" = \'level\' )',
u'time': u'0.001'},
{u'sql': u'SELECT "sentry_filtervalue"."id", "sentry_filtervalue"."project_id", "sentry_filtervalue"."key", "sentry_filtervalue"."value", "sentry_filtervalue"."data", "sentry_filtervalue"."times_seen", "sentry_filtervalue"."last_seen", "sentry_filtervalue"."first_seen" FROM "sentry_filtervalue" WHERE ("sentry_filtervalue"."project_id" = 864 AND "sentry_filtervalue"."value" = \'error\' AND "sentry_filtervalue"."key" = \'level\' )',
u'time': u'0.000'},
{u'sql': u'SELECT "sentry_filterkey"."id", "sentry_filterkey"."project_id", "sentry_filterkey"."key", "sentry_filterkey"."values_seen", "sentry_filterkey"."label", "sentry_filterkey"."status" FROM "sentry_filterkey" WHERE ("sentry_filterkey"."project_id" = 864 AND "sentry_filterkey"."key" = \'url\' )',
u'time': u'0.001'},
{u'sql': u'SELECT "sentry_filtervalue"."id", "sentry_filtervalue"."project_id", "sentry_filtervalue"."key", "sentry_filtervalue"."value", "sentry_filtervalue"."data", "sentry_filtervalue"."times_seen", "sentry_filtervalue"."last_seen", "sentry_filtervalue"."first_seen" FROM "sentry_filtervalue" WHERE ("sentry_filtervalue"."project_id" = 864 AND "sentry_filtervalue"."value" = \'http://example.com\' AND "sentry_filtervalue"."key" = \'url\' )',
u'time': u'0.000'},
{u'sql': u'SELECT "sentry_filterkey"."id", "sentry_filterkey"."project_id", "sentry_filterkey"."key", "sentry_filterkey"."values_seen", "sentry_filterkey"."label", "sentry_filterkey"."status" FROM "sentry_filterkey" WHERE ("sentry_filterkey"."project_id" = 864 AND "sentry_filterkey"."key" = \'sentry:user\' )',
u'time': u'0.000'},
{u'sql': u'SELECT "sentry_filtervalue"."id", "sentry_filtervalue"."project_id", "sentry_filtervalue"."key", "sentry_filtervalue"."value", "sentry_filtervalue"."data", "sentry_filtervalue"."times_seen", "sentry_filtervalue"."last_seen", "sentry_filtervalue"."first_seen" FROM "sentry_filtervalue" WHERE ("sentry_filtervalue"."project_id" = 864 AND "sentry_filtervalue"."value" = \'ip:127.0.0.1\' AND "sentry_filtervalue"."key" = \'sentry:user\' )',
u'time': u'0.000'},
{u'sql': u'SELECT "sentry_filterkey"."id", "sentry_filterkey"."project_id", "sentry_filterkey"."key", "sentry_filterkey"."values_seen", "sentry_filterkey"."label", "sentry_filterkey"."status" FROM "sentry_filterkey" WHERE ("sentry_filterkey"."project_id" = 864 AND "sentry_filterkey"."key" = \'os.name\' )',
u'time': u'0.000'},
{u'sql': u'SELECT "sentry_filtervalue"."id", "sentry_filtervalue"."project_id", "sentry_filtervalue"."key", "sentry_filtervalue"."value", "sentry_filtervalue"."data", "sentry_filtervalue"."times_seen", "sentry_filtervalue"."last_seen", "sentry_filtervalue"."first_seen" FROM "sentry_filtervalue" WHERE ("sentry_filtervalue"."project_id" = 864 AND "sentry_filtervalue"."value" = \'Windows 8\' AND "sentry_filtervalue"."key" = \'os.name\' )',
u'time': u'0.000'},
{u'sql': u'SELECT "sentry_filterkey"."id", "sentry_filterkey"."project_id", "sentry_filterkey"."key", "sentry_filterkey"."values_seen", "sentry_filterkey"."label", "sentry_filterkey"."status" FROM "sentry_filterkey" WHERE ("sentry_filterkey"."project_id" = 864 AND "sentry_filterkey"."key" = \'browser.name\' )',
u'time': u'0.000'},
{u'sql': u'SELECT "sentry_filtervalue"."id", "sentry_filtervalue"."project_id", "sentry_filtervalue"."key", "sentry_filtervalue"."value", "sentry_filtervalue"."data", "sentry_filtervalue"."times_seen", "sentry_filtervalue"."last_seen", "sentry_filtervalue"."first_seen" FROM "sentry_filtervalue" WHERE ("sentry_filtervalue"."project_id" = 864 AND "sentry_filtervalue"."value" = \'Chrome\' AND "sentry_filtervalue"."key" = \'browser.name\' )',
u'time': u'0.000'},
{u'sql': u'SELECT "sentry_filterkey"."id", "sentry_filterkey"."project_id", "sentry_filterkey"."key", "sentry_filterkey"."values_seen", "sentry_filterkey"."label", "sentry_filterkey"."status" FROM "sentry_filterkey" WHERE ("sentry_filterkey"."project_id" = 864 AND "sentry_filterkey"."key" = \'browser\' )',
u'time': u'0.000'},
{u'sql': u'SELECT "sentry_filtervalue"."id", "sentry_filtervalue"."project_id", "sentry_filtervalue"."key", "sentry_filtervalue"."value", "sentry_filtervalue"."data", "sentry_filtervalue"."times_seen", "sentry_filtervalue"."last_seen", "sentry_filtervalue"."first_seen" FROM "sentry_filtervalue" WHERE ("sentry_filtervalue"."project_id" = 864 AND "sentry_filtervalue"."value" = \'Chrome 28.0.1500\' AND "sentry_filtervalue"."key" = \'browser\' )',
u'time': u'0.000'},
{u'sql': u'SAVEPOINT "s47890194282880_x54"', u'time': u'0.000'},
{u'sql': u'INSERT INTO "sentry_eventtag" ("project_id", "group_id", "event_id", "key_id", "value_id", "date_added") VALUES (864, 662, 454, 108, 108, \'2018-05-22 09:12:12+00:00\'), (864, 662, 454, 109, 109, \'2018-05-22 09:12:12+00:00\'), (864, 662, 454, 110, 110, \'2018-05-22 09:12:12+00:00\'), (864, 662, 454, 111, 111, \'2018-05-22 09:12:12+00:00\'), (864, 662, 454, 112, 112, \'2018-05-22 09:12:12+00:00\'), (864, 662, 454, 113, 113, \'2018-05-22 09:12:12+00:00\')',
u'time': u'0.000'},
{u'sql': u'RELEASE SAVEPOINT "s47890194282880_x54"', u'time': u'0.000'},
{u'sql': u'UPDATE "sentry_filtervalue" SET "times_seen" = "sentry_filtervalue"."times_seen" + 1, "data" = NULL, "last_seen" = \'2018-05-22 09:12:12+00:00\' WHERE ("sentry_filtervalue"."project_id" = 864 AND "sentry_filtervalue"."value" = \'error\' AND "sentry_filtervalue"."key" = \'level\' )',
u'time': u'0.001'},
{u'sql': u'UPDATE "sentry_messagefiltervalue" SET "times_seen" = "sentry_messagefiltervalue"."times_seen" + 1, "project_id" = 864, "last_seen" = \'2018-05-22 09:12:12+00:00\' WHERE ("sentry_messagefiltervalue"."group_id" = 662 AND "sentry_messagefiltervalue"."value" = \'error\' AND "sentry_messagefiltervalue"."key" = \'level\' )',
u'time': u'0.000'},
{u'sql': u'UPDATE "sentry_filtervalue" SET "times_seen" = "sentry_filtervalue"."times_seen" + 1, "data" = NULL, "last_seen" = \'2018-05-22 09:12:12+00:00\' WHERE ("sentry_filtervalue"."project_id" = 864 AND "sentry_filtervalue"."value" = \'http://example.com\' AND "sentry_filtervalue"."key" = \'url\' )',
u'time': u'0.001'},
{u'sql': u'UPDATE "sentry_messagefiltervalue" SET "times_seen" = "sentry_messagefiltervalue"."times_seen" + 1, "project_id" = 864, "last_seen" = \'2018-05-22 09:12:12+00:00\' WHERE ("sentry_messagefiltervalue"."group_id" = 662 AND "sentry_messagefiltervalue"."value" = \'http://example.com\' AND "sentry_messagefiltervalue"."key" = \'url\' )',
u'time': u'0.001'},
{u'sql': u'UPDATE "sentry_filtervalue" SET "times_seen" = "sentry_filtervalue"."times_seen" + 1, "data" = NULL, "last_seen" = \'2018-05-22 09:12:12+00:00\' WHERE ("sentry_filtervalue"."project_id" = 864 AND "sentry_filtervalue"."value" = \'ip:127.0.0.1\' AND "sentry_filtervalue"."key" = \'sentry:user\' )',
u'time': u'0.001'},
{u'sql': u'UPDATE "sentry_messagefiltervalue" SET "times_seen" = "sentry_messagefiltervalue"."times_seen" + 1, "project_id" = 864, "last_seen" = \'2018-05-22 09:12:12+00:00\' WHERE ("sentry_messagefiltervalue"."group_id" = 662 AND "sentry_messagefiltervalue"."value" = \'ip:127.0.0.1\' AND "sentry_messagefiltervalue"."key" = \'sentry:user\' )',
u'time': u'0.001'},
{u'sql': u'UPDATE "sentry_filtervalue" SET "times_seen" = "sentry_filtervalue"."times_seen" + 1, "data" = NULL, "last_seen" = \'2018-05-22 09:12:12+00:00\' WHERE ("sentry_filtervalue"."project_id" = 864 AND "sentry_filtervalue"."value" = \'Windows 8\' AND "sentry_filtervalue"."key" = \'os.name\' )',
u'time': u'0.001'},
{u'sql': u'UPDATE "sentry_messagefiltervalue" SET "times_seen" = "sentry_messagefiltervalue"."times_seen" + 1, "project_id" = 864, "last_seen" = \'2018-05-22 09:12:12+00:00\' WHERE ("sentry_messagefiltervalue"."group_id" = 662 AND "sentry_messagefiltervalue"."value" = \'Windows 8\' AND "sentry_messagefiltervalue"."key" = \'os.name\' )',
u'time': u'0.001'},
{u'sql': u'UPDATE "sentry_filtervalue" SET "times_seen" = "sentry_filtervalue"."times_seen" + 1, "data" = NULL, "last_seen" = \'2018-05-22 09:12:12+00:00\' WHERE ("sentry_filtervalue"."project_id" = 864 AND "sentry_filtervalue"."value" = \'Chrome\' AND "sentry_filtervalue"."key" = \'browser.name\' )',
u'time': u'0.001'},
{u'sql': u'UPDATE "sentry_messagefiltervalue" SET "times_seen" = "sentry_messagefiltervalue"."times_seen" + 1, "project_id" = 864, "last_seen" = \'2018-05-22 09:12:12+00:00\' WHERE ("sentry_messagefiltervalue"."group_id" = 662 AND "sentry_messagefiltervalue"."value" = \'Chrome\' AND "sentry_messagefiltervalue"."key" = \'browser.name\' )',
u'time': u'0.001'},
{u'sql': u'UPDATE "sentry_filtervalue" SET "times_seen" = "sentry_filtervalue"."times_seen" + 1, "data" = NULL, "last_seen" = \'2018-05-22 09:12:12+00:00\' WHERE ("sentry_filtervalue"."project_id" = 864 AND "sentry_filtervalue"."value" = \'Chrome 28.0.1500\' AND "sentry_filtervalue"."key" = \'browser\' )',
u'time': u'0.001'},
{u'sql': u'UPDATE "sentry_messagefiltervalue" SET "times_seen" = "sentry_messagefiltervalue"."times_seen" + 1, "project_id" = 864, "last_seen" = \'2018-05-22 09:12:12+00:00\' WHERE ("sentry_messagefiltervalue"."group_id" = 662 AND "sentry_messagefiltervalue"."value" = \'Chrome 28.0.1500\' AND "sentry_messagefiltervalue"."key" = \'browser\' )',
u'time': u'0.001'},
{u'sql': u'SELECT "sentry_groupedmessage"."id", "sentry_groupedmessage"."project_id", "sentry_groupedmessage"."logger", "sentry_groupedmessage"."level", "sentry_groupedmessage"."message", "sentry_groupedmessage"."view", "sentry_groupedmessage"."num_comments", "sentry_groupedmessage"."platform", "sentry_groupedmessage"."status", "sentry_groupedmessage"."times_seen", "sentry_groupedmessage"."last_seen", "sentry_groupedmessage"."first_seen", "sentry_groupedmessage"."first_release_id", "sentry_groupedmessage"."resolved_at", "sentry_groupedmessage"."active_at", "sentry_groupedmessage"."time_spent_total", "sentry_groupedmessage"."time_spent_count", "sentry_groupedmessage"."score", "sentry_groupedmessage"."is_public", "sentry_groupedmessage"."data", "sentry_groupedmessage"."short_id" FROM "sentry_groupedmessage" WHERE "sentry_groupedmessage"."id" = 662 ',
u'time': u'0.001'},
{u'sql': u'SELECT "sentry_groupsnooze"."id", "sentry_groupsnooze"."group_id", "sentry_groupsnooze"."until", "sentry_groupsnooze"."count", "sentry_groupsnooze"."window", "sentry_groupsnooze"."user_count", "sentry_groupsnooze"."user_window", "sentry_groupsnooze"."state", "sentry_groupsnooze"."actor_id" FROM "sentry_groupsnooze" WHERE "sentry_groupsnooze"."group_id" = 662 ',
u'time': u'0.000'},
{u'sql': u'SELECT "sentry_grouprulestatus"."id", "sentry_grouprulestatus"."project_id", "sentry_grouprulestatus"."rule_id", "sentry_grouprulestatus"."group_id", "sentry_grouprulestatus"."status", "sentry_grouprulestatus"."date_added", "sentry_grouprulestatus"."last_active" FROM "sentry_grouprulestatus" WHERE ("sentry_grouprulestatus"."group_id" = 662 AND "sentry_grouprulestatus"."rule_id" = 935 )',
u'time': u'0.000'},
{u'sql': u'SAVEPOINT "s47890194282880_x55"', u'time': u'0.000'},
{u'sql': u'RELEASE SAVEPOINT "s47890194282880_x55"', u'time': u'0.000'}]
)
assert result == {
'nodestore_node': 2,
'sentry_environmentproject': 1,
'sentry_eventtag': 1,
'sentry_eventuser': 1,
'sentry_filtervalue': 6,
'sentry_groupedmessage': 1,
'sentry_message': 1,
'sentry_messagefiltervalue': 6,
'sentry_userreport': 1
}
| 163.232143
| 1,026
| 0.679029
| 5,332
| 45,705
| 5.598837
| 0.055889
| 0.117308
| 0.021103
| 0.029545
| 0.913878
| 0.894014
| 0.891468
| 0.885305
| 0.879342
| 0.870666
| 0
| 0.09006
| 0.144601
| 45,705
| 279
| 1,027
| 163.817204
| 0.673522
| 0
| 0
| 0.464945
| 0
| 0.313653
| 0.742545
| 0.4766
| 0
| 0
| 0
| 0
| 0.01107
| 1
| 0.01107
| false
| 0
| 0.01107
| 0
| 0.02583
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 11
|
40d2878d33026b03b359ba15ecb1df444b2388d3
| 147,377
|
py
|
Python
|
src/single_use/test_velocity_definitions_sigma_observable.py
|
MehnaazAsad/RESOLVE_Statistics
|
a7bdcc896ca2c51ab3417c46f07efe8c16825597
|
[
"MIT"
] | 1
|
2020-02-22T02:18:55.000Z
|
2020-02-22T02:18:55.000Z
|
src/single_use/test_velocity_definitions_sigma_observable.py
|
MehnaazAsad/RESOLVE_Statistics
|
a7bdcc896ca2c51ab3417c46f07efe8c16825597
|
[
"MIT"
] | null | null | null |
src/single_use/test_velocity_definitions_sigma_observable.py
|
MehnaazAsad/RESOLVE_Statistics
|
a7bdcc896ca2c51ab3417c46f07efe8c16825597
|
[
"MIT"
] | 1
|
2020-02-22T02:27:49.000Z
|
2020-02-22T02:27:49.000Z
|
"""
{This script tests different measurements of velocity for second observable.}
"""
from cosmo_utils.utils import work_paths as cwpaths
import matplotlib.pyplot as plt
from matplotlib import rc
import pandas as pd
import numpy as np
import scipy as sp
import random
import math
import os
# Matplotlib global style: Helvetica sans-serif at 25 pt, LaTeX text
# rendering (with amsmath available in the preamble), and thicker axis
# spines / major ticks for publication-quality figures.
rc('font', **{'family': 'sans-serif', 'sans-serif': ['Helvetica']}, size=25)
rc('text', usetex=True)
rc('text.latex', preamble=[r"\usepackage{amsmath}"])
rc('axes', linewidth=2)
rc('xtick.major', width=2, size=7)
rc('ytick.major', width=2, size=7)
def read_mock_catl(filename, catl_format='.hdf5'):
    """
    Read an ECO/RESOLVE mock catalogue from disk.

    Parameters
    ----------
    filename: string
        Path and name of the ECO/RESOLVE catalogue to read
    catl_format: string, optional (default = '.hdf5')
        File format of the catalogue. Only '.hdf5' is supported.

    Returns
    -------
    mock_pd: pandas DataFrame
        DataFrame with galaxy/group information

    Raises
    ------
    ValueError
        If `filename` does not exist or `catl_format` is unsupported.
    """
    # Fail early with a clear message when the path is wrong.
    if not os.path.exists(filename):
        raise ValueError(
            '`filename`: {0} NOT FOUND! Exiting..'.format(filename))
    # Only HDF5 catalogues are supported at the moment.
    if catl_format != '.hdf5':
        raise ValueError(
            '`catl_format` ({0}) not supported! Exiting...'.format(
                catl_format))
    return pd.read_hdf(filename)
def read_data_catl(path_to_file, survey):
    """
    Read a survey catalogue and apply the survey-specific selection cuts.

    Parameters
    ----------
    path_to_file: `string`
        Path to survey catalog file
    survey: `string`
        Name of survey ('eco', 'resolvea' or 'resolveb')

    Returns
    ---------
    catl: `pandas.DataFrame`
        Survey catalog with grpcz, abs rmag and stellar mass limits
    volume: `float`
        Volume of survey (without buffer) in [Mpc/h]^3
    z_median: `float`
        Median redshift of survey

    Notes
    -----
    Relies on the module-level global `mf_type` ('smf' or 'bmf') to pick
    the mass-function selection; both choices currently apply the same
    cuts.
    """
    speed_of_light = 3 * 10**5  # km/s
    if survey == 'eco':
        # ECO catalogue (13878 galaxies before cuts), stored as HDF5.
        eco_buff = read_mock_catl(path_to_file)
        grpcz_ok = ((eco_buff.grpcz.values >= 3000) &
                    (eco_buff.grpcz.values <= 7000))
        mag_ok = eco_buff.absrmag.values <= -17.33
        if mf_type == 'smf':
            # 6456 galaxies survive these cuts
            catl = eco_buff.loc[grpcz_ok & mag_ok]
        elif mf_type == 'bmf':
            # identical selection to the SMF case
            catl = eco_buff.loc[grpcz_ok & mag_ok]
        volume = 151829.26  # Survey volume without buffer [Mpc/h]^3
        z_median = np.median(catl.grpcz.values) / speed_of_light
    elif survey == 'resolvea' or survey == 'resolveb':
        columns = ['name', 'radeg', 'dedeg', 'cz', 'grpcz', 'absrmag',
                   'logmstar', 'logmgas', 'grp', 'grpn', 'grpnassoc', 'logmh',
                   'logmh_s', 'fc', 'grpmb', 'grpms', 'f_a', 'f_b']
        # RESOLVE live catalogue (2286 galaxies before cuts), stored as CSV.
        resolve_live18 = pd.read_csv(path_to_file, delimiter=",", header=0,
                                     usecols=columns)
        grpcz_ok = ((resolve_live18.grpcz.values >= 4500) &
                    (resolve_live18.grpcz.values <= 7000))
        if survey == 'resolvea':
            sel = ((resolve_live18.f_a.values == 1) & grpcz_ok &
                   (resolve_live18.absrmag.values <= -17.33))
            if mf_type == 'smf':
                catl = resolve_live18.loc[sel]
            elif mf_type == 'bmf':
                # identical selection to the SMF case
                catl = resolve_live18.loc[sel]
            volume = 13172.384  # Survey volume without buffer [Mpc/h]^3
        elif survey == 'resolveb':
            # RESOLVE-B uses a brighter magnitude limit (-17 vs -17.33);
            # 487 galaxies pass the cz cut, 369 the grpcz cut.
            sel = ((resolve_live18.f_b.values == 1) & grpcz_ok &
                   (resolve_live18.absrmag.values <= -17))
            if mf_type == 'smf':
                catl = resolve_live18.loc[sel]
            elif mf_type == 'bmf':
                catl = resolve_live18.loc[sel]
            volume = 4709.8373  # Survey volume without buffer [Mpc/h]^3
        z_median = np.median(resolve_live18.grpcz.values) / speed_of_light
    return catl, volume, z_median
def assign_colour_label_data(catl):
    """
    Assign a red/blue colour label to every galaxy in the data catalogue.

    Galaxies are split in (u-r) colour at the stellar-mass-dependent
    divider of Moffett et al. 2015 (their equation 1).

    Parameters
    ----------
    catl: pandas Dataframe
        Data catalog with `logmstar` and `modelu_rcorr` columns

    Returns
    ---------
    catl: pandas Dataframe
        Data catalog with colour label assigned as new `colour_label`
        column ('R' or 'B')
    """
    logmstar_arr = catl.logmstar.values
    u_r_arr = catl.modelu_rcorr.values
    labels = np.empty(len(catl), dtype='str')
    for idx, mstar in enumerate(logmstar_arr):
        # Divider from Moffett et al. 2015 eq. 1: constant below
        # logM*=9.1, linear in logM* up to 10.1, constant above.
        if mstar <= 9.1:
            cut = 1.457
        elif mstar < 10.1:
            cut = 0.24 * mstar - 0.7
        else:
            cut = 1.7
        labels[idx] = 'R' if u_r_arr[idx] > cut else 'B'
    catl['colour_label'] = labels
    return catl
def diff_smf(mstar_arr, volume, h1_bool, colour_flag=False):
    """
    Calculates differential stellar mass function in units of h=1.0

    Parameters
    ----------
    mstar_arr: numpy array
        Array of stellar masses (linear masses in h=0.7 units if
        `h1_bool` is False, linear masses in h=1 units otherwise)
    volume: float
        Volume of survey or simulation
    h1_bool: boolean
        True if units of masses are h=1, False if units of masses are not h=1
    colour_flag: string or boolean, optional (default = False)
        'R' or 'B' to use the red/blue ECO binning; False for the full
        sample binning

    Returns
    ---------
    maxis: array
        Array of x-axis mass values (bin centers)
    phi: array
        Array of y-axis values (log10 of counts normalized to volume and
        bin width)
    err_tot: array
        Array of Poisson error values per bin
    bins: array
        Array of bin edge values
    counts: array
        Array of raw (unnormalized) galaxy counts per bin

    Raises
    ------
    ValueError
        If the module-level global `survey` is not 'eco', 'resolvea' or
        'resolveb'.

    Notes
    -----
    Relies on the module-level global `survey` to choose the binning.
    """
    if not h1_bool:
        # changing from h=0.7 to h=1 assuming h^-2 dependence
        logmstar_arr = np.log10((10**mstar_arr) / 2.041)
    else:
        logmstar_arr = np.log10(mstar_arr)
    if survey == 'eco' or survey == 'resolvea':
        bin_min = np.round(np.log10((10**8.9) / 2.041), 1)
        if survey == 'eco' and colour_flag == 'R':
            bin_max = np.round(np.log10((10**11.5) / 2.041), 1)
            bin_num = 6
        elif survey == 'eco' and colour_flag == 'B':
            bin_max = np.round(np.log10((10**11) / 2.041), 1)
            bin_num = 6
        elif survey == 'resolvea':
            # different to avoid nan in inverse corr mat
            bin_max = np.round(np.log10((10**11.5) / 2.041), 1)
            bin_num = 7
        else:
            bin_max = np.round(np.log10((10**11.5) / 2.041), 1)
            bin_num = 7
        bins = np.linspace(bin_min, bin_max, bin_num)
    elif survey == 'resolveb':
        bin_min = np.round(np.log10((10**8.7) / 2.041), 1)
        bin_max = np.round(np.log10((10**11.8) / 2.041), 1)
        bins = np.linspace(bin_min, bin_max, 7)
    else:
        # Previously an unknown survey fell through to a NameError on
        # `bins`; fail explicitly instead.
        raise ValueError('`survey` ({0}) not supported! Exiting...'.format(
            survey))
    # Unnormalized histogram and bin edges
    counts, edg = np.histogram(logmstar_arr, bins=bins)  # paper used 17 bins
    dm = edg[1] - edg[0]  # Bin width
    maxis = 0.5 * (edg[1:] + edg[:-1])  # Mass axis i.e. bin centers
    # Normalized to volume and bin width; Poisson errors only
    err_poiss = np.sqrt(counts) / (volume * dm)
    err_tot = err_poiss
    phi = counts / (volume * dm)  # not a log quantity
    phi = np.log10(phi)
    return maxis, phi, err_tot, bins, counts
def measure_all_smf(table, volume, data_bool, randint_logmstar=None):
    """
    Calculates differential stellar mass function for all, red and blue
    galaxies from mock/data

    Parameters
    ----------
    table: pandas Dataframe
        Dataframe of either mock or data
    volume: float
        Volume of simulation/survey
    data_bool: Boolean
        True for data, False for a mock
    randint_logmstar: int, optional
        Mock-only: identifier of the log(M*) column to use

    Returns
    ---------
    Three lists of [mass axis, phi, total error, counts per bin], one
    each for the total, red and blue samples
    """
    colour_col = 'colour_label'
    if data_bool:
        # Data catalogues store linear stellar masses (h=0.7 units).
        logmstar_col = 'logmstar'
        all_masses = table[logmstar_col]
        red_masses = table[logmstar_col].loc[table[colour_col] == 'R']
        blue_masses = table[logmstar_col].loc[table[colour_col] == 'B']
        h1 = False
    else:
        # logmstar_col = 'stellar_mass'
        logmstar_col = '{0}'.format(randint_logmstar)
        # Behroozi mocks store log(M*); convert back to linear masses.
        all_masses = 10**(table[logmstar_col])
        red_masses = 10**(table[logmstar_col].loc[table[colour_col] == 'R'])
        blue_masses = 10**(table[logmstar_col].loc[table[colour_col] == 'B'])
        h1 = True
    smf_total = diff_smf(all_masses, volume, h1)
    smf_red = diff_smf(red_masses, volume, h1, 'R')
    smf_blue = diff_smf(blue_masses, volume, h1, 'B')
    # Keep [mass axis, phi, error, counts] per sample; drop the bin
    # edges (index 3) from each diff_smf result.
    return ([smf_total[0], smf_total[1], smf_total[2], smf_total[4]],
            [smf_red[0], smf_red[1], smf_red[2], smf_red[4]],
            [smf_blue[0], smf_blue[1], smf_blue[2], smf_blue[4]])
def std_func(bins, mass_arr, vel_arr):
    """
    Calculate the standard deviation about a mean of zero of the
    velocities in each mass bin.

    Parameters
    ----------
    bins: array
        Array of bin edges; each edge opens one bin, and the final edge
        collects everything at or above it
    mass_arr: array
        Array of masses to be binned
    vel_arr: array
        Array of velocities (one per mass)

    Returns
    ---------
    std_arr: list
        Standard deviation from 0 of velocity difference values in each
        mass bin (NaN for empty bins)
    """
    last_index = len(bins) - 1
    std_arr = []
    for i, lower_edge in enumerate(bins):
        if i == last_index:
            # Final "bin" is open-ended: everything at or above the edge.
            selected = [vel_arr[j] for j, m in enumerate(mass_arr)
                        if m >= lower_edge]
        else:
            upper_edge = bins[i + 1]
            selected = [vel_arr[j] for j, m in enumerate(mass_arr)
                        if lower_edge <= m < upper_edge]
        # Deviation is measured from zero, not from the sample mean.
        std_arr.append(np.sqrt(np.mean([v**2 for v in selected])))
    return std_arr
def mean_std_func(bins, mass_arr, vel_arr, groupid_arr):
    """
    Mean of per-group velocity dispersions in each stellar mass bin.

    For every mass bin, galaxies are grouped by their group ID and a
    velocity dispersion (rms about a mean of zero) is measured per group;
    the bin value is the mean of those per-group dispersions.

    Parameters
    ----------
    bins: array
        Array of mass bin edges (defines len(bins) - 1 bins)
    mass_arr: array
        Array of masses used to bin the galaxies
    vel_arr: array
        Array of velocity difference values (one per galaxy)
    groupid_arr: array
        Array of group IDs (one per galaxy)

    Returns
    ---------
    mean_std_arr: list
        Mean per-group velocity dispersion in each mass bin (NaN for
        empty bins)
    """
    mass_bin_idxs = np.digitize(mass_arr, bins)
    # Galaxies at/above the last bin edge get index len(bins); fold them
    # into the last bin. (Generalized from the previous hard-coded
    # `== 6`, which silently assumed exactly 5 bins.)
    last_bin = len(bins) - 1
    mass_bin_idxs[mass_bin_idxs > last_bin] = last_bin
    mean_std_arr = []
    for bin_idx in range(1, len(bins)):
        in_bin = np.argwhere(mass_bin_idxs == bin_idx)
        df_temp = pd.DataFrame({
            'group_id': np.array(groupid_arr)[in_bin].flatten(),
            'deltav': np.array(vel_arr)[in_bin].flatten()})
        # One dispersion value per group: rms about a mean of zero.
        std_arr = [np.sqrt(np.mean(group.deltav.values**2))
                   for _, group in df_temp.groupby('group_id')]
        mean_std_arr.append(np.mean(std_arr))
    return mean_std_arr
def median_std_func(bins, mass_arr, vel_arr, groupid_arr):
    """
    Measure the median, over groups, of the per-group spread of
    velocity-difference values in each stellar mass bin. The spread is the
    RMS about zero (the mean is taken to be 0, not the sample mean).

    Parameters
    ----------
    bins: array
        Stellar mass bin edges
    mass_arr: array
        Central stellar mass assigned to each velocity value
    vel_arr: array
        Velocity-difference values (same length as mass_arr)
    groupid_arr: array
        Group ID of each galaxy (same length as mass_arr)

    Returns
    ---------
    median_std_arr: list
        Median of the per-group spreads in each mass bin
        (len(bins)-1 entries; NaN for bins with no galaxies)
    """
    mass_arr_bin_idxs = np.digitize(mass_arr, bins)
    # Fold overflow bin (np.digitize returns len(bins) for values above the
    # last edge) into the last bin; generalized from the hard-coded 6 -> 5.
    mass_arr_bin_idxs[mass_arr_bin_idxs == len(bins)] = len(bins) - 1
    median_std_arr = []
    for idx in range(1, len(bins)):
        in_bin = np.argwhere(mass_arr_bin_idxs == idx).flatten()
        df_temp = pd.DataFrame(
            {'group_id': np.array(groupid_arr)[in_bin],
             'deltav': np.array(vel_arr)[in_bin]})
        # One spread value per group: RMS of its members' velocity
        # differences about zero
        std_arr = []
        for _, group in df_temp.groupby('group_id'):
            vels = group.deltav.values
            std_arr.append(np.sqrt(np.mean(vels**2)))
        median_std_arr.append(np.median(std_arr))
    return median_std_arr
def std_func_mod(bins, mass_arr, vel_arr):
    """
    Measure the spread of velocity-difference values in each stellar mass
    bin, pooling all galaxies in a bin together. The spread is the RMS
    about zero (deviation is measured from 0, not the sample mean).

    Parameters
    ----------
    bins: array
        Stellar mass bin edges
    mass_arr: array
        Central stellar mass assigned to each velocity value
    vel_arr: array
        Velocity-difference values (same length as mass_arr)

    Returns
    ---------
    std_arr: list
        Spread of velocity differences in each mass bin
        (len(bins)-1 entries; NaN for bins with no galaxies)
    """
    mass_arr_bin_idxs = np.digitize(mass_arr, bins)
    # Fold values above the last edge (np.digitize returns len(bins)) into
    # the last bin; generalized from the original hard-coded 6 -> 5.
    mass_arr_bin_idxs[mass_arr_bin_idxs == len(bins)] = len(bins) - 1
    vel_arr = np.asarray(vel_arr)
    std_arr = []
    for idx in range(1, len(bins)):
        vals = vel_arr[mass_arr_bin_idxs == idx]
        # RMS about zero rather than np.std about the sample mean
        std_arr.append(np.sqrt(np.mean(vals**2)))
    return std_arr
def mean_grphalo_func(bins, logmstar_arr, loghalom_arr):
    """
    Measure the mean log group halo mass in bins of central stellar mass.

    Parameters
    ----------
    bins: array
        Stellar mass bin edges
    logmstar_arr: array
        Central stellar mass assigned to each group
    loghalom_arr: array
        Log group halo masses (same length as logmstar_arr)

    Returns
    ---------
    mean_halomass_arr: list
        Mean log halo mass in each stellar mass bin
        (len(bins)-1 entries; NaN for bins with no groups)
    """
    mass_arr_bin_idxs = np.digitize(logmstar_arr, bins)
    # Fold values above the last edge into the last bin; generalized from
    # the original hard-coded 6 -> 5.
    mass_arr_bin_idxs[mass_arr_bin_idxs == len(bins)] = len(bins) - 1
    loghalom_arr = np.asarray(loghalom_arr)
    mean_halomass_arr = []
    for idx in range(1, len(bins)):
        mean_halomass_arr.append(
            np.mean(loghalom_arr[mass_arr_bin_idxs == idx]))
    return mean_halomass_arr
def mean_grphalo_vcirc_func(bins, logmstar_arr, loghalom_arr):
    """
    Measure the mean circular velocity of group haloes in bins of central
    stellar mass, where vcirc = sqrt(G*M/R) with R the radius enclosing a
    mean density of delta_mean * omega_m * rho_crit.

    Parameters
    ----------
    bins: array
        Stellar mass bin edges
    logmstar_arr: array
        Central stellar mass assigned to each group
    loghalom_arr: array
        Log group halo masses (same length as logmstar_arr)

    Returns
    ---------
    mean_vcirc_arr: list
        Mean halo circular velocity [km/s] in each stellar mass bin
        (len(bins)-1 entries; NaN for bins with no groups)
    """
    # Cosmology / halo-definition constants, hoisted out of the bin loop
    # (they do not depend on the bin)
    delta_mean = 200
    omega_m = 0.3
    rho_crit = 2.77*10**11 # assuming h=1.0 # h^2 . Msun/Mpc^3
    G = 4.3*10**-9 # Mpc . Msun^-1 . (km/s)^2
    mass_arr_bin_idxs = np.digitize(logmstar_arr, bins)
    # Fold values above the last edge into the last bin; generalized from
    # the original hard-coded 6 -> 5.
    mass_arr_bin_idxs[mass_arr_bin_idxs == len(bins)] = len(bins) - 1
    loghalom_arr = np.asarray(loghalom_arr)
    mean_vcirc_arr = []
    for idx in range(1, len(bins)):
        halomass = 10**loghalom_arr[mass_arr_bin_idxs == idx]
        # radius in Mpc enclosing the chosen mean overdensity
        halo_radius = ((3*halomass) /
            (4*np.pi*delta_mean*omega_m*rho_crit))**(1/3)
        halo_vcirc = np.sqrt((G*halomass)/halo_radius)
        mean_vcirc_arr.append(np.mean(halo_vcirc))
    return mean_vcirc_arr
def get_deltav_sigma_data(df):
    """
    Measure spread in velocity dispersion separately for red and blue
    galaxies by binning up central stellar mass (changes logmstar units
    from h=0.7 to h=1).

    Parameters
    ----------
    df: pandas Dataframe
        Data catalog (requires logmstar, groupid, g_galtype, colour_label,
        cz columns; reads the module-level `survey` variable)

    Returns
    ---------
    std_red: numpy array
        Spread in velocity dispersion of red galaxies
    centers_red: numpy array
        Bin centers of central stellar mass for red galaxies
    std_blue: numpy array
        Spread in velocity dispersion of blue galaxies
    centers_blue: numpy array
        Bin centers of central stellar mass for blue galaxies
    """
    catl = df.copy()
    # Survey-specific completeness cut (h=0.7 units), then convert to h=1
    if survey == 'eco' or survey == 'resolvea':
        catl = catl.loc[catl.logmstar >= 8.9]
    elif survey == 'resolveb':
        catl = catl.loc[catl.logmstar >= 8.7]
    catl.logmstar = np.log10((10**catl.logmstar) / 2.041)

    red_subset_grpids = np.unique(catl.groupid.loc[(catl.\
        colour_label == 'R') & (catl.g_galtype == 1)].values)
    blue_subset_grpids = np.unique(catl.groupid.loc[(catl.\
        colour_label == 'B') & (catl.g_galtype == 1)].values)

    # Calculating spread in velocity dispersion for galaxies in groups with
    # a red central
    red_singleton_counter = 0
    red_deltav_arr = []
    red_cen_stellar_mass_arr = []
    for key in red_subset_grpids:
        group = catl.loc[catl.groupid == key]
        if len(group) == 1:
            # BUG FIX: originally incremented the undefined name
            # `singleton_counter`, raising NameError on any singleton group
            red_singleton_counter += 1
        else:
            cen_stellar_mass = group.logmstar.loc[group.g_galtype.\
                values == 1].values[0]
            mean_cz_grp = np.round(np.mean(group.cz.values), 2)
            # Velocity difference of every member from the group mean cz
            deltav = group.cz.values - len(group)*[mean_cz_grp]
            for val in deltav:
                red_deltav_arr.append(val)
                red_cen_stellar_mass_arr.append(cen_stellar_mass)
    if survey == 'eco' or survey == 'resolvea':
        # TODO : check if this is actually correct for resolve a
        red_stellar_mass_bins = np.linspace(8.6, 11.2, 6)
    elif survey == 'resolveb':
        red_stellar_mass_bins = np.linspace(8.4, 11.0, 6)
    std_red = np.array(std_func_mod(red_stellar_mass_bins,
        red_cen_stellar_mass_arr, red_deltav_arr))

    # Same measurement for groups with a blue central
    blue_singleton_counter = 0
    blue_deltav_arr = []
    blue_cen_stellar_mass_arr = []
    for key in blue_subset_grpids:
        group = catl.loc[catl.groupid == key]
        if len(group) == 1:
            blue_singleton_counter += 1
        else:
            cen_stellar_mass = group.logmstar.loc[group.g_galtype\
                .values == 1].values[0]
            mean_cz_grp = np.round(np.mean(group.cz.values), 2)
            deltav = group.cz.values - len(group)*[mean_cz_grp]
            for val in deltav:
                blue_deltav_arr.append(val)
                blue_cen_stellar_mass_arr.append(cen_stellar_mass)
    if survey == 'eco' or survey == 'resolvea':
        # TODO : check if this is actually correct for resolve a
        blue_stellar_mass_bins = np.linspace(8.6, 10.7, 6)
    elif survey == 'resolveb':
        blue_stellar_mass_bins = np.linspace(8.4, 10.4, 6)
    std_blue = np.array(std_func_mod(blue_stellar_mass_bins,
        blue_cen_stellar_mass_arr, blue_deltav_arr))

    centers_red = 0.5 * (red_stellar_mass_bins[1:] +
        red_stellar_mass_bins[:-1])
    centers_blue = 0.5 * (blue_stellar_mass_bins[1:] +
        blue_stellar_mass_bins[:-1])
    return std_red, centers_red, std_blue, centers_blue
def get_deltav_sigma_mocks_qmcolour(survey, mock_df):
    """
    Calculate spread in velocity dispersion from a survey mock catalog
    (logmstar converted to h=1 units before analysis).

    Parameters
    ----------
    survey: string
        Name of survey ('eco', 'resolvea' or 'resolveb')
    mock_df: pandas Dataframe
        Mock catalog with groupid, g_galtype, colour_label, logmstar
        and cz columns

    Returns
    ---------
    std_red: numpy array
        Spread in velocity dispersion of red galaxies
    std_blue: numpy array
        Spread in velocity dispersion of blue galaxies
    centers_red: numpy array
        Bin centers of central stellar mass for red galaxies
    centers_blue: numpy array
        Bin centers of central stellar mass for blue galaxies
    """
    mock_pd = mock_df.copy()
    # Convert stellar masses from h=0.7 to h=1 units
    mock_pd.logmstar = np.log10((10**mock_pd.logmstar) / 2.041)
    red_subset_grpids = np.unique(mock_pd.groupid.loc[(mock_pd.\
        colour_label == 'R') & (mock_pd.g_galtype == 1)].values)
    blue_subset_grpids = np.unique(mock_pd.groupid.loc[(mock_pd.\
        colour_label == 'B') & (mock_pd.g_galtype == 1)].values)

    # Calculating spread in velocity dispersion for galaxies in groups
    # with a red central
    red_deltav_arr = []
    red_cen_stellar_mass_arr = []
    for key in red_subset_grpids:
        group = mock_pd.loc[mock_pd.groupid == key]
        cen_stellar_mass = group.logmstar.loc[group.g_galtype.\
            values == 1].values[0]
        # Velocity difference from the group mean cz (the unused
        # central-cz velocity definition was removed)
        mean_cz_grp = np.round(np.mean(group.cz.values), 2)
        deltav = group.cz.values - len(group)*[mean_cz_grp]
        for val in deltav:
            red_deltav_arr.append(val)
            red_cen_stellar_mass_arr.append(cen_stellar_mass)
    if survey == 'eco' or survey == 'resolvea':
        # TODO : check if this is actually correct for resolve a
        red_stellar_mass_bins = np.linspace(8.6, 11.2, 6)
    elif survey == 'resolveb':
        red_stellar_mass_bins = np.linspace(8.4, 11.0, 6)
    std_red = np.array(std_func(red_stellar_mass_bins,
        red_cen_stellar_mass_arr, red_deltav_arr))

    # Same measurement for groups with a blue central
    blue_deltav_arr = []
    blue_cen_stellar_mass_arr = []
    for key in blue_subset_grpids:
        group = mock_pd.loc[mock_pd.groupid == key]
        cen_stellar_mass = group.logmstar.loc[group.g_galtype\
            .values == 1].values[0]
        mean_cz_grp = np.round(np.mean(group.cz.values), 2)
        deltav = group.cz.values - len(group)*[mean_cz_grp]
        for val in deltav:
            blue_deltav_arr.append(val)
            blue_cen_stellar_mass_arr.append(cen_stellar_mass)
    if survey == 'eco' or survey == 'resolvea':
        # TODO : check if this is actually correct for resolve a
        blue_stellar_mass_bins = np.linspace(8.6, 10.7, 6)
    elif survey == 'resolveb':
        blue_stellar_mass_bins = np.linspace(8.4, 10.4, 6)
    std_blue = np.array(std_func(blue_stellar_mass_bins,
        blue_cen_stellar_mass_arr, blue_deltav_arr))

    centers_red = 0.5 * (red_stellar_mass_bins[1:] +
        red_stellar_mass_bins[:-1])
    centers_blue = 0.5 * (blue_stellar_mass_bins[1:] +
        blue_stellar_mass_bins[:-1])
    return std_red, std_blue, centers_red, centers_blue
def get_err_data(survey, path):
    """
    Calculate error in data SMF from mocks

    Parameters
    ----------
    survey: string
        Name of survey ('eco', 'resolvea' or 'resolveb')
    path: string
        Path to mock catalogs

    Returns
    ---------
    err_total: array
        Standard deviation of total phi values between all mocks
        (sqrt of the diagonal of their covariance matrix)
    err_colour: array
        Standard deviation of the 20 combined per-colour measurements
        across all mocks: 5 red phi bins, 5 blue phi bins, 5 red
        velocity-spread bins and 5 blue velocity-spread bins
    """
    # Survey-specific selection limits and volumes
    if survey == 'eco':
        mock_name = 'ECO'
        num_mocks = 8
        min_cz = 3000
        max_cz = 7000
        mag_limit = -17.33
        mstar_limit = 8.9
        volume = 151829.26 # Survey volume without buffer [Mpc/h]^3
    elif survey == 'resolvea':
        mock_name = 'A'
        num_mocks = 59
        min_cz = 4500
        max_cz = 7000
        mag_limit = -17.33
        mstar_limit = 8.9
        volume = 13172.384 # Survey volume without buffer [Mpc/h]^3
    elif survey == 'resolveb':
        mock_name = 'B'
        num_mocks = 104
        min_cz = 4500
        max_cz = 7000
        mag_limit = -17
        mstar_limit = 8.7
        volume = 4709.8373 # Survey volume without buffer [Mpc/h]^3
    # Accumulators: one entry per mock realization
    phi_arr_total = []
    phi_arr_red = []
    phi_arr_blue = []
    sig_arr_red = []
    sig_arr_blue = []
    cen_arr_red = []
    cen_arr_blue = []
    # colour_err_arr = []
    # colour_corr_mat_inv = []
    box_id_arr = np.linspace(5001,5008,8)
    for box in box_id_arr:
        box = int(box)
        temp_path = path + '{0}/{1}_m200b_catls/'.format(box,
            mock_name)
        for num in range(num_mocks):
            filename = temp_path + '{0}_cat_{1}_Planck_memb_cat.hdf5'.format(
                mock_name, num)
            mock_pd = read_mock_catl(filename)
            # Using the same survey definition as in mcmc smf i.e excluding the
            # buffer
            mock_pd = mock_pd.loc[(mock_pd.cz.values >= min_cz) & \
                (mock_pd.cz.values <= max_cz) & (mock_pd.M_r.values <= mag_limit) &\
                (mock_pd.logmstar.values >= mstar_limit)]
            ## Using best-fit found for old ECO data using optimize_hybridqm_eco,py
            # Mstar_q = 10.39 # Msun/h
            # Mh_q = 14.85 # Msun/h
            # mu = 0.65
            # nu = 0.16
            ## Using best-fit found for new ECO data using optimize_hybridqm_eco,py
            Mstar_q = 10.49 # Msun/h
            Mh_q = 14.03 # Msun/h
            mu = 0.69
            nu = 0.148
            theta = [Mstar_q, Mh_q, mu, nu]
            # Assign red/blue labels to the mock via the quenching model
            f_red_c, f_red_s = hybrid_quenching_model(theta, mock_pd, 'nonvishnu')
            mock_pd = assign_colour_label_mock(f_red_c, f_red_s, mock_pd)
            logmstar_arr = mock_pd.logmstar.values
            #Measure SMF of mock using diff_smf function
            max_total, phi_total, err_total, bins_total, counts_total = \
                diff_smf(logmstar_arr, volume, False)
            max_red, phi_red, err_red, bins_red, counts_red = \
                diff_smf(mock_pd.logmstar.loc[mock_pd.colour_label.values == 'R'],
                volume, False, 'R')
            max_blue, phi_blue, err_blue, bins_blue, counts_blue = \
                diff_smf(mock_pd.logmstar.loc[mock_pd.colour_label.values == 'B'],
                volume, False, 'B')
            phi_arr_total.append(phi_total)
            phi_arr_red.append(phi_red)
            phi_arr_blue.append(phi_blue)
            # Spread in velocity dispersion for this mock realization
            sig_red, sig_blue, cen_red, cen_blue = \
                get_deltav_sigma_mocks_qmcolour(survey, mock_pd)
            sig_arr_red.append(sig_red)
            sig_arr_blue.append(sig_blue)
            cen_arr_red.append(cen_red)
            cen_arr_blue.append(cen_blue)
    phi_arr_total = np.array(phi_arr_total)
    phi_arr_red = np.array(phi_arr_red)
    phi_arr_blue = np.array(phi_arr_blue)
    sig_arr_red = np.array(sig_arr_red)
    sig_arr_blue = np.array(sig_arr_blue)
    cen_arr_red = np.array(cen_arr_red)
    cen_arr_blue = np.array(cen_arr_blue)
    # Covariance matrix for total phi (all galaxies)
    cov_mat = np.cov(phi_arr_total, rowvar=False) # default norm is N-1
    err_total = np.sqrt(cov_mat.diagonal())
    # One column per bin of each per-colour measurement, one row per mock
    phi_red_0 = phi_arr_red[:,0]
    phi_red_1 = phi_arr_red[:,1]
    phi_red_2 = phi_arr_red[:,2]
    phi_red_3 = phi_arr_red[:,3]
    phi_red_4 = phi_arr_red[:,4]
    phi_blue_0 = phi_arr_blue[:,0]
    phi_blue_1 = phi_arr_blue[:,1]
    phi_blue_2 = phi_arr_blue[:,2]
    phi_blue_3 = phi_arr_blue[:,3]
    phi_blue_4 = phi_arr_blue[:,4]
    dv_red_0 = sig_arr_red[:,0]
    dv_red_1 = sig_arr_red[:,1]
    dv_red_2 = sig_arr_red[:,2]
    dv_red_3 = sig_arr_red[:,3]
    dv_red_4 = sig_arr_red[:,4]
    dv_blue_0 = sig_arr_blue[:,0]
    dv_blue_1 = sig_arr_blue[:,1]
    dv_blue_2 = sig_arr_blue[:,2]
    dv_blue_3 = sig_arr_blue[:,3]
    dv_blue_4 = sig_arr_blue[:,4]
    combined_df = pd.DataFrame({'phi_red_0':phi_red_0, 'phi_red_1':phi_red_1,\
        'phi_red_2':phi_red_2, 'phi_red_3':phi_red_3, 'phi_red_4':phi_red_4, \
        'phi_blue_0':phi_blue_0, 'phi_blue_1':phi_blue_1,
        'phi_blue_2':phi_blue_2, 'phi_blue_3':phi_blue_3,
        'phi_blue_4':phi_blue_4, \
        'dv_red_0':dv_red_0, 'dv_red_1':dv_red_1, 'dv_red_2':dv_red_2, \
        'dv_red_3':dv_red_3, 'dv_red_4':dv_red_4, \
        'dv_blue_0':dv_blue_0, 'dv_blue_1':dv_blue_1, 'dv_blue_2':dv_blue_2, \
        'dv_blue_3':dv_blue_3, 'dv_blue_4':dv_blue_4})
    # Correlation matrix of phi and deltav colour measurements combined
    corr_mat_colour = combined_df.corr()
    # NOTE(review): the inverse correlation matrix is computed but never
    # returned or used here -- confirm whether it is still needed
    corr_mat_inv_colour = np.linalg.inv(corr_mat_colour.values)
    err_colour = np.sqrt(np.diag(combined_df.cov()))
    return err_total, err_colour
def hybrid_quenching_model(theta, gals_df, mock, randint=None):
    """
    Apply the hybrid quenching model from Zu and Mandelbaum 2015 to a
    mock catalog.

    Parameters
    ----------
    theta: array-like
        Model parameters [Mstar_q, Mh_q, mu, nu] (masses in log Msun/h)
    gals_df: pandas dataframe
        Mock catalog
    mock: string
        Type of mock ('vishnu' or otherwise); forwarded to the mass getters
    randint: int, optional
        Mock realization identifier forwarded to get_stellar_mock

    Returns
    ---------
    f_red_cen: array
        Array of central red fractions
    f_red_sat: array
        Array of satellite red fractions
    """
    # parameter values from Table 1 of Zu and Mandelbaum 2015 "prior case"
    Mstar_q, Mh_q = theta[0], theta[1]  # Msun/h
    mu, nu = theta[2], theta[3]
    cen_hosthalo_mass_arr, sat_hosthalo_mass_arr = get_host_halo_mock(
        gals_df, mock)
    cen_stellar_mass_arr, sat_stellar_mass_arr = get_stellar_mock(
        gals_df, mock, randint)
    # Centrals quench as a function of stellar mass alone
    f_red_cen = 1 - np.exp(-((cen_stellar_mass_arr / (10**Mstar_q))**mu))
    # Satellites quench as a joint function of stellar and host halo mass
    g_Mstar = np.exp(-((sat_stellar_mass_arr / (10**Mstar_q))**mu))
    h_Mh = np.exp(-((sat_hosthalo_mass_arr / (10**Mh_q))**nu))
    f_red_sat = 1 - (g_Mstar * h_Mh)
    return f_red_cen, f_red_sat
def get_host_halo_mock(gals_df, mock):
    """
    Get host halo masses of centrals and satellites from a mock catalog.

    Parameters
    ----------
    gals_df: pandas dataframe
        Mock catalog with a `cs_flag` column (1 = central, anything else =
        satellite) plus `halo_mvir` (vishnu) or `loghalom` (other mocks)
    mock: string
        'vishnu' to read linear `halo_mvir` masses; any other value reads
        log10 `loghalom` and converts to linear mass

    Returns
    ---------
    cen_halos: array
        Array of central host halo masses
    sat_halos: array
        Array of satellite host halo masses
    """
    df = gals_df.copy()
    if mock == 'vishnu':
        # vishnu mocks store the halo mass linearly
        masses = df.halo_mvir.values
    else:
        # other mocks store log10 halo mass; convert to linear
        masses = 10**(df.loghalom.values)
    # Vectorized split replaces the original per-row loop; as before, any
    # cs_flag != 1 counts as a satellite
    cen_mask = df.cs_flag.values == 1
    cen_halos = np.array(masses[cen_mask])
    sat_halos = np.array(masses[~cen_mask])
    return cen_halos, sat_halos
def get_stellar_mock(gals_df, mock, randint=None):
    """
    Get stellar masses of centrals and satellites from a mock catalog.

    Parameters
    ----------
    gals_df: pandas dataframe
        Mock catalog with `cs_flag` (1 = central, 0 = satellite) and either
        a column named after `randint` (vishnu) or `logmstar`
    mock: string
        'vishnu' to read log masses from the `randint` column; any other
        value reads `logmstar` and converts from h=0.7 to h=1 units
    randint: int, optional
        Name of the vishnu stellar mass column (converted to string)

    Returns
    ---------
    cen_gals: array
        Array of central stellar masses (linear)
    sat_gals: array
        Array of satellite stellar masses (linear)
    """
    df = gals_df.copy()
    if mock == 'vishnu':
        # vishnu masses are log10, stored under the realization column
        masses = 10**(df['{0}'.format(randint)].values)
    else:
        # convert log10 mass and change h=0.7 units to h=1 (divide by 2.041)
        masses = (10**(df.logmstar.values)) / 2.041
    # Vectorized split replaces the per-row loop. As in the original
    # elif-based selection, rows with cs_flag not in {0, 1} are excluded
    # from both outputs.
    cs_flag = df.cs_flag.values
    cen_gals = np.array(masses[cs_flag == 1])
    sat_gals = np.array(masses[cs_flag == 0])
    return cen_gals, sat_gals
def assign_colour_label_mock(f_red_cen, f_red_sat, gals_df, drop_fred=False):
    """
    Assign a red/blue colour label to each galaxy in a mock catalog by
    comparing one uniform random draw per galaxy against its red fraction.

    Parameters
    ----------
    f_red_cen: array
        Array of central red fractions (one per cs_flag == 1 row)
    f_red_sat: array
        Array of satellite red fractions (one per cs_flag == 0 row)
    gals_df: pandas Dataframe
        Mock catalog
    drop_fred: boolean
        Whether or not to keep red fraction column after colour has been
        assigned

    Returns
    ---------
    df: pandas Dataframe
        Dataframe with colour label and random number assigned as
        new columns
    """
    df = gals_df.copy()
    # Attach red fractions: centrals (cs_flag == 1) and satellites (== 0)
    df.loc[:, 'f_red'] = np.zeros(len(df))
    df.loc[df['cs_flag'] == 1, 'f_red'] = f_red_cen
    df.loc[df['cs_flag'] == 0, 'f_red'] = f_red_sat
    f_red_arr = df['f_red'].values
    # One sequential uniform draw per galaxy (same draw order as before);
    # a galaxy is red when its draw falls below its red fraction
    colour_labels = []
    rng_vals = []
    for f_red in f_red_arr:
        rng = np.random.uniform()
        colour_labels.append('R' if rng < f_red else 'B')
        rng_vals.append(rng)
    ## Assigning to DataFrame
    df.loc[:, 'colour_label'] = colour_labels
    df.loc[:, 'rng'] = rng_vals
    # Dropping 'f_red` column
    if drop_fred:
        df.drop('f_red', axis=1, inplace=True)
    return df
# ---- Script driver: paths, survey selection and data catalog load ----
# NOTE(review): `global` statements at module level are no-ops; they only
# have an effect inside a function body.
global survey
global path_to_figures
global gal_group_df_subset
dict_of_paths = cwpaths.cookiecutter_paths()
path_to_raw = dict_of_paths['raw_dir']
path_to_proc = dict_of_paths['proc_dir']
path_to_interim = dict_of_paths['int_dir']
path_to_figures = dict_of_paths['plot_dir']
path_to_external = dict_of_paths['ext_dir']
path_to_data = dict_of_paths['data_dir']
machine = 'mac'
mf_type = 'smf'
survey = 'eco'
# Pick catalog file (and mock directory) for the chosen survey
if survey == 'eco':
    # catl_file = path_to_raw + "eco/eco_all.csv"
    ## New catalog with group finder run on subset after applying M* and cz cuts
    # catl_file = path_to_proc + "gal_group_eco_data.hdf5"
    catl_file = path_to_proc + "gal_group_eco_data_vol_update.hdf5"
    path_to_mocks = path_to_data + 'mocks/m200b/eco/'
elif survey == 'resolvea' or survey == 'resolveb':
    catl_file = path_to_raw + "RESOLVE_liveJune2018.csv"
catl, volume, z_median = read_data_catl(catl_file, survey)
catl = assign_colour_label_data(catl)
# Spread in velocity dispersion measured from the data catalog
std_red, centers_red, std_blue, centers_blue = get_deltav_sigma_data(catl)
# err_total_data, err_colour_data = \
#     get_err_data(survey, path_to_mocks)
# Convert all masses from h=0.7 to h=1 units
catl.logmstar = np.log10((10**catl.logmstar) / 2.041)
catl.M_group = np.log10((10**catl.M_group) / 2.041)
catl.logmh_s = np.log10((10**catl.logmh_s) / 2.041)
catl.logmh = np.log10((10**catl.logmh) / 2.041)
## Unnecessary for new data since cut applied before group finding
if survey == 'eco' or survey == 'resolvea':
    catl = catl.loc[catl.logmstar >= np.log10((10**8.9)/2.041)]
elif survey == 'resolveb':
    catl = catl.loc[catl.logmstar >= np.log10((10**8.7)/2.041)]
### USE IF NEW DATA
red_subset_grpids = np.unique(catl.groupid.loc[(catl.\
    colour_label == 'R') & (catl.g_galtype == 1)].values)
blue_subset_grpids = np.unique(catl.groupid.loc[(catl.\
    colour_label == 'B') & (catl.g_galtype == 1)].values)
### USE IF OLD DATA
# NOTE(review): this unconditionally overwrites the "new data" group IDs
# above and requires the old-catalog columns `grp`/`fc` -- confirm which
# of the two sections should be active for the catalog being read.
red_subset_grpids = np.unique(catl.grp.loc[(catl.\
    colour_label == 'R') & (catl.fc == 1)].values)
blue_subset_grpids = np.unique(catl.grp.loc[(catl.\
    colour_label == 'B') & (catl.fc == 1)].values)
# Calculating spread in velocity dispersion for galaxies in groups with a
# red central
### USE IF OLD DATA
# NOTE(review): this loop uses old-catalog columns (`grp`, `fc`); it will
# fail if the catalog only carries `groupid`/`g_galtype`.
red_deltav_arr = []
red_cen_stellar_mass_arr = []
grpid_arr = []
red_cen_cz_arr = []
red_mean_cz_arr = []
red_grp_halo_mass_arr = []
for key in red_subset_grpids:
    group = catl.loc[catl.grp == key]
    grp_halo_mass = np.unique(group.logmh.values)[0]
    cen_stellar_mass = group.logmstar.loc[group.fc.\
        values == 1].values[0]
    # Different velocity definitions
    mean_cz_grp = np.round(np.mean(group.cz.values),2)
    cen_cz_grp = group.cz.loc[group.fc == 1].values[0]
    cz_grp = np.unique(group.grpcz.values)[0]
    # Velocity difference
    deltav = group.cz.values - len(group)*[cen_cz_grp]
    # red_cen_stellar_mass_arr.append(cen_stellar_mass)
    red_grp_halo_mass_arr.append(grp_halo_mass)
    red_cen_cz_arr.append(cen_cz_grp)
    red_mean_cz_arr.append(mean_cz_grp)
    for val in deltav:
        red_deltav_arr.append(val)
        red_cen_stellar_mass_arr.append(cen_stellar_mass)
        grpid_arr.append(key)
    # if len(group) > 5:
    #     break
### USE IF NEW DATA
# NOTE(review): this second loop resets and rebuilds the same arrays using
# new-catalog columns (`groupid`, `g_galtype`), skipping single-member
# groups -- only one of the two sections should normally run.
red_singleton_counter = 0
red_deltav_arr = []
red_cen_stellar_mass_arr = []
red_grpid_arr = []
red_cen_cz_arr = []
red_mean_cz_arr = []
red_grp_halo_mass_arr = []
for key in red_subset_grpids:
    group = catl.loc[catl.groupid == key]
    if len(group) == 1:
        red_singleton_counter += 1
    else:
        grp_halo_mass = np.unique(group.logmh.values)[0]
        cen_stellar_mass = group.logmstar.loc[group.g_galtype.\
            values == 1].values[0]
        # Different velocity definitions
        mean_cz_grp = np.round(np.mean(group.cz.values),2)
        cen_cz_grp = group.cz.loc[group.g_galtype == 1].values[0]
        cz_grp = np.unique(group.grpcz.values)[0]
        # Velocity difference
        deltav = group.cz.values - len(group)*[cen_cz_grp]
        # red_cen_stellar_mass_arr.append(cen_stellar_mass)
        red_grp_halo_mass_arr.append(grp_halo_mass)
        red_cen_cz_arr.append(cen_cz_grp)
        red_mean_cz_arr.append(mean_cz_grp)
        for val in deltav:
            red_deltav_arr.append(val)
            red_cen_stellar_mass_arr.append(cen_stellar_mass)
            red_grpid_arr.append(key)
        # if len(group) > 5:
        #     break
if survey == 'eco' or survey == 'resolvea':
    # TODO : check if this is actually correct for resolve a
    red_stellar_mass_bins = np.linspace(8.6,11.2,6)
    # red_stellar_mass_bins = np.linspace(8.9,11.5,6) # h=0.7
elif survey == 'resolveb':
    red_stellar_mass_bins = np.linspace(8.4,11.0,6)
# Binned spread/halo statistics for red-central groups
std_red = std_func_mod(red_stellar_mass_bins, red_cen_stellar_mass_arr,
    red_deltav_arr)
std_red = np.array(std_red)
mean_std_red = mean_std_func(red_stellar_mass_bins, red_cen_stellar_mass_arr,
    red_deltav_arr, red_grpid_arr)
median_std_red = median_std_func(red_stellar_mass_bins, red_cen_stellar_mass_arr,
    red_deltav_arr, red_grpid_arr)
mean_halo_red = mean_grphalo_func(red_stellar_mass_bins, red_cen_stellar_mass_arr,
    red_grp_halo_mass_arr)
mean_vcirc_red = mean_grphalo_vcirc_func(red_stellar_mass_bins,
    red_cen_stellar_mass_arr, red_grp_halo_mass_arr)
# Calculating spread in velocity dispersion for galaxies in groups with a
# blue central
### USE IF OLD DATA
# NOTE(review): uses old-catalog columns (`grp`, `fc`); see the red-central
# section above for the same caveat.
blue_deltav_arr = []
blue_cen_stellar_mass_arr = []
grpid_arr = []
blue_cen_cz_arr = []
blue_mean_cz_arr = []
blue_grp_halo_mass_arr = []
for key in blue_subset_grpids:
    group = catl.loc[catl.grp == key]
    grp_halo_mass = np.unique(group.logmh.values)[0]
    cen_stellar_mass = group.logmstar.loc[group.fc\
        .values == 1].values[0]
    # Different velocity definitions
    mean_cz_grp = np.round(np.mean(group.cz.values),2)
    cen_cz_grp = group.cz.loc[group.fc == 1].values[0]
    cz_grp = np.unique(group.grpcz.values)[0]
    # Velocity difference
    deltav = group.cz.values - len(group)*[cen_cz_grp]
    # blue_cen_stellar_mass_arr.append(cen_stellar_mass)
    blue_grp_halo_mass_arr.append(grp_halo_mass)
    blue_cen_cz_arr.append(cen_cz_grp)
    blue_mean_cz_arr.append(mean_cz_grp)
    for val in deltav:
        blue_deltav_arr.append(val)
        blue_cen_stellar_mass_arr.append(cen_stellar_mass)
        grpid_arr.append(key)
### USE IF NEW DATA
# NOTE(review): resets and rebuilds the same arrays using new-catalog
# columns, skipping single-member groups.
blue_singleton_counter = 0
blue_deltav_arr = []
blue_cen_stellar_mass_arr = []
blue_grpid_arr = []
blue_cen_cz_arr = []
blue_mean_cz_arr = []
blue_grp_halo_mass_arr = []
for key in blue_subset_grpids:
    group = catl.loc[catl.groupid == key]
    if len(group) == 1:
        blue_singleton_counter += 1
    else:
        grp_halo_mass = np.unique(group.logmh.values)[0]
        cen_stellar_mass = group.logmstar.loc[group.g_galtype\
            .values == 1].values[0]
        # Different velocity definitions
        mean_cz_grp = np.round(np.mean(group.cz.values),2)
        cen_cz_grp = group.cz.loc[group.g_galtype == 1].values[0]
        cz_grp = np.unique(group.grpcz.values)[0]
        # Velocity difference
        deltav = group.cz.values - len(group)*[cen_cz_grp]
        # blue_cen_stellar_mass_arr.append(cen_stellar_mass)
        blue_grp_halo_mass_arr.append(grp_halo_mass)
        blue_cen_cz_arr.append(cen_cz_grp)
        blue_mean_cz_arr.append(mean_cz_grp)
        for val in deltav:
            blue_deltav_arr.append(val)
            blue_cen_stellar_mass_arr.append(cen_stellar_mass)
            blue_grpid_arr.append(key)
if survey == 'eco' or survey == 'resolvea':
    # TODO : check if this is actually correct for resolve a
    blue_stellar_mass_bins = np.linspace(8.6,10.7,6)
    # blue_stellar_mass_bins = np.linspace(8.9,11,6) #h=0.7
elif survey == 'resolveb':
    blue_stellar_mass_bins = np.linspace(8.4,10.4,6)
# Binned spread/halo statistics for blue-central groups
std_blue = std_func_mod(blue_stellar_mass_bins, blue_cen_stellar_mass_arr,
    blue_deltav_arr)
std_blue = np.array(std_blue)
mean_std_blue = mean_std_func(blue_stellar_mass_bins, blue_cen_stellar_mass_arr,
    blue_deltav_arr, blue_grpid_arr)
median_std_blue = median_std_func(blue_stellar_mass_bins, blue_cen_stellar_mass_arr,
    blue_deltav_arr, blue_grpid_arr)
mean_halo_blue = mean_grphalo_func(blue_stellar_mass_bins, blue_cen_stellar_mass_arr,
    blue_grp_halo_mass_arr)
mean_vcirc_blue = mean_grphalo_vcirc_func(blue_stellar_mass_bins,
    blue_cen_stellar_mass_arr, blue_grp_halo_mass_arr)
# centers_red = 0.5 * (result_red[1][1:] + \
#     result_red[1][:-1])
# centers_blue = 0.5 * (result_blue[1][1:] + \
#     result_blue[1][:-1])
# Bin centers for plotting (overwrites the centers returned earlier by
# get_deltav_sigma_data; the bin edges are identical)
centers_red = 0.5 * (red_stellar_mass_bins[1:] + \
    red_stellar_mass_bins[:-1])
# last_red_bin = centers_red[-1] + (centers_red[-1] - centers_red[-2])
# centers_red = np.insert(centers_red, len(centers_red), last_red_bin)
centers_blue = 0.5 * (blue_stellar_mass_bins[1:] + \
    blue_stellar_mass_bins[:-1])
# last_blue_bin = centers_blue[-1] + (centers_blue[-1] - centers_blue[-2])
# centers_blue = np.insert(centers_blue, len(centers_blue), last_blue_bin)
# Figure 1: spread in velocity difference vs central stellar mass
fig1 = plt.figure()
# plt.errorbar(centers_red,std_red,yerr=err_colour_data[10:15],
#     color='darkred',fmt='p-',ecolor='darkred',markersize=10,capsize=10,
#     capthick=1.0,zorder=10)
# plt.errorbar(centers_blue,std_blue,yerr=err_colour_data[15:20],
#     color='darkblue',fmt='p-',ecolor='darkblue',markersize=10,capsize=10,
#     capthick=1.0,zorder=10)
plt.scatter(centers_red,std_red,color='darkred',s=350,marker='p')
plt.scatter(centers_blue,std_blue,color='darkblue',s=350,marker='p')
plt.xlabel(r'\boldmath$\log_{10}\ M_{\star , cen} \left[\mathrm{M_\odot}\, \mathrm{h}^{-1} \right]$', fontsize=30)
plt.ylabel(r'\boldmath$\sigma \left[\mathrm{km/s} \right]$', fontsize=30)
# plt.title('Spread in velocity difference from group cz as included in catalog')
plt.title('Spread in velocity difference from central cz of group')
# plt.title('Spread in velocity difference from mean cz of group')
plt.show()
# Figure 2: mean of per-group spreads vs central stellar mass
fig2 = plt.figure()
plt.scatter(centers_red,mean_std_red,color='darkred',s=350, marker='p')
plt.scatter(centers_blue,mean_std_blue,color='darkblue',s=350, marker='p')
plt.xlabel(r'\boldmath$\log_{10}\ M_{\star , cen} \left[\mathrm{M_\odot}\, \mathrm{h}^{-1} \right]$', fontsize=30)
plt.ylabel(r'\boldmath$\bar{\sigma} \left[\mathrm{km/s} \right]$', fontsize=30)
plt.title('Mean of spread in velocity difference from central cz of group')
# plt.title('Mean of spread in velocity difference from mean cz of group')
plt.show()
# Plot of comparison between using mean of all satellite velocities to measure
# velocity difference and using central velocity
fig3, ax3 = plt.subplots()
ax4 = ax3.twinx()
ax4.set_ylabel(r'Mean cz (red) [km/s]')
imshowax4 = ax4.scatter(red_cen_cz_arr, red_mean_cz_arr, c=red_grp_halo_mass_arr, cmap='Reds', s=50)
# ax4.set_ylim(ax4.get_ylim()[::-1])
plt.gca().invert_yaxis()
imshowax3 = ax3.scatter(blue_cen_cz_arr, blue_mean_cz_arr, c=blue_grp_halo_mass_arr, cmap='Blues', s=50)
# One-to-one reference line on both axes
ax3.plot(np.linspace(2530, 7470, 20), np.linspace(2530, 7470, 20), '--k')
ax4.plot(np.linspace(2530, 7470, 20), np.linspace(2530, 7470, 20), '--k')
cbb = plt.colorbar(mappable=imshowax3, shrink=0.5, pad=-0.1)
cbr = plt.colorbar(mappable=imshowax4, shrink=0.5, pad=0.15)
cbb.set_label(r'Group halo mass $[{M_\odot}/h]$', rotation=90, labelpad=20)
ax3.set_xlabel(r'Central cz [km/s]')
ax3.set_ylabel(r'Mean cz (blue) [km/s]')
plt.title('Comparison of central cz and mean cz values of groups')
plt.show()
# Figure 4: mean group halo mass per bin of central stellar mass
fig4 = plt.figure()
plt.scatter(centers_red, mean_halo_red, color='darkred',s=350,marker='p')
plt.scatter(centers_blue,mean_halo_blue,color='darkblue',s=350,marker='p')
plt.xlabel('Group central M* [Msun/h]')
plt.ylabel('Mean group halo mass [Msun/h]')
plt.title(r'Group halo mass (HAM with $M_{r}$) vs group central stellar mass')
plt.show()
# Figure 5: mean group halo circular velocity per bin
fig5 = plt.figure()
plt.scatter(centers_red, mean_vcirc_red, color='darkred',s=350, marker='p')
plt.scatter(centers_blue,mean_vcirc_blue,color='darkblue',s=350, marker='p')
plt.xlabel('Group central M* [Msun/h]')
plt.ylabel('Mean group halo circular velocity [km/s]')
plt.title(r'Group halo cirvular velocity vs group central stellar mass')
plt.show()
# Plot of comparison between using spread in velocity across all groups per bin
# and mean of spreads per group per bin
fig6 = plt.figure()
plt.scatter(std_red, mean_std_red, c='indianred', s=50)
plt.scatter(std_blue, mean_std_blue, c='cornflowerblue', s=50)
plt.plot(np.linspace(0, 350, 20), np.linspace(0, 350, 20), '--k')
plt.xlabel('Sigma')
plt.ylabel('Mean sigma')
plt.show()
################################################################################
############################### SIMULATION DATA ################################
################################################################################
def read_chi2(path_to_file):
    """
    Read chi-squared values from file.

    Parameters
    ----------
    path_to_file: string
        Path to chi-squared values file (one value per line, no header)

    Returns
    ---------
    chi2: array
        Array of chi^2 values, one per chain sample
    """
    chi2 = pd.read_csv(path_to_file, header=None,
                       names=['chisquared']).chisquared.values
    return chi2
def read_mcmc(path_to_file):
    """
    Read an MCMC chain from file.

    Parameters
    ----------
    path_to_file: string
        Path to mcmc chain file (whitespace-delimited, no header)

    Returns
    ---------
    emcee_table: pandas dataframe
        Dataframe of mcmc chain values with '#'-marker rows removed and
        all parameter columns cast to float64
    """
    colnames = ['mstar_q', 'mh_q', 'mu', 'nu']
    emcee_table = pd.read_csv(path_to_file, names=colnames,
        delim_whitespace=True, header=None)
    # Drop rows whose first column is a '#' marker (comment/restart lines
    # written into the chain file)
    emcee_table = emcee_table[emcee_table.mstar_q.values != '#']
    # Cast all four parameter columns in one pass instead of repeating
    # column-by-column astype calls
    emcee_table = emcee_table.astype(np.float64)
    return emcee_table
def read_mock_catl(filename, catl_format='.hdf5'):
    """
    Read an ECO/RESOLVE galaxy/group catalogue.

    Parameters
    ----------
    filename: string
        path and name of the ECO/RESOLVE catalogue to read
    catl_format: string, optional (default = '.hdf5')
        type of file to read; only '.hdf5' is supported

    Returns
    -------
    mock_pd: pandas DataFrame
        DataFrame with galaxy/group information

    Raises
    ------
    ValueError
        If `filename` does not exist or `catl_format` is unsupported
    """
    # Fail fast if the catalogue is missing
    if not os.path.exists(filename):
        raise ValueError('`filename`: {0} NOT FOUND! Exiting..'.format(filename))
    if catl_format != '.hdf5':
        raise ValueError('`catl_format` ({0}) not supported! Exiting...'.format(catl_format))
    return pd.read_hdf(filename)
def get_paramvals_percentile(mcmc_table, pctl, chi2, randints_df):
    """
    Isolate the lowest-chi^2 percentile of the chain and draw a random
    sample of 100 rows from it.

    Parameters
    ----------
    mcmc_table: pandas dataframe
        Mcmc chain dataframe
    pctl: int
        Percentile to use
    chi2: array
        Array of chi^2 values
    randints_df: pandas dataframe
        Mock numbers associated with each chain row

    Returns
    ---------
    mcmc_table_pctl: pandas dataframe
        Random sample of 100 rows from the lowest-chi^2 percentile
    bf_params: array
        Parameters of the minimum-chi^2 row
    bf_chi2: float
        Minimum chi^2 value
    bf_randint: int
        Mock number of the minimum-chi^2 row
    """
    frac = pctl / 100
    mcmc_table['chi2'] = chi2
    mcmc_table['mock_num'] = randints_df.mock_num.values.astype(int)
    mcmc_table = mcmc_table.sort_values('chi2').reset_index(drop=True)
    mcmc_table_pctl = mcmc_table[:int(frac * len(mcmc_table))]
    # Best fit params are the parameters that correspond to the smallest chi2
    best_row = mcmc_table_pctl.drop_duplicates().reset_index(drop=True).values[0]
    bf_params = best_row[:4]
    bf_chi2 = best_row[4]
    bf_randint = best_row[5].astype(int)
    # Randomly sample 100 lowest chi2 rows
    mcmc_table_pctl = mcmc_table_pctl.drop_duplicates().sample(100)
    return mcmc_table_pctl, bf_params, bf_chi2, bf_randint
def assign_cen_sat_flag(gals_df):
    """
    Assign a central (1) / satellite (0) flag to each galaxy.

    A galaxy is flagged as a central when its `halo_hostid` equals its
    `halo_id`, i.e. it sits in its own host halo.

    Parameters
    ----------
    gals_df: pandas dataframe
        Mock catalog

    Returns
    ---------
    gals_df: pandas dataframe
        Mock catalog with centrals/satellites flag as new column `cs_flag`
    """
    # Vectorized comparison replaces the original per-row Python loop,
    # which was O(n) pandas lookups and assumed a default RangeIndex.
    gals_df['cs_flag'] = np.where(
        gals_df['halo_hostid'].values == gals_df['halo_id'].values, 1, 0)
    return gals_df
def get_host_halo_mock(gals_df, mock):
    """
    Split host halo masses into centrals and satellites.

    Parameters
    ----------
    gals_df: pandas dataframe
        Mock catalog
    mock: string
        'vishnu' uses the linear `halo_mvir` column; any other value
        converts the `loghalom` column out of log space

    Returns
    ---------
    cen_halos: array
        Array of central host halo masses
    sat_halos: array
        Array of satellite host halo masses
    """
    df = gals_df.copy()
    if mock == 'vishnu':
        masses = df.halo_mvir.values
    else:
        masses = 10 ** df.loghalom.values
    cen_halos = []
    sat_halos = []
    # cs_flag == 1 marks a central; anything else is a satellite
    for flag, mass in zip(df.cs_flag.values, masses):
        if flag == 1:
            cen_halos.append(mass)
        else:
            sat_halos.append(mass)
    return np.array(cen_halos), np.array(sat_halos)
def get_stellar_mock(gals_df, mock, randint=None):
    """
    Split stellar masses into centrals and satellites.

    Parameters
    ----------
    gals_df: pandas dataframe
        Mock catalog
    mock: string
        'vishnu' reads log masses from the column named after `randint`;
        any other value reads `logmstar` and rescales by 1/2.041
    randint: int, optional
        Column identifier for vishnu stellar masses

    Returns
    ---------
    cen_gals: array
        Array of central stellar masses
    sat_gals: array
        Array of satellite stellar masses
    """
    df = gals_df.copy()
    if mock == 'vishnu':
        # These column values are log masses
        masses = 10 ** df['{0}'.format(randint)].values
    else:
        # Fixed 1/2.041 rescaling applied to non-vishnu masses
        masses = (10 ** df.logmstar.values) / 2.041
    cen_gals = []
    sat_gals = []
    # Flags other than 0/1 are deliberately skipped, as in the original
    for flag, mass in zip(df.cs_flag.values, masses):
        if flag == 1:
            cen_gals.append(mass)
        elif flag == 0:
            sat_gals.append(mass)
    return np.array(cen_gals), np.array(sat_gals)
def hybrid_quenching_model(theta, gals_df, mock, randint=None):
    """
    Apply the hybrid quenching model from Zu and Mandelbaum 2015.

    Parameters
    ----------
    theta: array-like
        Model parameters [Mstar_q, Mh_q, mu, nu]; masses in log Msun/h
    gals_df: pandas dataframe
        Mock catalog
    mock: string
        Mock type, forwarded to the mass getters
    randint: int, optional
        Column identifier for vishnu stellar masses

    Returns
    ---------
    f_red_cen: array
        Array of central red fractions
    f_red_sat: array
        Array of satellite red fractions
    """
    Mstar_q, Mh_q, mu, nu = theta[0], theta[1], theta[2], theta[3]
    cen_hosthalo_mass_arr, sat_hosthalo_mass_arr = get_host_halo_mock(
        gals_df, mock)
    cen_stellar_mass_arr, sat_stellar_mass_arr = get_stellar_mock(
        gals_df, mock, randint)
    # Central red fraction depends on stellar mass only
    f_red_cen = 1 - np.exp(-(cen_stellar_mass_arr / 10 ** Mstar_q) ** mu)
    # Satellite red fraction combines stellar- and host-halo-mass terms
    g_Mstar = np.exp(-(sat_stellar_mass_arr / 10 ** Mstar_q) ** mu)
    h_Mh = np.exp(-(sat_hosthalo_mass_arr / 10 ** Mh_q) ** nu)
    f_red_sat = 1 - g_Mstar * h_Mh
    return f_red_cen, f_red_sat
def assign_colour_label_mock(f_red_cen, f_red_sat, gals_df, drop_fred=False):
    """
    Assign a colour label ('R'/'B') to each galaxy in a mock catalog.

    Parameters
    ----------
    f_red_cen: array
        Array of central red fractions
    f_red_sat: array
        Array of satellite red fractions
    gals_df: pandas Dataframe
        Mock catalog
    drop_fred: boolean
        Whether or not to keep red fraction column after colour has been
        assigned

    Returns
    ---------
    df: pandas Dataframe
        Dataframe with colour label and random number assigned as
        new columns
    """
    df = gals_df.copy()
    # Attach red fractions: centrals get f_red_cen, satellites f_red_sat
    df.loc[:, 'f_red'] = np.zeros(len(df))
    df.loc[df['cs_flag'] == 1, 'f_red'] = f_red_cen
    df.loc[df['cs_flag'] == 0, 'f_red'] = f_red_sat
    f_red_arr = df['f_red'].values
    colour_labels = []
    rng_draws = []
    # One uniform draw per galaxy (same draw order as before); the galaxy
    # is red when the draw falls below its red fraction
    for f_red in f_red_arr:
        draw = np.random.uniform()
        colour_labels.append('B' if draw >= f_red else 'R')
        rng_draws.append(draw)
    df.loc[:, 'colour_label'] = colour_labels
    df.loc[:, 'rng'] = rng_draws
    if drop_fred:
        df.drop('f_red', axis=1, inplace=True)
    return df
def mean_std_func(bins, mass_arr, vel_arr, groupid_arr):
    """
    Per mass bin, compute the mean over groups of each group's velocity
    spread, where the spread is the rms of `vel_arr` about zero (not
    about the group mean).
    """
    bin_idxs = np.digitize(mass_arr, bins)
    # Fold galaxies that land beyond the last bin edge into the last bin
    # (digitize index 6 -> 5)
    bin_idxs[bin_idxs == 6] = 5
    vel_np = np.array(vel_arr)
    grp_np = np.array(groupid_arr)
    mean_std_arr = []
    len_std_arr = []
    for bin_num in range(1, len(bins)):
        members = np.argwhere(bin_idxs == bin_num)
        bin_df = pd.DataFrame(data={'group_id': grp_np[members].flatten(),
                                    'deltav': vel_np[members].flatten()})
        std_arr = []
        # groupby iterates keys in sorted order, matching the original
        # groups.groups.keys() traversal
        for _, grp in bin_df.groupby('group_id'):
            # rms about zero by construction (mean fixed at 0)
            std_arr.append(np.sqrt(np.mean(grp.deltav.values ** 2)))
        len_std_arr.append(len(std_arr))
        mean_std_arr.append(np.mean(std_arr))
    return mean_std_arr
def std_func_mod(bins, mass_arr, vel_arr):
    """
    Per mass bin, rms of `vel_arr` about zero (not about the bin mean).
    Empty bins yield NaN.
    """
    bin_idxs = np.digitize(mass_arr, bins)
    # Fold galaxies that land beyond the last bin edge into the last bin
    # (digitize index 6 -> 5)
    bin_idxs[bin_idxs == 6] = 5
    vel_np = np.array(vel_arr)
    std_arr = []
    for bin_num in range(1, len(bins)):
        members = vel_np[np.argwhere(bin_idxs == bin_num)]
        # deviation measured from zero by construction (mean fixed at 0)
        std_arr.append(np.sqrt(np.mean(members ** 2)))
    return std_arr
def mean_halo_func(bins, logmstar_arr, loghalom_arr):
    """
    Mean log halo mass per stellar-mass bin. Empty bins yield NaN.
    """
    bin_idxs = np.digitize(logmstar_arr, bins)
    # Fold galaxies that land beyond the last bin edge into the last bin
    # (digitize index 6 -> 5)
    bin_idxs[bin_idxs == 6] = 5
    halom_np = np.array(loghalom_arr)
    mean_halomass_arr = []
    for bin_num in range(1, len(bins)):
        members = halom_np[np.argwhere(bin_idxs == bin_num)].flatten()
        mean_halomass_arr.append(np.mean(members))
    return mean_halomass_arr
def mean_halo_vcirc_func(bins, logmstar_arr, loghalom_arr, halor_arr):
    """
    Mean halo circular velocity per stellar-mass bin.

    The circular velocity of each halo is v = sqrt(G * M / R_vir) with
    M = 10**loghalom and R_vir taken from `halor_arr` (radius in Mpc,
    giving v in km/s with G in Mpc Msun^-1 (km/s)^2). Empty bins yield
    NaN.

    Parameters
    ----------
    bins: array
        Stellar-mass bin edges (6 edges -> 5 bins)
    logmstar_arr: array-like
        Log stellar masses used for binning
    loghalom_arr: array-like
        Log halo masses
    halor_arr: array-like
        Halo virial radii

    Returns
    ---------
    mean_vcirc_arr: list
        Mean circular velocity per bin
    """
    # Constants hoisted out of the loop; the original redefined them every
    # iteration and also carried three unused ones (delta_mean, omega_m,
    # rho_crit) that only served a commented-out radius formula.
    G = 4.3 * 10 ** -9  # Mpc . Msun^-1 . (km/s)^2
    bin_idxs = np.digitize(logmstar_arr, bins)
    # Fold galaxies that land beyond the last bin edge into the last bin
    # (digitize index 6 -> 5)
    bin_idxs[bin_idxs == 6] = 5
    halom_np = np.array(loghalom_arr)
    rvir_np = np.array(halor_arr)
    mean_vcirc_arr = []
    for bin_num in range(1, len(bins)):
        members = np.argwhere(bin_idxs == bin_num)
        masses = halom_np[members].flatten()
        rvirs = rvir_np[members].flatten()
        halo_vcirc = np.sqrt((G * (10 ** masses)) / rvirs)
        mean_vcirc_arr.append(np.mean(halo_vcirc))
    return mean_vcirc_arr
# --- Simulation-data section: configuration and input files -----------------
# NOTE(review): `global` statements at module top level are no-ops —
# presumably leftovers from when this section lived inside a function.
global survey
global path_to_figures
global gal_group_df_subset
# Resolve the project's standard directory layout
dict_of_paths = cwpaths.cookiecutter_paths()
path_to_raw = dict_of_paths['raw_dir']
path_to_proc = dict_of_paths['proc_dir']
path_to_interim = dict_of_paths['int_dir']
path_to_figures = dict_of_paths['plot_dir']
path_to_external = dict_of_paths['ext_dir']
path_to_data = dict_of_paths['data_dir']
machine = 'mac'   # 'bender' or 'mac'; selects the halo catalog location
mf_type = 'smf'   # mass-function type
survey = 'eco'    # 'eco', 'resolvea' or 'resolveb'
nproc = 2
if machine == 'bender':
    halo_catalog = '/home/asadm2/.astropy/cache/halotools/halo_catalogs/'\
        'vishnu/rockstar/vishnu_rockstar_test.hdf5'
elif machine == 'mac':
    halo_catalog = path_to_raw + 'vishnu_rockstar_test.hdf5'
# Outputs of the colour MCMC run 17: chi2 values, raw chain, mock numbers
chi2_file = path_to_proc + 'smhm_colour_run17/{0}_colour_chi2.txt'.\
    format(survey)
chain_file = path_to_proc + 'smhm_colour_run17/mcmc_{0}_colour_raw.txt'.\
    format(survey)
randint_file = path_to_proc + 'smhm_colour_run17/{0}_colour_mocknum.txt'.\
    format(survey)
if survey == 'eco':
    # catl_file = path_to_raw + "eco/eco_all.csv"
    ## New catalog with group finder run on subset after applying M* and cz cuts
    catl_file = path_to_proc + "gal_group_eco_data.hdf5"
    path_to_mocks = path_to_data + 'mocks/m200b/eco/'
elif survey == 'resolvea' or survey == 'resolveb':
    catl_file = path_to_raw + "RESOLVE_liveJune2018.csv"
print('Reading files')
chi2 = read_chi2(chi2_file)
mcmc_table = read_mcmc(chain_file)
mock_nums_df = pd.read_csv(randint_file, header=None, names=['mock_num'],
    dtype=int)
gal_group_df = read_mock_catl(path_to_proc + "gal_group.hdf5")
print('Getting data in specific percentile')
# Best-fit parameters, chi2, and the mock number of the best-fit chain row
mcmc_table_pctl, bf_params, bf_chi2, bf_randint = \
    get_paramvals_percentile(mcmc_table, 68, chi2, mock_nums_df)
## Use only the mocks that are in the random sample of 100
# Count the first 20 + 22nd + 123-131 columns of general information from
# mock catalog (halo + rsd)
idx_arr = np.insert(np.linspace(0,20,21), len(np.linspace(0,20,21)), (22, 123,
    124, 125, 126, 127, 128, 129, 130, 131)).astype(int)
names_arr = [x for x in gal_group_df.columns.values[idx_arr]]
# For every picked mock, keep its stellar mass ('<n>_y'), group id and
# group galaxy-type columns
for idx in mcmc_table_pctl.mock_num.unique():
    names_arr.append('{0}_y'.format(idx))
    names_arr.append('groupid_{0}'.format(idx))
    names_arr.append('g_galtype_{0}'.format(idx))
names_arr = np.array(names_arr)
gal_group_df_subset = gal_group_df[names_arr]
# Renaming the "1_y" column kept from line 1896 because of case where it was
# also in mcmc_table_ptcl.mock_num and was selected twice
gal_group_df_subset.columns.values[30] = "behroozi_bf"
# Strip the '_y' suffix so each mock's stellar-mass column is just '<n>'
for idx in mcmc_table_pctl.mock_num.unique():
    gal_group_df_subset = gal_group_df_subset.rename(columns=\
        {'{0}_y'.format(idx):'{0}'.format(idx)})
# Work only with the best-fit mock's columns from here on
cols_to_use = ['halo_hostid', 'halo_id', 'halo_mvir', 'halo_macc', 'halo_rvir',
    'cz', \
    '{0}'.format(bf_randint), \
    'g_galtype_{0}'.format(bf_randint), \
    'groupid_{0}'.format(bf_randint)]
gals_df = gal_group_df_subset[cols_to_use]
gals_df = gals_df.dropna(subset=['g_galtype_{0}'.\
    format(bf_randint),'groupid_{0}'.format(bf_randint)]).\
    reset_index(drop=True)
gals_df = assign_cen_sat_flag(gals_df)
# Red fractions from the hybrid quenching model, then stochastic colours
f_red_cen, f_red_sat = hybrid_quenching_model(bf_params, gals_df,
    'vishnu', bf_randint)
gals_df = assign_colour_label_mock(f_red_cen, f_red_sat, gals_df)
grpid_col = 'groupid_{0}'.format(bf_randint)
galtype_col = 'g_galtype_{0}'.format(bf_randint)
logmstar_col = '{0}'.format(bf_randint)
# NOTE(review): despite the "grpid" names these are halo_id values of
# red/blue centrals; groups below are built from halo membership.
red_subset_grpids = np.unique(gals_df.halo_id.loc[(gals_df.\
    colour_label == 'R') & (gals_df.cs_flag == 1)].values)
blue_subset_grpids = np.unique(gals_df.halo_id.loc[(gals_df.\
    colour_label == 'B') & (gals_df.cs_flag == 1)].values)
# --- Red halos: collect per-halo velocity and mass statistics ---------------
red_singleton_counter = 0        # halos with a single member are skipped
red_deltav_arr = []              # per-galaxy cz offsets from the central
red_cen_stellar_mass_arr = []    # one entry per (non-singleton) halo
red_grpid_arr = []               # halo id repeated once per member galaxy
red_cen_cz_arr = []
red_mean_cz_arr = []
red_halo_mass_arr = []
red_halo_rvir_arr = []
red_host_halo_mass_arr = []      # member halo_macc arrays, one per halo
red_host_halo_rvir_arr = []
for key in red_subset_grpids:
    group = gals_df.loc[gals_df.halo_hostid == key]
    if len(group) == 1:
        red_singleton_counter += 1
    else:
        # host_halo_mass = group.halo_mvir.loc[group.cs_flag.\
        #     values == 1].values[0]
        halo_macc = group.halo_macc.values
        host_halo_rvir = group.halo_rvir.loc[group.cs_flag.\
            values == 1].values[0]
        # halo_rvir = group.halo_rvir.values
        cen_stellar_mass = group[logmstar_col].loc[group.cs_flag.\
            values == 1].values[0]
        # Different velocity definitions
        mean_cz_grp = np.round(np.mean(group.cz.values),2)
        cen_cz_grp = group.cz.loc[group.cs_flag == 1].values[0]
        # Velocity difference of each member relative to the central
        deltav = group.cz.values - len(group)*[cen_cz_grp]
        red_cen_stellar_mass_arr.append(cen_stellar_mass)
        red_host_halo_mass_arr.append(halo_macc)
        red_host_halo_rvir_arr.append(host_halo_rvir)
        red_cen_cz_arr.append(cen_cz_grp)
        red_mean_cz_arr.append(mean_cz_grp)
        for idx, val in enumerate(deltav):
            red_deltav_arr.append(val)
            # red_halo_mass_arr.append(halo_macc[idx])
            # red_halo_rvir_arr.append(halo_rvir[idx])
            # red_cen_stellar_mass_arr.append(cen_stellar_mass)
            red_grpid_arr.append(key)
        # if len(group) > 5:
        #     break
# Stellar-mass bin edges for the red sample (6 edges -> 5 bins)
if survey == 'eco' or survey == 'resolvea':
    # TODO : check if this is actually correct for resolve a
    red_stellar_mass_bins = np.linspace(8.6,11.2,6)
    # red_stellar_mass_bins = np.linspace(8.6,11.5,6)
elif survey == 'resolveb':
    red_stellar_mass_bins = np.linspace(8.4,11.0,6)
# --- Blue halos: same statistics as the red loop above ----------------------
blue_singleton_counter = 0        # halos with a single member are skipped
blue_deltav_arr = []              # per-galaxy cz offsets from the central
blue_cen_stellar_mass_arr = []    # one entry per (non-singleton) halo
blue_grpid_arr = []               # halo id repeated once per member galaxy
blue_cen_cz_arr = []
blue_mean_cz_arr = []
blue_halo_mass_arr = []
blue_halo_rvir_arr = []
blue_host_halo_mass_arr = []      # member halo_macc arrays, one per halo
blue_host_halo_rvir_arr = []
for key in blue_subset_grpids:
    group = gals_df.loc[gals_df.halo_hostid == key]
    if len(group) == 1:
        blue_singleton_counter += 1
    else:
        # host_halo_mass = group.halo_mvir.loc[group.cs_flag.\
        #     values == 1].values[0]
        halo_macc = group.halo_macc.values
        host_halo_rvir = group.halo_rvir.loc[group.cs_flag.\
            values == 1].values[0]
        # halo_rvir = group.halo_rvir.values
        cen_stellar_mass = group[logmstar_col].loc[group.cs_flag.\
            values == 1].values[0]
        # Different velocity definitions
        mean_cz_grp = np.round(np.mean(group.cz.values),2)
        cen_cz_grp = group.cz.loc[group.cs_flag == 1].values[0]
        # Velocity difference of each member relative to the central
        deltav = group.cz.values - len(group)*[cen_cz_grp]
        blue_cen_stellar_mass_arr.append(cen_stellar_mass)
        blue_host_halo_mass_arr.append(halo_macc)
        blue_host_halo_rvir_arr.append(host_halo_rvir)
        blue_cen_cz_arr.append(cen_cz_grp)
        blue_mean_cz_arr.append(mean_cz_grp)
        for idx, val in enumerate(deltav):
            blue_deltav_arr.append(val)
            # blue_halo_mass_arr.append(halo_macc[idx])
            # blue_halo_rvir_arr.append(halo_rvir[idx])
            # blue_cen_stellar_mass_arr.append(cen_stellar_mass)
            blue_grpid_arr.append(key)
# Stellar-mass bin edges for the blue sample (6 edges -> 5 bins)
if survey == 'eco' or survey == 'resolvea':
    # TODO : check if this is actually correct for resolve a
    blue_stellar_mass_bins = np.linspace(8.6,10.7,6)
    # blue_stellar_mass_bins = np.linspace(8.9,11,6)
elif survey == 'resolveb':
    blue_stellar_mass_bins = np.linspace(8.4,10.4,6)
# Velocity spread per bin, two ways: (1) rms over all members in the bin
# (std_func_mod) and (2) mean over halos of per-halo rms (mean_std_func)
std_red = std_func_mod(red_stellar_mass_bins, red_cen_stellar_mass_arr,
    red_deltav_arr)
std_red = np.array(std_red)
mean_std_red = mean_std_func(red_stellar_mass_bins, red_cen_stellar_mass_arr,
    red_deltav_arr, red_grpid_arr)
std_blue = std_func_mod(blue_stellar_mass_bins, blue_cen_stellar_mass_arr,
    blue_deltav_arr)
std_blue = np.array(std_blue)
mean_std_blue = mean_std_func(blue_stellar_mass_bins, blue_cen_stellar_mass_arr,
    blue_deltav_arr, blue_grpid_arr)
# NOTE(review): red/blue_host_halo_mass_arr hold one *array* of member
# halo_macc values per halo (appended whole in the loops above), so these
# lists are ragged — np.log10 over a ragged list may fail or upcast to
# object on recent numpy; confirm the intended shape upstream.
mean_halo_red = mean_halo_func(red_stellar_mass_bins, red_cen_stellar_mass_arr,
    np.log10(red_host_halo_mass_arr))
mean_vcirc_red = mean_halo_vcirc_func(red_stellar_mass_bins,
    red_cen_stellar_mass_arr, np.log10(red_host_halo_mass_arr),
    red_host_halo_rvir_arr)
mean_halo_blue = mean_halo_func(blue_stellar_mass_bins, blue_cen_stellar_mass_arr,
    np.log10(blue_host_halo_mass_arr))
mean_vcirc_blue = mean_halo_vcirc_func(blue_stellar_mass_bins,
    blue_cen_stellar_mass_arr, np.log10(blue_host_halo_mass_arr),
    blue_host_halo_rvir_arr)
# Bin centers for plotting
centers_red = 0.5 * (red_stellar_mass_bins[1:] + \
    red_stellar_mass_bins[:-1])
centers_blue = 0.5 * (blue_stellar_mass_bins[1:] + \
    blue_stellar_mass_bins[:-1])
# Figures 7-10: spreads, mean spreads, mean halo mass, mean vcirc
fig7 = plt.figure()
plt.scatter(centers_red,std_red,color='darkred',s=350,marker='p')
plt.scatter(centers_blue,std_blue,color='darkblue',s=350,marker='p')
plt.xlabel(r'\boldmath$\log_{10}\ M_{\star , cen} \left[\mathrm{M_\odot}\, \mathrm{h}^{-1} \right]$', fontsize=30)
plt.ylabel(r'\boldmath$\sigma \left[\mathrm{km/s} \right]$', fontsize=30)
plt.title('Spread in velocity difference from halo central cz')
plt.show()
fig8 = plt.figure()
plt.scatter(centers_red,mean_std_red,color='darkred',s=350, marker='p')
plt.scatter(centers_blue,mean_std_blue,color='darkblue',s=350, marker='p')
plt.xlabel(r'\boldmath$\log_{10}\ M_{\star , cen} \left[\mathrm{M_\odot}\, \mathrm{h}^{-1} \right]$', fontsize=30)
plt.ylabel(r'\boldmath$\sigma \left[\mathrm{km/s} \right]$', fontsize=30)
plt.title('Mean of spread in velocity difference from halo central cz')
plt.show()
fig9 = plt.figure()
plt.scatter(centers_red, mean_halo_red, color='darkred',s=350,marker='p')
plt.scatter(centers_blue,mean_halo_blue,color='darkblue',s=350,marker='p')
plt.xlabel('Halo central M* [Msun/h]')
plt.ylabel('Mean halo mass [Msun/h]')
plt.show()
fig10 = plt.figure()
plt.scatter(centers_red, mean_vcirc_red, color='darkred',s=350, marker='p')
plt.scatter(centers_blue,mean_vcirc_blue,color='darkblue',s=350, marker='p')
plt.xlabel('Halo central M* [Msun/h]')
plt.ylabel('Mean halo circular velocity [km/s]')
plt.show()
################################################################################
#! Comparing distribution of Behroozi parameters before and after adding second
#! observable
################################################################################
from cosmo_utils.utils import work_paths as cwpaths
import matplotlib.pyplot as plt
import numpy as np
import pandas as pd
def read_chi2(path_to_file):
    """
    Read chi-squared values from file.

    Relies on the module-level globals `mf_type`, `survey` and `ver` to
    decide whether the stored values need reshaping.

    Parameters
    ----------
    path_to_file: string
        Path to chi-squared values file

    Returns
    ---------
    chi2: array
        Array of chi^2 values; reshaped to match the chain ordering for
        the early (ver 1.0) ECO SMF run
    """
    chi2_df = pd.read_csv(path_to_file, header=None, names=['chisquared'])
    # Applies to runs prior to run 5?
    if mf_type == 'smf' and survey == 'eco' and ver == 1.0:
        # Early values were flattened along the wrong axis relative to the
        # chain, so reshape and re-flatten in column-major ('F') order
        reshaped = chi2_df.chisquared.values.reshape((1000, 250))
        return np.array(reshaped).flatten('F')
    return chi2_df.chisquared.values
def read_mcmc(path_to_file):
    """
    Reads mcmc chain from file.

    Depends on the module-level globals `mf_type`, `survey` and `ver` to
    pick parser settings for the early (ver 1.0) ECO SMF run.

    Parameters
    ----------
    path_to_file: string
        Path to mcmc chain file

    Returns
    ---------
    emcee_table: pandas dataframe
        Dataframe of mcmc chain values with NANs removed
    """
    colnames = ['mhalo_c','mstellar_c','lowmass_slope','highmass_slope',\
        'scatter']
    if mf_type == 'smf' and survey == 'eco' and ver==1.0:
        emcee_table = pd.read_csv(path_to_file,names=colnames,sep='\s+',\
            dtype=np.float64)
    else:
        emcee_table = pd.read_csv(path_to_file, names=colnames,
            delim_whitespace=True, header=None)
    # Drop emcee comment-marker rows
    emcee_table = emcee_table[emcee_table.mhalo_c.values != '#']
    emcee_table.mhalo_c = emcee_table.mhalo_c.astype(np.float64)
    emcee_table.mstellar_c = emcee_table.mstellar_c.astype(np.float64)
    emcee_table.lowmass_slope = emcee_table.lowmass_slope.astype(np.float64)
    # NOTE(review): `highmass_slope` and `scatter` are never cast here,
    # unlike the first three columns — presumably pandas already infers
    # float64 for them; confirm against the chain files.
    # Cases where last parameter was a NaN and its value was being written to
    # the first element of the next line followed by 4 NaNs for the other
    # parameters
    for idx,row in enumerate(emcee_table.values):
        if np.isnan(row)[4] == True and np.isnan(row)[3] == False:
            scatter_val = emcee_table.values[idx+1][0]
            row[4] = scatter_val
    # Cases where rows of NANs appear
    emcee_table = emcee_table.dropna(axis='index', how='any').\
        reset_index(drop=True)
    return emcee_table
def get_paramvals_percentile(table, percentile, chi2_arr):
    """
    Isolate the lowest-chi^2 percentile of the chain and return the best
    fit row plus a random sample of 100 parameter sets from it.

    Parameters
    ----------
    table: pandas dataframe
        Mcmc chain dataframe
    percentile: int
        Percentile to use
    chi2_arr: array
        Array of chi^2 values

    Returns
    ---------
    subset: ndarray
        101 x 5 array: best-fit parameters in row 0, followed by a random
        100-row sample from the lowest-chi^2 percentile
    """
    frac = percentile / 100
    table['chi2'] = chi2_arr
    ranked = table.sort_values('chi2').reset_index(drop=True)
    pctl_slice = ranked[:int(frac * len(ranked))]
    # Best fit params are the parameters that correspond to the smallest chi2
    bf_params = pctl_slice.drop_duplicates().reset_index(drop=True).values[0][:5]
    subset = pctl_slice.drop_duplicates().sample(100).values[:, :5]
    subset = np.insert(subset, 0, bf_params, axis=0)
    return subset
# --- Behroozi-parameter comparison: configuration and input files -----------
dict_of_paths = cwpaths.cookiecutter_paths()
path_to_raw = dict_of_paths['raw_dir']
path_to_data = dict_of_paths['data_dir']
path_to_proc = dict_of_paths['proc_dir']
survey = 'eco'    # 'eco', 'resolvea' or 'resolveb'
machine = 'mac'
mf_type = 'smf'
ver = 2.0         # chain-file format version; 1.0 used a different layout
## Subset of 100 from latest total smf run on which group finding was done
chi2_file = path_to_proc + 'smhm_run6/{0}_chi2.txt'.format(survey)
if mf_type == 'smf' and survey == 'eco' and ver == 1.0:
    chain_file = path_to_proc + 'mcmc_{0}.dat'.format(survey)
else:
    chain_file = path_to_proc + 'smhm_run6/mcmc_{0}_raw.txt'.\
        format(survey)
print('Reading chi-squared file')
chi2 = read_chi2(chi2_file)
print('Reading mcmc chain file')
mcmc_table = read_mcmc(chain_file)
print('Getting subset of 100 Behroozi parameters')
# Row 0 is the best fit; rows 1-100 are a random draw from the percentile
mcmc_table_subset = get_paramvals_percentile(mcmc_table, 68, chi2)
## Latest colour run using both observables
def read_chi2(path_to_file):
    """
    Read chi-squared values from file.

    Parameters
    ----------
    path_to_file: string
        Path to chi-squared values file

    Returns
    ---------
    chi2: array
        Array of chi^2 values matching the chain ordering
    """
    return pd.read_csv(path_to_file, header=None,
                       names=['chisquared'])['chisquared'].values
def read_mcmc(path_to_file):
"""
Reads mcmc chain from file
Parameters
----------
path_to_file: string
Path to mcmc chain file
Returns
---------
emcee_table: pandas dataframe
Dataframe of mcmc chain values with NANs removed
"""
colnames = ['mstar_q','mh_q','mu','nu']
emcee_table = pd.read_csv(path_to_file, names=colnames,
delim_whitespace=True, header=None)
emcee_table = emcee_table[emcee_table.mstar_q.values != '#']
emcee_table.mstar_q = emcee_table.mstar_q.astype(np.float64)
emcee_table.mh_q = emcee_table.mh_q.astype(np.float64)
emcee_table.mu = emcee_table.mu.astype(np.float64)
emcee_table.nu = emcee_table.nu.astype(np.float64)
return emcee_table
def get_paramvals_percentile(mcmc_table, pctl, chi2, randints_df):
    """
    Isolate the lowest-chi^2 percentile of the chain and draw a random
    100-row sample from it.

    Parameters
    ----------
    mcmc_table: pandas dataframe
        Mcmc chain dataframe
    pctl: int
        Percentile to use
    chi2: array
        Array of chi^2 values
    randints_df: pandas dataframe
        Mock numbers associated with each chain row

    Returns
    ---------
    mcmc_table_pctl: pandas dataframe
        Random sample of 100 rows from the lowest-chi^2 percentile
    bf_params: array
        Parameters of the minimum-chi^2 row
    bf_chi2: float
        Minimum chi^2 value
    bf_randint: int
        Mock number of the minimum-chi^2 row
    """
    mcmc_table['chi2'] = chi2
    mcmc_table['mock_num'] = randints_df.mock_num.values.astype(int)
    ranked = mcmc_table.sort_values('chi2').reset_index(drop=True)
    cutoff = int((pctl / 100) * len(ranked))
    mcmc_table_pctl = ranked[:cutoff]
    # Best fit params are the parameters that correspond to the smallest chi2
    unique_rows = mcmc_table_pctl.drop_duplicates().reset_index(drop=True)
    bf_params = unique_rows.values[0][:4]
    bf_chi2 = unique_rows.values[0][4]
    bf_randint = unique_rows.values[0][5].astype(int)
    # Randomly sample 100 lowest chi2 rows
    mcmc_table_pctl = mcmc_table_pctl.drop_duplicates().sample(100)
    return mcmc_table_pctl, bf_params, bf_chi2, bf_randint
# Colour run (both observables): chi2, raw chain, and mock-number files
chi2_file = path_to_proc + 'smhm_colour_run17/{0}_colour_chi2.txt'.\
    format(survey)
chain_file = path_to_proc + 'smhm_colour_run17/mcmc_{0}_colour_raw.txt'.\
    format(survey)
randint_file = path_to_proc + 'smhm_colour_run17/{0}_colour_mocknum.txt'.\
    format(survey)
chi2 = read_chi2(chi2_file)
mcmc_table = read_mcmc(chain_file)
mock_nums_df = pd.read_csv(randint_file, header=None, names=['mock_num'],
    dtype=int)
mcmc_table_pctl, bf_params, bf_chi2, bf_randint = \
    get_paramvals_percentile(mcmc_table, 68, chi2, mock_nums_df)
# Map the colour run's picked mock numbers back to the Behroozi parameter
# sets in mcmc_table_subset (mock_num is 1-indexed, hence idx-1)
mock_nums_picked = mcmc_table_pctl['mock_num']
mhalo_arr = []
mstar_arr = []
lowslope = []
highslope = []
scatter = []
for idx in mock_nums_picked:
    mhalo_arr.append(mcmc_table_subset.T[0][idx-1])
    mstar_arr.append(mcmc_table_subset.T[1][idx-1])
    lowslope.append(mcmc_table_subset.T[2][idx-1])
    highslope.append(mcmc_table_subset.T[3][idx-1])
    scatter.append(mcmc_table_subset.T[4][idx-1])
# One histogram panel per Behroozi parameter: 68% of the full B10 run vs
# the subset re-picked after adding the second observable
plt.clf()
ax1 = plt.subplot2grid(shape=(2,6), loc=(0,0), colspan=2)
ax2 = plt.subplot2grid((2,6), (0,2), colspan=2)
ax3 = plt.subplot2grid((2,6), (0,4), colspan=2)
ax4 = plt.subplot2grid((2,6), (1,1), colspan=2)
ax5 = plt.subplot2grid((2,6), (1,3), colspan=2)
# ax1.hist(mcmc_table_b10_full['mhalo_c'], histtype='step', lw=3, color='r', ls='-', label='full chain')
ax1.hist(mcmc_table_subset.T[0], histtype='step', lw=3, color='r', ls='-', label='68% full B10', bins=np.linspace(11.5, 12.8, 8))
ax1.hist(mhalo_arr, histtype='step', lw=3, color='r', ls='dashdot', label='68% (+ 2nd observable)', bins=np.linspace(11.5, 12.8, 8))
# ax2.hist(mcmc_table_b10_full['mstellar_c'], histtype='step', lw=3, color='g', ls='-', label='full chain')
ax2.hist(mcmc_table_subset.T[1], histtype='step', lw=3, color='b', ls='-', label='68% full B10', bins=np.linspace(10.4, 10.9, 8))
ax2.hist(mstar_arr, histtype='step', lw=3, color='b', ls='dashdot', label='68% (+ 2nd observable)', bins=np.linspace(10.4, 10.9, 8))
# ax3.hist(mcmc_table_b10_full['lowmass_slope'], histtype='step', lw=3, color='b', ls='-', label='full chain')
ax3.hist(mcmc_table_subset.T[2], histtype='step', lw=3, color='g', ls='-', label='68% full B10', bins=np.linspace(0.2, 0.5, 8))
ax3.hist(lowslope, histtype='step', lw=3, color='g', ls='dashdot', label='68% (+ 2nd observable)', bins=np.linspace(0.2, 0.5, 8))
# ax4.hist(mcmc_table_b10_full['highmass_slope'], histtype='step', lw=3, color='y', ls='-', label='full chain')
ax4.hist(mcmc_table_subset.T[3], histtype='step', lw=3, color='y', ls='-', label='68% full B10', bins=np.linspace(0.2, 1.2, 8))
ax4.hist(highslope, histtype='step', lw=3, color='y', ls='dashdot', label='68% (+ 2nd observable)', bins=np.linspace(0.2, 1.2, 8))
# ax5.hist(mcmc_table_b10_full['scatter'], histtype='step', lw=3, color='violet', ls='-', label='full chain')
ax5.hist(mcmc_table_subset.T[4], histtype='step', lw=3, color='violet', ls='-', label='68% full B10', bins=np.linspace(0.1, 0.5, 8))
ax5.hist(scatter, histtype='step', lw=3, color='violet', ls='dashdot', label='68% (+ 2nd observable)', bins=np.linspace(0.1, 0.5, 8))
ax1.title.set_text('Characteristic halo mass')
ax2.title.set_text('Characteristic stellar mass')
ax3.title.set_text('Low mass slope')
ax4.title.set_text('High mass slope')
ax5.title.set_text('Log-normal scatter in stellar mass')
plt.legend(loc='best')
plt.show()
################################################################################
#! In a bin of M* what does the distribution of M_h look like?
################################################################################
from cosmo_utils.utils.stats_funcs import Stats_one_arr
from cosmo_utils.utils import work_paths as cwpaths
import matplotlib.pyplot as plt
from matplotlib import rc
import numpy as np
import pandas as pd
import os
# Global matplotlib styling: LaTeX text rendering, Helvetica, and thicker
# axes/tick marks for publication-quality figures
rc('font', **{'family': 'sans-serif', 'sans-serif': ['Helvetica']}, size=25)
rc('text', usetex=True)
rc('text.latex', preamble=[r"\usepackage{amsmath}"])
rc('axes', linewidth=2)
rc('xtick.major', width=2, size=7)
rc('ytick.major', width=2, size=7)
def read_mock_catl(filename, catl_format='.hdf5'):
    """
    Read an ECO/RESOLVE galaxy/group catalogue into a DataFrame.

    Parameters
    ----------
    filename: string
        path and name of the ECO/RESOLVE catalogue to read
    catl_format: string, optional (default = '.hdf5')
        type of file to read; only '.hdf5' is supported

    Returns
    -------
    mock_pd: pandas DataFrame
        DataFrame with galaxy/group information

    Raises
    ------
    ValueError
        If `filename` does not exist or `catl_format` is unsupported
    """
    ## Checking if file exists
    if not os.path.exists(filename):
        raise ValueError('`filename`: {0} NOT FOUND! Exiting..'.format(filename))
    ## Reading file
    if catl_format == '.hdf5':
        return pd.read_hdf(filename)
    raise ValueError('`catl_format` ({0}) not supported! Exiting...'.format(catl_format))
def assign_cen_sat_flag(gals_df):
    """
    Assign a central/satellite flag to each galaxy in a mock catalogue.

    A galaxy is a central when it lives in the halo it hosts, i.e. its
    `halo_hostid` equals its own `halo_id`; otherwise it is a satellite.

    Parameters
    ----------
    gals_df: pandas dataframe
        Mock catalog with `halo_hostid` and `halo_id` columns.

    Returns
    ---------
    gals_df: pandas dataframe
        Mock catalog with an integer `cs_flag` column added
        (1 = central, 0 = satellite).
    """
    # Vectorised comparison replaces the original per-row Python loop.
    gals_df['cs_flag'] = np.where(
        gals_df['halo_hostid'].values == gals_df['halo_id'].values, 1, 0)
    return gals_df
def get_host_halo_mock(gals_df, mock):
    """
    Split host halo masses into centrals and satellites.

    Parameters
    ----------
    gals_df: pandas dataframe
        Mock catalog with a `cs_flag` column (1 = central,
        0 = satellite) plus `halo_mvir` (Vishnu) or `loghalom`
        (other mocks).
    mock: string
        'vishnu' for the Vishnu mock, whose halo masses are stored in
        linear units; anything else is assumed to store log10 masses.

    Returns
    ---------
    cen_halos: array
        Host halo masses (linear units) of central galaxies.
    sat_halos: array
        Host halo masses (linear units) of satellite galaxies.
    """
    df = gals_df.copy()
    cen_mask = df.cs_flag.values == 1
    if mock == 'vishnu':
        # Vishnu halo masses are already linear.
        masses = df.halo_mvir.values
    else:
        # Other mocks store log10 masses; convert to linear units.
        masses = 10**df.loghalom.values
    # Vectorised split replaces the original per-row Python loop.
    cen_halos = np.array(masses[cen_mask])
    sat_halos = np.array(masses[~cen_mask])
    return cen_halos, sat_halos
def get_stellar_mock(gals_df, mock, randint=None):
    """
    Split galaxy stellar masses into centrals and satellites.

    Parameters
    ----------
    gals_df: pandas dataframe
        Mock catalog with a `cs_flag` column (1 = central,
        0 = satellite).
    mock: string
        'vishnu' for the Vishnu mock, whose log10 stellar masses live
        in the column named after `randint`; anything else reads
        `logmstar` and converts from h=0.7 to h=1 units.
    randint: int, optional
        Mock-number suffix identifying the Vishnu stellar-mass column.

    Returns
    ---------
    cen_gals: array
        Stellar masses (linear units) of central galaxies.
    sat_gals: array
        Stellar masses (linear units) of satellite galaxies.
    """
    df = gals_df.copy()
    if mock == 'vishnu':
        # Behroozi masses in the Vishnu mock are stored as log10 values.
        masses = 10**df['{0}'.format(randint)].values
    else:
        # Convert from h=0.7 to h=1 assuming an h^-2 dependence of M*.
        masses = (10**df.logmstar.values) / 2.041
    # Vectorised split replaces the original per-row Python loop; note
    # only cs_flag values of exactly 1 or 0 are kept, as before.
    cen_gals = np.array(masses[df.cs_flag.values == 1])
    sat_gals = np.array(masses[df.cs_flag.values == 0])
    return cen_gals, sat_gals
def hybrid_quenching_model(theta, gals_df, mock, randint=None):
    """
    Apply the hybrid quenching model of Zu & Mandelbaum (2015).

    Parameters
    ----------
    theta: array-like
        Model parameters [Mstar_q, Mh_q, mu, nu]; the masses are log10
        values in Msun/h (Table 1 of Zu & Mandelbaum 2015, "prior case").
    gals_df: pandas dataframe
        Mock catalog.
    mock: string
        Which mock the catalogue comes from ('vishnu' or other).
    randint: int, optional
        Mock-number suffix for the Vishnu stellar-mass column.

    Returns
    ---------
    f_red_cen: array
        Red fractions of central galaxies.
    f_red_sat: array
        Red fractions of satellite galaxies.
    """
    Mstar_q, Mh_q, mu, nu = theta[0], theta[1], theta[2], theta[3]

    cen_hosthalo_mass_arr, sat_hosthalo_mass_arr = get_host_halo_mock(
        gals_df, mock)
    cen_stellar_mass_arr, sat_stellar_mass_arr = get_stellar_mock(
        gals_df, mock, randint)

    # Centrals quench purely as a function of stellar mass.
    f_red_cen = 1 - np.exp(-((cen_stellar_mass_arr / (10**Mstar_q))**mu))
    # Satellites quench via both stellar mass and host halo mass.
    g_Mstar = np.exp(-((sat_stellar_mass_arr / (10**Mstar_q))**mu))
    h_Mh = np.exp(-((sat_hosthalo_mass_arr / (10**Mh_q))**nu))
    f_red_sat = 1 - (g_Mstar * h_Mh)
    return f_red_cen, f_red_sat
def get_best_fit_model(best_fit_params, best_fit_mocknum):
    """
    Get SMF and SMHM information of best fit model given a survey

    Parameters
    ----------
    best_fit_params: array
        Best-fit quenching-model parameters [Mstar_q, Mh_q, mu, nu]
    best_fit_mocknum: int
        Mock number (column suffix) the best-fit parameters belong to

    Returns
    ---------
    max_red, phi_red, max_blue, phi_blue: arrays
        Red/blue SMF mass axes and phi values
    cen_gals_red, cen_halos_red, cen_gals_blue, cen_halos_blue: arrays
        Central stellar and halo masses split by colour
    f_red_cen_red, f_red_cen_blue: arrays
        Red fractions of red/blue centrals
    std_red, std_blue, std_centers_red, std_centers_blue: arrays
        Velocity-dispersion spreads and their stellar-mass bin centers
    """
    # Keep only the halo/group columns needed for this mock number
    # (reads the module-level `gal_group_df_subset`).
    cols_to_use = ['halo_hostid', 'halo_id', 'halo_mvir', 'cz', 'halo_macc', \
        '{0}'.format(best_fit_mocknum), \
        'g_galtype_{0}'.format(best_fit_mocknum), \
        'groupid_{0}'.format(best_fit_mocknum)]
    gals_df = gal_group_df_subset[cols_to_use]
    # Galaxies without a group assignment for this mock number are dropped.
    gals_df = gals_df.dropna(subset=['g_galtype_{0}'.\
        format(best_fit_mocknum),'groupid_{0}'.format(best_fit_mocknum)]).\
        reset_index(drop=True)
    # Group type and group id are integer labels; dropna leaves them float.
    gals_df[['g_galtype_{0}'.format(best_fit_mocknum), \
        'groupid_{0}'.format(best_fit_mocknum)]] = \
        gals_df[['g_galtype_{0}'.format(best_fit_mocknum),\
        'groupid_{0}'.format(best_fit_mocknum)]].astype(int)
    gals_df = assign_cen_sat_flag(gals_df)
    # Red fractions from the quenching model, then random colour draws.
    f_red_cen, f_red_sat = hybrid_quenching_model(best_fit_params, gals_df,
        'vishnu', best_fit_mocknum)
    gals_df = assign_colour_label_mock(f_red_cen, f_red_sat, gals_df)
    # Simulation volume in [Mpc/h]^3 (previously approximated as 130**3).
    # v_sim = 130**3
    v_sim = 890641.5172927063
    total_model, red_model, blue_model = measure_all_smf(gals_df, v_sim
        , False, best_fit_mocknum)
    cen_gals_red, cen_halos_red, cen_gals_blue, cen_halos_blue, f_red_cen_red,\
        f_red_cen_blue = get_centrals_mock(gals_df, best_fit_mocknum)
    std_red, std_blue, std_centers_red, std_centers_blue = \
        get_deltav_sigma_vishnu_qmcolour(gals_df, best_fit_mocknum)
    # Unpack mass axis and phi from the red/blue SMF result lists.
    max_red = red_model[0]
    phi_red = red_model[1]
    max_blue = blue_model[0]
    phi_blue = blue_model[1]
    return max_red, phi_red, max_blue, phi_blue, cen_gals_red, cen_halos_red,\
        cen_gals_blue, cen_halos_blue, f_red_cen_red, f_red_cen_blue, std_red, \
        std_blue, std_centers_red, std_centers_blue
def assign_colour_label_mock(f_red_cen, f_red_sat, gals_df, drop_fred=False):
    """
    Assign a red/blue colour label to every galaxy in a mock catalogue.

    Each galaxy is labelled 'R' when a uniform random draw falls below
    its red fraction, and 'B' otherwise.

    Parameters
    ----------
    f_red_cen: array
        Red fractions of central galaxies (cs_flag == 1), in row order.
    f_red_sat: array
        Red fractions of satellite galaxies (cs_flag == 0), in row order.
    gals_df: pandas Dataframe
        Mock catalog with a `cs_flag` column.
    drop_fred: boolean
        If True, the intermediate `f_red` column is removed again.

    Returns
    ---------
    df: pandas Dataframe
        Copy of the catalogue with `colour_label` ('R'/'B') and `rng`
        (the random draw) columns added.
    """
    df = gals_df.copy()
    # Scatter the central/satellite red fractions into one column.
    df.loc[:, 'f_red'] = np.zeros(len(df))
    df.loc[df['cs_flag'] == 1, 'f_red'] = f_red_cen
    df.loc[df['cs_flag'] == 0, 'f_red'] = f_red_sat
    f_red_arr = df['f_red'].values

    # One uniform draw per galaxy decides its colour.
    colour_labels = []
    draws = []
    for f_red_val in f_red_arr:
        draw = np.random.uniform()
        colour_labels.append('B' if draw >= f_red_val else 'R')
        draws.append(draw)
    df.loc[:, 'colour_label'] = colour_labels
    df.loc[:, 'rng'] = draws

    # Optionally drop the helper `f_red` column.
    if drop_fred:
        df.drop('f_red', axis=1, inplace=True)
    return df
def diff_smf(mstar_arr, volume, h1_bool, colour_flag=False):
    """
    Compute the differential stellar mass function in h=1.0 units.

    Parameters
    ----------
    mstar_arr: numpy array
        Stellar masses (log10 of h=0.7 masses, or linear h=1 masses).
    volume: float
        Volume of survey or simulation.
    h1_bool: boolean
        True if masses are already h=1 (linear); False if they are
        h=0.7 log masses and need converting.
    colour_flag: string or False, optional
        'R' or 'B' selects the red/blue binning for the ECO survey.

    Returns
    ---------
    maxis: array
        Bin-center masses (x axis).
    phi: array
        log10 number density per bin (y axis).
    err_tot: array
        Poisson error per bin.
    bins: array
        Bin edge values.
    counts: array
        Raw galaxy counts per bin.
    """
    if h1_bool:
        logmstar_arr = np.log10(mstar_arr)
    else:
        # Convert h=0.7 masses to h=1 assuming an h^-2 dependence.
        logmstar_arr = np.log10((10**mstar_arr) / 2.041)

    # Survey-dependent binning (uses the module-level `survey`).
    if survey == 'eco' or survey == 'resolvea':
        bin_min = np.round(np.log10((10**8.9) / 2.041), 1)
        if survey == 'eco' and colour_flag == 'R':
            bin_max, bin_num = np.round(np.log10((10**11.5) / 2.041), 1), 6
        elif survey == 'eco' and colour_flag == 'B':
            bin_max, bin_num = np.round(np.log10((10**11) / 2.041), 1), 6
        elif survey == 'resolvea':
            # Different upper edge to avoid NaNs in the inverse corr. matrix.
            bin_max, bin_num = np.round(np.log10((10**11.5) / 2.041), 1), 7
        else:
            bin_max, bin_num = np.round(np.log10((10**11.5) / 2.041), 1), 7
        bins = np.linspace(bin_min, bin_max, bin_num)
    elif survey == 'resolveb':
        bin_min = np.round(np.log10((10**8.7) / 2.041), 1)
        bin_max = np.round(np.log10((10**11.8) / 2.041), 1)
        bins = np.linspace(bin_min, bin_max, 7)

    # Histogram, then normalise to volume and bin width.
    counts, edg = np.histogram(logmstar_arr, bins=bins)
    dm = edg[1] - edg[0]                  # bin width
    maxis = 0.5 * (edg[1:] + edg[:-1])    # bin centers
    err_tot = np.sqrt(counts) / (volume * dm)   # Poisson error only
    phi = np.log10(counts / (volume * dm))
    return maxis, phi, err_tot, bins, counts
def measure_all_smf(table, volume, data_bool, randint_logmstar=None):
    """
    Compute SMFs for all, red and blue galaxies from mock or data.

    Parameters
    ----------
    table: pandas Dataframe
        Catalogue of either mock or data, with a `colour_label` column.
    volume: float
        Volume of simulation/survey.
    data_bool: Boolean
        True for data (h=0.7 `logmstar` column), False for mocks
        (log-mass column named after `randint_logmstar`).
    randint_logmstar: int, optional
        Mock-number suffix identifying the mock stellar-mass column.

    Returns
    ---------
    Three lists [maxis, phi, err, counts] for the total, red and blue
    stellar mass functions.
    """
    colour_col = 'colour_label'
    if data_bool:
        logmstar_col = 'logmstar'
        # Data masses are log10 h=0.7 values; diff_smf converts them.
        mass_all = table[logmstar_col]
        mass_red = table[logmstar_col].loc[table[colour_col] == 'R']
        mass_blue = table[logmstar_col].loc[table[colour_col] == 'B']
        h1_bool = False
    else:
        logmstar_col = '{0}'.format(randint_logmstar)
        # Behroozi mocks store log10 M*; raise to linear and flag h=1.
        mass_all = 10**(table[logmstar_col])
        mass_red = 10**(table[logmstar_col].loc[table[colour_col] == 'R'])
        mass_blue = 10**(table[logmstar_col].loc[table[colour_col] == 'B'])
        h1_bool = True
    max_total, phi_total, err_total, bins_total, counts_total = \
        diff_smf(mass_all, volume, h1_bool)
    max_red, phi_red, err_red, bins_red, counts_red = \
        diff_smf(mass_red, volume, h1_bool, 'R')
    max_blue, phi_blue, err_blue, bins_blue, counts_blue = \
        diff_smf(mass_blue, volume, h1_bool, 'B')
    return [max_total, phi_total, err_total, counts_total], \
        [max_red, phi_red, err_red, counts_red], \
        [max_blue, phi_blue, err_blue, counts_blue]
def std_func_mod(bins, mass_arr, vel_arr):
    """
    RMS of velocity offsets about zero, in bins of (stellar) mass.

    Galaxies are binned by `mass_arr`; within each bin the spread of
    `vel_arr` is computed as sqrt(mean(v**2)), i.e. a standard
    deviation about a fixed mean of zero (the velocity offsets are
    defined relative to the group mean, so their expected mean is 0).

    Parameters
    ----------
    bins: array
        Bin edges (defines len(bins) - 1 bins).
    mass_arr: array-like
        Masses used to bin the galaxies.
    vel_arr: array-like
        Velocity offsets, same length as `mass_arr`.

    Returns
    ---------
    std_arr: list
        Spread per bin; NaN for empty bins.
    """
    mass_arr_bin_idxs = np.digitize(mass_arr, bins)
    # Values beyond the last edge get index len(bins); fold them into
    # the last bin (generalised from the original hard-coded 6 -> 5
    # remap, which assumed exactly 6 bin edges).
    mass_arr_bin_idxs[mass_arr_bin_idxs == len(bins)] = len(bins) - 1
    vel_arr = np.asarray(vel_arr)
    mean = 0  # deliberate: spread measured about zero, not the sample mean
    std_arr = []
    for idx in range(1, len(bins)):
        in_bin = vel_arr[mass_arr_bin_idxs == idx]
        # sqrt(mean((v - 0)^2)); np.mean of an empty bin yields NaN,
        # matching the original behaviour.
        std_arr.append(np.sqrt(np.mean((in_bin - mean)**2)))
    return std_arr
def get_deltav_sigma_vishnu_qmcolour(gals_df, randint):
    """
    Calculate spread in velocity dispersion from Vishnu mock (logmstar already
    in h=1)

    Parameters
    ----------
    gals_df: pandas dataframe
        Vishnu mock catalog with group and colour information
    randint: int
        Mock number (column suffix) identifying which group-finder run
        to use

    Returns
    ---------
    std_red: numpy array
        Spread in velocity dispersion of red galaxies
    std_blue: numpy array
        Spread in velocity dispersion of blue galaxies
    centers_red: numpy array
        Bin centers of central stellar mass for red galaxies
    centers_blue: numpy array
        Bin centers of central stellar mass for blue galaxies
    """
    mock_pd = gals_df.copy()
    # Survey-dependent selection limits (uses the module-level `survey`).
    if survey == 'eco':
        mock_name = 'ECO'
        num_mocks = 8
        min_cz = 3000
        max_cz = 7000
        mag_limit = -17.33
        mstar_limit = 8.9
        volume = 151829.26 # Survey volume without buffer [Mpc/h]^3
    elif survey == 'resolvea':
        mock_name = 'A'
        num_mocks = 59
        min_cz = 4500
        max_cz = 7000
        mag_limit = -17.33
        mstar_limit = 8.9
        volume = 13172.384  # Survey volume without buffer [Mpc/h]^3
    elif survey == 'resolveb':
        mock_name = 'B'
        num_mocks = 104
        min_cz = 4500
        max_cz = 7000
        mag_limit = -17
        mstar_limit = 8.7
        volume = 4709.8373  # Survey volume without buffer [Mpc/h]^3
    # Per-mock-number column names produced by the group finder.
    logmstar_col = '{0}'.format(randint)
    g_galtype_col = 'g_galtype_{0}'.format(randint)
    groupid_col = 'groupid_{0}'.format(randint)
    # Using the same survey definition as in mcmc smf i.e excluding the
    # buffer except no M_r cut since vishnu mock has no M_r info
    mock_pd = mock_pd.loc[(mock_pd.cz.values >= min_cz) & \
        (mock_pd.cz.values <= max_cz) & \
        (mock_pd[logmstar_col].values >= np.log10((10**mstar_limit)/2.041))]
    # Group ids whose central (g_galtype == 1) is red/blue.
    red_subset_grpids = np.unique(mock_pd[groupid_col].loc[(mock_pd.\
        colour_label == 'R') & (mock_pd[g_galtype_col] == 1)].values)
    blue_subset_grpids = np.unique(mock_pd[groupid_col].loc[(mock_pd.\
        colour_label == 'B') & (mock_pd[g_galtype_col] == 1)].values)
    # Calculating spread in velocity dispersion for galaxies in groups
    # with a red central
    red_deltav_arr = []
    red_cen_stellar_mass_arr = []
    for key in red_subset_grpids:
        group = mock_pd.loc[mock_pd[groupid_col] == key]
        # Stellar mass of the group's central galaxy.
        cen_stellar_mass = group['{0}'.format(randint)].loc[group[g_galtype_col].\
            values == 1].values[0]
        mean_cz_grp = np.round(np.mean(group.cz.values),2)
        # Velocity offset of every member relative to the group mean cz.
        deltav = group.cz.values - len(group)*[mean_cz_grp]
        for val in deltav:
            red_deltav_arr.append(val)
            red_cen_stellar_mass_arr.append(cen_stellar_mass)
    # print(max(red_cen_stellar_mass_arr))
    if survey == 'eco' or survey == 'resolvea':
        # TODO : check if this is actually correct for resolve a
        red_stellar_mass_bins = np.linspace(8.6,11.2,6)
    elif survey == 'resolveb':
        red_stellar_mass_bins = np.linspace(8.4,11.0,6)
    std_red = std_func_mod(red_stellar_mass_bins, red_cen_stellar_mass_arr,
        red_deltav_arr)
    std_red = np.array(std_red)
    # Calculating spread in velocity dispersion for galaxies in groups
    # with a blue central
    blue_deltav_arr = []
    blue_cen_stellar_mass_arr = []
    for key in blue_subset_grpids:
        group = mock_pd.loc[mock_pd[groupid_col] == key]
        cen_stellar_mass = group['{0}'.format(randint)].loc[group[g_galtype_col]\
            .values == 1].values[0]
        mean_cz_grp = np.round(np.mean(group.cz.values),2)
        deltav = group.cz.values - len(group)*[mean_cz_grp]
        for val in deltav:
            blue_deltav_arr.append(val)
            blue_cen_stellar_mass_arr.append(cen_stellar_mass)
    # print(max(blue_cen_stellar_mass_arr))
    if survey == 'eco' or survey == 'resolvea':
        # TODO : check if this is actually correct for resolve a
        blue_stellar_mass_bins = np.linspace(8.6,10.7,6)
    elif survey == 'resolveb':
        blue_stellar_mass_bins = np.linspace(8.4,10.4,6)
    std_blue = std_func_mod(blue_stellar_mass_bins, \
        blue_cen_stellar_mass_arr, blue_deltav_arr)
    std_blue = np.array(std_blue)
    # Bin centers of the stellar-mass bins used above.
    centers_red = 0.5 * (red_stellar_mass_bins[1:] + \
        red_stellar_mass_bins[:-1])
    centers_blue = 0.5 * (blue_stellar_mass_bins[1:] + \
        blue_stellar_mass_bins[:-1])
    return std_red, std_blue, centers_red, centers_blue
def get_centrals_mock(gals_df, randint=None):
    """
    Extract central galaxies from a mock catalogue, split by colour.

    Parameters
    ----------
    gals_df: pandas dataframe
        Mock catalog with `halo_hostid`, `halo_id`, `halo_mvir`,
        `colour_label` and `f_red` columns plus a stellar-mass column
        named after `randint`. A `C_S` flag column is added to it as a
        side effect.
    randint: int, optional
        Mock-number suffix identifying the stellar-mass column.

    Returns
    ---------
    cen_gals_red, cen_halos_red: arrays
        Stellar masses and log10 halo masses of red centrals.
    cen_gals_blue, cen_halos_blue: arrays
        Stellar masses and log10 halo masses of blue centrals.
    f_red_cen_gals_red, f_red_cen_gals_blue: lists
        Red fractions of the red/blue centrals.
    """
    # A galaxy is a central when it is its own host halo.
    flags = []
    for row_idx in range(len(gals_df)):
        is_central = gals_df['halo_hostid'][row_idx] == gals_df['halo_id'][row_idx]
        flags.append(1 if is_central else 0)
    gals_df['C_S'] = np.array(flags)

    mstar_col = '{0}'.format(randint)
    cen_gals_red, cen_halos_red = [], []
    cen_gals_blue, cen_halos_blue = [], []
    f_red_cen_gals_red, f_red_cen_gals_blue = [], []
    # Collect stellar mass, halo mass and red fraction per colour.
    for row_idx, flag in enumerate(gals_df['C_S']):
        if flag != 1:
            continue
        colour = gals_df['colour_label'][row_idx]
        if colour == 'R':
            cen_gals_red.append(gals_df[mstar_col][row_idx])
            cen_halos_red.append(gals_df['halo_mvir'][row_idx])
            f_red_cen_gals_red.append(gals_df['f_red'][row_idx])
        elif colour == 'B':
            cen_gals_blue.append(gals_df[mstar_col][row_idx])
            cen_halos_blue.append(gals_df['halo_mvir'][row_idx])
            f_red_cen_gals_blue.append(gals_df['f_red'][row_idx])

    cen_gals_red = np.array(cen_gals_red)
    cen_halos_red = np.log10(np.array(cen_halos_red))
    cen_gals_blue = np.array(cen_gals_blue)
    cen_halos_blue = np.log10(np.array(cen_halos_blue))
    return cen_gals_red, cen_halos_red, cen_gals_blue, cen_halos_blue, \
        f_red_cen_gals_red, f_red_cen_gals_blue
# Module-level configuration shared by the functions above.
global survey
global path_to_figures
global mf_type
global gal_group_df_subset
# Resolve project directories from the cookiecutter layout.
dict_of_paths = cwpaths.cookiecutter_paths()
path_to_raw = dict_of_paths['raw_dir']
path_to_data = dict_of_paths['data_dir']
path_to_proc = dict_of_paths['proc_dir']
ver = 2.0  # chain/file-format version
machine = 'mac'  # 'bender' (cluster) or 'mac' (local)
mf_type = 'smf'
survey = 'eco'  # 'eco', 'resolvea' or 'resolveb'
nproc = 2
# Halo catalogue location depends on which machine this runs on.
if machine == 'bender':
    halo_catalog = '/home/asadm2/.astropy/cache/halotools/halo_catalogs/'\
        'vishnu/rockstar/vishnu_rockstar_test.hdf5'
elif machine == 'mac':
    halo_catalog = path_to_raw + 'vishnu_rockstar_test.hdf5'
def read_chi2(path_to_file):
    """
    Read chi-squared values from file.

    Parameters
    ----------
    path_to_file: string
        Path to chi-squared values file (one value per line).

    Returns
    ---------
    chi2: array
        Chi^2 values, reshaped if necessary to match the chain order.
    """
    chi2_df = pd.read_csv(path_to_file, header=None, names=['chisquared'])
    chi2 = chi2_df.chisquared.values
    # Runs prior to run 5 (version 1.0 ECO SMF) were flattened along the
    # wrong axis and did not correspond to the chain; re-flatten
    # column-wise (Fortran order) to restore the chain ordering.
    if mf_type == 'smf' and survey == 'eco' and ver == 1.0:
        chi2 = np.ndarray.flatten(np.array(chi2.reshape((1000, 250))), 'F')
    return chi2
def read_mcmc(path_to_file):
    """
    Reads mcmc chain from file

    Parameters
    ----------
    path_to_file: string
        Path to mcmc chain file

    Returns
    ---------
    emcee_table: pandas dataframe
        Dataframe of mcmc chain values with NANs removed
    """
    colnames = ['mhalo_c','mstellar_c','lowmass_slope','highmass_slope',\
        'scatter']
    # Version 1.0 ECO SMF chains are clean whitespace-delimited floats;
    # newer chains may contain '#' marker rows that must be stripped.
    if mf_type == 'smf' and survey == 'eco' and ver==1.0:
        emcee_table = pd.read_csv(path_to_file,names=colnames,sep='\s+',\
            dtype=np.float64)
    else:
        emcee_table = pd.read_csv(path_to_file, names=colnames,
            delim_whitespace=True, header=None)
        # Drop '#' marker rows, then restore numeric dtypes (the marker
        # rows force the affected columns to object dtype on read).
        emcee_table = emcee_table[emcee_table.mhalo_c.values != '#']
        emcee_table.mhalo_c = emcee_table.mhalo_c.astype(np.float64)
        emcee_table.mstellar_c = emcee_table.mstellar_c.astype(np.float64)
        emcee_table.lowmass_slope = emcee_table.lowmass_slope.astype(np.float64)
    # Cases where last parameter was a NaN and its value was being written to
    # the first element of the next line followed by 4 NaNs for the other
    # parameters
    for idx,row in enumerate(emcee_table.values):
        if np.isnan(row)[4] == True and np.isnan(row)[3] == False:
            scatter_val = emcee_table.values[idx+1][0]
            row[4] = scatter_val
    # Cases where rows of NANs appear
    emcee_table = emcee_table.dropna(axis='index', how='any').\
        reset_index(drop=True)
    return emcee_table
def get_paramvals_percentile(table, percentile, chi2_arr):
    """
    Select the lowest-chi^2 fraction of the chain and draw 100 samples.

    Parameters
    ----------
    table: pandas dataframe
        Mcmc chain dataframe (five parameter columns).
    percentile: int
        Percentile of lowest chi^2 values to keep (e.g. 68).
    chi2_arr: array
        Chi^2 value per chain row.

    Returns
    ---------
    subset: ndarray
        101 x 5 array: the best-fit parameters (smallest chi^2) in row
        0 followed by 100 randomly sampled parameter sets from the
        selected percentile.
    """
    frac = percentile / 100
    table['chi2'] = chi2_arr
    table = table.sort_values('chi2').reset_index(drop=True)
    # Keep the best-fitting fraction of the (sorted) chain.
    mcmc_table_pctl = table[:int(frac * len(table))]
    deduped = mcmc_table_pctl.drop_duplicates().reset_index(drop=True)
    # Row 0 after sorting holds the smallest chi^2, i.e. the best fit.
    bf_params = deduped.values[0][:5]
    subset = deduped.sample(100).values[:, :5]
    return np.insert(subset, 0, bf_params, axis=0)
## Subset of 100 from latest total smf run on which group finding was done
chi2_file = path_to_proc + 'smhm_run6/{0}_chi2.txt'.format(survey)
# Version 1.0 ECO chains live under a different filename pattern.
if mf_type == 'smf' and survey == 'eco' and ver == 1.0:
    chain_file = path_to_proc + 'mcmc_{0}.dat'.format(survey)
else:
    chain_file = path_to_proc + 'smhm_run6/mcmc_{0}_raw.txt'.\
        format(survey)
print('Reading chi-squared file')
chi2 = read_chi2(chi2_file)
print('Reading mcmc chain file')
mcmc_table = read_mcmc(chain_file)
print('Getting subset of 100 Behroozi parameters')
mcmc_table_subset = get_paramvals_percentile(mcmc_table, 68, chi2)
## Latest colour run
def read_chi2(path_to_file):
    """
    Read chi-squared values for the colour run from file.

    Parameters
    ----------
    path_to_file: string
        Path to chi-squared values file (one value per line).

    Returns
    ---------
    chi2: array
        Chi^2 values in chain order.
    """
    # Single-column file; no reshaping needed for the colour runs.
    return pd.read_csv(path_to_file, header=None,
        names=['chisquared']).chisquared.values
def read_mcmc(path_to_file):
    """
    Read the colour-run mcmc chain from file.

    Parameters
    ----------
    path_to_file: string
        Path to the whitespace-delimited chain file with columns
        mstar_q, mh_q, mu, nu.

    Returns
    ---------
    emcee_table: pandas dataframe
        Chain values with '#' marker rows removed and numeric dtypes
        restored.
    """
    colnames = ['mstar_q', 'mh_q', 'mu', 'nu']
    emcee_table = pd.read_csv(path_to_file, names=colnames,
        delim_whitespace=True, header=None)
    # '#' marker rows force object dtype; drop them and cast back.
    emcee_table = emcee_table[emcee_table.mstar_q.values != '#']
    for col in colnames:
        emcee_table[col] = emcee_table[col].astype(np.float64)
    return emcee_table
def get_paramvals_percentile(mcmc_table, pctl, chi2, randints_df):
    """
    Select the lowest-chi^2 fraction of the colour chain and sample 100.

    Parameters
    ----------
    mcmc_table: pandas dataframe
        Mcmc chain dataframe (mstar_q, mh_q, mu, nu).
    pctl: int
        Percentile of lowest chi^2 values to keep (e.g. 68).
    chi2: array
        Chi^2 value per chain row.
    randints_df: pandas dataframe
        Mock numbers (column `mock_num`) drawn per chain step.

    Returns
    ---------
    mcmc_table_pctl: pandas dataframe
        Random sample of 100 rows from the selected percentile.
    bf_params: array
        Parameter values with the smallest chi^2.
    bf_chi2: float
        Smallest chi^2 value.
    bf_randint: int
        Mock number associated with the best fit.
    """
    frac = pctl / 100
    mcmc_table['chi2'] = chi2
    mcmc_table['mock_num'] = randints_df.mock_num.values.astype(int)
    mcmc_table = mcmc_table.sort_values('chi2').reset_index(drop=True)
    mcmc_table_pctl = mcmc_table[:int(frac * len(mcmc_table))]
    # After sorting, row 0 of the deduplicated subset is the best fit.
    best_row = mcmc_table_pctl.drop_duplicates().reset_index(drop=True).values[0]
    bf_params = best_row[:4]
    bf_chi2 = best_row[4]
    bf_randint = best_row[5].astype(int)
    # Randomly sample 100 of the lowest-chi^2 rows.
    mcmc_table_pctl = mcmc_table_pctl.drop_duplicates().sample(100)
    return mcmc_table_pctl, bf_params, bf_chi2, bf_randint
# File locations for the latest colour run (run 17).
chi2_file = path_to_proc + 'smhm_colour_run17/{0}_colour_chi2.txt'.\
    format(survey)
chain_file = path_to_proc + 'smhm_colour_run17/mcmc_{0}_colour_raw.txt'.\
    format(survey)
randint_file = path_to_proc + 'smhm_colour_run17/{0}_colour_mocknum.txt'.\
    format(survey)
if survey == 'eco':
    # catl_file = path_to_raw + "eco/eco_all.csv"
    ## New catalog with group finder run on subset after applying M* and cz cuts
    catl_file = path_to_proc + "gal_group_eco_data.hdf5"
    path_to_mocks = path_to_data + 'mocks/m200b/eco/'
elif survey == 'resolvea' or survey == 'resolveb':
    catl_file = path_to_raw + "RESOLVE_liveJune2018.csv"
print('Reading files')
chi2_colour = read_chi2(chi2_file)
mcmc_table_colour = read_mcmc(chain_file)
mock_nums_df = pd.read_csv(randint_file, header=None, names=['mock_num'],
    dtype=int)
gal_group_df = read_mock_catl(path_to_proc + "gal_group.hdf5")
mcmc_table_pctl, bf_params, bf_chi2, bf_randint = \
    get_paramvals_percentile(mcmc_table_colour, 68, chi2_colour, mock_nums_df)
mock_nums_picked = mcmc_table_pctl['mock_num']
# Collect the Behroozi (SMHM) parameters of the 100 picked mocks; mock
# numbers are 1-based, hence the idx-1 indexing into the subset.
mhalo_arr = []
mstar_arr = []
lowslope = []
highslope = []
scatter = []
for idx in mock_nums_picked:
    mhalo_arr.append(mcmc_table_subset.T[0][idx-1])
    mstar_arr.append(mcmc_table_subset.T[1][idx-1])
    lowslope.append(mcmc_table_subset.T[2][idx-1])
    highslope.append(mcmc_table_subset.T[3][idx-1])
    scatter.append(mcmc_table_subset.T[4][idx-1])
## Use only the mocks that are in the random sample of 100
# Count the first 20 + 22nd + 123-131 columns of general information from
# mock catalog (halo + rsd)
idx_arr = np.insert(np.linspace(0,20,21), len(np.linspace(0,20,21)), (22, 123,
    124, 125, 126, 127, 128, 129, 130, 131)).astype(int)
names_arr = [x for x in gal_group_df.columns.values[idx_arr]]
# Add the per-mock stellar mass, group id and group type columns.
for idx in mcmc_table_pctl.mock_num.unique():
    names_arr.append('{0}_y'.format(idx))
    names_arr.append('groupid_{0}'.format(idx))
    names_arr.append('g_galtype_{0}'.format(idx))
names_arr = np.array(names_arr)
gal_group_df_subset = gal_group_df[names_arr]
# Renaming the "1_y" column kept from line 1896 because of case where it was
# also in mcmc_table_ptcl.mock_num and was selected twice
gal_group_df_subset.columns.values[30] = "behroozi_bf"
# Strip the '_y' suffix so columns are addressable by mock number alone.
for idx in mcmc_table_pctl.mock_num.unique():
    gal_group_df_subset = gal_group_df_subset.rename(columns=\
        {'{0}_y'.format(idx):'{0}'.format(idx)})
# Best-fit model: SMFs, central masses, red fractions and sigma spreads.
maxis_bf_red, phi_bf_red, maxis_bf_blue, phi_bf_blue, cen_gals_red, \
    cen_halos_red, cen_gals_blue, cen_halos_blue, f_red_cen_red, \
    f_red_cen_blue, std_bf_red, std_bf_blue, std_cen_bf_red, \
    std_cen_bf_blue = get_best_fit_model(bf_params, bf_randint)
# Median SMHM relation of the best fit, binned in halo mass.
x_bf_red,y_bf_red,y_std_bf_red,y_std_err_bf_red,x_red_data,y_red_data = \
    Stats_one_arr(cen_halos_red,cen_gals_red,base=0.4,bin_statval='center',
    arr_digit='y',statfunc=np.nanmedian)
x_bf_blue,y_bf_blue,y_std_bf_blue,y_std_err_bf_blue,x_blue_data,y_blue_data = \
    Stats_one_arr(cen_halos_blue,cen_gals_blue,base=0.4,bin_statval='center',
    arr_digit='y',statfunc=np.nanmedian)
fig1 = plt.figure(figsize=(10,10))
plt.plot(x_bf_red,y_bf_red,color='darkred',lw=3,label='Best-fit',zorder=10)
plt.plot(x_bf_blue,y_bf_blue,color='darkblue',lw=3,
    label='Best-fit',zorder=10)
### Errors not using range of model lines but using std calculated
plt.fill_between(x_bf_red, y_bf_red+y_std_bf_red, y_bf_red-y_std_bf_red,
    color='indianred', alpha=0.6)
plt.fill_between(x_bf_blue, y_bf_blue+y_std_bf_blue, y_bf_blue-y_std_bf_blue,
    color='cornflowerblue', alpha=0.6)
plt.xlabel(r'\boldmath$\log_{10}\ M_{h} \left[\mathrm{M_\odot}\, \mathrm{h}^{-1} \right]$',fontsize=25)
plt.ylabel(r'\boldmath$\log_{10}\ M_\star \left[\mathrm{M_\odot}\, \mathrm{h}^{-1} \right]$',fontsize=25)
plt.show()
# Width of one stellar-mass bin in each sigma measurement.
bw_std_red = std_cen_bf_red[1] - std_cen_bf_red[0]
bw_std_blue = std_cen_bf_blue[1] - std_cen_bf_blue[0]
# Edges of the 4th bin (index 3) around its centre.
blue_min = std_cen_bf_blue[3] - 0.5*bw_std_blue
blue_max = std_cen_bf_blue[3] + 0.5*bw_std_blue
red_min = std_cen_bf_red[3] - 0.5*bw_std_red
red_max = std_cen_bf_red[3] + 0.5*bw_std_red
# Halo masses of centrals whose stellar mass falls in the chosen bin
# (hard-coded edges; presumably match the computed bin above — verify).
red_halos_in_bin = []
for idx, value in enumerate(cen_gals_red):
    if value >= 10.16 and value <= 10.68:
        red_halos_in_bin.append(cen_halos_red[idx])
blue_halos_in_bin = []
for idx, value in enumerate(cen_gals_blue):
    if value >= 9.86 and value <= 10.28:
        blue_halos_in_bin.append(cen_halos_blue[idx])
fig2 = plt.figure(figsize=(10,10))
plt.hist(red_halos_in_bin, histtype='step', lw=3, color='r', ls='-',
    label='10.16 - 10.68')
plt.hist(blue_halos_in_bin, histtype='step', lw=3, color='b', ls='-',
    label='9.86 - 10.28')
plt.xlabel(r'\boldmath$\log_{10}\ M_{h} \left[\mathrm{M_\odot}\, \mathrm{h}^{-1} \right]$',fontsize=25)
plt.title('Distribution of halo masses in specified stellar mass bin')
plt.legend()
plt.show()
#! In all bins of M* what does the distribution of M_h look like?
# Convert each sigma stellar-mass bin centre into (min, max) edges.
red_min_arr = []
red_max_arr = []
for i in range(len(std_cen_bf_red)):
    red_min = std_cen_bf_red[i] - 0.5*bw_std_red
    red_max = std_cen_bf_red[i] + 0.5*bw_std_red
    red_min_arr.append(red_min)
    red_max_arr.append(red_max)
blue_min_arr = []
blue_max_arr = []
for i in range(len(std_cen_bf_blue)):
    blue_min = std_cen_bf_blue[i] - 0.5*bw_std_blue
    blue_max = std_cen_bf_blue[i] + 0.5*bw_std_blue
    blue_min_arr.append(blue_min)
    blue_max_arr.append(blue_max)
# Group the central halo masses by stellar-mass bin.
red_halos = []
bin_counter = 0
while bin_counter < len(red_min_arr):
    red_halos_in_bin_idx = []
    for idx, value in enumerate(cen_gals_red):
        if value >= red_min_arr[bin_counter] and value < red_max_arr[bin_counter]:
            red_halos_in_bin_idx.append(cen_halos_red[idx])
    red_halos.append(red_halos_in_bin_idx)
    bin_counter += 1
blue_halos = []
bin_counter = 0
while bin_counter < len(blue_min_arr):
    blue_halos_in_bin_idx = []
    for idx, value in enumerate(cen_gals_blue):
        if value >= blue_min_arr[bin_counter] and value < blue_max_arr[bin_counter]:
            blue_halos_in_bin_idx.append(cen_halos_blue[idx])
    blue_halos.append(blue_halos_in_bin_idx)
    bin_counter += 1
# Overlay normalised halo-mass histograms, one colour per bin; solid
# lines for red centrals, dashed for blue.
fig2 = plt.figure(figsize=(10,10))
colour_arr = ['indianred', 'darkorange', 'gold', 'forestgreen', 'cornflowerblue']
for i in range(len(red_halos)):
    plt.hist(red_halos[i], histtype='step', lw=3, color=colour_arr[i], ls='-',
        label='R: {0} - {1}'.format(np.round(red_min_arr[i],2), np.round(red_max_arr[i],2)), density=True)
    plt.hist(blue_halos[i], histtype='step', lw=3, color=colour_arr[i], ls='--',
        label='B: {0} - {1}'.format(np.round(blue_min_arr[i],2), np.round(blue_max_arr[i],2)), density=True)
plt.xlabel(r'\boldmath$\log_{10}\ M_{h} \left[\mathrm{M_\odot}\, \mathrm{h}^{-1} \right]$',fontsize=25)
plt.title('Distribution of halo masses in stellar mass bins for galaxies around red and blue group centrals')
plt.legend()
plt.show()
#! In much narrower bins of M* what do the distributions of M_h look like?
# Nine equal-width stellar-mass bins between 8.6 and 11.
red_arr = np.linspace(8.6, 11, 10)
blue_arr = np.linspace(8.6, 11, 10)
red_halos = []
bin_counter = 0
while bin_counter < len(red_arr):
    # The last of the 10 edges starts no bin; stop after 9 bins.
    if bin_counter == 9:
        break
    red_halos_in_bin_idx = []
    for idx, value in enumerate(cen_gals_red):
        if value >= red_arr[bin_counter] and value < red_arr[bin_counter+1]:
            red_halos_in_bin_idx.append(cen_halos_red[idx])
    red_halos.append(red_halos_in_bin_idx)
    bin_counter += 1
blue_halos = []
bin_counter = 0
while bin_counter < len(blue_arr):
    if bin_counter == 9:
        break
    blue_halos_in_bin_idx = []
    for idx, value in enumerate(cen_gals_blue):
        if value >= blue_arr[bin_counter] and value < blue_arr[bin_counter+1]:
            blue_halos_in_bin_idx.append(cen_halos_blue[idx])
    blue_halos.append(blue_halos_in_bin_idx)
    bin_counter += 1
# Plot every other bin to keep the figure readable; solid lines for
# red centrals, dashed for blue.
fig2 = plt.figure(figsize=(10,10))
colour_arr = ['indianred', 'darkorange', 'gold', 'yellowgreen', 'forestgreen','darkturquoise', 'cornflowerblue', 'mediumorchid', 'orchid']
for i in range(len(red_halos)):
    if i in [0,2,4,6,8]:
        plt.hist(red_halos[i], histtype='step', lw=3, color=colour_arr[i], ls='-',
            label='R: {0} - {1}'.format(np.round(red_arr[i],2), np.round(red_arr[i+1],2)), density=True)
        plt.hist(blue_halos[i], histtype='step', lw=3, color=colour_arr[i], ls='--',
            label='B: {0} - {1}'.format(np.round(blue_arr[i],2), np.round(blue_arr[i+1],2)), density=True)
plt.xlabel(r'\boldmath$\log_{10}\ M_{h} \left[\mathrm{M_\odot}\, \mathrm{h}^{-1} \right]$',fontsize=25)
plt.title('Distribution of halo masses in stellar mass bins for galaxies around red and blue group centrals')
plt.legend()
plt.show()
################################################################################
############ Experimenting with new metric for second observable ###############
################################################################################
from scipy.stats import binned_statistic as bs
from scipy.stats import normaltest as nt
from cosmo_utils.utils import work_paths as cwpaths
import matplotlib.pyplot as plt
from matplotlib import rc
import pandas as pd
import numpy as np
import scipy as sp
# NOTE(review): this `iqr` import is shadowed further below by a local
# `def iqr(...)` of the same name.
from scipy.stats import iqr
import random
import math
import os
# Global matplotlib styling: LaTeX-rendered Helvetica text (with amsmath
# available in the preamble) plus thicker axes and major ticks.
rc('font', **{'family': 'sans-serif', 'sans-serif': ['Helvetica']}, size=25)
rc('text', usetex=True)
rc('text.latex', preamble=[r"\usepackage{amsmath}"])
rc('axes', linewidth=2)
rc('xtick.major', width=2, size=7)
rc('ytick.major', width=2, size=7)
def read_mock_catl(filename, catl_format='.hdf5'):
    """
    Read an ECO/RESOLVE (mock) catalogue from disk.

    Parameters
    ----------
    filename: string
        path and name of the ECO/RESOLVE catalogue to read
    catl_format: string, optional (default = '.hdf5')
        type of file to read. Only '.hdf5' (HDF5 format) is supported.

    Returns
    -------
    mock_pd: pandas DataFrame
        DataFrame with galaxy/group information (positions, velocities,
        halo and group columns).

    Raises
    ------
    ValueError
        If `filename` does not exist, or `catl_format` is not '.hdf5'.
    """
    # Guard clauses: fail fast on a missing file or an unsupported format.
    if not os.path.exists(filename):
        raise ValueError('`filename`: {0} NOT FOUND! Exiting..'.format(filename))
    if catl_format != '.hdf5':
        raise ValueError('`catl_format` ({0}) not supported! Exiting...'.format(catl_format))
    return pd.read_hdf(filename)
def assign_colour_label_data(catl):
    """
    Assign a red/blue colour label ('R'/'B') to every galaxy in the catalog.

    The dividing u-r colour follows Moffett et al. 2015 (their equation 1):
    a fixed threshold of 1.457 below log M* = 9.1, a sloped divider
    0.24*logM* - 0.7 between 9.1 and 10.1, and a fixed 1.7 at or above 10.1.

    Parameters
    ----------
    catl: pandas Dataframe
        Data catalog with `logmstar` and `modelu_rcorr` columns.

    Returns
    ---------
    catl: pandas Dataframe
        The same catalog with a new 'colour_label' column.
    """
    logmstar_arr = catl.logmstar.values
    u_r_arr = catl.modelu_rcorr.values
    colour_label_arr = np.empty(len(catl), dtype='str')
    for idx, mstar in enumerate(logmstar_arr):
        # Pick the red/blue divider for this galaxy's stellar mass.
        if mstar <= 9.1:
            divider = 1.457
        elif mstar < 10.1:
            divider = 0.24 * mstar - 0.7
        else:
            divider = 1.7
        colour_label_arr[idx] = 'R' if u_r_arr[idx] > divider else 'B'
    catl['colour_label'] = colour_label_arr
    return catl
def read_data_catl(path_to_file, survey):
    """
    Reads survey catalog from file.

    Parameters
    ----------
    path_to_file: `string`
        Path to survey catalog file
    survey: `string`
        Name of survey: 'eco', 'resolvea' or 'resolveb'

    Returns
    ---------
    catl: `pandas.DataFrame`
        Survey catalog with grpcz, abs rmag and stellar mass limits
    volume: `float`
        Volume of survey [Mpc/h]^3 (without buffer)
    z_median: `float`
        Median redshift of survey

    Raises
    ------
    ValueError
        If `survey` is not one of 'eco', 'resolvea', 'resolveb'.
        (Previously an unknown survey fell through and crashed with a
        NameError on `catl` at the return statement.)

    Notes
    -----
    The former `mf_type` branching ('smf' vs 'bmf') applied *identical* cuts
    in every survey branch and has been collapsed.
    """
    if survey not in ('eco', 'resolvea', 'resolveb'):
        msg = '`survey` ({0}) not supported! Exiting...'.format(survey)
        raise ValueError(msg)
    if survey == 'eco':
        # New catalog with group finder run on subset after applying
        # M* and cz cuts. 6456 galaxies survive the cuts below.
        eco_buff = read_mock_catl(path_to_file)
        catl = eco_buff.loc[(eco_buff.grpcz.values >= 3000) &
            (eco_buff.grpcz.values <= 7000) &
            (eco_buff.absrmag.values <= -17.33)]
        volume = 151829.26 # Survey volume without buffer [Mpc/h]^3
        # volume = 192351.36 # Survey volume with buffer [Mpc/h]^3
        z_median = np.median(catl.grpcz.values) / (3 * 10**5)
    else:
        columns = ['name', 'radeg', 'dedeg', 'cz', 'grpcz', 'absrmag',
                   'logmstar', 'logmgas', 'grp', 'grpn', 'grpnassoc', 'logmh',
                   'logmh_s', 'fc', 'grpmb', 'grpms', 'f_a', 'f_b']
        # 2286 galaxies
        resolve_live18 = pd.read_csv(path_to_file, delimiter=",", header=0,
                                     usecols=columns)
        if survey == 'resolvea':
            catl = resolve_live18.loc[(resolve_live18.f_a.values == 1) &
                (resolve_live18.grpcz.values >= 4500) &
                (resolve_live18.grpcz.values <= 7000) &
                (resolve_live18.absrmag.values <= -17.33)]
            volume = 13172.384 # Survey volume without buffer [Mpc/h]^3
        else:  # 'resolveb'
            # 487 - cz, 369 - grpcz; note the shallower -17 magnitude cut.
            catl = resolve_live18.loc[(resolve_live18.f_b.values == 1) &
                (resolve_live18.grpcz.values >= 4500) &
                (resolve_live18.grpcz.values <= 7000) &
                (resolve_live18.absrmag.values <= -17)]
            volume = 4709.8373 # *2.915 #Survey volume without buffer [Mpc/h]^3
        z_median = np.median(resolve_live18.grpcz.values) / (3 * 10**5)
    return catl, volume, z_median
# NOTE(review): `global` at module top level is a no-op; these lines only
# signal that functions elsewhere may rely on these names as globals.
global survey
global path_to_figures
global gal_group_df_subset
# Resolve project directory layout from cookiecutter conventions.
dict_of_paths = cwpaths.cookiecutter_paths()
path_to_raw = dict_of_paths['raw_dir']
path_to_proc = dict_of_paths['proc_dir']
path_to_interim = dict_of_paths['int_dir']
path_to_figures = dict_of_paths['plot_dir']
path_to_external = dict_of_paths['ext_dir']
path_to_data = dict_of_paths['data_dir']
# Run configuration: machine, mass-function type and survey selection.
machine = 'mac'
mf_type = 'smf'
survey = 'eco'
if survey == 'eco':
    # catl_file = path_to_raw + "eco/eco_all.csv"
    ## New catalog with group finder run on subset after applying M* and cz cuts
    # catl_file = path_to_proc + "gal_group_eco_data.hdf5"
    catl_file = path_to_proc + "gal_group_eco_data_vol_update.hdf5"
    path_to_mocks = path_to_data + 'mocks/m200b/eco/'
elif survey == 'resolvea' or survey == 'resolveb':
    catl_file = path_to_raw + "RESOLVE_liveJune2018.csv"
catl, volume, z_median = read_data_catl(catl_file, survey)
catl = assign_colour_label_data(catl)
# Rescale all log-masses by 1/2.041 -- presumably an h-correction
# (0.7**-2 ~ 2.041, i.e. h=0.7 -> h=1 stellar masses); TODO confirm against
# the rest of the pipeline.
catl.logmstar = np.log10((10**catl.logmstar) / 2.041)
catl.M_group = np.log10((10**catl.M_group) / 2.041)
catl.logmh_s = np.log10((10**catl.logmh_s) / 2.041)
catl.logmh = np.log10((10**catl.logmh) / 2.041)
# Group IDs whose identified central (g_galtype == 1) is red / blue.
red_subset_grpids = np.unique(catl.groupid.loc[
    (catl.colour_label == 'R') & (catl.g_galtype == 1)].values)
blue_subset_grpids = np.unique(catl.groupid.loc[
    (catl.colour_label == 'B') & (catl.g_galtype == 1)].values)

def _collect_group_kinematics(catl, grpids):
    """Collect per-group kinematic quantities for all multi-member groups.

    For each group ID in `grpids` (single-member groups are only counted),
    record the group halo mass, the central's cz, the rounded group-mean cz,
    and — one entry per satellite — the satellite-minus-central velocity
    offset together with the central's stellar mass and the group ID.

    Parameters
    ----------
    catl: pandas DataFrame
        Catalog with `groupid`, `logmh`, `logmstar`, `g_galtype`, `cz`.
    grpids: array_like
        Group IDs to process.

    Returns
    -------
    tuple
        (singleton_count, deltav_arr, cen_stellar_mass_arr, grpid_arr,
         cen_cz_arr, mean_cz_arr, grp_halo_mass_arr)
    """
    singleton_count = 0
    deltav_arr = []
    cen_stellar_mass_arr = []
    grpid_arr = []
    cen_cz_arr = []
    mean_cz_arr = []
    grp_halo_mass_arr = []
    for key in grpids:
        group = catl.loc[catl.groupid == key]
        if len(group) == 1:
            singleton_count += 1
            continue
        grp_halo_mass = np.unique(group.logmh.values)[0]
        cen_stellar_mass = group.logmstar.loc[
            group.g_galtype.values == 1].values[0]
        # Different velocity definitions
        mean_cz_grp = np.round(np.mean(group.cz.values), 2)
        cen_cz_grp = group.cz.loc[group.g_galtype == 1].values[0]
        # Velocity offset of every member relative to the central; the
        # central itself (offset exactly 0) is excluded below.
        deltav = group.cz.values - cen_cz_grp
        grp_halo_mass_arr.append(grp_halo_mass)
        cen_cz_arr.append(cen_cz_grp)
        mean_cz_arr.append(mean_cz_grp)
        for val in deltav:
            if val != 0:
                deltav_arr.append(val)
                cen_stellar_mass_arr.append(cen_stellar_mass)
                grpid_arr.append(key)
    return (singleton_count, deltav_arr, cen_stellar_mass_arr, grpid_arr,
            cen_cz_arr, mean_cz_arr, grp_halo_mass_arr)

# The red, blue and "all centrals" passes previously duplicated the same
# loop three times; they now share the helper above.
(red_singleton_counter, red_deltav_arr, red_cen_stellar_mass_arr,
    red_grpid_arr, red_cen_cz_arr, red_mean_cz_arr,
    red_grp_halo_mass_arr) = _collect_group_kinematics(catl, red_subset_grpids)
(blue_singleton_counter, blue_deltav_arr, blue_cen_stellar_mass_arr,
    blue_grpid_arr, blue_cen_cz_arr, blue_mean_cz_arr,
    blue_grp_halo_mass_arr) = _collect_group_kinematics(catl, blue_subset_grpids)
# Same statistics for ALL groups with an identified central, regardless of
# the central's colour.
all_grpids_with_cen = np.unique(catl.groupid.loc[(catl.g_galtype == 1)].values)
(singleton_counter, deltav_arr, cen_stellar_mass_arr, grpid_arr,
    cen_cz_arr, mean_cz_arr,
    grp_halo_mass_arr) = _collect_group_kinematics(catl, all_grpids_with_cen)
##! Plot of new metric but with trend line fit to ALL groups and not split by red
##! and blue
plt.scatter(red_cen_stellar_mass_arr, np.log10(np.abs(red_deltav_arr)), c='indianred')
plt.scatter(blue_cen_stellar_mass_arr, np.log10(np.abs(blue_deltav_arr)), c='cornflowerblue')
# plt.scatter(cen_stellar_mass_arr, np.log10(np.abs(deltav_arr)), c='lightgray')
# Degree-1 (linear) fit of log10(|deltav|) vs central stellar mass over ALL
# groups; the polynomial `p` is reused further below to compute fractional
# deviations from this trend.
z = np.polyfit(cen_stellar_mass_arr, np.log10(np.abs(deltav_arr)), 1)
p = np.poly1d(z)
plt.plot(cen_stellar_mass_arr,p(cen_stellar_mass_arr),"k--")
# plt.yscale('log')
plt.xlabel(r'\boldmath$\log_{10}\ M_{\star , cen} \left[\mathrm{M_\odot}\, \mathrm{h}^{-1} \right]$', fontsize=30)
plt.ylabel(r'\boldmath$ log_{10}({| \Delta{v} |}) \left[\mathrm{km/s} \right]$', fontsize=30)
plt.show()
##! Plot of fractional difference in new metric between points and trend line
# The fit `p` was made to log10(|deltav|), so un-log it (10**p) before
# comparing against the raw |deltav| values.
red_frac_diff_arr = [
    (np.abs(dv) - (10 ** p(mstar))) / (10 ** p(mstar))
    for mstar, dv in zip(red_cen_stellar_mass_arr, red_deltav_arr)
]
blue_frac_diff_arr = [
    (np.abs(dv) - (10 ** p(mstar))) / (10 ** p(mstar))
    for mstar, dv in zip(blue_cen_stellar_mass_arr, blue_deltav_arr)
]
plt.scatter(red_cen_stellar_mass_arr, red_frac_diff_arr, c='indianred')
plt.scatter(blue_cen_stellar_mass_arr, blue_frac_diff_arr, c='cornflowerblue')
# plt.plot(cen_stellar_mass_arr,p(cen_stellar_mass_arr),"k--")
# plt.yscale('log')
plt.xlabel(r'\boldmath$\log_{10}\ M_{\star , cen} \left[\mathrm{M_\odot}\, \mathrm{h}^{-1} \right]$', fontsize=30)
plt.ylabel(r'\boldmath$ ({| \Delta{v} | - \Delta{v}_{fit}})/\Delta{v}_{fit} \left[\mathrm{km/s} \right]$', fontsize=30)
plt.show()
##! Taking the mean of the fractional difference in bins of central stellar mass
## Original bins but more bins for red than blue
blue_stellar_mass_bins = np.linspace(8.6,10.7,6)
red_stellar_mass_bins = np.linspace(8.6,11.2,10)
## Trying with same bins for both pops
# NOTE(review): the two assignments below immediately override the bins
# defined above; the "original" binning is dead code kept for reference.
red_stellar_mass_bins = np.arange(8.75, 11.25, 0.5)
blue_stellar_mass_bins = np.arange(8.75, 11.25, 0.5)
centers_red = 0.5 * (red_stellar_mass_bins[1:] + \
    red_stellar_mass_bins[:-1])
centers_blue = 0.5 * (blue_stellar_mass_bins[1:] + \
    blue_stellar_mass_bins[:-1])
# Binned mean of the fractional deviation from the global |deltav| fit.
stats_red = bs(red_cen_stellar_mass_arr, red_frac_diff_arr, statistic='mean',
    bins=red_stellar_mass_bins)
stats_blue = bs(blue_cen_stellar_mass_arr, blue_frac_diff_arr, statistic='mean',
    bins=blue_stellar_mass_bins)
plt.scatter(centers_red, stats_red[0], c='indianred', s=200, marker='*')
plt.scatter(centers_blue, stats_blue[0], c='cornflowerblue', s=200, marker='*')
plt.ylabel(r'\boldmath$ \overline{({| \Delta{v} | - \Delta{v}_{fit}})/\Delta{v}_{fit}} \left[\mathrm{km/s} \right]$', fontsize=30)
plt.xlabel(r'\boldmath$\log_{10}\ M_{\star , cen} \left[\mathrm{M_\odot}\, \mathrm{h}^{-1} \right]$', fontsize=30)
plt.show()
##! Look at distribution of original deltav values in red and blue bins of mass
blue_stellar_mass_bins = np.linspace(8.6,10.7,6)
red_stellar_mass_bins = np.linspace(8.6,11.2,6)
centers_red = 0.5 * (red_stellar_mass_bins[1:] + \
    red_stellar_mass_bins[:-1])
centers_blue = 0.5 * (blue_stellar_mass_bins[1:] + \
    blue_stellar_mass_bins[:-1])
# binned_statistic defaults to statistic='mean'; stats_*[2] holds the 1-based
# bin index of every sample, which is what is used below.
stats_red = bs(red_cen_stellar_mass_arr, red_deltav_arr,
    bins=red_stellar_mass_bins)
stats_blue = bs(blue_cen_stellar_mass_arr, blue_deltav_arr,
    bins=blue_stellar_mass_bins)
data = pd.DataFrame(data=zip(red_deltav_arr, stats_red[2]), columns=['deltav','bin_num'])
color_arr = ['r','g','b','c','m','y']
for idx in range(1,7):
    subset = data.deltav.loc[data.bin_num == idx]
    print("Num of red things in bin {0}: {1}".format(idx, len(subset)))
    # scipy's normaltest needs a reasonable sample size; nt(...)[1] is the
    # p-value of the normality test, shown in the legend alongside the bin.
    if len(subset) > 8:
        pval = nt(subset)[1]
        plt.hist(subset, histtype='step', color=color_arr[idx-1], label={pval, idx})
plt.legend()
plt.show()
data = pd.DataFrame(data=zip(blue_deltav_arr, stats_blue[2]), columns=['deltav','bin_num'])
color_arr = ['r','g','b','c','m','y']
for idx in range(1,7):
    subset = data.deltav.loc[data.bin_num == idx]
    print("Num of blue things in bin {0}: {1}".format(idx, len(subset)))
    if len(subset) > 8:
        pval = nt(subset)[1]
        plt.hist(subset, histtype='step', color=color_arr[idx-1], label={pval, idx})
plt.legend()
plt.show()
##! Look at location of points of original central stellar mass in red and blue bins of mass
data_r = pd.DataFrame(data=zip(red_cen_stellar_mass_arr, red_deltav_arr, stats_red[2]), columns=['mcen','deltav','bin_num'])
data_b = pd.DataFrame(data=zip(blue_cen_stellar_mass_arr, blue_deltav_arr, stats_blue[2]), columns=['mcen','deltav','bin_num'])
stats_red = bs(red_cen_stellar_mass_arr, red_deltav_arr, statistic='mean',
    bins=red_stellar_mass_bins)
stats_blue = bs(blue_cen_stellar_mass_arr, blue_deltav_arr, statistic='mean',
    bins=blue_stellar_mass_bins)
for idx in range(1,7):
    subset_r = data_r.loc[data_r.bin_num == idx]
    print("Num of red things in bin {0}: {1}".format(idx, len(subset_r)))
    plt.scatter(subset_r.mcen.values, subset_r.deltav.values, c='indianred', alpha=0.4)
    # Vertical lines mark the edges of the current bin.
    plt.vlines(red_stellar_mass_bins[idx-1],min(subset_r.deltav.values),max(subset_r.deltav.values),colors='r')
    if idx != 6:
        plt.vlines(red_stellar_mass_bins[idx],min(subset_r.deltav.values),max(subset_r.deltav.values),colors='r')
    plt.scatter(centers_red, stats_red[0], marker='*', s=200, c='r')
    plt.scatter(centers_blue, stats_blue[0], marker='*', s=200, c='b')
    subset_b = data_b.loc[data_b.bin_num == idx]
    print("Num of blue things in bin {0}: {1}".format(idx, len(subset_b)))
    # Blue points are shifted down by 1000 km/s so the two populations do not
    # overlap in the same panel.
    plt.scatter(subset_b.mcen.values, subset_b.deltav.values-1000, c='cornflowerblue', alpha=0.4)
    plt.vlines(blue_stellar_mass_bins[idx-1],min(subset_b.deltav.values-1000),max(subset_b.deltav.values-1000),colors='b')
    if idx != 6:
        plt.vlines(blue_stellar_mass_bins[idx],min(subset_b.deltav.values-1000),max(subset_b.deltav.values-1000),colors='b')
plt.show()
##! Experiment with statistic to use on non normal |deltav| measurements
def pop_standard_dev(array):
    """Population-style scatter of `array` about its *median*.

    Computes sqrt(sum((x - median)^2) / N): a median-centred analogue of the
    population standard deviation, used here as a more outlier-robust spread
    estimate for the non-normal |deltav| distributions.

    Parameters
    ----------
    array: array_like
        Sample values.

    Returns
    -------
    float
        Median-centred population scatter.
    """
    # The original computed an unused `mean` and summed in a Python loop;
    # vectorized here with identical numerics.
    values = np.asarray(array, dtype=float)
    return np.sqrt(np.sum((values - np.median(values)) ** 2) / len(values))
def sam_standard_dev(array):
    """Sample-style scatter of `array` about its *median*.

    Computes sqrt(sum((x - median)^2) / (N - 1)): a median-centred analogue
    of the sample standard deviation (Bessel-corrected denominator).

    Parameters
    ----------
    array: array_like
        Sample values; must contain at least two elements (N - 1 divisor).

    Returns
    -------
    float
        Median-centred sample scatter.
    """
    # The original computed an unused `mean` and summed in a Python loop;
    # vectorized here with identical numerics.
    values = np.asarray(array, dtype=float)
    return np.sqrt(np.sum((values - np.median(values)) ** 2) / (len(values) - 1))
def iqr(array):
    """Interquartile range (75th minus 25th percentile) of `array`.

    Bug fix: the original body was ``return iqr(array)`` — it called *itself*
    (this def shadows the module-level ``from scipy.stats import iqr``), so
    any call recursed until RecursionError. The IQR is now computed directly;
    numpy's default linear percentile interpolation matches scipy.stats.iqr's
    default behaviour.
    """
    q75, q25 = np.percentile(array, [75, 25])
    return q75 - q25
def mad(array):
    """Median absolute deviation of `array` about its median."""
    values = np.asarray(array)
    # Single vectorized pass instead of building a deviation list in a loop.
    return np.median(np.abs(values - np.median(values)))
# Binned median-absolute-deviation of |deltav| as a robust spread estimate.
stats_red = bs(red_cen_stellar_mass_arr, np.abs(red_deltav_arr), statistic=mad,
    bins=red_stellar_mass_bins)
stats_blue = bs(blue_cen_stellar_mass_arr, np.abs(blue_deltav_arr), statistic=mad,
    bins=blue_stellar_mass_bins)
plt.scatter(centers_red, stats_red[0], c='indianred')
plt.scatter(centers_blue, stats_blue[0], c='cornflowerblue')
plt.ylabel(r'\boldmath$ | \Delta{v} | \left[\mathrm{km/s} \right]$', fontsize=30)
plt.xlabel(r'\boldmath$\log_{10}\ M_{\star , cen} \left[\mathrm{M_\odot}\, \mathrm{h}^{-1} \right]$', fontsize=30)
plt.show()
##! Experiment with number of bins for original deltav metric
## 23 bins
# h_r = 2*(iqr(red_cen_stellar_mass_arr)/(len(red_cen_stellar_mass_arr)**(1/3)))
# k_r = math.ceil((max(red_cen_stellar_mass_arr) - min(red_cen_stellar_mass_arr))/h_r)
# ## 11 bins
# h_b= 2*(iqr(blue_cen_stellar_mass_arr)/(len(blue_cen_stellar_mass_arr)**(1/3)))
# k_b = math.ceil((max(blue_cen_stellar_mass_arr) - min(blue_cen_stellar_mass_arr))/h_b)
# NOTE(review): the first two bin definitions are immediately overridden by
# the two lines after them; both populations end up on the same 6-edge grid.
red_stellar_mass_bins = np.linspace(8.6,11.2,10)
blue_stellar_mass_bins = np.linspace(8.6,10.7,6)
red_stellar_mass_bins = np.linspace(8.6,11,6)
blue_stellar_mass_bins = np.linspace(8.6,11,6)
# Binned standard deviation of deltav (velocity-dispersion proxy).
stats_red = bs(red_cen_stellar_mass_arr, red_deltav_arr, statistic='std',
    bins=red_stellar_mass_bins)
stats_blue = bs(blue_cen_stellar_mass_arr, blue_deltav_arr, statistic='std',
    bins=blue_stellar_mass_bins)
# stats_*[1] are the bin edges returned by binned_statistic.
centers_red = 0.5 * (stats_red[1][1:] + \
    stats_red[1][:-1])
centers_blue = 0.5 * (stats_blue[1][1:] + \
    stats_blue[1][:-1])
plt.scatter(centers_red, stats_red[0], c='indianred')
plt.scatter(centers_blue, stats_blue[0], c='cornflowerblue')
plt.xlabel(r'\boldmath$\log_{10}\ M_{\star , cen} \left[\mathrm{M_\odot}\, \mathrm{h}^{-1} \right]$', fontsize=30)
plt.ylabel(r'\boldmath$\sigma \left[\mathrm{km/s} \right]$', fontsize=30)
plt.show()
| 34.873876
| 138
| 0.63592
| 22,037
| 147,377
| 3.988474
| 0.043926
| 0.035418
| 0.022937
| 0.020309
| 0.922498
| 0.901882
| 0.883894
| 0.866953
| 0.852766
| 0.846793
| 0
| 0.051982
| 0.237425
| 147,377
| 4,225
| 139
| 34.88213
| 0.730091
| 0.259125
| 0
| 0.803742
| 0
| 0.007833
| 0.080141
| 0.012941
| 0
| 0
| 0
| 0.001183
| 0
| 1
| 0.02698
| false
| 0
| 0.013925
| 0.000435
| 0.067885
| 0.005657
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
40ff6e9eeb699752f1f079da84dcd4b6fdaa6594
| 11,439
|
py
|
Python
|
pyswmm/lidunits.py
|
kmmacro/pyswmm
|
028fc9a2c731bed4129ee100e295900d989867fd
|
[
"BSD-2-Clause"
] | 186
|
2015-06-12T13:24:02.000Z
|
2022-03-24T10:19:43.000Z
|
pyswmm/lidunits.py
|
kmmacro/pyswmm
|
028fc9a2c731bed4129ee100e295900d989867fd
|
[
"BSD-2-Clause"
] | 256
|
2015-02-16T02:47:20.000Z
|
2022-03-18T16:08:09.000Z
|
pyswmm/lidunits.py
|
kmmacro/pyswmm
|
028fc9a2c731bed4129ee100e295900d989867fd
|
[
"BSD-2-Clause"
] | 99
|
2015-01-11T17:06:44.000Z
|
2022-03-15T09:26:44.000Z
|
# -*- coding: utf-8 -*-
# -----------------------------------------------------------------------------
# Copyright (c) 2018 Jennifer Wu
#
# Licensed under the terms of the BSD2 License
# See LICENSE.txt for details
# -----------------------------------------------------------------------------
from pyswmm.toolkitapi import LidLayers, LidResults
def _flux_rate(model, subcatchment, lid_index, layer):
"""
Get lid net inflow - outflow from previous time step for each lid layer
ONLY FOR for surface, soil, storage, pave
:param int layerIndex: layer type (toolkitapi.LidLayers member variable)
:return: Parameter Value
:rtype: double
"""
return model.getLidUFluxRates(subcatchment,
lid_index,
layer)
class Surface(object):
    """Surface-layer simulation results for a single LID unit."""

    def __init__(self, model, lidunit):
        self._model = model
        self._lidunit = lidunit
        self._subcatchmentid = lidunit._subcatchmentid
        self._lidid = lidunit._lidid

    def _result(self, result):
        # Every surface result uses the same three-argument lookup; only the
        # LidResults member differs.
        return self._model.getLidUResult(self._subcatchmentid,
                                         self._lidid,
                                         result.value)

    @property
    def depth(self):
        """Depth of ponded water on the surface layer.

        :return: Parameter Value
        :rtype: double
        """
        return self._result(LidResults.surfDepth)

    @property
    def inflow(self):
        """Precipitation plus run-on to the LID unit.

        :return: Parameter Value
        :rtype: double
        """
        return self._result(LidResults.surfInflow)

    @property
    def infiltration(self):
        """Infiltration rate from the surface layer.

        :return: Parameter Value
        :rtype: double
        """
        return self._result(LidResults.surfInfil)

    @property
    def evaporation(self):
        """Evaporation rate from the surface layer.

        :return: Parameter Value
        :rtype: double
        """
        return self._result(LidResults.surfEvap)

    @property
    def outflow(self):
        """Outflow from the surface layer.

        :return: Parameter Value
        :rtype: double
        """
        return self._result(LidResults.surfOutflow)

    @property
    def flux_rate(self):
        """Flux rate of the surface layer.

        :return: Parameter Value
        :rtype: double
        """
        return _flux_rate(self._model, self._subcatchmentid,
                          self._lidid, LidLayers.surface.value)
class Pavement(object):
    """Pavement-layer simulation results for a single LID unit."""

    def __init__(self, model, lidunit):
        self._model = model
        self._lidunit = lidunit
        self._subcatchmentid = lidunit._subcatchmentid
        self._lidid = lidunit._lidid

    def _result(self, result):
        # Every pavement result uses the same three-argument lookup; only
        # the LidResults member differs.
        return self._model.getLidUResult(self._subcatchmentid,
                                         self._lidid,
                                         result.value)

    @property
    def depth(self):
        """Depth of water in the porous pavement layer.

        :return: Parameter Value
        :rtype: double
        """
        return self._result(LidResults.paveDepth)

    @property
    def evaporation(self):
        """Evaporation rate from the pavement layer.

        :return: Parameter Value
        :rtype: double
        """
        return self._result(LidResults.paveEvap)

    @property
    def percolation(self):
        """Percolation rate from the pavement layer.

        :return: Parameter Value
        :rtype: double
        """
        return self._result(LidResults.pavePerc)

    @property
    def flux_rate(self):
        """Flux rate of the pavement layer.

        :return: Parameter Value
        :rtype: double
        """
        return _flux_rate(self._model, self._subcatchmentid,
                          self._lidid, LidLayers.pavement.value)
class Storage(object):
    """Storage-layer simulation results for a single LID unit."""

    def __init__(self, model, lidunit):
        self._model = model
        self._lidunit = lidunit
        self._subcatchmentid = lidunit._subcatchmentid
        self._lidid = lidunit._lidid

    def _result(self, result):
        # Every storage result uses the same three-argument lookup; only
        # the LidResults member differs.
        return self._model.getLidUResult(self._subcatchmentid,
                                         self._lidid,
                                         result.value)

    @property
    def depth(self):
        """Depth of water in the storage layer.

        :return: Parameter Value
        :rtype: double
        """
        return self._result(LidResults.storDepth)

    @property
    def inflow(self):
        """Inflow rate to the storage layer.

        :return: Parameter Value
        :rtype: double
        """
        return self._result(LidResults.storInflow)

    @property
    def exfiltration(self):
        """Exfiltration rate from the storage layer.

        :return: Parameter Value
        :rtype: double
        """
        return self._result(LidResults.storExfil)

    @property
    def evaporation(self):
        """Evaporation rate from the storage layer.

        :return: Parameter Value
        :rtype: double
        """
        return self._result(LidResults.storEvap)

    @property
    def drain(self):
        """Drain rate from the storage layer.

        :return: Parameter Value
        :rtype: double
        """
        return self._result(LidResults.storDrain)

    @property
    def flux_rate(self):
        """Flux rate of the storage layer.

        :return: Parameter Value
        :rtype: double
        """
        return _flux_rate(self._model, self._subcatchmentid,
                          self._lidid, LidLayers.storage.value)
class Soil(object):
    """Soil-layer simulation results for a single LID unit."""

    def __init__(self, model, lidunit):
        self._model = model
        self._lidunit = lidunit
        self._subcatchmentid = lidunit._subcatchmentid
        self._lidid = lidunit._lidid

    def _result(self, result):
        # Every soil result uses the same three-argument lookup; only the
        # LidResults member differs.
        return self._model.getLidUResult(self._subcatchmentid,
                                         self._lidid,
                                         result.value)

    @property
    def moisture(self):
        """Moisture content of the biocell soil layer.

        :return: Parameter Value
        :rtype: double
        """
        return self._result(LidResults.soilMoist)

    @property
    def evaporation(self):
        """Evaporation rate from the soil layer.

        :return: Parameter Value
        :rtype: double
        """
        return self._result(LidResults.soilEvap)

    @property
    def percolation(self):
        """Percolation rate from the soil layer.

        :return: Parameter Value
        :rtype: double
        """
        return self._result(LidResults.soilPerc)

    @property
    def flux_rate(self):
        """Flux rate of the soil layer.

        :return: Parameter Value
        :rtype: double
        """
        return _flux_rate(self._model, self._subcatchmentid,
                          self._lidid, LidLayers.soil.value)
class WaterBalance(object):
    """Cumulative water-balance results for a single LID unit."""

    def __init__(self, model, lidunit):
        self._model = model
        self._lidunit = lidunit
        self._subcatchmentid = lidunit._subcatchmentid
        self._lidid = lidunit._lidid

    def _result(self, result):
        # Every water-balance result uses the same three-argument lookup;
        # only the LidResults member differs.
        return self._model.getLidUResult(self._subcatchmentid,
                                         self._lidid,
                                         result.value)

    @property
    def inflow(self):
        """Water-balance total inflow.

        :return: Parameter Value
        :rtype: double
        """
        return self._result(LidResults.inflow)

    @property
    def evaporation(self):
        """Water-balance total evaporation.

        :return: Parameter Value
        :rtype: double
        """
        return self._result(LidResults.evap)

    @property
    def infiltration(self):
        """Water-balance total infiltration.

        :return: Parameter Value
        :rtype: double
        """
        return self._result(LidResults.infil)

    @property
    def surface_flow(self):
        """Water-balance total surface runoff.

        :return: Parameter Value
        :rtype: double
        """
        return self._result(LidResults.surfFlow)

    @property
    def drain_flow(self):
        """Water-balance total underdrain flow.

        :return: Parameter Value
        :rtype: double
        """
        return self._result(LidResults.drainFlow)

    @property
    def initial_volume(self):
        """Water-balance initial stored volume.

        :return: Parameter Value
        :rtype: double
        """
        return self._result(LidResults.initVol)

    @property
    def final_volume(self):
        """Water-balance final stored volume.

        :return: Parameter Value
        :rtype: double
        """
        return self._result(LidResults.finalVol)
| 29.330769
| 79
| 0.498208
| 925
| 11,439
| 5.997838
| 0.127568
| 0.060022
| 0.13266
| 0.126172
| 0.805876
| 0.805876
| 0.777037
| 0.733958
| 0.733958
| 0.697188
| 0
| 0.000903
| 0.419267
| 11,439
| 389
| 80
| 29.40617
| 0.834262
| 0.236821
| 0
| 0.741379
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.189655
| false
| 0
| 0.005747
| 0
| 0.385057
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
904c9295f90dc1cd296b7f50d5f4b920f786c384
| 5,440
|
py
|
Python
|
great_international/migrations/0060_auto_20190730_1247.py
|
uktrade/directory-cms
|
8c8d13ce29ea74ddce7a40f3dd29c8847145d549
|
[
"MIT"
] | 6
|
2018-03-20T11:19:07.000Z
|
2021-10-05T07:53:11.000Z
|
great_international/migrations/0060_auto_20190730_1247.py
|
uktrade/directory-cms
|
8c8d13ce29ea74ddce7a40f3dd29c8847145d549
|
[
"MIT"
] | 802
|
2018-02-05T14:16:13.000Z
|
2022-02-10T10:59:21.000Z
|
great_international/migrations/0060_auto_20190730_1247.py
|
uktrade/directory-cms
|
8c8d13ce29ea74ddce7a40f3dd29c8847145d549
|
[
"MIT"
] | 6
|
2019-01-22T13:19:37.000Z
|
2019-07-01T10:35:26.000Z
|
# Generated by Django 2.2.2 on 2019-07-30 12:47
from django.db import migrations
class Migration(migrations.Migration):
    """Remove the hand-picked related-page and similar-projects fields (plus
    all of their per-language translation columns) from
    CapitalInvestOpportunityPage."""

    dependencies = [
        ('great_international', '0059_merge_20190729_1601'),
    ]

    # Every removed field, in the original migration order: the standalone
    # flag first, then each translated base name with the untranslated column
    # followed by its language-suffixed variants.
    _removed_fields = ['prioritised_opportunity'] + [
        base + suffix
        for base in ('related_page_one', 'related_page_three',
                     'related_page_two', 'similar_projects_title')
        for suffix in ('', '_ar', '_de', '_en_gb', '_es', '_fr',
                       '_ja', '_pt', '_zh_hans')
    ]

    operations = [
        migrations.RemoveField(
            model_name='capitalinvestopportunitypage',
            name=field_name,
        )
        for field_name in _removed_fields
    ]
| 33.580247
| 60
| 0.604779
| 401
| 5,440
| 7.820449
| 0.119701
| 0.247768
| 0.30676
| 0.353954
| 0.941327
| 0.941327
| 0.921556
| 0.91773
| 0.914541
| 0.124043
| 0
| 0.008247
| 0.309007
| 5,440
| 161
| 61
| 33.78882
| 0.826018
| 0.008272
| 0
| 0.716129
| 1
| 0
| 0.348229
| 0.293158
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.006452
| 0
| 0.025806
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 10
|
90d1eafc137dc90ae2d1b14c3e4a2132259fbad5
| 21,454
|
py
|
Python
|
testArray.py
|
JHMeusener/detectron2-ResNeSt
|
6abab6fb9496a528f6aa2d4e1e27f3e7ceb42685
|
[
"Apache-2.0"
] | null | null | null |
testArray.py
|
JHMeusener/detectron2-ResNeSt
|
6abab6fb9496a528f6aa2d4e1e27f3e7ceb42685
|
[
"Apache-2.0"
] | null | null | null |
testArray.py
|
JHMeusener/detectron2-ResNeSt
|
6abab6fb9496a528f6aa2d4e1e27f3e7ceb42685
|
[
"Apache-2.0"
] | null | null | null |
from detectron2.structures import BoxMode
# Some basic setup:
# Setup detectron2 logger
import detectron2
# import some common libraries
import numpy as np
import os, json, cv2, random
# import some common detectron2 utilities
from detectron2 import model_zoo
from detectron2.engine import DefaultPredictor
from detectron2.config import get_cfg
from detectron2.utils.visualizer import Visualizer
from detectron2.data import MetadataCatalog, DatasetCatalog
from detectron2.data.datasets import register_coco_instances
from detectron2.data import MetadataCatalog, DatasetCatalog
from detectron2.data.datasets import load_coco_json
from detectron2.config import get_cfg
from detectron2.engine import DefaultTrainer
from detectron2.data import DatasetMapper
from PIL import Image
import copy
import logging
import numpy as np
from typing import List, Optional, Union
import torch
from detectron2.config import configurable
from detectron2.data import build_detection_train_loader,build_detection_test_loader
from detectron2.data import detection_utils as utils
from detectron2.data import transforms as T
import logging
import numpy as np
from typing import Optional, Tuple
import torch
from torch import nn
from detectron2.config import configurable
from detectron2.structures import ImageList
from detectron2.utils.events import get_event_storage
from detectron2.utils.logger import log_first_n
from detectron2.modeling.backbone import Backbone, build_backbone
from detectron2.modeling.postprocessing import detector_postprocess
from detectron2.modeling.proposal_generator import build_proposal_generator
from detectron2.modeling.roi_heads import build_roi_heads
from detectron2.modeling.meta_arch.build import META_ARCH_REGISTRY
from detectron2.modeling.meta_arch import GeneralizedRCNN, ProposalNetwork
import importlib
from detectron2.layers import ShapeSpec
import sys
import time
from evaluation import RGBDTrainer, mask2target,DepthMapper,DepthRCNN,MultiLoss,EdgeImportanceLoss,_toMask,JointDepthEvaluator,DepthJointRCNN
from scipy import ndimage as ndi
import torch
import torch.nn.functional as F
import torch.nn as nn
from torch.nn.modules.loss import _Loss
from detectron2.evaluation import COCOEvaluator, inference_on_dataset
from detectron2.data.datasets.coco import convert_to_coco_json
from detectron2.data import build_detection_test_loader
from detectron2.evaluation.coco_evaluation import instances_to_coco_json
from pycocotools import mask as maskUtils
from pycocotools.coco import COCO
from detectron2.structures import BitMasks, PolygonMasks
import scipy
import matplotlib.pyplot as plt
# Dataset registration.
# NOTE(review): the original bound "train.json" to a variable named
# ``validationJsonPath`` (and "validation.json" to ``trainingJsonPath``);
# the names below are corrected.  The actual (dataset name, json file)
# pairs passed to ``register_coco_instances`` are unchanged:
# my_dataset_train <- train.json, my_dataset_val <- validation.json.
trainJsonPath = "/files/Dataset/train.json"
validationJsonPath = "/files/Dataset/validation.json"
datasetPath = "/files/Dataset/datasetPics/"
register_coco_instances("my_dataset_train", {}, trainJsonPath, datasetPath)
register_coco_instances("my_dataset_val", {}, validationJsonPath, datasetPath)
@META_ARCH_REGISTRY.register()
class OnlyRCNN(DepthJointRCNN):
    """DepthJointRCNN variant trained on the detection (RCNN) losses only.

    The edge-segmentation branch still runs and its individual losses are
    returned in the loss dict for logging, but the combined objective handed
    to ``self.multiLoss`` uses a constant ``1.0`` in place of the edge loss,
    so only the RCNN side drives ``allLoss``.
    """

    def forward(self, batched_inputs):
        # Inference path is handled entirely by the parent class.
        if not self.training:
            return self.inference(batched_inputs)
        images = self.preprocess_image(batched_inputs)
        if "instances" in batched_inputs[0]:
            gt_instances = [x["instances"].to(self.device) for x in batched_inputs]
        elif "targets" in batched_inputs[0]:
            log_first_n(
                logging.WARN, "'targets' in the model inputs is now renamed to 'instances'!", n=10
            )
            gt_instances = [x["targets"].to(self.device) for x in batched_inputs]
        else:
            gt_instances = None
        features = self.backbone(images.tensor)  # FPN levels: ['p2', 'p3', 'p4', 'p5', 'p6']
        # Edge-segmentation branch: per-level heads plus a head on the raw
        # image, all upsampled to the image-head resolution and concatenated
        # (deeplab-v3-style fusion with lower-layer input).
        c4 = self.edgeSegmentation_c4Head(features["p5"])
        c3 = self.edgeSegmentation_c3Head(features["p4"])
        c2 = self.edgeSegmentation_c2Head(features["p3"])
        c1 = self.edgeSegmentation_c1Head(features["p2"])
        x1 = self.edgeSegmentation_x1Head(images.tensor)
        _, _, h1, w1 = x1.size()
        c1 = F.interpolate(c1, (h1, w1))
        c2 = F.interpolate(c2, (h1, w1))
        c3 = F.interpolate(c3, (h1, w1))
        c4 = F.interpolate(c4, (h1, w1))
        cat = torch.cat((c1, c2, c3, c4, x1), 1)
        edgeSegmentOutput = self.edgeSegmentation_predictionHead(cat)
        target = ImageList.from_tensors(
            [x["target"].to(self.device) for x in batched_inputs],
            size_divisibility=self.backbone.size_divisibility)
        importance = ImageList.from_tensors(
            [x["importance"].to(self.device) for x in batched_inputs],
            size_divisibility=self.backbone.size_divisibility)
        edgeSegmentLoss = self.edgeLoss(edgeSegmentOutput, target.tensor, importance.tensor)
        # Standard RCNN proposal + ROI-head losses.
        if self.proposal_generator:
            proposals, proposal_losses = self.proposal_generator(images, features, gt_instances)
        else:
            assert "proposals" in batched_inputs[0]
            proposals = [x["proposals"].to(self.device) for x in batched_inputs]
            proposal_losses = {}
        _, detector_losses = self.roi_heads(images, features, proposals, gt_instances)
        if self.vis_period > 0:
            storage = get_event_storage()
            if storage.iter % self.vis_period == 0:
                self.visualize_training(batched_inputs, proposals)
        losses = {}
        losses.update(detector_losses)
        losses.update(proposal_losses)
        loss1 = sum(losses.values())
        # Edge losses are reported for logging only; they are intentionally
        # excluded from the combined objective.  (The original also summed
        # them into an unused local ``loss2`` — dead code, removed.)
        losses["hasToBeZeroishError"] = edgeSegmentLoss["hasToBeZeroishError"]
        losses["hasToBeNegativeError"] = edgeSegmentLoss["hasToBeNegativeError"]
        losses["hasToBePositiveError"] = edgeSegmentLoss["hasToBePositiveError"]
        losses["falseNegativeError"] = edgeSegmentLoss["falseNegativeError"]
        losses["falsePositiveError"] = edgeSegmentLoss["falsePositiveError"]
        # Use the model's device instead of hard-coding ``.cuda()`` so the
        # module also works when placed on a non-default device.
        loss = self.multiLoss(loss1, torch.tensor([1.], device=self.device))
        losses["allLoss"] = loss
        return losses
@META_ARCH_REGISTRY.register()
class OnlyEdges(DepthJointRCNN):
    """DepthJointRCNN variant trained on the edge-segmentation losses only.

    The RCNN branch (proposal generator + ROI heads) still runs and its
    losses are returned in the loss dict for logging, but the combined
    objective handed to ``self.multiLoss`` uses a constant ``1.0`` in place
    of the RCNN loss, so only the edge side drives ``allLoss``.
    """

    def forward(self, batched_inputs):
        # Inference path is handled entirely by the parent class.
        if not self.training:
            return self.inference(batched_inputs)
        images = self.preprocess_image(batched_inputs)
        if "instances" in batched_inputs[0]:
            gt_instances = [x["instances"].to(self.device) for x in batched_inputs]
        elif "targets" in batched_inputs[0]:
            log_first_n(
                logging.WARN, "'targets' in the model inputs is now renamed to 'instances'!", n=10
            )
            gt_instances = [x["targets"].to(self.device) for x in batched_inputs]
        else:
            gt_instances = None
        features = self.backbone(images.tensor)  # FPN levels: ['p2', 'p3', 'p4', 'p5', 'p6']
        # Edge-segmentation branch: per-level heads plus a head on the raw
        # image, all upsampled to the image-head resolution and concatenated
        # (deeplab-v3-style fusion with lower-layer input).
        c4 = self.edgeSegmentation_c4Head(features["p5"])
        c3 = self.edgeSegmentation_c3Head(features["p4"])
        c2 = self.edgeSegmentation_c2Head(features["p3"])
        c1 = self.edgeSegmentation_c1Head(features["p2"])
        x1 = self.edgeSegmentation_x1Head(images.tensor)
        _, _, h1, w1 = x1.size()
        c1 = F.interpolate(c1, (h1, w1))
        c2 = F.interpolate(c2, (h1, w1))
        c3 = F.interpolate(c3, (h1, w1))
        c4 = F.interpolate(c4, (h1, w1))
        cat = torch.cat((c1, c2, c3, c4, x1), 1)
        edgeSegmentOutput = self.edgeSegmentation_predictionHead(cat)
        target = ImageList.from_tensors(
            [x["target"].to(self.device) for x in batched_inputs],
            size_divisibility=self.backbone.size_divisibility)
        importance = ImageList.from_tensors(
            [x["importance"].to(self.device) for x in batched_inputs],
            size_divisibility=self.backbone.size_divisibility)
        edgeSegmentLoss = self.edgeLoss(edgeSegmentOutput, target.tensor, importance.tensor)
        # Standard RCNN proposal + ROI-head losses (kept for logging only).
        if self.proposal_generator:
            proposals, proposal_losses = self.proposal_generator(images, features, gt_instances)
        else:
            assert "proposals" in batched_inputs[0]
            proposals = [x["proposals"].to(self.device) for x in batched_inputs]
            proposal_losses = {}
        _, detector_losses = self.roi_heads(images, features, proposals, gt_instances)
        if self.vis_period > 0:
            storage = get_event_storage()
            if storage.iter % self.vis_period == 0:
                self.visualize_training(batched_inputs, proposals)
        losses = {}
        losses.update(detector_losses)
        losses.update(proposal_losses)
        # The RCNN losses are reported but intentionally excluded from the
        # combined objective.  (The original also summed them into an unused
        # local ``loss1`` — dead code, removed.)
        loss2 = (edgeSegmentLoss["hasToBeZeroishError"]
                 + edgeSegmentLoss["hasToBeNegativeError"]
                 + edgeSegmentLoss["hasToBePositiveError"])
        losses["hasToBeZeroishError"] = edgeSegmentLoss["hasToBeZeroishError"]
        losses["hasToBeNegativeError"] = edgeSegmentLoss["hasToBeNegativeError"]
        losses["hasToBePositiveError"] = edgeSegmentLoss["hasToBePositiveError"]
        losses["falseNegativeError"] = edgeSegmentLoss["falseNegativeError"]
        losses["falsePositiveError"] = edgeSegmentLoss["falsePositiveError"]
        # Use the model's device instead of hard-coding ``.cuda()`` so the
        # module also works when placed on a non-default device.
        loss = self.multiLoss(torch.tensor([1.], device=self.device), loss2)
        losses["allLoss"] = loss
        return losses
class OnlyOneMapper(DepthMapper):
    """DepthMapper that zeroes out one of the two input modalities.

    After the parent mapper builds the sample, either the RGB image
    (``deleteRGB`` truthy) or the depth map is multiplied by zero, so the
    model effectively trains on a single modality.  Callers set the
    ``deleteRGB`` attribute on the instance after construction.
    """

    def __call__(self, dataset_dict):
        # Bug fix: ``super().__call__`` is already bound to ``self``; the
        # original passed ``self`` explicitly, which shifts the arguments
        # and fails at runtime.
        dataset_dict = super().__call__(dataset_dict)
        if self.deleteRGB:
            dataset_dict["image"] *= 0.
        else:
            dataset_dict["depth"] *= 0.
        return dataset_dict
class RGBDTrainerDeleteRGB(RGBDTrainer):
    """RGBDTrainer whose training loader blanks the RGB channel.

    Training therefore proceeds on the depth input alone.
    """

    @classmethod
    def build_train_loader(cls, cfg):
        """Return the training data loader.

        Uses an :class:`OnlyOneMapper` configured to zero out the RGB
        image, then delegates to
        :func:`detectron2.data.build_detection_train_loader`.
        """
        rgb_deleting_mapper = OnlyOneMapper(cfg, True)
        rgb_deleting_mapper.deleteRGB = True
        return build_detection_train_loader(cfg, mapper=rgb_deleting_mapper)
class RGBDTrainerDeleteDepth(RGBDTrainer):
    """RGBDTrainer whose training loader blanks the depth channel.

    Training therefore proceeds on the RGB input alone.
    """

    @classmethod
    def build_train_loader(cls, cfg):
        """Return the training data loader.

        Uses an :class:`OnlyOneMapper` configured to zero out the depth
        map, then delegates to
        :func:`detectron2.data.build_detection_train_loader`.
        """
        depth_deleting_mapper = OnlyOneMapper(cfg, True)
        depth_deleting_mapper.deleteRGB = False
        return build_detection_train_loader(cfg, mapper=depth_deleting_mapper)
def run_experiment(config_file, meta_architecture, folder,
                   trainer_cls=RGBDTrainer, weights=None):
    """Build a config, train one model, and free GPU memory afterwards.

    Replaces seven near-identical copy-pasted config/train sections; every
    setting below is identical across all runs in the original script.

    Parameters
    ----------
    config_file : str
        Path to the detectron2 YAML config to start from.
    meta_architecture : str
        Registered META_ARCHITECTURE name to instantiate.
    folder : str
        Experiment folder name under ``/files/Code/experiments/``.
    trainer_cls : type
        Trainer class to construct (default: plain ``RGBDTrainer``).
    weights : str or None
        Value for ``cfg.MODEL.WEIGHTS``; ``None`` leaves the config default
        (matching the runs where the WEIGHTS line was commented out).
    """
    cfg = get_cfg()
    cfg.merge_from_file(config_file)
    cfg.MODEL.META_ARCHITECTURE = meta_architecture
    cfg.DATASETS.TRAIN = ("my_dataset_train",)
    cfg.DATASETS.TEST = ("my_dataset_val",)
    if weights is not None:
        cfg.MODEL.WEIGHTS = weights
    cfg.DATALOADER.NUM_WORKERS = 6
    cfg.SOLVER.IMS_PER_BATCH = 3
    cfg.MODEL.ROI_HEADS.BATCH_SIZE_PER_IMAGE = 256  # faster, and good enough for this toy dataset (default: 512)
    cfg.MODEL.ROI_HEADS.NUM_CLASSES = 1  # single-class dataset
    cfg.MODEL.BACKBONE.FREEZE_AT = 0
    cfg.MODEL.SEM_SEG_HEAD.NUM_CLASSES = 1
    cfg.MODEL.RETINANET.NUM_CLASSES = 1
    cfg.MODEL.RESNETS.STEM_OUT_CHANNELS = 128
    cfg.TEST.EVAL_PERIOD = 25000
    cfg.TEST.PRECISE_BN.ENABLED = False
    cfg.OUTPUT_DIR = "/files/Code/experiments/" + folder
    cfg.SEED = 42
    os.makedirs(cfg.OUTPUT_DIR, exist_ok=True)
    cfg.SOLVER.CHECKPOINT_PERIOD = 25000
    cfg.SOLVER.BASE_LR = 0.008
    cfg.SOLVER.STEPS = (75000,)
    cfg.TEST.DETECTIONS_PER_IMAGE = 250
    cfg.MODEL.EDGE_SEGMENT_BASE_LR = 0.005
    trainer = trainer_cls(cfg)
    trainer.resume_or_load(resume=False)
    trainer.train()
    # Release the trainer and cached GPU memory before the next run.
    del trainer
    torch.cuda.empty_cache()


SMALL_CONFIG = ("/files/Code/detectron2-ResNeSt/configs/COCO-InstanceSegmentation/"
                "mask_cascade_rcnn_ResNeSt_50_FPN_syncBN_1x.yaml")
BIG_CONFIG = ("/files/Code/detectronResNest/configs/COCO-InstanceSegmentation/"
              "mask_cascade_rcnn_ResNeSt_101_FPN_syncBN_1x.yaml")

run_experiment(SMALL_CONFIG, "DepthJointRCNN", "2020_11_24_small_Joint")
# Same joint model but with weights explicitly cleared (no init checkpoint).
run_experiment(SMALL_CONFIG, "DepthJointRCNN", "2020_11_24_small_Joint_noInit",
               weights="")
run_experiment(SMALL_CONFIG, "OnlyRCNN", "2020_11_24_small_RCNN")
run_experiment(SMALL_CONFIG, "OnlyEdges", "2020_11_24_small_Edges")
# NOTE(review): the original used the plain RGBDTrainer for this run even
# though the folder name says "onlyRGB"; by symmetry with the "onlyDepth"
# run below (which uses RGBDTrainerDeleteRGB) this run should blank the
# depth input, so RGBDTrainerDeleteDepth is used here.
run_experiment(SMALL_CONFIG, "DepthJointRCNN", "2020_11_24_small_joint_onlyRGB",
               trainer_cls=RGBDTrainerDeleteDepth)
run_experiment(SMALL_CONFIG, "DepthJointRCNN", "2020_11_24_small_joint_onlyDepth",
               trainer_cls=RGBDTrainerDeleteRGB)
run_experiment(BIG_CONFIG, "DepthJointRCNN", "2020_11_24_big_Joint")
| 42.483168
| 166
| 0.729841
| 2,854
| 21,454
| 5.283812
| 0.124036
| 0.037135
| 0.015318
| 0.014854
| 0.85683
| 0.84118
| 0.830769
| 0.820358
| 0.808157
| 0.802321
| 0
| 0.037461
| 0.140207
| 21,454
| 504
| 167
| 42.56746
| 0.780061
| 0.152233
| 0
| 0.801508
| 0
| 0
| 0.132672
| 0.067819
| 0
| 0
| 0
| 0
| 0.005025
| 1
| 0.012563
| false
| 0
| 0.158291
| 0
| 0.201005
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
2915aacff529dd85964ce6c61b91e898358999e3
| 30,270
|
py
|
Python
|
tests/system_tests_fallback_dest.py
|
franz1981/qpid-dispatch
|
21ee691eb0066d8061bf612f2fffa4c226ae7abb
|
[
"Apache-2.0"
] | null | null | null |
tests/system_tests_fallback_dest.py
|
franz1981/qpid-dispatch
|
21ee691eb0066d8061bf612f2fffa4c226ae7abb
|
[
"Apache-2.0"
] | null | null | null |
tests/system_tests_fallback_dest.py
|
franz1981/qpid-dispatch
|
21ee691eb0066d8061bf612f2fffa4c226ae7abb
|
[
"Apache-2.0"
] | null | null | null |
#
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
#
from __future__ import unicode_literals
from __future__ import division
from __future__ import absolute_import
from __future__ import print_function
from time import sleep
from threading import Event
from threading import Timer
import unittest2 as unittest
from proton import Message, Timeout, symbol
from system_test import TestCase, Qdrouterd, main_module, TIMEOUT, MgmtMsgProxy
from system_test import AsyncTestReceiver
from system_test import AsyncTestSender
from system_test import QdManager
from system_tests_link_routes import ConnLinkRouteService
from proton.handlers import MessagingHandler
from proton.reactor import Container, DynamicNodeProperties
from proton.utils import BlockingConnection
from qpid_dispatch.management.client import Node
from subprocess import PIPE, STDOUT
import re
class AddrTimer(object):
    """Proton reactor timer-task handler that polls its owner's addresses.

    Each firing of the timer simply delegates back to the owning test's
    ``check_address()`` method; the event argument is not used.
    """

    def __init__(self, parent):
        # The object whose check_address() we invoke on every timer tick.
        self.parent = parent

    def on_timer_task(self, event):
        self.parent.check_address()
class RouterTest(TestCase):
inter_router_port = None
@classmethod
def setUpClass(cls):
"""Start a router"""
super(RouterTest, cls).setUpClass()
def router(name, mode, connection, extra=None):
config = [
('router', {'mode': mode, 'id': name}),
('listener', {'port': cls.tester.get_port(), 'stripAnnotations': 'no'}),
('listener', {'port': cls.tester.get_port(), 'role': 'route-container', 'name': 'WP'}),
('address', {'prefix': 'dest', 'enableFallback': 'yes'}),
('autoLink', {'connection': 'WP', 'address': 'dest.al', 'dir': 'out', 'fallback': 'yes'}),
('autoLink', {'connection': 'WP', 'address': 'dest.al', 'dir': 'in', 'fallback': 'yes'}),
connection
]
if extra:
config.append(extra)
config = Qdrouterd.Config(config)
cls.routers.append(cls.tester.qdrouterd(name, config, wait=True))
cls.routers = []
inter_router_port = cls.tester.get_port()
edge_port_A = cls.tester.get_port()
edge_port_B = cls.tester.get_port()
router('INT.A', 'interior', ('listener', {'role': 'inter-router', 'port': inter_router_port}),
('listener', {'role': 'edge', 'port': edge_port_A}))
router('INT.B', 'interior', ('connector', {'name': 'connectorToA', 'role': 'inter-router', 'port': inter_router_port}),
('listener', {'role': 'edge', 'port': edge_port_B}))
router('EA1', 'edge', ('connector', {'name': 'edge', 'role': 'edge', 'port': edge_port_A}))
router('EA2', 'edge', ('connector', {'name': 'edge', 'role': 'edge', 'port': edge_port_A}))
router('EB1', 'edge', ('connector', {'name': 'edge', 'role': 'edge', 'port': edge_port_B}))
router('EB2', 'edge', ('connector', {'name': 'edge', 'role': 'edge', 'port': edge_port_B}))
cls.routers[0].wait_router_connected('INT.B')
cls.routers[1].wait_router_connected('INT.A')
def test_01_sender_first_primary_same_interior(self):
test = SenderFirstTest(self.routers[0].addresses[0],
self.routers[0].addresses[0],
'dest.01', False)
test.run()
self.assertEqual(None, test.error)
def test_02_sender_first_fallback_same_interior(self):
test = SenderFirstTest(self.routers[0].addresses[0],
self.routers[0].addresses[0],
'dest.02', True)
test.run()
self.assertEqual(None, test.error)
def test_03_sender_first_primary_same_edge(self):
test = SenderFirstTest(self.routers[2].addresses[0],
self.routers[2].addresses[0],
'dest.03', False)
test.run()
self.assertEqual(None, test.error)
def test_04_sender_first_fallback_same_edge(self):
test = SenderFirstTest(self.routers[2].addresses[0],
self.routers[2].addresses[0],
'dest.04', True)
test.run()
self.assertEqual(None, test.error)
def test_05_sender_first_primary_interior_interior(self):
test = SenderFirstTest(self.routers[0].addresses[0],
self.routers[1].addresses[0],
'dest.05', False)
test.run()
self.assertEqual(None, test.error)
def test_06_sender_first_fallback_interior_interior(self):
test = SenderFirstTest(self.routers[0].addresses[0],
self.routers[1].addresses[0],
'dest.06', True)
test.run()
self.assertEqual(None, test.error)
def test_07_sender_first_primary_edge_interior(self):
test = SenderFirstTest(self.routers[2].addresses[0],
self.routers[1].addresses[0],
'dest.07', False)
test.run()
self.assertEqual(None, test.error)
def test_08_sender_first_fallback_edge_interior(self):
test = SenderFirstTest(self.routers[2].addresses[0],
self.routers[1].addresses[0],
'dest.08', True)
test.run()
self.assertEqual(None, test.error)
def test_09_sender_first_primary_interior_edge(self):
test = SenderFirstTest(self.routers[1].addresses[0],
self.routers[2].addresses[0],
'dest.09', False)
test.run()
self.assertEqual(None, test.error)
def test_10_sender_first_fallback_interior_edge(self):
test = SenderFirstTest(self.routers[1].addresses[0],
self.routers[2].addresses[0],
'dest.10', True)
test.run()
self.assertEqual(None, test.error)
def test_11_sender_first_primary_edge_edge(self):
test = SenderFirstTest(self.routers[2].addresses[0],
self.routers[4].addresses[0],
'dest.11', False)
test.run()
self.assertEqual(None, test.error)
def test_12_sender_first_fallback_edge_edge(self):
test = SenderFirstTest(self.routers[2].addresses[0],
self.routers[4].addresses[0],
'dest.12', True)
test.run()
self.assertEqual(None, test.error)
def test_13_receiver_first_primary_same_interior(self):
test = ReceiverFirstTest(self.routers[0].addresses[0],
self.routers[0].addresses[0],
'dest.13', False)
test.run()
self.assertEqual(None, test.error)
def test_14_receiver_first_fallback_same_interior(self):
test = ReceiverFirstTest(self.routers[0].addresses[0],
self.routers[0].addresses[0],
'dest.14', True)
test.run()
self.assertEqual(None, test.error)
def test_15_receiver_first_primary_same_edge(self):
test = ReceiverFirstTest(self.routers[2].addresses[0],
self.routers[2].addresses[0],
'dest.15', False)
test.run()
self.assertEqual(None, test.error)
def test_16_receiver_first_fallback_same_edge(self):
test = ReceiverFirstTest(self.routers[2].addresses[0],
self.routers[2].addresses[0],
'dest.16', True)
test.run()
self.assertEqual(None, test.error)
def test_17_receiver_first_primary_interior_interior(self):
test = ReceiverFirstTest(self.routers[0].addresses[0],
self.routers[1].addresses[0],
'dest.17', False)
test.run()
self.assertEqual(None, test.error)
def test_18_receiver_first_fallback_interior_interior(self):
test = ReceiverFirstTest(self.routers[0].addresses[0],
self.routers[1].addresses[0],
'dest.18', True)
test.run()
self.assertEqual(None, test.error)
def test_19_receiver_first_primary_edge_interior(self):
test = ReceiverFirstTest(self.routers[2].addresses[0],
self.routers[1].addresses[0],
'dest.19', False)
test.run()
self.assertEqual(None, test.error)
def test_20_receiver_first_fallback_edge_interior(self):
test = ReceiverFirstTest(self.routers[2].addresses[0],
self.routers[1].addresses[0],
'dest.20', True)
test.run()
self.assertEqual(None, test.error)
def test_21_receiver_first_primary_interior_edge(self):
test = ReceiverFirstTest(self.routers[1].addresses[0],
self.routers[2].addresses[0],
'dest.21', False)
test.run()
self.assertEqual(None, test.error)
def test_22_receiver_first_fallback_interior_edge(self):
test = ReceiverFirstTest(self.routers[1].addresses[0],
self.routers[2].addresses[0],
'dest.22', True)
test.run()
self.assertEqual(None, test.error)
def test_23_receiver_first_primary_edge_edge(self):
test = ReceiverFirstTest(self.routers[2].addresses[0],
self.routers[4].addresses[0],
'dest.23', False)
test.run()
self.assertEqual(None, test.error)
def test_24_receiver_first_fallback_edge_edge(self):
test = ReceiverFirstTest(self.routers[2].addresses[0],
self.routers[4].addresses[0],
'dest.24', True)
test.run()
self.assertEqual(None, test.error)
def test_25_switchover_same_edge(self):
test = SwitchoverTest(self.routers[2].addresses[0],
self.routers[2].addresses[0],
self.routers[2].addresses[0],
'dest.25')
test.run()
self.assertEqual(None, test.error)
def test_26_switchover_same_interior(self):
test = SwitchoverTest(self.routers[0].addresses[0],
self.routers[0].addresses[0],
self.routers[0].addresses[0],
'dest.26')
test.run()
self.assertEqual(None, test.error)
def test_27_switchover_local_edge_alt_remote_interior(self):
test = SwitchoverTest(self.routers[2].addresses[0],
self.routers[0].addresses[0],
self.routers[2].addresses[0],
'dest.27')
test.run()
self.assertEqual(None, test.error)
def test_28_switchover_local_edge_alt_remote_edge(self):
test = SwitchoverTest(self.routers[2].addresses[0],
self.routers[4].addresses[0],
self.routers[2].addresses[0],
'dest.28')
test.run()
self.assertEqual(None, test.error)
def test_29_switchover_local_edge_pri_remote_interior(self):
test = SwitchoverTest(self.routers[2].addresses[0],
self.routers[2].addresses[0],
self.routers[0].addresses[0],
'dest.29')
test.run()
self.assertEqual(None, test.error)
def test_30_switchover_local_interior_pri_remote_edge(self):
test = SwitchoverTest(self.routers[2].addresses[0],
self.routers[2].addresses[0],
self.routers[4].addresses[0],
'dest.30')
test.run()
self.assertEqual(None, test.error)
def test_31_switchover_local_interior_alt_remote_interior(self):
test = SwitchoverTest(self.routers[1].addresses[0],
self.routers[0].addresses[0],
self.routers[1].addresses[0],
'dest.31')
test.run()
self.assertEqual(None, test.error)
def test_32_switchover_local_interior_alt_remote_edge(self):
test = SwitchoverTest(self.routers[1].addresses[0],
self.routers[3].addresses[0],
self.routers[1].addresses[0],
'dest.32')
test.run()
self.assertEqual(None, test.error)
def test_33_switchover_local_interior_pri_remote_interior(self):
test = SwitchoverTest(self.routers[1].addresses[0],
self.routers[1].addresses[0],
self.routers[0].addresses[0],
'dest.33')
test.run()
self.assertEqual(None, test.error)
def test_34_switchover_local_interior_pri_remote_edge(self):
test = SwitchoverTest(self.routers[1].addresses[0],
self.routers[1].addresses[0],
self.routers[4].addresses[0],
'dest.34')
test.run()
self.assertEqual(None, test.error)
def test_35_switchover_mix_1(self):
test = SwitchoverTest(self.routers[0].addresses[0],
self.routers[1].addresses[0],
self.routers[2].addresses[0],
'dest.35')
test.run()
self.assertEqual(None, test.error)
def test_36_switchover_mix_2(self):
test = SwitchoverTest(self.routers[2].addresses[0],
self.routers[1].addresses[0],
self.routers[0].addresses[0],
'dest.36')
test.run()
self.assertEqual(None, test.error)
def test_37_switchover_mix_3(self):
    """Switchover with sender, primary and fallback on routers 2, 1 and 4."""
    test = SwitchoverTest(self.routers[2].addresses[0],
                          self.routers[1].addresses[0],
                          self.routers[4].addresses[0],
                          'dest.37')
    test.run()
    self.assertIsNone(test.error)
def test_38_switchover_mix_4(self):
    """Switchover with sender, primary and fallback on routers 2, 3 and 4."""
    test = SwitchoverTest(self.routers[2].addresses[0],
                          self.routers[3].addresses[0],
                          self.routers[4].addresses[0],
                          'dest.38')
    test.run()
    self.assertIsNone(test.error)
def test_39_auto_link_sender_first_fallback_same_interior(self):
    """Auto-link, sender first: both endpoints on router 0."""
    test = SenderFirstAutoLinkTest(self.routers[0].addresses[0],
                                   self.routers[0].addresses[1])
    test.run()
    self.assertIsNone(test.error)
def test_40_auto_link_sender_first_fallback_same_edge(self):
    """Auto-link, sender first: both endpoints on router 2."""
    test = SenderFirstAutoLinkTest(self.routers[2].addresses[0],
                                   self.routers[2].addresses[1])
    test.run()
    self.assertIsNone(test.error)
def test_41_auto_link_sender_first_fallback_interior_interior(self):
    """Auto-link, sender first: sender on router 0, receiver on router 1."""
    test = SenderFirstAutoLinkTest(self.routers[0].addresses[0],
                                   self.routers[1].addresses[1])
    test.run()
    self.assertIsNone(test.error)
def test_42_auto_link_sender_first_fallback_edge_interior(self):
    """Auto-link, sender first: sender on router 2, receiver on router 0."""
    test = SenderFirstAutoLinkTest(self.routers[2].addresses[0],
                                   self.routers[0].addresses[1])
    test.run()
    self.assertIsNone(test.error)
def test_43_auto_link_sender_first_fallback_interior_edge(self):
    """Auto-link, sender first: sender on router 1, receiver on router 2."""
    test = SenderFirstAutoLinkTest(self.routers[1].addresses[0],
                                   self.routers[2].addresses[1])
    test.run()
    self.assertIsNone(test.error)
def test_44_auto_link_sender_first_fallback_edge_edge(self):
    """Auto-link, sender first: sender on router 2, receiver on router 4."""
    test = SenderFirstAutoLinkTest(self.routers[2].addresses[0],
                                   self.routers[4].addresses[1])
    test.run()
    self.assertIsNone(test.error)
def test_45_auto_link_receiver_first_fallback_same_interior(self):
    """Auto-link, receiver first: both endpoints on router 0."""
    test = ReceiverFirstAutoLinkTest(self.routers[0].addresses[0],
                                     self.routers[0].addresses[1])
    test.run()
    self.assertIsNone(test.error)
def test_46_auto_link_receiver_first_fallback_same_edge(self):
    """Auto-link, receiver first: both endpoints on router 2."""
    test = ReceiverFirstAutoLinkTest(self.routers[2].addresses[0],
                                     self.routers[2].addresses[1])
    test.run()
    self.assertIsNone(test.error)
def test_47_auto_link_receiver_first_fallback_interior_interior(self):
    """Auto-link, receiver first: sender on router 0, receiver on router 1."""
    test = ReceiverFirstAutoLinkTest(self.routers[0].addresses[0],
                                     self.routers[1].addresses[1])
    test.run()
    self.assertIsNone(test.error)
def test_48_auto_link_receiver_first_fallback_edge_interior(self):
    """Auto-link, receiver first: sender on router 2, receiver on router 1."""
    test = ReceiverFirstAutoLinkTest(self.routers[2].addresses[0],
                                     self.routers[1].addresses[1])
    test.run()
    self.assertIsNone(test.error)
def test_49_auto_link_receiver_first_fallback_interior_edge(self):
    """Auto-link, receiver first: sender on router 1, receiver on router 2."""
    test = ReceiverFirstAutoLinkTest(self.routers[1].addresses[0],
                                     self.routers[2].addresses[1])
    test.run()
    self.assertIsNone(test.error)
def test_50_auto_link_receiver_first_fallback_edge_edge(self):
    """Auto-link, receiver first: sender on router 2, receiver on router 4."""
    test = ReceiverFirstAutoLinkTest(self.routers[2].addresses[0],
                                     self.routers[4].addresses[1])
    test.run()
    self.assertIsNone(test.error)
class Timeout(object):
    """Reactor timer callback that forwards expiry to its owning handler."""

    def __init__(self, parent):
        self._parent = parent

    def on_timer_task(self, event):
        # Let the owning test record the timeout and tear itself down.
        self._parent.timeout()
class SenderFirstTest(MessagingHandler):
    """Attach the sender before the receiver and expect full delivery.

    When rx_fallback is True the receiver advertises the "qd.fallback"
    source capability.  Success is receiving all self.count messages
    before the 10-second watchdog fires; self.error stays None on success.
    """

    def __init__(self, sender_host, receiver_host, addr, rx_fallback):
        super(SenderFirstTest, self).__init__()
        self.sender_host = sender_host
        self.receiver_host = receiver_host
        self.addr = addr
        self.rx_fallback = rx_fallback
        self.count = 300          # messages to send and expect back
        self.sender_conn = None
        self.receiver_conn = None
        self.error = None         # None means the test passed
        self.n_tx = 0
        self.n_rx = 0
        self.n_rel = 0

    def timeout(self):
        self.error = "Timeout Expired - n_tx=%d, n_rx=%d, n_rel=%d" % (self.n_tx, self.n_rx, self.n_rel)
        self.sender_conn.close()
        self.receiver_conn.close()

    def fail(self, error):
        # error=None signals success; either way stop the timer and close up.
        self.error = error
        self.timer.cancel()
        self.sender_conn.close()
        self.receiver_conn.close()

    def on_start(self, event):
        self.timer = event.reactor.schedule(10.0, Timeout(self))
        self.sender_conn = event.container.connect(self.sender_host)
        self.receiver_conn = event.container.connect(self.receiver_host)
        self.sender = event.container.create_sender(self.sender_conn, self.addr)

    def on_link_opened(self, event):
        if event.sender != self.sender:
            return
        # Sender link is up; now attach the receiver (sender-first ordering).
        self.receiver = event.container.create_receiver(self.receiver_conn, self.addr)
        if self.rx_fallback:
            self.receiver.source.capabilities.put_symbol("qd.fallback")

    def on_sendable(self, event):
        if event.sender != self.sender:
            return
        while self.n_tx < self.count and self.sender.credit > 0:
            self.sender.send(Message("Message %d" % self.n_tx))
            self.n_tx += 1

    def on_message(self, event):
        if event.receiver != self.receiver:
            return
        self.n_rx += 1
        if self.n_rx == self.count:
            self.fail(None)

    def on_released(self, event):
        self.n_rel += 1

    def run(self):
        Container(self).run()
class ReceiverFirstTest(MessagingHandler):
    """Attach the receiver before the sender and expect full delivery.

    When rx_fallback is True the receiver advertises the "qd.fallback"
    source capability.  Success is receiving all self.count messages
    before the 10-second watchdog fires; self.error stays None on success.
    """

    def __init__(self, sender_host, receiver_host, addr, rx_fallback):
        super(ReceiverFirstTest, self).__init__()
        self.sender_host = sender_host
        self.receiver_host = receiver_host
        self.addr = addr
        self.rx_fallback = rx_fallback
        self.count = 300          # messages to send and expect back
        self.sender_conn = None
        self.receiver_conn = None
        self.error = None         # None means the test passed
        self.n_tx = 0
        self.n_rx = 0
        self.n_rel = 0

    def timeout(self):
        self.error = "Timeout Expired - n_tx=%d, n_rx=%d, n_rel=%d" % (self.n_tx, self.n_rx, self.n_rel)
        self.sender_conn.close()
        self.receiver_conn.close()

    def fail(self, error):
        # error=None signals success; either way stop the timer and close up.
        self.error = error
        self.timer.cancel()
        self.sender_conn.close()
        self.receiver_conn.close()

    def on_start(self, event):
        self.timer = event.reactor.schedule(10.0, Timeout(self))
        self.sender_conn = event.container.connect(self.sender_host)
        self.receiver_conn = event.container.connect(self.receiver_host)
        self.receiver = event.container.create_receiver(self.receiver_conn, self.addr)
        if self.rx_fallback:
            self.receiver.source.capabilities.put_symbol("qd.fallback")

    def on_link_opened(self, event):
        if event.receiver == self.receiver:
            # Receiver link is up; now attach the sender (receiver-first ordering).
            self.sender = event.container.create_sender(self.sender_conn, self.addr)

    def on_sendable(self, event):
        if event.sender != self.sender:
            return
        while self.n_tx < self.count and self.sender.credit > 0:
            self.sender.send(Message("Message %d" % self.n_tx))
            self.n_tx += 1

    def on_message(self, event):
        if event.receiver != self.receiver:
            return
        self.n_rx += 1
        if self.n_rx == self.count:
            self.fail(None)

    def on_released(self, event):
        self.n_rel += 1

    def run(self):
        Container(self).run()
class SwitchoverTest(MessagingHandler):
    """Verify that traffic switches from the primary to the fallback receiver.

    Phase 0: self.count messages flow to the primary receiver, then the
    primary receiver is closed.
    Phase 1: the same number of messages must then arrive at the fallback
    receiver (which advertises the "qd.fallback" source capability).
    A 10-second watchdog converts a stall into a descriptive error;
    self.error stays None on success.
    """

    def __init__(self, sender_host, primary_host, fallback_host, addr):
        super(SwitchoverTest, self).__init__()
        self.sender_host = sender_host
        self.primary_host = primary_host
        self.fallback_host = fallback_host
        self.addr = addr
        self.count = 300            # messages expected in each phase
        self.sender_conn = None
        self.primary_conn = None
        self.fallback_conn = None
        self.error = None           # None means the test passed
        self.n_tx = 0
        self.n_rx = 0
        self.n_rel = 0
        self.phase = 0              # 0 = primary active, 1 = fallback active

    def timeout(self):
        self.error = "Timeout Expired - n_tx=%d, n_rx=%d, n_rel=%d, phase=%d" % (self.n_tx, self.n_rx, self.n_rel, self.phase)
        self.sender_conn.close()
        self.primary_conn.close()
        self.fallback_conn.close()

    def fail(self, error):
        self.error = error
        self.sender_conn.close()
        self.primary_conn.close()
        self.fallback_conn.close()
        self.timer.cancel()

    def on_start(self, event):
        self.timer = event.reactor.schedule(10.0, Timeout(self))
        self.sender_conn = event.container.connect(self.sender_host)
        self.primary_conn = event.container.connect(self.primary_host)
        self.fallback_conn = event.container.connect(self.fallback_host)
        self.primary_receiver = event.container.create_receiver(self.primary_conn, self.addr)
        # BUG FIX: attach the fallback receiver over fallback_conn.  It was
        # previously created on primary_conn, which left fallback_conn opened
        # but completely unused, so the "remote" fallback variants never
        # actually exercised the fallback host.
        self.fallback_receiver = event.container.create_receiver(self.fallback_conn, self.addr, name=self.addr)
        # put_symbol(...) is equivalent to put_object(symbol(...)) and matches
        # the style used by the other tests in this file.
        self.fallback_receiver.source.capabilities.put_symbol("qd.fallback")

    def on_link_opened(self, event):
        if event.receiver == self.primary_receiver:
            self.sender = event.container.create_sender(self.sender_conn, self.addr)

    def on_link_closed(self, event):
        if event.receiver == self.primary_receiver:
            # Primary closed: reset counters and drive the phase-1 traffic.
            self.n_rx = 0
            self.n_tx = 0
            self.send()

    def send(self):
        """Send messages while credit is available, up to self.count."""
        while self.sender.credit > 0 and self.n_tx < self.count:
            self.sender.send(Message("Message %d" % self.n_tx))
            self.n_tx += 1

    def on_sendable(self, event):
        if event.sender == self.sender:
            self.send()

    def on_message(self, event):
        self.n_rx += 1
        if self.n_rx == self.count:
            if self.phase == 0:
                # All primary-phase messages arrived; force the switchover.
                self.phase = 1
                self.primary_receiver.close()
            else:
                self.fail(None)

    def on_released(self, event):
        # Released messages will be re-sent; roll the tx counter back.
        self.n_rel += 1
        self.n_tx -= 1

    def run(self):
        Container(self).run()
class SenderFirstAutoLinkTest(MessagingHandler):
    """Auto-link fallback test where the client sender is opened first.

    The router initiates the auto-link attaches; on_link_opening accepts
    them for the fixed address "dest.al".  Success is receiving all
    self.count messages before the 10-second watchdog fires.
    """

    def __init__(self, sender_host, receiver_host):
        super(SenderFirstAutoLinkTest, self).__init__()
        self.sender_host = sender_host
        self.receiver_host = receiver_host
        self.addr = "dest.al"
        self.count = 300          # messages to send and expect back
        self.sender_conn = None
        self.receiver_conn = None
        self.error = None         # None means the test passed
        self.n_tx = 0
        self.n_rx = 0
        self.n_rel = 0

    def timeout(self):
        self.error = "Timeout Expired - n_tx=%d, n_rx=%d, n_rel=%d" % (self.n_tx, self.n_rx, self.n_rel)
        self.sender_conn.close()
        self.receiver_conn.close()

    def fail(self, error):
        # error=None signals success; either way stop the timer and close up.
        self.error = error
        self.timer.cancel()
        self.sender_conn.close()
        self.receiver_conn.close()

    def on_start(self, event):
        self.timer = event.reactor.schedule(10.0, Timeout(self))
        self.sender_conn = event.container.connect(self.sender_host)
        self.sender = event.container.create_sender(self.sender_conn, self.addr)

    def on_link_opening(self, event):
        # Accept router-initiated auto-link attaches, echoing our address.
        if event.sender:
            self.alt_sender = event.sender
            event.sender.source.address = self.addr
            event.sender.open()
        elif event.receiver:
            self.alt_receiver = event.receiver
            event.receiver.target.address = self.addr
            event.receiver.open()

    def on_link_opened(self, event):
        if event.sender == self.sender:
            # Client sender is attached; now bring up the auto-link side.
            self.receiver_conn = event.container.connect(self.receiver_host)

    def on_sendable(self, event):
        if event.sender != self.sender:
            return
        while self.n_tx < self.count and self.sender.credit > 0:
            self.sender.send(Message("Message %d" % self.n_tx))
            self.n_tx += 1

    def on_message(self, event):
        self.n_rx += 1
        if self.n_rx == self.count:
            self.fail(None)

    def on_released(self, event):
        # Released messages will be re-sent; roll the tx counter back.
        self.n_rel += 1
        self.n_tx -= 1

    def run(self):
        Container(self).run()
class ReceiverFirstAutoLinkTest(MessagingHandler):
    """Auto-link fallback test where the auto-link receiver comes up first.

    The router initiates the auto-link attaches; once the auto-link
    receiver is open, the client sender is attached.  Success is receiving
    all self.count messages before the 10-second watchdog fires.
    """

    def __init__(self, sender_host, receiver_host):
        super(ReceiverFirstAutoLinkTest, self).__init__()
        self.sender_host = sender_host
        self.receiver_host = receiver_host
        self.addr = "dest.al"
        self.count = 300          # messages to send and expect back
        self.sender_conn = None
        self.receiver_conn = None
        self.alt_receiver = None
        self.error = None         # None means the test passed
        self.n_tx = 0
        self.n_rx = 0
        self.n_rel = 0

    def timeout(self):
        self.error = "Timeout Expired - n_tx=%d, n_rx=%d, n_rel=%d" % (self.n_tx, self.n_rx, self.n_rel)
        self.sender_conn.close()
        self.receiver_conn.close()

    def fail(self, error):
        # error=None signals success; either way stop the timer and close up.
        self.error = error
        self.timer.cancel()
        self.sender_conn.close()
        self.receiver_conn.close()

    def on_start(self, event):
        self.timer = event.reactor.schedule(10.0, Timeout(self))
        self.receiver_conn = event.container.connect(self.receiver_host)

    def on_link_opening(self, event):
        # Accept router-initiated auto-link attaches, echoing our address.
        if event.sender:
            self.alt_sender = event.sender
            event.sender.source.address = self.addr
            event.sender.open()
        elif event.receiver:
            self.alt_receiver = event.receiver
            event.receiver.target.address = self.addr
            event.receiver.open()

    def on_link_opened(self, event):
        if event.receiver == self.alt_receiver:
            # Auto-link receiver is attached; start the client sender.
            self.sender_conn = event.container.connect(self.sender_host)
            self.sender = event.container.create_sender(self.sender_conn, self.addr)

    def on_sendable(self, event):
        if event.sender != self.sender:
            return
        while self.n_tx < self.count and self.sender.credit > 0:
            self.sender.send(Message("Message %d" % self.n_tx))
            self.n_tx += 1

    def on_message(self, event):
        self.n_rx += 1
        if self.n_rx == self.count:
            self.fail(None)

    def on_released(self, event):
        # Released messages will be re-sent; roll the tx counter back.
        self.n_rel += 1
        self.n_tx -= 1

    def run(self):
        Container(self).run()
# Script entry point: run the module's unittest suite.
if __name__ == '__main__':
    unittest.main(main_module())
| 38.70844
| 127
| 0.575685
| 3,490
| 30,270
| 4.795989
| 0.083095
| 0.074919
| 0.053531
| 0.080296
| 0.834568
| 0.819632
| 0.792986
| 0.7569
| 0.742263
| 0.694169
| 0
| 0.023677
| 0.313512
| 30,270
| 781
| 128
| 38.758003
| 0.781809
| 0.025372
| 0
| 0.715421
| 0
| 0
| 0.037586
| 0
| 0
| 0
| 0
| 0
| 0.079491
| 1
| 0.166932
| false
| 0
| 0.031797
| 0
| 0.213037
| 0.00159
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
2934c370560b41240f36b42eeda6d3868de2647e
| 69
|
py
|
Python
|
booleans.py
|
MafikengZ/wethinkcode_bootcamp020
|
523b6491165b90120d8ca0ab4e2b074da8a85512
|
[
"MIT"
] | null | null | null |
booleans.py
|
MafikengZ/wethinkcode_bootcamp020
|
523b6491165b90120d8ca0ab4e2b074da8a85512
|
[
"MIT"
] | null | null | null |
booleans.py
|
MafikengZ/wethinkcode_bootcamp020
|
523b6491165b90120d8ca0ab4e2b074da8a85512
|
[
"MIT"
] | null | null | null |
# Print the result of each boolean comparison, one per line.
comparisons = (42 == 42, 3 != 3, 3 >= 4, 0 < 6, 6 < 0)
for outcome in comparisons:
    print(outcome)
| 13.8
| 15
| 0.536232
| 15
| 69
| 2.466667
| 0.4
| 0.324324
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.218182
| 0.202899
| 69
| 5
| 16
| 13.8
| 0.454545
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 1
|
0
| 7
|
2940e9b6227ab34823bcb8b61b8df5947b657889
| 6,257
|
py
|
Python
|
car_racing/network.py
|
shatayu/copg
|
830b654c8db7ec74f84ee8584e2215200cf8a68d
|
[
"MIT"
] | 16
|
2020-06-22T17:13:36.000Z
|
2022-01-29T10:46:20.000Z
|
car_racing/network.py
|
shatayu/copg
|
830b654c8db7ec74f84ee8584e2215200cf8a68d
|
[
"MIT"
] | 2
|
2020-08-19T11:31:35.000Z
|
2020-11-12T16:11:15.000Z
|
car_racing/network.py
|
shatayu/copg
|
830b654c8db7ec74f84ee8584e2215200cf8a68d
|
[
"MIT"
] | 7
|
2020-06-24T21:54:17.000Z
|
2021-08-30T03:58:32.000Z
|
import torch
import torch.nn as nn
import torch.nn.functional as F
from torch.distributions import Normal
def init_weights(m):
    """Re-initialize Linear layers in-place: N(0, 0.1) weights, 0.1 bias.

    Intended for use with ``nn.Module.apply``; every other module type is
    left untouched.
    """
    if not isinstance(m, nn.Linear):
        return
    nn.init.normal_(m.weight, mean=0., std=0.1)
    nn.init.constant_(m.bias, 0.1)
class ActorVar(nn.Module):
    """Gaussian policy whose mean AND log-std are both network outputs.

    A shared tanh trunk feeds two heads: a tanh-squashed mean and an
    unbounded log-std (clamped to [-20, 0.1] at forward time).
    """

    def __init__(self, state_dim, action_dim, std=1.0):
        super(ActorVar, self).__init__()
        # Layer construction order is preserved so random initialization
        # draws match the original implementation.
        trunk = [nn.Linear(state_dim, 128), nn.Tanh(),
                 nn.Linear(128, 128), nn.Tanh()]
        self.actor = nn.Sequential(*trunk)
        self.actor_tail = nn.Sequential(nn.Linear(128, action_dim), nn.Tanh())
        self.var_tail = nn.Sequential(nn.Linear(128, action_dim))
        self.apply(init_weights)

    def forward(self, state):
        features = self.actor(state)
        mean = self.actor_tail(features)
        # Clamp log-std for numerical stability before exponentiating.
        log_sigma = torch.clamp(self.var_tail(features), min=-20, max=0.1)
        sigma = log_sigma.exp().expand_as(mean)
        return Normal(mean, sigma)
class Actor(nn.Module):
    """ReLU policy network: tanh-squashed mean plus a learned log-std.

    The log-std is a free parameter, shared across states (one entry per
    action dimension), initialized to ``std``.
    """

    def __init__(self, state_dim, action_dim, std=1.0):
        super(Actor, self).__init__()
        # Layer construction order is preserved so random initialization
        # draws match the original implementation.
        self.actor = nn.Sequential(
            nn.Linear(state_dim, 128),
            nn.ReLU(),
            nn.Linear(128, 128),
            nn.ReLU(),
            nn.Linear(128, action_dim),
            nn.Tanh(),
        )
        self.log_std = nn.Parameter(torch.ones(action_dim) * std)
        self.apply(init_weights)

    def forward(self, state):
        mean = self.actor(state)
        # NOTE(review): clamping of log_std is disabled in this variant.
        sigma = self.log_std.exp().expand_as(mean)
        return Normal(mean, sigma)
class Critic(nn.Module):
    """ReLU state-value network producing a scalar V(s) per input row."""

    def __init__(self, state_dim):
        super(Critic, self).__init__()
        # Layer construction order is preserved so random initialization
        # draws match the original implementation.
        self.critic = nn.Sequential(
            nn.Linear(state_dim, 128),
            nn.ReLU(),
            nn.Linear(128, 128),
            nn.ReLU(),
            nn.Linear(128, 1),
        )
        self.apply(init_weights)

    def forward(self, state):
        return self.critic(state)
class Actor_tan(nn.Module):
    """Tanh-activated policy network: squashed mean plus a learned log-std.

    Identical to ``Actor`` except the hidden activations are Tanh instead
    of ReLU.
    """

    def __init__(self, state_dim, action_dim, std=1.0):
        super(Actor_tan, self).__init__()
        # Layer construction order is preserved so random initialization
        # draws match the original implementation.
        self.actor = nn.Sequential(
            nn.Linear(state_dim, 128),
            nn.Tanh(),
            nn.Linear(128, 128),
            nn.Tanh(),
            nn.Linear(128, action_dim),
            nn.Tanh(),
        )
        self.log_std = nn.Parameter(torch.ones(action_dim) * std)
        self.apply(init_weights)

    def forward(self, state):
        mean = self.actor(state)
        # NOTE(review): clamping of log_std is disabled in this variant.
        sigma = self.log_std.exp().expand_as(mean)
        return Normal(mean, sigma)
class Critic_tan(nn.Module):
    """Tanh-activated state-value network producing a scalar V(s)."""

    def __init__(self, state_dim):
        super(Critic_tan, self).__init__()
        # Layer construction order is preserved so random initialization
        # draws match the original implementation.
        self.critic = nn.Sequential(
            nn.Linear(state_dim, 128),
            nn.Tanh(),
            nn.Linear(128, 128),
            nn.Tanh(),
            nn.Linear(128, 1),
        )
        self.apply(init_weights)

    def forward(self, state):
        return self.critic(state)
class ActorCritic(nn.Module):
    """Combined policy/value network with a learned, state-independent log-std.

    forward() returns ``(dist, value)``: a Normal action distribution and
    the scalar state value.
    """

    def __init__(self, state_dim, action_dim, std=1.0):
        super(ActorCritic, self).__init__()
        # Construction order (actor, critic, log_std) is preserved so random
        # initialization draws match the original implementation.
        self.actor = nn.Sequential(
            nn.Linear(state_dim, 128),
            nn.Tanh(),
            nn.Linear(128, 128),
            nn.Tanh(),
            nn.Linear(128, action_dim),
            nn.Tanh(),
        )
        self.critic = nn.Sequential(
            nn.Linear(state_dim, 128),
            nn.Tanh(),
            nn.Linear(128, 128),
            nn.Tanh(),
            nn.Linear(128, 1),
        )
        self.log_std = nn.Parameter(torch.ones(action_dim) * std)
        self.apply(init_weights)

    def forward(self, state):
        mean = self.actor(state)
        value = self.critic(state)
        sigma = self.log_std.exp().expand_as(mean)
        return Normal(mean, sigma), value
class ActorCriticVar(nn.Module):
    """Combined policy/value network whose log-std is a network output.

    A shared tanh trunk feeds mean and log-std heads; a separate tanh
    critic produces the scalar state value.  forward() returns
    ``(dist, value)``.
    """

    def __init__(self, state_dim, action_dim, std=1.0):
        super(ActorCriticVar, self).__init__()
        # Construction order (trunk, heads, critic) is preserved so random
        # initialization draws match the original implementation.
        self.actor = nn.Sequential(
            nn.Linear(state_dim, 128),
            nn.Tanh(),
            nn.Linear(128, 128),
            nn.Tanh(),
        )
        self.actor_tail = nn.Sequential(nn.Linear(128, action_dim), nn.Tanh())
        self.var_tail = nn.Sequential(nn.Linear(128, action_dim))
        self.critic = nn.Sequential(
            nn.Linear(state_dim, 128),
            nn.Tanh(),
            nn.Linear(128, 128),
            nn.Tanh(),
            nn.Linear(128, 1),
        )
        self.apply(init_weights)

    def forward(self, state):
        value = self.critic(state)
        features = self.actor(state)
        mean = self.actor_tail(features)
        # Clamp log-std for numerical stability before exponentiating.
        log_sigma = torch.clamp(self.var_tail(features), min=-20, max=0.1)
        sigma = log_sigma.exp().expand_as(mean)
        return Normal(mean, sigma), value
| 35.95977
| 82
| 0.486175
| 754
| 6,257
| 3.838196
| 0.082228
| 0.08293
| 0.076019
| 0.089841
| 0.908086
| 0.901175
| 0.901175
| 0.901175
| 0.899102
| 0.87284
| 0
| 0.065384
| 0.393799
| 6,257
| 174
| 83
| 35.95977
| 0.697601
| 0.059773
| 0
| 0.825758
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.113636
| false
| 0
| 0.030303
| 0
| 0.25
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
46584e45dbd6f9126857d53dfb2607fb7dbada5e
| 167
|
py
|
Python
|
code_icc/archs/__init__.py
|
ThmCuong/IIC-Python3
|
5a02b40ffa07b159fa7e89cf5b4ed781f4798ff1
|
[
"MIT"
] | null | null | null |
code_icc/archs/__init__.py
|
ThmCuong/IIC-Python3
|
5a02b40ffa07b159fa7e89cf5b4ed781f4798ff1
|
[
"MIT"
] | null | null | null |
code_icc/archs/__init__.py
|
ThmCuong/IIC-Python3
|
5a02b40ffa07b159fa7e89cf5b4ed781f4798ff1
|
[
"MIT"
] | null | null | null |
# from code_icc.archs.cluster import *
# from code_icc.archs.segmentation import *
# from code_icc.archs.semisup import *
from . import cluster, segmentation, semisup
| 33.4
| 44
| 0.784431
| 23
| 167
| 5.565217
| 0.347826
| 0.1875
| 0.257813
| 0.375
| 0.34375
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.125749
| 167
| 4
| 45
| 41.75
| 0.876712
| 0.688623
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
466dc7de3e009cba0b24e356915fdf7df38eeaf0
| 2,612
|
py
|
Python
|
segmenters/audio/AudioSlicer/tests/test_audioslicer.py
|
sidphbot/jina-hub
|
ab195030b72353c9b803874e2c99829fb75e1b17
|
[
"Apache-2.0"
] | 106
|
2020-04-28T10:24:08.000Z
|
2022-03-15T02:30:27.000Z
|
segmenters/audio/AudioSlicer/tests/test_audioslicer.py
|
sidphbot/jina-hub
|
ab195030b72353c9b803874e2c99829fb75e1b17
|
[
"Apache-2.0"
] | 6,808
|
2020-05-01T04:13:43.000Z
|
2021-06-23T08:04:02.000Z
|
segmenters/audio/AudioSlicer/tests/test_audioslicer.py
|
sidphbot/jina-hub
|
ab195030b72353c9b803874e2c99829fb75e1b17
|
[
"Apache-2.0"
] | 86
|
2020-04-29T09:50:29.000Z
|
2022-01-25T05:42:44.000Z
|
import numpy as np
from .. import AudioSlicer
def test_slice_mono():
    """Slicing a mono signal yields n_frames chunks for each of the two docs."""
    n_frames = 100
    frame_length = 2048
    mono = np.random.randn(frame_length * n_frames)
    slicer = AudioSlicer(frame_length, frame_length)
    per_doc = slicer.segment(np.stack([mono, mono]))
    assert len(per_doc) == 2
    for chunks in per_doc:
        assert len(chunks) == n_frames
def test_slice_stereo():
    """Slicing a stereo signal yields n_frames chunks per channel per doc."""
    n_frames = 100
    frame_length = 2048
    stereo = np.random.randn(2, frame_length * n_frames)
    slicer = AudioSlicer(frame_length, frame_length)
    per_doc = slicer.segment(np.stack([stereo, stereo]))
    assert len(per_doc) == 2
    for chunks in per_doc:
        assert len(chunks) == n_frames * 2
def test_location_mono():
    """Each chunk of a mono doc carries its [start, end] location and a 'mono' tag."""
    frame_length = 10
    hop_length = 5
    n_frames = 5
    num_docs = 3
    num_channels = 1
    signal_orig = np.random.randn(frame_length * n_frames)
    # NOTE(review): expected_n_frames divides by hop_length although the
    # segmenter below is built with hop_length=frame_length — kept as-is.
    expected_n_frames = (signal_orig.shape[0] - frame_length) / hop_length
    expected_locations = [
        [k * frame_length, k * frame_length + frame_length]
        for k in range(int(expected_n_frames))
    ]
    expected_channel = 'mono'
    segmenter = AudioSlicer(frame_length=frame_length, hop_length=frame_length)
    docs = segmenter.segment(np.stack([signal_orig] * num_docs))
    assert len(docs) == num_docs
    for doc in docs:
        assert len(doc) == n_frames * num_channels
        for k, chunk in enumerate(doc):
            assert chunk['location'] == expected_locations[k % n_frames]
            assert chunk['tags']['channel'] == expected_channel
def test_location_stereo():
    """Stereo chunks carry per-frame locations plus 'left'/'right' channel tags."""
    frame_length = 10
    hop_length = 5
    n_frames = 5
    num_docs = 3
    num_channels = 2
    signal_orig = np.random.randn(num_channels, frame_length * n_frames)
    expected_n_frames = (signal_orig.shape[1] - frame_length) / hop_length
    expected_locations = [
        [k * frame_length, k * frame_length + frame_length]
        for k in range(int(expected_n_frames))
    ]
    segmenter = AudioSlicer(frame_length=frame_length, hop_length=frame_length)
    docs = segmenter.segment(np.stack([signal_orig] * num_docs))
    assert len(docs) == num_docs
    for doc in docs:
        assert len(doc) == n_frames * num_channels
        for k, chunk in enumerate(doc):
            assert chunk['location'] == expected_locations[k % n_frames]
            # First n_frames chunks come from the left channel, the rest
            # from the right.
            expected_channel = 'left' if k // n_frames == 0 else 'right'
            assert chunk['tags']['channel'] == expected_channel
| 34.826667
| 117
| 0.697933
| 364
| 2,612
| 4.68956
| 0.159341
| 0.167545
| 0.079672
| 0.077329
| 0.891623
| 0.878149
| 0.834798
| 0.834798
| 0.834798
| 0.821324
| 0
| 0.015896
| 0.205207
| 2,612
| 74
| 118
| 35.297297
| 0.806358
| 0
| 0
| 0.714286
| 0
| 0
| 0.019525
| 0
| 0
| 0
| 0
| 0
| 0.214286
| 1
| 0.071429
| false
| 0
| 0.035714
| 0
| 0.107143
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
4674a7628a51d24603356972bab35aa444bde7eb
| 8,271
|
py
|
Python
|
torpido/wavelet/wavelets/dmey.py
|
AP-Atul/Torpido
|
a646b4d6de7f2e2c96de4c64ce3113f53e3931c2
|
[
"Unlicense"
] | 21
|
2020-12-23T07:13:10.000Z
|
2022-01-12T10:32:22.000Z
|
wavelet/wavelets/dmey.py
|
AP-Atul/wavelets-ext
|
00ced22462c369584ebd32f9b5f357f092de0142
|
[
"MIT"
] | 2
|
2020-12-30T10:45:42.000Z
|
2021-09-25T09:52:00.000Z
|
wavelet/wavelets/dmey.py
|
AP-Atul/wavelets-ext
|
00ced22462c369584ebd32f9b5f357f092de0142
|
[
"MIT"
] | 1
|
2021-02-06T21:39:41.000Z
|
2021-02-06T21:39:41.000Z
|
""" Discrete Meyer (FIR Approximation) wavelet """


class Meyer:
    """
    Properties
    ----------
    near symmetric, orthogonal, biorthogonal

    All values are from http://wavelets.pybytes.com/wavelet/dmey/

    Only the decomposition low-pass filter is tabulated; the other three
    banks are derived from it by the standard quadrature-mirror relations
    (time reversal plus alternating sign flips), which reproduce the
    published tables exactly.
    """

    __name__ = "Meyer Wavelet"
    __motherWaveletLength__ = 62  # length of the mother wavelet
    __transformWaveletLength__ = 2  # minimum wavelength of input signal

    # decomposition low-pass filter (62 taps)
    decompositionLowFilter = [
        0.0, -1.009999956941423e-12,
        8.519459636796214e-09, -1.111944952595278e-08,
        -1.0798819539621958e-08, 6.066975741351135e-08,
        -1.0866516536735883e-07, 8.200680650386481e-08,
        1.1783004497663934e-07, -5.506340565252278e-07,
        1.1307947017916706e-06, -1.489549216497156e-06,
        7.367572885903746e-07, 3.20544191334478e-06,
        -1.6312699734552807e-05, 6.554305930575149e-05,
        -0.0006011502343516092, -0.002704672124643725,
        0.002202534100911002, 0.006045814097323304,
        -0.006387718318497156, -0.011061496392513451,
        0.015270015130934803, 0.017423434103729693,
        -0.03213079399021176, -0.024348745906078023,
        0.0637390243228016, 0.030655091960824263,
        -0.13284520043622938, -0.035087555656258346,
        0.44459300275757724, 0.7445855923188063,
        0.44459300275757724, -0.035087555656258346,
        -0.13284520043622938, 0.030655091960824263,
        0.0637390243228016, -0.024348745906078023,
        -0.03213079399021176, 0.017423434103729693,
        0.015270015130934803, -0.011061496392513451,
        -0.006387718318497156, 0.006045814097323304,
        0.002202534100911002, -0.002704672124643725,
        -0.0006011502343516092, 6.554305930575149e-05,
        -1.6312699734552807e-05, 3.20544191334478e-06,
        7.367572885903746e-07, -1.489549216497156e-06,
        1.1307947017916706e-06, -5.506340565252278e-07,
        1.1783004497663934e-07, 8.200680650386481e-08,
        -1.0866516536735883e-07, 6.066975741351135e-08,
        -1.0798819539621958e-08, -1.111944952595278e-08,
        8.519459636796214e-09, -1.009999956941423e-12,
    ]

    # reconstruction low-pass: time-reversed decomposition low-pass
    reconstructionLowFilter = decompositionLowFilter[::-1]

    # decomposition high-pass: quadrature mirror of the reconstruction
    # low-pass (negate the even-indexed taps)
    decompositionHighFilter = [
        (c if k % 2 else -c)
        for k, c in enumerate(decompositionLowFilter[::-1])
    ]

    # reconstruction high-pass: time-reversed decomposition high-pass
    reconstructionHighFilter = decompositionHighFilter[::-1]
| 29.434164
| 72
| 0.61226
| 671
| 8,271
| 7.529061
| 0.131148
| 0.014252
| 0.030087
| 0.031671
| 0.917656
| 0.917656
| 0.917656
| 0.917656
| 0.917656
| 0.917656
| 0
| 0.771029
| 0.29138
| 8,271
| 280
| 73
| 29.539286
| 0.09094
| 0.038448
| 0
| 0.953846
| 0
| 0
| 0.001643
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0.030769
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 9
|
d3b9182e3ae81e3318899907e0087a5c6d297bfd
| 71
|
py
|
Python
|
poputils/boto/ec2.py
|
GaretJax/pop-utils
|
2cdfaf24c2f8678edfab1f430c07611d488247d5
|
[
"MIT"
] | null | null | null |
poputils/boto/ec2.py
|
GaretJax/pop-utils
|
2cdfaf24c2f8678edfab1f430c07611d488247d5
|
[
"MIT"
] | 1
|
2021-03-22T17:12:51.000Z
|
2021-03-22T17:12:51.000Z
|
poputils/boto/ec2.py
|
GaretJax/pop-utils
|
2cdfaf24c2f8678edfab1f430c07611d488247d5
|
[
"MIT"
] | null | null | null |
import boto.ec2
class EC2Connection(boto.ec2.EC2Connection):
    """Project-local subclass of :class:`boto.ec2.EC2Connection`.

    Adds no behavior yet — presumably exists so callers import the
    connection class from this package rather than from boto directly
    (TODO confirm intent with the package owners).
    """
    pass
| 14.2
| 44
| 0.774648
| 9
| 71
| 6.111111
| 0.666667
| 0.254545
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.065574
| 0.140845
| 71
| 5
| 45
| 14.2
| 0.836066
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0.333333
| 0.333333
| 0
| 0.666667
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 1
| 0
| 1
| 0
|
0
| 7
|
d3d2670fc23a6fee1dad79d47791611c9fe707c5
| 25,860
|
py
|
Python
|
tests/test_benchmark.py
|
ajayarora1235/Orion
|
69e258ebcb2c19e63054453b3cb2cd74043ef433
|
[
"MIT"
] | 1
|
2021-06-05T07:46:57.000Z
|
2021-06-05T07:46:57.000Z
|
tests/test_benchmark.py
|
ajayarora1235/Orion
|
69e258ebcb2c19e63054453b3cb2cd74043ef433
|
[
"MIT"
] | 1
|
2020-12-13T12:45:57.000Z
|
2020-12-13T12:45:57.000Z
|
tests/test_benchmark.py
|
ajayarora1235/Orion
|
69e258ebcb2c19e63054453b3cb2cd74043ef433
|
[
"MIT"
] | null | null | null |
from unittest import TestCase
from unittest.mock import ANY, Mock, call, patch
import pandas as pd
from mlblocks import MLPipeline
from orion import benchmark
from orion.evaluation import CONTEXTUAL_METRICS as METRICS
from orion.evaluation import contextual_confusion_matrix
def test__sort_leaderboard_rank():
    """Sorting by an existing metric reverses the rows and adds a rank column."""
    score = pd.DataFrame({
        'pipeline': range(5),
        'f1': range(5),
    })
    expected = pd.DataFrame({
        'pipeline': range(5)[::-1],
        'rank': range(1, 6),
        'f1': range(5)[::-1],
    })
    returned = benchmark._sort_leaderboard(score, 'f1', METRICS)
    pd.testing.assert_frame_equal(returned, expected)
def test__sort_leaderboard_rank_does_not_exist():
rank = 'does not exist'
metrics = {'f1': METRICS['f1']}
score = pd.DataFrame({
'pipeline': range(5),
'f1': range(5),
})
expected_return = pd.DataFrame({
'pipeline': range(5)[::-1],
'rank': range(1, 6),
'f1': range(5)[::-1],
})
returned = benchmark._sort_leaderboard(score, rank, metrics)
pd.testing.assert_frame_equal(returned, expected_return)
def test__sort_leaderboard_no_rank():
rank = None
metrics = METRICS
score = {k: range(5) for k in metrics.keys()}
score['pipeline'] = range(5)
score = pd.DataFrame(score)
expected_return = score.iloc[::-1].reset_index(drop=True)
expected_return['rank'] = range(1, 6)
returned = benchmark._sort_leaderboard(score, rank, metrics)
assert len(returned.columns) == len(expected_return.columns)
assert sorted(returned.columns) == sorted(expected_return.columns)
pd.testing.assert_frame_equal(returned, expected_return[returned.columns])
def test__detrend_signal_trend():
df = pd.DataFrame({
'timestamp': range(5),
'value': range(5)
})
expected_return = pd.DataFrame({
'timestamp': range(5),
'value': [0.0] * 5,
})
returned = benchmark._detrend_signal(df, 'value')
pd.testing.assert_frame_equal(returned, expected_return)
def test__detrend_signal_no_trend():
df = pd.DataFrame({
'timestamp': range(5),
'value': [0.0] * 5
})
expected_return = df.copy()
returned = benchmark._detrend_signal(df, 'value')
pd.testing.assert_frame_equal(returned, expected_return)
def test__get_parameter_pipeline():
hyperparameters = {
"pipeline1": "pipeline1.json",
"pipeline2": "pipeline2.json",
}
pipeline = "pipeline1"
expected_return = "pipeline1.json"
returned = benchmark._get_parameter(hyperparameters, pipeline)
assert returned == expected_return
def test__get_parameter_dataset():
hyperparameters = {
"dataset1": {
"pipeline1": "pipeline1.json",
"pipeline2": "pipeline2.json",
}
}
dataset = "dataset1"
expected_return = {
"pipeline1": "pipeline1.json",
"pipeline2": "pipeline2.json",
}
returned = benchmark._get_parameter(hyperparameters, dataset)
assert returned == expected_return
def test__get_parameter_does_not_exist():
hyperparameters = None
pipeline = "pipeline1"
expected_return = None
returned = benchmark._get_parameter(hyperparameters, pipeline)
assert returned == expected_return
@patch('orion.benchmark.load_signal')
def test__load_signal_test_split_true(load_signal_mock):
train = Mock(autospec=pd.DataFrame)
test = Mock(autospec=pd.DataFrame)
load_signal_mock.return_value = (train, test)
test_split = True
returned = benchmark._load_signal('signal-name', test_split)
assert isinstance(returned, tuple)
assert len(returned) == 2
expected_calls = [
call('signal-name-train'),
call('signal-name-test')
]
assert load_signal_mock.call_args_list == expected_calls
@patch('orion.benchmark.load_signal')
def test__load_signal_test_split_false(load_signal_mock):
df = pd.DataFrame({
'timestamp': list(range(10)),
'value': list(range(10, 20))
})
load_signal_mock.return_value = df
test_split = False
returned = benchmark._load_signal('signal-name', test_split)
assert isinstance(returned, tuple)
assert len(returned) == 2
train, test = returned
pd.testing.assert_frame_equal(train, test)
expected_calls = [
call('signal-name'),
]
assert load_signal_mock.call_args_list == expected_calls
@patch('orion.benchmark.load_signal')
def test__load_signal_test_split_float(load_signal_mock):
train = Mock(autospec=pd.DataFrame)
test = Mock(autospec=pd.DataFrame)
load_signal_mock.return_value = (train, test)
test_split = 0.2
returned = benchmark._load_signal('signal-name', test_split)
assert isinstance(returned, tuple)
assert len(returned) == 2
expected_calls = [
call('signal-name', test_size=test_split),
]
assert load_signal_mock.call_args_list == expected_calls
class TestBenchmark(TestCase):
@classmethod
def setup_class(cls):
cls.pipeline = Mock(autospec=MLPipeline)
cls.name = 'pipeline-name'
cls.dataset = 'dataset-name'
cls.signal = 'signal-name'
cls.hyper = None
cls.distributed = False
cls.rank = 'metric-name'
cls.metrics = {
'metric-name': Mock(autospec=METRICS['f1'], return_value=1)
}
def set_score(self, metric, elapsed, test_split):
return {
'metric-name': metric,
'elapsed': elapsed,
'pipeline': self.name,
'split': test_split,
'dataset': self.dataset,
'signal': self.signal,
'status': 'OK'
}
@patch('orion.benchmark.load_anomalies')
@patch('orion.benchmark.analyze')
@patch('orion.benchmark._load_pipeline')
@patch('orion.benchmark.load_signal')
def test__evaluate_signal(
self, load_signal_mock, load_pipeline_mock, analyze_mock, load_anomalies_mock):
train = Mock(autospec=pd.DataFrame)
test = Mock(autospec=pd.DataFrame)
load_signal_mock.side_effect = [train, test]
load_pipeline_mock.return_value = self.pipeline
anomalies = Mock(autospec=pd.DataFrame)
analyze_mock.return_value = anomalies
returned = benchmark._evaluate_signal(
self.pipeline, self.name, self.dataset, self.signal, self.hyper, self.metrics, True)
expected_return = self.set_score(1, ANY, ANY)
assert returned == expected_return
expected_calls = [
call('signal-name-train'),
call('signal-name-test')
]
assert load_signal_mock.call_args_list == expected_calls
load_pipeline_mock.assert_called_once_with(self.pipeline, self.hyper)
analyze_mock.assert_called_once_with(self.pipeline, train, test)
load_anomalies_mock.assert_called_once_with(self.signal)
@patch('orion.benchmark.load_anomalies')
@patch('orion.benchmark.analyze')
@patch('orion.benchmark._load_pipeline')
@patch('orion.benchmark.load_signal')
def test__evaluate_signal_exception(
self, load_signal_mock, load_pipeline_mock, analyze_mock, load_anomalies_mock):
train = Mock(autospec=pd.DataFrame)
test = Mock(autospec=pd.DataFrame)
load_signal_mock.side_effect = [train, test]
load_pipeline_mock.return_value = self.pipeline
analyze_mock.side_effect = Exception("failed analyze.")
returned = benchmark._evaluate_signal(
self.pipeline, self.name, self.dataset, self.signal, self.hyper, self.metrics, True)
expected_return = self.set_score(0, ANY, ANY)
expected_return['status'] = 'ERROR'
assert returned == expected_return
expected_calls = [
call('signal-name-train'),
call('signal-name-test')
]
assert load_signal_mock.call_args_list == expected_calls
load_pipeline_mock.assert_called_once_with(self.pipeline, self.hyper)
analyze_mock.assert_called_once_with(self.pipeline, train, test)
assert load_anomalies_mock.called
@patch('orion.benchmark.load_anomalies')
@patch('orion.benchmark.analyze')
@patch('orion.benchmark._load_pipeline')
@patch('orion.benchmark.load_signal')
def test__evaluate_signal_exception_confusion_matrix(
self, load_signal_mock, load_pipeline_mock, analyze_mock, load_anomalies_mock):
anomalies = pd.DataFrame({
'start': [10, 35],
'end': [20, 40]
})
train = Mock(autospec=pd.DataFrame)
test = Mock(autospec=pd.DataFrame)
load_signal_mock.side_effect = [train, test]
load_pipeline_mock.return_value = self.pipeline
load_anomalies_mock.return_value = anomalies
analyze_mock.side_effect = Exception("failed analyze.")
metrics = {'confusion_matrix': Mock(autospec=contextual_confusion_matrix)}
metrics = {**metrics, **self.metrics}
returned = benchmark._evaluate_signal(
self.pipeline, self.name, self.dataset, self.signal, self.hyper, metrics, True)
expected_return = self.set_score(0, ANY, ANY)
expected_return['status'] = 'ERROR'
expected_return['confusion_matrix'] = (None, 0, 2, 0)
assert returned == expected_return
@patch('orion.benchmark.load_anomalies')
@patch('orion.benchmark.analyze')
@patch('orion.benchmark._load_pipeline')
@patch('orion.benchmark.load_signal')
def test__evaluate_signal_test_split(
self, load_signal_mock, load_pipeline_mock, analyze_mock, load_anomalies_mock):
train = Mock(autospec=pd.DataFrame)
test = Mock(autospec=pd.DataFrame)
load_signal_mock.side_effect = [train, test]
load_pipeline_mock.return_value = self.pipeline
test_split = True
returned = benchmark._evaluate_signal(
self.pipeline, self.name, self.dataset, self.signal, self.hyper, self.metrics,
test_split=test_split)
expected_return = self.set_score(1, ANY, test_split)
assert returned == expected_return
expected_calls = [
call('signal-name-train'),
call('signal-name-test')
]
assert load_signal_mock.call_args_list == expected_calls
load_pipeline_mock.assert_called_once_with(self.pipeline, self.hyper)
analyze_mock.assert_called_once_with(self.pipeline, train, test)
load_anomalies_mock.assert_called_once_with(self.signal)
@patch('orion.benchmark.load_anomalies')
@patch('orion.benchmark.analyze')
@patch('orion.benchmark._load_pipeline')
@patch('orion.benchmark.load_signal')
def test__evaluate_signal_no_test_split(
self, load_signal_mock, load_pipeline_mock, analyze_mock, load_anomalies_mock):
train = test = Mock(autospec=pd.DataFrame)
load_signal_mock.side_effect = [train, test]
load_pipeline_mock.return_value = self.pipeline
test_split = False
returned = benchmark._evaluate_signal(
self.pipeline, self.name, self.dataset, self.signal, self.hyper, self.metrics,
test_split=test_split)
expected_return = self.set_score(1, ANY, test_split)
assert returned == expected_return
expected_calls = [
call('signal-name')
]
assert load_signal_mock.call_args_list == expected_calls
load_pipeline_mock.assert_called_once_with(self.pipeline, self.hyper)
analyze_mock.assert_called_once_with(self.pipeline, train, test)
load_anomalies_mock.assert_called_once_with(self.signal)
@patch('orion.benchmark.load_anomalies')
@patch('orion.benchmark.analyze')
@patch('orion.benchmark._load_pipeline')
@patch('orion.benchmark.load_signal')
def test__evaluate_signal_no_detrend(
self, load_signal_mock, load_pipeline_mock, analyze_mock, load_anomalies_mock):
train = Mock(autospec=pd.DataFrame)
test = Mock(autospec=pd.DataFrame)
load_signal_mock.side_effect = [train, test]
load_pipeline_mock.return_value = self.pipeline
detrend = False
returned = benchmark._evaluate_signal(
self.pipeline, self.name, self.dataset, self.signal, self.hyper, self.metrics,
test_split=True, detrend=detrend)
expected_return = self.set_score(1, ANY, ANY)
assert returned == expected_return
expected_calls = [
call('signal-name-train'),
call('signal-name-test')
]
assert load_signal_mock.call_args_list == expected_calls
load_pipeline_mock.assert_called_once_with(self.pipeline, self.hyper)
analyze_mock.assert_called_once_with(self.pipeline, train, test)
load_anomalies_mock.assert_called_once_with(self.signal)
@patch('orion.benchmark.load_anomalies')
@patch('orion.benchmark.analyze')
@patch('orion.benchmark._load_pipeline')
@patch('orion.benchmark.load_signal')
@patch('orion.benchmark._detrend_signal')
def test__evaluate_signal_detrend(self, detrend_signal_mock, load_signal_mock,
load_pipeline_mock, analyze_mock, load_anomalies_mock):
train = Mock(autospec=pd.DataFrame)
test = Mock(autospec=pd.DataFrame)
detrend_signal_mock.side_effect = [train, test]
load_signal_mock.side_effect = [train, test]
load_pipeline_mock.return_value = self.pipeline
detrend = True
returned = benchmark._evaluate_signal(
self.pipeline, self.name, self.dataset, self.signal, self.hyper, self.metrics,
test_split=True, detrend=detrend)
expected_return = self.set_score(1, ANY, ANY)
assert returned == expected_return
expected_calls = [
call('signal-name-train'),
call('signal-name-test')
]
assert load_signal_mock.call_args_list == expected_calls
expected_calls = [
call(train, 'value'),
call(test, 'value')
]
assert detrend_signal_mock.call_args_list == expected_calls
load_pipeline_mock.assert_called_once_with(self.pipeline, self.hyper)
analyze_mock.assert_called_once_with(self.pipeline, train, test)
load_anomalies_mock.assert_called_once_with(self.signal)
@patch('orion.benchmark._evaluate_signal')
def test__evaluate_pipeline(self, evaluate_signal_mock):
test_split = (True, False)
detrend = False
signals = [self.signal]
score = self.set_score(1, ANY, ANY)
evaluate_signal_mock.return_value = score
benchmark._evaluate_pipeline(
self.pipeline, self.name, self.dataset, signals, self.hyper, self.metrics,
self.distributed, test_split, detrend)
expected_calls = [
call(self.pipeline, self.name, self.dataset, self.signal,
self.hyper, self.metrics, True, detrend),
call(self.pipeline, self.name, self.dataset, self.signal,
self.hyper, self.metrics, False, detrend)
]
assert evaluate_signal_mock.call_args_list == expected_calls
@patch('orion.benchmark._evaluate_signal')
def test__evaluate_pipeline_test_split_none(self, evaluate_signal_mock):
test_split = None
detrend = False
signals = [self.signal]
score = self.set_score(1, ANY, ANY)
evaluate_signal_mock.return_value = score
returned = benchmark._evaluate_pipeline(
self.pipeline, self.name, self.dataset, signals, self.hyper, self.metrics,
self.distributed, test_split, detrend)
expected_return = [
self.set_score(1, ANY, True),
self.set_score(1, ANY, False)
]
assert returned == expected_return
expected_calls = [
call(self.pipeline, self.name, self.dataset, self.signal,
self.hyper, self.metrics, True, detrend),
call(self.pipeline, self.name, self.dataset, self.signal,
self.hyper, self.metrics, False, detrend)
]
assert evaluate_signal_mock.call_args_list == expected_calls
@patch('orion.benchmark._evaluate_signal')
def test__evaluate_pipeline_test_split(self, evaluate_signal_mock):
test_split = True
detrend = False
signals = [self.signal]
score = self.set_score(1, ANY, test_split)
evaluate_signal_mock.return_value = score
expected_return = [score]
returned = benchmark._evaluate_pipeline(
self.pipeline, self.name, self.dataset, signals, self.hyper, self.metrics,
self.distributed, test_split, detrend)
assert returned == expected_return
evaluate_signal_mock.assert_called_once_with(
self.pipeline, self.name, self.dataset, self.signal, self.hyper, self.metrics,
test_split, detrend)
@patch('orion.benchmark._evaluate_signal')
def test__evaluate_pipeline_no_test_split(self, evaluate_signal_mock):
test_split = False
detrend = False
signals = [self.signal]
score = self.set_score(1, ANY, test_split)
evaluate_signal_mock.return_value = score
expected_return = [score]
returned = benchmark._evaluate_pipeline(
self.pipeline, self.name, self.dataset, signals, self.hyper, self.metrics,
self.distributed, test_split, detrend)
assert returned == expected_return
evaluate_signal_mock.assert_called_once_with(
self.pipeline, self.name, self.dataset, self.signal, self.hyper, self.metrics,
test_split, detrend)
@patch('orion.benchmark._evaluate_pipeline')
def test__evaluate_pipelines(self, evaluate_pipeline_mock):
test_split = False
detrend = False
signals = [self.signal]
pipelines = {self.name: self.pipeline}
score = self.set_score(1, ANY, test_split)
evaluate_pipeline_mock.return_value = [score]
expected_return = [score]
returned = benchmark._evaluate_pipelines(pipelines, self.dataset, signals, self.hyper,
self.metrics, self.distributed, test_split,
detrend)
assert returned == expected_return
evaluate_pipeline_mock.assert_called_once_with(
self.pipeline, self.name, self.dataset, signals, self.hyper, self.metrics,
self.distributed, test_split, detrend)
@patch('orion.benchmark._evaluate_pipeline')
def test__evaluate_pipelines_hyperparameter(self, evaluate_pipeline_mock):
test_split = False
detrend = False
signals = [self.signal]
pipelines = {self.name: self.pipeline}
hyperparameter = Mock(autospec=dict)
hyperparameters = {self.name: hyperparameter}
score = self.set_score(1, ANY, test_split)
evaluate_pipeline_mock.return_value = [score]
expected_return = [score]
returned = benchmark._evaluate_pipelines(pipelines, self.dataset, signals, hyperparameters,
self.metrics, self.distributed, test_split,
detrend)
assert returned == expected_return
evaluate_pipeline_mock.assert_called_once_with(
self.pipeline, self.name, self.dataset, signals, hyperparameter, self.metrics,
self.distributed, test_split, detrend)
@patch('orion.benchmark._evaluate_pipelines')
def test__evaluate_datasets(self, evaluate_pipelines_mock):
test_split = False
detrend = False
signals = [self.signal]
datasets = {self.dataset: signals}
pipelines = {self.name, self.pipeline}
score = self.set_score(1, ANY, test_split)
evaluate_pipelines_mock.return_value = [score]
order = ['dataset', 'elapsed', 'metric-name', 'pipeline', 'signal', 'split', 'status']
expected_return = pd.DataFrame.from_records([{
'metric-name': 1,
'elapsed': ANY,
'split': test_split,
'pipeline': self.name,
'dataset': self.dataset,
'signal': self.signal,
'status': 'OK'
}])[order]
returned = benchmark._evaluate_datasets(
pipelines, datasets, self.hyper, self.metrics, self.distributed, test_split, detrend)
pd.testing.assert_frame_equal(returned, expected_return)
evaluate_pipelines_mock.assert_called_once_with(
pipelines, self.dataset, signals, self.hyper, self.metrics,
self.distributed, test_split, detrend)
@patch('orion.benchmark._evaluate_datasets')
def test_benchmark(self, evaluate_datasets_mock):
signals = [self.signal]
datasets = {self.dataset: signals}
pipelines = {self.name, self.pipeline}
score = self.set_score(1, ANY, ANY)
evaluate_datasets_mock.return_value = pd.DataFrame.from_records([score])
order = [
'pipeline',
'rank',
'dataset',
'elapsed',
'metric-name',
'signal',
'split',
'status']
expected_return = pd.DataFrame.from_records([{
'rank': 1,
'metric-name': 1,
'elapsed': ANY,
'split': ANY,
'pipeline': self.name,
'dataset': self.dataset,
'signal': self.signal,
'status': 'OK'
}])[order]
returned = benchmark.benchmark(
pipelines, datasets, self.hyper, self.metrics, self.rank, self.distributed)
pd.testing.assert_frame_equal(returned, expected_return)
evaluate_datasets_mock.assert_called_once_with(
pipelines, datasets, self.hyper, self.metrics, self.distributed, False, False)
@patch('orion.benchmark._evaluate_datasets')
def test_benchmark_metrics_list(self, evaluate_datasets_mock):
test_split = False
detrend = False
signals = [self.signal]
datasets = {self.dataset: signals}
pipelines = {self.name: self.pipeline}
metric = Mock(autospec=METRICS['f1'], return_value=1)
metric.__name__ = 'metric-name'
metrics = [metric]
metrics_ = {metric.__name__: metric}
score = self.set_score(1, ANY, test_split)
score[metric.__name__] = metric
evaluate_datasets_mock.return_value = pd.DataFrame.from_records([score])
order = [
'pipeline',
'rank',
'dataset',
'elapsed',
'metric-name',
'signal',
'split',
'status']
expected_return = pd.DataFrame.from_records([{
'rank': 1,
'metric-name': metric,
'elapsed': ANY,
'split': test_split,
'pipeline': self.name,
'dataset': self.dataset,
'signal': self.signal,
'status': 'OK'
}])[order]
returned = benchmark.benchmark(pipelines, datasets, self.hyper, metrics, self.rank,
self.distributed, test_split, detrend)
pd.testing.assert_frame_equal(returned, expected_return)
evaluate_datasets_mock.assert_called_once_with(
pipelines, datasets, self.hyper, metrics_, self.distributed, test_split, detrend)
@patch('orion.benchmark._evaluate_datasets')
def test_benchmark_metrics_exception(self, evaluate_datasets_mock):
test_split = False
detrend = False
signals = [self.signal]
datasets = {self.dataset: signals}
pipelines = {self.name: self.pipeline}
metric = 'does-not-exist'
metrics = [metric]
score = self.set_score(1, ANY, test_split)
evaluate_datasets_mock.return_value = pd.DataFrame.from_records([score])
with self.assertRaises(ValueError) as ex:
benchmark.benchmark(pipelines, datasets, self.hyper, metrics, self.rank,
self.distributed, test_split, detrend)
self.assertTrue(metric in ex.exception)
@patch('orion.benchmark._evaluate_datasets')
def test_benchmark_pipelines_list(self, evaluate_datasets_mock):
test_split = False
detrend = False
signals = [self.signal]
datasets = {self.dataset: signals}
pipelines = [self.pipeline]
pipelines_ = {self.pipeline: self.pipeline}
score = self.set_score(1, ANY, test_split)
score['pipeline'] = self.pipeline
evaluate_datasets_mock.return_value = pd.DataFrame.from_records([score])
order = [
'pipeline',
'rank',
'dataset',
'elapsed',
'metric-name',
'signal',
'split',
'status']
expected_return = pd.DataFrame.from_records([{
'rank': 1,
'metric-name': 1,
'elapsed': ANY,
'split': test_split,
'pipeline': self.pipeline,
'dataset': self.dataset,
'signal': self.signal,
'status': 'OK'
}])[order]
returned = benchmark.benchmark(pipelines, datasets, self.hyper, self.metrics, self.rank,
self.distributed, test_split, detrend)
pd.testing.assert_frame_equal(returned, expected_return)
evaluate_datasets_mock.assert_called_once_with(
pipelines_, datasets, self.hyper, self.metrics, self.distributed, test_split, detrend)
| 34.206349
| 99
| 0.643426
| 2,869
| 25,860
| 5.523876
| 0.044615
| 0.036913
| 0.051552
| 0.03155
| 0.863327
| 0.850013
| 0.833922
| 0.814488
| 0.791646
| 0.766911
| 0
| 0.005533
| 0.252166
| 25,860
| 755
| 100
| 34.251656
| 0.813951
| 0
| 0
| 0.726804
| 0
| 0
| 0.09768
| 0.048299
| 0
| 0
| 0
| 0
| 0.12543
| 1
| 0.053265
| false
| 0
| 0.012027
| 0.001718
| 0.068729
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
d3e949e765461c77fd3e3ad1ea45c11d2a42ce98
| 56,129
|
py
|
Python
|
etl_base/dags/sqlg_jobs_QAM.py
|
buckylee2019/sqlg-airflow
|
37610a23b99bea8d9fdc8b066a01736ff2ff0c9d
|
[
"Apache-2.0"
] | null | null | null |
etl_base/dags/sqlg_jobs_QAM.py
|
buckylee2019/sqlg-airflow
|
37610a23b99bea8d9fdc8b066a01736ff2ff0c9d
|
[
"Apache-2.0"
] | null | null | null |
etl_base/dags/sqlg_jobs_QAM.py
|
buckylee2019/sqlg-airflow
|
37610a23b99bea8d9fdc8b066a01736ff2ff0c9d
|
[
"Apache-2.0"
] | 1
|
2022-03-10T03:47:35.000Z
|
2022-03-10T03:47:35.000Z
|
# -*- coding: utf-8 -*-
# Author : Jesse Wei
# LastUpdate : 2020/10/04
# Impact : Jobs generated by SQLG
# Message : Humanity towards others, we live by sharing. Fear can hold you prisoner, only hope can set you free.
# from __future__ import print_function
import logging
import airflow
from datetime import datetime, timedelta
from airflow.operators.sensors import ExternalTaskSensor
from airflow.operators.python_operator import PythonOperator
from airflow.operators.bash_operator import BashOperator
from airflow.contrib.sensors.file_sensor import FileSensor
from airflow import models
from airflow.models import Variable
from acme.operators.sqlg_oracle import OracleOperatorWithTemplatedParams
from airflow.operators.oracle_operator import OracleOperator
# DB_NAME = 'DWH'
# JOB_TYPE=ODS-MAIN
my_taskid = "MV_HR_EMPMSF_H"
MV_HR_EMPMSF_H = OracleOperatorWithTemplatedParams(
task_id=my_taskid,
parameters=({":END_DT_CHAR":"{{ ds_nodash }}"}),
sql= "Begin SQLEXT." + my_taskid + "_SP("+
":END_DT_CHAR"+
"); End;"
)
# JOB_TYPE=ODS-MAIN
my_taskid = "MV_HR_EMPMSF_CN_H"
MV_HR_EMPMSF_CN_H = OracleOperatorWithTemplatedParams(
task_id=my_taskid,
parameters=({":END_DT_CHAR":"{{ ds_nodash }}"}),
sql= "Begin SQLEXT." + my_taskid + "_SP("+
":END_DT_CHAR"+
"); End;"
)
# JOB_TYPE=ODS-MAIN
my_taskid = "MV_HR_EMPMSF_VN_H"
MV_HR_EMPMSF_VN_H = OracleOperatorWithTemplatedParams(
task_id=my_taskid,
parameters=({":END_DT_CHAR":"{{ ds_nodash }}"}),
sql= "Begin SQLEXT." + my_taskid + "_SP("+
":END_DT_CHAR"+
"); End;"
)
# JOB_TYPE=ODS-MAIN
my_taskid = "HR_DEPMSF_H"
HR_DEPMSF_H = OracleOperatorWithTemplatedParams(
task_id=my_taskid,
parameters=({":END_DT_CHAR":"{{ ds_nodash }}"}),
sql= "Begin SQLEXT." + my_taskid + "_SP("+
":END_DT_CHAR"+
"); End;"
)
# JOB_TYPE=ODS-MAIN
my_taskid = "HR_DEPMSF_CN_H"
HR_DEPMSF_CN_H = OracleOperatorWithTemplatedParams(
task_id=my_taskid,
parameters=({":END_DT_CHAR":"{{ ds_nodash }}"}),
sql= "Begin SQLEXT." + my_taskid + "_SP("+
":END_DT_CHAR"+
"); End;"
)
# JOB_TYPE=ODS-MAIN
my_taskid = "HR_DEPMSF_VN_H"
HR_DEPMSF_VN_H = OracleOperatorWithTemplatedParams(
task_id=my_taskid,
parameters=({":END_DT_CHAR":"{{ ds_nodash }}"}),
sql= "Begin SQLEXT." + my_taskid + "_SP("+
":END_DT_CHAR"+
"); End;"
)
# JOB_TYPE=ODS-MAIN
my_taskid = "HISTORYCARD"
HISTORYCARD = OracleOperatorWithTemplatedParams(
task_id=my_taskid,
parameters=({":END_DT_CHAR":"{{ ds_nodash }}"}),
sql= "Begin SQLEXT." + my_taskid + "_SP("+
"); End;"
)
# JOB_TYPE=ODS-MAIN
my_taskid = "PN_SPC"
PN_SPC = OracleOperatorWithTemplatedParams(
task_id=my_taskid,
parameters=({":END_DT_CHAR":"{{ ds_nodash }}"}),
sql= "Begin SQLEXT." + my_taskid + "_SP("+
"); End;"
)
# JOB_TYPE=ODS-MAIN
my_taskid = "SUB_SPC"
SUB_SPC = OracleOperatorWithTemplatedParams(
task_id=my_taskid,
parameters=({":END_DT_CHAR":"{{ ds_nodash }}"}),
sql= "Begin SQLEXT." + my_taskid + "_SP("+
"); End;"
)
# JOB_TYPE=ODS-MAIN
my_taskid = "RWK_GLOBAL_LOT_WS1"
RWK_GLOBAL_LOT_WS1 = OracleOperatorWithTemplatedParams(
task_id=my_taskid,
parameters=({":END_DT_CHAR":"{{ ds_nodash }}"}),
sql= "Begin SQLEXT." + my_taskid + "_SP("+
"); End;"
)
# JOB_TYPE=ODS-MAIN
my_taskid = "RWK_GLOBAL_LOT_DETAIL_WS1"
RWK_GLOBAL_LOT_DETAIL_WS1 = OracleOperatorWithTemplatedParams(
task_id=my_taskid,
parameters=({":END_DT_CHAR":"{{ ds_nodash }}"}),
sql= "Begin SQLEXT." + my_taskid + "_SP("+
"); End;"
)
# JOB_TYPE=ODS-MAIN
my_taskid = "INSTRUMENT_CORRECT_NQJ"
INSTRUMENT_CORRECT_NQJ = OracleOperatorWithTemplatedParams(
task_id=my_taskid,
parameters=({":END_DT_CHAR":"{{ ds_nodash }}"}),
sql= "Begin SQLEXT." + my_taskid + "_SP("+
"); End;"
)
# JOB_TYPE=ODS-MAIN
my_taskid = "INSTRUMENT_INFO_CORRECT_NQJ"
INSTRUMENT_INFO_CORRECT_NQJ = OracleOperatorWithTemplatedParams(
task_id=my_taskid,
parameters=({":END_DT_CHAR":"{{ ds_nodash }}"}),
sql= "Begin SQLEXT." + my_taskid + "_SP("+
"); End;"
)
# JOB_TYPE=ODS-MAIN
my_taskid = "MV_MTL_CROSS_REFERENCES_V"
MV_MTL_CROSS_REFERENCES_V = OracleOperatorWithTemplatedParams(
task_id=my_taskid,
parameters=({":END_DT_CHAR":"{{ ds_nodash }}"}),
sql= "Begin SQLEXT." + my_taskid + "_SP("+
"); End;"
)
# JOB_TYPE=ODS-MAIN
my_taskid = "XX_ERP_ITEM"
XX_ERP_ITEM = OracleOperatorWithTemplatedParams(
task_id=my_taskid,
parameters=({":END_DT_CHAR":"{{ ds_nodash }}"}),
sql= "Begin SQLEXT." + my_taskid + "_SP("+
"); End;"
)
# JOB_TYPE=ODS-MAIN
my_taskid = "ERPIQC"
ERPIQC = OracleOperatorWithTemplatedParams(
task_id=my_taskid,
parameters=({":END_DT_CHAR":"{{ ds_nodash }}"}),
sql= "Begin SQLEXT." + my_taskid + "_SP("+
"); End;"
)
# JOB_TYPE=ODS-MAIN
my_taskid = "ERFORM_DOC_MSG_WS1"
ERFORM_DOC_MSG_WS1 = OracleOperatorWithTemplatedParams(
task_id=my_taskid,
parameters=({":END_DT_CHAR":"{{ ds_nodash }}"}),
sql= "Begin SQLEXT." + my_taskid + "_SP("+
"); End;"
)
# JOB_TYPE=ODS-MAIN
my_taskid = "EF_QCEXCEPTION_MST_WS1"
EF_QCEXCEPTION_MST_WS1 = OracleOperatorWithTemplatedParams(
task_id=my_taskid,
parameters=({":END_DT_CHAR":"{{ ds_nodash }}"}),
sql= "Begin SQLEXT." + my_taskid + "_SP("+
"); End;"
)
# JOB_TYPE=ODS-MAIN
my_taskid = "PN_MODULE"
PN_MODULE = OracleOperatorWithTemplatedParams(
task_id=my_taskid,
parameters=({":END_DT_CHAR":"{{ ds_nodash }}"}),
sql= "Begin SQLEXT." + my_taskid + "_SP("+
"); End;"
)
# JOB_TYPE=ODS-MAIN
my_taskid = "PN_MODULE_MAINTAIN"
PN_MODULE_MAINTAIN = OracleOperatorWithTemplatedParams(
task_id=my_taskid,
parameters=({":END_DT_CHAR":"{{ ds_nodash }}"}),
sql= "Begin SQLEXT." + my_taskid + "_SP("+
"); End;"
)
# JOB_TYPE=ODS-MAIN
my_taskid = "SPC_ABNORMAL"
SPC_ABNORMAL = OracleOperatorWithTemplatedParams(
task_id=my_taskid,
parameters=({":END_DT_CHAR":"{{ ds_nodash }}"}),
sql= "Begin SQLEXT." + my_taskid + "_SP("+
"); End;"
)
# JOB_TYPE=ODS-MAIN
my_taskid = "COPQ_FCTACTUALCOST"
COPQ_FCTACTUALCOST = OracleOperatorWithTemplatedParams(
task_id=my_taskid,
parameters=({":END_DT_CHAR":"{{ ds_nodash }}"}),
sql= "Begin SQLEXT." + my_taskid + "_SP("+
"); End;"
)
# JOB_TYPE=ODS-MAIN
my_taskid = "MTL_MATERIAL_TRANSACTIONS"
MTL_MATERIAL_TRANSACTIONS = OracleOperatorWithTemplatedParams(
task_id=my_taskid,
parameters=({":END_DT_CHAR":"{{ ds_nodash }}"}),
sql= "Begin SQLEXT." + my_taskid + "_SP("+
"); End;"
)
# JOB_TYPE=ODS-MAIN
my_taskid = "COPQ_DIMCATEGORY"
COPQ_DIMCATEGORY = OracleOperatorWithTemplatedParams(
task_id=my_taskid,
parameters=({":END_DT_CHAR":"{{ ds_nodash }}"}),
sql= "Begin SQLEXT." + my_taskid + "_SP("+
"); End;"
)
# JOB_TYPE=ODS-MAIN
my_taskid = "BI_DIMMULTIORG"
BI_DIMMULTIORG = OracleOperatorWithTemplatedParams(
task_id=my_taskid,
parameters=({":END_DT_CHAR":"{{ ds_nodash }}"}),
sql= "Begin SQLEXT." + my_taskid + "_SP("+
"); End;"
)
# JOB_TYPE=ODS-MAIN
my_taskid = "MV_ORG_ORGANIZATION_DEF"
MV_ORG_ORGANIZATION_DEF = OracleOperatorWithTemplatedParams(
task_id=my_taskid,
parameters=({":END_DT_CHAR":"{{ ds_nodash }}"}),
sql= "Begin SQLEXT." + my_taskid + "_SP("+
"); End;"
)
# JOB_TYPE=ODS-MAIN
my_taskid = "MV_ORG_ORGANIZATION_DEF"
MV_ORG_ORGANIZATION_DEF = OracleOperatorWithTemplatedParams(
task_id=my_taskid,
parameters=({":END_DT_CHAR":"{{ ds_nodash }}"}),
sql= "Begin SQLEXT." + my_taskid + "_SP("+
"); End;"
)
# JOB_TYPE=ODS-MAIN
my_taskid = "ERDRLRR_INSPECTION_HEADER_WS1"
ERDRLRR_INSPECTION_HEADER_WS1 = OracleOperatorWithTemplatedParams(
task_id=my_taskid,
parameters=({":END_DT_CHAR":"{{ ds_nodash }}"}),
sql= "Begin SQLEXT." + my_taskid + "_SP("+
"); End;"
)
# JOB_TYPE=ODS-MAIN
my_taskid = "MV_GL_SETS_OF_BOOKS"
MV_GL_SETS_OF_BOOKS = OracleOperatorWithTemplatedParams(
task_id=my_taskid,
parameters=({":END_DT_CHAR":"{{ ds_nodash }}"}),
sql= "Begin SQLEXT." + my_taskid + "_SP("+
"); End;"
)
# JOB_TYPE=ODS-MAIN
my_taskid = "MV_WSH_DELIVERABLES_V"
MV_WSH_DELIVERABLES_V = OracleOperatorWithTemplatedParams(
task_id=my_taskid,
parameters=({":END_DT_CHAR":"{{ ds_nodash }}"}),
sql= "Begin SQLEXT." + my_taskid + "_SP("+
"); End;"
)
# JOB_TYPE=ODS-MAIN
my_taskid = "QKB_ITEM"
QKB_ITEM = OracleOperatorWithTemplatedParams(
task_id=my_taskid,
parameters=({":END_DT_CHAR":"{{ ds_nodash }}"}),
sql= "Begin SQLEXT." + my_taskid + "_SP("+
"); End;"
)
# JOB_TYPE=ODS-MAIN
my_taskid = "ERDRLRR_INSPECTION_STATUS_WS1"
ERDRLRR_INSPECTION_STATUS_WS1 = OracleOperatorWithTemplatedParams(
task_id=my_taskid,
parameters=({":END_DT_CHAR":"{{ ds_nodash }}"}),
sql= "Begin SQLEXT." + my_taskid + "_SP("+
"); End;"
)
# JOB_TYPE=ODS-MAIN
my_taskid = "ERDRLRR_INSPECTION_DETAIL_WS1"
ERDRLRR_INSPECTION_DETAIL_WS1 = OracleOperatorWithTemplatedParams(
task_id=my_taskid,
parameters=({":END_DT_CHAR":"{{ ds_nodash }}"}),
sql= "Begin SQLEXT." + my_taskid + "_SP("+
"); End;"
)
# JOB_TYPE=ODS-MAIN
my_taskid = "ERDRLRR_INSPECTION_RESULT_WS1"
ERDRLRR_INSPECTION_RESULT_WS1 = OracleOperatorWithTemplatedParams(
task_id=my_taskid,
parameters=({":END_DT_CHAR":"{{ ds_nodash }}"}),
sql= "Begin SQLEXT." + my_taskid + "_SP("+
"); End;"
)
# JOB_TYPE=ODS-MAIN
# Each task below calls the stored procedure SQLEXT.<TASK_ID>_SP() with no
# bind arguments.  The original file repeated the same operator boilerplate
# once per task; it is collapsed here into a single data-driven loop.  Every
# operator is still bound to a module-level variable of the same name (via
# globals()), so any dependency wiring elsewhere in the file keeps working,
# and `my_taskid` is left holding the last task id, exactly as before.
_ODS_MAIN_NO_ARG_TASKS = [
    "PLANT",
    "SAP_MATERIALMASTER",
    "MATERIALGROUP",
    "CONTROLTABLE",
    "EMS_LOOKUPVALUE_NQJ",
    "RESULTTYPE",
    "MV_PDE_EXCEPTION_HEADER_V_WS1",
    "MV_PDE_EXCEPTION_EQUIP_V_WS1",
    "MV_PDE_EXCEPTION_DETAIL_V_WS1",
    "PDE_USER_WS1",
    "QCE_REASON_CODE_WS1",
    "EMS_MANUFACTURER_WS1",
    "MODELTYPE",
    "EFLOW_ATLO_SCAR_CN",
    "ATLO_QUESTION_NQJ",
    "EF_QCEXCEPTION_MST_NQJ",
    "ATLO_SCAR_NQJ",
    "MV_XXCS_INCIDENTS_SFCS",
    "CLOUD_WO_NQJ",
    "MATERIALGROUP_NQJ",
    "INSPECTIONLOT_NQJ",
    "MSD_CS_DATA",
    "SAP_MATERIALMASTER_NQJ",
    "ERPIQC_NQJ",
]
for my_taskid in _ODS_MAIN_NO_ARG_TASKS:
    globals()[my_taskid] = OracleOperatorWithTemplatedParams(
        task_id=my_taskid,
        # :END_DT_CHAR is supplied even though this SP call takes no
        # arguments — the original definitions did the same; kept for
        # byte-identical operator configuration.
        parameters={":END_DT_CHAR": "{{ ds_nodash }}"},
        sql="Begin SQLEXT." + my_taskid + "_SP(); End;",
    )
# JOB_TYPE=ODS-MAIN
# Each task below calls the stored procedure SQLEXT.<TASK_ID>_SP(:END_DT_CHAR),
# binding :END_DT_CHAR to the templated execution date ({{ ds_nodash }}).  The
# repeated operator boilerplate is collapsed into one data-driven loop; each
# operator is still bound to a module-level variable of the same name (via
# globals()), so dependency wiring elsewhere in the file keeps working, and
# `my_taskid` ends up holding the last task id, exactly as before.
#
# NOTE(review): the original file defined seven of these task_ids TWICE with
# byte-identical bodies (SDM_MODULE_TYPE, SDM_SHIPPING_PERIOD,
# SDM_WARRANTY_STATUS, SDM_FIXTURE_ANOMALY_CASES, SDM_FIELD_DEFECT_QUANTITY,
# SDM_SHIPPING_QUANTITY, SDM_QUALITY_REJECT_QUANTITY).  Duplicate task_ids in
# one DAG raise DuplicateTaskIdFound in Airflow, so each id is listed once
# here; the resulting set of tasks is unchanged.
_ODS_MAIN_END_DT_TASKS = [
    "SDM_DEPARTMENT_H",
    "SDM_EMPLOYEE_H",
    "SDM_MATERIAL_CATEGORY_QA",
    "SDM_PRODUCT_TYPE",
    "SDM_MODULE_TYPE",
    "SDM_PRODUCT_DEVELOPMENT_TYPE",
    "SDM_MATERIAL_DEFECT_MODE",
    "SDM_ABNORMAL_DESCRIPTION",
    "SDM_CATEGORY",
    "SDM_QA_RESULT",
    "SDM_CONTROL_STATION",
    "SDM_CONTROL_THE_PROJECT",
    "SDM_TURN_AROUND_TIME",
    "SDM_RMA_CASE_STATUS",
    "SDM_PERSON_IN_CHARGE",
    "SDM_CLOSED_DAY_8D",
    "SDM_TIER1",
    "SDM_SHIPPING_DATE",
    "SDM_RETURN_SOURCE",
    "SDM_SHIPPING_PERIOD",
    "SDM_WARRANTY_STATUS",
    "SDM_INVENTORY_OWNER",
    "SDM_MANUFACTURER",
    "SDM_CAVITY_NO",
    "SDM_CASE_CLOSE_STATUS",
    "SDM_STATION",
    "SDM_C_FLOW_DEVELOPMENT_STAGE",
    "SDM_C_FLOW_DEVELOPMENT_DERI",
    "SDM_ATLO_FOR_MP",
    "SDM_MP_FLAG",
    "SDM_PM",
    "SDM_EPR_UPPER_LIMIT_OF_MOD",
    "SDM_MO_NO",
    "SDM_MO_START_MONTH",
    "SDM_MO_PART_TYPE",
    "SDM_EPR_UPPER_LIMIT_OF_SIN",
    "SDM_MP_APPROVE_DATE",
    "SDM_SR_NUMBER",
    "SDM_CSD_REASON_PAY",
    "SDM_CSD_MATERIAL_SCRAP_COS",
    "SDM_CSD_CUSTOMER_PAID_SERV",
    "SDM_IQC_DAILY_INPUT_MANP_A",
    "SDM_IQC_DAILY_INPUT_MANP",
    "SDM_IQC_DAILY_TOTAL_INSP",
    "SDM_IQC_AVERAGE_INSPECTION",
    "SDM_INCOMING_MATERIAL_REJEC",
    "SDM_CUSTOMER_COMPLAIN_CASES",
    "SDM_IN_PROCESS_QUALITY_CONTROL",
    "SDM_QUALITY_ALERT_CASES",
    "SDM_EQUIPMENT_ANOMALY_CASE",
    "SDM_FIXTURE_ANOMALY_CASES",
    "SDM_EQUIPMENT_FIXTURE_ANOM",
    "SDM_FAULT_INJECTION_DR",
    "SDM_Q_SCAN_DEFECT_RATE_DR",
    "SDM_FINAL_QUALITY_INSPECTI",
    "SDM_FQC_LRR",
    "SDM_QUALITY_HOLD_CASES",
    "SDM_CLOSE_WITHIN_SIPULATED",
    "SDM_CUSTOMER_COMPLAIN_FOR",
    "SDM_ON_TIME_CLOSE_RATIO_FOR_WN",
    "SDM_CLOSE_WITHIN_14_DAYS_FO",
    "SDM_CUSTOMER_COMPLAIN_FOR_S",
    "SDM_CLOSE_WITHIN_14_DAYS_RATIO",
    "SDM_FIELD_DEFECT_QUANTITY",
    "SDM_SHIPPING_QUANTITY",
    "SDM_AUTOMOTIVE_PRODUCT_FIELD_D",
    "SDM_ON_SITE_REWORK_QUANTITY",
    "SDM_IN_WARRANTY_RETURN_QUANTITY",
    "SDM_QUALITY_REJECT_QUANTITY",
    "SDM_MODELS_WITH_MO_RECORDS",
    "SDM_CSD_PLANNED_SHIPPING",
    "SDM_ACTUAL_CALIBRATION",
    "SDM_PLANNED_CALIBRATION",
    "SDM_CALIBRATION_COMPLETED_RATE",
    "SDM_TICKET_TYPE",
    "SDM_FINAL_QUALITY_INSPECT",
    "DIM_MATERIAL_CATEGORY_QA",
    "DIM_PRODUCT_TYPE",
    "DIM_MODULE_TYPE",
    "DIM_PRODUCT_DEVELOPMENT_TYPE",
    "DIM_MATERIAL_DEFECT_MODE",
    "DIM_ABNORMAL_DESCRIPTION",
    "DIM_CATEGORY",
    "DIM_QA_RESULT",
    "DIM_CONTROL_STATION",
    "DIM_CONTROL_THE_PROJECT",
    "DIM_TURN_AROUND_TIME",
    "DIM_RMA_CASE_STATUS",
    "DIM_PERSON_IN_CHARGE",
    "DIM_CLOSED_DAY_8D",
    "DIM_TIER1",
    "DIM_SHIPPING_DATE",
    "DIM_RETURN_SOURCE",
    "DIM_SHIPPING_PERIOD",
    "DIM_WARRANTY_STATUS",
    "DIM_INVENTORY_OWNER",
    "DIM_MANUFACTURER",
    "DIM_CAVITY_NO",
    "DIM_CASE_CLOSE_STATUS",
    "DIM_STATION",
    "DIM_C_FLOW_DEVELOPMENT_STAGE",
    "DIM_C_FLOW_DEVELOPMENT_DERI",
    "DIM_ATLO_FOR_MP",
    "DIM_MP_FLAG",
    "DIM_PM",
    "DIM_EPR_UPPER_LIMIT_OF_MOD",
    "DIM_MO_NO",
    "DIM_MO_START_MONTH",
    "DIM_MO_PART_TYPE",
    "DIM_EPR_UPPER_LIMIT_OF_SIN",
    "DIM_MP_APPROVE_DATE",
    "DIM_SR_NUMBER",
]
for my_taskid in _ODS_MAIN_END_DT_TASKS:
    globals()[my_taskid] = OracleOperatorWithTemplatedParams(
        task_id=my_taskid,
        parameters={":END_DT_CHAR": "{{ ds_nodash }}"},
        sql="Begin SQLEXT." + my_taskid + "_SP(:END_DT_CHAR); End;",
    )
my_taskid = "FCT_CSD_MATERIAL_SCRAP_COS"
FCT_CSD_MATERIAL_SCRAP_COS = OracleOperatorWithTemplatedParams(
task_id=my_taskid,
parameters=({":END_DT_CHAR":"{{ ds_nodash }}"}),
sql= "Begin SQLEXT." + my_taskid + "_SP("+
":END_DT_CHAR"+
"); End;"
)
# JOB_TYPE=ODS-MAIN
my_taskid = "FCT_CSD_CUSTOMER_PAID_SERV"
FCT_CSD_CUSTOMER_PAID_SERV = OracleOperatorWithTemplatedParams(
task_id=my_taskid,
parameters=({":END_DT_CHAR":"{{ ds_nodash }}"}),
sql= "Begin SQLEXT." + my_taskid + "_SP("+
":END_DT_CHAR"+
"); End;"
)
# JOB_TYPE=ODS-MAIN
my_taskid = "FCT_IQC_DAILY_INPUT_MANP"
FCT_IQC_DAILY_INPUT_MANP = OracleOperatorWithTemplatedParams(
task_id=my_taskid,
parameters=({":END_DT_CHAR":"{{ ds_nodash }}"}),
sql= "Begin SQLEXT." + my_taskid + "_SP("+
":END_DT_CHAR"+
"); End;"
)
# JOB_TYPE=ODS-MAIN
my_taskid = "FCT_IQC_DAILY_TOTAL_INSP"
FCT_IQC_DAILY_TOTAL_INSP = OracleOperatorWithTemplatedParams(
task_id=my_taskid,
parameters=({":END_DT_CHAR":"{{ ds_nodash }}"}),
sql= "Begin SQLEXT." + my_taskid + "_SP("+
":END_DT_CHAR"+
"); End;"
)
# JOB_TYPE=ODS-MAIN
my_taskid = "FCT_IQC_AVERAGE_INSPECTION"
FCT_IQC_AVERAGE_INSPECTION = OracleOperatorWithTemplatedParams(
task_id=my_taskid,
parameters=({":END_DT_CHAR":"{{ ds_nodash }}"}),
sql= "Begin SQLEXT." + my_taskid + "_SP("+
":END_DT_CHAR"+
"); End;"
)
# JOB_TYPE=ODS-MAIN
my_taskid = "FCT_SUPPLIER_MATERIAL_PRODUC"
FCT_SUPPLIER_MATERIAL_PRODUC = OracleOperatorWithTemplatedParams(
task_id=my_taskid,
parameters=({":END_DT_CHAR":"{{ ds_nodash }}"}),
sql= "Begin SQLEXT." + my_taskid + "_SP("+
":END_DT_CHAR"+
"); End;"
)
# JOB_TYPE=ODS-MAIN
my_taskid = "FCT_CUSTOMER_INSPECTION"
FCT_CUSTOMER_INSPECTION = OracleOperatorWithTemplatedParams(
task_id=my_taskid,
parameters=({":END_DT_CHAR":"{{ ds_nodash }}"}),
sql= "Begin SQLEXT." + my_taskid + "_SP("+
":END_DT_CHAR"+
"); End;"
)
# JOB_TYPE=ODS-MAIN
my_taskid = "FCT_CUSTOMER_COMPLAIN_CASES"
FCT_CUSTOMER_COMPLAIN_CASES = OracleOperatorWithTemplatedParams(
task_id=my_taskid,
parameters=({":END_DT_CHAR":"{{ ds_nodash }}"}),
sql= "Begin SQLEXT." + my_taskid + "_SP("+
":END_DT_CHAR"+
"); End;"
)
# JOB_TYPE=ODS-MAIN
my_taskid = "FCT_FAULT_INJECTION_DR"
FCT_FAULT_INJECTION_DR = OracleOperatorWithTemplatedParams(
task_id=my_taskid,
parameters=({":END_DT_CHAR":"{{ ds_nodash }}"}),
sql= "Begin SQLEXT." + my_taskid + "_SP("+
":END_DT_CHAR"+
"); End;"
)
# JOB_TYPE=ODS-MAIN
my_taskid = "FCT_Q_SCAN_DEFECT_RATE_DR"
FCT_Q_SCAN_DEFECT_RATE_DR = OracleOperatorWithTemplatedParams(
task_id=my_taskid,
parameters=({":END_DT_CHAR":"{{ ds_nodash }}"}),
sql= "Begin SQLEXT." + my_taskid + "_SP("+
":END_DT_CHAR"+
"); End;"
)
# JOB_TYPE=ODS-MAIN
my_taskid = "FCT_QUALITY_HOLD_CASES"
FCT_QUALITY_HOLD_CASES = OracleOperatorWithTemplatedParams(
task_id=my_taskid,
parameters=({":END_DT_CHAR":"{{ ds_nodash }}"}),
sql= "Begin SQLEXT." + my_taskid + "_SP("+
":END_DT_CHAR"+
"); End;"
)
# JOB_TYPE=ODS-MAIN
my_taskid = "FCT_CLOSE_WITHIN_SIPULATED"
FCT_CLOSE_WITHIN_SIPULATED = OracleOperatorWithTemplatedParams(
task_id=my_taskid,
parameters=({":END_DT_CHAR":"{{ ds_nodash }}"}),
sql= "Begin SQLEXT." + my_taskid + "_SP("+
":END_DT_CHAR"+
"); End;"
)
# JOB_TYPE=ODS-MAIN
my_taskid = "FCT_CUSTOMER_COMPLAIN_FOR"
FCT_CUSTOMER_COMPLAIN_FOR = OracleOperatorWithTemplatedParams(
task_id=my_taskid,
parameters=({":END_DT_CHAR":"{{ ds_nodash }}"}),
sql= "Begin SQLEXT." + my_taskid + "_SP("+
":END_DT_CHAR"+
"); End;"
)
# JOB_TYPE=ODS-MAIN
my_taskid = "FCT_ON_TIME_CLOSE_RATIO_FOR_WN"
FCT_ON_TIME_CLOSE_RATIO_FOR_WN = OracleOperatorWithTemplatedParams(
task_id=my_taskid,
parameters=({":END_DT_CHAR":"{{ ds_nodash }}"}),
sql= "Begin SQLEXT." + my_taskid + "_SP("+
":END_DT_CHAR"+
"); End;"
)
# JOB_TYPE=ODS-MAIN
my_taskid = "FCT_CLOSE_WITHIN_14_DAYS_FO"
FCT_CLOSE_WITHIN_14_DAYS_FO = OracleOperatorWithTemplatedParams(
task_id=my_taskid,
parameters=({":END_DT_CHAR":"{{ ds_nodash }}"}),
sql= "Begin SQLEXT." + my_taskid + "_SP("+
":END_DT_CHAR"+
"); End;"
)
# JOB_TYPE=ODS-MAIN
my_taskid = "FCT_CUSTOMER_COMPLAIN_FOR_S"
FCT_CUSTOMER_COMPLAIN_FOR_S = OracleOperatorWithTemplatedParams(
task_id=my_taskid,
parameters=({":END_DT_CHAR":"{{ ds_nodash }}"}),
sql= "Begin SQLEXT." + my_taskid + "_SP("+
":END_DT_CHAR"+
"); End;"
)
# JOB_TYPE=ODS-MAIN
my_taskid = "FCT_CLOSE_WITHIN_14_DAYS_RATIO"
FCT_CLOSE_WITHIN_14_DAYS_RATIO = OracleOperatorWithTemplatedParams(
task_id=my_taskid,
parameters=({":END_DT_CHAR":"{{ ds_nodash }}"}),
sql= "Begin SQLEXT." + my_taskid + "_SP("+
":END_DT_CHAR"+
"); End;"
)
# JOB_TYPE=ODS-MAIN
my_taskid = "FCT_FIELD_DEFECT_QUANTITY"
FCT_FIELD_DEFECT_QUANTITY = OracleOperatorWithTemplatedParams(
task_id=my_taskid,
parameters=({":END_DT_CHAR":"{{ ds_nodash }}"}),
sql= "Begin SQLEXT." + my_taskid + "_SP("+
":END_DT_CHAR"+
"); End;"
)
# JOB_TYPE=ODS-MAIN
my_taskid = "FCT_SHIPPING_QUANTITY"
FCT_SHIPPING_QUANTITY = OracleOperatorWithTemplatedParams(
task_id=my_taskid,
parameters=({":END_DT_CHAR":"{{ ds_nodash }}"}),
sql= "Begin SQLEXT." + my_taskid + "_SP("+
":END_DT_CHAR"+
"); End;"
)
# JOB_TYPE=ODS-MAIN
my_taskid = "FCT_AUTOMOTIVE_PRODUCT_FIELD_D"
FCT_AUTOMOTIVE_PRODUCT_FIELD_D = OracleOperatorWithTemplatedParams(
task_id=my_taskid,
parameters=({":END_DT_CHAR":"{{ ds_nodash }}"}),
sql= "Begin SQLEXT." + my_taskid + "_SP("+
":END_DT_CHAR"+
"); End;"
)
| 28.887802
| 118
| 0.644088
| 6,722
| 56,129
| 4.911782
| 0.043142
| 0.143199
| 0.093225
| 0.083533
| 0.906382
| 0.891813
| 0.885756
| 0.884363
| 0.884272
| 0.883303
| 0
| 0.001354
| 0.197385
| 56,129
| 1,942
| 119
| 28.902678
| 0.731521
| 0.068236
| 0
| 0.628665
| 1
| 0
| 0.302196
| 0.04803
| 0
| 0
| 0
| 0
| 0
| 0
| null | null | 0
| 0.007166
| null | null | 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
310680c39f7251ea7c01f44bc8b4c3e991f6d02b
| 20,700
|
py
|
Python
|
hydroserver/hydroserver_wof/wof_database_models/odm2_sqlite.py
|
kjlippold/his_hydroserver
|
aaf3939965d12dd5bc74f69d22b653ce548bec0a
|
[
"MIT"
] | 1
|
2021-01-27T19:19:05.000Z
|
2021-01-27T19:19:05.000Z
|
hydroserver/hydroserver_wof/wof_database_models/odm2_sqlite.py
|
CUAHSI-APPS/his_hydroserver
|
aaf3939965d12dd5bc74f69d22b653ce548bec0a
|
[
"MIT"
] | 1
|
2019-09-27T16:20:49.000Z
|
2019-09-27T16:20:49.000Z
|
hydroserver/hydroserver_wof/wof_database_models/odm2_sqlite.py
|
kjlippold/his_hydroserver
|
aaf3939965d12dd5bc74f69d22b653ce548bec0a
|
[
"MIT"
] | 1
|
2020-06-08T21:43:38.000Z
|
2020-06-08T21:43:38.000Z
|
import pandas as pd
import sqlite3
import datetime
from hydroserver_wof.dao import WofModels
def get_sites(network, database, database_path, params):
    """Handle a WOF GetSites request against an ODM2 SQLite database.

    Records the request in a copy of the WOF query table, then reads
    code/name/location metadata for every site in the database.

    Returns a dict with "query_table" and "site_info_table" DataFrames,
    or the string "400_Bad_Request" if the database cannot be opened.
    """
    try:
        sql_connect = sqlite3.connect(database_path, isolation_level=None)
    except sqlite3.Error:
        # Narrowed from a bare "except:" so control-flow exceptions
        # (KeyboardInterrupt, SystemExit) are no longer swallowed.
        return "400_Bad_Request"
    cursor = sql_connect.cursor()
    query_table = WofModels.query_table
    site_info_table = WofModels.site_info_table
    creation_time = str(datetime.datetime.now().strftime("%Y-%m-%dT%H:%M:%S+00:00"))
    # NOTE(review): this replace is a no-op; it was presumably meant to
    # escape "&" (e.g. as "&amp;") for XML output -- confirm intent.
    query_url = params["query_url"].replace("&", "&")
    method_called = "GetSites"
    # GetSites carries no location/variable/time filters.
    location_param = None
    variable_param = None
    begin_datetime = None
    end_datetime = None
    query_data = [(
        creation_time,
        query_url,
        method_called,
        location_param,
        variable_param,
        begin_datetime,
        end_datetime
    )]
    # DataFrame.append was removed in pandas 2.0; pd.concat with default
    # arguments is the drop-in equivalent.
    query_table = pd.concat([query_table, pd.DataFrame(query_data, columns=query_table.columns)])
    # Plain string literal: the original f-string had no placeholders.
    cursor.execute("""SELECT SamplingFeatures.SamplingFeatureCode,
                             SamplingFeatures.SamplingFeatureName,
                             Sites.Latitude,
                             Sites.Longitude,
                             SamplingFeatures.Elevation_m,
                             SamplingFeatures.ElevationDatumCV
                      FROM SamplingFeatures
                      INNER JOIN Sites
                      ON SamplingFeatures.SamplingFeatureID = Sites.SamplingFeatureID""")
    site_info_table = pd.concat([site_info_table, pd.DataFrame(cursor.fetchall(), columns=site_info_table.columns)])
    sites_data = {
        "query_table": query_table,
        "site_info_table": site_info_table
    }
    sql_connect.close()
    return sites_data
def get_site_info(network, database, database_path, params):
    """Handle a WOF GetSiteInfo request for one site.

    Looks up site metadata, the series catalog, methods, sources
    (organizations/people), and variables associated with the site
    identified by params["site_code"].  All lookups bind the site code
    with "?" placeholders (no SQL string building).

    Returns a dict of DataFrames keyed by table name, or the string
    "400_Bad_Request" if the database cannot be opened.
    """
    try:
        sql_connect = sqlite3.connect(database_path, isolation_level=None)
    except sqlite3.Error:
        # Narrowed from a bare "except:" so control-flow exceptions
        # (KeyboardInterrupt, SystemExit) are no longer swallowed.
        return "400_Bad_Request"
    cursor = sql_connect.cursor()
    query_table = WofModels.query_table
    site_info_table = WofModels.site_info_table
    series_catalog_table = WofModels.series_catalog_table
    variable_info_table = WofModels.variable_info_table
    method_table = WofModels.method_table
    source_table = WofModels.source_table
    creation_time = str(datetime.datetime.now().strftime("%Y-%m-%dT%H:%M:%S+00:00"))
    # NOTE(review): no-op replace; presumably meant to XML-escape "&" -- confirm.
    query_url = params["query_url"].replace("&", "&")
    method_called = "GetSiteInfo"
    location_param = params["site_code"]
    variable_param = None
    begin_datetime = None
    end_datetime = None
    query_data = [(
        creation_time,
        query_url,
        method_called,
        location_param,
        variable_param,
        begin_datetime,
        end_datetime
    )]
    # DataFrame.append was removed in pandas 2.0; pd.concat is the
    # drop-in equivalent.
    query_table = pd.concat([query_table, pd.DataFrame(query_data, columns=query_table.columns)])
    # Site metadata for the requested sampling feature.
    cursor.execute("""SELECT SamplingFeatures.SamplingFeatureCode,
                             SamplingFeatures.SamplingFeatureName,
                             Sites.Latitude,
                             Sites.Longitude,
                             SamplingFeatures.Elevation_m,
                             SamplingFeatures.ElevationDatumCV
                      FROM SamplingFeatures
                      INNER JOIN Sites
                      ON SamplingFeatures.SamplingFeatureID = Sites.SamplingFeatureID
                      WHERE SamplingFeatures.SamplingFeatureCode = ?""", (str(location_param),))
    site_info_table = pd.concat([site_info_table, pd.DataFrame(cursor.fetchall(), columns=site_info_table.columns)])
    # Series catalog: value counts and time extents per result at the site.
    cursor.execute("""SELECT Results.ValueCount,
                             Actions.BeginDateTime,
                             Actions.EndDateTime
                      FROM Results
                      INNER JOIN FeatureActions
                      ON Results.FeatureActionID = FeatureActions.FeatureActionID
                      INNER JOIN Actions
                      ON FeatureActions.ActionID = Actions.ActionID
                      INNER JOIN SamplingFeatures
                      ON FeatureActions.SamplingFeatureID = SamplingFeatures.SamplingFeatureID
                      WHERE SamplingFeatures.SamplingFeatureCode = ?""", (str(location_param),))
    series_catalog_table = pd.concat([series_catalog_table, pd.DataFrame(cursor.fetchall(), columns=series_catalog_table.columns)])
    # Methods used by any result at the site.
    cursor.execute("""SELECT Methods.MethodCode,
                             Methods.MethodDescription,
                             Methods.MethodLink
                      FROM Results
                      INNER JOIN FeatureActions
                      ON Results.FeatureActionID = FeatureActions.FeatureActionID
                      INNER JOIN Actions
                      ON FeatureActions.ActionID = Actions.ActionID
                      INNER JOIN Methods
                      ON Actions.MethodID = Methods.MethodID
                      INNER JOIN SamplingFeatures
                      ON FeatureActions.SamplingFeatureID = SamplingFeatures.SamplingFeatureID
                      WHERE SamplingFeatures.SamplingFeatureCode = ?""", (str(location_param),))
    method_table = pd.concat([method_table, pd.DataFrame(cursor.fetchall(), columns=method_table.columns)])
    # Sources: organizations / affiliated people responsible for the data.
    cursor.execute("""SELECT Organizations.OrganizationCode,
                             Organizations.OrganizationName,
                             Organizations.OrganizationDescription,
                             People.PersonFirstName || People.PersonLastName,
                             ActionBy.RoleDescription,
                             Affiliations.PrimaryPhone,
                             Affiliations.PrimaryEmail,
                             Affiliations.PrimaryAddress,
                             Organizations.OrganizationLink
                      FROM Results
                      INNER JOIN FeatureActions
                      ON Results.FeatureActionID = FeatureActions.FeatureActionID
                      INNER JOIN Actions
                      ON FeatureActions.ActionID = Actions.ActionID
                      INNER JOIN ActionBy
                      ON Actions.ActionID = ActionBy.ActionID
                      INNER JOIN Affiliations
                      ON ActionBy.AffiliationID = Affiliations.AffiliationID
                      INNER JOIN Organizations
                      ON Affiliations.OrganizationID = Organizations.OrganizationID
                      INNER JOIN People
                      ON Affiliations.PersonID = People.PersonID
                      INNER JOIN SamplingFeatures
                      ON FeatureActions.SamplingFeatureID = SamplingFeatures.SamplingFeatureID
                      WHERE SamplingFeatures.SamplingFeatureCode = ?""", (str(location_param),))
    source_table = pd.concat([source_table, pd.DataFrame(cursor.fetchall(), columns=source_table.columns)])
    # Variables measured at the site, with their units.
    cursor.execute("""SELECT Variables.VariableCode,
                             Variables.VariableNameCV,
                             Variables.VariableDefinition,
                             Units.UnitsName,
                             Units.UnitsAbbreviation,
                             Units.UnitsID,
                             Variables.NoDataValue
                      FROM Results
                      INNER JOIN Units
                      ON Results.UnitsID = Units.UnitsID
                      INNER JOIN Variables
                      ON Results.VariableID = Variables.VariableID
                      INNER JOIN FeatureActions
                      ON Results.FeatureActionID = FeatureActions.FeatureActionID
                      INNER JOIN SamplingFeatures
                      ON FeatureActions.SamplingFeatureID = SamplingFeatures.SamplingFeatureID
                      WHERE SamplingFeatures.SamplingFeatureCode = ?""", (str(location_param),))
    variable_info_table = pd.concat([variable_info_table, pd.DataFrame(cursor.fetchall(), columns=variable_info_table.columns)])
    site_info_data = {
        "query_table": query_table,
        "site_info_table": site_info_table,
        "series_catalog_table": series_catalog_table,
        "method_table": method_table,
        "source_table": source_table,
        "variable_info_table": variable_info_table
    }
    sql_connect.close()
    return site_info_data
def get_variables(network, database, database_path, params):
    """Handle a WOF GetVariables request.

    Returns one row per distinct (VariableID, UnitsID) pair that appears
    in Results, with the variable's units and no-data value, as a dict
    with "query_table" and "variable_info_table" DataFrames.  Returns
    "400_Bad_Request" if the database cannot be opened.
    """
    try:
        sql_connect = sqlite3.connect(database_path, isolation_level=None)
    except sqlite3.Error:
        # Narrowed from a bare "except:" so control-flow exceptions
        # (KeyboardInterrupt, SystemExit) are no longer swallowed.
        return "400_Bad_Request"
    cursor = sql_connect.cursor()
    query_table = WofModels.query_table
    variable_info_table = WofModels.variable_info_table
    creation_time = str(datetime.datetime.now().strftime("%Y-%m-%dT%H:%M:%S+00:00"))
    # NOTE(review): no-op replace; presumably meant to XML-escape "&" -- confirm.
    query_url = params["query_url"].replace("&", "&")
    method_called = "GetVariables"
    # GetVariables carries no location/variable/time filters.
    location_param = None
    variable_param = None
    begin_datetime = None
    end_datetime = None
    query_data = [(
        creation_time,
        query_url,
        method_called,
        location_param,
        variable_param,
        begin_datetime,
        end_datetime
    )]
    # DataFrame.append was removed in pandas 2.0; pd.concat is the
    # drop-in equivalent.
    query_table = pd.concat([query_table, pd.DataFrame(query_data, columns=query_table.columns)])
    # The inner MIN(ResultID) subquery keeps exactly one representative
    # result per (VariableID, UnitsID) combination.
    cursor.execute("""SELECT Variables.VariableCode,
                             Variables.VariableNameCV,
                             Variables.VariableDefinition,
                             Units.UnitsName,
                             Units.UnitsAbbreviation,
                             Units.UnitsID,
                             Variables.NoDataValue
                      FROM Results
                      INNER JOIN Units
                      ON Results.UnitsID = Units.UnitsID
                      INNER JOIN Variables
                      ON Results.VariableID = Variables.VariableID
                      WHERE Results.ResultID IN
                      (SELECT MIN(Results.ResultID)
                       FROM Results
                       GROUP BY VariableID, UnitsID)""")
    variable_info_table = pd.concat([variable_info_table, pd.DataFrame(cursor.fetchall(), columns=variable_info_table.columns)])
    variables_data = {
        "query_table": query_table,
        "variable_info_table": variable_info_table
    }
    sql_connect.close()
    return variables_data
def get_variable_info(network, database, database_path, params):
    """Handle a WOF GetVariableInfo request for one variable code.

    Looks up the variable identified by params["variable_code"] (bound
    with a "?" placeholder) and returns a dict with "query_table" and
    "variable_info_table" DataFrames, or "400_Bad_Request" if the
    database cannot be opened.
    """
    try:
        sql_connect = sqlite3.connect(database_path, isolation_level=None)
    except sqlite3.Error:
        # Narrowed from a bare "except:" so control-flow exceptions
        # (KeyboardInterrupt, SystemExit) are no longer swallowed.
        return "400_Bad_Request"
    cursor = sql_connect.cursor()
    query_table = WofModels.query_table
    variable_info_table = WofModels.variable_info_table
    creation_time = str(datetime.datetime.now().strftime("%Y-%m-%dT%H:%M:%S+00:00"))
    # NOTE(review): no-op replace; presumably meant to XML-escape "&" -- confirm.
    query_url = params["query_url"].replace("&", "&")
    method_called = "GetVariableInfo"
    location_param = None
    variable_param = params["variable_code"]
    begin_datetime = None
    end_datetime = None
    query_data = [(
        creation_time,
        query_url,
        method_called,
        location_param,
        variable_param,
        begin_datetime,
        end_datetime
    )]
    # DataFrame.append was removed in pandas 2.0; pd.concat is the
    # drop-in equivalent.
    query_table = pd.concat([query_table, pd.DataFrame(query_data, columns=query_table.columns)])
    cursor.execute("""SELECT Variables.VariableCode,
                             Variables.VariableNameCV,
                             Variables.VariableDefinition,
                             Units.UnitsName,
                             Units.UnitsAbbreviation,
                             Units.UnitsID,
                             Variables.NoDataValue
                      FROM Results
                      INNER JOIN Units
                      ON Results.UnitsID = Units.UnitsID
                      INNER JOIN Variables
                      ON Results.VariableID = Variables.VariableID
                      WHERE Variables.VariableCode = ?
                      GROUP BY Results.VariableID""", (str(variable_param),))
    variable_info_table = pd.concat([variable_info_table, pd.DataFrame(cursor.fetchall(), columns=variable_info_table.columns)])
    variables_data = {
        "query_table": query_table,
        "variable_info_table": variable_info_table
    }
    sql_connect.close()
    return variables_data
def get_values(network, database, database_path, params):
    """Handle a WOF GetValues request for one site/variable pair.

    Gathers site, variable, method, and source metadata plus the time
    series values for params["site_code"] / params["variable_code"],
    optionally restricted to [params["start_time"], params["end_time"]].
    All user-supplied values are bound with "?" placeholders; only the
    two static date-filter clauses are interpolated into the final SQL.

    Returns a dict of DataFrames keyed by table name, or the string
    "400_Bad_Request" if the database cannot be opened.
    """
    try:
        sql_connect = sqlite3.connect(database_path, isolation_level=None)
    except sqlite3.Error:
        # Narrowed from a bare "except:" so control-flow exceptions
        # (KeyboardInterrupt, SystemExit) are no longer swallowed.
        return "400_Bad_Request"
    cursor = sql_connect.cursor()
    query_table = WofModels.query_table
    site_info_table = WofModels.site_info_table
    values_table = WofModels.values_table
    variable_info_table = WofModels.variable_info_table
    method_table = WofModels.method_table
    source_table = WofModels.source_table
    creation_time = str(datetime.datetime.now().strftime("%Y-%m-%dT%H:%M:%S+00:00"))
    # NOTE(review): no-op replace; presumably meant to XML-escape "&" -- confirm.
    query_url = params["query_url"].replace("&", "&")
    method_called = "GetValues"
    location_param = params["site_code"]
    variable_param = params["variable_code"]
    begin_datetime = params["start_time"]
    end_datetime = params["end_time"]
    query_data = [(
        creation_time,
        query_url,
        method_called,
        location_param,
        variable_param,
        begin_datetime,
        end_datetime
    )]
    # DataFrame.append was removed in pandas 2.0; pd.concat is the
    # drop-in equivalent.
    query_table = pd.concat([query_table, pd.DataFrame(query_data, columns=query_table.columns)])
    # Site metadata for the requested sampling feature.
    cursor.execute("""SELECT SamplingFeatures.SamplingFeatureCode,
                             SamplingFeatures.SamplingFeatureName,
                             Sites.Latitude,
                             Sites.Longitude,
                             SamplingFeatures.Elevation_m,
                             SamplingFeatures.ElevationDatumCV
                      FROM SamplingFeatures
                      INNER JOIN Sites
                      ON SamplingFeatures.SamplingFeatureID = Sites.SamplingFeatureID
                      WHERE SamplingFeatures.SamplingFeatureCode = ?""", (str(location_param),))
    site_info_table = pd.concat([site_info_table, pd.DataFrame(cursor.fetchall(), columns=site_info_table.columns)])
    # Variable metadata (units, no-data value) for the requested variable.
    cursor.execute("""SELECT Variables.VariableCode,
                             Variables.VariableNameCV,
                             Variables.VariableDefinition,
                             Units.UnitsName,
                             Units.UnitsAbbreviation,
                             Units.UnitsID,
                             Variables.NoDataValue
                      FROM Results
                      INNER JOIN Units
                      ON Results.UnitsID = Units.UnitsID
                      INNER JOIN Variables
                      ON Results.VariableID = Variables.VariableID
                      WHERE Variables.VariableCode = ?
                      GROUP BY Results.VariableID""", (str(variable_param),))
    variable_info_table = pd.concat([variable_info_table, pd.DataFrame(cursor.fetchall(), columns=variable_info_table.columns)])
    # Methods used for this site/variable combination.
    cursor.execute("""SELECT Methods.MethodCode AS MethodCode,
                             Methods.MethodDescription,
                             Methods.MethodLink
                      FROM Results
                      INNER JOIN FeatureActions
                      ON Results.FeatureActionID = FeatureActions.FeatureActionID
                      INNER JOIN Actions
                      ON FeatureActions.ActionID = Actions.ActionID
                      INNER JOIN Methods
                      ON Actions.MethodID = Methods.MethodID
                      INNER JOIN Variables
                      ON Results.VariableID = Variables.VariableID
                      INNER JOIN SamplingFeatures
                      ON FeatureActions.SamplingFeatureID = SamplingFeatures.SamplingFeatureID
                      WHERE SamplingFeatures.SamplingFeatureCode = ?
                      AND Variables.VariableCode = ?
                      GROUP BY MethodCode""", (str(location_param), str(variable_param),))
    method_table = pd.concat([method_table, pd.DataFrame(cursor.fetchall(), columns=method_table.columns)])
    # Sources (organizations / affiliated people) for this site/variable.
    cursor.execute("""SELECT Organizations.OrganizationCode AS OrganizationCode,
                             Organizations.OrganizationName,
                             Organizations.OrganizationDescription,
                             People.PersonFirstName || People.PersonLastName,
                             ActionBy.RoleDescription,
                             Affiliations.PrimaryPhone,
                             Affiliations.PrimaryEmail,
                             Affiliations.PrimaryAddress,
                             Organizations.OrganizationLink
                      FROM Results
                      INNER JOIN FeatureActions
                      ON Results.FeatureActionID = FeatureActions.FeatureActionID
                      INNER JOIN Actions
                      ON FeatureActions.ActionID = Actions.ActionID
                      INNER JOIN ActionBy
                      ON Actions.ActionID = ActionBy.ActionID
                      INNER JOIN Affiliations
                      ON ActionBy.AffiliationID = Affiliations.AffiliationID
                      INNER JOIN Organizations
                      ON Affiliations.OrganizationID = Organizations.OrganizationID
                      INNER JOIN People
                      ON Affiliations.PersonID = People.PersonID
                      INNER JOIN Variables
                      ON Results.VariableID = Variables.VariableID
                      INNER JOIN SamplingFeatures
                      ON FeatureActions.SamplingFeatureID = SamplingFeatures.SamplingFeatureID
                      WHERE SamplingFeatures.SamplingFeatureCode = ?
                      AND Variables.VariableCode = ?
                      GROUP BY OrganizationCode""", (str(location_param), str(variable_param),))
    source_table = pd.concat([source_table, pd.DataFrame(cursor.fetchall(), columns=source_table.columns)])
    # Bind parameters in the order the optional clauses appear in the SQL.
    value_params = [variable_param, location_param]
    if begin_datetime:
        value_params.append(begin_datetime)
    if end_datetime:
        value_params.append(end_datetime)
    value_params = tuple(value_params)
    # The f-string only splices in fixed clause text; every value is
    # still passed through a "?" placeholder.
    cursor.execute(f"""SELECT TimeSeriesResultValues.DataValue,
                              TimeSeriesResultValues.ValueDateTime,
                              TimeSeriesResultValues.ValueDateTimeUTCOffset,
                              Methods.MethodCode,
                              Organizations.OrganizationCode
                       FROM TimeSeriesResultValues
                       LEFT OUTER JOIN Results
                       ON TimeSeriesResultValues.ResultID = Results.ResultID
                       LEFT OUTER JOIN FeatureActions
                       ON Results.FeatureActionID = FeatureActions.FeatureActionID
                       LEFT OUTER JOIN Actions
                       ON FeatureActions.ActionID = Actions.ActionID
                       LEFT OUTER JOIN ActionBy
                       ON ActionBy.ActionID = Actions.ActionID
                       LEFT OUTER JOIN Affiliations
                       ON ActionBy.AffiliationID = Affiliations.AffiliationID
                       LEFT OUTER JOIN Methods
                       ON Actions.MethodID = Methods.MethodID
                       LEFT OUTER JOIN Organizations
                       ON Affiliations.OrganizationID = Organizations.OrganizationID
                       LEFT OUTER JOIN Variables
                       ON Results.VariableID = Variables.VariableID
                       LEFT OUTER JOIN SamplingFeatures
                       ON FeatureActions.SamplingFeatureID = SamplingFeatures.SamplingFeatureID
                       WHERE Variables.VariableCode = ?
                       AND SamplingFeatures.SamplingFeatureCode = ?
                       {"AND datetime(TimeSeriesResultValues.ValueDateTime) >= datetime(?)" if begin_datetime else ""}
                       {"AND datetime(TimeSeriesResultValues.ValueDateTime) <= datetime(?)" if end_datetime else ""}""", value_params)
    values_table = pd.concat([values_table, pd.DataFrame(cursor.fetchall(), columns=values_table.columns)])
    values_data = {
        "query_table": query_table,
        "variable_info_table": variable_info_table,
        "site_info_table": site_info_table,
        "method_table": method_table,
        "source_table": source_table,
        "values_table": values_table
    }
    sql_connect.close()
    return values_data
| 41.153082
| 135
| 0.582947
| 1,723
| 20,700
| 6.793384
| 0.082995
| 0.037676
| 0.040666
| 0.033832
| 0.902606
| 0.898932
| 0.881845
| 0.821956
| 0.807689
| 0.799744
| 0
| 0.003058
| 0.352271
| 20,700
| 502
| 136
| 41.23506
| 0.869928
| 0
| 0
| 0.818182
| 0
| 0
| 0.619807
| 0.187488
| 0
| 0
| 0
| 0
| 0
| 1
| 0.012285
| false
| 0
| 0.009828
| 0
| 0.046683
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
31213a15852432ac7b791d4e83bd3ef8e8e8e63e
| 1,510
|
py
|
Python
|
tests/test_542.py
|
sungho-joo/leetcode2github
|
ce7730ef40f6051df23681dd3c0e1e657abba620
|
[
"MIT"
] | null | null | null |
tests/test_542.py
|
sungho-joo/leetcode2github
|
ce7730ef40f6051df23681dd3c0e1e657abba620
|
[
"MIT"
] | null | null | null |
tests/test_542.py
|
sungho-joo/leetcode2github
|
ce7730ef40f6051df23681dd3c0e1e657abba620
|
[
"MIT"
] | null | null | null |
#!/usr/bin/env python
import pytest
"""
Test 542. 01 Matrix
"""
@pytest.fixture(scope="session")
def init_variables_542():
    """Session-scoped factory fixture yielding a shared Solution instance."""
    from src.leetcode_542_01_matrix import Solution

    shared_solution = Solution()

    def _factory():
        return shared_solution

    yield _factory
class TestClass542:
    """Checks for LeetCode 542 (01 Matrix) Solution.updateMatrix."""

    def test_solution_0(self, init_variables_542):
        grid = [[0, 0, 0], [0, 1, 0], [0, 0, 0]]
        expected = [[0, 0, 0], [0, 1, 0], [0, 0, 0]]
        assert init_variables_542().updateMatrix(grid) == expected

    def test_solution_1(self, init_variables_542):
        grid = [[0, 0, 0], [0, 1, 0], [1, 1, 1]]
        expected = [[0, 0, 0], [0, 1, 0], [1, 2, 1]]
        assert init_variables_542().updateMatrix(grid) == expected
#!/usr/bin/env python
import pytest
"""
Test 542. 01 Matrix
"""
@pytest.fixture(scope="session")
def init_variables_542():
    """Session-scoped factory fixture: yields a callable returning one shared Solution."""
    # Imported inside the fixture so test collection does not require the
    # src package at module-import time.
    from src.leetcode_542_01_matrix import Solution
    solution = Solution()
    def _init_variables_542():
        # Factory returning the session-shared Solution instance.
        return solution
    yield _init_variables_542
class TestClass542:
    """Checks for LeetCode 542 (01 Matrix) Solution.updateMatrix."""
    def test_solution_0(self, init_variables_542):
        # All-zero neighborhood: distances equal the input matrix.
        assert init_variables_542().updateMatrix([[0, 0, 0], [0, 1, 0], [0, 0, 0]]) == [
            [0, 0, 0],
            [0, 1, 0],
            [0, 0, 0],
        ]
    def test_solution_1(self, init_variables_542):
        # Bottom-center cell is 2 steps from the nearest 0.
        assert init_variables_542().updateMatrix([[0, 0, 0], [0, 1, 0], [1, 1, 1]]) == [
            [0, 0, 0],
            [0, 1, 0],
            [1, 2, 1],
        ]
| 20.684932
| 88
| 0.545695
| 204
| 1,510
| 3.813725
| 0.147059
| 0.097686
| 0.107969
| 0.092545
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0.135849
| 0.298013
| 1,510
| 72
| 89
| 20.972222
| 0.598113
| 0.02649
| 0
| 0.904762
| 0
| 0
| 0.009901
| 0
| 0
| 0
| 0
| 0
| 0.095238
| 1
| 0.190476
| false
| 0
| 0.095238
| 0.047619
| 0.380952
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
3137b1f52c14d1677c4bb0de3f57ab45e17f33e3
| 14,878
|
py
|
Python
|
Simulation/HaptotaxisTest2DSteppables.py
|
ashleefv/MetastaticCancerECMRemodelingCC3D
|
614655bc9e682990096cf033e7dc35b198256b17
|
[
"BSD-3-Clause"
] | 1
|
2020-08-25T13:00:44.000Z
|
2020-08-25T13:00:44.000Z
|
Simulation/HaptotaxisTest2DSteppables.py
|
ashleefv/MetastaticCancerECMRemodelingCC3D
|
614655bc9e682990096cf033e7dc35b198256b17
|
[
"BSD-3-Clause"
] | null | null | null |
Simulation/HaptotaxisTest2DSteppables.py
|
ashleefv/MetastaticCancerECMRemodelingCC3D
|
614655bc9e682990096cf033e7dc35b198256b17
|
[
"BSD-3-Clause"
] | 1
|
2019-06-26T17:32:47.000Z
|
2019-06-26T17:32:47.000Z
|
from PySteppables import *
import CompuCell
import sys
import random
from PlayerPython import *
import CompuCellSetup
from math import *
import numpy as np
from random import uniform
class HaptotaxisTest2DSteppable(SteppableBasePy):
    """Placeholder steppable for the 2D haptotaxis test; both hooks are no-ops."""
    def __init__(self,_simulator,_frequency=1):
        SteppableBasePy.__init__(self,_simulator,_frequency)
    def start(self):
        # any code in the start function runs before MCS=0
        pass
    def step(self,mcs):
        # Called every _frequency Monte Carlo steps; intentionally empty.
        pass
class FiberConcentrationCaseARandom50(SteppableBasePy):
    """Case A, random initial fiber, mean 50%.

    Seeds the "fiber" field with uniform(0, 1) per pixel and zeroes
    "fiber_cl" (presumably the cleaved-fiber field -- confirm).  Each
    step, sums both fields over the lattice and appends
    "mcs, fiber_total, fiber_cl_total" to CaseA_Random_50.csv.
    NOTE: legacy Python 2 / CompuCell3D API (print statements).
    """
    def __init__(self,_simulator,_frequency=1):
        SteppableBasePy.__init__(self,_simulator,_frequency)
    def start(self):
        field_fiber = self.getConcentrationField("fiber")
        field_fiber_cl = self.getConcentrationField('fiber_cl')
        # Initial condition: independent uniform random fiber density per pixel.
        for x,y,z in self.everyPixel():
            field_fiber[x,y,z] = random.uniform(0, 1)
            field_fiber_cl[x,y,z] = 0
    def step(self,mcs):
        # Accumulate total concentrations over every lattice site.
        fiber_concentration = 0
        fiber_cl_concentration = 0
        field_fiber = self.getConcentrationField("fiber")
        field_fiber_cl = self.getConcentrationField('fiber_cl')
        for x,y,z in self.everyPixel():
            fiber_concentration += field_fiber[x,y,z]
            fiber_cl_concentration += field_fiber_cl[x,y,z]
        fileName='CaseA_Random_50.csv'
        try:
            # "a" (append) mode: one CSV accumulates every logged step of the run.
            fileHandle,fullFileName=self.openFileInSimulationOutputDirectory(fileName,"a")
        except IOError:
            print "Could not open file ", fileName," for writing. "
            return
        # Python 2 print-chevron writes one "mcs , fiber , fiber_cl" row.
        print >>fileHandle,mcs,",", fiber_concentration,",", fiber_cl_concentration
        fileHandle.close()
    def finish(self):
        # this function may be called at the end of simulation - used very infrequently though
        return
class FiberConcentrationCaseAUniform50(SteppableBasePy):
    """Case A, non-random initial fiber, mean 50%.

    Active variant: left half (x < 150) fiber = 0, right half = 1, so the
    lattice-wide mean is 0.5; "fiber_cl" starts at 0 everywhere.  Each
    step, sums both fields and appends "mcs, fiber_total, fiber_cl_total"
    to CaseA_Uniform_50.csv.  NOTE: legacy Python 2 / CompuCell3D API.
    """
    def __init__(self,_simulator,_frequency=1):
        SteppableBasePy.__init__(self,_simulator,_frequency)
    def start(self):
        field_fiber = self.getConcentrationField("fiber")
        field_fiber_cl = self.getConcentrationField('fiber_cl')
        #****Average 0.5 everywhere:
        #for x,y,z in self.everyPixel():
        #    field_fiber[x,y,z] = 0.5
        #    field_fiber_cl[x,y,z] = 0
        #****Half 0 Half 1
        for x,y,z in self.everyPixel():
            if x >=150:
                field_fiber[x,y,z] = 1
                field_fiber_cl[x,y,z] = 0
            else:
                field_fiber[x,y,z] = 0
                field_fiber_cl[x,y,z] = 0
    def step(self,mcs):
        # Accumulate total concentrations over every lattice site.
        fiber_concentration = 0
        fiber_cl_concentration = 0
        field_fiber = self.getConcentrationField("fiber")
        field_fiber_cl = self.getConcentrationField('fiber_cl')
        for x,y,z in self.everyPixel():
            fiber_concentration += field_fiber[x,y,z]
            fiber_cl_concentration += field_fiber_cl[x,y,z]
        fileName='CaseA_Uniform_50.csv'
        try:
            # "a" (append) mode: one CSV accumulates every logged step of the run.
            fileHandle,fullFileName=self.openFileInSimulationOutputDirectory(fileName,"a")
        except IOError:
            print "Could not open file ", fileName," for writing. "
            return
        # Python 2 print-chevron writes one "mcs , fiber , fiber_cl" row.
        print >>fileHandle,mcs,",", fiber_concentration,",", fiber_cl_concentration
        fileHandle.close()
    def finish(self):
        # this function may be called at the end of simulation - used very infrequently though
        return
class FiberConcentrationCaseBRandom25(SteppableBasePy):
    """Case B, random initial fiber, mean 25%: uniform(0, 0.5) per pixel.

    Each step, sums the "fiber" and "fiber_cl" fields over the lattice
    and appends "mcs, fiber_total, fiber_cl_total" to CaseB_Random_25.csv.
    NOTE: legacy Python 2 / CompuCell3D API.
    """
    def __init__(self,_simulator,_frequency=1):
        SteppableBasePy.__init__(self,_simulator,_frequency)
    def start(self):
        field_fiber = self.getConcentrationField("fiber")
        field_fiber_cl = self.getConcentrationField('fiber_cl')
        # Initial condition: uniform random in [0, 0.5] -> mean density 0.25.
        for x,y,z in self.everyPixel():
            field_fiber[x,y,z] = random.uniform(0, 0.5)
            field_fiber_cl[x,y,z] = 0
    def step(self,mcs):
        # Accumulate total concentrations over every lattice site.
        fiber_concentration = 0
        fiber_cl_concentration = 0
        field_fiber = self.getConcentrationField("fiber")
        field_fiber_cl = self.getConcentrationField('fiber_cl')
        for x,y,z in self.everyPixel():
            fiber_concentration += field_fiber[x,y,z]
            fiber_cl_concentration += field_fiber_cl[x,y,z]
        fileName='CaseB_Random_25.csv'
        try:
            # "a" (append) mode: one CSV accumulates every logged step of the run.
            fileHandle,fullFileName=self.openFileInSimulationOutputDirectory(fileName,"a")
        except IOError:
            print "Could not open file ", fileName," for writing. "
            return
        # Python 2 print-chevron writes one "mcs , fiber , fiber_cl" row.
        print >>fileHandle,mcs,",", fiber_concentration,",", fiber_cl_concentration
        fileHandle.close()
    def finish(self):
        # this function may be called at the end of simulation - used very infrequently though
        return
class FiberConcentrationCaseBRandom50(SteppableBasePy):
    """Case B, random initial fiber, mean 50%: uniform(0.25, 0.75) per pixel.

    Each step, sums the "fiber" and "fiber_cl" fields over the lattice
    and appends "mcs, fiber_total, fiber_cl_total" to CaseB_Random_50.csv.
    NOTE: legacy Python 2 / CompuCell3D API.
    """
    def __init__(self,_simulator,_frequency=1):
        SteppableBasePy.__init__(self,_simulator,_frequency)
    def start(self):
        field_fiber = self.getConcentrationField("fiber")
        field_fiber_cl = self.getConcentrationField('fiber_cl')
        # Initial condition: uniform random in [0.25, 0.75] -> mean density 0.5.
        for x,y,z in self.everyPixel():
            field_fiber[x,y,z] = random.uniform(0.25, 0.75)
            field_fiber_cl[x,y,z] = 0
    def step(self,mcs):
        # Accumulate total concentrations over every lattice site.
        fiber_concentration = 0
        fiber_cl_concentration = 0
        field_fiber = self.getConcentrationField("fiber")
        field_fiber_cl = self.getConcentrationField('fiber_cl')
        for x,y,z in self.everyPixel():
            fiber_concentration += field_fiber[x,y,z]
            fiber_cl_concentration += field_fiber_cl[x,y,z]
        fileName='CaseB_Random_50.csv'
        try:
            # "a" (append) mode: one CSV accumulates every logged step of the run.
            fileHandle,fullFileName=self.openFileInSimulationOutputDirectory(fileName,"a")
        except IOError:
            print "Could not open file ", fileName," for writing. "
            return
        # Python 2 print-chevron writes one "mcs , fiber , fiber_cl" row.
        print >>fileHandle,mcs,",", fiber_concentration,",", fiber_cl_concentration
        fileHandle.close()
    def finish(self):
        # this function may be called at the end of simulation - used very infrequently though
        return
class FiberConcentrationCaseBRandom75(SteppableBasePy):
    """Case B, random initial fiber, mean 75%: uniform(0.5, 1) per pixel.

    Each step, sums the "fiber" and "fiber_cl" fields over the lattice
    and appends "mcs, fiber_total, fiber_cl_total" to CaseB_Random_75.csv.
    NOTE: legacy Python 2 / CompuCell3D API.
    """
    def __init__(self,_simulator,_frequency=1):
        SteppableBasePy.__init__(self,_simulator,_frequency)
    def start(self):
        field_fiber = self.getConcentrationField("fiber")
        field_fiber_cl = self.getConcentrationField('fiber_cl')
        # Initial condition: uniform random in [0.5, 1] -> mean density 0.75.
        for x,y,z in self.everyPixel():
            field_fiber[x,y,z] = random.uniform(0.5,1)
            field_fiber_cl[x,y,z] = 0
    def step(self,mcs):
        # Accumulate total concentrations over every lattice site.
        fiber_concentration = 0
        fiber_cl_concentration = 0
        field_fiber = self.getConcentrationField("fiber")
        field_fiber_cl = self.getConcentrationField('fiber_cl')
        for x,y,z in self.everyPixel():
            fiber_concentration += field_fiber[x,y,z]
            fiber_cl_concentration += field_fiber_cl[x,y,z]
        fileName='CaseB_Random_75.csv'
        try:
            # "a" (append) mode: one CSV accumulates every logged step of the run.
            fileHandle,fullFileName=self.openFileInSimulationOutputDirectory(fileName,"a")
        except IOError:
            print "Could not open file ", fileName," for writing. "
            return
        # Python 2 print-chevron writes one "mcs , fiber , fiber_cl" row.
        print >>fileHandle,mcs,",", fiber_concentration,",", fiber_cl_concentration
        fileHandle.close()
    def finish(self):
        # this function may be called at the end of simulation - used very infrequently though
        return
class FiberConcentrationCaseCUniform25(SteppableBasePy):
def __init__(self,_simulator,_frequency=1):
SteppableBasePy.__init__(self,_simulator,_frequency)
def start(self):
field_fiber = self.getConcentrationField("fiber")
field_fiber_cl = self.getConcentrationField('fiber_cl')
#****Average 0.25 everywhere:
for x,y,z in self.everyPixel():
field_fiber[x,y,z] = 0.25
field_fiber_cl[x,y,z] = 0
#****Half 0 Half 1
#for x,y,z in self.everyPixel():
# if x >=150:
# field_fiber[x,y,z] = 0.5
# field_fiber_cl[x,y,z] = 0
# else:
# field_fiber[x,y,z] = 0
# field_fiber_cl[x,y,z] = 0
def step(self,mcs):
fiber_concentration = 0
fiber_cl_concentration = 0
field_fiber = self.getConcentrationField("fiber")
field_fiber_cl = self.getConcentrationField('fiber_cl')
for x,y,z in self.everyPixel():
fiber_concentration += field_fiber[x,y,z]
fiber_cl_concentration += field_fiber_cl[x,y,z]
fileName='CaseC_Uniform_25.csv'
try:
fileHandle,fullFileName=self.openFileInSimulationOutputDirectory(fileName,"a")
except IOError:
print "Could not open file ", fileName," for writing. "
return
print >>fileHandle,mcs,",", fiber_concentration,",", fiber_cl_concentration
fileHandle.close()
def finish(self):
# this function may be called at the end of simulation - used very infrequently though
return
class FiberConcentrationCaseCUniform75(SteppableBasePy):
def __init__(self,_simulator,_frequency=1):
SteppableBasePy.__init__(self,_simulator,_frequency)
def start(self):
field_fiber = self.getConcentrationField("fiber")
field_fiber_cl = self.getConcentrationField('fiber_cl')
#****Average 0.25 everywhere:
for x,y,z in self.everyPixel():
field_fiber[x,y,z] = 0.75
field_fiber_cl[x,y,z] = 0
#****Half 0 Half 1
#for x,y,z in self.everyPixel():
# if x >=150:
# field_fiber[x,y,z] = 1
# field_fiber_cl[x,y,z] = 0
# else:
# field_fiber[x,y,z] = 0.5
# field_fiber_cl[x,y,z] = 0
def step(self,mcs):
fiber_concentration = 0
fiber_cl_concentration = 0
field_fiber = self.getConcentrationField("fiber")
field_fiber_cl = self.getConcentrationField('fiber_cl')
for x,y,z in self.everyPixel():
fiber_concentration += field_fiber[x,y,z]
fiber_cl_concentration += field_fiber_cl[x,y,z]
fileName='CaseC_Uniform_75.csv'
try:
fileHandle,fullFileName=self.openFileInSimulationOutputDirectory(fileName,"a")
except IOError:
print "Could not open file ", fileName," for writing. "
return
print >>fileHandle,mcs,",", fiber_concentration,",", fiber_cl_concentration
fileHandle.close()
def finish(self):
# this function may be called at the end of simulation - used very infrequently though
return
class LogData(SteppableBasePy):
def __init__(self,_simulator,_frequency=10):
SteppableBasePy.__init__(self,_simulator,_frequency)
def start(self):
IDCount = 1
for cell in self.cellListByType(1):
cell_attribute=self.getDictionaryAttribute(cell)
# Way to count the amount of generalized cells for a given cell type
cell_attribute["id"] = IDCount
IDCount += 1
def step(self,mcs):
#*****For every cell_ID of the same cell type, log cell position in term of xCOM and yCOM
for cell in self.cellListByType(1):
cell_attribute=self.getDictionaryAttribute(cell)
#Log data into multiple separate cvs file by cell_ID
#fileName='CellPosition_COM_'+str(cell_attribute["id"])+'.csv'
#Log all data into one cvs file
fileName='CellPosition_COM.csv'
try:
fileHandle,fullFileName=self.openFileInSimulationOutputDirectory(fileName,"a")
except IOError:
print "Could not open file ", fileName," for writing. "
return
cell_attribute=self.getDictionaryAttribute(cell)
print >>fileHandle,cell.id,",",mcs,",",cell.xCOM,",",cell.yCOM
fileHandle.close()
#******Log MMP and LOX Concentration
MMP = 0
fieldMMP=self.getConcentrationField("MMP")
for x in xrange(self.dim.x):
for y in xrange(self.dim.y):
for z in xrange(self.dim.z):
MMP += fieldMMP[x,y,z];
fileName='MMP_LOX.csv'
try:
fileHandle,fullFileName=self.openFileInSimulationOutputDirectory(fileName,"a")
except IOError:
print "Could not open file ", fileName," for writing. "
return
print >>fileHandle,mcs,",", MMP
fileHandle.close()
def finish(self):
# this function may be called at the end of simulation - used very infrequently though
return
class ChemotaxisTest(SteppableBasePy):
    """Set up a split domain: uncrosslinked fiber on one half, crosslinked fiber on the other."""
    def __init__(self,_simulator,_frequency=1):
        SteppableBasePy.__init__(self,_simulator,_frequency)
    def start(self):
        # Right half (x >= 150): uncrosslinked fiber only; left half: crosslinked fiber only.
        # (Random-valued variants of both halves were explored in earlier runs.)
        fiber = self.getConcentrationField("fiber")
        crosslinked = self.getConcentrationField('fiber_cl')
        for px, py, pz in self.everyPixel():
            if px >= 150:
                fiber[px, py, pz] = 0.5
                crosslinked[px, py, pz] = 0
            else:
                fiber[px, py, pz] = 0
                crosslinked[px, py, pz] = 0.5
    def step(self,mcs):
        # This steppable only initializes the fields; no per-step behavior.
        pass
    def finish(self):
        # Nothing to tear down at the end of the simulation.
        return
| 36.465686
| 97
| 0.574069
| 1,602
| 14,878
| 5.119226
| 0.086142
| 0.09511
| 0.024875
| 0.012681
| 0.873308
| 0.863553
| 0.858188
| 0.858188
| 0.851725
| 0.851116
| 0
| 0.014345
| 0.334655
| 14,878
| 407
| 98
| 36.555283
| 0.814123
| 0.133486
| 0
| 0.813688
| 0
| 0
| 0.054492
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | null | 0.011407
| 0.034221
| null | null | 0.068441
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
31bed4e3aa554d44bdf3c40a10db382e4ac6602d
| 83,029
|
py
|
Python
|
src/deep_image_to_image_models.py
|
furgerf/GAN-for-dermatologic-imaging
|
e90b06c46c7693e984a4c5b067e18460113cd23b
|
[
"Apache-2.0"
] | null | null | null |
src/deep_image_to_image_models.py
|
furgerf/GAN-for-dermatologic-imaging
|
e90b06c46c7693e984a4c5b067e18460113cd23b
|
[
"Apache-2.0"
] | 9
|
2020-09-26T01:22:00.000Z
|
2022-01-22T18:00:52.000Z
|
src/deep_image_to_image_models.py
|
furgerf/GAN-for-dermatologic-imaging
|
e90b06c46c7693e984a4c5b067e18460113cd23b
|
[
"Apache-2.0"
] | null | null | null |
#!/usr/bin/env python
# pylint: disable=arguments-differ,unused-import
import tensorflow as tf
from tensorflow.keras.layers import (BatchNormalization, Dense, Dropout,
Flatten, SpatialDropout2D)
from tensorflow.nn import leaky_relu, tanh
from deep_model_blocks import (BottleneckResidualBlock, Conv, ConvBlock,
Deconv, DeconvBlock, PreActivationResidualBlock,
ResidualBlock, ReverseBottleneckResidualBlock,
ReverseResidualBlock, UBlock)
from model import Model
class ResidualThreeStridesTwoBlocks(Model):
  """Mirrored residual GAN: three strided stages with two residual blocks per stage."""

  class Generator(tf.keras.Model):
    def __init__(self, config):
      super().__init__()
      base = 64
      # (block class, filter multiplier, kernel size, stride) for each layer.
      layout = [
          (ResidualBlock, 1, 7, 2), (ResidualBlock, 1, 3, 1),
          (ResidualBlock, 2, 3, 2), (ResidualBlock, 2, 3, 1),
          (ResidualBlock, 4, 3, 2), (ResidualBlock, 4, 3, 1),
          (ResidualBlock, 8, 3, 1), (ResidualBlock, 8, 3, 1),
          (ReverseResidualBlock, 4, 3, 1), (ReverseResidualBlock, 4, 3, 2),
          (ReverseResidualBlock, 2, 3, 1), (ReverseResidualBlock, 2, 3, 2),
          (ReverseResidualBlock, 1, 3, 1), (ReverseResidualBlock, 1, 3, 2),
      ]
      self.blocks = [cls(base * mult, size, stride) for cls, mult, size, stride in layout]
      # One output channel for grayscale targets, three for colored ones.
      self.final_conv = Conv(3 if config.has_colored_target else 1, 7, 1)

    def call(self, x, training=True):
      out = x
      for layer in self.blocks:
        out = layer(out, training=training)
      return tanh(self.final_conv(out))

  class Discriminator(tf.keras.Model):
    def __init__(self, config):
      super().__init__()
      base = 64
      # Each stage: a strided block followed by an unstrided one, doubling the filters.
      self.blocks = [ResidualBlock(base, 7, 2), ResidualBlock(base, 3, 1)]
      for mult in (2, 4, 8):
        self.blocks.append(ResidualBlock(base * mult, 3, 2))
        self.blocks.append(ResidualBlock(base * mult, 3, 1))
      self.dropout = Dropout(0.3)
      self.flatten = Flatten()
      self.fc = Dense(config.discriminator_classes)

    def call(self, x, training=True):
      out = x
      for layer in self.blocks:
        out = layer(out, training=training)
      out = self.dropout(out, training=training)
      return self.fc(self.flatten(out))
class ResidualThreeStridesThreeBlocks(Model):
  """Mirrored residual GAN: three strided stages with three residual blocks per stage."""

  class Generator(tf.keras.Model):
    def __init__(self, config):
      super().__init__()
      base = 64
      # Downsampling path: each stage leads with a strided block (except the 8x stage)
      # and refines with two unstrided blocks; the first stage uses a 7x7 kernel.
      down = []
      for mult, kernel, stride in ((1, 7, 2), (2, 3, 2), (4, 3, 2), (8, 3, 1)):
        down.append(ResidualBlock(base * mult, kernel, stride))
        down.append(ResidualBlock(base * mult, 3, 1))
        down.append(ResidualBlock(base * mult, 3, 1))
      # Upsampling path mirrors the encoder, ending each stage with a strided block.
      up = []
      for mult in (4, 2, 1):
        up.append(ReverseResidualBlock(base * mult, 3, 1))
        up.append(ReverseResidualBlock(base * mult, 3, 1))
        up.append(ReverseResidualBlock(base * mult, 3, 2))
      self.blocks = down + up
      # One output channel for grayscale targets, three for colored ones.
      self.final_conv = Conv(3 if config.has_colored_target else 1, 7, 1)

    def call(self, x, training=True):
      out = x
      for layer in self.blocks:
        out = layer(out, training=training)
      return tanh(self.final_conv(out))

  class Discriminator(tf.keras.Model):
    def __init__(self, config):
      super().__init__()
      base = 64
      # Each stage: a strided block then two unstrided refinements; 7x7 kernel up front.
      self.blocks = []
      for mult, kernel, stride in ((1, 7, 2), (2, 3, 2), (4, 3, 2), (8, 3, 2)):
        self.blocks.append(ResidualBlock(base * mult, kernel, stride))
        self.blocks.append(ResidualBlock(base * mult, 3, 1))
        self.blocks.append(ResidualBlock(base * mult, 3, 1))
      self.dropout = Dropout(0.3)
      self.flatten = Flatten()
      self.fc = Dense(config.discriminator_classes)

    def call(self, x, training=True):
      out = x
      for layer in self.blocks:
        out = layer(out, training=training)
      out = self.dropout(out, training=training)
      return self.fc(self.flatten(out))
class ResidualThreeStridesMoreFilters(Model):
  """Residual GAN close to CycleGAN's layout but with strided residual blocks and more filters."""

  class Generator(tf.keras.Model):
    def __init__(self, config):
      super().__init__()
      # Resembles CycleGAN: a 7x7 stem conv, strided downsampling, a run of residual
      # blocks at the bottleneck, then strided upsampling and a 7x7 output conv —
      # but with residual (de)convolutions and larger filter counts throughout.
      base = 64
      self.initial_conv = Conv(base, 7, 1)
      self.initial_batchnorm = BatchNormalization()
      layout = [
          (ResidualBlock, 1, 3, 2),
          (ResidualBlock, 2, 3, 2),
          (ResidualBlock, 4, 3, 2),
          (ResidualBlock, 8, 3, 1),
          (ResidualBlock, 8, 3, 1),
          (ResidualBlock, 8, 3, 1),
          (ResidualBlock, 8, 3, 1),
          (ReverseResidualBlock, 4, 3, 2),
          (ReverseResidualBlock, 2, 3, 2),
          (ReverseResidualBlock, 1, 3, 2),
      ]
      self.blocks = [cls(base * mult, size, stride) for cls, mult, size, stride in layout]
      # One output channel for grayscale targets, three for colored ones.
      self.final_conv = Conv(3 if config.has_colored_target else 1, 7, 1)

    def call(self, x, training=True):
      out = leaky_relu(self.initial_batchnorm(self.initial_conv(x), training=training))
      for layer in self.blocks:
        out = layer(out, training=training)
      return tanh(self.final_conv(out))

  class Discriminator(tf.keras.Model):
    def __init__(self, config):
      super().__init__()
      base = 64
      # Each stage: a strided block followed by an unstrided one, doubling the filters.
      self.blocks = [ResidualBlock(base, 7, 2), ResidualBlock(base, 3, 1)]
      for mult in (2, 4, 8):
        self.blocks.append(ResidualBlock(base * mult, 3, 2))
        self.blocks.append(ResidualBlock(base * mult, 3, 1))
      self.dropout = Dropout(0.3)
      self.flatten = Flatten()
      self.fc = Dense(config.discriminator_classes)

    def call(self, x, training=True):
      out = x
      for layer in self.blocks:
        out = layer(out, training=training)
      out = self.dropout(out, training=training)
      return self.fc(self.flatten(out))
class ResidualThreeStridesDeeper(Model):
  """Deeper mirrored residual GAN: four residual blocks per stage over three strides."""

  class Generator(tf.keras.Model):
    def __init__(self, config):
      super().__init__()
      base = 64
      self.initial_conv = Conv(base, 7, 1)
      self.initial_batchnorm = BatchNormalization()
      # Encoder: each stage leads with a strided block and refines with three unstrided ones.
      down = []
      for mult in (1, 2, 4):
        down.append(ResidualBlock(base * mult, 3, 2))
        down.extend(ResidualBlock(base * mult, 3, 1) for _ in range(3))
      # Decoder mirrors the encoder, ending each stage with a strided reverse block.
      up = []
      for mult in (4, 2, 1):
        up.extend(ReverseResidualBlock(base * mult, 3, 1) for _ in range(3))
        up.append(ReverseResidualBlock(base * mult, 3, 2))
      self.blocks = down + up
      # One output channel for grayscale targets, three for colored ones.
      self.final_conv = Conv(3 if config.has_colored_target else 1, 7, 1)

    def call(self, x, training=True):
      out = leaky_relu(self.initial_batchnorm(self.initial_conv(x), training=training))
      for layer in self.blocks:
        out = layer(out, training=training)
      return tanh(self.final_conv(out))

  class Discriminator(tf.keras.Model):
    def __init__(self, config):
      super().__init__()
      base = 64
      # Each stage: a strided block (7x7 up front) plus three unstrided refinements.
      self.blocks = []
      for mult, kernel in ((1, 7), (2, 3), (4, 3)):
        self.blocks.append(ResidualBlock(base * mult, kernel, 2))
        self.blocks.extend(ResidualBlock(base * mult, 3, 1) for _ in range(3))
      self.dropout = Dropout(0.3)
      self.flatten = Flatten()
      self.fc = Dense(config.discriminator_classes)

    def call(self, x, training=True):
      out = x
      for layer in self.blocks:
        out = layer(out, training=training)
      out = self.dropout(out, training=training)
      return self.fc(self.flatten(out))
class CycleGan(Model):
  """CycleGAN-style generator (stem conv, strided downsampling, 9 residual blocks, upsampling)."""

  class Generator(tf.keras.Model):
    def __init__(self, config):
      super().__init__()
      base = 32
      # Stem + two strided downsampling convs, nine residual blocks at the
      # bottleneck, then two strided deconvs back up.
      self.blocks = (
          [ConvBlock(base, 7, 1), ConvBlock(base * 2, 3, 2), ConvBlock(base * 4, 3, 2)]
          + [ResidualBlock(base * 4, 3, 1) for _ in range(9)]
          + [DeconvBlock(base * 2, 3, 2), DeconvBlock(base, 3, 2)]
      )
      # One output channel for grayscale targets, three for colored ones.
      self.final_conv = Conv(3 if config.has_colored_target else 1, 7, 1)

    def call(self, x, training=True):
      out = x
      for layer in self.blocks:
        out = layer(out, training=training)
      return tanh(self.final_conv(out))

  class Discriminator(tf.keras.Model):
    def __init__(self, config):
      super().__init__()
      # NOTE: the paper's discriminator is a 70x70 PatchGAN; this is a plain conv stack.
      base = 64
      self.blocks = [ConvBlock(base * mult, 4, 2) for mult in (1, 2, 4, 8)]
      self.final_conv = Conv(1, 4, 1)
      self.flatten = Flatten()
      self.fc = Dense(config.discriminator_classes)

    def call(self, x, training=True):
      out = x
      for layer in self.blocks:
        out = layer(out, training=training)
      # Deliberately no dropout in this discriminator.
      return self.fc(self.flatten(self.final_conv(out)))
class ResidualOneStride(Model):
  """Residual GAN that downsamples only once; most blocks keep the spatial resolution."""

  class Generator(tf.keras.Model):
    def __init__(self, config):
      super().__init__()
      base = 64
      # Single strided step down, unstrided residual body, single strided step up.
      layout = [
          (ConvBlock, 1, 7, 1),
          (ResidualBlock, 1, 3, 2),
          (ResidualBlock, 2, 3, 1),
          (ResidualBlock, 2, 3, 1),
          (ResidualBlock, 4, 3, 1),
          (ReverseResidualBlock, 4, 3, 1),
          (ReverseResidualBlock, 2, 3, 1),
          (ReverseResidualBlock, 2, 3, 1),
          (ReverseResidualBlock, 1, 3, 2),
      ]
      self.blocks = [cls(base * mult, size, stride) for cls, mult, size, stride in layout]
      # One output channel for grayscale targets, three for colored ones.
      self.final_conv = Conv(3 if config.has_colored_target else 1, 7, 1)

    def call(self, x, training=True):
      out = x
      for layer in self.blocks:
        out = layer(out, training=training)
      return tanh(self.final_conv(out))

  class Discriminator(tf.keras.Model):
    def __init__(self, config):
      super().__init__()
      base = 64
      # Conv stem, then strided/unstrided residual pairs with doubling filters.
      self.blocks = [ConvBlock(base, 7, 2), ResidualBlock(base, 3, 1)]
      for mult in (2, 4):
        self.blocks.append(ResidualBlock(base * mult, 3, 2))
        self.blocks.append(ResidualBlock(base * mult, 3, 1))
      self.dropout = Dropout(0.3)
      self.flatten = Flatten()
      self.fc = Dense(config.discriminator_classes)

    def call(self, x, training=True):
      out = x
      for layer in self.blocks:
        out = layer(out, training=training)
      out = self.dropout(out, training=training)
      return self.fc(self.flatten(out))
class ResidualAlternatingStrides(Model):
  """Generator that alternates strided downsampling and upsampling residual blocks."""

  class Generator(tf.keras.Model):
    def __init__(self, config):
      super().__init__()
      base = 64
      # Stem convs, then alternating (down, up) strided residual pairs, then a final deconv.
      self.blocks = [ConvBlock(base, 7, 1), ConvBlock(base, 3, 2)]
      for down_mult, up_mult in ((2, 2), (4, 8), (8, 4), (2, 2)):
        self.blocks.append(ResidualBlock(base * down_mult, 3, 2))
        self.blocks.append(ReverseResidualBlock(base * up_mult, 3, 2))
      self.blocks.append(DeconvBlock(base, 3, 2))
      # One output channel for grayscale targets, three for colored ones.
      self.final_conv = Conv(3 if config.has_colored_target else 1, 7, 1)

    def call(self, x, training=True):
      out = x
      for layer in self.blocks:
        out = layer(out, training=training)
      return tanh(self.final_conv(out))

  class Discriminator(tf.keras.Model):
    def __init__(self, config):
      super().__init__()
      base = 64
      # Conv stem, then strided/unstrided residual pairs with doubling filters.
      self.blocks = [ConvBlock(base, 7, 2), ResidualBlock(base, 3, 1)]
      for mult in (2, 4, 8):
        self.blocks.append(ResidualBlock(base * mult, 3, 2))
        self.blocks.append(ResidualBlock(base * mult, 3, 1))
      self.dropout = Dropout(0.3)
      self.flatten = Flatten()
      self.fc = Dense(config.discriminator_classes)

    def call(self, x, training=True):
      out = x
      for layer in self.blocks:
        out = layer(out, training=training)
      out = self.dropout(out, training=training)
      return self.fc(self.flatten(out))
class BottleneckResidualAlternatingStrides(Model):
  """ResidualAlternatingStrides with bottleneck blocks substituted for the plain ones."""
  # NOTE: structurally identical to "ResidualAlternatingStrides" even though the
  # bottleneck blocks would allow more capacity.

  class Generator(tf.keras.Model):
    def __init__(self, config):
      super().__init__()
      base = 64
      # Stem convs, then alternating (down, up) strided bottleneck pairs, then a final deconv.
      self.blocks = [ConvBlock(base, 7, 1), ConvBlock(base, 3, 2)]
      for down_mult, up_mult in ((2, 2), (4, 8), (8, 4), (2, 2)):
        self.blocks.append(BottleneckResidualBlock(base * down_mult, 3, 2))
        self.blocks.append(ReverseBottleneckResidualBlock(base * up_mult, 3, 2))
      self.blocks.append(DeconvBlock(base, 3, 2))
      # One output channel for grayscale targets, three for colored ones.
      self.final_conv = Conv(3 if config.has_colored_target else 1, 7, 1)

    def call(self, x, training=True):
      out = x
      for layer in self.blocks:
        out = layer(out, training=training)
      return tanh(self.final_conv(out))

  class Discriminator(tf.keras.Model):
    def __init__(self, config):
      super().__init__()
      base = 64
      # Conv stem, then strided/unstrided bottleneck pairs with doubling filters.
      self.blocks = [ConvBlock(base, 7, 2), BottleneckResidualBlock(base, 3, 1)]
      for mult in (2, 4, 8):
        self.blocks.append(BottleneckResidualBlock(base * mult, 3, 2))
        self.blocks.append(BottleneckResidualBlock(base * mult, 3, 1))
      self.dropout = Dropout(0.3)
      self.flatten = Flatten()
      self.fc = Dense(config.discriminator_classes)

    def call(self, x, training=True):
      out = x
      for layer in self.blocks:
        out = layer(out, training=training)
      out = self.dropout(out, training=training)
      return self.fc(self.flatten(out))
class BottleneckResidualAlternatingTwoStrides(Model):
  """Generator that repeats a down-down-up-up strided bottleneck motif three times."""

  class Generator(tf.keras.Model):
    def __init__(self, config):
      super().__init__()
      base = 64
      # Stem convs, then three repetitions of (8x down, 16x down, 16x up, 8x up),
      # all strided, followed by a final strided deconv.
      self.blocks = [ConvBlock(base, 7, 1), ConvBlock(base, 3, 2)]
      for _ in range(3):
        self.blocks.append(BottleneckResidualBlock(base * 8, 3, 2))
        self.blocks.append(BottleneckResidualBlock(base * 16, 3, 2))
        self.blocks.append(ReverseBottleneckResidualBlock(base * 16, 3, 2))
        self.blocks.append(ReverseBottleneckResidualBlock(base * 8, 3, 2))
      self.blocks.append(DeconvBlock(base, 3, 2))
      # One output channel for grayscale targets, three for colored ones.
      self.final_conv = Conv(3 if config.has_colored_target else 1, 7, 1)

    def call(self, x, training=True):
      out = x
      for layer in self.blocks:
        out = layer(out, training=training)
      return tanh(self.final_conv(out))

  class Discriminator(tf.keras.Model):
    def __init__(self, config):
      super().__init__()
      base = 64
      # Conv stem, then strided/unstrided bottleneck pairs with doubling filters.
      self.blocks = [ConvBlock(base, 7, 2), BottleneckResidualBlock(base, 3, 1)]
      for mult in (2, 4, 8):
        self.blocks.append(BottleneckResidualBlock(base * mult, 3, 2))
        self.blocks.append(BottleneckResidualBlock(base * mult, 3, 1))
      self.dropout = Dropout(0.3)
      self.flatten = Flatten()
      self.fc = Dense(config.discriminator_classes)

    def call(self, x, training=True):
      out = x
      for layer in self.blocks:
        out = layer(out, training=training)
      out = self.dropout(out, training=training)
      return self.fc(self.flatten(out))
class LargerBottleneckResidualAlternatingStrides(Model):
  """Alternating strided bottleneck GAN with a symmetric filter ramp (2,4,8,4,2)."""
  # NOTE: unlike "BottleneckResidualAlternatingStrides", the filter multipliers here
  # ramp symmetrically up to 8x and back down (2,2,4,4,8,8,4,4,2,2).

  class Generator(tf.keras.Model):
    def __init__(self, config):
      super().__init__()
      base = 64
      # Stem convs, then alternating (down, up) strided bottleneck pairs whose
      # filter count rises to 8x and falls again, then a final deconv.
      self.blocks = [ConvBlock(base, 7, 1), ConvBlock(base, 3, 2)]
      for mult in (2, 4, 8, 4, 2):
        self.blocks.append(BottleneckResidualBlock(base * mult, 3, 2))
        self.blocks.append(ReverseBottleneckResidualBlock(base * mult, 3, 2))
      self.blocks.append(DeconvBlock(base, 3, 2))
      # One output channel for grayscale targets, three for colored ones.
      self.final_conv = Conv(3 if config.has_colored_target else 1, 7, 1)

    def call(self, x, training=True):
      out = x
      for layer in self.blocks:
        out = layer(out, training=training)
      return tanh(self.final_conv(out))

  class Discriminator(tf.keras.Model):
    def __init__(self, config):
      super().__init__()
      base = 64
      # Conv stem, then strided/unstrided bottleneck pairs with doubling filters.
      self.blocks = [ConvBlock(base, 7, 2), BottleneckResidualBlock(base, 3, 1)]
      for mult in (2, 4, 8):
        self.blocks.append(BottleneckResidualBlock(base * mult, 3, 2))
        self.blocks.append(BottleneckResidualBlock(base * mult, 3, 1))
      self.dropout = Dropout(0.3)
      self.flatten = Flatten()
      self.fc = Dense(config.discriminator_classes)

    def call(self, x, training=True):
      out = x
      for layer in self.blocks:
        out = layer(out, training=training)
      out = self.dropout(out, training=training)
      return self.fc(self.flatten(out))
class ShallowUBlocks(Model):
  """Generator built from six stacked 4-level U-blocks after a conv stem."""

  class Generator(tf.keras.Model):
    def __init__(self, config):
      super().__init__()
      # Conv stem followed by six identical U-blocks (64 outer / 512 inner filters, depth 4).
      self.blocks = [ConvBlock(64)] + [UBlock(64, 512, 4) for _ in range(6)]
      # One output channel for grayscale targets, three for colored ones.
      self.final_conv = Conv(3 if config.has_colored_target else 1, 7, 1)

    def call(self, x, training=True):
      out = x
      for layer in self.blocks:
        out = layer(out, training=training)
      return tanh(self.final_conv(out))

  class Discriminator(tf.keras.Model):
    def __init__(self, config):
      super().__init__()
      base = 64
      # Conv stem, then strided/unstrided residual pairs with doubling filters.
      self.blocks = [ConvBlock(base, 7, 2), ResidualBlock(base, 3, 1)]
      for mult in (2, 4, 8):
        self.blocks.append(ResidualBlock(base * mult, 3, 2))
        self.blocks.append(ResidualBlock(base * mult, 3, 1))
      self.dropout = Dropout(0.3)
      self.flatten = Flatten()
      self.fc = Dense(config.discriminator_classes)

    def call(self, x, training=True):
      out = x
      for layer in self.blocks:
        out = layer(out, training=training)
      out = self.dropout(out, training=training)
      return self.fc(self.flatten(out))
class DeepUBlocks(Model):
  """Generator built from four stacked 6-level U-blocks after a conv stem."""

  class Generator(tf.keras.Model):
    def __init__(self, config):
      super().__init__()
      # Conv stem followed by four identical U-blocks (128 outer / 512 inner filters, depth 6).
      self.blocks = [ConvBlock(128)] + [UBlock(128, 512, 6) for _ in range(4)]
      # One output channel for grayscale targets, three for colored ones.
      self.final_conv = Conv(3 if config.has_colored_target else 1, 7, 1)

    def call(self, x, training=True):
      out = x
      for layer in self.blocks:
        out = layer(out, training=training)
      return tanh(self.final_conv(out))

  class Discriminator(tf.keras.Model):
    def __init__(self, config):
      super().__init__()
      base = 64
      # Conv stem, then strided/unstrided residual pairs with doubling filters.
      self.blocks = [ConvBlock(base, 7, 2), ResidualBlock(base, 3, 1)]
      for mult in (2, 4, 8):
        self.blocks.append(ResidualBlock(base * mult, 3, 2))
        self.blocks.append(ResidualBlock(base * mult, 3, 1))
      self.dropout = Dropout(0.3)
      self.flatten = Flatten()
      self.fc = Dense(config.discriminator_classes)

    def call(self, x, training=True):
      out = x
      for layer in self.blocks:
        out = layer(out, training=training)
      out = self.dropout(out, training=training)
      return self.fc(self.flatten(out))
class SimpleResidual(Model):
  """Plain encoder / residual body / decoder generator with 5x5 kernels throughout."""

  class Generator(tf.keras.Model):
    def __init__(self, config):
      super().__init__()
      base = 64  # 32*2 in the original notation
      # Three strided downsampling convs, four residual blocks, three strided deconvs.
      self.blocks = (
          [ConvBlock(base * mult, 5, 2) for mult in (1, 2, 4)]
          + [ResidualBlock(base * 8, 5, 1) for _ in range(4)]
          + [DeconvBlock(base * mult, 5, 2) for mult in (4, 2, 1)]
      )
      # One output channel for grayscale targets, three for colored ones.
      self.final_conv = Conv(3 if config.has_colored_target else 1, 5, 1)

    def call(self, x, training=True):
      out = x
      for layer in self.blocks:
        out = layer(out, training=training)
      return tanh(self.final_conv(out))

  class Discriminator(tf.keras.Model):
    def __init__(self, config):
      super().__init__()
      base = 128  # 64*2 in the original notation
      self.blocks = [ConvBlock(base * mult, 4, 2) for mult in (1, 2, 4, 8, 16)]
      self.dropout = Dropout(0.3)
      self.flatten = Flatten()
      self.fc = Dense(config.discriminator_classes, use_bias=False)

    def call(self, x, training=True):
      out = x
      for layer in self.blocks:
        out = layer(out, training=training)
      out = self.dropout(out, training=training)
      return self.fc(self.flatten(out))
class SkipResidual(Model):
  """U-Net-like generator: encoder / residual bottleneck / decoder with skip concatenations."""

  class Generator(tf.keras.Model):
    def __init__(self, config):
      super().__init__()
      base = 64  # 32*2 in the original notation
      self.encoder = [ConvBlock(base * mult, 5, 2) for mult in (1, 2, 4)]
      # Bottleneck: four projected residual blocks (channel count grows via concat skips).
      self.res = [ResidualBlock(base * 8, 5, 1, project_shortcut=True) for _ in range(4)]
      self.decoder = [DeconvBlock(base * mult, 5, 2) for mult in (4, 2, 1)]
      # One output channel for grayscale targets, three for colored ones.
      self.final_conv = Conv(3 if config.has_colored_target else 1, 5, 1)

    def call(self, x, training=True):
      # Record the input and the first two encoder activations as skip sources.
      skips = [x]
      out = x
      for layer in self.encoder[:-1]:
        out = layer(out, training=training)
        skips.append(out)
      out = self.encoder[-1](out, training=training)
      for layer in self.res:
        out = layer(out, training=training)
      # Each decoder level is followed by concatenating the matching skip tensor.
      for layer in self.decoder:
        out = layer(out, training=training)
        out = tf.concat([out, skips.pop()], axis=-1)
      return tanh(self.final_conv(out))

  class Discriminator(tf.keras.Model):
    def __init__(self, config):
      super().__init__()
      base = 256  # 64*2*2 in the original notation
      # A fifth (16x) conv stage was tried and disabled.
      self.blocks = [ConvBlock(base * mult, 4, 2) for mult in (1, 2, 4, 8)]
      self.dropout = Dropout(0.3)
      self.flatten = Flatten()
      self.fc = Dense(config.discriminator_classes, use_bias=False)

    def call(self, x, training=True):
      out = x
      for layer in self.blocks:
        out = layer(out, training=training)
      out = self.dropout(out, training=training)
      return self.fc(self.flatten(out))
class SkipResidualDropout(Model):
  """SkipResidual variant with a stronger (0.5) discriminator dropout."""

  class Generator(tf.keras.Model):
    def __init__(self, config):
      super().__init__()
      base = 64  # 32*2 in the original notation
      self.encoder = [ConvBlock(base * mult, 5, 2) for mult in (1, 2, 4)]
      # Bottleneck: four projected residual blocks (channel count grows via concat skips).
      self.res = [ResidualBlock(base * 8, 5, 1, project_shortcut=True) for _ in range(4)]
      self.decoder = [DeconvBlock(base * mult, 5, 2) for mult in (4, 2, 1)]
      # One output channel for grayscale targets, three for colored ones.
      self.final_conv = Conv(3 if config.has_colored_target else 1, 5, 1)
      # Generator-side dropout after the encoder was tried and disabled.

    def call(self, x, training=True):
      # Record the input and the first two encoder activations as skip sources.
      skips = [x]
      out = x
      for layer in self.encoder[:-1]:
        out = layer(out, training=training)
        skips.append(out)
      out = self.encoder[-1](out, training=training)
      for layer in self.res:
        out = layer(out, training=training)
      # Each decoder level is followed by concatenating the matching skip tensor.
      for layer in self.decoder:
        out = layer(out, training=training)
        out = tf.concat([out, skips.pop()], axis=-1)
      return tanh(self.final_conv(out))

  class Discriminator(tf.keras.Model):
    def __init__(self, config):
      super().__init__()
      base = 256  # 64*2*2 in the original notation
      # A fifth (16x) conv stage was tried and disabled.
      self.blocks = [ConvBlock(base * mult, 4, 2) for mult in (1, 2, 4, 8)]
      self.dropout = Dropout(0.5)
      self.flatten = Flatten()
      self.fc = Dense(config.discriminator_classes, use_bias=False)

    def call(self, x, training=True):
      out = x
      for layer in self.blocks:
        out = layer(out, training=training)
      out = self.dropout(out, training=training)
      return self.fc(self.flatten(out))
class NoSkipTwoStrideMsDisc(Model):
    """Two-stride residual generator WITHOUT skip connections, paired with a
    two-scale multiscale discriminator."""

    class Generator(tf.keras.Model):
        """Encoder (2 strided convs) -> 4 residual blocks -> decoder
        (2 strided deconvs); no skip connections anywhere."""

        def __init__(self, config):
            super(NoSkipTwoStrideMsDisc.Generator, self).__init__()
            tf.logging.fatal("Not using any skip connections!")
            initial_filters = 32*1
            self.encoder = [
                ConvBlock(initial_filters*1, 5, 2),
                ConvBlock(initial_filters*2, 5, 2),
            ]
            self.res = [
                ResidualBlock(initial_filters*4, 5, 1, project_shortcut=True),
                ResidualBlock(initial_filters*4, 5, 1, project_shortcut=True),
                ResidualBlock(initial_filters*4, 5, 1, project_shortcut=True),
                ResidualBlock(initial_filters*4, 5, 1, project_shortcut=True),
            ]
            self.decoder = [
                DeconvBlock(initial_filters*2, 5, 2),
                DeconvBlock(initial_filters*1, 5, 2),
            ]
            # Single output channel unless the target images are RGB.
            self.final_conv = Conv(3 if config.has_colored_target else 1, 5, 1)

        def call(self, x, training=True):
            x = self.encoder[0](x, training=training)
            x = self.encoder[1](x, training=training)
            for block in self.res:
                x = block(x, training=training)
            x = self.decoder[0](x, training=training)
            x = self.decoder[1](x, training=training)
            return tanh(self.final_conv(x))

    class Discriminator(tf.keras.Model):
        """Averages the logits of per-scale sub-discriminators (1x and 0.5x)."""

        class MultiscaleDisc(tf.keras.Model):
            """Single-scale conv discriminator; optionally resizes its input
            by `scaling_factor` before the conv stack."""

            def __init__(self, config, scaling_factor, dropout, resolution):
                super(NoSkipTwoStrideMsDisc.Discriminator.MultiscaleDisc, self).__init__()
                assert scaling_factor > 0
                if scaling_factor != 1:
                    size_x = int(resolution * scaling_factor)
                    size_y = int(resolution * scaling_factor)
                    tf.logging.info("Multiscale discriminator operating on resolution: {}x{}".format(size_x, size_y))
                    self.resize = lambda x: tf.image.resize_nearest_neighbor(x, (size_x, size_y))
                else:
                    tf.logging.info("Multiscale discriminator operating on regular resolution")
                    self.resize = lambda x: x
                initial_filters = 32*1
                self.blocks = [
                    ConvBlock(initial_filters*1, 4, 2),
                    ConvBlock(initial_filters*2, 4, 2),
                    ConvBlock(initial_filters*4, 4, 2),
                    ConvBlock(initial_filters*8, 4, 2),
                ]
                self.dropout = dropout
                self.flatten = Flatten()
                self.fc = Dense(config.discriminator_classes, use_bias=False)

            def call(self, x, training):
                x = self.resize(x)
                for block in self.blocks:
                    x = block(x, training=training)
                x = self.dropout(x, training=training)
                x = self.flatten(x)
                return self.fc(x)

        def __init__(self, config):
            super(NoSkipTwoStrideMsDisc.Discriminator, self).__init__()
            resolution = 256//2
            tf.logging.fatal("Using MS disc for {0}x{0} patches!".format(resolution))
            self.discriminators = [NoSkipTwoStrideMsDisc.Discriminator.MultiscaleDisc(
                config, factor, Dropout(0.3), resolution) for factor in [1, 0.5]]

        def call(self, x, training=True):
            # Average the per-scale logits into one score per sample.
            return tf.reduce_mean(tf.concat([disc(x, training) for disc in self.discriminators], axis=-1), axis=-1)

        def summary(self, line_length=None, positions=None, print_fn=None):
            # Bug fix: the base-class summary tolerates print_fn=None, but the
            # direct print_fn(...) call below crashed with the default argument.
            print_fn = print_fn or print
            super(NoSkipTwoStrideMsDisc.Discriminator, self).summary(line_length, positions, print_fn)
            print_fn("\nDetails:")
            for discriminator in self.discriminators:
                discriminator.summary(line_length, positions, print_fn)
class ConcatSkipThreeStrideMsDisc(Model):
    """Three-stride generator with concatenated skip connections, paired with
    a two-scale multiscale discriminator."""

    class Generator(tf.keras.Model):
        """Encoder (3 strided convs) -> 4 residual blocks -> decoder
        (3 strided deconvs); encoder activations and the input are
        concatenated back in after each deconv."""

        def __init__(self, config):
            super(ConcatSkipThreeStrideMsDisc.Generator, self).__init__()
            tf.logging.fatal("Concatenating skip connections!")
            initial_filters = 32*1
            self.encoder = [
                ConvBlock(initial_filters*1, 5, 2),
                ConvBlock(initial_filters*2, 5, 2),
                ConvBlock(initial_filters*4, 5, 2),
            ]
            self.res = [
                ResidualBlock(initial_filters*8, 5, 1, project_shortcut=True),
                ResidualBlock(initial_filters*8, 5, 1, project_shortcut=True),
                ResidualBlock(initial_filters*8, 5, 1, project_shortcut=True),
                ResidualBlock(initial_filters*8, 5, 1, project_shortcut=True),
            ]
            self.decoder = [
                DeconvBlock(initial_filters*4, 5, 2),
                DeconvBlock(initial_filters*2, 5, 2),
                DeconvBlock(initial_filters*1, 5, 2),
            ]
            # Single output channel unless the target images are RGB.
            self.final_conv = Conv(3 if config.has_colored_target else 1, 5, 1)

        def call(self, x, training=True):
            e0 = x
            e1 = self.encoder[0](x, training=training)
            e2 = self.encoder[1](e1, training=training)
            x = self.encoder[2](e2, training=training)
            for block in self.res:
                x = block(x, training=training)
            x = self.decoder[0](x, training=training)
            x = tf.concat([x, e2], axis=-1)
            x = self.decoder[1](x, training=training)
            x = tf.concat([x, e1], axis=-1)
            x = self.decoder[2](x, training=training)
            x = tf.concat([x, e0], axis=-1)
            return tanh(self.final_conv(x))

    class Discriminator(tf.keras.Model):
        """Averages the logits of per-scale sub-discriminators (1x and 0.5x)."""

        class MultiscaleDisc(tf.keras.Model):
            """Single-scale conv discriminator; optionally resizes its input
            by `scaling_factor` before the conv stack."""

            def __init__(self, config, scaling_factor, dropout, resolution):
                super(ConcatSkipThreeStrideMsDisc.Discriminator.MultiscaleDisc, self).__init__()
                assert scaling_factor > 0
                if scaling_factor != 1:
                    size_x = int(resolution * scaling_factor)
                    size_y = int(resolution * scaling_factor)
                    tf.logging.info("Multiscale discriminator operating on resolution: {}x{}".format(size_x, size_y))
                    self.resize = lambda x: tf.image.resize_nearest_neighbor(x, (size_x, size_y))
                else:
                    tf.logging.info("Multiscale discriminator operating on regular resolution")
                    self.resize = lambda x: x
                initial_filters = 32*1
                self.blocks = [
                    ConvBlock(initial_filters*1, 4, 2),
                    ConvBlock(initial_filters*2, 4, 2),
                    ConvBlock(initial_filters*4, 4, 2),
                    ConvBlock(initial_filters*8, 4, 2),
                ]
                self.dropout = dropout
                self.flatten = Flatten()
                self.fc = Dense(config.discriminator_classes, use_bias=False)

            def call(self, x, training):
                x = self.resize(x)
                for block in self.blocks:
                    x = block(x, training=training)
                x = self.dropout(x, training=training)
                x = self.flatten(x)
                return self.fc(x)

        def __init__(self, config):
            super(ConcatSkipThreeStrideMsDisc.Discriminator, self).__init__()
            resolution = 256//2
            tf.logging.fatal("Using MS disc for {0}x{0} patches!".format(resolution))
            self.discriminators = [ConcatSkipThreeStrideMsDisc.Discriminator.MultiscaleDisc(
                config, factor, Dropout(0.3), resolution) for factor in [1, 0.5]]

        def call(self, x, training=True):
            # Average the per-scale logits into one score per sample.
            return tf.reduce_mean(tf.concat([disc(x, training) for disc in self.discriminators], axis=-1), axis=-1)

        def summary(self, line_length=None, positions=None, print_fn=None):
            # Bug fix: the base-class summary tolerates print_fn=None, but the
            # direct print_fn(...) call below crashed with the default argument.
            print_fn = print_fn or print
            super(ConcatSkipThreeStrideMsDisc.Discriminator, self).summary(line_length, positions, print_fn)
            print_fn("\nDetails:")
            for discriminator in self.discriminators:
                discriminator.summary(line_length, positions, print_fn)
class ConcatSkipTwoStrideMsDisc(Model):
    """Two-stride generator with concatenated skip connections, paired with
    a two-scale multiscale discriminator."""

    class Generator(tf.keras.Model):
        """Encoder (2 strided convs) -> 4 residual blocks -> decoder
        (2 strided deconvs); encoder activations and the input are
        concatenated back in (here BEFORE each deconv)."""

        def __init__(self, config):
            super(ConcatSkipTwoStrideMsDisc.Generator, self).__init__()
            tf.logging.fatal("Concatenating skip connections!")
            initial_filters = 32*1
            self.encoder = [
                ConvBlock(initial_filters*1, 5, 2),
                ConvBlock(initial_filters*2, 5, 2),
            ]
            self.res = [
                ResidualBlock(initial_filters*4, 5, 1, project_shortcut=True),
                ResidualBlock(initial_filters*4, 5, 1, project_shortcut=True),
                ResidualBlock(initial_filters*4, 5, 1, project_shortcut=True),
                ResidualBlock(initial_filters*4, 5, 1, project_shortcut=True),
            ]
            self.decoder = [
                DeconvBlock(initial_filters*2, 5, 2),
                DeconvBlock(initial_filters*1, 5, 2),
            ]
            # Single output channel unless the target images are RGB.
            self.final_conv = Conv(3 if config.has_colored_target else 1, 5, 1)

        def call(self, x, training=True):
            e0 = x
            e1 = self.encoder[0](x, training=training)
            e2 = self.encoder[1](e1, training=training)
            x = e2
            for block in self.res:
                x = block(x, training=training)
            # Note: skips are concatenated before each deconv in this variant.
            x = tf.concat([x, e2], axis=-1)
            x = self.decoder[0](x, training=training)
            x = tf.concat([x, e1], axis=-1)
            x = self.decoder[1](x, training=training)
            x = tf.concat([x, e0], axis=-1)
            return tanh(self.final_conv(x))

    class Discriminator(tf.keras.Model):
        """Averages the logits of per-scale sub-discriminators (1x and 0.5x)."""

        class MultiscaleDisc(tf.keras.Model):
            """Single-scale conv discriminator; optionally resizes its input
            by `scaling_factor` before the conv stack."""

            def __init__(self, config, scaling_factor, dropout, resolution):
                super(ConcatSkipTwoStrideMsDisc.Discriminator.MultiscaleDisc, self).__init__()
                assert scaling_factor > 0
                if scaling_factor != 1:
                    size_x = int(resolution * scaling_factor)
                    size_y = int(resolution * scaling_factor)
                    tf.logging.info("Multiscale discriminator operating on resolution: {}x{}".format(size_x, size_y))
                    self.resize = lambda x: tf.image.resize_nearest_neighbor(x, (size_x, size_y))
                else:
                    tf.logging.info("Multiscale discriminator operating on regular resolution")
                    self.resize = lambda x: x
                initial_filters = 32*1
                self.blocks = [
                    ConvBlock(initial_filters*1, 4, 2),
                    ConvBlock(initial_filters*2, 4, 2),
                    ConvBlock(initial_filters*4, 4, 2),
                    ConvBlock(initial_filters*8, 4, 2),
                ]
                self.dropout = dropout
                self.flatten = Flatten()
                self.fc = Dense(config.discriminator_classes, use_bias=False)

            def call(self, x, training):
                x = self.resize(x)
                for block in self.blocks:
                    x = block(x, training=training)
                x = self.dropout(x, training=training)
                x = self.flatten(x)
                return self.fc(x)

        def __init__(self, config):
            super(ConcatSkipTwoStrideMsDisc.Discriminator, self).__init__()
            resolution = 256//2
            tf.logging.fatal("Using MS disc for {0}x{0} patches!".format(resolution))
            self.discriminators = [ConcatSkipTwoStrideMsDisc.Discriminator.MultiscaleDisc(
                config, factor, Dropout(0.3), resolution) for factor in [1, 0.5]]

        def call(self, x, training=True):
            # Average the per-scale logits into one score per sample.
            return tf.reduce_mean(tf.concat([disc(x, training) for disc in self.discriminators], axis=-1), axis=-1)

        def summary(self, line_length=None, positions=None, print_fn=None):
            # Bug fix: the base-class summary tolerates print_fn=None, but the
            # direct print_fn(...) call below crashed with the default argument.
            print_fn = print_fn or print
            super(ConcatSkipTwoStrideMsDisc.Discriminator, self).summary(line_length, positions, print_fn)
            print_fn("\nDetails:")
            for discriminator in self.discriminators:
                discriminator.summary(line_length, positions, print_fn)
class ConcatAddSkipTwoStrideMsDisc(Model):
    """Two-stride generator with concatenated skips whose output is added to
    the input image (residual output), paired with a two-scale multiscale
    discriminator."""

    class Generator(tf.keras.Model):
        """Like the concat-skip two-stride generator, but the tanh output is
        added to the first 3 input channels and clipped to [-1, 1]."""

        def __init__(self, config):
            super(ConcatAddSkipTwoStrideMsDisc.Generator, self).__init__()
            tf.logging.fatal("Concatenating skip connections and adding to output!")
            initial_filters = 32*1
            self.encoder = [
                ConvBlock(initial_filters*1, 5, 2),
                ConvBlock(initial_filters*2, 5, 2),
            ]
            self.res = [
                ResidualBlock(initial_filters*4, 5, 1, project_shortcut=True),
                ResidualBlock(initial_filters*4, 5, 1, project_shortcut=True),
                ResidualBlock(initial_filters*4, 5, 1, project_shortcut=True),
                ResidualBlock(initial_filters*4, 5, 1, project_shortcut=True),
            ]
            self.decoder = [
                DeconvBlock(initial_filters*2, 5, 2),
                DeconvBlock(initial_filters*1, 5, 2),
            ]
            # Single output channel unless the target images are RGB.
            self.final_conv = Conv(3 if config.has_colored_target else 1, 5, 1)

        def call(self, x, training=True):
            # First 3 channels are assumed to be the image to refine
            # (remaining channels, if any, are auxiliary input).
            images = x[:, :, :, :3]
            e0 = x
            e1 = self.encoder[0](x, training=training)
            e2 = self.encoder[1](e1, training=training)
            x = e2
            for block in self.res:
                x = block(x, training=training)
            x = tf.concat([x, e2], axis=-1)
            x = self.decoder[0](x, training=training)
            x = tf.concat([x, e1], axis=-1)
            x = self.decoder[1](x, training=training)
            x = tf.concat([x, e0], axis=-1)
            # Residual output: predicted delta + input image, clipped to [-1, 1].
            return tf.minimum(tf.maximum(tf.add(tanh(self.final_conv(x)), images), -1), 1)

    class Discriminator(tf.keras.Model):
        """Averages the logits of per-scale sub-discriminators (1x and 0.5x)."""

        class MultiscaleDisc(tf.keras.Model):
            """Single-scale conv discriminator; optionally resizes its input
            by `scaling_factor` before the conv stack."""

            def __init__(self, config, scaling_factor, dropout, resolution):
                super(ConcatAddSkipTwoStrideMsDisc.Discriminator.MultiscaleDisc, self).__init__()
                assert scaling_factor > 0
                if scaling_factor != 1:
                    size_x = int(resolution * scaling_factor)
                    size_y = int(resolution * scaling_factor)
                    tf.logging.info("Multiscale discriminator operating on resolution: {}x{}".format(size_x, size_y))
                    self.resize = lambda x: tf.image.resize_nearest_neighbor(x, (size_x, size_y))
                else:
                    tf.logging.info("Multiscale discriminator operating on regular resolution")
                    self.resize = lambda x: x
                initial_filters = 32*1
                self.blocks = [
                    ConvBlock(initial_filters*1, 4, 2),
                    ConvBlock(initial_filters*2, 4, 2),
                    ConvBlock(initial_filters*4, 4, 2),
                    ConvBlock(initial_filters*8, 4, 2),
                ]
                self.dropout = dropout
                self.flatten = Flatten()
                self.fc = Dense(config.discriminator_classes, use_bias=False)

            def call(self, x, training):
                x = self.resize(x)
                for block in self.blocks:
                    x = block(x, training=training)
                x = self.dropout(x, training=training)
                x = self.flatten(x)
                return self.fc(x)

        def __init__(self, config):
            super(ConcatAddSkipTwoStrideMsDisc.Discriminator, self).__init__()
            resolution = 256//2
            tf.logging.fatal("Using MS disc for {0}x{0} patches!".format(resolution))
            self.discriminators = [ConcatAddSkipTwoStrideMsDisc.Discriminator.MultiscaleDisc(
                config, factor, Dropout(0.3), resolution) for factor in [1, 0.5]]

        def call(self, x, training=True):
            # Average the per-scale logits into one score per sample.
            return tf.reduce_mean(tf.concat([disc(x, training) for disc in self.discriminators], axis=-1), axis=-1)

        def summary(self, line_length=None, positions=None, print_fn=None):
            # Bug fix: the base-class summary tolerates print_fn=None, but the
            # direct print_fn(...) call below crashed with the default argument.
            print_fn = print_fn or print
            super(ConcatAddSkipTwoStrideMsDisc.Discriminator, self).summary(line_length, positions, print_fn)
            print_fn("\nDetails:")
            for discriminator in self.discriminators:
                discriminator.summary(line_length, positions, print_fn)
class AddSkipThreeStrideMsDisc(Model):
    """Three-stride generator with ADDITIVE skip connections, paired with a
    two-scale multiscale discriminator. Channel counts of the last residual
    block and the decoder are halved so the additions line up."""

    class Generator(tf.keras.Model):
        """Encoder (3 strided convs) -> 4 residual blocks -> decoder
        (3 strided deconvs); encoder activations are ADDED (not concatenated)
        on the way up."""

        def __init__(self, config):
            super(AddSkipThreeStrideMsDisc.Generator, self).__init__()
            tf.logging.fatal("Adding skip connections!")
            initial_filters = 32*1
            self.encoder = [
                ConvBlock(initial_filters*1, 5, 2),
                ConvBlock(initial_filters*2, 5, 2),
                ConvBlock(initial_filters*4, 5, 2),
            ]
            self.res = [
                ResidualBlock(initial_filters*8, 5, 1, project_shortcut=True),
                ResidualBlock(initial_filters*8, 5, 1, project_shortcut=True),
                ResidualBlock(initial_filters*8, 5, 1, project_shortcut=True),
                # Halved so the output matches e2's channel count for tf.add.
                ResidualBlock(initial_filters*8//2, 5, 1, project_shortcut=True),
            ]
            self.decoder = [
                # Halved so each deconv output matches the skip it is added to.
                DeconvBlock(initial_filters*4//2, 5, 2),
                DeconvBlock(initial_filters*2//2, 5, 2),
                DeconvBlock(initial_filters*1, 5, 2),
            ]
            # Single output channel unless the target images are RGB.
            self.final_conv = Conv(3 if config.has_colored_target else 1, 5, 1)

        def call(self, x, training=True):
            e0 = self.encoder[0](x, training=training)
            e1 = self.encoder[1](e0, training=training)
            e2 = self.encoder[2](e1, training=training)
            x = e2
            for block in self.res:
                x = block(x, training=training)
            x = tf.add(x, e2)
            x = self.decoder[0](x, training=training)
            x = tf.add(x, e1)
            x = self.decoder[1](x, training=training)
            x = tf.add(x, e0)
            x = self.decoder[2](x, training=training)
            return tanh(self.final_conv(x))

    class Discriminator(tf.keras.Model):
        """Averages the logits of per-scale sub-discriminators (1x and 0.5x)."""

        class MultiscaleDisc(tf.keras.Model):
            """Single-scale conv discriminator; optionally resizes its input
            by `scaling_factor` before the conv stack."""

            def __init__(self, config, scaling_factor, dropout, resolution):
                super(AddSkipThreeStrideMsDisc.Discriminator.MultiscaleDisc, self).__init__()
                assert scaling_factor > 0
                if scaling_factor != 1:
                    size_x = int(resolution * scaling_factor)
                    size_y = int(resolution * scaling_factor)
                    tf.logging.info("Multiscale discriminator operating on resolution: {}x{}".format(size_x, size_y))
                    self.resize = lambda x: tf.image.resize_nearest_neighbor(x, (size_x, size_y))
                else:
                    tf.logging.info("Multiscale discriminator operating on regular resolution")
                    self.resize = lambda x: x
                initial_filters = 32*1
                self.blocks = [
                    ConvBlock(initial_filters*1, 4, 2),
                    ConvBlock(initial_filters*2, 4, 2),
                    ConvBlock(initial_filters*4, 4, 2),
                    ConvBlock(initial_filters*8, 4, 2),
                ]
                self.dropout = dropout
                self.flatten = Flatten()
                self.fc = Dense(config.discriminator_classes, use_bias=False)

            def call(self, x, training):
                x = self.resize(x)
                for block in self.blocks:
                    x = block(x, training=training)
                x = self.dropout(x, training=training)
                x = self.flatten(x)
                return self.fc(x)

        def __init__(self, config):
            super(AddSkipThreeStrideMsDisc.Discriminator, self).__init__()
            resolution = 256//2
            tf.logging.fatal("Using MS disc for {0}x{0} patches!".format(resolution))
            self.discriminators = [AddSkipThreeStrideMsDisc.Discriminator.MultiscaleDisc(
                config, factor, Dropout(0.3), resolution) for factor in [1, 0.5]]

        def call(self, x, training=True):
            # Average the per-scale logits into one score per sample.
            return tf.reduce_mean(tf.concat([disc(x, training) for disc in self.discriminators], axis=-1), axis=-1)

        def summary(self, line_length=None, positions=None, print_fn=None):
            # Bug fix: the base-class summary tolerates print_fn=None, but the
            # direct print_fn(...) call below crashed with the default argument.
            print_fn = print_fn or print
            super(AddSkipThreeStrideMsDisc.Discriminator, self).summary(line_length, positions, print_fn)
            print_fn("\nDetails:")
            for discriminator in self.discriminators:
                discriminator.summary(line_length, positions, print_fn)
class AddSkipTwoStrideMsDisc(Model):
    """Two-stride generator with ADDITIVE skip connections, paired with a
    two-scale multiscale discriminator. Channel counts of the last residual
    block and the first deconv are halved so the additions line up."""

    class Generator(tf.keras.Model):
        """Encoder (2 strided convs) -> 4 residual blocks -> decoder
        (2 strided deconvs); encoder activations are ADDED on the way up."""

        def __init__(self, config):
            super(AddSkipTwoStrideMsDisc.Generator, self).__init__()
            tf.logging.fatal("Adding skip connections!")
            initial_filters = 32*1
            self.encoder = [
                ConvBlock(initial_filters*1, 5, 2),
                ConvBlock(initial_filters*2, 5, 2),
            ]
            self.res = [
                ResidualBlock(initial_filters*4, 5, 1, project_shortcut=True),
                ResidualBlock(initial_filters*4, 5, 1, project_shortcut=True),
                ResidualBlock(initial_filters*4, 5, 1, project_shortcut=True),
                # Halved so the output matches e1's channel count for tf.add.
                ResidualBlock(initial_filters*4//2, 5, 1, project_shortcut=True),
            ]
            self.decoder = [
                # Halved so the deconv output matches e0's channel count.
                DeconvBlock(initial_filters*2//2, 5, 2),
                DeconvBlock(initial_filters*1, 5, 2),
            ]
            # Single output channel unless the target images are RGB.
            self.final_conv = Conv(3 if config.has_colored_target else 1, 5, 1)

        def call(self, x, training=True):
            e0 = self.encoder[0](x, training=training)
            e1 = self.encoder[1](e0, training=training)
            x = e1
            for block in self.res:
                x = block(x, training=training)
            x = tf.add(x, e1)
            x = self.decoder[0](x, training=training)
            x = tf.add(x, e0)
            x = self.decoder[1](x, training=training)
            return tanh(self.final_conv(x))

    class Discriminator(tf.keras.Model):
        """Averages the logits of per-scale sub-discriminators (1x and 0.5x)."""

        class MultiscaleDisc(tf.keras.Model):
            """Single-scale conv discriminator; optionally resizes its input
            by `scaling_factor` before the conv stack."""

            def __init__(self, config, scaling_factor, dropout, resolution):
                super(AddSkipTwoStrideMsDisc.Discriminator.MultiscaleDisc, self).__init__()
                assert scaling_factor > 0
                if scaling_factor != 1:
                    size_x = int(resolution * scaling_factor)
                    size_y = int(resolution * scaling_factor)
                    tf.logging.info("Multiscale discriminator operating on resolution: {}x{}".format(size_x, size_y))
                    self.resize = lambda x: tf.image.resize_nearest_neighbor(x, (size_x, size_y))
                else:
                    tf.logging.info("Multiscale discriminator operating on regular resolution")
                    self.resize = lambda x: x
                initial_filters = 32*1
                self.blocks = [
                    ConvBlock(initial_filters*1, 4, 2),
                    ConvBlock(initial_filters*2, 4, 2),
                    ConvBlock(initial_filters*4, 4, 2),
                    ConvBlock(initial_filters*8, 4, 2),
                ]
                self.dropout = dropout
                self.flatten = Flatten()
                self.fc = Dense(config.discriminator_classes, use_bias=False)

            def call(self, x, training):
                x = self.resize(x)
                for block in self.blocks:
                    x = block(x, training=training)
                x = self.dropout(x, training=training)
                x = self.flatten(x)
                return self.fc(x)

        def __init__(self, config):
            super(AddSkipTwoStrideMsDisc.Discriminator, self).__init__()
            resolution = 256//2
            tf.logging.fatal("Using MS disc for {0}x{0} patches!".format(resolution))
            self.discriminators = [AddSkipTwoStrideMsDisc.Discriminator.MultiscaleDisc(
                config, factor, Dropout(0.3), resolution) for factor in [1, 0.5]]

        def call(self, x, training=True):
            # Average the per-scale logits into one score per sample.
            return tf.reduce_mean(tf.concat([disc(x, training) for disc in self.discriminators], axis=-1), axis=-1)

        def summary(self, line_length=None, positions=None, print_fn=None):
            # Bug fix: the base-class summary tolerates print_fn=None, but the
            # direct print_fn(...) call below crashed with the default argument.
            print_fn = print_fn or print
            super(AddSkipTwoStrideMsDisc.Discriminator, self).summary(line_length, positions, print_fn)
            print_fn("\nDetails:")
            for discriminator in self.discriminators:
                discriminator.summary(line_length, positions, print_fn)
class AddSkipToOutputThreeStrideMsDisc(Model):
    """Three-stride generator with additive skips AND a residual output
    (generator output added to the input image), paired with a two-scale
    multiscale discriminator."""

    class Generator(tf.keras.Model):
        """Like the add-skip three-stride generator, but the tanh output is
        added to the first 3 input channels and clipped to [-1, 1]."""

        def __init__(self, config):
            super(AddSkipToOutputThreeStrideMsDisc.Generator, self).__init__()
            tf.logging.fatal("Adding skip connections, including output!")
            initial_filters = 32*1
            self.encoder = [
                ConvBlock(initial_filters*1, 5, 2),
                ConvBlock(initial_filters*2, 5, 2),
                ConvBlock(initial_filters*4, 5, 2),
            ]
            self.res = [
                ResidualBlock(initial_filters*8, 5, 1, project_shortcut=True),
                ResidualBlock(initial_filters*8, 5, 1, project_shortcut=True),
                ResidualBlock(initial_filters*8, 5, 1, project_shortcut=True),
                # Halved so the output matches e2's channel count for tf.add.
                ResidualBlock(initial_filters*8//2, 5, 1, project_shortcut=True),
            ]
            self.decoder = [
                # Halved so each deconv output matches the skip it is added to.
                DeconvBlock(initial_filters*4//2, 5, 2),
                DeconvBlock(initial_filters*2//2, 5, 2),
                DeconvBlock(initial_filters*1, 5, 2),
            ]
            # Single output channel unless the target images are RGB.
            self.final_conv = Conv(3 if config.has_colored_target else 1, 5, 1)

        def call(self, x, training=True):
            # First 3 channels are assumed to be the image to refine.
            images = x[:, :, :, :3]
            e0 = self.encoder[0](x, training=training)
            e1 = self.encoder[1](e0, training=training)
            e2 = self.encoder[2](e1, training=training)
            x = e2
            for block in self.res:
                x = block(x, training=training)
            x = tf.add(x, e2)
            x = self.decoder[0](x, training=training)
            x = tf.add(x, e1)
            x = self.decoder[1](x, training=training)
            x = tf.add(x, e0)
            x = self.decoder[2](x, training=training)
            # Residual output: predicted delta + input image, clipped to [-1, 1].
            return tf.minimum(tf.maximum(tf.add(tanh(self.final_conv(x)), images), -1), 1)

    class Discriminator(tf.keras.Model):
        """Averages the logits of per-scale sub-discriminators (1x and 0.5x)."""

        class MultiscaleDisc(tf.keras.Model):
            """Single-scale conv discriminator; optionally resizes its input
            by `scaling_factor` before the conv stack."""

            def __init__(self, config, scaling_factor, dropout, resolution):
                super(AddSkipToOutputThreeStrideMsDisc.Discriminator.MultiscaleDisc, self).__init__()
                assert scaling_factor > 0
                if scaling_factor != 1:
                    size_x = int(resolution * scaling_factor)
                    size_y = int(resolution * scaling_factor)
                    tf.logging.info("Multiscale discriminator operating on resolution: {}x{}".format(size_x, size_y))
                    self.resize = lambda x: tf.image.resize_nearest_neighbor(x, (size_x, size_y))
                else:
                    tf.logging.info("Multiscale discriminator operating on regular resolution")
                    self.resize = lambda x: x
                initial_filters = 32*1
                self.blocks = [
                    ConvBlock(initial_filters*1, 4, 2),
                    ConvBlock(initial_filters*2, 4, 2),
                    ConvBlock(initial_filters*4, 4, 2),
                    ConvBlock(initial_filters*8, 4, 2),
                ]
                self.dropout = dropout
                self.flatten = Flatten()
                self.fc = Dense(config.discriminator_classes, use_bias=False)

            def call(self, x, training):
                x = self.resize(x)
                for block in self.blocks:
                    x = block(x, training=training)
                x = self.dropout(x, training=training)
                x = self.flatten(x)
                return self.fc(x)

        def __init__(self, config):
            super(AddSkipToOutputThreeStrideMsDisc.Discriminator, self).__init__()
            resolution = 256//2
            tf.logging.fatal("Using MS disc for {0}x{0} patches!".format(resolution))
            self.discriminators = [AddSkipToOutputThreeStrideMsDisc.Discriminator.MultiscaleDisc(
                config, factor, Dropout(0.3), resolution) for factor in [1, 0.5]]

        def call(self, x, training=True):
            # Average the per-scale logits into one score per sample.
            return tf.reduce_mean(tf.concat([disc(x, training) for disc in self.discriminators], axis=-1), axis=-1)

        def summary(self, line_length=None, positions=None, print_fn=None):
            # Bug fix: the base-class summary tolerates print_fn=None, but the
            # direct print_fn(...) call below crashed with the default argument.
            print_fn = print_fn or print
            super(AddSkipToOutputThreeStrideMsDisc.Discriminator, self).summary(line_length, positions, print_fn)
            print_fn("\nDetails:")
            for discriminator in self.discriminators:
                discriminator.summary(line_length, positions, print_fn)
class AddSkipToOutputTwoStrideMsDisc(Model):
    """Two-stride generator with additive skips AND a residual output
    (generator output added to the input image), paired with a two-scale
    multiscale discriminator."""

    class Generator(tf.keras.Model):
        """Like the add-skip two-stride generator, but the tanh output is
        added to the first 3 input channels and clipped to [-1, 1]."""

        def __init__(self, config):
            super(AddSkipToOutputTwoStrideMsDisc.Generator, self).__init__()
            tf.logging.fatal("Adding skip connections, including output!")
            initial_filters = 32*1
            self.encoder = [
                ConvBlock(initial_filters*1, 5, 2),
                ConvBlock(initial_filters*2, 5, 2),
            ]
            self.res = [
                ResidualBlock(initial_filters*4, 5, 1, project_shortcut=True),
                ResidualBlock(initial_filters*4, 5, 1, project_shortcut=True),
                ResidualBlock(initial_filters*4, 5, 1, project_shortcut=True),
                # Halved so the output matches e1's channel count for tf.add.
                ResidualBlock(initial_filters*4//2, 5, 1, project_shortcut=True),
            ]
            self.decoder = [
                # Halved so the deconv output matches e0's channel count.
                DeconvBlock(initial_filters*2//2, 5, 2),
                DeconvBlock(initial_filters*1, 5, 2),
            ]
            # Single output channel unless the target images are RGB.
            self.final_conv = Conv(3 if config.has_colored_target else 1, 5, 1)

        def call(self, x, training=True):
            # First 3 channels are assumed to be the image to refine.
            images = x[:, :, :, :3]
            e0 = self.encoder[0](x, training=training)
            e1 = self.encoder[1](e0, training=training)
            x = e1
            for block in self.res:
                x = block(x, training=training)
            x = tf.add(x, e1)
            x = self.decoder[0](x, training=training)
            x = tf.add(x, e0)
            x = self.decoder[1](x, training=training)
            # Residual output: predicted delta + input image, clipped to [-1, 1].
            return tf.minimum(tf.maximum(tf.add(tanh(self.final_conv(x)), images), -1), 1)

    class Discriminator(tf.keras.Model):
        """Averages the logits of per-scale sub-discriminators (1x and 0.5x)."""

        class MultiscaleDisc(tf.keras.Model):
            """Single-scale conv discriminator; optionally resizes its input
            by `scaling_factor` before the conv stack."""

            def __init__(self, config, scaling_factor, dropout, resolution):
                super(AddSkipToOutputTwoStrideMsDisc.Discriminator.MultiscaleDisc, self).__init__()
                assert scaling_factor > 0
                if scaling_factor != 1:
                    size_x = int(resolution * scaling_factor)
                    size_y = int(resolution * scaling_factor)
                    tf.logging.info("Multiscale discriminator operating on resolution: {}x{}".format(size_x, size_y))
                    self.resize = lambda x: tf.image.resize_nearest_neighbor(x, (size_x, size_y))
                else:
                    tf.logging.info("Multiscale discriminator operating on regular resolution")
                    self.resize = lambda x: x
                initial_filters = 32*1
                self.blocks = [
                    ConvBlock(initial_filters*1, 4, 2),
                    ConvBlock(initial_filters*2, 4, 2),
                    ConvBlock(initial_filters*4, 4, 2),
                    ConvBlock(initial_filters*8, 4, 2),
                ]
                self.dropout = dropout
                self.flatten = Flatten()
                self.fc = Dense(config.discriminator_classes, use_bias=False)

            def call(self, x, training):
                x = self.resize(x)
                for block in self.blocks:
                    x = block(x, training=training)
                x = self.dropout(x, training=training)
                x = self.flatten(x)
                return self.fc(x)

        def __init__(self, config):
            super(AddSkipToOutputTwoStrideMsDisc.Discriminator, self).__init__()
            resolution = 256//2
            tf.logging.fatal("Using MS disc for {0}x{0} patches!".format(resolution))
            self.discriminators = [AddSkipToOutputTwoStrideMsDisc.Discriminator.MultiscaleDisc(
                config, factor, Dropout(0.3), resolution) for factor in [1, 0.5]]

        def call(self, x, training=True):
            # Average the per-scale logits into one score per sample.
            return tf.reduce_mean(tf.concat([disc(x, training) for disc in self.discriminators], axis=-1), axis=-1)

        def summary(self, line_length=None, positions=None, print_fn=None):
            # Bug fix: the base-class summary tolerates print_fn=None, but the
            # direct print_fn(...) call below crashed with the default argument.
            print_fn = print_fn or print
            super(AddSkipToOutputTwoStrideMsDisc.Discriminator, self).summary(line_length, positions, print_fn)
            print_fn("\nDetails:")
            for discriminator in self.discriminators:
                discriminator.summary(line_length, positions, print_fn)
class AddSkipToOutputTwoStrideMsDiscAlternative(Model):
    """Alternative configuration slot for AddSkipToOutputTwoStrideMsDisc;
    currently identical to that architecture (kept separate so experiments
    can diverge without renaming checkpoints)."""

    class Generator(tf.keras.Model):
        """Two-stride encoder, additive skips, residual output clipped
        to [-1, 1]."""

        def __init__(self, config):
            super(AddSkipToOutputTwoStrideMsDiscAlternative.Generator, self).__init__()
            tf.logging.fatal("Adding skip connections, including output!")
            initial_filters = 32*1
            self.encoder = [
                ConvBlock(initial_filters*1, 5, 2),
                ConvBlock(initial_filters*2, 5, 2),
            ]
            self.res = [
                ResidualBlock(initial_filters*4, 5, 1, project_shortcut=True),
                ResidualBlock(initial_filters*4, 5, 1, project_shortcut=True),
                ResidualBlock(initial_filters*4, 5, 1, project_shortcut=True),
                # Halved so the output matches e1's channel count for tf.add.
                ResidualBlock(initial_filters*4//2, 5, 1, project_shortcut=True),
            ]
            self.decoder = [
                # Halved so the deconv output matches e0's channel count.
                DeconvBlock(initial_filters*2//2, 5, 2),
                DeconvBlock(initial_filters*1, 5, 2),
            ]
            # Single output channel unless the target images are RGB.
            self.final_conv = Conv(3 if config.has_colored_target else 1, 5, 1)

        def call(self, x, training=True):
            # First 3 channels are assumed to be the image to refine.
            images = x[:, :, :, :3]
            e0 = self.encoder[0](x, training=training)
            e1 = self.encoder[1](e0, training=training)
            x = e1
            for block in self.res:
                x = block(x, training=training)
            x = tf.add(x, e1)
            x = self.decoder[0](x, training=training)
            x = tf.add(x, e0)
            x = self.decoder[1](x, training=training)
            # Residual output: predicted delta + input image, clipped to [-1, 1].
            return tf.minimum(tf.maximum(tf.add(tanh(self.final_conv(x)), images), -1), 1)

    class Discriminator(tf.keras.Model):
        """Averages the logits of per-scale sub-discriminators (1x and 0.5x)."""

        class MultiscaleDisc(tf.keras.Model):
            """Single-scale conv discriminator; optionally resizes its input
            by `scaling_factor` before the conv stack."""

            def __init__(self, config, scaling_factor, dropout, resolution):
                super(AddSkipToOutputTwoStrideMsDiscAlternative.Discriminator.MultiscaleDisc, self).__init__()
                assert scaling_factor > 0
                if scaling_factor != 1:
                    size_x = int(resolution * scaling_factor)
                    size_y = int(resolution * scaling_factor)
                    tf.logging.info("Multiscale discriminator operating on resolution: {}x{}".format(size_x, size_y))
                    self.resize = lambda x: tf.image.resize_nearest_neighbor(x, (size_x, size_y))
                else:
                    tf.logging.info("Multiscale discriminator operating on regular resolution")
                    self.resize = lambda x: x
                initial_filters = 32*1
                self.blocks = [
                    ConvBlock(initial_filters*1, 4, 2),
                    ConvBlock(initial_filters*2, 4, 2),
                    ConvBlock(initial_filters*4, 4, 2),
                    ConvBlock(initial_filters*8, 4, 2),
                ]
                self.dropout = dropout
                self.flatten = Flatten()
                self.fc = Dense(config.discriminator_classes, use_bias=False)

            def call(self, x, training):
                x = self.resize(x)
                for block in self.blocks:
                    x = block(x, training=training)
                x = self.dropout(x, training=training)
                x = self.flatten(x)
                return self.fc(x)

        def __init__(self, config):
            super(AddSkipToOutputTwoStrideMsDiscAlternative.Discriminator, self).__init__()
            resolution = 256//2
            tf.logging.fatal("Using MS disc for {0}x{0} patches!".format(resolution))
            self.discriminators = [AddSkipToOutputTwoStrideMsDiscAlternative.Discriminator.MultiscaleDisc(
                config, factor, Dropout(0.3), resolution) for factor in [1, 0.5]]

        def call(self, x, training=True):
            # Average the per-scale logits into one score per sample.
            return tf.reduce_mean(tf.concat([disc(x, training) for disc in self.discriminators], axis=-1), axis=-1)

        def summary(self, line_length=None, positions=None, print_fn=None):
            # Bug fix: the base-class summary tolerates print_fn=None, but the
            # direct print_fn(...) call below crashed with the default argument.
            print_fn = print_fn or print
            super(AddSkipToOutputTwoStrideMsDiscAlternative.Discriminator, self).summary(line_length, positions, print_fn)
            print_fn("\nDetails:")
            for discriminator in self.discriminators:
                discriminator.summary(line_length, positions, print_fn)
class SkipResidualMsDiscPathoSeparate(Model):
    """Concat-skip generator that re-injects a (downsampled) pathology map at
    every stage, paired with a two-scale multiscale discriminator."""

    class Generator(tf.keras.Model):
        """Two-stride encoder / decoder with concat skips; the pathology map
        is nearest-neighbor downsampled and concatenated at each level."""

        def __init__(self, config):
            super(SkipResidualMsDiscPathoSeparate.Generator, self).__init__()
            initial_filters = 32*1
            self.encoder = [
                ConvBlock(initial_filters*1, 5, 2),
                ConvBlock(initial_filters*2, 5, 2),
            ]
            self.res = [
                ResidualBlock(initial_filters*4, 5, 1, project_shortcut=True),
                ResidualBlock(initial_filters*4, 5, 1, project_shortcut=True),
                ResidualBlock(initial_filters*4, 5, 1, project_shortcut=True),
                ResidualBlock(initial_filters*4, 5, 1, project_shortcut=True),
            ]
            self.decoder = [
                DeconvBlock(initial_filters*2, 5, 2),
                DeconvBlock(initial_filters*1, 5, 2),
            ]
            # Single output channel unless the target images are RGB.
            self.final_conv = Conv(3 if config.has_colored_target else 1, 5, 1)

        def call(self, x, training=True):
            # NOTE(review): patho is aliased to the full input x, so the first
            # concat duplicates the input channels — presumably x carries the
            # pathology channels and a separate split was intended; confirm.
            patho = x
            patho_2 = tf.image.resize_nearest_neighbor(patho, (patho.shape[1]//2, patho.shape[2]//2))
            patho_4 = tf.image.resize_nearest_neighbor(patho, (patho.shape[1]//4, patho.shape[2]//4))
            e0 = x
            e1 = self.encoder[0](tf.concat([x, patho], axis=-1), training=training)
            x = self.encoder[1](tf.concat([e1, patho_2], axis=-1), training=training)
            for block in self.res:
                x = block(tf.concat([x, patho_4], axis=-1), training=training)
            x = self.decoder[0](x, training=training)
            x = tf.concat([x, e1, patho_2], axis=-1)
            x = self.decoder[1](x, training=training)
            x = tf.concat([x, e0, patho], axis=-1)
            return tanh(self.final_conv(x))

    class Discriminator(tf.keras.Model):
        """Averages the logits of per-scale sub-discriminators (1x and 0.5x)."""

        class MultiscaleDisc(tf.keras.Model):
            """Single-scale conv discriminator; optionally resizes its input
            by `scaling_factor` before the conv stack."""

            def __init__(self, config, scaling_factor, dropout, resolution):
                super(SkipResidualMsDiscPathoSeparate.Discriminator.MultiscaleDisc, self).__init__()
                assert scaling_factor > 0
                if scaling_factor != 1:
                    size_x = int(resolution * scaling_factor)
                    size_y = int(resolution * scaling_factor)
                    tf.logging.info("Multiscale discriminator operating on resolution: {}x{}".format(size_x, size_y))
                    self.resize = lambda x: tf.image.resize_nearest_neighbor(x, (size_x, size_y))
                else:
                    tf.logging.info("Multiscale discriminator operating on regular resolution")
                    self.resize = lambda x: x
                initial_filters = 32*1
                self.blocks = [
                    ConvBlock(initial_filters*1, 4, 2),
                    ConvBlock(initial_filters*2, 4, 2),
                    ConvBlock(initial_filters*4, 4, 2),
                    ConvBlock(initial_filters*8, 4, 2),
                ]
                self.dropout = dropout
                self.flatten = Flatten()
                self.fc = Dense(config.discriminator_classes, use_bias=False)

            def call(self, x, training):
                x = self.resize(x)
                for block in self.blocks:
                    x = block(x, training=training)
                x = self.dropout(x, training=training)
                x = self.flatten(x)
                return self.fc(x)

        def __init__(self, config):
            super(SkipResidualMsDiscPathoSeparate.Discriminator, self).__init__()
            resolution = 256//2
            tf.logging.fatal("Using MS disc for {0}x{0} patches!".format(resolution))
            self.discriminators = [SkipResidualMsDiscPathoSeparate.Discriminator.MultiscaleDisc(
                config, factor, Dropout(0.3), resolution) for factor in [1, 0.5]]

        def call(self, x, training=True):
            # Average the per-scale logits into one score per sample.
            return tf.reduce_mean(tf.concat([disc(x, training) for disc in self.discriminators], axis=-1), axis=-1)

        def summary(self, line_length=None, positions=None, print_fn=None):
            # Bug fix: the base-class summary tolerates print_fn=None, but the
            # direct print_fn(...) call below crashed with the default argument.
            print_fn = print_fn or print
            super(SkipResidualMsDiscPathoSeparate.Discriminator, self).summary(line_length, positions, print_fn)
            print_fn("\nDetails:")
            for discriminator in self.discriminators:
                discriminator.summary(line_length, positions, print_fn)
class SkipResidualMsDiscGLowRes(Model):
    class Generator(tf.keras.Model):
        """Encoder/residual/decoder generator with U-Net-style skip
        concatenations; four stride-2 stages (16x downsampling)."""

        def __init__(self, config):
            super(SkipResidualMsDiscGLowRes.Generator, self).__init__()
            initial_filters = 32*2
            self.encoder = [
                ConvBlock(initial_filters*1, 5, 2),
                ConvBlock(initial_filters*2, 5, 2),
                ConvBlock(initial_filters*4, 5, 2),
                ConvBlock(initial_filters*8, 5, 2),
            ]
            self.res = [
                ResidualBlock(initial_filters*8, 5, 1, project_shortcut=True),
                ResidualBlock(initial_filters*8, 5, 1, project_shortcut=True),
                ResidualBlock(initial_filters*8, 5, 1, project_shortcut=True),
                ResidualBlock(initial_filters*8, 5, 1, project_shortcut=True),
            ]
            self.decoder = [
                DeconvBlock(initial_filters*8, 5, 2),
                DeconvBlock(initial_filters*4, 5, 2),
                DeconvBlock(initial_filters*2, 5, 2),
                DeconvBlock(initial_filters*1, 5, 2),
            ]
            # 3 output channels for RGB targets, 1 for grayscale.
            self.final_conv = Conv(3 if config.has_colored_target else 1, 5, 1)

        def call(self, x, training=True):
            # Keep every encoder activation (incl. the raw input) for skips.
            e0 = x
            e1 = self.encoder[0](x, training=training)
            e2 = self.encoder[1](e1, training=training)
            e3 = self.encoder[2](e2, training=training)
            x = self.encoder[3](e3, training=training)
            for block in self.res:
                x = block(x, training=training)
            # Decode, concatenating the matching-resolution encoder feature
            # after each upsampling stage.
            x = self.decoder[0](x, training=training)
            x = tf.concat([x, e3], axis=-1)
            x = self.decoder[1](x, training=training)
            x = tf.concat([x, e2], axis=-1)
            x = self.decoder[2](x, training=training)
            x = tf.concat([x, e1], axis=-1)
            x = self.decoder[3](x, training=training)
            x = tf.concat([x, e0], axis=-1)
            return tanh(self.final_conv(x))

    class Discriminator(tf.keras.Model):
        """Multi-scale discriminator averaging logits of per-scale classifiers."""

        class MultiscaleDisc(tf.keras.Model):
            """Strided-conv classifier on one (optionally resized) scale."""

            def __init__(self, config, scaling_factor, dropout, resolution):
                super(SkipResidualMsDiscGLowRes.Discriminator.MultiscaleDisc, self).__init__()
                assert scaling_factor > 0
                if scaling_factor != 1:
                    size_x = int(resolution * scaling_factor)
                    size_y = int(resolution * scaling_factor)
                    tf.logging.info("Multiscale discriminator operating on resolution: {}x{}".format(size_x, size_y))
                    self.resize = lambda x: tf.image.resize_nearest_neighbor(x, (size_x, size_y))
                else:
                    tf.logging.info("Multiscale discriminator operating on regular resolution")
                    self.resize = lambda x: x
                initial_filters = 32*2
                self.blocks = [
                    ConvBlock(initial_filters*1, 4, 2),
                    ConvBlock(initial_filters*2, 4, 2),
                    ConvBlock(initial_filters*4, 4, 2),
                    ConvBlock(initial_filters*8, 4, 2),
                    ConvBlock(initial_filters*16, 4, 2),
                ]
                self.dropout = dropout
                self.flatten = Flatten()
                self.fc = Dense(config.discriminator_classes, use_bias=False)

            def call(self, x, training):
                x = self.resize(x)
                for block in self.blocks:
                    x = block(x, training=training)
                x = self.dropout(x, training=training)
                x = self.flatten(x)
                x = self.fc(x)
                return x

        def __init__(self, config):
            super(SkipResidualMsDiscGLowRes.Discriminator, self).__init__()
            resolution = 256
            # NOTE(review): fatal-level logging for an informational message;
            # kept as-is to avoid changing observable log behavior.
            tf.logging.fatal("Using MS disc for {0}x{0} patches!".format(resolution))
            self.discriminators = [SkipResidualMsDiscGLowRes.Discriminator.MultiscaleDisc(
                config, factor, Dropout(0.3), resolution) for factor in [1, 0.5]]

        def call(self, x, training=True):
            # Mean over the concatenated per-scale logits.
            return tf.reduce_mean(tf.concat([disc(x, training) for disc in self.discriminators], axis=-1), axis=-1)

        def summary(self, line_length=None, positions=None, print_fn=None):
            """Print the top-level summary followed by one per sub-discriminator."""
            super(SkipResidualMsDiscGLowRes.Discriminator, self).summary(line_length, positions, print_fn)
            # BUGFIX: print_fn defaults to None; Keras' summary() falls back
            # to print internally, but the lines below called None directly.
            if print_fn is None:
                print_fn = print
            print_fn("\nDetails:")
            for discriminator in self.discriminators:
                discriminator.summary(line_length, positions, print_fn)
class SkipResidualMsDiscGHighRes(Model):
    class Generator(tf.keras.Model):
        """Shallow (two stride-2 stages) generator variant with residual
        bottleneck and skip concatenations."""

        def __init__(self, config):
            super(SkipResidualMsDiscGHighRes.Generator, self).__init__()
            initial_filters = 32*2
            self.encoder = [
                ConvBlock(initial_filters*1, 5, 2),
                ConvBlock(initial_filters*2, 5, 2),
            ]
            # project_shortcut handles the channel jump from *2 to *8 filters.
            self.res = [
                ResidualBlock(initial_filters*8, 5, 1, project_shortcut=True),
                ResidualBlock(initial_filters*8, 5, 1, project_shortcut=True),
                ResidualBlock(initial_filters*8, 5, 1, project_shortcut=True),
                ResidualBlock(initial_filters*8, 5, 1, project_shortcut=True),
            ]
            self.decoder = [
                DeconvBlock(initial_filters*2, 5, 2),
                DeconvBlock(initial_filters*1, 5, 2),
            ]
            # 3 output channels for RGB targets, 1 for grayscale.
            self.final_conv = Conv(3 if config.has_colored_target else 1, 5, 1)

        def call(self, x, training=True):
            e0 = x
            e1 = self.encoder[0](x, training=training)
            x = self.encoder[1](e1, training=training)
            for block in self.res:
                x = block(x, training=training)
            x = self.decoder[0](x, training=training)
            x = tf.concat([x, e1], axis=-1)
            x = self.decoder[1](x, training=training)
            x = tf.concat([x, e0], axis=-1)
            return tanh(self.final_conv(x))

    class Discriminator(tf.keras.Model):
        """Multi-scale discriminator averaging logits of per-scale classifiers."""

        class MultiscaleDisc(tf.keras.Model):
            """Strided-conv classifier on one (optionally resized) scale."""

            def __init__(self, config, scaling_factor, dropout, resolution):
                super(SkipResidualMsDiscGHighRes.Discriminator.MultiscaleDisc, self).__init__()
                assert scaling_factor > 0
                if scaling_factor != 1:
                    size_x = int(resolution * scaling_factor)
                    size_y = int(resolution * scaling_factor)
                    tf.logging.info("Multiscale discriminator operating on resolution: {}x{}".format(size_x, size_y))
                    self.resize = lambda x: tf.image.resize_nearest_neighbor(x, (size_x, size_y))
                else:
                    tf.logging.info("Multiscale discriminator operating on regular resolution")
                    self.resize = lambda x: x
                initial_filters = 32*2
                self.blocks = [
                    ConvBlock(initial_filters*1, 4, 2),
                    ConvBlock(initial_filters*2, 4, 2),
                    ConvBlock(initial_filters*4, 4, 2),
                    ConvBlock(initial_filters*8, 4, 2),
                    ConvBlock(initial_filters*16, 4, 2),
                ]
                self.dropout = dropout
                self.flatten = Flatten()
                self.fc = Dense(config.discriminator_classes, use_bias=False)

            def call(self, x, training):
                x = self.resize(x)
                for block in self.blocks:
                    x = block(x, training=training)
                x = self.dropout(x, training=training)
                x = self.flatten(x)
                x = self.fc(x)
                return x

        def __init__(self, config):
            super(SkipResidualMsDiscGHighRes.Discriminator, self).__init__()
            resolution = 256
            # NOTE(review): fatal-level logging for an informational message;
            # kept as-is to avoid changing observable log behavior.
            tf.logging.fatal("Using MS disc for {0}x{0} patches!".format(resolution))
            self.discriminators = [SkipResidualMsDiscGHighRes.Discriminator.MultiscaleDisc(
                config, factor, Dropout(0.3), resolution) for factor in [1, 0.5]]

        def call(self, x, training=True):
            # Mean over the concatenated per-scale logits.
            return tf.reduce_mean(tf.concat([disc(x, training) for disc in self.discriminators], axis=-1), axis=-1)

        def summary(self, line_length=None, positions=None, print_fn=None):
            """Print the top-level summary followed by one per sub-discriminator."""
            super(SkipResidualMsDiscGHighRes.Discriminator, self).summary(line_length, positions, print_fn)
            # BUGFIX: print_fn defaults to None; Keras' summary() falls back
            # to print internally, but the lines below called None directly.
            if print_fn is None:
                print_fn = print
            print_fn("\nDetails:")
            for discriminator in self.discriminators:
                discriminator.summary(line_length, positions, print_fn)
class UNet(Model):
    class Generator(tf.keras.Model):
        """U-Net-style generator: each downsampling step pairs an unstrided
        conv with a strided one, mirrored on the decoder side, with skip
        concatenations at matching resolutions."""

        def __init__(self, config):
            super(UNet.Generator, self).__init__()
            initial_filters = 32*2
            self.encoder_unstrided = [
                ConvBlock(initial_filters*1, 5, 1),
                ConvBlock(initial_filters*2, 5, 1),
                ConvBlock(initial_filters*4, 5, 1),
            ]
            self.encoder_strided = [
                ConvBlock(initial_filters*1, 5, 2),
                ConvBlock(initial_filters*2, 5, 2),
                ConvBlock(initial_filters*4, 5, 2),
            ]
            self.res = [
                ResidualBlock(initial_filters*8, 5, 1),
                ResidualBlock(initial_filters*8, 5, 1),
                ResidualBlock(initial_filters*8, 5, 1),
                ResidualBlock(initial_filters*8, 5, 1),
            ]
            self.decoder_strided = [
                DeconvBlock(initial_filters*4, 5, 2),
                DeconvBlock(initial_filters*2, 5, 2),
                DeconvBlock(initial_filters*1, 5, 2),
            ]
            self.decoder_unstrided = [
                DeconvBlock(initial_filters*4, 5, 1),
                DeconvBlock(initial_filters*2, 5, 1),
                DeconvBlock(initial_filters*1, 5, 1),
            ]
            # 3 output channels for RGB targets, 1 for grayscale.
            self.final_conv = Conv(3 if config.has_colored_target else 1, 5, 1)

        def call(self, x, training=True):
            # Keep the unstrided activations for the skip concatenations below.
            e0 = self.encoder_unstrided[0](x, training=training)
            e1 = self.encoder_strided[0](e0, training=training)
            e1 = self.encoder_unstrided[1](e1, training=training)
            e2 = self.encoder_strided[1](e1, training=training)
            e2 = self.encoder_unstrided[2](e2, training=training)
            x = self.encoder_strided[2](e2, training=training)
            for block in self.res:
                x = block(x, training=training)
            x = self.decoder_strided[0](x, training=training)
            x = tf.concat([x, e2], axis=-1)
            x = self.decoder_unstrided[0](x, training=training)
            x = self.decoder_strided[1](x, training=training)
            x = tf.concat([x, e1], axis=-1)
            x = self.decoder_unstrided[1](x, training=training)
            x = self.decoder_strided[2](x, training=training)
            x = tf.concat([x, e0], axis=-1)
            x = self.decoder_unstrided[2](x, training=training)
            # tanh keeps outputs in [-1, 1].
            return tanh(self.final_conv(x))

    class Discriminator(tf.keras.Model):
        """Single-scale strided-conv classifier."""

        def __init__(self, config):
            super(UNet.Discriminator, self).__init__()
            initial_filters = 64*2
            self.blocks = [
                ConvBlock(initial_filters*1, 4, 2),
                ConvBlock(initial_filters*2, 4, 2),
                ConvBlock(initial_filters*4, 4, 2),
                ConvBlock(initial_filters*8, 4, 2),
                ConvBlock(initial_filters*16, 4, 2),
            ]
            self.dropout = Dropout(0.3)
            self.flatten = Flatten()
            self.fc = Dense(config.discriminator_classes, use_bias=False)

        def call(self, x, training=True):
            for block in self.blocks:
                x = block(x, training=training)
            x = self.dropout(x, training=training)
            x = self.flatten(x)
            return self.fc(x)
class SimpleConv(Model):
    class Generator(tf.keras.Model):
        """Plain feed-forward generator: downsample, process at constant
        resolution, upsample — no skip connections."""

        def __init__(self, config):
            super(SimpleConv.Generator, self).__init__()
            initial_filters = 32*2
            self.blocks = [
                ConvBlock(initial_filters*1, 5, 2),
                ConvBlock(initial_filters*2, 5, 2),
                ConvBlock(initial_filters*4, 5, 2),
                ConvBlock(initial_filters*8, 5, 1),
                ConvBlock(initial_filters*8, 5, 1),
                ConvBlock(initial_filters*8, 5, 1),
                ConvBlock(initial_filters*8, 5, 1),
                DeconvBlock(initial_filters*4, 5, 2),
                DeconvBlock(initial_filters*2, 5, 2),
                DeconvBlock(initial_filters*1, 5, 2),
            ]
            # 3 output channels for RGB targets, 1 for grayscale.
            self.final_conv = Conv(3 if config.has_colored_target else 1, 5, 1)

        def call(self, x, training=True):
            for block in self.blocks:
                x = block(x, training=training)
            # tanh keeps outputs in [-1, 1].
            return tanh(self.final_conv(x))

    class Discriminator(tf.keras.Model):
        """Single-scale strided-conv classifier."""

        def __init__(self, config):
            super(SimpleConv.Discriminator, self).__init__()
            initial_filters = 64
            self.blocks = [
                ConvBlock(initial_filters*1, 4, 2),
                ConvBlock(initial_filters*2, 4, 2),
                ConvBlock(initial_filters*4, 4, 2),
                ConvBlock(initial_filters*8, 4, 2),
                ConvBlock(initial_filters*16, 4, 2),
            ]
            self.dropout = Dropout(0.3)
            self.flatten = Flatten()
            self.fc = Dense(config.discriminator_classes, use_bias=False)

        def call(self, x, training=True):
            for block in self.blocks:
                x = block(x, training=training)
            x = self.dropout(x, training=training)
            x = self.flatten(x)
            return self.fc(x)
| 35.912197
| 116
| 0.635019
| 10,192
| 83,029
| 5.014717
| 0.020506
| 0.149286
| 0.098259
| 0.038036
| 0.948738
| 0.943221
| 0.924927
| 0.920427
| 0.918822
| 0.899237
| 0
| 0.03723
| 0.24754
| 83,029
| 2,311
| 117
| 35.927737
| 0.780844
| 0.013321
| 0
| 0.849421
| 0
| 0
| 0.026606
| 0
| 0
| 0
| 0
| 0
| 0.006619
| 1
| 0.083839
| false
| 0
| 0.002758
| 0.006619
| 0.179812
| 0.026475
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
31cea93ba87683ab75d446da6c69a74bf3982b37
| 175
|
py
|
Python
|
tech_project/lib/python2.7/site-packages/formtools/wizard/storage/exceptions.py
|
priyamshah112/Project-Descripton-Blog
|
8e01016c6be79776c4f5ca75563fa3daa839e39e
|
[
"MIT"
] | 331
|
2015-01-09T01:25:47.000Z
|
2019-10-01T01:18:13.000Z
|
tech_project/lib/python2.7/site-packages/formtools/wizard/storage/exceptions.py
|
priyamshah112/Project-Descripton-Blog
|
8e01016c6be79776c4f5ca75563fa3daa839e39e
|
[
"MIT"
] | 97
|
2015-01-07T11:33:19.000Z
|
2019-09-29T16:41:56.000Z
|
tech_project/lib/python2.7/site-packages/formtools/wizard/storage/exceptions.py
|
priyamshah112/Project-Descripton-Blog
|
8e01016c6be79776c4f5ca75563fa3daa839e39e
|
[
"MIT"
] | 99
|
2015-01-20T13:17:28.000Z
|
2019-09-29T02:26:30.000Z
|
from django.core.exceptions import ImproperlyConfigured
class MissingStorage(ImproperlyConfigured):
    """Configuration error: no wizard storage backend is available."""
    pass
class NoFileStorageConfigured(ImproperlyConfigured):
    """Configuration error: no file storage backend has been configured."""
    pass
| 17.5
| 55
| 0.828571
| 14
| 175
| 10.357143
| 0.714286
| 0.331034
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.125714
| 175
| 9
| 56
| 19.444444
| 0.947712
| 0
| 0
| 0.4
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0.4
| 0.2
| 0
| 0.6
| 0
| 1
| 0
| 1
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 1
| 0
|
0
| 7
|
9eca03ed403a1faa164427d5ffb0c6c284c59967
| 5,848
|
py
|
Python
|
modoboa/admin/postfix_maps.py
|
vinaebizs/modoboa
|
fb1e7f4c023b7eb6be3aa77174bfa12fc653670e
|
[
"0BSD"
] | null | null | null |
modoboa/admin/postfix_maps.py
|
vinaebizs/modoboa
|
fb1e7f4c023b7eb6be3aa77174bfa12fc653670e
|
[
"0BSD"
] | null | null | null |
modoboa/admin/postfix_maps.py
|
vinaebizs/modoboa
|
fb1e7f4c023b7eb6be3aa77174bfa12fc653670e
|
[
"0BSD"
] | null | null | null |
"""Map file definitions for postfix."""
class DomainsMap(object):
    """Map to list all domains."""

    filename = 'sql-domains.cf'
    # mysql and sqlite share the exact same query text; postgres differs only
    # in treating `enabled` as a boolean instead of comparing against 1.
    # '%s' is the lookup key expanded by postfix, not a Python placeholder.
    mysql = sqlite = (
        "SELECT name FROM admin_domain "
        "WHERE name='%s' AND type='domain' AND enabled=1"
    )
    postgres = (
        "SELECT name FROM admin_domain "
        "WHERE name='%s' AND type='domain' AND enabled"
    )
class DomainsAliasesMap(object):
    """Map to list all domain aliases."""

    filename = 'sql-domain-aliases.cf'
    # Resolve an alias domain name ('%s', substituted by postfix) to its
    # enabled target domain. mysql/sqlite compare enabled=1; postgres uses
    # the boolean column directly.
    mysql = (
        "SELECT dom.name FROM admin_domain dom "
        "INNER JOIN admin_domainalias domal ON dom.id=domal.target_id "
        "WHERE domal.name='%s' AND domal.enabled=1 AND dom.enabled=1"
    )
    postgres = (
        "SELECT dom.name FROM admin_domain dom "
        "INNER JOIN admin_domainalias domal ON dom.id=domal.target_id "
        "WHERE domal.name='%s' AND domal.enabled AND dom.enabled"
    )
    sqlite = (
        "SELECT dom.name FROM admin_domain dom "
        "INNER JOIN admin_domainalias domal ON dom.id=domal.target_id "
        "WHERE domal.name='%s' AND domal.enabled=1 AND dom.enabled=1"
    )
class AliasesMap(object):
    """A map to list all mailbox aliases."""

    filename = 'sql-aliases.cf'
    # Resolve an alias address ('%s') to its recipient addresses, skipping
    # disabled or expired aliases. mysql/sqlite compare enabled=1; postgres
    # uses the boolean column directly.
    mysql = (
        "SELECT alr.address FROM modoboa_admin_aliasrecipient AS alr "
        "INNER JOIN admin_alias AS al ON alr.alias_id=al.id "
        "WHERE al.enabled=1 AND al.address='%s' AND "
        "(al.expire_at IS NULL OR al.expire_at>now())"
    )
    postgres = (
        "SELECT alr.address FROM modoboa_admin_aliasrecipient AS alr "
        "INNER JOIN admin_alias AS al ON alr.alias_id=al.id "
        "WHERE al.enabled AND al.address='%s' AND "
        "(al.expire_at IS NULL OR al.expire_at>now())"
    )
    sqlite = (
        "SELECT alr.address FROM modoboa_admin_aliasrecipient AS alr "
        "INNER JOIN admin_alias AS al ON alr.alias_id=al.id "
        "WHERE al.enabled=1 AND al.address='%s' AND "
        "(al.expire_at IS NULL OR al.expire_at>now())"
    )
class MaintainMap(object):
    """Map files to list non available mailboxes."""

    filename = 'sql-maintain.cf'
    # The query text was character-identical for all three backends, so it is
    # now defined once and shared. '%d' / '%u' are expanded by postfix
    # (domain part / user part of the looked-up address).
    mysql = postgres = sqlite = (
        "SELECT '450 Requested mail action not taken: mailbox unavailable' "
        "FROM admin_mailbox mb INNER JOIN admin_domain dom "
        "ON mb.domain_id=dom.id INNER JOIN admin_mailboxoperation mbop "
        "ON mbop.mailbox_id=mb.id WHERE dom.name='%d' AND mb.address='%u' "
        "LIMIT 1"
    )
class SenderLoginMailboxMap(object):
    """Map file to list authorized sender addresses (from mailboxes)."""

    filename = "sql-sender-login-mailboxes.cf"
    # '%s' is expanded by postfix. The mysql variant carries a trailing
    # space in the original source; it is preserved verbatim here.
    mysql = "SELECT email FROM core_user WHERE email='%s' AND is_active=1 "
    postgres = "SELECT email FROM core_user WHERE email='%s' AND is_active"
    sqlite = "SELECT email FROM core_user WHERE email='%s' AND is_active=1"
class SenderLoginMailboxExtraMap(object):
    """Map file to list per-mailbox extra addresses."""

    filename = "sql-sender-login-mailboxes-extra.cf"
    # FIXME: is it necessary to filter against user status?
    # Build the mailbox's full address for an extra sender address ('%s').
    # mysql uses concat(); postgres/sqlite use the || operator.
    mysql = (
        "SELECT concat(mb.address, '@', dom.name) FROM admin_mailbox mb "
        "INNER JOIN admin_senderaddress sad ON sad.mailbox_id=mb.id "
        "INNER JOIN admin_domain dom ON dom.id=mb.domain_id "
        "WHERE sad.address='%s'"
    )
    postgres = (
        "SELECT mb.address || '@' || dom.name FROM admin_mailbox mb "
        "INNER JOIN admin_senderaddress sad ON sad.mailbox_id=mb.id "
        "INNER JOIN admin_domain dom ON dom.id=mb.domain_id "
        "WHERE sad.address='%s'"
    )
    sqlite = (
        "SELECT mb.address || '@' || dom.name FROM admin_mailbox mb "
        "INNER JOIN admin_senderaddress sad ON sad.mailbox_id=mb.id "
        "INNER JOIN admin_domain dom ON dom.id=mb.domain_id "
        "WHERE sad.address='%s'"
    )
class SenderLoginAliasMap(object):
    """Map file to list authorized sender addresses (from aliases)."""

    filename = "sql-sender-login-aliases.cf"
    # Build the recipient mailbox's full address for an enabled alias ('%s').
    # mysql uses concat() and enabled=1; postgres uses || and the boolean.
    mysql = (
        "SELECT concat(mb.address, '@', dom.name) FROM admin_mailbox mb "
        "INNER JOIN modoboa_admin_aliasrecipient alr ON alr.r_mailbox_id=mb.id"
        " INNER JOIN admin_domain dom ON dom.id=mb.domain_id"
        " INNER JOIN admin_alias al ON alr.alias_id=al.id "
        "WHERE al.enabled=1 AND al.address='%s'"
    )
    postgres = (
        "SELECT mb.address || '@' || dom.name FROM admin_mailbox mb "
        "INNER JOIN modoboa_admin_aliasrecipient alr ON alr.r_mailbox_id=mb.id"
        " INNER JOIN admin_domain dom ON dom.id=mb.domain_id"
        " INNER JOIN admin_alias al ON alr.alias_id=al.id "
        "WHERE al.enabled AND al.address='%s'"
    )
    sqlite = (
        "SELECT mb.address || '@' || dom.name FROM admin_mailbox mb "
        "INNER JOIN modoboa_admin_aliasrecipient alr ON alr.r_mailbox_id=mb.id"
        " INNER JOIN admin_domain dom ON dom.id=mb.domain_id"
        " INNER JOIN admin_alias al ON alr.alias_id=al.id "
        "WHERE al.enabled=1 AND al.address='%s'"
    )
| 35.017964
| 79
| 0.633379
| 823
| 5,848
| 4.387606
| 0.117861
| 0.067294
| 0.093049
| 0.053171
| 0.836887
| 0.804486
| 0.804486
| 0.804486
| 0.804486
| 0.777901
| 0
| 0.005508
| 0.254959
| 5,848
| 166
| 80
| 35.228916
| 0.823273
| 0.067202
| 0
| 0.6875
| 0
| 0.023438
| 0.693018
| 0.099926
| 0
| 0
| 0
| 0.006024
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0.273438
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
9ef6b457989b8796f95114adbc70c4ad849b972d
| 18,369
|
py
|
Python
|
staging/staging/views.py
|
lexis-project/ddi-service-apis
|
9e96c4159154d70613b1977a8ea28374c038b463
|
[
"Apache-2.0"
] | null | null | null |
staging/staging/views.py
|
lexis-project/ddi-service-apis
|
9e96c4159154d70613b1977a8ea28374c038b463
|
[
"Apache-2.0"
] | null | null | null |
staging/staging/views.py
|
lexis-project/ddi-service-apis
|
9e96c4159154d70613b1977a8ea28374c038b463
|
[
"Apache-2.0"
] | null | null | null |
from . import errors
from . import trigger_task
from . import validate_input
from . import replication_api
from . import staging_api
from . import utils
from django.http import HttpResponse
from django.views.decorators.csrf import csrf_exempt
import requests
import json
from celery.result import AsyncResult
from irods.connection import ExceptionOpenIDAuthUrl
import yaml
# Load the deployment configuration (target systems, keycloak endpoint) once
# at import time; every view below reads from this module-level dict.
with open("/etc/staging_api/system.yml") as file:
    systems = yaml.load(file, Loader=yaml.FullLoader)
def requestValidateToken(token):
    """Ask the keycloak microservice whether *token* is still active.

    Returns 200 for an active token, 401 for an inactive one, and the
    upstream HTTP status code for any other response.
    """
    url = systems["keycloak"]["microservice"] + '/validate_token'
    params = {'provider': 'keycloak_openid', 'access_token': token}
    response = requests.get(url, params=params)
    if response.status_code != 200:
        return response.status_code
    body = response.json()
    return 200 if body['active'] else 401
def index(request):
    """Landing page: confirm the staging API is reachable."""
    return HttpResponse("Welcome to the staging api")
@csrf_exempt
def stage(request):
    """POST endpoint: trigger an asynchronous staging task.

    Expects a JSON body and a Bearer token in the Authorization header;
    returns 201 with the Celery request id, or an error response.
    """
    if request.method == 'POST':
        try:
            data = json.loads(request.body.decode('utf-8'))
            # Raises AttributeError when the header is absent (get() -> None)
            # and IndexError when it is not of the form "Bearer <token>".
            token = request.headers.get('Authorization').split(" ")[1]
            (secrets, user, dditoken, refreshtoken,
             error) = utils.getDDIAttributes(token)
            if error is not None:
                check = 401
                return HttpResponse(
                    '{"status": "%s", "errorString": "%s"}' %
                    (check, error), content_type='application/json', status=check)
            if dditoken is not None:
                data["token"] = dditoken
            if secrets is not None:
                data["secrets"] = secrets
            validate_input.validate_staging_input_body(data)
            staging = trigger_task.trigger_staging_task(data)
            if staging.status == "FAILURE":
                return errors.E403()
            task_id = staging.id
            response_data = {'request_id': task_id}
            return HttpResponse(
                json.dumps(response_data),
                content_type="application/json",
                status="201")
        except ExceptionOpenIDAuthUrl:
            return errors.AuthURL()
        except json.decoder.JSONDecodeError:
            return errors.MalformedRequest("Invalid JSON")
        # BUGFIX: these two clauses were listed after `except Exception` and
        # were therefore unreachable; missing/malformed auth headers now map
        # to a NoAuth response as intended.
        except AttributeError:
            return errors.NoAuth()
        except IndexError:
            return errors.NoAuth()
        except Exception as e:
            return errors.MalformedRequest(str(e))
    return errors.E405()
@csrf_exempt
def check_status(request, req_id):
    """GET endpoint: report the state of staging task *req_id*.

    On success the response also carries the resulting target path.
    """
    if request.method != 'GET':
        return errors.E405()
    target_path = None
    try:
        res = AsyncResult(str(req_id))
        if str(res.state) == "PENDING":
            result = "Task still in the queue, or task does not exist"
        elif str(res.state) == "FAILURE":
            result = "Task Failed, reason: " + str(res.info)
        elif str(res.ready()) == "True" and str(res.state) == "SUCCESS":
            result = "Transfer completed"
            data = res.get()
            # assumes the task result is a sequence whose second element is
            # the target path — TODO confirm against the staging task.
            target_path = data[1]
        else:
            result = "In progress"
        response_data = {'status': result}
        if target_path is not None:
            response_data['target_path'] = target_path
        return HttpResponse(
            json.dumps(response_data),
            content_type="application/json",
            status="200")
    except BaseException:
        # NOTE(review): BaseException also swallows KeyboardInterrupt and
        # SystemExit; a plain Exception would likely be safer — confirm.
        return errors.MalformedRequest()
@csrf_exempt
def delete_copy(request):
    """DELETE endpoint: trigger an asynchronous deletion task.

    Expects a JSON body and a Bearer token; returns 201 with the Celery
    request id, or an error response.
    """
    try:
        if request.method == 'DELETE':
            data = json.loads(request.body.decode('utf-8'))
            # AttributeError if the header is absent, IndexError if it is
            # not of the form "Bearer <token>" — both mapped to NoAuth below.
            token = request.headers.get('Authorization').split(" ")[1]
            (secrets, user, dditoken, refreshtoken,
             error) = utils.getDDIAttributes(token)
            if error is not None:
                check = 401
                return HttpResponse(
                    '{"status": "%s", "errorString": "%s"}' %
                    (check, error), content_type='application/json', status=check)
            data["token"] = dditoken
            validate_input.validate_deletion_input_body(data)
            delete = trigger_task.trigger_deletion_task(data)
            task_id = delete.id
            response_data = {'request_id': task_id}
            return HttpResponse(
                json.dumps(response_data),
                content_type="application/json",
                status="201")
    except KeyError:
        return errors.MalformedRequest("Required parameters not provided")
    except json.decoder.JSONDecodeError:
        return errors.MalformedRequest("Invalid JSON")
    except AttributeError:
        return errors.NoAuth()
    except IndexError:
        return errors.NoAuth()
    # BUGFIX: a non-DELETE request previously fell off the end and returned
    # None (an HTTP 500 in Django); reject it explicitly like sibling views.
    return errors.E405()
def check_deletion_status(request, req_id):
    """Report the state of deletion task *req_id*.

    NOTE(review): unlike the other status views, this one has no request
    method guard and no exception handler — confirm that is intentional.
    """
    res = AsyncResult(str(req_id))
    if str(res.state) == "PENDING":
        result = "Task still in the queue, or task does not exist"
    elif str(res.state) == "FAILURE":
        result = "Task Failed, reason: " + str(res.info)
    elif str(res.ready()) == "True" and str(res.state) == "SUCCESS":
        result = "Data deleted"
    else:
        result = "In progress"
    response_data = {'status': result}
    return HttpResponse(
        json.dumps(response_data),
        content_type="application/json",
        status="200")
def get_targets(request):
    """Return the names of all configured target systems as a JSON list."""
    # Iterating a dict yields its keys; no manual accumulation loop needed.
    targets = list(systems["systems"])
    return HttpResponse(
        json.dumps(targets),
        content_type="application/json",
        status="200")
@csrf_exempt
def replicate(request):
    """POST endpoint: trigger an asynchronous replication task.

    Expects a JSON body and a Bearer token; returns 201 with the Celery
    request id, or an error response.
    """
    if request.method == 'POST':
        try:
            data = json.loads(request.body.decode('utf-8'))
            # AttributeError if the header is absent, IndexError if it is
            # not of the form "Bearer <token>".
            token = request.headers.get('Authorization').split(" ")[1]
            (secrets, user, dditoken, refreshtoken,
             error) = utils.getDDIAttributes(token)
            if error is not None:
                check = 401
                return HttpResponse(
                    '{"status": "%s", "errorString": "%s"}' %
                    (check, error), content_type='application/json', status=check)
            data["token"] = dditoken
            validate_input.validate_replication_input_body(data)
            replication = trigger_task.trigger_replication(data)
            if replication.status == "FAILURE":
                return errors.E403()
            task_id = replication.id
            response_data = {'request_id': task_id}
            return HttpResponse(
                json.dumps(response_data),
                content_type="application/json",
                status="201")
        except ExceptionOpenIDAuthUrl:
            return errors.AuthURL()
        except json.decoder.JSONDecodeError:
            return errors.MalformedRequest("Invalid JSON")
        except KeyError:
            return errors.MalformedRequest("Required parameter not found")
        except AttributeError:
            return errors.NoAuth()
        except IndexError:
            return errors.NoAuth()
    return errors.E405()
def check_replication_status(request, req_id):
    """GET endpoint: report the state of replication task *req_id*.

    On success the response also carries the PID and target path.
    """
    if request.method != 'GET':
        return errors.E405()
    data = None
    replication = None
    try:
        res = AsyncResult(str(req_id))
        if str(res.state) == "PENDING":
            result = "Task still in the queue, or task does not exist"
        elif str(res.state) == "FAILURE":
            result = "Task Failed, reason: " + str(res.info)
        elif str(res.ready()) == "True" and str(res.state) == "SUCCESS":
            result = "Replication completed"
            data = res.get()
            # assumes the task result's second element is a (target_path, PID)
            # pair — TODO confirm against the replication task.
            replication = data[1]
        else:
            result = "In progress"
        response_data = {'status': result}
        if data is not None:
            response_data['PID'] = replication[1]
            response_data['target_path'] = replication[0]
        return HttpResponse(
            json.dumps(response_data),
            content_type="application/json",
            status="200")
    except BaseException:
        # NOTE(review): BaseException is very broad — confirm before narrowing.
        return errors.MalformedRequest()
@csrf_exempt
def assign_pid(request):
    """POST endpoint: trigger an asynchronous PID-assignment task.

    Expects a JSON body and a Bearer token; returns 201 with the Celery
    request id, or an error response.
    """
    if request.method == 'POST':
        try:
            data = json.loads(request.body.decode('utf-8'))
            # AttributeError if the header is absent, IndexError if it is
            # not of the form "Bearer <token>".
            token = request.headers.get('Authorization').split(" ")[1]
            (secrets, user, dditoken, refreshtoken,
             error) = utils.getDDIAttributes(token)
            if error is not None:
                check = 401
                return HttpResponse(
                    '{"status": "%s", "errorString": "%s"}' %
                    (check, error), content_type='application/json', status=check)
            data["token"] = dditoken
            validate_input.validate_pid_assignment_input_body(data)
            pid_assignment = trigger_task.trigger_pid_assignment(data)
            if pid_assignment.status == "FAILURE":
                return errors.E403()
            task_id = pid_assignment.id
            response_data = {'request_id': task_id}
            return HttpResponse(
                json.dumps(response_data),
                content_type="application/json",
                status="201")
        except ExceptionOpenIDAuthUrl:
            return errors.AuthURL()
        except json.decoder.JSONDecodeError:
            return errors.MalformedRequest("Invalid JSON")
        except KeyError:
            return errors.MalformedRequest("Required parameter not found")
        except AttributeError:
            return errors.NoAuth()
        except IndexError:
            return errors.NoAuth()
    return errors.E405()
def check_pid_assignment_status(request, req_id):
    """GET endpoint: report the state of PID-assignment task *req_id*.

    On success the response also carries the assigned PID.
    """
    if request.method != 'GET':
        return errors.E405()
    data = None
    pid = None
    try:
        res = AsyncResult(str(req_id))
        if str(res.state) == "PENDING":
            result = "Task still in the queue, or task does not exist"
        elif str(res.state) == "FAILURE":
            result = "Task Failed, reason: " + str(res.info)
        elif str(res.ready()) == "True" and str(res.state) == "SUCCESS":
            result = "PID assigned successfully"
            data = res.get()
            # assumes the task result's second element is the PID — confirm.
            pid = data[1]
        else:
            result = "In progress"
        response_data = {'status': result}
        if data is not None:
            response_data['PID'] = pid
        return HttpResponse(
            json.dumps(response_data),
            content_type="application/json",
            status="200")
    except BaseException:
        # NOTE(review): BaseException is very broad — confirm before narrowing.
        return errors.MalformedRequest()
@csrf_exempt
def check_replication(request):
    """POST endpoint: synchronously query the replication status of a dataset.

    Unlike the task-triggering views, this one revokes the token after use
    and returns the status directly (no Celery round-trip).
    """
    if request.method == 'POST':
        try:
            data = json.loads(request.body.decode('utf-8'))
            response_data = {}
            # AttributeError if the header is absent, IndexError if it is
            # not of the form "Bearer <token>".
            token = request.headers.get('Authorization').split(" ")[1]
            (secrets, user, dditoken, refreshtoken,
             error) = utils.getDDIAttributes(token)
            if error is not None:
                check = 401
                return HttpResponse(
                    '{"status": "%s", "errorString": "%s"}' %
                    (check, error), content_type='application/json', status=check)
            data["token"] = dditoken
            validate_input.validate_replication_status_input_body(data)
            status = replication_api.check_replication(data)
            utils.revokeToken(token)
            response_data['status'] = status
            return HttpResponse(
                json.dumps(response_data),
                content_type="application/json",
                status="200")
        except ExceptionOpenIDAuthUrl:
            return errors.AuthURL()
        except json.decoder.JSONDecodeError:
            return errors.MalformedRequest("Invalid JSON")
        except KeyError:
            return errors.MalformedRequest("Required parameter not found")
        except AttributeError:
            return errors.NoAuth()
        except IndexError:
            return errors.NoAuth()
    return errors.E405()
@csrf_exempt
def check_flags(request):
    """POST endpoint: synchronously return a dataset's encryption and
    compression flags, revoking the token after use."""
    if request.method == 'POST':
        try:
            data = json.loads(request.body.decode('utf-8'))
            response_data = {}
            # AttributeError if the header is absent, IndexError if it is
            # not of the form "Bearer <token>".
            token = request.headers.get('Authorization').split(" ")[1]
            (secrets, user, dditoken, refreshtoken,
             error) = utils.getDDIAttributes(token)
            if error is not None:
                check = 401
                return HttpResponse(
                    '{"status": "%s", "errorString": "%s"}' %
                    (check, error), content_type='application/json', status=check)
            data["token"] = dditoken
            validate_input.validate_replication_status_input_body(data)
            status = staging_api.get_enc_comp_flags(data)
            utils.revokeToken(token)
            # assumes get_enc_comp_flags returns (encryption, compression).
            response_data['encryption'] = status[0]
            response_data['compression'] = status[1]
            return HttpResponse(
                json.dumps(response_data),
                content_type="application/json",
                status="200")
        except ExceptionOpenIDAuthUrl:
            return errors.AuthURL()
        except json.decoder.JSONDecodeError:
            return errors.MalformedRequest("Invalid JSON")
        except KeyError:
            return errors.MalformedRequest("Required parameter not found")
        except AttributeError:
            return errors.NoAuth()
        except IndexError:
            return errors.NoAuth()
    return errors.E405()
@csrf_exempt
def get_size(request):
    """POST endpoint: trigger an asynchronous data-size computation task.

    Returns 201 with the Celery request id, or an error response.
    """
    if request.method == 'POST':
        try:
            data = json.loads(request.body.decode('utf-8'))
            # AttributeError if the header is absent, IndexError if it is
            # not of the form "Bearer <token>".
            token = request.headers.get('Authorization').split(" ")[1]
            (secrets, user, dditoken, refreshtoken,
             error) = utils.getDDIAttributes(token)
            if error is not None:
                check = 401
                return HttpResponse(
                    '{"status": "%s", "errorString": "%s"}' %
                    (check, error), content_type='application/json', status=check)
            data["token"] = dditoken
            validate_input.validate_data_size_input_body(data)
            size = trigger_task.trigger_get_data_size(data)
            if size.status == "FAILURE":
                return errors.E403()
            task_id = size.id
            response_data = {'request_id': task_id}
            return HttpResponse(
                json.dumps(response_data),
                content_type="application/json",
                status="201")
        except ExceptionOpenIDAuthUrl:
            return errors.AuthURL()
        except json.decoder.JSONDecodeError:
            return errors.MalformedRequest("Invalid JSON")
        except KeyError as ke:
            # Include the missing key name in the error message.
            return errors.MalformedRequest(
                "Required parameter not found: {}".format(
                    ke.args[0]))
        except AttributeError:
            return errors.NoAuth()
        except IndexError:
            return errors.NoAuth()
    return errors.E405()
def check_size_status(request, req_id):
    """Report the state of data-size task *req_id*.

    On success the response carries size, total file count and small-file
    count (assumes the task result's second element is that triple — confirm).
    """
    size = None
    res = AsyncResult(str(req_id))
    if str(res.state) == "PENDING":
        result = "Task still in the queue, or task does not exist"
    elif str(res.state) == "FAILURE":
        result = "Task Failed, reason: " + str(res.info)
    elif str(res.ready()) == "True" and str(res.state) == "SUCCESS":
        result = "Done"
        data = res.get()
        size = data[1]
    else:
        result = "In progress"
    response_data = {'result': result}
    if size is not None:
        response_data['size'] = str(size[0])
        response_data['totalfiles'] = str(size[1])
        response_data['smallfiles'] = str(size[2])
    return HttpResponse(
        json.dumps(response_data),
        content_type="application/json",
        status="200")
@csrf_exempt
def duplicate(request):
    """POST endpoint: trigger an asynchronous duplication task.

    Returns 201 with the Celery request id, or an error response.
    NOTE(review): unlike the sibling endpoints, no validate_input.* call is
    made on the body before triggering — confirm that is intentional.
    """
    if request.method == 'POST':
        try:
            data = json.loads(request.body.decode('utf-8'))
            # AttributeError if the header is absent, IndexError if it is
            # not of the form "Bearer <token>".
            token = request.headers.get('Authorization').split(" ")[1]
            (secrets, user, dditoken, refreshtoken,
             error) = utils.getDDIAttributes(token)
            if error is not None:
                check = 401
                return HttpResponse(
                    '{"status": "%s", "errorString": "%s"}' %
                    (check, error), content_type='application/json', status=check)
            if dditoken is not None:
                data["token"] = dditoken
            if secrets is not None:
                data["secrets"] = secrets
            staging = trigger_task.trigger_duplication(data)
            if staging.status == "FAILURE":
                return errors.E403()
            task_id = staging.id
            response_data = {'request_id': task_id}
            return HttpResponse(
                json.dumps(response_data),
                content_type="application/json",
                status="201")
        except ExceptionOpenIDAuthUrl:
            return errors.AuthURL()
        except json.decoder.JSONDecodeError:
            return errors.MalformedRequest("Invalid JSON")
        except KeyError:
            return errors.MalformedRequest("Required parameter not found")
        except AttributeError:
            return errors.NoAuth()
        except IndexError:
            return errors.NoAuth()
    return errors.E405()
@csrf_exempt
def check_duplication_status(request, req_id):
    """Return the progress of an asynchronous duplication task (GET only).

    Looks up the Celery task identified by ``req_id`` and reports its
    state as JSON; once the task has succeeded the duplicated data's
    target path is included in the payload.
    """
    if request.method != 'GET':
        return errors.E405()
    target_path = None
    try:
        res = AsyncResult(str(req_id))
        if str(res.state) == "PENDING":
            result = "Task still in the queue, or task does not exist"
        elif str(res.state) == "FAILURE":
            result = "Task Failed, reason: " + str(res.info)
        elif str(res.ready()) == "True" and str(res.state) == "SUCCESS":
            result = "Duplication completed"
            data = res.get()
            # Task payload is a sequence; index 1 holds the target path.
            target_path = data[1]
        else:
            result = "In progress"
        response_data = {'status': result}
        if target_path is not None:
            response_data['target_path'] = target_path
        return HttpResponse(
            json.dumps(response_data),
            content_type="application/json",
            status="200")
    # BUG FIX: was `except BaseException`, which also swallows
    # SystemExit and KeyboardInterrupt; `Exception` is the broadest
    # class a request handler should absorb.
    except Exception:
        return errors.MalformedRequest()
| 36.885542
| 82
| 0.575045
| 1,857
| 18,369
| 5.579429
| 0.092623
| 0.068333
| 0.048837
| 0.057716
| 0.817199
| 0.815172
| 0.803783
| 0.788534
| 0.780716
| 0.774829
| 0
| 0.01237
| 0.317873
| 18,369
| 497
| 83
| 36.959759
| 0.814525
| 0
| 0
| 0.781385
| 0
| 0
| 0.127552
| 0.00147
| 0
| 0
| 0
| 0
| 0
| 1
| 0.036797
| false
| 0
| 0.028139
| 0.002165
| 0.251082
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
9efad63b81fab085d87ebaebbb60590d367401f4
| 189
|
py
|
Python
|
iris_sdk/models/data/feature_lidb.py
|
NumberAI/python-bandwidth-iris
|
0e05f79d68b244812afb97e00fd65b3f46d00aa3
|
[
"MIT"
] | 2
|
2020-04-13T13:47:59.000Z
|
2022-02-23T20:32:41.000Z
|
iris_sdk/models/data/feature_lidb.py
|
bandwidthcom/python-bandwidth-iris
|
dbcb30569631395041b92917252d913166f7d3c9
|
[
"MIT"
] | 5
|
2020-09-18T20:59:24.000Z
|
2021-08-25T16:51:42.000Z
|
iris_sdk/models/data/feature_lidb.py
|
bandwidthcom/python-bandwidth-iris
|
dbcb30569631395041b92917252d913166f7d3c9
|
[
"MIT"
] | 5
|
2018-12-12T14:39:50.000Z
|
2020-11-17T21:42:29.000Z
|
#!/usr/bin/env python
from iris_sdk.models.base_resource import BaseData
from iris_sdk.models.maps.feature_lidb import FeatureLidbMap
class FeatureLidb(FeatureLidbMap, BaseData):
    """LIDB feature data object.

    Combines the field mapping from FeatureLidbMap with the generic
    data behavior of BaseData; adds nothing of its own.
    """
    pass
| 27
| 60
| 0.825397
| 26
| 189
| 5.846154
| 0.730769
| 0.105263
| 0.144737
| 0.223684
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.100529
| 189
| 7
| 61
| 27
| 0.894118
| 0.10582
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0.25
| 0.5
| 0
| 0.75
| 0
| 1
| 0
| 0
| null | 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 1
| 0
| 1
| 0
|
0
| 7
|
73364968df69d9a3bb9a8adf530117abe3959f8c
| 114
|
py
|
Python
|
simuvex/simuvex/engines/vex/statements/store.py
|
Ruide/angr-dev
|
964dc80c758e25c698c2cbcc454ef5954c5fa0a0
|
[
"BSD-2-Clause"
] | 86
|
2015-08-06T23:25:07.000Z
|
2022-02-17T14:58:22.000Z
|
simuvex/simuvex/engines/vex/statements/store.py
|
Ruide/angr-dev
|
964dc80c758e25c698c2cbcc454ef5954c5fa0a0
|
[
"BSD-2-Clause"
] | 132
|
2015-09-10T19:06:59.000Z
|
2018-10-04T20:36:45.000Z
|
simuvex/simuvex/engines/vex/statements/store.py
|
Ruide/angr-dev
|
964dc80c758e25c698c2cbcc454ef5954c5fa0a0
|
[
"BSD-2-Clause"
] | 80
|
2015-08-07T10:30:20.000Z
|
2020-03-21T14:45:28.000Z
|
# Compatibility shim (Python 2 — note the print *statement*): announces
# that the legacy simuvex module path is being imported, then re-exports
# everything from the relocated implementation under angr.
print '... Importing simuvex/engines/vex/statements/store.py ...'
from angr.engines.vex.statements.store import *
| 38
| 65
| 0.763158
| 15
| 114
| 5.8
| 0.733333
| 0.229885
| 0.45977
| 0.574713
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.078947
| 114
| 2
| 66
| 57
| 0.828571
| 0
| 0
| 0
| 0
| 0
| 0.5
| 0.342105
| 0
| 0
| 0
| 0
| 0
| 0
| null | null | 0
| 1
| null | null | 0.5
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 0
| 0
| 1
|
0
| 8
|
733aa879e3bf6ea9c896e5d126cf949ba797d82f
| 5,196
|
py
|
Python
|
Lib/site-packages/requests_unixsocket/tests/test_requests_unixsocket.py
|
mchrysl/spinal-abnormality-predictor
|
90fc769d79a39d690a76aa6e7ce02df9cca8cf86
|
[
"0BSD"
] | 172
|
2015-01-16T01:39:33.000Z
|
2022-03-17T06:26:45.000Z
|
requests_unixsocket/tests/test_requests_unixsocket.py
|
openstack/deb-python-requests-unixsocket
|
0dd4da72d78eea128e4a1c38ae4bd9bba8f535df
|
[
"Apache-2.0"
] | 38
|
2015-01-16T01:39:16.000Z
|
2021-12-28T04:05:48.000Z
|
requests_unixsocket/tests/test_requests_unixsocket.py
|
openstack/deb-python-requests-unixsocket
|
0dd4da72d78eea128e4a1c38ae4bd9bba8f535df
|
[
"Apache-2.0"
] | 25
|
2015-01-16T15:22:42.000Z
|
2021-12-22T19:49:52.000Z
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""Tests for requests_unixsocket"""
import logging
import pytest
import requests
import requests_unixsocket
from requests_unixsocket.testutils import UnixSocketServerThread
logger = logging.getLogger(__name__)
def test_unix_domain_adapter_ok():
    """Every HTTP verb over a unix socket gets a well-formed response."""
    verbs = ('get', 'post', 'head', 'patch', 'put', 'delete', 'options')
    with UnixSocketServerThread() as usock_thread:
        session = requests_unixsocket.Session('http+unix://')
        urlencoded_usock = requests.compat.quote_plus(usock_thread.usock)
        url = 'http+unix://%s/path/to/page' % urlencoded_usock
        for verb in verbs:
            logger.debug('Calling session.%s(%r) ...', verb, url)
            response = getattr(session, verb)(url)
            logger.debug(
                'Received response: %r with text: %r and headers: %r',
                response, response.text, response.headers)
            assert response.status_code == 200
            assert response.headers['server'] == 'waitress'
            assert response.headers['X-Transport'] == 'unix domain socket'
            assert response.headers['X-Requested-Path'] == '/path/to/page'
            assert response.headers['X-Socket-Path'] == usock_thread.usock
            assert isinstance(response.connection,
                              requests_unixsocket.UnixAdapter)
            assert response.url.lower() == url.lower()
            # HEAD responses carry no body by definition.
            expected_body = '' if verb == 'head' else 'Hello world!'
            assert response.text == expected_body
def test_unix_domain_adapter_url_with_query_params():
    """Query strings survive the trip through the unix-socket adapter."""
    verbs = ('get', 'post', 'head', 'patch', 'put', 'delete', 'options')
    with UnixSocketServerThread() as usock_thread:
        session = requests_unixsocket.Session('http+unix://')
        urlencoded_usock = requests.compat.quote_plus(usock_thread.usock)
        url = ('http+unix://%s'
               '/containers/nginx/logs?timestamp=true' % urlencoded_usock)
        for verb in verbs:
            logger.debug('Calling session.%s(%r) ...', verb, url)
            response = getattr(session, verb)(url)
            logger.debug(
                'Received response: %r with text: %r and headers: %r',
                response, response.text, response.headers)
            assert response.status_code == 200
            assert response.headers['server'] == 'waitress'
            assert response.headers['X-Transport'] == 'unix domain socket'
            assert response.headers['X-Requested-Path'] == '/containers/nginx/logs'
            assert response.headers['X-Requested-Query-String'] == 'timestamp=true'
            assert response.headers['X-Socket-Path'] == usock_thread.usock
            assert isinstance(response.connection,
                              requests_unixsocket.UnixAdapter)
            assert response.url.lower() == url.lower()
            # HEAD responses carry no body by definition.
            expected_body = '' if verb == 'head' else 'Hello world!'
            assert response.text == expected_body
def test_unix_domain_adapter_connection_error():
    """A nonexistent socket raises ConnectionError for every verb."""
    session = requests_unixsocket.Session('http+unix://')
    bad_url = 'http+unix://socket_does_not_exist/path/to/page'
    for verb in ('get', 'post', 'head', 'patch', 'put', 'delete', 'options'):
        with pytest.raises(requests.ConnectionError):
            getattr(session, verb)(bad_url)
def test_unix_domain_adapter_connection_proxies_error():
    """Passing proxies to the unix adapter raises a descriptive ValueError."""
    session = requests_unixsocket.Session('http+unix://')
    bad_url = 'http+unix://socket_does_not_exist/path/to/page'
    for verb in ('get', 'post', 'head', 'patch', 'put', 'delete', 'options'):
        with pytest.raises(ValueError) as excinfo:
            getattr(session, verb)(
                bad_url,
                proxies={"http+unix": "http://10.10.1.10:1080"})
        assert ('UnixAdapter does not support specifying proxies'
                in str(excinfo.value))
def test_unix_domain_adapter_monkeypatch():
    """Plain ``requests`` calls work while monkeypatched; once the patch
    exits, the custom scheme is rejected again."""
    verbs = ('get', 'post', 'head', 'patch', 'put', 'delete', 'options')
    with UnixSocketServerThread() as usock_thread:
        with requests_unixsocket.monkeypatch('http+unix://'):
            urlencoded_usock = requests.compat.quote_plus(usock_thread.usock)
            url = 'http+unix://%s/path/to/page' % urlencoded_usock
            for verb in verbs:
                logger.debug('Calling session.%s(%r) ...', verb, url)
                response = getattr(requests, verb)(url)
                logger.debug(
                    'Received response: %r with text: %r and headers: %r',
                    response, response.text, response.headers)
                assert response.status_code == 200
                assert response.headers['server'] == 'waitress'
                assert response.headers['X-Transport'] == 'unix domain socket'
                assert response.headers['X-Requested-Path'] == '/path/to/page'
                assert response.headers['X-Socket-Path'] == usock_thread.usock
                assert isinstance(response.connection,
                                  requests_unixsocket.UnixAdapter)
                assert response.url.lower() == url.lower()
                # HEAD responses carry no body by definition.
                expected_body = '' if verb == 'head' else 'Hello world!'
                assert response.text == expected_body
        # Outside the monkeypatch context the custom scheme must fail.
        for verb in verbs:
            with pytest.raises(requests.exceptions.InvalidSchema):
                getattr(requests, verb)(url)
| 42.590164
| 79
| 0.574288
| 571
| 5,196
| 5.10683
| 0.182137
| 0.060014
| 0.062414
| 0.05144
| 0.818587
| 0.780521
| 0.765432
| 0.765432
| 0.765432
| 0.765432
| 0
| 0.005663
| 0.286374
| 5,196
| 121
| 80
| 42.942149
| 0.780744
| 0.013857
| 0
| 0.708333
| 0
| 0
| 0.216533
| 0.044753
| 0
| 0
| 0
| 0
| 0.302083
| 1
| 0.052083
| false
| 0
| 0.052083
| 0
| 0.104167
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
7360603249f6640aa04229cf9c63ca83dca1c561
| 70
|
py
|
Python
|
testa.py
|
igorifaresi/md-abstractions
|
adb6ad2c16785c8ed2a33d24b98484c8255b6ec7
|
[
"MIT"
] | null | null | null |
testa.py
|
igorifaresi/md-abstractions
|
adb6ad2c16785c8ed2a33d24b98484c8255b6ec7
|
[
"MIT"
] | null | null | null |
testa.py
|
igorifaresi/md-abstractions
|
adb6ad2c16785c8ed2a33d24b98484c8255b6ec7
|
[
"MIT"
] | null | null | null |
def par(i):
    """Return True when *i* is even ("par" is Portuguese for even)."""
    _, remainder = divmod(i, 2)
    return remainder == 0
def impar(i):
    """Return True when *i* is odd ("impar" is Portuguese for odd)."""
    _, remainder = divmod(i, 2)
    return remainder == 1
| 14
| 21
| 0.485714
| 14
| 70
| 2.428571
| 0.571429
| 0.411765
| 0.470588
| 0.529412
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.086957
| 0.342857
| 70
| 5
| 22
| 14
| 0.652174
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.5
| false
| 0
| 0
| 0.5
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 1
| 0
|
0
| 8
|
b40e302b8903dd29f8a10575231826afab1fc3ad
| 298
|
py
|
Python
|
src/momento/_authorization_interceptor.py
|
kvcache/client-sdk-python
|
433f69425ff8a6de2e56bf2689f4560a088d00dc
|
[
"Apache-2.0"
] | null | null | null |
src/momento/_authorization_interceptor.py
|
kvcache/client-sdk-python
|
433f69425ff8a6de2e56bf2689f4560a088d00dc
|
[
"Apache-2.0"
] | 10
|
2022-02-10T21:58:53.000Z
|
2022-03-29T00:58:03.000Z
|
src/momento/_authorization_interceptor.py
|
kvcache/client-sdk-python
|
433f69425ff8a6de2e56bf2689f4560a088d00dc
|
[
"Apache-2.0"
] | 1
|
2022-02-10T21:02:40.000Z
|
2022-02-10T21:02:40.000Z
|
from ._generic_client_interceptor import _GenericClientInterceptor
from . import _header_client_interceptor
def get_authorization_interceptor(auth_token: str) -> _GenericClientInterceptor:
    """Build a gRPC client interceptor that attaches *auth_token* as the
    ``authorization`` header on every outgoing call."""
    interceptor = _header_client_interceptor.header_adder_interceptor(
        "authorization", auth_token
    )
    return interceptor
| 29.8
| 80
| 0.828859
| 29
| 298
| 7.931034
| 0.517241
| 0.221739
| 0.2
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.124161
| 298
| 9
| 81
| 33.111111
| 0.881226
| 0
| 0
| 0
| 0
| 0
| 0.043624
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.166667
| false
| 0
| 0.333333
| 0.166667
| 0.666667
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 1
| 0
|
0
| 7
|
b43c8152d14a8bffc1a2f334eee6aa7d26fdb23d
| 21,488
|
py
|
Python
|
utils/spider_headers.py
|
DukeNan/Hotspot
|
68e8313ff3abcbd23e74f21e2222af14e8de64b4
|
[
"MIT"
] | 3
|
2021-01-26T20:21:18.000Z
|
2021-01-27T02:10:20.000Z
|
utils/spider_headers.py
|
DukeNan/Hotspot
|
68e8313ff3abcbd23e74f21e2222af14e8de64b4
|
[
"MIT"
] | 1
|
2022-03-02T15:00:23.000Z
|
2022-03-02T15:00:23.000Z
|
utils/spider_headers.py
|
DukeNan/Hotspot
|
68e8313ff3abcbd23e74f21e2222af14e8de64b4
|
[
"MIT"
] | null | null | null |
# Pool of browser User-Agent strings used to randomize outgoing spider
# requests.  NOTE(review): a couple of entries look truncated (flagged
# below) — presumably damaged as this widely-copied list was pasted
# around; left as-is pending confirmation against an upstream source.
USER_AGENT_LIST = [
    'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_14_5) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/75.0.3770.90 Safari/537.36',
    'Mozilla/5.0 (Windows; U; Windows NT 5.2; en-US) AppleWebKit/525.13 (KHTML, like Gecko) Chrome/0.2.149.29 Safari/525.13',
    'Mozilla/5.0 (Windows; U; Windows NT 6.1; en-US) AppleWebKit/531.4 (KHTML, like Gecko) Chrome/3.0.194.0 Safari/531.4',
    'Mozilla/5.0 (Windows; U; Windows NT 6.1; en-US) AppleWebKit/534.16 (KHTML, like Gecko) Chrome/10.0.648.11 Safari/534.16',
    'Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US) AppleWebKit/525.19 (KHTML, like Gecko) Chrome/1.0.154.50 Safari/525.19',
    'Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US) AppleWebKit/532.0 (KHTML, like Gecko) Chrome/4.0.211.7 Safari/532.0',
    'Mozilla/4.0 (compatible; MSIE 7.0; Windows NT 5.1; .NET CLR 1.1.4322; .NET CLR 2.0.50727; Lunascape 5.0 alpha2)',
    'Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US) AppleWebKit/532.2 (KHTML, like Gecko) Chrome/4.0.222.7 Safari/532.2',
    'Mozilla/5.0 (Windows; U; Windows NT 6.1; ru-RU) AppleWebKit/534.16 (KHTML, like Gecko) Chrome/10.0.648.11 Safari/534.16',
    'Mozilla/5.0 (Windows; U; Windows NT 6.1; en-US) AppleWebKit/532.0 (KHTML, like Gecko) Chrome/3.0.195.10 Safari/532.0',
    'Mozilla/4.0 (compatible; MSIE 6.0; Windows NT 5.1; Maxthon;',  # NOTE(review): looks truncated
    'Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US) AppleWebKit/530.1 (KHTML, like Gecko) Chrome/2.0.169.0 Safari/530.1',
    'Mozilla/5.0 (Windows; U; Windows NT 5.1; ja-JP; rv:1.7) Gecko/20040614 Firefox/0.9',
    'Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/535.1 (KHTML, like Gecko) Chrome/14.0.810.0 Safari/535.1',
    'Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US) AppleWebKit/532.0 (KHTML, like Gecko) Chrome/4.0.211.0 Safari/532.0',
    'Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Maxthon/4.4.3.4000 Chrome/30.0.1599.101 Safari/537.36',
    'Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US) AppleWebKit/534.6 (KHTML, like Gecko) Chrome/7.0.500.0 Safari/534.6',
    'Mozilla/4.0 (compatible; MSIE 6.0; Windows NT 5.1; TencentTraveler)',
    'Mozilla/5.0 (Windows NT 6.0; WOW64) AppleWebKit/535.11 (KHTML, like Gecko) Chrome/17.0.963.56 Safari/535.11',
    'Mozilla/5.0 (Windows; U; Windows NT 5.2; en-US) AppleWebKit/534.4 (KHTML, like Gecko) Chrome/6.0.481.0 Safari/534.4',
    'Mozilla/5.0 (Windows; U; Windows NT 6.1; en-US) AppleWebKit/533.4 (KHTML, like Gecko) Chrome/5.0.370.0 Safari/533.4',
    'Mozilla/5.0 (Windows; U; Windows NT 5.2; en-US; rv:1.7.5) Gecko/20041107 Firefox/1.0',
    'Mozilla/5.0 (Windows; U; Windows NT 6.0; en-US) AppleWebKit/525.19 (KHTML, like Gecko) Chrome/0.4.154.31 Safari/525.19',
    'Mozilla/5.0 (Windows; U; Windows NT 6.1; en-GB; rv:1.9.1.17) Gecko/20110123 (like Firefox/3.x) SeaMonkey/2.0.12',
    'Mozilla/5.0 (Windows; U; Windows NT 6.1; en-GB) AppleWebKit/534.1 (KHTML, like Gecko) Chrome/6.0.428.0 Safari/534.1',
    'Mozilla/5.0 (Windows; U; Windows NT 6.1; de-DE) AppleWebKit/534.10 (KHTML, like Gecko) Chrome/7.0.540.0 Safari/534.10',
    'Mozilla/5.0 (Windows; U; Windows NT 5.1; de-DE) Chrome/4.0.223.3 Safari/532.2',
    'Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/534.24 (KHTML, like Gecko) Chrome/12.0.702.0 Safari/534.24',
    'Mozilla/5.0 (Windows; U; Windows NT 6.0; en-US) AppleWebKit/525.19 (KHTML, like Gecko) Chrome/1.0.154.42 Safari/525.19',
    'Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.11 (KHTML, like Gecko) Chrome/23.0.1271.64 Safari/537.11',
    'Mozilla/5.0 (Windows; U; Windows NT 6.1; en-US) AppleWebKit/532.3 (KHTML, like Gecko) Chrome/4.0.227.0 Safari/532.3',
    'Mozilla/5.0 (Windows NT 6.1) AppleWebKit/535.8 (KHTML, like Gecko) Chrome/16.0.912.63 Safari/535.8',
    'Mozilla/5.0 (Windows; U; Windows NT 6.1; en-US) AppleWebKit/534.3 (KHTML, like Gecko) Chrome/6.0.460.0 Safari/534.3',
    'Mozilla/5.0 (Windows; U; Windows NT 5.2; en-US) AppleWebKit/534.3 (KHTML, like Gecko) Chrome/6.0.463.0 Safari/534.3',
    'Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US) AppleWebKit/528.9 (KHTML, like Gecko) Chrome/2.0.157.0 Safari/528.9',
    'Mozilla/5.0 (Windows NT 5.2) AppleWebKit/535.1 (KHTML, like Gecko) Chrome/14.0.794.0 Safari/535.1',
    'Mozilla/5.0 (Windows NT 6.1) AppleWebKit/534.24 (KHTML, like Gecko) Chrome/11.0.694.0 Safari/534.24',
    'Mozilla/5.0 (Windows NT 6.0) AppleWebKit/536.5 (KHTML, like Gecko) Chrome/19.0.1084.36 Safari/536.5',
    'Mozilla/5.0 (Windows NT 5.1; U; en; rv:1.8.1) Gecko/20061208 Firefox/2.0.0 Opera 9.50',
    'Mozilla/5.0 (Windows NT 6.1; WOW64; rv:15.0) Gecko/20120427 Firefox/15.0a1',
    'Mozilla/5.0 (Windows; U; Windows NT 5.0; en-US; rv:1.7.5) Gecko/20041107 Firefox/1.0',
    'Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/536.6 (KHTML, like Gecko) Chrome/20.0.1092.0 Safari/536.6',
    'Mozilla/4.0 (compatible; MSIE 6.0; Windows NT 5.1; SV1; Maxthon; .NET CLR 1.1.4322)',
    'Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US) AppleWebKit/532.2 (KHTML, like Gecko) Chrome/4.0.223.4 Safari/532.2',
    'Mozilla/5.0 (Windows NT 6.2; WOW64) AppleWebKit/535.11 (KHTML, like Gecko) Chrome/17.0.963.65 Safari/535.11',
    'Mozilla/5.0 (Windows NT 5.1) AppleWebKit/535.1 (KHTML, like Gecko) Chrome/13.0.782.41 Safari/535.1',
    'Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US) AppleWebKit/534.21 (KHTML, like Gecko) Chrome/11.0.682.0 Safari/534.21',
    'Mozilla/5.0 (Windows; U; Windows NT 6.1; en-US) AppleWebKit/530.0 (KHTML, like Gecko) Chrome/2.0.182.0 Safari/531.0',
    'Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US) AppleWebKit/534.9 (KHTML, like Gecko) Chrome/7.0.531.0 Safari/534.9',
    'Mozilla/5.0 (compatible; MSIE 10.0; Windows NT 6.2; WOW64; Trident/6.0)',
    'Mozilla/5.0 (Windows NT 6.1; rv:2.0.1) Gecko/20100101 Firefox/4.0.1',
    'Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/535.1 (KHTML, like Gecko) Chrome/14.0.811.0 Safari/535.1',
    # BUG FIX: this entry previously read "ozilla/5.0 ..." — the leading
    # "M" was lost somewhere upstream, making the UA string invalid.
    'Mozilla/5.0 (Windows; U; Windows NT 5.0; de-DE; rv:1.7.5) Gecko/20041108 Firefox/1.0',
    'Mozilla/5.0 (Macintosh; U; Intel Mac OS X 10_6_8; en-us) AppleWebKit/534.50 (KHTML, like Gecko) Version/5.1 Safari/534.50',
    'Mozilla/5.0 (SymbianOS/9.4; Series60/5.0 NokiaN97-1/20.0.019; Profile/MIDP-2.1 Configuration/CLDC-1.1) AppleWebKit/525 (KHTML, like Gecko) BrowserNG/7.1.18124',
    'Mozilla/5.0 (Windows; U; Windows NT 6.0; en-US) AppleWebKit/533.4 (KHTML, like Gecko) Chrome/5.0.375.127 Safari/533.4',
    'Mozilla/4.0 (compatible; MSIE 7.0; Windows NT 6.1; WOW64; Trident/5.0; SLCC2; .NET CLR 2.0.50727; .NET CLR 3.5.30729; .NET CLR 3.0.30729; Media Center PC 6.0; InfoPath.3; .NET4.0C; .NET4.0E) QQBrowser/6.9.11079.201',
    'Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US) AppleWebKit/531.21.8 (KHTML, like Gecko) Version/4.0.4 Safari/531.21.10',
    'Mozilla/5.0 (Windows NT 6.0) AppleWebKit/535.2 (KHTML, like Gecko) Chrome/15.0.874.120 Safari/535.2',
    'Mozilla/4.0 (compatible; MSIE 6.0; Windows NT 5.1; zh-cn) Opera 8.50',
    'Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US) AppleWebKit/525.13 (KHTML, like Gecko) Chrome/7.0.0 Safari/700.13',
    'Mozilla/5.0 (Windows; U; Windows NT 6.1; en-US) AppleWebKit/532.0 (KHTML, like Gecko) Chrome/4.0.211.4 Safari/532.0',
    'Mozilla/5.0 (Windows; U; Windows NT 6.0; en-US) AppleWebKit/525.19 (KHTML, like Gecko) Chrome/1.0.154.53 Safari/525.19',
    'Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/30.0.1599.101 Safari/537.36',
    'Mozilla/5.0 (Windows; U; Windows NT 5.2; en-US) AppleWebKit/532.0 (KHTML, like Gecko) Chrome/3.0.195.6 Safari/532.0',
    'Mozilla/5.0 (Windows; U; Windows NT 6.0; en-US) AppleWebKit/532.0 (KHTML, like Gecko) Chrome/3.0.195.1 Safari/532.0',
    'Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.7.5) Gecko/20041107 Firefox/0.9.2 StumbleUpon/1.994',
    'Mozilla/5.0 (Windows; U; Windows NT 6.0; en-GB; rv:1.9.0.11) Gecko/2009060215 Firefox/3.0.11 (.NET CLR 3.5.30729)',
    'Mozilla/5.0 (Windows; U; Windows NT 5.1; en-GB; rv:1.7.5) Gecko/20041110 Firefox/1.0',
    'Mozilla/5.0 (Windows NT 6.2) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/28.0.1467.0 Safari/537.36',
    'Mozilla/4.0 (compatible; MSIE 6.0; Windows NT 5.0; en) Opera 8.0',
    'Mozilla/5.0 (Windows NT 6.1) AppleWebKit/535.1 (KHTML, like Gecko) Chrome/13.0.782.41 Safari/535.1 QQBrowser/6.9.11079.201',
    'Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/534.57.2 (KHTML, like Gecko) Version/5.1.7 Safari/534.57.2',
    'Mozilla/5.0 (Windows NT 6.1; WOW64; Trident/7.0; rv:11.0) like Gecko',
    'Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/536.11 (KHTML, like Gecko) Chrome/20.0.1132.11 TaoBrowser/2.0 Safari/536.11',
    'Mozilla/5.0 (Windows NT 6.1; WOW64; rv:2.0b4pre) Gecko/20100815 Minefield/4.0b4pre',
    'Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/535.11 (KHTML, like Gecko) Chrome/17.0.963.56 Safari/535.11',
    'Mozilla/5.0 (Windows; U; Windows NT 6.0 x64; en-US; rv:1.9pre) Gecko/2008072421 Minefield/3.0.2pre',
    'Mozilla/5.0 (Windows; U; Windows NT 6.0; en-US) AppleWebKit/530.5 (KHTML, like Gecko) Chrome/2.0.172.6 Safari/530.5',
    'Mozilla/4.0 (compatible; MSIE 6.0; Windows NT 5.1; .NET CLR 1.0.3705)',
    'Mozilla/5.0 (Windows; U; Windows NT 6.1; en-US) AppleWebKit/532.0 (KHTML, like Gecko) Chrome/3.0.195.21 Safari/532.0',
    'Mozilla/5.0 (Windows NT 5.1) AppleWebKit/535.1 (KHTML, like Gecko) Chrome/14.0.792.0 Safari/535.1',
    'Mozilla/5.0 (Windows; U; Windows NT 6.0; en-US) AppleWebKit/530.1 (KHTML, like Gecko) Chrome/2.0.168.0 Safari/530.1',
    'Mozilla/5.0 (X11; U; Linux x86_64; zh-CN; rv:1.9.2.10) Gecko/20100922 Ubuntu/10.10 (maverick) Firefox/3.6.10',
    'Mozilla/5.0 (Windows; U; Windows NT 5.1; rv:1.7.3) Gecko/20040913 Firefox/0.10',
    'Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US) AppleWebKit/530.8 (KHTML, like Gecko) Chrome/2.0.177.1 Safari/530.8',
    'Mozilla/5.0 (Windows; U; Windows NT 5.2; en-US) AppleWebKit/533.17.8 (KHTML, like Gecko) Version/5.0.1 Safari/533.17.8',
    'Mozilla/4.0 (compatible; MSIE 8.0; Windows NT 6.0; Trident/4.0)',
    'Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US) AppleWebKit/530.5 (KHTML, like Gecko) Chrome/2.0.172.40 Safari/530.5',
    'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_7_0) AppleWebKit/535.11 (KHTML, like Gecko) Chrome/17.0.963.56 Safari/535.11',
    'Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US) AppleWebKit/532.0 (KHTML, like Gecko) Chrome/3.0.195.24 Safari/532.0',
    'Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US) AppleWebKit/528.10 (KHTML, like Gecko) Chrome/2.0.157.2 Safari/528.10',
    'Mozilla/5.0 (Windows; U; Windows NT 5.2; en-US) AppleWebKit/532.2 (KHTML, like Gecko) Chrome/4.0.223.2 Safari/532.2',
    'Mozilla/5.0 (Windows NT 6.0) AppleWebKit/535.7 (KHTML, like Gecko) Chrome/16.0.912.75 Safari/535.7',
    'Mozilla/4.0 (compatible; MSIE 6.0; Windows NT 5.0; T312461)',
    'Mozilla/4.0 (compatible; MSIE 7.0; Windows NT 5.1; TencentTraveler 4.0)',
    'Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US) AppleWebKit/534.3 (KHTML, like Gecko) Chrome/6.0.461.0 Safari/534.3',
    'Mozilla/5.0 (Windows; U; Windows NT 5.0; rv:1.7.3) Gecko/20041001 Firefox/0.10.1',
    'Mozilla/4.0 (compatible; MSIE 6.0; Windows NT 5.1; .NET CLR 1.1.4322)',
    'Mozilla/5.0 (Windows NT 6.2) AppleWebKit/536.3 (KHTML, like Gecko) Chrome/19.0.1062.0 Safari/536.3',
    'Mozilla/5.0 (Windows; U; Windows NT 5.2; de-DE) AppleWebKit/532.0 (KHTML, like Gecko) Chrome/4.0.202.2 Safari/532.0',
    'Mozilla/5.0 (Windows NT 6.2; Win64; x64; rv:16.0) Gecko/16.0 Firefox/16.0',
    'Mozilla/5.0 (Windows; U; Windows NT 5.2; en-US) AppleWebKit/531.3 (KHTML, like Gecko) Chrome/3.0.193.2 Safari/531.3',
    'Mozilla/4.0 (compatible; MSIE 6.0; Windows NT 5.0; .NET CLR 1',  # NOTE(review): looks truncated
    'Mozilla/4.0 (compatible; MSIE 7.0; Windows NT 6.0)',
    'Mozilla/5.0 (Windows NT 5.1) AppleWebKit/535.2 (KHTML, like Gecko) Chrome/15.0.864.0 Safari/535.2',
    'Mozilla/5.0 (Windows NT 5.2) AppleWebKit/535.1 (KHTML, like Gecko) Chrome/14.0.813.0 Safari/535.1',
    'Mozilla/5.0 (Windows; U; Windows NT 6.0; en-US) AppleWebKit/532.0 (KHTML, like Gecko) Chrome/3.0.195.6 Safari/532.0',
    'Mozilla/5.0 (Windows NT 5.1; rv:2.1.1) Gecko/20110415 Firefox/4.0.2pre Fennec/4.0.1',
    'Mozilla/5.0 (Windows NT 6.1) AppleWebKit/535.1 (KHTML, like Gecko) Chrome/14.0.801.0 Safari/535.1',
    'Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US) AppleWebKit/532.0 (KHTML, like Gecko) Chrome/4.0.212.0 Safari/532.0',
    'Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US) AppleWebKit/525.13 (KHTML, like Gecko) Chrome/0.2.149.27 Safari/525.13',
    'Mozilla/5.0 (Windows; U; Windows NT 6.1; en-US) AppleWebKit/532.5 (KHTML, like Gecko) Chrome/4.0.249.0 Safari/532.5',
    'Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/535.7 (KHTML, like Gecko) Chrome/16.0.912.36 Safari/535.7',
    'Mozilla/5.0 (Windows NT 6.1) AppleWebKit/534.24 (KHTML, like Gecko) Chrome/11.0.697.0 Safari/534.24',
    'Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US) AppleWebKit/534.10 (KHTML, like Gecko) Chrome/7.0.548.0 Safari/534.10',
    'Mozilla/5.0 (Windows; U; Windows NT 5.2; en-US) AppleWebKit/534.17 (KHTML, like Gecko) Chrome/11.0.652.0 Safari/534.17',
    'Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/29.0.1547.62 Safari/537.36',
    'Mozilla/5.0 (Windows; U; Windows NT 6.0; en-US) AppleWebKit/534.10 (KHTML, like Gecko) Chrome/8.0.552.224 Safari/534.10 ChromePlus/1.5.2.0',
    'Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US) AppleWebKit/532.1 (KHTML, like Gecko) Chrome/4.0.219.0 Safari/532.1',
    'Mozilla/5.0 (Windows; U; Windows NT 6.0; en-US) AppleWebKit/532.0 (KHTML, like Gecko) Chrome/4.0.211.7 Safari/532.0',
    'Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US) AppleWebKit/533.2 (KHTML, like Gecko) Chrome/5.0.342.2 Safari/533.2',
    'Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US) AppleWebKit/532.1 (KHTML, like Gecko) Chrome/4.0.219.4 Safari/532.1',
    'Mozilla/5.0 (Windows NT 6.0; rv:2.1.1) Gecko/20110415 Firefox/4.0.2pre Fennec/4.0.1',
    'Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US) AppleWebKit/525.19 (KHTML, like Gecko) Chrome/0.2.153.0 Safari/525.19',
    'Mozilla/5.0 (Windows; U; Windows NT 5.1; sv-SE; rv:1.7.5) Gecko/20041108 Firefox/1.0',
    'Mozilla/5.0 (Windows; U; Windows NT 5.2; en-US) AppleWebKit/534.3 (KHTML, like Gecko) Chrome/6.0.462.0 Safari/534.3',
    'Mozilla/5.0 (Windows; U; Windows NT 5.1; de-DE; rv:1.7.5) Gecko/20041122 Firefox/1.0',
    'Mozilla/4.0 (compatible; MSIE 6.0; Windows NT 5.2; SV1; uZardWeb/1.0; Server_JP)',
    'Mozilla/4.0 (compatible; MSIE 6.0; Windows NT 5.1; HCI0449; .NET CLR 1.0.3705)',
    'Mozilla/4.0 (compatible; MSIE 5.0; Windows 98; DigExt); Mozilla/4.0 (compatible; MSIE 6.0; Windows NT 5.1);',
    'Mozilla/5.0 (Windows; U; Windows NT 6.0; en-US) AppleWebKit/530.5 (KHTML, like Gecko) Chrome/2.0.172.23 Safari/530.5',
    'Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US) AppleWebKit/532.0 (KHTML, like Gecko) Chrome/4.0.208.0 Safari/532.0',
    'Mozilla/5.0 (Windows NT 6.0; rv:14.0) Gecko/20100101 Firefox/14.0.1',
    'Mozilla/5.0 (Windows NT 5.1) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/27.0.1453.93 Safari/537.36',
    'Mozilla/5.0 (Windows NT 6.2; WOW64) AppleWebKit/537.1 (KHTML, like Gecko) Chrome/19.77.34.5 Safari/537.1',
    'Mozilla/5.0 (Windows; U; Windows NT 6.0; en-US) AppleWebKit/530.7 (KHTML, like Gecko) Chrome/2.0.176.0 Safari/530.7',
    'Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US) AppleWebKit/534.21 (KHTML, like Gecko) Chrome/11.0.678.0 Safari/534.21',
    'Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US) AppleWebKit/532.0 (KHTML, like Gecko) Chrome/3.0.195.21 Safari/532.0',
    'Mozilla/4.0 (compatible; MSIE 7.0; Windows NT 5.1; SV1; .NET CLR 1.1.4322)',
    'Mozilla/4.0 (compatible; MSIE 8.0; Windows NT 5.1; Trident/4.0; .NET CLR 2.0.50727; InfoPath.1',  # NOTE(review): missing closing ")"
    'Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US) AppleWebKit/525.19 (KHTML, like Gecko) Chrome/1.0.154.55 Safari/525.19',
    'Mozilla/5.0 (Windows NT 6.1; WOW64; rv:7.0a1) Gecko/20110623 Firefox/7.0a1 Fennec/7.0a1',
    'Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US) AppleWebKit/534.30 (KHTML, like Gecko) Chrome/12.0.724.100 Safari/534.30',
    'Mozilla/5.0 (Windows; U; Windows NT 6.1; en-US) AppleWebKit/534.3 (KHTML, like Gecko) Chrome/6.0.472.33 Safari/534.3 SE 2.X MetaSr 1.0',
    'Mozilla/4.0 (compatible; MSIE 6.0; Windows NT 5.2; WOW64; SV1; uZardWeb/1.0; Server_HK)',
    'Mozilla/5.0 (Windows NT 6.1; WOW64; rv:7.0.1) Gecko/20100101 Firefox/7.0.1',
    'Mozilla/4.0 (compatible; MSIE 7.0; Windows NT 6.0; SLCC1; .NET CLR 2.0.50727; Media Center PC 5.0; .NET CLR 3.0.04506)',
    'Mozilla/5.0 (compatible; MSIE 9.0; Windows NT 6.1; WOW64; Trident/5.0; SLCC2; .NET CLR 2.0.50727; .NET CLR 3.5.30729; .NET CLR 3.0.30729; Media Center PC 6.0; InfoPath.3; .NET4.0C; .NET4.0E)',
    'Mozilla/5.0 (Windows NT 5.1) AppleWebKit/536.3 (KHTML, like Gecko) Chrome/19.0.1063.0 Safari/536.3',
    'Mozilla/5.0 (Windows NT 6.0) yi; AppleWebKit/345667.12221 (KHTML, like Gecko) Chrome/23.0.1271.26 Safari/453667.1221',
    'Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US) AppleWebKit/531.2 (KHTML, like Gecko) Chrome/3.0.191.3 Safari/531.2',
    'Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US) AppleWebKit/530.5 (KHTML, like Gecko) Chrome/2.0.172.39 Safari/530.5',
    'Mozilla/5.0 (Windows; U; Windows NT 6.1; en-US) AppleWebKit/532.0 (KHTML, like Gecko) Chrome/3.0.195.1 Safari/532.0',
    'Mozilla/5.0 (Windows; U; Windows NT 6.0; en-US) AppleWebKit/532.0 (KHTML, like Gecko) Chrome/3.0.195.38 Safari/532.0',
    'Mozilla/5.0 (Windows; U; Windows NT 5.2; en-US) AppleWebKit/532.0 (KHTML, like Gecko) Chrome/3.0.195.27 Safari/532.0',
    'Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8b) Gecko/20050118 Firefox/1.0+',
    'Mozilla/5.0 (Windows; U; Windows NT 5.1; ja-JP; rv:1.7) Gecko/20040707 Firefox/0.9.2',
    'Mozilla/5.0 (Windows; U; Windows NT 6.0; en-US) AppleWebKit/532.0 (KHTML, like Gecko) Chrome/4.0.202.0 Safari/532.0',
    'Mozilla/4.0 (compatible; MSIE 7.0; Windows NT 5.1; Trident/4.0; SE 2.X MetaSr 1.0; SE 2.X MetaSr 1.0; .NET CLR 2.0.50727; SE 2.X MetaSr 1.0)',
    'Mozilla/5.0 (Windows; U; Windows NT 6.0; en-US) AppleWebKit/530.4 (KHTML, like Gecko) Chrome/2.0.171.0 Safari/530.4',
    'Mozilla/4.0 (compatible; MSIE 8.0; Windows NT 5.1; Trident/4.0; .NET CLR 2.0.50727; .NET CLR 3.0.04506.30; .NET CLR 3.0.04506.648)',
    'Mozilla/5.0 (Windows NT 6.2; WOW64) AppleWebKit/535.24 (KHTML, like Gecko) Chrome/19.0.1055.1 Safari/535.24',
    'Mozilla/5.0 (Windows; U; Windows NT 5.1; nl-NL; rv:1.7.5) Gecko/20041202 Firefox/1.0',
    'Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US) AppleWebKit/532.0 (KHTML, like Gecko) Chrome/4.0.204.0 Safari/532.0',
    'Mozilla/5.0 (Windows; U; Windows NT 5.2; en-US) AppleWebKit/532.2 (KHTML, like Gecko) Chrome/4.0.222.6 Safari/532.2',
    'Mozilla/5.0 (Windows; U; Windows NT 6.1; en-US) AppleWebKit/528.8 (KHTML, like Gecko) Chrome/1.0.156.0 Safari/528.8',
    'Mozilla/4.0 (compatible; MSIE 7.0; Windows NT 6.1; Trident/6.0)',
    'Mozilla/4.0 (compatible; MSIE 7.0; Windows NT 5.1; SV1; .NET CLR 1.0.3705; .NET CLR 2.0.50727; .NET CLR 1.1.4322)',
    'Mozilla/5.0 (Windows; U; Windows NT 6.1; en-US) AppleWebKit/534.7 (KHTML, like Gecko) Chrome/7.0.517.43 Safari/534.7',
    'Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US) AppleWebKit/534.13 (KHTML, like Gecko) Chrome/9.0.597.15 Safari/534.13',
    'Mozilla/5.0 (ipad Windows; U; Windows NT 6.1; en-US) AppleWebKit/534.6 (KHTML, like Gecko) Chrome/7.0.498.0 Safari/534.6',
    'Mozilla/5.0 (Windows; U; Windows NT 6.1; en-US) AppleWebKit/530.5 (KHTML, like Gecko) Chrome/2.0.172.43 Safari/530.5',
    'Mozilla/5.0 (Windows; U; Windows NT 6.0; en-US) AppleWebKit/532.0 (KHTML, like Gecko) Chrome/4.0.208.0 Safari/532.0',
    'Mozilla/5.0 (Windows NT 6.2; WOW64) AppleWebKit/535.11 (KHTML, like Gecko) Chrome/17.0.963.66 Safari/535.11',
    'Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US) AppleWebKit/534.19 (KHTML, like Gecko) Chrome/11.0.661.0 Safari/534.19',
    'Mozilla/5.0 (Windows; U; Windows NT 5.1; en-CA) AppleWebKit/534.13 (KHTML like Gecko) Chrome/9.0.597.98 Safari/534.13',
    'Mozilla/4.0 (compatible; MSIE 6.0; Windows NT 5.1; SV1; .NET CLR 1.1.4322)',
    'Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US) AppleWebKit/532.0 (KHTML, like Gecko) Chrome/4.0.211.2 Safari/532.0',
    'Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US) AppleWebKit/532.0 (KHTML, like Gecko) Chrome/4.0.201.1 Safari/532.0',
    'Mozilla/5.0 (Windows NT 6.1) AppleWebKit/536.3 (KHTML, like Gecko) Chrome/19.0.1061.1 Safari/536.3',
    'Mozilla/5.0 (Windows; U; Windows NT 6.0; en-US) AppleWebKit/532.0 (KHTML, like Gecko) Chrome/4.0.201.1 Safari/532.0',
    'Mozilla/5.0 (Windows; U; Windows NT 6.1; en-US) AppleWebKit/532.1 (KHTML, like Gecko) Chrome/4.0.213.1 Safari/532.1',
    'Mozilla/5.0 (Windows; U; Windows NT 6.1; en-US) AppleWebKit/530.6 (KHTML, like Gecko) Chrome/2.0.174.0 Safari/530.6',
    'Mozilla/5.0 (Windows; U; Windows NT 5.2; en-US) AppleWebKit/525.19 (KHTML, like Gecko) Chrome/0.3.154.6 Safari/525.19',
    'Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US) AppleWebKit/534.13 (KHTML, like Gecko) Chrome/9.0.599.0 Safari/534.13',
    'Mozilla/5.0 (Windows; U; Windows NT 6.0; en-US) AppleWebKit/534.8 (KHTML, like Gecko) Chrome/7.0.521.0 Safari/534.8',
    'Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.9.1b2pre) Gecko/20081015 Fennec/1.0a1',
    'Mozilla/5.0 (iPod; U; CPU iPhone OS 4_3_3 like Mac OS X; en-us) AppleWebKit/533.17.9 (KHTML, like Gecko) Version/5.0.2 Mobile/8J2 Safari/6533.18.5'
]
| 112.502618
| 221
| 0.674283
| 4,354
| 21,488
| 3.324529
| 0.065457
| 0.100587
| 0.100725
| 0.168014
| 0.868739
| 0.839171
| 0.784732
| 0.755302
| 0.738515
| 0.69361
| 0
| 0.198061
| 0.14073
| 21,488
| 191
| 222
| 112.502618
| 0.585897
| 0
| 0
| 0
| 0
| 0.984293
| 0.928661
| 0.004328
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 10
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.